@Before
public void setUp() {
  MockitoAnnotations.initMocks(this);
  tempDir = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "unsafe-test");
  spillFilesCreated.clear();
  taskContext = mock(TaskContext.class);
@After
public void tearDown() {
  try {
    assertEquals(0L, taskMemoryManager.cleanUpAllAllocatedMemory());
  } finally {
    Utils.deleteRecursively(tempDir);
    tempDir = null;
  }
}
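The two fixtures above pair Utils.createTempDir with Utils.deleteRecursively so each test runs against a fresh scratch directory that is removed even if the test fails. Below is a minimal, self-contained sketch of that lifecycle, assuming a JUnit 4 test with org.apache.spark.util.Utils on the classpath; the class name, directory prefix, and test body are illustrative placeholders, not part of the original suite.

import java.io.File;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;

import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import org.apache.spark.util.Utils;

// Hypothetical test class illustrating the createTempDir/deleteRecursively pairing above.
public class TempDirLifecycleSuite {

  private File tempDir;

  @Before
  public void setUp() {
    // Fresh scratch directory for every test case.
    tempDir = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "tempdir-lifecycle");
  }

  @After
  public void tearDown() {
    try {
      // Leak checks (e.g. on a memory manager) would go here, mirroring the suite above.
    } finally {
      // Always remove the scratch directory, even when the test failed.
      Utils.deleteRecursively(tempDir);
      tempDir = null;
    }
  }

  @Test
  public void writesIntoTempDir() throws Exception {
    File out = new File(tempDir, "data.txt");
    Files.write(out.toPath(), "hello".getBytes(StandardCharsets.UTF_8));
    Assert.assertTrue(out.isFile());
  }
}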
return new ObjectPair<Long, Integer>(-1L, -1);
int executorMemoryInMB = Utils.memoryStringToMb(
    sparkConf.get("spark.executor.memory", "512m"));
double memoryFraction = 1.0 - sparkConf.getDouble("spark.storage.memoryFraction", 0.6);
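As a rough illustration of the arithmetic in the fragment above, the sketch below converts the configured executor memory with Utils.memoryStringToMb and keeps only the non-storage fraction. It is not part of the original class; the class name and sample values are invented for the example.

import org.apache.spark.SparkConf;
import org.apache.spark.util.Utils;

// Standalone illustration of the executor-memory math shown in the fragment above.
public final class ExecutorMemoryEstimate {
  public static void main(String[] args) {
    SparkConf sparkConf = new SparkConf(false)
      .set("spark.executor.memory", "2g")
      .set("spark.storage.memoryFraction", "0.6");

    // "2g" -> 2048 MB via Utils.memoryStringToMb.
    int executorMemoryInMB = Utils.memoryStringToMb(
      sparkConf.get("spark.executor.memory", "512m"));

    // Keep only the non-storage share: 1.0 - 0.6 = 0.4,
    // i.e. roughly 819 MB of the 2048 MB in this example.
    double memoryFraction = 1.0 - sparkConf.getDouble("spark.storage.memoryFraction", 0.6);
    long usableMemoryInMB = (long) (executorMemoryInMB * memoryFraction);

    System.out.println(executorMemoryInMB + " MB total, ~" + usableMemoryInMB + " MB after fraction");
  }
}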
PrefixSpanModel<Integer> model = prefixSpan.run(sequences);
File tempDir = Utils.createTempDir(
    System.getProperty("java.io.tmpdir"), "JavaPrefixSpanSuite");
String outputPath = tempDir.getPath();
Utils.deleteRecursively(tempDir);
/**
 * Release N bytes of execution memory for a MemoryConsumer.
 */
public void releaseExecutionMemory(long size, MemoryConsumer consumer) {
  logger.debug("Task {} release {} from {}", taskAttemptId, Utils.bytesToString(size), consumer);
  memoryManager.releaseExecutionMemory(size, taskAttemptId, consumer.getMode());
}
String oomStackTrace = Utils.exceptionString(oom);
assertThat("expected OutOfMemoryError in " +
    "org.apache.spark.util.collection.unsafe.sort.UnsafeInMemorySorter.reset",
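The assertion above works because Utils.exceptionString flattens a Throwable's full stack trace into a single String that Hamcrest matchers can inspect. The following is a small sketch of that idea with an artificial error; the class name and message are placeholders, not taken from the original test.

import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;

import org.apache.spark.util.Utils;

// Illustrative only: Utils.exceptionString turns a Throwable into its printed stack trace.
public final class ExceptionStringExample {
  public static void main(String[] args) {
    Throwable oom = new OutOfMemoryError("simulated");
    String oomStackTrace = Utils.exceptionString(oom);
    // The flattened trace starts with the error type and message, so it can be matched as text.
    assertThat("expected the simulated message in the flattened stack trace",
      oomStackTrace, containsString("simulated"));
    System.out.println(oomStackTrace);
  }
}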
boolean copyThrewException = true;
try {
  lengths[i] = Utils.copyStream(in, out, false, transferToEnabled);
  copyThrewException = false;
} finally {
final FileChannel spillInputChannel = spillInputChannels[i];
final long writeStartTime = System.nanoTime();
Utils.copyFileStreamNIO(
    spillInputChannel,
    mergedFileOutputChannel,
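The fragment above hands two open channels to Utils.copyFileStreamNIO so a spill file is appended onto the merged output without going through user-space buffers. The sketch below shows the same channel-to-channel transfer using only the JDK's FileChannel.transferTo, as a stand-in for (not a reimplementation of) Spark's helper; the file names are placeholders.

import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

// Plain-JDK illustration of merging spill files by channel-to-channel transfer.
public final class SpillMergeSketch {
  public static void main(String[] args) throws IOException {
    Path[] spills = { Paths.get("spill0.bin"), Paths.get("spill1.bin") };
    Path merged = Paths.get("merged.bin");

    try (FileChannel out = FileChannel.open(merged,
        StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING)) {
      for (Path spill : spills) {
        try (FileChannel in = FileChannel.open(spill, StandardOpenOption.READ)) {
          long size = in.size();
          long transferred = 0;
          // transferTo may copy fewer bytes than requested, so loop until the file is exhausted.
          while (transferred < size) {
            transferred += in.transferTo(transferred, size - transferred, out);
          }
        }
      }
    }
  }
}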
  .run(rdd);
File tempDir = Utils.createTempDir(
    System.getProperty("java.io.tmpdir"), "JavaFPGrowthSuite");
String outputPath = tempDir.getPath();
Utils.deleteRecursively(tempDir);
@Test
public void sequenceFile() {
  File tempDir = Files.createTempDir();
  tempDir.deleteOnExit();
  String outputDir = new File(tempDir, "output").getAbsolutePath();
  List<Tuple2<Integer, String>> pairs = Arrays.asList(
    new Tuple2<>(1, "a"),
    new Tuple2<>(2, "aa"),
    new Tuple2<>(3, "aaa")
  );
  JavaPairRDD<Integer, String> rdd = sc.parallelizePairs(pairs);
  rdd.mapToPair(pair -> new Tuple2<>(new IntWritable(pair._1()), new Text(pair._2())))
    .saveAsHadoopFile(outputDir, IntWritable.class, Text.class, SequenceFileOutputFormat.class);
  // Read the output back as a sequence file
  JavaPairRDD<Integer, String> readRDD = sc.sequenceFile(outputDir, IntWritable.class, Text.class)
    .mapToPair(pair -> new Tuple2<>(pair._1().get(), pair._2().toString()));
  Assert.assertEquals(pairs, readRDD.collect());
  Utils.deleteRecursively(tempDir);
}