@Test
public void spillFilesAreDeletedWhenStoppingAfterError() throws IOException {
  // Regression check: when the writer is stopped without producing output
  // (stop(false) simulates a task failure), any spill files created by an
  // earlier forced spill must still be deleted.
  final UnsafeShuffleWriter<Object, Object> failingWriter = createWriter(false);
  failingWriter.insertRecordIntoSorter(new Tuple2<>(1, 1));
  failingWriter.insertRecordIntoSorter(new Tuple2<>(2, 2));
  // Force a spill so there is at least one on-disk file to clean up.
  failingWriter.forceSorterToSpill();
  failingWriter.insertRecordIntoSorter(new Tuple2<>(2, 2));
  // false == "the task did not succeed"; no map output should be committed.
  failingWriter.stop(false);
  assertSpillFilesWereCleanedUp();
}
@Test
public void spillFilesAreDeletedWhenStoppingAfterError() throws IOException {
  // Regression check: when the writer is stopped without producing output
  // (stop(false) simulates a task failure), any spill files created by an
  // earlier forced spill must still be deleted.
  final UnsafeShuffleWriter<Object, Object> failingWriter = createWriter(false);
  failingWriter.insertRecordIntoSorter(new Tuple2<>(1, 1));
  failingWriter.insertRecordIntoSorter(new Tuple2<>(2, 2));
  // Force a spill so there is at least one on-disk file to clean up.
  failingWriter.forceSorterToSpill();
  failingWriter.insertRecordIntoSorter(new Tuple2<>(2, 2));
  // false == "the task did not succeed"; no map output should be committed.
  failingWriter.stop(false);
  assertSpillFilesWereCleanedUp();
}
@Test
public void spillFilesAreDeletedWhenStoppingAfterError() throws IOException {
  // Regression check: when the writer is stopped without producing output
  // (stop(false) simulates a task failure), any spill files created by an
  // earlier forced spill must still be deleted.
  final UnsafeShuffleWriter<Object, Object> failingWriter = createWriter(false);
  failingWriter.insertRecordIntoSorter(new Tuple2<>(1, 1));
  failingWriter.insertRecordIntoSorter(new Tuple2<>(2, 2));
  // Force a spill so there is at least one on-disk file to clean up.
  failingWriter.forceSorterToSpill();
  failingWriter.insertRecordIntoSorter(new Tuple2<>(2, 2));
  // false == "the task did not succeed"; no map output should be committed.
  failingWriter.stop(false);
  assertSpillFilesWereCleanedUp();
}
@Override public void write(scala.collection.Iterator<Product2<K, V>> records) throws IOException { // Keep track of success so we know if we encountered an exception // We do this rather than a standard try/catch/re-throw to handle // generic throwables. boolean success = false; try { while (records.hasNext()) { insertRecordIntoSorter(records.next()); } closeAndWriteOutput(); success = true; } finally { if (sorter != null) { try { sorter.cleanupResources(); } catch (Exception e) { // Only throw this error if we won't be masking another // error. if (success) { throw e; } else { logger.error("In addition to a failure during writing, we failed during " + "cleanup.", e); } } } } }
@Override public void write(scala.collection.Iterator<Product2<K, V>> records) throws IOException { // Keep track of success so we know if we encountered an exception // We do this rather than a standard try/catch/re-throw to handle // generic throwables. boolean success = false; try { while (records.hasNext()) { insertRecordIntoSorter(records.next()); } closeAndWriteOutput(); success = true; } finally { if (sorter != null) { try { sorter.cleanupResources(); } catch (Exception e) { // Only throw this error if we won't be masking another // error. if (success) { throw e; } else { logger.error("In addition to a failure during writing, we failed during " + "cleanup.", e); } } } } }
@Override public void write(scala.collection.Iterator<Product2<K, V>> records) throws IOException { // Keep track of success so we know if we encountered an exception // We do this rather than a standard try/catch/re-throw to handle // generic throwables. boolean success = false; try { while (records.hasNext()) { insertRecordIntoSorter(records.next()); } closeAndWriteOutput(); success = true; } finally { if (sorter != null) { try { sorter.cleanupResources(); } catch (Exception e) { // Only throw this error if we won't be masking another // error. if (success) { throw e; } else { logger.error("In addition to a failure during writing, we failed during " + "cleanup.", e); } } } } }
// NOTE(review): fragment — the enclosing test method and the loop that
// populates `dataToWrite` are not visible in this chunk; the stray `i`
// below belongs to that outer loop. Presumably this exercises a write that
// spills once mid-stream and then verifies a successful stop — confirm
// against the full suite.
dataToWrite.add(new Tuple2<>(i, i));
// Insert the first four records, then force a spill so the final output
// must merge an on-disk spill file with the remaining in-memory records.
writer.insertRecordIntoSorter(dataToWrite.get(0));
writer.insertRecordIntoSorter(dataToWrite.get(1));
writer.insertRecordIntoSorter(dataToWrite.get(2));
writer.insertRecordIntoSorter(dataToWrite.get(3));
writer.forceSorterToSpill();
writer.insertRecordIntoSorter(dataToWrite.get(4));
writer.insertRecordIntoSorter(dataToWrite.get(5));
writer.closeAndWriteOutput();
// stop(true) signals task success; the writer yields an Option<MapStatus>.
final Option<MapStatus> mapStatus = writer.stop(true);
// NOTE(review): fragment — the enclosing test method and the loop that
// populates `dataToWrite` are not visible in this chunk; the stray `i`
// below belongs to that outer loop. Presumably this exercises a write that
// spills once mid-stream and then verifies a successful stop — confirm
// against the full suite.
dataToWrite.add(new Tuple2<>(i, i));
// Insert the first four records, then force a spill so the final output
// must merge an on-disk spill file with the remaining in-memory records.
writer.insertRecordIntoSorter(dataToWrite.get(0));
writer.insertRecordIntoSorter(dataToWrite.get(1));
writer.insertRecordIntoSorter(dataToWrite.get(2));
writer.insertRecordIntoSorter(dataToWrite.get(3));
writer.forceSorterToSpill();
writer.insertRecordIntoSorter(dataToWrite.get(4));
writer.insertRecordIntoSorter(dataToWrite.get(5));
writer.closeAndWriteOutput();
// stop(true) signals task success; the writer yields an Option<MapStatus>.
final Option<MapStatus> mapStatus = writer.stop(true);
// NOTE(review): fragment — the enclosing test method and the loop that
// populates `dataToWrite` are not visible in this chunk; the stray `i`
// below belongs to that outer loop. Presumably this exercises a write that
// spills once mid-stream and then verifies a successful stop — confirm
// against the full suite.
dataToWrite.add(new Tuple2<>(i, i));
// Insert the first four records, then force a spill so the final output
// must merge an on-disk spill file with the remaining in-memory records.
writer.insertRecordIntoSorter(dataToWrite.get(0));
writer.insertRecordIntoSorter(dataToWrite.get(1));
writer.insertRecordIntoSorter(dataToWrite.get(2));
writer.insertRecordIntoSorter(dataToWrite.get(3));
writer.forceSorterToSpill();
writer.insertRecordIntoSorter(dataToWrite.get(4));
writer.insertRecordIntoSorter(dataToWrite.get(5));
writer.closeAndWriteOutput();
// stop(true) signals task success; the writer yields an Option<MapStatus>.
final Option<MapStatus> mapStatus = writer.stop(true);
// NOTE(review): this fragment appears truncated/garbled — the inner
// `for (int i ...)` re-declares `i` inside the outer `i` loop's scope,
// which would not compile as-is, and the braces are unbalanced. It looks
// like two versions of a peak-memory tracking test were spliced together.
// TODO: restore this block from the original suite before relying on it.
try {
  for (int i = 0; i < numRecordsPerPage * 10; i++) {
    writer.insertRecordIntoSorter(new Tuple2<Object, Object>(1, 1));
    // Peak memory is sampled after every insert; on page boundaries it is
    // compared against the previously observed peak.
    newPeakMemory = writer.getPeakMemoryUsedBytes();
    if (i % numRecordsPerPage == 0) {
      assertEquals(previousPeakMemory, newPeakMemory);
      for (int i = 0; i < numRecordsPerPage; i++) {
        writer.insertRecordIntoSorter(new Tuple2<Object, Object>(1, 1));
// NOTE(review): this fragment appears truncated/garbled — the inner
// `for (int i ...)` re-declares `i` inside the outer `i` loop's scope,
// which would not compile as-is, and the braces are unbalanced. It looks
// like two versions of a peak-memory tracking test were spliced together.
// TODO: restore this block from the original suite before relying on it.
try {
  for (int i = 0; i < numRecordsPerPage * 10; i++) {
    writer.insertRecordIntoSorter(new Tuple2<Object, Object>(1, 1));
    // Peak memory is sampled after every insert; on page boundaries it is
    // compared against the previously observed peak.
    newPeakMemory = writer.getPeakMemoryUsedBytes();
    if (i % numRecordsPerPage == 0) {
      assertEquals(previousPeakMemory, newPeakMemory);
      for (int i = 0; i < numRecordsPerPage; i++) {
        writer.insertRecordIntoSorter(new Tuple2<Object, Object>(1, 1));
// NOTE(review): this fragment appears truncated/garbled — the inner
// `for (int i ...)` re-declares `i` inside the outer `i` loop's scope,
// which would not compile as-is, and the braces are unbalanced. It looks
// like two versions of a peak-memory tracking test were spliced together.
// TODO: restore this block from the original suite before relying on it.
try {
  for (int i = 0; i < numRecordsPerPage * 10; i++) {
    writer.insertRecordIntoSorter(new Tuple2<Object, Object>(1, 1));
    // Peak memory is sampled after every insert; on page boundaries it is
    // compared against the previously observed peak.
    newPeakMemory = writer.getPeakMemoryUsedBytes();
    if (i % numRecordsPerPage == 0) {
      assertEquals(previousPeakMemory, newPeakMemory);
      for (int i = 0; i < numRecordsPerPage; i++) {
        writer.insertRecordIntoSorter(new Tuple2<Object, Object>(1, 1));