/**
 * Closes this composite writer by closing each wrapped delegate in order.
 * A failure while closing one delegate aborts the loop, leaving later
 * delegates unclosed.
 *
 * @param context the task attempt context passed through to each delegate
 * @throws IOException if a delegate fails to close
 * @throws InterruptedException if a delegate close is interrupted
 */
@Override
public void close(TaskAttemptContext context) throws IOException, InterruptedException {
  for (final RecordWriter delegate : writers) {
    delegate.close(context);
  }
}
}
// Closes this wrapper by forwarding close() to the single underlying Hadoop
// RecordWriter. NOTE(review): the enclosing class header is outside this view;
// the trailing '}' closes that class.
public void close(TaskAttemptContext context) throws IOException, InterruptedException { writer.close(context); } }
/**
 * Old-API close: delegates to the wrapped new-API writer using the stored
 * task context.
 *
 * @param reporter unused; present only to satisfy the mapred interface
 * @throws IOException if the delegate fails, or (wrapped) if it is interrupted
 */
@Override
public void close(final Reporter reporter) throws IOException {
  try {
    realWriter.close(taskContext);
  } catch (final InterruptedException ie) {
    // The old-API signature has no InterruptedException, so wrap it.
    throw new IOException(ie);
  }
}
// Old-API close(): delegates to the wrapped new-API writer with the saved
// TaskAttemptContext; the Reporter argument is unused. An InterruptedException
// from the delegate is rethrown wrapped in IOException to fit the old-API
// signature, preserving the cause.
@Override public void close(final Reporter reporter) throws IOException { try { realWriter.close(taskContext); } catch (final InterruptedException e) { throw new IOException(e); } }
/**
 * Closes all the opened outputs.
 *
 * This should be called from cleanup method of map/reduce task.
 * If overridden subclasses must invoke <code>super.close()</code> at the
 * end of their <code>close()</code>
 */
@SuppressWarnings("unchecked")
public void close() throws IOException, InterruptedException {
  // Close every cached per-name writer; a failure in one aborts the rest.
  for (final RecordWriter cached : recordWriters.values()) {
    cached.close(context);
  }
}
}
/**
 * Closes writer.
 *
 * @throws Exception If fails and logger hasn't been specified.
 */
protected void closeWriter() throws Exception {
  final RecordWriter current = hadoopCtx.writer();
  if (current == null) {
    return; // no writer was ever opened for this context
  }
  current.close(hadoopCtx);
}
/**
 * Closes the per-alias record writers held by this container, logging each
 * alias as its writer is closed. Each writer is closed with its own saved
 * context rather than the context passed in.
 *
 * @param context the task attempt context for this close call (not forwarded)
 * @throws IOException if an underlying writer fails to close
 * @throws InterruptedException if an underlying close is interrupted
 */
@Override
public void close(TaskAttemptContext context) throws IOException, InterruptedException {
  for (final Entry<String, BaseRecordWriterContainer> aliasAndWriter : baseRecordWriters.entrySet()) {
    final BaseRecordWriterContainer container = aliasAndWriter.getValue();
    LOGGER.info("Closing record writer for alias: " + aliasAndWriter.getKey());
    container.getRecordWriter().close(container.getContext());
  }
}
/** * commit the task by moving the output file out from the temporary directory. * @throws java.io.IOException */ @Override public void close() throws IOException { // enforce sequential close() calls synchronized (CLOSE_MUTEX) { try { this.recordWriter.close(this.context); } catch (InterruptedException e) { throw new IOException("Could not close RecordReader.", e); } if (this.outputCommitter.needsTaskCommit(this.context)) { this.outputCommitter.commitTask(this.context); } Path outputPath = new Path(this.configuration.get("mapred.output.dir")); // rename tmp-file to final name FileSystem fs = FileSystem.get(outputPath.toUri(), this.configuration); String taskNumberStr = Integer.toString(this.taskNumber); String tmpFileTemplate = "tmp-r-00000"; String tmpFile = tmpFileTemplate.substring(0, 11 - taskNumberStr.length()) + taskNumberStr; if (fs.exists(new Path(outputPath.toString() + "/" + tmpFile))) { fs.rename(new Path(outputPath.toString() + "/" + tmpFile), new Path(outputPath.toString() + "/" + taskNumberStr)); } } }
// NOTE(review): this fragment is truncated — the method body continues past
// this view, so only the visible prefix is documented here.
// Closes the underlying file writer (passing null as the context), then
// returns early when the task is being aborted.
@Override public void close(boolean abort) throws IOException { try { fileWriter.close(null); if (abort) { return;
/**
 * When the committer reports no task commit is needed, close() must still
 * close the writer exactly once but must never call commitTask.
 */
@Test
public void testCloseWithNeedsTaskCommitFalse() throws Exception {
  // Given a mocked writer and a committer that declines the commit.
  final RecordWriter<String, Long> recordWriter = Mockito.mock(DummyRecordWriter.class);
  final OutputCommitter outputCommitter = setupOutputCommitter(false);
  final HadoopOutputFormat<String, Long> hadoopOutputFormat =
      setupHadoopOutputFormat(
          new DummyOutputFormat(),
          Job.getInstance(),
          recordWriter,
          outputCommitter,
          new Configuration());

  // When the output format is closed.
  hadoopOutputFormat.close();

  // Then: no commit, exactly one writer close.
  verify(outputCommitter, times(0)).commitTask(nullable(TaskAttemptContext.class));
  verify(recordWriter, times(1)).close(nullable(TaskAttemptContext.class));
}
/**
 * When the committer reports a task commit is needed, close() must invoke
 * commitTask exactly once and close the writer exactly once.
 */
@Test
public void testCloseWithNeedsTaskCommitTrue() throws Exception {
  // Given a mocked writer and a committer that requests the commit.
  final RecordWriter<String, Long> recordWriter = Mockito.mock(DummyRecordWriter.class);
  final OutputCommitter outputCommitter = setupOutputCommitter(true);
  final HadoopOutputFormat<String, Long> hadoopOutputFormat =
      setupHadoopOutputFormat(
          new DummyOutputFormat(),
          Job.getInstance(),
          recordWriter,
          outputCommitter,
          new Configuration());

  // When the output format is closed.
  hadoopOutputFormat.close();

  // Then: one commit, one writer close.
  verify(outputCommitter, times(1)).commitTask(nullable(TaskAttemptContext.class));
  verify(recordWriter, times(1)).close(nullable(TaskAttemptContext.class));
}
/**
 * Writes ten {@code Emp} records through the output format and returns the
 * same records keyed by employee id, for later comparison against what the
 * format persisted.
 *
 * @return map of employee id to the Emp instance that was written
 * @throws IOException if the writer fails
 * @throws InterruptedException if writing is interrupted
 */
private Map<Integer, Emp> addTestData() throws IOException, InterruptedException {
  final int baseDays = 2000;
  final int baseSal = 20;
  final RecordWriter<Object, Emp> writer = outputFormat.getRecordWriter(fakeTaskAttemptContext);
  final Map<Integer, Emp> written = new HashMap<>();
  for (int id = 0; id < 10; id++) {
    final Emp emp = new Emp(id, "name " + id, baseDays + id, baseSal + id);
    writer.write(null, emp);
    written.put(id, emp);
  }
  writer.close(fakeTaskAttemptContext);
  return written;
}
// NOTE(review): fragment view — the surrounding try/finally structure is not
// visible here, and this line appears to merge the try body with cleanup code.
// Writes one KeyValue (all components = b, latest timestamp, with tags), closes
// the writer, then nulls the reference so the later null guard skips a double
// close. NOTE(review): 'fs' is resolved but the delete re-resolves the
// filesystem from 'dir' — presumably collapsed from separate blocks; verify
// against the original file.
KeyValue kv = new KeyValue(b, b, b, HConstants.LATEST_TIMESTAMP, b, tags); writer.write(new ImmutableBytesWritable(), kv); writer.close(context); writer = null; FileSystem fs = dir.getFileSystem(conf); if (writer != null && context != null) writer.close(context); dir.getFileSystem(conf).delete(dir, true);
// Flush and release the record writer for this task attempt context.
writer.close(context);
// NOTE(review): fragment — the enclosing method and closing braces are outside
// this view. Writes the record with a null key, closes the writer, then
// commits the task output only when the committer reports a commit is needed
// for this attempt.
writer.write(null, rec); writer.close(cntxt); if (committer.needsTaskCommit(cntxt)) { committer.commitTask(cntxt);
/**
 * Closes every named data writer, then the shared error writer. A failure
 * closing one data writer aborts the loop and skips the error writer, as in
 * the original implementation.
 *
 * @param context the task attempt context forwarded to each data writer
 * @throws IOException if a writer fails to close
 * @throws InterruptedException if a close is interrupted
 */
@Override
public void close(TaskAttemptContext context) throws IOException, InterruptedException {
  // Iterate values directly instead of keySet() + get(key): a single map
  // traversal with no per-key lookup. Assumes the map values are
  // RecordWriter instances, which matches the close(context) usage.
  for (RecordWriter dataWriter : dataWriters.values()) {
    dataWriter.close(context);
  }
  errorWriter.close();
}
// NOTE(review): fragment — the matching try block and closing braces are
// outside this view. Asserts the round-tripped KeyValue equals the original,
// then in the finally clause closes the writer (only when both writer and
// context exist) and recursively deletes the temporary directory.
assertTrue(original.equals(kv)); } finally { if (writer != null && context != null) writer.close(context); dir.getFileSystem(conf).delete(dir, true);
/**
 * Writes ten (LongWritable, Text) pairs — val1 for odd indices, val2 for even
 * — and always closes the writer, even when a write fails.
 *
 * @param theRecordWriter writer to exercise
 * @param context task attempt context used for the close
 * @throws IOException if a write or the close fails
 * @throws InterruptedException if a write or the close is interrupted
 */
private void writeMapFileOutput(RecordWriter theRecordWriter, TaskAttemptContext context)
    throws IOException, InterruptedException {
  try {
    for (int i = 0; i < 10; ++i) {
      final Text value = (i % 2 == 1) ? val1 : val2;
      theRecordWriter.write(new LongWritable(i), value);
    }
  } finally {
    theRecordWriter.close(context);
  }
}
/**
 * Writes every combination of real keys/values, NullWritable, and plain null
 * through the writer, and always closes it — even if a write throws.
 *
 * @param theRecordWriter writer to exercise
 * @param context task attempt context used for the close
 * @throws IOException if a write or the close fails
 * @throws InterruptedException if a write or the close is interrupted
 */
private void writeOutput(RecordWriter theRecordWriter, TaskAttemptContext context)
    throws IOException, InterruptedException {
  final NullWritable nil = NullWritable.get();
  try {
    theRecordWriter.write(key1, val1);
    theRecordWriter.write(null, nil);
    theRecordWriter.write(null, val1);
    theRecordWriter.write(nil, val2);
    theRecordWriter.write(key2, nil);
    theRecordWriter.write(key1, null);
    theRecordWriter.write(null, null);
    theRecordWriter.write(key2, val2);
  } finally {
    theRecordWriter.close(context);
  }
}