Refine search
/** Verifies that writeRecord() hands the tuple's two fields to the wrapped Hadoop RecordWriter. */
@Test
public void testWriteRecord() throws Exception {
    RecordWriter<String, Long> writer = mock(DummyRecordWriter.class);
    HadoopOutputFormat<String, Long> outputFormat =
            setupHadoopOutputFormat(new DummyOutputFormat(), Job.getInstance(), writer, null, new Configuration());

    outputFormat.writeRecord(new Tuple2<String, Long>());

    // the record must be forwarded to the wrapped writer exactly once
    verify(writer, times(1)).write(nullable(String.class), nullable(Long.class));
}
/** * commit the task by moving the output file out from the temporary directory. * @throws java.io.IOException */ @Override public void close() throws IOException { // enforce sequential close() calls synchronized (CLOSE_MUTEX) { try { this.recordWriter.close(this.context); } catch (InterruptedException e) { throw new IOException("Could not close RecordReader.", e); } if (this.outputCommitter.needsTaskCommit(this.context)) { this.outputCommitter.commitTask(this.context); } Path outputPath = new Path(this.configuration.get("mapred.output.dir")); // rename tmp-file to final name FileSystem fs = FileSystem.get(outputPath.toUri(), this.configuration); String taskNumberStr = Integer.toString(this.taskNumber); String tmpFileTemplate = "tmp-r-00000"; String tmpFile = tmpFileTemplate.substring(0, 11 - taskNumberStr.length()) + taskNumberStr; if (fs.exists(new Path(outputPath.toString() + "/" + tmpFile))) { fs.rename(new Path(outputPath.toString() + "/" + tmpFile), new Path(outputPath.toString() + "/" + taskNumberStr)); } } }
/**
 * Writes ten Emp records through the output format and returns them keyed by id,
 * so the caller can later read them back and compare against this map.
 */
private Map<Integer, Emp> addTestData() throws IOException, InterruptedException {
    final int baseDays = 2000;
    final int baseSal = 20;
    RecordWriter<Object, Emp> writer = outputFormat.getRecordWriter(fakeTaskAttemptContext);
    Map<Integer, Emp> written = new HashMap<>();
    for (int id = 0; id < 10; id++) {
        Emp emp = new Emp(id, "name " + id, baseDays + id, baseSal + id);
        writer.write(null, emp);
        written.put(id, emp);
    }
    writer.close(fakeTaskAttemptContext);
    return written;
}
Configuration conf = new Configuration(this.util.getConfiguration()); RecordWriter<ImmutableBytesWritable, Cell> writer = null; TaskAttemptContext context = null; conf.set("io.seqfile.compression.type", "NONE"); conf.set("hbase.fs.tmp.dir", dir.toString()); conf.setBoolean(HFileOutputFormat2.LOCALITY_SENSITIVE_CONF_KEY, false); writer.close(context); FileSystem fs = dir.getFileSystem(conf); assertEquals(htd.getFamilies().size(), families.length); for (FileStatus f : families) { String familyStr = f.getPath().getName(); HColumnDescriptor hcd = htd.getFamily(Bytes.toBytes(familyStr));
public void test_WritingTagData() throws Exception { Configuration conf = new Configuration(this.util.getConfiguration()); final String HFILE_FORMAT_VERSION_CONF_KEY = "hfile.format.version"; conf.setInt(HFILE_FORMAT_VERSION_CONF_KEY, HFile.MIN_FORMAT_VERSION_WITH_TAGS); RecordWriter<ImmutableBytesWritable, Cell> writer = null; TaskAttemptContext context = null; util.getDataTestDir("WritingTagData"); try { conf.set(HFileOutputFormat2.OUTPUT_TABLE_NAME_CONF_KEY, TABLE_NAMES[0].getNameAsString()); conf.setBoolean(HFileOutputFormat2.LOCALITY_SENSITIVE_CONF_KEY, false); tags.add(new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(978670))); KeyValue kv = new KeyValue(b, b, b, HConstants.LATEST_TIMESTAMP, b, tags); writer.write(new ImmutableBytesWritable(), kv); writer.close(context); writer = null; FileSystem fs = dir.getFileSystem(conf); RemoteIterator<LocatedFileStatus> iterator = fs.listFiles(dir, true); while(iterator.hasNext()) { if (writer != null && context != null) writer.close(context); dir.getFileSystem(conf).delete(dir, true);
@Override public void close(boolean abort) throws IOException { try { fileWriter.close(null); if (abort) { return; FileSystem fs = outputdir.getFileSystem(jc); fs.mkdirs(columnFamilyPath); Path srcDir = taskAttemptOutputdir; if (srcDir.getName().equals(columnFamilyName)) { break; fs.rename( regionFile.getPath(), new Path( columnFamilyPath, regionFile.getPath().getName()));
@Ignore("Goes zombie too frequently; needs work. See HBASE-14563") @Test public void test_TIMERANGE() throws Exception { Configuration conf = new Configuration(this.util.getConfiguration()); RecordWriter<ImmutableBytesWritable, Cell> writer = null; TaskAttemptContext context = null; writer.write(new ImmutableBytesWritable(), kv); assertEquals(original,kv); writer.write(new ImmutableBytesWritable(), kv); assertEquals(original, kv); writer.close(context); Path attemptDirectory = hof.getDefaultWorkFile(context, "").getParent(); FileStatus[] sub1 = fs.listStatus(attemptDirectory); FileStatus[] file = fs.listStatus(sub1[0].getPath()); rd.close(); } finally { if (writer != null && context != null) writer.close(context); dir.getFileSystem(conf).delete(dir, true);
/** close() must commit the task when the committer reports needsTaskCommit == true. */
@Test
public void testCloseWithNeedsTaskCommitTrue() throws Exception {
    RecordWriter<String, Long> writer = Mockito.mock(DummyRecordWriter.class);
    OutputCommitter committer = setupOutputCommitter(true);
    HadoopOutputFormat<String, Long> outputFormat =
            setupHadoopOutputFormat(new DummyOutputFormat(), Job.getInstance(), writer, committer, new Configuration());

    outputFormat.close();

    // the task is committed exactly once, and the writer is closed exactly once
    verify(committer, times(1)).commitTask(nullable(TaskAttemptContext.class));
    verify(writer, times(1)).close(nullable(TaskAttemptContext.class));
}
/** Fans the key/value pair out to every delegate writer, in registration order. */
@Override
public void write(Object key, Object value) throws IOException, InterruptedException {
    for (RecordWriter delegate : writers) {
        delegate.write(key, value);
    }
}
/** Closes every delegate writer with the given task context, in registration order. */
@Override
public void close(TaskAttemptContext context) throws IOException, InterruptedException {
    for (RecordWriter delegate : writers) {
        delegate.close(context);
    }
}
}
public void test_LATEST_TIMESTAMP_isReplaced() throws Exception { Configuration conf = new Configuration(this.util.getConfiguration()); RecordWriter<ImmutableBytesWritable, Cell> writer = null; TaskAttemptContext context = null; writer.write(new ImmutableBytesWritable(), kv); assertFalse(original.equals(kv)); assertTrue(Bytes.equals(CellUtil.cloneRow(original), CellUtil.cloneRow(kv))); writer.write(new ImmutableBytesWritable(), kv); assertTrue(original.equals(kv)); } finally { if (writer != null && context != null) writer.close(context); dir.getFileSystem(conf).delete(dir, true);
/** close() must NOT commit the task when the committer reports needsTaskCommit == false. */
@Test
public void testCloseWithNeedsTaskCommitFalse() throws Exception {
    RecordWriter<String, Long> writer = Mockito.mock(DummyRecordWriter.class);
    OutputCommitter committer = setupOutputCommitter(false);
    HadoopOutputFormat<String, Long> outputFormat =
            setupHadoopOutputFormat(new DummyOutputFormat(), Job.getInstance(), writer, committer, new Configuration());

    outputFormat.close();

    // no commit may happen, but the writer is still closed exactly once
    verify(committer, times(0)).commitTask(nullable(TaskAttemptContext.class));
    verify(writer, times(1)).close(nullable(TaskAttemptContext.class));
}
/**
 * Writes one Flink record by forwarding its two tuple fields to the wrapped
 * Hadoop {@code RecordWriter}.
 *
 * <p>The wrapped writer's {@code InterruptedException} is translated to an
 * {@code IOException} to satisfy this interface; the thread's interrupt status
 * is restored so callers can still observe the interruption.
 *
 * @param record the (key, value) pair to write; {@code f0} is the key, {@code f1} the value
 * @throws IOException if the underlying writer fails or is interrupted
 */
@Override
public void writeRecord(Tuple2<K, V> record) throws IOException {
    try {
        this.recordWriter.write(record.f0, record.f1);
    } catch (InterruptedException e) {
        // preserve the interrupt flag instead of silently swallowing it
        Thread.currentThread().interrupt();
        throw new IOException("Could not write Record.", e);
    }
}
}
/** Delegates the close call to the single wrapped writer. */
public void close(TaskAttemptContext context) throws IOException, InterruptedException {
    writer.close(context);
}
}
/**
 * Forwards the record to the wrapped Parquet writer.
 *
 * <p>The wrapped writer's {@code InterruptedException} is translated to an
 * {@code IOException} to satisfy this interface; the thread's interrupt status
 * is restored so callers can still observe the interruption.
 *
 * @throws IOException if the underlying writer fails or is interrupted
 */
@Override
public void write(final NullWritable key, final ParquetHiveRecord value) throws IOException {
    try {
        realWriter.write(key, value);
    } catch (final InterruptedException e) {
        // preserve the interrupt flag instead of silently swallowing it
        Thread.currentThread().interrupt();
        throw new IOException(e);
    }
}
/**
 * Closes the wrapped Parquet writer using the stored task context.
 *
 * <p>The {@code reporter} argument is unused; the wrapped writer is closed with
 * the {@code taskContext} captured at construction. The interrupt status is
 * restored before the {@code InterruptedException} is translated to an
 * {@code IOException}.
 *
 * @throws IOException if the underlying writer fails to close or is interrupted
 */
@Override
public void close(final Reporter reporter) throws IOException {
    try {
        realWriter.close(taskContext);
    } catch (final InterruptedException e) {
        // preserve the interrupt flag instead of silently swallowing it
        Thread.currentThread().interrupt();
        throw new IOException(e);
    }
}
/**
 * Exercises the writer with every null / non-null combination of key and value,
 * and guarantees the writer is closed even if one of the writes throws.
 */
private void writeOutput(RecordWriter theRecordWriter, TaskAttemptContext context)
        throws IOException, InterruptedException {
    NullWritable nil = NullWritable.get();
    try {
        theRecordWriter.write(key1, val1);
        theRecordWriter.write(null, nil);
        theRecordWriter.write(null, val1);
        theRecordWriter.write(nil, val2);
        theRecordWriter.write(key2, nil);
        theRecordWriter.write(key1, null);
        theRecordWriter.write(null, null);
        theRecordWriter.write(key2, val2);
    } finally {
        theRecordWriter.close(context);
    }
}
/**
 * Forwards the record to the wrapped Parquet writer.
 *
 * <p>The wrapped writer's {@code InterruptedException} is translated to an
 * {@code IOException} to satisfy this interface; the thread's interrupt status
 * is restored so callers can still observe the interruption.
 *
 * @throws IOException if the underlying writer fails or is interrupted
 */
@Override
public void write(final NullWritable key, final ParquetHiveRecord value) throws IOException {
    try {
        realWriter.write(key, value);
    } catch (final InterruptedException e) {
        // preserve the interrupt flag instead of silently swallowing it
        Thread.currentThread().interrupt();
        throw new IOException(e);
    }
}
/**
 * Closes the wrapped Parquet writer using the stored task context.
 *
 * <p>The {@code reporter} argument is unused; the wrapped writer is closed with
 * the {@code taskContext} captured at construction. The interrupt status is
 * restored before the {@code InterruptedException} is translated to an
 * {@code IOException}.
 *
 * @throws IOException if the underlying writer fails to close or is interrupted
 */
@Override
public void close(final Reporter reporter) throws IOException {
    try {
        realWriter.close(taskContext);
    } catch (final InterruptedException e) {
        // preserve the interrupt flag instead of silently swallowing it
        Thread.currentThread().interrupt();
        throw new IOException(e);
    }
}