@Override
public void write(Object key, Object value) throws IOException, InterruptedException {
  // Fan the record out to every registered delegate, in registration order.
  // Any failure from a delegate aborts the remaining writes.
  for (RecordWriter delegate : writers) {
    delegate.write(key, value);
  }
}
@Override
public void writeRecord(Tuple2<K, V> record) throws IOException {
  // Delegate to the wrapped Hadoop RecordWriter. The OutputFormat contract
  // only declares IOException, so interruption must be translated — but the
  // thread's interrupt status is restored first so callers can still observe it.
  try {
    this.recordWriter.write(record.f0, record.f1);
  } catch (InterruptedException e) {
    Thread.currentThread().interrupt();
    throw new IOException("Could not write Record.", e);
  }
}
}
@Override
public void write(final NullWritable key, final ParquetHiveRecord value) throws IOException {
  // Delegate to the real Parquet writer; its API may throw InterruptedException,
  // which this interface does not declare. Restore the interrupt status before
  // translating so interruption is not silently swallowed.
  try {
    realWriter.write(key, value);
  } catch (final InterruptedException e) {
    Thread.currentThread().interrupt();
    throw new IOException(e);
  }
}
@Override
public void write(final NullWritable key, final ParquetHiveRecord value) throws IOException {
  // Delegate to the real Parquet writer; its API may throw InterruptedException,
  // which this interface does not declare. Restore the interrupt status before
  // translating so interruption is not silently swallowed.
  try {
    realWriter.write(key, value);
  } catch (final InterruptedException e) {
    Thread.currentThread().interrupt();
    throw new IOException(e);
  }
}
@Override
public void write(Writable key, Writable value) throws IOException, InterruptedException {
  Text _key = (Text) key;
  KeyValue _value = (KeyValue) value;
  // Hadoop Text stores UTF-8 bytes; decode them explicitly as UTF-8 instead of
  // relying on the platform default charset (which may not be UTF-8 pre-Java 18).
  String alias =
      new String(_key.getBytes(), 0, _key.getLength(), java.nio.charset.StandardCharsets.UTF_8);
  // The alias selects which registered base OutputFormat receives this record.
  BaseRecordWriterContainer baseRWContainer = baseRecordWriters.get(alias);
  if (baseRWContainer == null) {
    throw new IllegalArgumentException("OutputFormat with alias " + alias + " has not been added");
  }
  baseRWContainer.getRecordWriter().write(_value.getKey(), _value.getValue());
}
@Override
public void putNext(Tuple t) throws IOException {
  // Wrap the Pig tuple in the reusable writable holder and hand it to the
  // underlying Hadoop record writer (key is unused by this output format).
  pigTuple.setTuple(t);
  if (trace) {
    log.trace("Writing out tuple " + t);
  }
  try {
    writer.write(null, pigTuple);
  } catch (InterruptedException ex) {
    // Restore the interrupt status before translating to the declared
    // unchecked exception, so interruption remains observable upstream.
    Thread.currentThread().interrupt();
    throw new EsHadoopIllegalArgumentException("interrupted", ex);
  }
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override
public void write(Object key, Object val) throws IOException, InterruptedException {
  // A cancelled task must not emit any further output.
  if (cancelled) {
    throw new HadoopTaskCancelledException("Task cancelled.");
  }
  // Prefer the user-supplied writer when one is configured.
  if (writer != null) {
    writer.write(key, val);
    return;
  }
  try {
    output.write(key, val);
  } catch (IgniteCheckedException e) {
    throw new IOException(e);
  }
}
valBytes); try { fileWriter.write(null, kv); } catch (IOException e) { LOG.error("Failed while writing row: " + s);
@SuppressWarnings({"unchecked"})
public void write(Object key, Object value) throws IOException, InterruptedException {
  // Count every record that flows through this output, then delegate the
  // actual write to the wrapped record writer.
  context.getCounter(COUNTERS_GROUP, counterName).increment(1);
  writer.write(key, value);
}
private void writePut(PutWritable put) throws IOException {
  // One row key is shared by every cell of this Put.
  final ImmutableBytesWritable rowKey = new ImmutableBytesWritable(put.getPut().getRow());
  final SortedMap<byte[], List<Cell>> familyCells = put.getPut().getFamilyCellMap();
  for (Map.Entry<byte[], List<Cell>> perFamily : familyCells.entrySet()) {
    final List<Cell> cells = perFamily.getValue();
    // Cells must be emitted in comparator order within each family.
    Collections.sort(cells, new CellComparatorImpl());
    for (Cell cell : cells) {
      try {
        fileWriter.write(rowKey, KeyValueUtil.copyToNewKeyValue(cell));
      } catch (InterruptedException e) {
        // Convert to the IOException subtype that signals interruption.
        throw (InterruptedIOException) new InterruptedIOException().initCause(e);
      }
    }
  }
}
@Override
public void putNext(Tuple tuple) throws IOException {
  // Convert each Pig field into its HCatalog representation, pairing fields
  // with the computed schema positionally.
  final List<Object> converted = new ArrayList<Object>(tuple.size());
  int pos = 0;
  for (HCatFieldSchema fieldSchema : computedSchema.getFields()) {
    converted.add(getJavaObj(tuple.get(pos++), fieldSchema));
  }
  try {
    writer.write(null, new DefaultHCatRecord(converted));
  } catch (InterruptedException e) {
    throw new BackendException("Error while writing tuple: " + tuple, PigHCatUtil.PIG_EXCEPTION_CODE, e);
  }
}
/**
 * Writes {@code key}/{@code value} through the record writer bound to
 * {@code namedOutput}, rooted at {@code baseOutputPath}.
 *
 * @param namedOutput the named output name
 * @param key the key
 * @param value the value
 * @param baseOutputPath base-output path to write the record to.
 *     Note: Framework will generate unique filename for the baseOutputPath
 * @throws IllegalArgumentException if {@code namedOutput} was never defined
 */
@SuppressWarnings("unchecked")
public void write(String namedOutput, Object key, Object value, String baseOutputPath)
    throws IOException, InterruptedException {
  // Validate inputs before touching any writer state.
  checkNamedOutputName(context, namedOutput, false);
  checkBaseOutputPath(baseOutputPath);
  if (!namedOutputs.contains(namedOutput)) {
    throw new IllegalArgumentException("Undefined named output '" + namedOutput + "'");
  }
  // Each named output carries its own task context (format, key/value classes).
  TaskAttemptContext namedContext = getContext(namedOutput);
  getRecordWriter(namedContext, baseOutputPath).write(key, value);
}
@Test
public void testWriteRecord() throws Exception {
  // Given a HadoopOutputFormat wrapping a mocked Hadoop RecordWriter...
  RecordWriter<String, Long> mockWriter = mock(DummyRecordWriter.class);
  HadoopOutputFormat<String, Long> format =
      setupHadoopOutputFormat(
          new DummyOutputFormat(), Job.getInstance(), mockWriter, null, new Configuration());

  // ...writing one record must delegate exactly once to the wrapped writer.
  format.writeRecord(new Tuple2<String, Long>());
  verify(mockWriter, times(1)).write(nullable(String.class), nullable(Long.class));
}
/**
 * Write key value to an output file name.
 *
 * Gets the record writer from job's output format. Job's output format should
 * be a FileOutputFormat.
 *
 * @param key the key
 * @param value the value
 * @param keySchema keySchema to use
 * @param valSchema ValueSchema to use
 * @param baseOutputPath base-output path to write the record to. Note: Framework will
 *     generate unique filename for the baseOutputPath
 */
@SuppressWarnings("unchecked")
public void write(Object key, Object value, Schema keySchema, Schema valSchema,
    String baseOutputPath) throws IOException, InterruptedException {
  checkBaseOutputPath(baseOutputPath);
  // Job.getInstance replaces the deprecated Job(Configuration) constructor;
  // both copy the supplied configuration into a fresh Job.
  Job job = Job.getInstance(context.getConfiguration());
  setSchema(job, keySchema, valSchema);
  // Build a per-write task context so the schemas set above take effect.
  TaskAttemptContext taskContext =
      createTaskAttemptContext(job.getConfiguration(), context.getTaskAttemptID());
  getRecordWriter(taskContext, baseOutputPath).write(key, value);
}
// Writes ten deterministic Emp rows through the output format and returns
// them keyed by id, so tests can compare what was written against what is read back.
private Map<Integer, Emp> addTestData() throws IOException, InterruptedException {
  final int baseDays = 2000;
  final int baseSalary = 20;
  RecordWriter<Object, Emp> recordWriter = outputFormat.getRecordWriter(fakeTaskAttemptContext);
  Map<Integer, Emp> written = new HashMap<>();
  for (int id = 0; id < 10; id++) {
    Emp employee = new Emp(id, "name " + id, baseDays + id, baseSalary + id);
    recordWriter.write(null, employee);
    written.put(id, employee);
  }
  recordWriter.close(fakeTaskAttemptContext);
  return written;
}
/**
 * Write random values to the writer assuming a table created using
 * {@link #FAMILIES} as column family descriptors
 */
private void writeRandomKeyValues(RecordWriter<ImmutableBytesWritable, Cell> writer,
    TaskAttemptContext context, Set<byte[]> families, int numRows)
    throws IOException, InterruptedException {
  final int valLength = 10;
  final byte[] keyBytes = new byte[Bytes.SIZEOF_INT];
  final byte[] valBytes = new byte[valLength];
  final int taskId = context.getTaskAttemptID().getTaskID().getId();
  assert taskId < Byte.MAX_VALUE : "Unit tests dont support > 127 tasks!";
  final byte[] qualifier = Bytes.toBytes("data");
  final Random random = new Random();
  // One row per iteration: the row key is the row index, and each requested
  // family gets a single cell with a fresh random value.
  for (int row = 0; row < numRows; row++) {
    Bytes.putInt(keyBytes, 0, row);
    random.nextBytes(valBytes);
    final ImmutableBytesWritable key = new ImmutableBytesWritable(keyBytes);
    for (byte[] family : families) {
      writer.write(key, new KeyValue(keyBytes, family, qualifier, valBytes));
    }
  }
}
tags.add(new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(978670))); KeyValue kv = new KeyValue(b, b, b, HConstants.LATEST_TIMESTAMP, b, tags); writer.write(new ImmutableBytesWritable(), kv); writer.close(context); writer = null;
while (recordItr.hasNext()) { HCatRecord rec = recordItr.next(); writer.write(null, rec);