@Override
public Object set(final Object o, final String string) {
  // Guard before constructing: new BytesWritable(null) would throw a NullPointerException.
  return string == null ? null : new BytesWritable(string.getBytes(StandardCharsets.UTF_8));
}
public BytesWritable toBytesWritable() { return new BytesWritable(toBytes()); }
@Override
public void configure(JobConf job) {
  StringBuilder builder = new StringBuilder();
  int size = job.getInt("value.size", -1);
  for (int i = 0; i < size; i++) {
    builder.append('a');
  }
  this.value = new BytesWritable(builder.toString().getBytes());
}
@Override
public Object terminate(AggregationBuffer agg) throws HiveException {
  HyperLogLog hll = ((HyperLogLogBuffer) agg).hll;
  output.reset();
  try {
    HyperLogLogUtils.serializeHLL(output, hll);
  } catch (IOException ioe) {
    throw new HiveException(ioe);
  }
  return new BytesWritable(output.toByteArray());
}
public ExportSnapshotInputSplit(final List<Pair<SnapshotFileInfo, Long>> snapshotFiles) {
  this.files = new ArrayList<>(snapshotFiles.size());
  for (Pair<SnapshotFileInfo, Long> fileInfo : snapshotFiles) {
    this.files.add(new Pair<>(new BytesWritable(fileInfo.getFirst().toByteArray()), fileInfo.getSecond()));
    this.length += fileInfo.getSecond();
  }
}
@Override
protected void innerMap(InputRow inputRow, Context context) throws IOException, InterruptedException {
  final List<Object> groupKey = Rows.toGroupKey(
      rollupGranularity.bucketStart(inputRow.getTimestamp()).getMillis(),
      inputRow
  );
  context.write(
      new BytesWritable(HadoopDruidIndexerConfig.JSON_MAPPER.writeValueAsBytes(groupKey)),
      NullWritable.get()
  );
  context.getCounter(HadoopDruidIndexerConfig.IndexJobCounters.ROWS_PROCESSED_COUNTER).increment(1);
}
@Override
public void run(Context context) throws IOException, InterruptedException {
  setup(context);
  while (context.nextKeyValue()) {
    map(context.getCurrentKey(), context.getCurrentValue(), context);
  }
  for (Map.Entry<Interval, HyperLogLogCollector> entry : hyperLogLogs.entrySet()) {
    context.write(
        new LongWritable(entry.getKey().getStartMillis()),
        new BytesWritable(entry.getValue().toByteArray())
    );
  }
  cleanup(context);
}
@Override
public BytesWritable getCurrentKey() throws IOException, InterruptedException {
  byte[] bytes = new byte[ROWKEY_LENGTH];
  rand.nextBytes(bytes);
  return new BytesWritable(bytes);
}
private static BytesWritable bytes(int... items) {
  BytesWritable result = new BytesWritable();
  result.setSize(items.length);
  for (int i = 0; i < items.length; ++i) {
    result.getBytes()[i] = (byte) items[i];
  }
  return result;
}
@Override
public Object encode(byte[] record) throws SerializationError {
  try {
    BytesWritable blob = new BytesWritable();
    blob.set(record, 0, record.length);
    return serde.deserialize(blob);
  } catch (SerDeException e) {
    throw new SerializationError("Unable to convert byte[] record into Object", e);
  }
}
private void flushIndexToContextAndClose(BytesWritable key, IncrementalIndex index, Context context)
    throws IOException, InterruptedException {
  final List<String> dimensions = index.getDimensionNames();
  Iterator<Row> rows = index.iterator();
  while (rows.hasNext()) {
    context.progress();
    Row row = rows.next();
    InputRow inputRow = getInputRowFromRow(row, dimensions);
    // reportParseExceptions is true as any unparseable data is already handled by the mapper.
    InputRowSerde.SerializeResult serializeResult = InputRowSerde.toBytes(typeHelperMap, inputRow, combiningAggs);
    context.write(key, new BytesWritable(serializeResult.getSerializedRow()));
  }
  index.close();
}
private void runAndVerifyBin(byte[] binV, String expResult, UDFMd5 udf) throws HiveException {
  BytesWritable binWr = binV != null ? new BytesWritable(binV) : null;
  Text output = (Text) udf.evaluate(binWr);
  assertEquals("md5() test ", expResult, output != null ? output.toString() : null);
}
private void runAndVerifyBin(byte[] binV, String expResult, UDFSha1 udf) throws HiveException {
  BytesWritable binWr = binV != null ? new BytesWritable(binV) : null;
  Text output = (Text) udf.evaluate(binWr);
  assertEquals("sha1() test ", expResult, output != null ? output.toString() : null);
}
private void runAndVerifyStr(String strBase64, Text keyWr, String expResult, GenericUDFAesDecrypt udf)
    throws HiveException {
  DeferredObject valueObj0 = new DeferredJavaObject(
      strBase64 != null ? new BytesWritable(Base64.decodeBase64(strBase64)) : null);
  DeferredObject valueObj1 = new DeferredJavaObject(keyWr);
  DeferredObject[] args = { valueObj0, valueObj1 };
  BytesWritable output = (BytesWritable) udf.evaluate(args);
  String expResultHex = expResult == null ? null : Hex.encodeHexString(expResult.getBytes());
  assertEquals("aes_decrypt() test ", expResultHex, output != null ? copyBytesAndHex(output) : null);
}
private void runAndVerifyBin(String strBase64, BytesWritable keyWr, String expResult, GenericUDFAesDecrypt udf)
    throws HiveException {
  DeferredObject valueObj0 = new DeferredJavaObject(
      strBase64 != null ? new BytesWritable(Base64.decodeBase64(strBase64)) : null);
  DeferredObject valueObj1 = new DeferredJavaObject(keyWr);
  DeferredObject[] args = { valueObj0, valueObj1 };
  BytesWritable output = (BytesWritable) udf.evaluate(args);
  String expResultHex = expResult == null ? null : Hex.encodeHexString(expResult.getBytes());
  assertEquals("aes_decrypt() test ", expResultHex, output != null ? copyBytesAndHex(output) : null);
}
public void testHexConversion() {
  byte[] bytes = "string".getBytes();
  // Let's make sure we only read the relevant part of the writable in case of reuse
  byte[] longBytes = "longer string".getBytes();
  BytesWritable writable = new BytesWritable(longBytes);
  writable.set(bytes, 0, bytes.length);
  UDFHex udf = new UDFHex();
  Text text = udf.evaluate(writable);
  String hexString = text.toString();
  assertEquals("737472696E67", hexString);
}
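The hex-conversion test above (and the testReuse case further down) rely on the same BytesWritable property: after set(...) shrinks the logical size, getBytes() still returns the whole backing array, so only the first getLength() bytes are valid. A minimal standalone sketch of that behavior, using only the plain Hadoop API (the class name and sample strings here are illustrative, not from any snippet above):

import java.nio.charset.StandardCharsets;
import org.apache.hadoop.io.BytesWritable;

public class BytesWritableReuseDemo {
  public static void main(String[] args) {
    // Start with a 13-byte payload, then reuse the writable for a 6-byte one.
    BytesWritable writable = new BytesWritable("longer string".getBytes(StandardCharsets.UTF_8));
    byte[] shorter = "string".getBytes(StandardCharsets.UTF_8);
    writable.set(shorter, 0, shorter.length);

    System.out.println(writable.getBytes().length);  // backing array: still at least 13 bytes
    System.out.println(writable.getLength());        // logical length: 6
    // copyBytes() trims to the logical length, so this prints "string", not "stringstring?"-style garbage.
    System.out.println(new String(writable.copyBytes(), StandardCharsets.UTF_8));
  }
}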
private Writable getWritableValue(TypeInfo ti, byte[] value) {
  if (ti.equals(TypeInfoFactory.stringTypeInfo)) {
    return new Text(value);
  } else if (ti.equals(TypeInfoFactory.varcharTypeInfo)) {
    return new HiveVarcharWritable(new HiveVarchar(new Text(value).toString(), -1));
  } else if (ti.equals(TypeInfoFactory.binaryTypeInfo)) {
    return new BytesWritable(value);
  }
  return null;
}
private void runAndVerifyBin(byte[] binV, Long expResult, UDFCrc32 udf) throws HiveException {
  BytesWritable binWr = binV != null ? new BytesWritable(binV) : null;
  LongWritable output = (LongWritable) udf.evaluate(binWr);
  if (expResult == null) {
    assertNull(output);
  } else {
    assertNotNull(output);
    assertEquals("crc32() test ", expResult.longValue(), output.get());
  }
}
@Test
public void testReuse() {
  BytesWritable value = new BytesWritable();
  byte[] first = "hello world".getBytes(UTF_8);
  value.set(first, 0, first.length);
  byte[] second = "bye".getBytes(UTF_8);
  value.set(second, 0, second.length);
  Type type = new TypeToken<Map<BytesWritable, Long>>() {}.getType();
  ObjectInspector inspector = getInspector(type);
  Block actual = getBlockObject(mapType(createUnboundedVarcharType(), BIGINT), ImmutableMap.of(value, 0L), inspector);
  Block expected = mapBlockOf(createUnboundedVarcharType(), BIGINT, "bye", 0L);
  assertBlockEquals(actual, expected);
}
BigRow(byte[] val, long rowId, long origTxn, int bucket) {
  field = new BytesWritable(val);
  bucket = BucketCodec.V1.encode(new AcidOutputFormat.Options(null).bucket(bucket));
  this.rowId = new RecordIdentifier(origTxn, bucket, rowId);
}