/**
 * Returns the sub-range of {@code bw} selected by {@code pos}/{@code len}
 * (substr-style), delegating range resolution to {@code makeIndex}.
 *
 * @param bw  source bytes; null yields null
 * @param pos start position writable; null yields null
 * @param len length writable; null yields null, non-positive yields empty bytes
 * @return the selected byte range, an empty BytesWritable when the range is
 *         empty/invalid, or null when any argument is null
 */
public BytesWritable evaluate(BytesWritable bw, IntWritable pos, IntWritable len) {
  if (bw == null || pos == null || len == null) {
    return null;
  }
  final int requestedLen = len.get();
  if (requestedLen <= 0) {
    return new BytesWritable();
  }
  // makeIndex resolves [start, end) against the actual data length,
  // or returns null when the requested range falls outside it.
  final int[] range = makeIndex(pos.get(), requestedLen, bw.getLength());
  if (range == null) {
    return new BytesWritable();
  }
  return new BytesWritable(Arrays.copyOfRange(bw.getBytes(), range[0], range[1]));
}
/**
 * Wraps the valid bytes of {@code text} in a new BytesWritable.
 * Only the first {@code getLength()} bytes are copied — Text's backing
 * buffer may be longer than its logical content.
 *
 * @param text source text (must be non-null)
 * @return a BytesWritable holding a copy of the text's raw bytes
 */
public static BytesWritable getBinaryFromText(Text text) {
  BytesWritable result = new BytesWritable();
  result.set(text.getBytes(), 0, text.getLength());
  return result;
}
/**
 * Test helper: builds a BytesWritable whose content is each given int
 * narrowed to a byte, in order.
 *
 * @param items byte values expressed as ints for call-site convenience
 * @return a BytesWritable of length {@code items.length}
 */
private static BytesWritable bytes(int... items) {
  BytesWritable bw = new BytesWritable();
  bw.setSize(items.length);
  byte[] backing = bw.getBytes();
  int idx = 0;
  for (int item : items) {
    backing[idx++] = (byte) item;
  }
  return bw;
}
/**
 * Deep-copies a BytesWritable via {@link BytesWritable#set(BytesWritable)}
 * so the result owns its own buffer.
 *
 * @param bw source writable (must be non-null)
 * @return an independent copy of {@code bw}
 */
public static BytesWritable copyBytesWritable(BytesWritable bw) {
  final BytesWritable result = new BytesWritable();
  result.set(bw);
  return result;
}
/**
 * Produces an independent deep copy of the incoming BytesWritable so later
 * mutation of the source cannot affect the copy. Copies only the valid
 * {@code getLength()} bytes, not the whole backing buffer.
 *
 * @param o a BytesWritable, or null
 * @return a detached copy, or null when {@code o} is null
 */
@Override
public BytesWritable copyObject(Object o) {
  if (o == null) {
    return null;
  }
  final BytesWritable source = (BytesWritable) o;
  final int length = source.getLength();
  final byte[] copied = new byte[length];
  System.arraycopy(source.getBytes(), 0, copied, 0, length);
  return new BytesWritable(copied);
}
/** * gets a byte[] with copy of data from source BytesWritable * @param sourceBw - source BytesWritable */ public static byte[] createByteArray(BytesWritable sourceBw){ //TODO should replace with BytesWritable.copyData() once Hive //removes support for the Hadoop 0.20 series. return Arrays.copyOf(sourceBw.getBytes(), sourceBw.getLength()); }
return new ShortWritable((short) 0); case INT: return new IntWritable(0); case LONG: return new LongWritable(0); case TIMESTAMP: return new TimestampWritable(new Timestamp(0)); case DATE: return new DateWritable(new Date(0)); case FLOAT: return new FloatWritable(0); case DOUBLE: return new DoubleWritable(0); case BINARY: return new BytesWritable(ArrayUtils.EMPTY_BYTE_ARRAY); case STRING: return new Text(ArrayUtils.EMPTY_BYTE_ARRAY); case VARCHAR: return new HiveVarcharWritable(new HiveVarchar(StringUtils.EMPTY, -1)); case CHAR: return new HiveCharWritable(new HiveChar(StringUtils.EMPTY, -1)); case DECIMAL: return new HiveDecimalWritable();
case BOOLEAN: ((LongColumnVector) batch.cols[projectionColumnNum]).vector[batchIndex] = (((BooleanWritable) object).get() ? 1 : 0); break; case BYTE: case INT: ((LongColumnVector) batch.cols[projectionColumnNum]).vector[batchIndex] = ((IntWritable) object).get(); break; case LONG: ((LongColumnVector) batch.cols[projectionColumnNum]).vector[batchIndex] = ((LongWritable) object).get(); break; case TIMESTAMP: batchIndex, bw.getBytes(), 0, bw.getLength()); batchIndex, tw.getBytes(), 0, tw.getLength()); hiveChar = (HiveChar) object; } else { hiveChar = ((HiveCharWritable) object).getHiveChar();
writable = new BooleanWritable(); ((BooleanWritable) writable).set(deserializeRead.currentBoolean); writable = new IntWritable(); ((IntWritable) writable).set(deserializeRead.currentInt); writable = new LongWritable(); ((LongWritable) writable).set(deserializeRead.currentLong); writable = new FloatWritable(); ((FloatWritable) writable).set(deserializeRead.currentFloat); writable = new BytesWritable(); ((BytesWritable) writable).set( deserializeRead.currentBytes, deserializeRead.currentBytesStart, writable = new Text(); writable = new HiveCharWritable(); field.getMaxLength());
TestCase.fail("Boolean expected writable not Boolean"); boolean expected = ((BooleanWritable) object).get(); if (value != expected) { TestCase.fail("Boolean field mismatch (expected " + expected + " found " + value + ")"); TestCase.fail("Integer expected writable not Integer"); int expected = ((IntWritable) object).get(); if (value != expected) { TestCase.fail("Int field mismatch (expected " + expected + " found " + value + ")"); TestCase.fail("Long expected writable not Long"); Long expected = ((LongWritable) object).get(); if (value != expected) { TestCase.fail("Long field mismatch (expected " + expected + " found " + value + ")"); TestCase.fail("Float expected writable not Float"); deserializeRead.currentBytesStart + deserializeRead.currentBytesLength); BytesWritable bytesWritable = (BytesWritable) object; byte[] expected = Arrays.copyOfRange(bytesWritable.getBytes(), 0, bytesWritable.getLength()); if (byteArray.length != expected.length){ TestCase.fail("Byte Array field mismatch (expected " + Arrays.toString(expected)
partialResult[0] = new Text(); partialResult[1] = new LongWritable(0); partialResult[2] = new LongWritable(0); partialResult[3] = new LongWritable(0); partialResult[4] = new LongWritable(0); partialResult[5] = new BytesWritable(); result[0] = new Text(); result[1] = new LongWritable(0); result[2] = new DoubleWritable(0); result[3] = new LongWritable(0); result[4] = new LongWritable(0); result[5] = new BytesWritable();
/**
 * Merges every protobuf-encoded PersonWrapper value for one key and writes
 * the merged record to the PERSON multi-output. Parsing is best-effort:
 * a value that fails protobuf decoding is logged and skipped rather than
 * failing the whole reduce.
 *
 * @param key    the grouping key
 * @param values serialized PersonWrapper payloads for this key
 */
void mergePersons(String key, Iterable<BytesWritable> values)
    throws IOException, InterruptedException {
  List<PersonProtos.PersonWrapper> persons = new ArrayList<>();
  for (BytesWritable bw : values) {
    try {
      // copyBytes() trims to the writable's valid length before parsing.
      persons.add(PersonProtos.PersonWrapper.parseFrom(bw.copyBytes()));
    } catch (InvalidProtocolBufferException ex) {
      // Deliberately tolerate corrupt records; log and continue.
      Logger.getLogger(HBaseToProtosReducer.class.getName()).log(Level.SEVERE, null, ex);
    }
  }
  PersonProtos.PersonWrapper merged = personMerger.merge(persons);
  mos.write(new Text(key), new BytesWritable(merged.toByteArray()), Type.PERSON.name() + "/");
}
/**
 * Wraps a raw byte[] in the Writable matching the given Hive type:
 * string → Text, varchar → HiveVarcharWritable (unbounded length, -1),
 * binary → BytesWritable. Any other type yields null.
 *
 * @param ti    Hive type of the value
 * @param value raw bytes to wrap
 * @return the wrapping Writable, or null for unsupported types
 */
private Writable getWritableValue(TypeInfo ti, byte[] value) {
  if (ti.equals(TypeInfoFactory.stringTypeInfo)) {
    return new Text(value);
  }
  if (ti.equals(TypeInfoFactory.varcharTypeInfo)) {
    // Decode bytes through Text to get UTF-8 semantics, then wrap;
    // -1 means "no declared max length".
    String decoded = new Text(value).toString();
    return new HiveVarcharWritable(new HiveVarchar(decoded, -1));
  }
  if (ti.equals(TypeInfoFactory.binaryTypeInfo)) {
    return new BytesWritable(value);
  }
  return null;
}
Text t; if (value instanceof String) { t = new Text((String) value); } else { t = ((WritableStringObjectInspector) outputOI).getPrimitiveWritableObject(value); bv.setVal(i, t.getBytes(), 0, t.getLength()); } else if (outputOI instanceof WritableHiveCharObjectInspector) { WritableHiveCharObjectInspector writableHiveCharObjectOI = (WritableHiveCharObjectInspector) outputOI; hiveCharWritable = writableHiveCharObjectOI.getPrimitiveWritableObject(value); Text t = hiveCharWritable.getTextValue(); ts = ((WritableDateObjectInspector) outputOI).getPrimitiveJavaObject(value); long l = DateWritable.dateToDays(ts); lv.vector[i] = l; } else if (outputOI instanceof WritableBooleanObjectInspector) { BytesWritable bw = (BytesWritable) value; BytesColumnVector bv = (BytesColumnVector) colVec; bv.setVal(i, bw.getBytes(), 0, bw.getLength()); } else { throw new RuntimeException("Unhandled object type " + outputOI.getTypeName() +
actualValue = ((BooleanWritable) actualValue).get(); actualValue = new SqlVarbinary(((BytesWritable) actualValue).copyBytes()); actualValue = new SqlDate(((DateWritable) actualValue).getDays()); actualValue = ((FloatWritable) actualValue).get(); actualValue = ((IntWritable) actualValue).get(); actualValue = ((HiveCharWritable) actualValue).getPaddedValue().toString(); actualValue = ((LongWritable) actualValue).get();
/**
 * Runs UDFSha1 over the given binary input and asserts the result matches
 * {@code expResult}. A null input is expected to produce a null digest.
 *
 * @param binV      input bytes, or null
 * @param expResult expected hex digest, or null
 * @param udf       the UDF instance under test
 */
private void runAndVerifyBin(byte[] binV, String expResult, UDFSha1 udf) throws HiveException {
  BytesWritable input = (binV == null) ? null : new BytesWritable(binV);
  Text actual = (Text) udf.evaluate(input);
  String actualText = (actual == null) ? null : actual.toString();
  assertEquals("sha1() test ", expResult, actualText);
}
}
public void testUnbase64Conversion(){ Text base64 = new Text(); // Let's make sure we only read the relevant part of the writable in case of reuse base64.set("Garbage 64. Should be ignored."); base64.set("c3RyaW5n"); BytesWritable expected = new BytesWritable("string".getBytes()); UDFUnbase64 udf = new UDFUnbase64(); BytesWritable output = udf.evaluate(base64); assertEquals(expected, output); } }
/**
 * Maps a metadata column of the current record to its Hive Writable
 * representation. KEY maps to null when the record carries no key.
 *
 * @param metadataColumn which metadata field to materialize
 * @return the column value as a Writable (null only for an absent KEY)
 * @throws IllegalArgumentException for an unrecognized column
 */
Writable getHiveWritable(MetadataColumn metadataColumn) {
  switch (metadataColumn) {
    case OFFSET:
      return new LongWritable(getOffset());
    case PARTITION:
      return new IntWritable(getPartition());
    case TIMESTAMP:
      return new LongWritable(getTimestamp());
    case KEY: {
      byte[] recordKey = getRecordKey();
      return (recordKey == null) ? null : new BytesWritable(recordKey);
    }
    default:
      throw new IllegalArgumentException(
          "Unknown metadata column [" + metadataColumn.getName() + "]");
  }
}
byte[] bytes = new byte[bw.getLength()]; System.arraycopy(bw.getBytes(), 0, bytes, 0, bw.getLength()); bloomFilter = BloomFilter.deserialize(new ByteArrayInputStream(bytes)); } catch ( IOException e) { DateWritable vDate = ((DateObjectInspector) valObjectInspector). getPrimitiveWritableObject(arguments[0].get()); return bloomFilter.testLong(vDate.getDays()); case TIMESTAMP: Timestamp vTimeStamp = ((TimestampObjectInspector) valObjectInspector). case CHAR: Text vChar = ((HiveCharObjectInspector) valObjectInspector). getPrimitiveWritableObject(arguments[0].get()).getStrippedValue(); return bloomFilter.testBytes(vChar.getBytes(), 0, vChar.getLength()); case VARCHAR: Text vVarchar = ((HiveVarcharObjectInspector) valObjectInspector). getPrimitiveWritableObject(arguments[0].get()).getTextValue(); return bloomFilter.testBytes(vVarchar.getBytes(), 0, vVarchar.getLength()); case STRING: Text vString = ((StringObjectInspector) valObjectInspector). BytesWritable vBytes = ((BinaryObjectInspector) valObjectInspector). getPrimitiveWritableObject(arguments[0].get()); return bloomFilter.testBytes(vBytes.getBytes(), 0, vBytes.getLength()); default: throw new UDFArgumentTypeException(0, "Bad primitive category " +