/**
 * Convert a UTF-8 encoded byte array back into a string.
 *
 * @throws IOException if the byte array is invalid UTF8
 */
public static String fromBytes(byte[] bytes) throws IOException {
  DataInputBuffer dbuf = new DataInputBuffer();
  dbuf.reset(bytes, 0, bytes.length);
  StringBuilder buf = new StringBuilder(bytes.length);
  readChars(dbuf, buf, bytes.length);
  return buf.toString();
}
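A minimal usage sketch of the round trip; the input literal is hypothetical, and this assumes the bytes are plain UTF-8 as the javadoc states:

// Usage sketch (hypothetical input): decode raw UTF-8 bytes back to a String.
byte[] raw = "hello, world".getBytes(java.nio.charset.StandardCharsets.UTF_8);
String decoded = fromBytes(raw);  // "hello, world"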
protected WritableComparator(Class<? extends WritableComparable> keyClass,
    Configuration conf, boolean createInstances) {
  this.keyClass = keyClass;
  this.conf = (conf != null) ? conf : new Configuration();
  if (createInstances) {
    key1 = newKey();
    key2 = newKey();
    buffer = new DataInputBuffer();
  } else {
    key1 = key2 = null;
    buffer = null;
  }
}
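For context, the fields initialized above are typically consumed by the raw-bytes compare path; the sketch below paraphrases Hadoop's WritableComparator.compare and is illustrative, not necessarily this exact source:

// Sketch: deserialize both raw key ranges into key1/key2 via the shared
// buffer, then delegate to the object-level compare.
public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
  try {
    buffer.reset(b1, s1, l1);   // parse key1 from the first raw range
    key1.readFields(buffer);
    buffer.reset(b2, s2, l2);   // parse key2 from the second raw range
    key2.readFields(buffer);
    buffer.reset(null, 0, 0);   // drop the byte[] reference when done
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
  return compare(key1, key2);
}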
public TextRecordInputStream(FileStatus f) throws IOException {
  final Path fpath = f.getPath();
  final Configuration lconf = getConf();
  r = new SequenceFile.Reader(lconf, SequenceFile.Reader.file(fpath));
  key = ReflectionUtils.newInstance(
      r.getKeyClass().asSubclass(Writable.class), lconf);
  val = ReflectionUtils.newInstance(
      r.getValueClass().asSubclass(Writable.class), lconf);
  inbuf = new DataInputBuffer();
  outbuf = new DataOutputBuffer();
}
public static SubmitWorkInfo fromBytes(byte[] submitWorkInfoBytes)
    throws IOException {
  DataInputBuffer dib = new DataInputBuffer();
  dib.reset(submitWorkInfoBytes, 0, submitWorkInfoBytes.length);
  SubmitWorkInfo submitWorkInfo = new SubmitWorkInfo();
  submitWorkInfo.readFields(dib);
  return submitWorkInfo;
}
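A hedged sketch of the inverse direction, assuming SubmitWorkInfo implements Writable (its readFields above suggests it does); the helper name toBytes is illustrative:

// Hypothetical inverse: serialize a SubmitWorkInfo to a byte[].
public static byte[] toBytes(SubmitWorkInfo info) throws IOException {
  DataOutputBuffer dob = new DataOutputBuffer();
  info.write(dob);
  // getData() returns the whole backing array, so trim to the valid length.
  return java.util.Arrays.copyOf(dob.getData(), dob.getLength());
}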
/**
 * Parses all the HRegionInfo instances from the passed in stream until EOF. Presumes the
 * HRegionInfo's were serialized to the stream with {@link #toDelimitedByteArray()}
 * @param bytes serialized bytes
 * @param offset the start offset into the byte[] buffer
 * @param length how far we should read into the byte[] buffer
 * @return All the HRegionInfos that are in the byte array. Keeps reading till we hit the end.
 * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
 *             Use {@link RegionInfo#parseDelimitedFrom(byte[], int, int)}.
 */
@Deprecated
public static List<HRegionInfo> parseDelimitedFrom(final byte[] bytes,
    final int offset, final int length) throws IOException {
  if (bytes == null) {
    throw new IllegalArgumentException("Can't build an object with empty bytes array");
  }
  DataInputBuffer in = new DataInputBuffer();
  List<HRegionInfo> hris = new ArrayList<>();
  try {
    in.reset(bytes, offset, length);
    while (in.available() > 0) {
      HRegionInfo hri = parseFrom(in);
      hris.add(hri);
    }
  } finally {
    in.close();
  }
  return hris;
}
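Usage sketch of the delimited round trip, where hri stands in for any existing HRegionInfo:

// Usage sketch: serialize one region in delimited form, then parse it back.
byte[] b = hri.toDelimitedByteArray();
List<HRegionInfo> parsed = HRegionInfo.parseDelimitedFrom(b, 0, b.length);
// parsed contains a single HRegionInfo equal to hri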
throw new IllegalArgumentException("Writable cannot be null"); DataInputBuffer in = new DataInputBuffer(); try { in.reset(bytes, offset, length);
/**
 * Parses all the RegionInfo instances from the passed in stream until EOF. Presumes the
 * RegionInfo's were serialized to the stream with {@link #toDelimitedByteArray()}
 * @param bytes serialized bytes
 * @param offset the start offset into the byte[] buffer
 * @param length how far we should read into the byte[] buffer
 * @return All the RegionInfos that are in the byte array. Keeps reading till we hit the end.
 * @throws IOException
 */
static List<RegionInfo> parseDelimitedFrom(final byte[] bytes, final int offset,
    final int length) throws IOException {
  if (bytes == null) {
    throw new IllegalArgumentException("Can't build an object with empty bytes array");
  }
  DataInputBuffer in = new DataInputBuffer();
  List<RegionInfo> ris = new ArrayList<>();
  try {
    in.reset(bytes, offset, length);
    while (in.available() > 0) {
      RegionInfo ri = parseFrom(in);
      ris.add(ri);
    }
  } finally {
    in.close();
  }
  return ris;
}
/**
 * Transform a byte array of credentials into a hadoop Credentials object.
 * @param binaryCredentials credentials in byte format as they would
 *          usually be when received from protocol buffers
 * @return a hadoop Credentials object
 */
public static Credentials credentialsFromByteArray(byte[] binaryCredentials)
    throws IOException {
  Credentials credentials = new Credentials();
  DataInputBuffer dib = new DataInputBuffer();
  dib.reset(binaryCredentials, binaryCredentials.length);
  credentials.readTokenStorageStream(dib);
  return credentials;
}
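For symmetry, a sketch of the opposite conversion using Credentials.writeTokenStorageToStream; the method name credentialsToByteArray is illustrative:

// Hypothetical inverse: serialize Credentials back into a byte[].
public static byte[] credentialsToByteArray(Credentials credentials)
    throws IOException {
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  return java.util.Arrays.copyOf(dob.getData(), dob.getLength());
}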
@Override
public void writeUncompressedBytes(DataOutputStream outStream)
    throws IOException {
  if (decompressedStream == null) {
    rawData = new DataInputBuffer();
    decompressedStream = codec.createInputStream(rawData);
  } else {
    decompressedStream.resetState();
  }
  rawData.reset(data, 0, dataSize);
  byte[] buffer = new byte[8192];
  int bytesRead = 0;
  while ((bytesRead = decompressedStream.read(buffer, 0, 8192)) != -1) {
    outStream.write(buffer, 0, bytesRead);
  }
}
/** Used by child copy constructors. */
protected synchronized void copy(Writable other) {
  if (other != null) {
    try {
      DataOutputBuffer out = new DataOutputBuffer();
      other.write(out);
      DataInputBuffer in = new DataInputBuffer();
      in.reset(out.getData(), out.getLength());
      readFields(in);
    } catch (IOException e) {
      throw new IllegalArgumentException("map cannot be copied: " +
          e.getMessage());
    }
  } else {
    throw new IllegalArgumentException("source map cannot be null");
  }
}
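The "child copy constructor" the javadoc mentions typically looks like this sketch; the class name is illustrative:

// Hypothetical subclass showing the intended call site of copy(Writable).
public MyMapWritable(MyMapWritable other) {
  this();       // set up empty state first
  copy(other);  // then deep-copy via the serialize/deserialize round trip
}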
/**
 * Populate the writable with the value decoded from newValue.
 * @param obj the object to read into
 * @param newValue the string with the url-safe base64 encoded bytes
 * @throws IOException
 */
private static void decodeWritable(Writable obj, String newValue)
    throws IOException {
  if (newValue == null) {
    throw new HadoopIllegalArgumentException(
        "Invalid argument, newValue is null");
  }
  Base64 decoder = new Base64(0, null, true);
  DataInputBuffer buf = new DataInputBuffer();
  byte[] decoded = decoder.decode(newValue);
  buf.reset(decoded, decoded.length);
  obj.readFields(buf);
}
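The encode direction is the mirror image; this sketch paraphrases the usual pattern (DataOutputBuffer plus URL-safe Base64) and is not guaranteed to match this exact source:

// Sketch of the inverse: serialize a Writable into a URL-safe base64 string.
private static String encodeWritable(Writable obj) throws IOException {
  DataOutputBuffer buf = new DataOutputBuffer();
  obj.write(buf);
  Base64 encoder = new Base64(0, null, true);
  // Copy only the valid prefix; getData() is the whole backing array.
  byte[] raw = new byte[buf.getLength()];
  System.arraycopy(buf.getData(), 0, raw, 0, buf.getLength());
  return encoder.encodeToString(raw);
}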
public DefaultStringifier(Configuration conf, Class<T> c) {
  SerializationFactory factory = new SerializationFactory(conf);
  this.serializer = factory.getSerializer(c);
  this.deserializer = factory.getDeserializer(c);
  this.inBuf = new DataInputBuffer();
  this.outBuf = new DataOutputBuffer();
  try {
    serializer.open(outBuf);
    deserializer.open(inBuf);
  } catch (IOException ex) {
    throw new RuntimeException(ex);
  }
}
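Usage sketch of the resulting stringifier, assuming a Configuration conf is in scope:

// Usage sketch: round-trip a Writable through its string form.
DefaultStringifier<IntWritable> stringifier =
    new DefaultStringifier<>(conf, IntWritable.class);
String encoded = stringifier.toString(new IntWritable(42));
IntWritable decoded = stringifier.fromString(encoded);  // value 42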
keyDataInputStream = new DataInputBuffer();
valueBufferInputStream = new ChunkDecoder();
valueDataInputStream = new DataInputStream(valueBufferInputStream);
public static TestSignable deserialize(byte[] bytes) throws IOException {
  DataInputBuffer db = new DataInputBuffer();
  db.reset(bytes, bytes.length);
  int keyId = db.readInt();
  byte b = db.readByte();
  db.close();
  return new TestSignable(keyId, b);
}
valBuffer = new DataInputBuffer();
if (decompress) {
  valDecompressor = CodecPool.getDecompressor(codec);
  keyLenBuffer = new DataInputBuffer();
  keyBuffer = new DataInputBuffer();
  valLenBuffer = new DataInputBuffer();
Token<LlapTokenIdentifier> llapToken = null;
if (llapTokenBytes != null) {
  DataInputBuffer in = new DataInputBuffer();
  in.reset(llapTokenBytes, 0, llapTokenBytes.length);
  llapToken = new Token<LlapTokenIdentifier>();
  // Completing the truncated fragment: the freshly created token is
  // populated from the buffer.
  llapToken.readFields(in);
}
@Test
public void testAuthMethodReadWrite() throws IOException {
  DataInputBuffer in = new DataInputBuffer();
  DataOutputBuffer out = new DataOutputBuffer();
  assertAuthMethodRead(in, AuthMethod.SIMPLE);
  assertAuthMethodRead(in, AuthMethod.KERBEROS);
  assertAuthMethodRead(in, AuthMethod.DIGEST);
  assertAuthMethodWrite(out, AuthMethod.SIMPLE);
  assertAuthMethodWrite(out, AuthMethod.KERBEROS);
  assertAuthMethodWrite(out, AuthMethod.DIGEST);
}
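The two helpers are not shown in this snippet; below is a plausible sketch consistent with how they are called, assuming AuthMethod exposes code, write, and read as in Hadoop's SaslRpcServer:

// Hypothetical helpers matching the calls above.
private void assertAuthMethodRead(DataInputBuffer in, AuthMethod authMethod)
    throws IOException {
  in.reset(new byte[] { authMethod.code }, 1);  // single-byte wire form
  assertEquals(authMethod, AuthMethod.read(in));
}

private void assertAuthMethodWrite(DataOutputBuffer out, AuthMethod authMethod)
    throws IOException {
  authMethod.write(out);
  assertEquals(authMethod.code, out.getData()[0]);
  out.reset();                                  // reuse the buffer
}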
@Test
public void testWriteRead() throws IOException {
  DataOutputBuffer output = new DataOutputBuffer();
  this.expectedInputSplit.write(output);
  KafkaInputSplit kafkaInputSplit = new KafkaInputSplit();
  DataInputBuffer input = new DataInputBuffer();
  input.reset(output.getData(), 0, output.getLength());
  kafkaInputSplit.readFields(input);
  Assert.assertEquals(this.expectedInputSplit, kafkaInputSplit);
}
/**
 * Test RegionInfo serialization
 * @throws Exception
 */
@Test
public void testRegionInfo() throws Exception {
  HRegionInfo hri = createRandomRegion("testRegionInfo");

  // test toByteArray()
  byte[] hrib = hri.toByteArray();
  HRegionInfo deserializedHri = HRegionInfo.parseFrom(hrib);
  assertEquals(hri.getEncodedName(), deserializedHri.getEncodedName());
  assertEquals(hri, deserializedHri);

  // test toDelimitedByteArray()
  hrib = hri.toDelimitedByteArray();
  DataInputBuffer buf = new DataInputBuffer();
  try {
    buf.reset(hrib, hrib.length);
    deserializedHri = HRegionInfo.parseFrom(buf);
    assertEquals(hri.getEncodedName(), deserializedHri.getEncodedName());
    assertEquals(hri, deserializedHri);
  } finally {
    buf.close();
  }
}
private static void copy(Writable src, Writable dest) throws IOException {
  // not exactly efficient...
  DataOutputBuffer output = new DataOutputBuffer();
  src.write(output);
  DataInputBuffer input = new DataInputBuffer();
  input.reset(output.getData(), output.getLength());
  dest.readFields(input);
}
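Usage sketch: cloning any Writable without needing a copy constructor.

// Usage sketch: deep-copy via the generic serialize/deserialize round trip.
Text src = new Text("payload");
Text dest = new Text();
copy(src, dest);  // dest now holds "payload"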