/**
 * Streaming access to the key. Useful for deserializing the key into
 * user objects.
 *
 * @return The input stream.
 */
public DataInputStream getKeyStream() {
  keyDataInputStream.reset(keyBuffer, klen);
  return keyDataInputStream;
}
/**
 * Convert to a string, checking for valid UTF8.
 * @return the converted string
 * @throws UTFDataFormatException if the underlying bytes contain invalid
 *         UTF8 data.
 */
public String toStringChecked() throws IOException {
  StringBuilder buffer = new StringBuilder(length);
  synchronized (IBUF) {
    IBUF.reset(bytes, length);
    readChars(IBUF, buffer, length);
  }
  return buffer.toString();
}
/** Convert to a String. */
@Override
public String toString() {
  StringBuilder buffer = new StringBuilder(length);
  try {
    synchronized (IBUF) {
      IBUF.reset(bytes, length);
      readChars(IBUF, buffer, length);
    }
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
  return buffer.toString();
}
@Override
public T fromString(String str) throws IOException {
  try {
    byte[] bytes = Base64.decodeBase64(str.getBytes("UTF-8"));
    inBuf.reset(bytes, bytes.length);
    T restored = deserializer.deserialize(null);
    return restored;
  } catch (UnsupportedCharsetException ex) {
    throw new IOException(ex.toString());
  }
}
/**
 * Convert a UTF-8 encoded byte array back into a string.
 *
 * @throws IOException if the byte array is invalid UTF8
 */
public static String fromBytes(byte[] bytes) throws IOException {
  DataInputBuffer dbuf = new DataInputBuffer();
  dbuf.reset(bytes, 0, bytes.length);
  StringBuilder buf = new StringBuilder(bytes.length);
  readChars(dbuf, buf, bytes.length);
  return buf.toString();
}
/**
 * Optimization hook. Override this to make SequenceFile.Sorter's sorting scream.
 *
 * <p>The default implementation reads the data into two {@link
 * WritableComparable}s (using {@link Writable#readFields(DataInput)}),
 * then calls {@link #compare(WritableComparable,WritableComparable)}.
 */
@Override
public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
  try {
    buffer.reset(b1, s1, l1);                   // parse key1
    key1.readFields(buffer);

    buffer.reset(b2, s2, l2);                   // parse key2
    key2.readFields(buffer);

    buffer.reset(null, 0, 0);                   // clean up reference
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
  return compare(key1, key2);                   // compare them
}
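// A minimal sketch (not from the source) of what such an override can look like:
// a raw comparator for LongWritable keys that compares the serialized bytes
// directly and skips the readFields() calls in the default implementation above.
// Assumes the standard org.apache.hadoop.io.{LongWritable, WritableComparator} API.
public static class LongRawComparator extends WritableComparator {
  public LongRawComparator() {
    super(LongWritable.class);
  }

  @Override
  public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
    long v1 = readLong(b1, s1);   // WritableComparator.readLong: decode 8 raw bytes
    long v2 = readLong(b2, s2);
    return Long.compare(v1, v2);
  }
}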
/**
 * Move the data from the output buffer to the input buffer.
 */
void moveData() {
  inBuffer.reset(outBuffer.getData(), outBuffer.getLength());
}
public static SubmitWorkInfo fromBytes(byte[] submitWorkInfoBytes) throws IOException {
  DataInputBuffer dib = new DataInputBuffer();
  dib.reset(submitWorkInfoBytes, 0, submitWorkInfoBytes.length);
  SubmitWorkInfo submitWorkInfo = new SubmitWorkInfo();
  submitWorkInfo.readFields(dib);
  return submitWorkInfo;
}
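// Hedged sketch of the symmetric serialization side (not from the source).
// Assumes SubmitWorkInfo implements Writable, as fromBytes() above implies, and
// uses org.apache.hadoop.io.DataOutputBuffer plus java.util.Arrays.
public static byte[] toBytes(SubmitWorkInfo submitWorkInfo) throws IOException {
  DataOutputBuffer dob = new DataOutputBuffer();
  submitWorkInfo.write(dob);                              // Writable#write
  return Arrays.copyOf(dob.getData(), dob.getLength());   // trim to the actual length
}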
/**
 * Parses all the RegionInfo instances from the passed-in stream until EOF. Presumes the
 * RegionInfos were serialized to the stream with toDelimitedByteArray().
 * @param bytes serialized bytes
 * @param offset the start offset into the byte[] buffer
 * @param length how far we should read into the byte[] buffer
 * @return All the RegionInfos that are in the byte array. Keeps reading until we hit the end.
 * @throws IOException
 */
static List<RegionInfo> parseDelimitedFrom(final byte[] bytes, final int offset,
    final int length) throws IOException {
  if (bytes == null) {
    throw new IllegalArgumentException("Can't build an object with empty bytes array");
  }
  DataInputBuffer in = new DataInputBuffer();
  List<RegionInfo> ris = new ArrayList<>();
  try {
    in.reset(bytes, offset, length);
    while (in.available() > 0) {
      RegionInfo ri = parseFrom(in);
      ris.add(ri);
    }
  } finally {
    in.close();
  }
  return ris;
}
/**
 * Transform a byte array of credentials into a Hadoop Credentials object.
 * @param binaryCredentials credentials in byte format as they would
 *        usually be when received from protocol buffers
 * @return a Hadoop Credentials object
 */
public static Credentials credentialsFromByteArray(byte[] binaryCredentials)
    throws IOException {
  Credentials credentials = new Credentials();
  DataInputBuffer dib = new DataInputBuffer();
  dib.reset(binaryCredentials, binaryCredentials.length);
  credentials.readTokenStorageStream(dib);
  return credentials;
}
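// Hedged sketch of the reverse direction (not from the source): serializes a
// Credentials object back to a byte array using the standard
// Credentials#writeTokenStorageToStream API and a DataOutputBuffer.
public static byte[] credentialsToByteArray(Credentials credentials) throws IOException {
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);             // tokens + secret keys
  return Arrays.copyOf(dob.getData(), dob.getLength());
}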
@Override
public void writeUncompressedBytes(DataOutputStream outStream) throws IOException {
  if (decompressedStream == null) {
    rawData = new DataInputBuffer();
    decompressedStream = codec.createInputStream(rawData);
  } else {
    decompressedStream.resetState();
  }
  rawData.reset(data, 0, dataSize);

  byte[] buffer = new byte[8192];
  int bytesRead = 0;
  while ((bytesRead = decompressedStream.read(buffer, 0, 8192)) != -1) {
    outStream.write(buffer, 0, bytesRead);
  }
}
/** Used by child copy constructors. */
protected synchronized void copy(Writable other) {
  if (other != null) {
    try {
      DataOutputBuffer out = new DataOutputBuffer();
      other.write(out);
      DataInputBuffer in = new DataInputBuffer();
      in.reset(out.getData(), out.getLength());
      readFields(in);
    } catch (IOException e) {
      throw new IllegalArgumentException("map cannot be copied: " + e.getMessage());
    }
  } else {
    throw new IllegalArgumentException("source map cannot be null");
  }
}
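// Minimal sketch of the same write-then-read round trip as a standalone
// deep-copy helper (hypothetical name cloneInto, not from the source).
public static <T extends Writable> T cloneInto(T src, T dst) throws IOException {
  DataOutputBuffer out = new DataOutputBuffer();
  src.write(out);                               // serialize the source object
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());     // wrap the serialized bytes
  dst.readFields(in);                           // populate the destination
  return dst;
}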
/**
 * Populate the writable with the value decoded from newValue.
 * @param obj the object to read into
 * @param newValue the string with the url-safe base64 encoded bytes
 * @throws IOException
 */
private static void decodeWritable(Writable obj, String newValue) throws IOException {
  if (newValue == null) {
    throw new HadoopIllegalArgumentException("Invalid argument, newValue is null");
  }
  Base64 decoder = new Base64(0, null, true);
  DataInputBuffer buf = new DataInputBuffer();
  byte[] decoded = decoder.decode(newValue);
  buf.reset(decoded, decoded.length);
  obj.readFields(buf);
}
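// Hedged sketch of the encoding counterpart (not from the source): writes the
// Writable into a DataOutputBuffer and url-safe base64-encodes the bytes with
// the same org.apache.commons.codec.binary.Base64 settings used above.
private static String encodeWritable(Writable obj) throws IOException {
  DataOutputBuffer buf = new DataOutputBuffer();
  obj.write(buf);                                         // serialize the Writable
  Base64 encoder = new Base64(0, null, true);             // no line breaks, url-safe
  byte[] raw = new byte[buf.getLength()];
  System.arraycopy(buf.getData(), 0, raw, 0, buf.getLength());
  return encoder.encodeToString(raw);                     // url-safe base64 string
}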
private void checkKey(WritableComparable key) throws IOException {
  // check that keys are well-ordered
  if (size != 0 && comparator.compare(lastKey, key) > 0) {
    throw new IOException("key out of order: " + key + " after " + lastKey);
  }

  // update lastKey with a copy of key by writing and reading
  outBuf.reset();
  key.write(outBuf);                                  // write new key
  inBuf.reset(outBuf.getData(), outBuf.getLength());
  lastKey.readFields(inBuf);                          // read into lastKey
}
private void assertAuthMethodRead(DataInputBuffer in, AuthMethod authMethod)
    throws IOException {
  in.reset(new byte[] {authMethod.code}, 1);
  assertEquals(authMethod, AuthMethod.read(in));
}
@Override
public int read() throws IOException {
  int ret;
  if (null == inbuf || -1 == (ret = inbuf.read())) {
    if (!r.next(key, val)) {
      return -1;
    }
    byte[] tmp = key.toString().getBytes(StandardCharsets.UTF_8);
    outbuf.write(tmp, 0, tmp.length);
    outbuf.write('\t');
    tmp = val.toString().getBytes(StandardCharsets.UTF_8);
    outbuf.write(tmp, 0, tmp.length);
    outbuf.write('\n');
    inbuf.reset(outbuf.getData(), outbuf.getLength());
    outbuf.reset();
    ret = inbuf.read();
  }
  return ret;
}
/** Read a compressed buffer */
private synchronized void readBuffer(DataInputBuffer buffer,
    CompressionInputStream filter) throws IOException {
  // Read data into a temporary buffer
  DataOutputBuffer dataBuffer = new DataOutputBuffer();
  try {
    int dataBufferLength = WritableUtils.readVInt(in);
    dataBuffer.write(in, dataBufferLength);

    // Set up 'buffer' connected to the input-stream
    buffer.reset(dataBuffer.getData(), 0, dataBuffer.getLength());
  } finally {
    dataBuffer.close();
  }

  // Reset the codec
  filter.resetState();
}
public static TestSignable deserialize(byte[] bytes) throws IOException {
  DataInputBuffer db = new DataInputBuffer();
  db.reset(bytes, bytes.length);
  int keyId = db.readInt();
  byte b = db.readByte();
  db.close();
  return new TestSignable(keyId, b);
}
@Test
public void testWriteRead() throws IOException {
  DataOutputBuffer output = new DataOutputBuffer();
  this.expectedInputSplit.write(output);

  KafkaInputSplit kafkaInputSplit = new KafkaInputSplit();
  DataInputBuffer input = new DataInputBuffer();
  input.reset(output.getData(), 0, output.getLength());
  kafkaInputSplit.readFields(input);

  Assert.assertEquals(this.expectedInputSplit, kafkaInputSplit);
}
/**
 * Test RegionInfo serialization
 * @throws Exception
 */
@Test
public void testRegionInfo() throws Exception {
  HRegionInfo hri = createRandomRegion("testRegionInfo");

  // test toByteArray()
  byte[] hrib = hri.toByteArray();
  HRegionInfo deserializedHri = HRegionInfo.parseFrom(hrib);
  assertEquals(hri.getEncodedName(), deserializedHri.getEncodedName());
  assertEquals(hri, deserializedHri);

  // test toDelimitedByteArray()
  hrib = hri.toDelimitedByteArray();
  DataInputBuffer buf = new DataInputBuffer();
  try {
    buf.reset(hrib, hrib.length);
    deserializedHri = HRegionInfo.parseFrom(buf);
    assertEquals(hri.getEncodedName(), deserializedHri.getEncodedName());
    assertEquals(hri, deserializedHri);
  } finally {
    buf.close();
  }
}