// Creates the buffer in its initial, empty state: the decoder chain is wired
// over an empty ByteBuffer, and basePosition of -1 marks that no data has been
// buffered yet.
// NOTE(review): assumes callers repopulate `buffer` before decoding — confirm.
StreamEventBuffer() {
  this.buffer = ByteBuffers.EMPTY_BUFFER;
  this.bufferInput = new ByteBufferInputStream(buffer);
  this.decoder = new BinaryDecoder(bufferInput);
  this.basePosition = -1L;
}
/**
 * Deserializes a map of hash keys encoded in the Avro map format
 * (length-prefixed blocks of key/value pairs, terminated by a zero block).
 *
 * @param bytes source buffer containing the encoded map; may be null
 * @param off offset into {@code bytes} where the encoded map starts
 * @param len number of encoded bytes
 * @return the decoded key-to-value map; empty when there are no hash keys
 * @throws IOException if decoding fails
 */
public static Map<String, Integer> deserializeHashKeys(byte[] bytes, int off, int len) throws IOException {
  if (bytes == null || (len == 1 && bytes[off] == 0)) {
    // No hash keys: null input, or a single zero byte (an empty Avro map).
    return ImmutableMap.of();
  }
  ByteArrayInputStream bis = new ByteArrayInputStream(bytes, off, len);
  BinaryDecoder decoder = new BinaryDecoder(bis);
  int size = decoder.readInt();
  Map<String, Integer> hashKeys = Maps.newHashMapWithExpectedSize(size);
  while (size > 0) {
    // Per the Avro spec, there can be multiple blocks.
    while (size-- > 0) {
      String key = decoder.readString();
      int value = decoder.readInt();
      hashKeys.put(key, value);
    }
    size = decoder.readInt(); // next block length, will be always zero in this case
  }
  return hashKeys;
}
}
/**
 * Reads a length-prefixed byte sequence and exposes it as a {@link ByteBuffer}.
 *
 * @return a buffer wrapping the freshly read bytes
 * @throws IOException if the underlying stream fails or ends prematurely
 */
@Override
public ByteBuffer readBytes() throws IOException {
  byte[] raw = rawReadBytes();
  return ByteBuffer.wrap(raw);
}
/**
 * Reads a single byte and interprets it as a boolean: a value of exactly 1
 * means true, anything else means false.
 *
 * @throws IOException if the underlying stream fails
 */
@Override
public boolean readBool() throws IOException {
  final byte flag = readByte();
  return flag == 1;
}
/**
 * Reads a length-prefixed byte array: the length is obtained from
 * {@code readInt()}, then exactly that many bytes are consumed from
 * {@code input}.
 *
 * @return the bytes read; empty array when the declared length is zero
 * @throws IOException if the declared length is negative (corrupt data) or
 *         the stream ends before all bytes are read
 */
private byte[] rawReadBytes() throws IOException {
  int toRead = readInt();
  if (toRead < 0) {
    // Corrupt or truncated input can decode to a negative length; fail with a
    // descriptive IOException instead of an unchecked NegativeArraySizeException.
    throw new IOException("Invalid negative length of bytes: " + toRead);
  }
  byte[] bytes = new byte[toRead];
  while (toRead > 0) {
    int byteRead = input.read(bytes, bytes.length - toRead, toRead);
    if (byteRead == -1) {
      throw new EOFException();
    }
    toRead -= byteRead;
  }
  return bytes;
}
@Override public void skipDouble() throws IOException { // Skip 8 bytes skipBytes(8L); }
/**
 * Reads a length-prefixed byte array: the length is obtained from
 * {@code readInt()}, then exactly that many bytes are consumed from
 * {@code input}.
 *
 * @return the bytes read; empty array when the declared length is zero
 * @throws IOException if the declared length is negative (corrupt data) or
 *         the stream ends before all bytes are read
 */
private byte[] rawReadBytes() throws IOException {
  int toRead = readInt();
  if (toRead < 0) {
    // Corrupt or truncated input can decode to a negative length; fail with a
    // descriptive IOException instead of an unchecked NegativeArraySizeException.
    throw new IOException("Invalid negative length of bytes: " + toRead);
  }
  byte[] bytes = new byte[toRead];
  while (toRead > 0) {
    int byteRead = input.read(bytes, bytes.length - toRead, toRead);
    if (byteRead == -1) {
      throw new EOFException();
    }
    toRead -= byteRead;
  }
  return bytes;
}
@Override public void skipFloat() throws IOException { // Skip 4 bytes skipBytes(4L); }
private T decode(byte[] bytes) { if (bytes == null) { return null; } // decode T using schema ByteArrayInputStream bis = new ByteArrayInputStream(bytes); BinaryDecoder decoder = new BinaryDecoder(bis); try { return getReflectionDatumReader().read(decoder, this.schema); } catch (IOException e) { // SHOULD NEVER happen throw new DataSetException("Failed to decode read object: " + e.getMessage(), e); } }
@Override public void skipDouble() throws IOException { // Skip 8 bytes skipBytes(8L); }
/**
 * Reads a length-prefixed byte sequence and decodes it as a UTF-8 string.
 *
 * @return the decoded string
 * @throws IOException if the underlying stream fails or ends prematurely
 */
@Override
public String readString() throws IOException {
  // Prefer the JDK's StandardCharsets (Java 7+) over Guava's Charsets; Guava's
  // own docs recommend this, and the constant is the same Charset instance,
  // so behavior is unchanged. Fully qualified to avoid touching imports.
  return new String(rawReadBytes(), java.nio.charset.StandardCharsets.UTF_8);
}
private T decode(byte[] bytes) { if (bytes == null) { return null; } // decode T using schema ByteArrayInputStream bis = new ByteArrayInputStream(bytes); BinaryDecoder decoder = new BinaryDecoder(bis); try { return getReflectionDatumReader().read(decoder, this.schema); } catch (IOException e) { // SHOULD NEVER happen throw new DataSetException("Failed to decode read object: " + e.getMessage(), e); } }
@Override public void skipFloat() throws IOException { // Skip 4 bytes skipBytes(4L); }
/**
 * Reads a length-prefixed byte sequence and decodes it as a UTF-8 string.
 *
 * @return the decoded string
 * @throws IOException if the underlying stream fails or ends prematurely
 */
@Override
public String readString() throws IOException {
  // Prefer the JDK's StandardCharsets (Java 7+) over Guava's Charsets; Guava's
  // own docs recommend this, and the constant is the same Charset instance,
  // so behavior is unchanged. Fully qualified to avoid touching imports.
  return new String(rawReadBytes(), java.nio.charset.StandardCharsets.UTF_8);
}
/**
 * Decodes an AccessToken from its serialized form. The payload begins with a
 * schema-version int, followed by the Avro-encoded token in that schema's
 * layout; the reader resolves it against the current schema.
 *
 * @param data serialized token bytes
 * @return the decoded AccessToken
 * @throws IOException if the payload carries an unknown schema version or
 *         decoding fails
 */
@Override
public AccessToken decode(byte[] data) throws IOException {
  Decoder decoder = new BinaryDecoder(new ByteArrayInputStream(data));
  DatumReader<AccessToken> reader =
      readerFactory.create(ACCESS_TOKEN_TYPE, AccessToken.Schemas.getCurrentSchema());
  int version = decoder.readInt();
  Schema writtenSchema = AccessToken.Schemas.getSchemaVersion(version);
  if (writtenSchema == null) {
    throw new IOException("Unknown schema version for AccessToken: " + version);
  }
  return reader.read(decoder, writtenSchema);
}
}