/** * Ensures that either the UTF-8 text has been set directly or by indirectly converting the Map contents to JSON. */ private void ensureTextSet() { if (_text.limit() == 0) { checkState(_map != null, "Neither JSON text nor map has been set"); _text.clear(); // First try reading the JSON directly into be buffer. try { JsonHelper.writeJson(new ByteBufferOutputStream(_text), _map); // Set the limit and move the position back to zero. _text.flip(); } catch (Exception e) { if (Iterables.tryFind(Throwables.getCausalChain(e), Predicates.instanceOf(BufferOverflowException.class)).isPresent()) { // Buffer was insufficient. Allocate a new array and read the bytes into it. byte[] utf8 = JsonHelper.asUtf8Bytes(_map); _text = ByteBuffer.wrap(utf8); } else { throw Throwables.propagate(e); } } } }
/** * Ensures that either the UTF-8 text has been set directly or by indirectly converting the Map contents to JSON. */ private void ensureTextSet() { if (_text.limit() == 0) { checkState(_map != null, "Neither JSON text nor map has been set"); _text.clear(); // First try reading the JSON directly into be buffer. try { JsonHelper.writeJson(new ByteBufferOutputStream(_text), _map); // Set the limit and move the position back to zero. _text.flip(); } catch (Exception e) { if (Iterables.tryFind(Throwables.getCausalChain(e), Predicates.instanceOf(BufferOverflowException.class)).isPresent()) { // Buffer was insufficient. Allocate a new array and read the bytes into it. byte[] utf8 = JsonHelper.asUtf8Bytes(_map); _text = ByteBuffer.wrap(utf8); } else { throw Throwables.propagate(e); } } } }
/**
 * Write the header into the buffer.
 * This requires that PKT_HEADER_LEN bytes are available.
 *
 * @param buf destination buffer; its position is advanced past the written header
 * @throws RuntimeException if serializing the proto into the buffer fails
 */
public void putInBuffer(final ByteBuffer buf) {
    // The proto length is written as a short below, so it must fit in MAX_PROTO_SIZE.
    // Message fixed: the condition is an upper bound, not an exact expected value.
    assert proto.getSerializedSize() <= MAX_PROTO_SIZE :
        "Expected at most " + MAX_PROTO_SIZE + ", got: " + proto.getSerializedSize();
    try {
        buf.putInt(packetLen);
        buf.putShort((short) proto.getSerializedSize());
        proto.writeTo(new ByteBufferOutputStream(buf));
    } catch (IOException e) {
        // Buffer-backed streams shouldn't raise IOException in practice; wrap as unchecked.
        throw new RuntimeException(e);
    }
}
/**
 * Write the header into the buffer.
 * This requires that PKT_HEADER_LEN bytes are available.
 */
public void putInBuffer(final ByteBuffer buf) {
    // Capture once; protobuf memoizes the serialized size so this matches the original.
    final int protoSize = proto.getSerializedSize();
    assert protoSize <= MAX_PROTO_SIZE : "Expected " + (MAX_PROTO_SIZE) + " got: " + protoSize;
    try {
        buf.putInt(packetLen);
        buf.putShort((short) protoSize);
        proto.writeTo(new ByteBufferOutputStream(buf));
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Write the header into the buffer.
 * This requires that PKT_HEADER_LEN bytes are available.
 */
public void putInBuffer(final ByteBuffer buf) {
    // The proto size is encoded as a short below, so it must not exceed MAX_PROTO_SIZE.
    assert proto.getSerializedSize() <= MAX_PROTO_SIZE : "Expected " + (MAX_PROTO_SIZE) + " got: " + proto.getSerializedSize();
    try {
        // Fixed-size prefix: packet length (int) followed by the proto's serialized size (short).
        buf.putInt(packetLen);
        buf.putShort((short) proto.getSerializedSize());
        // Serialize the proto directly into the remaining buffer space.
        proto.writeTo(new ByteBufferOutputStream(buf));
    } catch (IOException e) {
        // Buffer-backed streams shouldn't raise IOException in practice; wrap as unchecked.
        throw new RuntimeException(e);
    }
}
public boolean fetchNextRow() throws IOException { if (!_rows.hasNext()) { return false; } // TODO: Essentially we're streaming a JSON array of objects, converting the objects to Java Maps, // then converting the Maps back to JSON strings. There's possible efficiency improvement if we // don't use DataStore and call the split API directly with a custom JSON parser. However, // to take advantage of the established DataStore client this has not been done at this time. Map<String, Object> row = _rows.next(); try { // Attempt to read the row into the existing byte buffer. _buffer.clear(); JsonHelper.writeJson(_out, row); _buffer.flip(); } catch (Exception e) { if (Iterables.tryFind(Throwables.getCausalChain(e), Predicates.instanceOf(BufferOverflowException.class)).isPresent()) { // Buffer overflow. Allocate a new buffer and try again. byte[] content = JsonHelper.asUtf8Bytes(row); _buffer = ByteBuffer.wrap(content); _out = new ByteBufferOutputStream(_buffer); } else { Throwables.propagateIfPossible(e, IOException.class); throw new IOException("Failed to read next row", e); } } return true; }
public boolean fetchNextRow() throws IOException { if (!_rows.hasNext()) { return false; } // TODO: Essentially we're streaming a JSON array of objects, converting the objects to Java Maps, // then converting the Maps back to JSON strings. There's possible efficiency improvement if we // don't use DataStore and call the split API directly with a custom JSON parser. However, // to take advantage of the established DataStore client this has not been done at this time. Map<String, Object> row = _rows.next(); try { // Attempt to read the row into the existing byte buffer. _buffer.clear(); JsonHelper.writeJson(_out, row); _buffer.flip(); } catch (Exception e) { if (Iterables.tryFind(Throwables.getCausalChain(e), Predicates.instanceOf(BufferOverflowException.class)).isPresent()) { // Buffer overflow. Allocate a new buffer and try again. byte[] content = JsonHelper.asUtf8Bytes(row); _buffer = ByteBuffer.wrap(content); _out = new ByteBufferOutputStream(_buffer); } else { Throwables.propagateIfPossible(e, IOException.class); throw new IOException("Failed to read next row", e); } } return true; }