/**
 * Reads the next record from the decoder, reusing the previously returned
 * instance where possible. Returns {@code null} once the input is exhausted
 * (either a clean end of stream or a truncated final record).
 */
@Override
protected GenericRecord nextAvroRecord() throws IOException {
  if (!decoder.isEnd()) {
    try {
      genericRecord = datumReader.read(genericRecord, decoder);
      return genericRecord;
    } catch (final EOFException ignored) {
      // Truncated input: fall through and report end of stream.
    }
  }
  return null;
}
boolean hasNextBlock() { try { if (availableBlock) return true; if (vin.isEnd()) return false; blockRemaining = vin.readLong(); // read block count blockSize = vin.readLong(); // read block size if (blockSize > Integer.MAX_VALUE || blockSize < 0) { throw new IOException("Block size invalid or too large for this " + "implementation: " + blockSize); } blockCount = blockRemaining; availableBlock = true; return true; } catch (EOFException eof) { return false; } catch (IOException e) { throw new AvroRuntimeException(e); } }
boolean hasNextBlock() { try { if (availableBlock) return true; if (vin.isEnd()) return false; blockRemaining = vin.readLong(); // read block count blockSize = vin.readLong(); // read block size if (blockSize > Integer.MAX_VALUE || blockSize < 0) { throw new IOException("Block size invalid or too large for this " + "implementation: " + blockSize); } blockCount = blockRemaining; availableBlock = true; return true; } catch (EOFException eof) { return false; } catch (IOException e) { throw new AvroRuntimeException(e); } }
private void parseTopics() { if (state.getProperty(TOPIC_LIST) != null) { byte[] data = base64.decodeBase64(state.getProperty(TOPIC_LIST)); BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(data, null); SpecificDatumReader<Topic> avroReader = new SpecificDatumReader<>(Topic.class); try { // NOSONAR Topic decodedTopic; while (!decoder.isEnd()) { decodedTopic = avroReader.read(null, decoder); LOG.debug("Loaded {}", decodedTopic); topicMap.put(decodedTopic.getId(), decodedTopic); } } catch (Exception ex) { LOG.error("Unexpected exception occurred while reading information from decoder", ex); } } else { LOG.info("No topic list found in state"); } }
/** True if more entries remain in this file. */ @Override public boolean hasNext() { try { if (blockRemaining == 0) { // check that the previous block was finished if (null != datumIn) { boolean atEnd = datumIn.isEnd(); if (!atEnd) { throw new IOException("Block read partially, the data may be corrupt"); } } if (hasNextBlock()) { block = nextRawBlock(block); block.decompressUsing(codec); blockBuffer = block.getAsByteBuffer(); datumIn = DecoderFactory.get().binaryDecoder( blockBuffer.array(), blockBuffer.arrayOffset() + blockBuffer.position(), blockBuffer.remaining(), datumIn); } } return blockRemaining != 0; } catch (EOFException e) { // at EOF return false; } catch (IOException e) { throw new AvroRuntimeException(e); } }
/** True if more entries remain in this file. */ @Override public boolean hasNext() { try { if (blockRemaining == 0) { // check that the previous block was finished if (null != datumIn) { boolean atEnd = datumIn.isEnd(); if (!atEnd) { throw new IOException("Block read partially, the data may be corrupt"); } } if (hasNextBlock()) { block = nextRawBlock(block); block.decompressUsing(codec); blockBuffer = block.getAsByteBuffer(); datumIn = DecoderFactory.get().binaryDecoder( blockBuffer.array(), blockBuffer.arrayOffset() + blockBuffer.position(), blockBuffer.remaining(), datumIn); } } return blockRemaining != 0; } catch (EOFException e) { // at EOF return false; } catch (IOException e) { throw new AvroRuntimeException(e); } }
// Re-encode the binary Avro stream as JSON; pretty-print unless noPretty is set.
JsonEncoder jsonEncoder = EncoderFactory.get().jsonEncoder(schema, out, !noPretty);
Object datum = null;
// Reuse `datum` across reads to reduce allocation; emit each record as JSON.
// NOTE(review): fragment appears truncated here — loop close not visible in this chunk.
while (!binaryDecoder.isEnd()){
datum = reader.read(datum, binaryDecoder);
writer.write(datum, jsonEncoder);
@Test public void testInputStreamPartiallyUsed() throws IOException { BinaryDecoder bd = factory.binaryDecoder( new ByteArrayInputStream(data), null); InputStream test = bd.inputStream(); InputStream check = new ByteArrayInputStream(data); // triggers buffer fill if unused and tests isEnd() try { Assert.assertFalse(bd.isEnd()); } catch (UnsupportedOperationException e) { // this is ok if its a DirectBinaryDecoder. if (bd.getClass() != DirectBinaryDecoder.class) { throw e; } } bd.readFloat(); // use data, and otherwise trigger buffer fill check.skip(4); // skip the same # of bytes here validateInputStreamReads(test, check); }
@Test public void testSkipping() throws IOException { Decoder d = newDecoder(data); skipGenerated(d); if (d instanceof BinaryDecoder) { BinaryDecoder bd = (BinaryDecoder) d; try { Assert.assertTrue(bd.isEnd()); } catch (UnsupportedOperationException e) { // this is ok if its a DirectBinaryDecoder. if (bd.getClass() != DirectBinaryDecoder.class) { throw e; } } bd = factory.binaryDecoder(new ByteArrayInputStream(data), bd); skipGenerated(bd); try { Assert.assertTrue(bd.isEnd()); } catch (UnsupportedOperationException e) { // this is ok if its a DirectBinaryDecoder. if (bd.getClass() != DirectBinaryDecoder.class) { throw e; } } } }
/** Reports end-of-input by delegating to the underlying Avro decoder. */
@Override
public boolean checkInputEnd() throws IOException {
  final boolean atEnd = _decoder.isEnd();
  return atEnd;
}
boolean hasNextBlock() { try { if (availableBlock) return true; if (vin.isEnd()) return false; blockRemaining = vin.readLong(); // read block count blockSize = vin.readLong(); // read block size if (blockSize > Integer.MAX_VALUE || blockSize < 0) { throw new IOException("Block size invalid or too large for this " + "implementation: " + blockSize); } availableBlock = true; return true; } catch (EOFException eof) { return false; } catch (IOException e) { throw new AvroRuntimeException(e); } }
boolean hasNextBlock() { try { if (availableBlock) return true; if (vin.isEnd()) return false; blockRemaining = vin.readLong(); // read block count blockSize = vin.readLong(); // read block size if (blockSize > Integer.MAX_VALUE || blockSize < 0) { throw new IOException("Block size invalid or too large for this " + "implementation: " + blockSize); } blockCount = blockRemaining; availableBlock = true; return true; } catch (EOFException eof) { return false; } catch (IOException e) { throw new AvroRuntimeException(e); } }
boolean hasNextBlock() { try { if (availableBlock) return true; if (vin.isEnd()) return false; blockRemaining = vin.readLong(); // read block count blockSize = vin.readLong(); // read block size if (blockSize > Integer.MAX_VALUE || blockSize < 0) { throw new IOException("Block size invalid or too large for this " + "implementation: " + blockSize); } blockCount = blockRemaining; availableBlock = true; return true; } catch (EOFException eof) { return false; } catch (IOException e) { throw new AvroRuntimeException(e); } }
boolean hasNextBlock() { try { if (availableBlock) return true; if (vin.isEnd()) return false; blockRemaining = vin.readLong(); // read block count blockSize = vin.readLong(); // read block size if (blockSize > Integer.MAX_VALUE || blockSize < 0) { throw new IOException("Block size invalid or too large for this " + "implementation: " + blockSize); } blockCount = blockRemaining; availableBlock = true; return true; } catch (EOFException eof) { return false; } catch (IOException e) { throw new AvroRuntimeException(e); } }
/** True if more entries remain in this file. */ @Override public boolean hasNext() { try { if (blockRemaining == 0) { // check that the previous block was finished if (null != datumIn) { boolean atEnd = datumIn.isEnd(); if (!atEnd) { throw new IOException("Block read partially, the data may be corrupt"); } } if (hasNextBlock()) { block = nextRawBlock(block); block.decompressUsing(codec); blockBuffer = block.getAsByteBuffer(); datumIn = DecoderFactory.get().binaryDecoder( blockBuffer.array(), blockBuffer.arrayOffset() + blockBuffer.position(), blockBuffer.remaining(), datumIn); } } return blockRemaining != 0; } catch (EOFException e) { // at EOF return false; } catch (IOException e) { throw new AvroRuntimeException(e); } }
/** True if more entries remain in this file. */ public boolean hasNext() { try { if (blockRemaining == 0) { // check that the previous block was finished if (null != datumIn) { boolean atEnd = datumIn.isEnd(); if (!atEnd) { throw new IOException("Block read partially, the data may be corrupt"); } } if (hasNextBlock()) { block = nextBlock(block); ByteBuffer blockBuffer = ByteBuffer.wrap(block.data, 0, block.blockSize); blockBuffer = codec.decompress(blockBuffer); datumIn = DecoderFactory.defaultFactory().createBinaryDecoder( blockBuffer.array(), blockBuffer.arrayOffset() + blockBuffer.position(), blockBuffer.remaining(), datumIn); } } return blockRemaining != 0; } catch (EOFException e) { // at EOF return false; } catch (IOException e) { throw new AvroRuntimeException(e); } }
/** True if more entries remain in this file. */ public boolean hasNext() { try { if (blockRemaining == 0) { // check that the previous block was finished if (null != datumIn) { boolean atEnd = datumIn.isEnd(); if (!atEnd) { throw new IOException("Block read partially, the data may be corrupt"); } } if (hasNextBlock()) { block = nextRawBlock(block); block.decompressUsing(codec); blockBuffer = block.getAsByteBuffer(); datumIn = DecoderFactory.defaultFactory().createBinaryDecoder( blockBuffer.array(), blockBuffer.arrayOffset() + blockBuffer.position(), blockBuffer.remaining(), datumIn); } } return blockRemaining != 0; } catch (EOFException e) { // at EOF return false; } catch (IOException e) { throw new AvroRuntimeException(e); } }
@Override public JsonToken nextToken() throws IOException { // 19-Jan-2017, tatu: May need to be called multiple times, for root-level // sequences. Because of this need to check for EOF. But only after reading // one token successfully... if (_rootReader) { JsonToken t = _decoder.isEnd() ? null : _wrappedReader.readValue(_parser, _decoder); return (_currToken = t); } _parser.setAvroContext(getParent()); return (_currToken = _wrappedReader.readValue(_parser, _decoder)); }
/**
 * Emits END_OBJECT for the record just finished. When this reader is at the
 * root and the decoder still has input, the internal state is rewound so the
 * same reader instance can decode the next root-level record on a later call;
 * otherwise the reader is marked done and the parser context is popped.
 */
private final JsonToken _nextAtEndObject() throws IOException {
    AvroReadContext parent = getParent();
    // as per [dataformats-binary#38], may need to reset, instead of bailing out
    if (parent.inRoot()) {
        if (!_decoder.isEnd()) {
            // More root values follow: restart this reader, but still
            // surface the END_OBJECT for the record just completed.
            _state = STATE_START;
            _index = 0;
            return (_currToken = JsonToken.END_OBJECT);
        }
    }
    _state = STATE_DONE;
    _parser.setAvroContext(getParent());
    return (_currToken = JsonToken.END_OBJECT);
}
/**
 * Decodes the next Avro record and converts it into a {@link Bundle},
 * copying every schema field that produced a non-null value. Returns
 * {@code null} when the input is exhausted. The decoded record instance is
 * reused across calls to reduce allocation.
 */
@Override
public Bundle next() throws IOException {
  if (decoder.isEnd()) {
    return null; // input exhausted
  }
  reusableRecord = datumReader.read(reusableRecord, decoder);
  GenericData genericData = datumReader.getData();
  Bundle result = factory.createBundle();
  for (Schema.Field field : inputSchema.getFields()) {
    ValueObject converted =
        DataSourceAvro.getValueObject(reusableRecord, field, genericData);
    if (converted == null) {
      continue; // skip absent fields
    }
    result.setValue(result.getFormat().getField(field.name()), converted);
  }
  return result;
}
};