// Hadoop Writable deserialization hook: lazily creates a reusable stream adapter plus a
// CodedInputStream whose default 64MB message limit is raised to Integer.MAX_VALUE, then
// reads one protobuf message via the configured parser.
// NOTE(review): `DataInputStream` here appears to be a project-local wrapper with a public
// `in` field (java.io.DataInputStream has no no-arg constructor) — confirm against the class.
// NOTE(review): the final `}` closes the enclosing class, which is outside this view.
@Override public void readFields(DataInput in) throws IOException { if (din == null) { din = new DataInputStream(); cin = CodedInputStream.newInstance(din); cin.setSizeLimit(Integer.MAX_VALUE); } din.in = in; message = cin.readMessage(parser, null); } }
/**
 * Decodes LZ4 block-compressed, protobuf-encoded file source data that may exceed
 * protobuf's default 64MB message size limit.
 *
 * @param binaryData LZ4 block-compressed serialized {@code DbFileSources.Data}
 * @return the decoded message
 * @throws IOException if decompression or protobuf parsing fails
 */
private static DbFileSources.Data decodeHugeSourceData(byte[] binaryData) throws IOException {
  ByteArrayInputStream rawBytes = new ByteArrayInputStream(binaryData);
  try (LZ4BlockInputStream decompressed = new LZ4BlockInputStream(rawBytes)) {
    CodedInputStream coded = CodedInputStream.newInstance(decompressed);
    // Lift the default 64MB cap so very large source files can still be decoded.
    coded.setSizeLimit(Integer.MAX_VALUE);
    return DbFileSources.Data.parseFrom(coded);
  }
}
/**
 * This version of protobuf's mergeFrom avoids the hard-coded 64MB limit for decoding
 * buffers when working with byte arrays: the decoder's size limit is raised to the
 * full array length before parsing.
 *
 * @param builder current message builder
 * @param b byte array holding the serialized message
 * @throws IOException if the bytes do not form a valid message
 */
public static void mergeFrom(Message.Builder builder, byte[] b) throws IOException {
  final CodedInputStream stream = CodedInputStream.newInstance(b);
  stream.setSizeLimit(b.length);
  builder.mergeFrom(stream);
  // Tag 0 marks end-of-message: verify the whole buffer was consumed cleanly.
  stream.checkLastTagWas(0);
}
// Copy-style constructor: opens a reader over a separate RandomAccessFile handle while reusing
// the index metadata already parsed by another reader of the same physical file, so the file
// header does not need to be scanned again. The CodedInputStream limit is raised from the
// protobuf 64MB default to Integer.MAX_VALUE (the "2048 MB" comment refers to that cap).
// NOTE(review): the index lists are shallow copies — the contained index objects are shared
// with referenceToSameFile; confirm callers treat them as immutable.
public BinaryMapIndexReader(final RandomAccessFile raf, BinaryMapIndexReader referenceToSameFile) throws IOException { this.raf = raf; this.file = referenceToSameFile.file; codedIS = CodedInputStream.newInstance(raf); codedIS.setSizeLimit(Integer.MAX_VALUE); // 2048 MB version = referenceToSameFile.version; dateCreated = referenceToSameFile.dateCreated; transportAdapter = new BinaryMapTransportReaderAdapter(this); addressAdapter = new BinaryMapAddressReaderAdapter(this); poiAdapter = new BinaryMapPoiReaderAdapter(this); routeAdapter = new BinaryMapRouteReaderAdapter(this); mapIndexes = new ArrayList<BinaryMapIndexReader.MapIndex>(referenceToSameFile.mapIndexes); poiIndexes = new ArrayList<PoiRegion>(referenceToSameFile.poiIndexes); addressIndexes = new ArrayList<AddressRegion>(referenceToSameFile.addressIndexes); transportIndexes = new ArrayList<TransportIndex>(referenceToSameFile.transportIndexes); routingIndexes = new ArrayList<RouteRegion>(referenceToSameFile.routingIndexes); indexes = new ArrayList<BinaryIndexPart>(referenceToSameFile.indexes); basemap = referenceToSameFile.basemap; calculateCenterPointForRegions(); }
/**
 * This version of protobuf's mergeFrom avoids the hard-coded 64MB limit for decoding
 * buffers when working with a slice of a byte array: the decoder's size limit is set
 * to the slice length before parsing.
 *
 * @param builder current message builder
 * @param b byte array holding the serialized message
 * @param offset start of the slice within {@code b}
 * @param length number of bytes in the slice
 * @throws IOException if the slice does not form a valid message
 */
public static void mergeFrom(Message.Builder builder, byte[] b, int offset, int length) throws IOException {
  final CodedInputStream stream = CodedInputStream.newInstance(b, offset, length);
  stream.setSizeLimit(length);
  builder.mergeFrom(stream);
  // Tag 0 marks end-of-message: verify the entire slice was consumed cleanly.
  stream.checkLastTagWas(0);
}
// Fragment (mid-method, enclosing branch not visible): raises the decoder limit to ORC's
// configured protobuf maximum before parsing split metadata from the stream.
cis.setSizeLimit(InStream.PROTOBUF_MESSAGE_MAX_LIMIT); return generateSplitsFromPpd(SplitInfos.parseFrom(cis)); } else {
/**
 * Skips over one encoded {@code CellProtos.Cell} in {@code src} by parsing it into a
 * throwaway builder, then advancing the range position by the bytes consumed. The
 * decoder's size limit is raised to the range length so large cells can be traversed.
 *
 * @param src positioned byte range to advance past the serialized cell
 * @return the number of bytes consumed
 */
@Override public int skip(PositionedByteRange src) {
  CellProtos.Cell.Builder cellBuilder = CellProtos.Cell.newBuilder();
  CodedInputStream stream = inputStreamFromByteRange(src);
  stream.setSizeLimit(src.getLength());
  try {
    cellBuilder.mergeFrom(stream);
    final int bytesRead = stream.getTotalBytesRead();
    src.setPosition(src.getPosition() + bytesRead);
    return bytesRead;
  } catch (IOException e) {
    throw new RuntimeException("Error while skipping type.", e);
  }
}
/**
 * Decodes one {@code CellProtos.Cell} from {@code src} and advances the range position
 * by the bytes consumed. The decoder's size limit is raised to the range length so
 * cells larger than protobuf's default cap can be read.
 *
 * @param src positioned byte range holding the serialized cell
 * @return the decoded cell
 */
@Override public CellProtos.Cell decode(PositionedByteRange src) {
  CellProtos.Cell.Builder cellBuilder = CellProtos.Cell.newBuilder();
  CodedInputStream stream = inputStreamFromByteRange(src);
  stream.setSizeLimit(src.getLength());
  try {
    CellProtos.Cell decoded = cellBuilder.mergeFrom(stream).build();
    src.setPosition(src.getPosition() + stream.getTotalBytesRead());
    return decoded;
  } catch (IOException e) {
    throw new RuntimeException("Error while decoding type.", e);
  }
}
// Primary constructor: wraps the RandomAccessFile in a CodedInputStream whose protobuf
// 64MB default limit is raised to Integer.MAX_VALUE (the "2048 MB" comment refers to that
// cap), wires up the per-section reader adapters, and eagerly parses the file via init().
public BinaryMapIndexReader(final RandomAccessFile raf, File file) throws IOException { this.raf = raf; this.file = file; codedIS = CodedInputStream.newInstance(raf); codedIS.setSizeLimit(Integer.MAX_VALUE); // 2048 MB transportAdapter = new BinaryMapTransportReaderAdapter(this); addressAdapter = new BinaryMapAddressReaderAdapter(this); poiAdapter = new BinaryMapPoiReaderAdapter(this); routeAdapter = new BinaryMapRouteReaderAdapter(this); init(); }
// Deferred-init constructor: identical setup to the two-argument form (CodedInputStream
// limit raised from the protobuf 64MB default to Integer.MAX_VALUE, adapters wired up),
// but parsing of the file via init() only happens when the caller passes init == true.
public BinaryMapIndexReader(final RandomAccessFile raf, File file, boolean init) throws IOException { this.raf = raf; this.file = file; codedIS = CodedInputStream.newInstance(raf); codedIS.setSizeLimit(Integer.MAX_VALUE); // 2048 MB transportAdapter = new BinaryMapTransportReaderAdapter(this); addressAdapter = new BinaryMapAddressReaderAdapter(this); poiAdapter = new BinaryMapPoiReaderAdapter(this); routeAdapter = new BinaryMapRouteReaderAdapter(this); if (init) { init(); } }
// Fragment (mid-method, switch body not visible): raises the decoder limit to ORC's
// configured protobuf maximum before dispatching on the stream kind being read.
cis.setSizeLimit(InStream.PROTOBUF_MESSAGE_MAX_LIMIT); switch (sctx.kind) { case ROW_INDEX:
// Parser wrapper: raises the CodedInputStream's default 64MB message limit to
// Integer.MAX_VALUE before delegating to the wrapped parser, so oversized messages
// can still be decoded.
// NOTE(review): the final `}` closes the enclosing (likely anonymous) class, which is
// outside this view.
@Override public T parsePartialFrom(CodedInputStream input, ExtensionRegistryLite extensionRegistry) throws InvalidProtocolBufferException { input.setSizeLimit(Integer.MAX_VALUE); return parser.parsePartialFrom(input, extensionRegistry); } }
/**
 * This version of protobuf's mergeFrom avoids the hard-coded 64MB limit for decoding
 * buffers when working with byte arrays. The size limit is widened to the array length
 * before merging, and the final tag is checked to confirm a clean end of message.
 *
 * @param builder current message builder
 * @param b byte array holding the serialized message
 * @throws IOException if the bytes do not form a valid message
 */
public static void mergeFrom(Message.Builder builder, byte[] b) throws IOException {
  final CodedInputStream decoder = CodedInputStream.newInstance(b);
  decoder.setSizeLimit(b.length);
  builder.mergeFrom(decoder);
  // A last tag of 0 means the message ended exactly at the buffer boundary.
  decoder.checkLastTagWas(0);
}
/**
 * This version of protobuf's mergeFrom avoids the hard-coded 64MB limit for decoding
 * buffers when working with a byte-array slice. The size limit is widened to the slice
 * length before merging, and the final tag is checked to confirm a clean end of message.
 *
 * @param builder current message builder
 * @param b byte array holding the serialized message
 * @param offset start of the slice within {@code b}
 * @param length number of bytes in the slice
 * @throws IOException if the slice does not form a valid message
 */
public static void mergeFrom(Message.Builder builder, byte[] b, int offset, int length) throws IOException {
  final CodedInputStream decoder = CodedInputStream.newInstance(b, offset, length);
  decoder.setSizeLimit(length);
  builder.mergeFrom(decoder);
  // A last tag of 0 means the message ended exactly at the slice boundary.
  decoder.checkLastTagWas(0);
}
// Fragment (single statement, enclosing method not visible): raises the decoder's
// protobuf size limit to the caller-supplied maximum data length.
cis.setSizeLimit(maxDataLength);
/**
 * Reads a serialized block report from the stream and decodes it into a
 * {@code BlockListAsLongs}. The message is walked field-by-field with a
 * CodedInputStream so the decode size limit can be raised to {@code maxDataLength}
 * when it differs from the IPC default.
 *
 * Wire layout: field 1 = number of blocks (int32), field 2 = packed block data
 * (bytes). Unknown fields are skipped.
 *
 * @param is input stream containing the encoded report
 * @param maxDataLength maximum decodable message size in bytes
 * @return the decoded block list, or {@code null} if either field was absent
 * @throws IOException on malformed or truncated input
 */
public static BlockListAsLongs readFrom(InputStream is, int maxDataLength) throws IOException {
  CodedInputStream cis = CodedInputStream.newInstance(is);
  if (maxDataLength != IPC_MAXIMUM_DATA_LENGTH_DEFAULT) {
    cis.setSizeLimit(maxDataLength);
  }
  int numBlocks = -1;
  ByteString blocksBuf = null;
  while (!cis.isAtEnd()) {
    final int tag = cis.readTag();
    switch (WireFormat.getTagFieldNumber(tag)) {
      case 0:
        // Field number 0 is the end-of-group/invalid marker; nothing to consume.
        break;
      case 1:
        numBlocks = (int) cis.readInt32();
        break;
      case 2:
        blocksBuf = cis.readBytes();
        break;
      default:
        cis.skipField(tag);
        break;
    }
  }
  return (numBlocks != -1 && blocksBuf != null)
      ? decodeBuffer(numBlocks, blocksBuf, maxDataLength)
      : null;
}
/**
 * Decodes file source data that was LZ4 block-compressed and protobuf-encoded,
 * and which may be larger than protobuf's default 64MB message limit.
 *
 * @param binaryData LZ4 block-compressed serialized {@code DbFileSources.Data}
 * @return the decoded message
 * @throws IOException if decompression or protobuf parsing fails
 */
private static DbFileSources.Data decodeHugeSourceData(byte[] binaryData) throws IOException {
  try (LZ4BlockInputStream lz4Stream = new LZ4BlockInputStream(new ByteArrayInputStream(binaryData))) {
    CodedInputStream protoStream = CodedInputStream.newInstance(lz4Stream);
    // Widen the default 64MB cap so huge source payloads decode successfully.
    protoStream.setSizeLimit(Integer.MAX_VALUE);
    return DbFileSources.Data.parseFrom(protoStream);
  }
}
/**
 * Returns the loaded protocol buffer from the given byte stream. You normally want
 * {@link Wallet#loadFromFile(java.io.File, WalletExtension...)} instead - this method is designed for low level
 * work involving the wallet file format itself.
 */
public static Protos.Wallet parseToProto(InputStream input) throws IOException {
  CodedInputStream stream = CodedInputStream.newInstance(input);
  // Raise protobuf's default 64MB cap to the wallet-specific size limit.
  stream.setSizeLimit(WALLET_SIZE_LIMIT);
  return Protos.Wallet.parseFrom(stream);
}
/**
 * Returns the loaded protocol buffer from the given byte stream. You normally want
 * {@link Wallet#loadFromFile(java.io.File, WalletExtension...)} instead - this method is designed for low level
 * work involving the wallet file format itself.
 */
public static Protos.Wallet parseToProto(InputStream input) throws IOException {
  CodedInputStream walletStream = CodedInputStream.newInstance(input);
  // Protobuf defaults to a 64MB message cap; wallets may legitimately be larger.
  walletStream.setSizeLimit(WALLET_SIZE_LIMIT);
  return Protos.Wallet.parseFrom(walletStream);
}
public static SerializedBlock parseFrom(InputStream in, int maxSize) throws InvalidProtocolBufferException, IOException { // create a CodedInputStream so that protobuf can enforce the configured max size // instead of using the default which may not be large enough for this data CodedInputStream codedInput = CodedInputStream.newInstance(in); codedInput.setSizeLimit(maxSize); DynamicMessage.Builder messageBuilder = DynamicMessage.newBuilder(messageDescriptor) .mergeFrom(codedInput); // verify we've read to the end codedInput.checkLastTagWas(0); return new SerializedBlock(messageBuilder.build()); }