/**
 * Exposes the content of this {@code ByteArray} as a stream.
 *
 * @return a fresh {@link InputStream} positioned at the start of this {@code ByteArray}'s bytes
 */
public final InputStream asInputStream() {
  return byteString.newInput();
}
InputStream inputStreamFrom = value.newInput(); if (offset != inputStreamFrom.skip(offset)) { throw new IllegalStateException("Skip failed? Should never happen.");
/**
 * Opens a read-only stream over this {@code ByteArray}'s content.
 *
 * @return a new {@link InputStream} over the underlying bytes
 */
public final InputStream asInputStream() {
  return byteString.newInput();
}
/**
 * Provides stream access to this {@code ByteArray}'s content.
 *
 * @return a new {@link InputStream} reading the wrapped bytes from the beginning
 */
public final InputStream asInputStream() {
  return byteString.newInput();
}
/**
 * Wraps the given JSON payload in an {@link InputStream}, decompressing it
 * according to the supplied codec.
 *
 * @param json  the (possibly compressed) JSON bytes
 * @param codec compression algorithm; {@code null} is treated as {@code FragmentCodec.NONE}
 * @return a stream over the uncompressed JSON bytes
 * @throws IOException if the decompressing stream cannot be created
 * @throws UnsupportedOperationException if the codec is not supported
 */
public static InputStream toInputStream(com.google.protobuf.ByteString json, FragmentCodec codec)
    throws IOException {
  final FragmentCodec effective = (codec == null) ? FragmentCodec.NONE : codec;
  final InputStream raw = json.newInput();
  if (effective == FragmentCodec.NONE) {
    return raw;
  }
  if (effective == FragmentCodec.SNAPPY) {
    return new SnappyInputStream(raw);
  }
  throw new UnsupportedOperationException(
      "Do not know how to uncompress using " + effective + " algorithm.");
}
/**
 * Decompresses the given DEFLATE-compressed {@link ByteString} into a byte array.
 *
 * <p>The supplied {@link Inflater} is reset before use so it can safely be reused
 * across calls by the caller.
 *
 * @param byteString the compressed payload
 * @param inflater   the inflater to use (reset by this method, not ended)
 * @return the decompressed bytes
 * @throws IOException if decompression fails
 */
@Private
public static byte[] decompressByteStringToByteArray(ByteString byteString, Inflater inflater)
    throws IOException {
  inflater.reset();
  // FIX: close the stream explicitly — IOUtils.toByteArray does not close its
  // argument, so the original leaked the InflaterInputStream. Because the
  // Inflater is caller-supplied, close() does not end() it.
  try (InflaterInputStream in = new InflaterInputStream(byteString.newInput(), inflater)) {
    return IOUtils.toByteArray(in);
  }
}
/** Deserializes a {@link BidRequest} from a JSON string, provided as a {@link ByteString}. */
public BidRequest readBidRequest(ByteString bs) throws IOException {
  return readBidRequest(bs.newInput());
}
/** Deserializes a {@link NativeRequest} from a JSON string, provided as a {@link ByteString}. */
public NativeRequest readNativeRequest(ByteString bs) throws IOException {
  return readNativeRequest(bs.newInput());
}
/**
 * Returns the data at a given index as an InputStream.
 *
 * @param columnIndex 1-based column number (0 is invalid)
 * @return a stream over the column's raw bytes, or {@code null} if the value is SQL NULL
 * @throws SQLException if the column cannot be read
 */
public InputStream getBinaryInputStream(int columnIndex) throws SQLException {
  final ByteString rawValue = getRawValue(columnIndex);
  return (rawValue == null) ? null : rawValue.newInput();
}
/** Deserializes a {@link BidResponse} from a JSON string, provided as a {@link ByteString}. */
public BidResponse readBidResponse(ByteString bs) throws IOException {
  return readBidResponse(bs.newInput());
}
/** Deserializes a {@link NativeResponse} from a JSON string, provided as a {@link ByteString}. */
public NativeResponse readNativeResponse(ByteString bs) throws IOException {
  return readNativeResponse(bs.newInput());
}
/**
 * Serializes the given message and returns a stream over its wire-format bytes.
 * The message is fully serialized up front (toByteString), so the returned
 * stream is an in-memory view, not a lazy encoding.
 */
@Override
public InputStream stream(DynamicMessage abstractMessage) {
  return abstractMessage.toByteString().newInput();
}
/** * Deserialize the given byte string to an object using the given reader, employing the given codec algorithm. * * @param reader object reader * @param byteString byte string to deserialize * @param codec codec * @param logger logger * @param <T> object type of the deserialized object * @return deserialized object * @throws IOException in case of deserialization errors */ public static <T> T readValue(ObjectReader reader, ByteString byteString, Codec codec, Logger logger) throws IOException { if (logger.isTraceEnabled()) { // Costly conversion to UTF-8. Avoid if possible final String value = IOUtils.toString(codec.decompress(byteString.newInput())); logger.trace("Attempting to read {}", value); // Could reuse the value but to avoid so that logger level doesn't impact the program flow // Since level is trace, user probably doesn't care for performance right now // return reader.readValue(value); } try (InputStream is = codec.decompress(byteString.newInput())) { return reader.readValue(is); } }
/** * Convert a byte string to a Configuration object * * @param byteString byteString representation of the conf created using {@link * #createByteStringFromConf(org.apache.hadoop.conf.Configuration)} * @return Configuration * @throws java.io.IOException */ public static Configuration createConfFromByteString(ByteString byteString) throws IOException { Preconditions.checkNotNull(byteString, "ByteString must be specified"); // SnappyInputStream uncompressIs = new // SnappyInputStream(byteString.newInput()); InflaterInputStream uncompressIs = new InflaterInputStream(byteString.newInput()); DAGProtos.ConfigurationProto confProto = DAGProtos.ConfigurationProto.parseFrom(uncompressIs); Configuration conf = new Configuration(false); readConfFromPB(confProto, conf); return conf; }
/**
 * Copies a task output file into the job's working directory. The gzipped
 * stdout file is decompressed on the fly and written without its ".gz" suffix;
 * every other file is copied verbatim.
 */
private void copyFileToWorkingDirectory(Messages.TaskResult.OutputFile outputFile, MpiJob job,
    String stdOutGzFileName) throws IOException {
  final String name = outputFile.getName();
  final boolean gzippedStdOut = name.equals(stdOutGzFileName);
  // Strip the 3-character ".gz" suffix for the decompressed stdout target.
  final Path target = gzippedStdOut
      ? job.getWorkingDir().resolve(name.substring(0, name.length() - 3))
      : job.getWorkingDir().resolve(name);
  try (InputStream is = gzippedStdOut
          ? new GZIPInputStream(outputFile.getData().newInput())
          : outputFile.getData().newInput();
      OutputStream os = Files.newOutputStream(target)) {
    ByteStreams.copy(is, os);
  }
}
/**
 * Parses the media content with the Datacite reader and merges each resulting
 * document's metadata into the supplied builder; the collection field is then
 * cleared.
 *
 * <p>FIX: the content is decoded as UTF-8 explicitly — the original relied on
 * the platform default charset, making parsing environment-dependent.
 * (Assumes the stored content is UTF-8, the usual encoding for this corpus —
 * verify against the writer side.)
 *
 * @return always {@code true}
 */
@Override
public boolean transform(DocumentProtos.Media media, String docNewId,
    DocumentProtos.DocumentMetadata.Builder dmBuider,
    DocumentProtos.DocumentWrapper.Builder builder) {
  List<DocumentProtos.DocumentWrapperOrBuilder> dwbList = reader.read(
      new InputStreamReader(media.getContent().newInput(),
          java.nio.charset.StandardCharsets.UTF_8),
      new DataciteOAIId(docNewId));
  for (DocumentProtos.DocumentWrapperOrBuilder dw : dwbList) {
    dmBuider.mergeFrom(dw.getDocumentMetadata());
  }
  dmBuider.clearCollection();
  return true;
}
/**
 * Parses the media content as BWmeta 2.1, converts each exported element to
 * document metadata, and merges it into the supplied builder; the collection
 * field is then cleared.
 *
 * <p>FIX: the content is decoded as UTF-8 explicitly — the original relied on
 * the platform default charset, making parsing environment-dependent.
 * (Assumes the stored content is UTF-8 — verify against the writer side.)
 *
 * @return always {@code true}
 */
@Override
public boolean transform(DocumentProtos.Media media, String docNewId,
    DocumentProtos.DocumentMetadata.Builder dmBuider,
    DocumentProtos.DocumentWrapper.Builder builder) {
  List<YExportable> yExportableList = MetadataTransformers.BTF.getReader(
      BwmetaTransformerConstants.BWMETA_2_1, BwmetaTransformerConstants.Y).read(
      new InputStreamReader(media.getContent().newInput(),
          java.nio.charset.StandardCharsets.UTF_8));
  for (YExportable yExportable : yExportableList) {
    DocumentProtos.DocumentMetadata dm = parser
        .yelementToDocumentMetadata((YElement) yExportable, null, null, "synat");
    dmBuider.mergeFrom(dm);
  }
  dmBuider.clearCollection();
  return true;
}
/**
 * Restores the object-marshalling strategies recorded in the serialized session
 * header: resolves each named strategy, registers it under its persisted id,
 * creates its per-session context, and — when the header carries strategy data —
 * lets the context read that data back.
 *
 * @param context the unmarshalling context holding the strategy factory and maps
 * @param _header the protobuf session header listing the used strategies
 * @throws IOException            if a strategy context fails to read its data
 * @throws ClassNotFoundException if a class referenced by strategy data is missing
 * @throws IllegalStateException  if a named strategy cannot be resolved
 */
private static void loadStrategiesIndex(MarshallerReaderContext context, ProtobufMessages.Header _header)
    throws IOException, ClassNotFoundException {
  for ( ProtobufMessages.Header.StrategyIndex _entry : _header.getStrategyList() ) {
    ObjectMarshallingStrategy strategyObject =
        context.resolverStrategyFactory.getStrategyObject( _entry.getName() );
    if ( strategyObject == null ) {
      throw new IllegalStateException( "No strategy of type " + _entry.getName() + " available." );
    }
    // The persisted id, not the list position, keys the strategy for later lookups.
    context.usedStrategies.put( _entry.getId(), strategyObject );
    // A strategy may legitimately return a null context (no per-session state).
    Context ctx = strategyObject.createContext();
    context.strategyContexts.put( strategyObject, ctx );
    if( _entry.hasData() && ctx != null ) {
      // Prefer the explicitly configured class loader; fall back to the rule
      // base's root loader. May remain null if neither is available.
      ClassLoader classLoader = null;
      if (context.classLoader != null ){
        classLoader = context.classLoader;
      } else if(context.ruleBase != null){
        classLoader = context.ruleBase.getRootClassLoader();
      }
      ctx.read( new DroolsObjectInputStream( _entry.getData().newInput(), classLoader) );
    }
  }
}
/**
 * Rebuilds an application's in-memory state from its persisted protobuf record:
 * restores credentials and ACLs, registers the application with the context,
 * and re-dispatches the init event.
 *
 * @param p the persisted application state
 * @throws IOException if the stored credentials cannot be read
 */
private void recoverApplication(ContainerManagerApplicationProto p) throws IOException {
  ApplicationId appId = new ApplicationIdPBImpl(p.getId());
  Credentials creds = new Credentials();
  try (DataInputStream credsIn = new DataInputStream(p.getCredentials().newInput())) {
    creds.readTokenStorageStream(credsIn);
  }
  List<ApplicationACLMapProto> aclProtoList = p.getAclsList();
  Map<ApplicationAccessType, String> acls =
      new HashMap<ApplicationAccessType, String>(aclProtoList.size());
  for (ApplicationACLMapProto aclProto : aclProtoList) {
    acls.put(ProtoUtils.convertFromProtoFormat(aclProto.getAccessType()), aclProto.getAcl());
  }
  LogAggregationContext logAggregationContext = null;
  // BUG FIX: protobuf message getters never return null — they return the default
  // instance — so the original `getLogAggregationContext() != null` check was
  // always true. Use the generated has-method to detect a genuinely absent field.
  if (p.hasLogAggregationContext()) {
    logAggregationContext = new LogAggregationContextPBImpl(p.getLogAggregationContext());
  }
  LOG.info("Recovering application " + appId);
  ApplicationImpl app = new ApplicationImpl(dispatcher, p.getUser(), appId, creds, context);
  context.getApplications().put(appId, app);
  app.handle(new ApplicationInitEvent(appId, acls, logAggregationContext));
}
/**
 * Restores a previously running application from its recovered protobuf state,
 * re-registering it with the node context and firing its init event.
 *
 * @param p the persisted application state
 * @throws IOException if the stored credentials cannot be read
 */
private void recoverApplication(ContainerManagerApplicationProto p) throws IOException {
  ApplicationId appId = new ApplicationIdPBImpl(p.getId());
  Credentials creds = new Credentials();
  try (DataInputStream credsIn = new DataInputStream(p.getCredentials().newInput())) {
    creds.readTokenStorageStream(credsIn);
  }
  List<ApplicationACLMapProto> aclProtoList = p.getAclsList();
  Map<ApplicationAccessType, String> acls =
      new HashMap<ApplicationAccessType, String>(aclProtoList.size());
  for (ApplicationACLMapProto aclProto : aclProtoList) {
    acls.put(ProtoUtils.convertFromProtoFormat(aclProto.getAccessType()), aclProto.getAcl());
  }
  LogAggregationContext logAggregationContext = null;
  // BUG FIX: protobuf message getters never return null — they return the default
  // instance — so the original `getLogAggregationContext() != null` check was
  // always true. Use the generated has-method to detect a genuinely absent field.
  if (p.hasLogAggregationContext()) {
    logAggregationContext = new LogAggregationContextPBImpl(p.getLogAggregationContext());
  }
  LOG.info("Recovering application " + appId);
  ApplicationImpl app = new ApplicationImpl(dispatcher, p.getUser(), appId, creds, context);
  context.getApplications().put(appId, app);
  app.handle(new ApplicationInitEvent(appId, acls, logAggregationContext));
}