/**
 * Creates a Thrift protocol instance over the given transport, selected by the
 * configured {@code this.protocol} type.
 *
 * @param transport the underlying Thrift transport to wrap
 * @return a new {@link TProtocol} matching the configured protocol type
 * @throws IllegalArgumentException if the configured protocol type is not supported
 */
protected TProtocol createThriftProtocol(TTransport transport) {
    switch (this.protocol) {
        case BINARY:
            return new TBinaryProtocol(transport);
        case JSON:
            return new TJSONProtocol(transport);
        case SIMPLE_JSON:
            return new TSimpleJSONProtocol(transport);
        default:
            // Include the offending value so a misconfiguration is diagnosable from the message.
            throw new IllegalArgumentException("Unknown Thrift Protocol: " + this.protocol);
    }
}
}
/**
 * Serializes an access control entry into the UTF-8 bytes of its TJSON representation.
 *
 * @param ace the access control entry to serialize
 * @return the TJSON-encoded entry as UTF-8 bytes
 * @throws IOException if Thrift serialization or UTF-8 encoding fails
 */
static byte[] serialize(AccessControlEntry ace) throws IOException {
    TMemoryBuffer transport = new TMemoryBuffer(BUFFER_SIZE);
    TJSONProtocol protocol = new TJSONProtocol(transport);
    try {
        ace.write(protocol);
        transport.flush();
        return transport.toString(UTF_8.name()).getBytes(UTF_8);
    } catch (TException e) {
        throw new IOException("Failed to serialize access control entry : ", e);
    } catch (UnsupportedEncodingException uee) {
        // Fixed typo in the original message: "acesss" -> "access".
        throw new IOException("Failed to serialize access control entry : ", uee);
    }
}
/**
 * Serializes a BKDL configuration format object to its TJSON string form.
 *
 * @param configFormat the configuration to serialize
 * @return the TJSON-encoded configuration as a UTF-8 string
 * @throws RuntimeException if Thrift serialization or UTF-8 decoding fails
 */
String serialize(BKDLConfigFormat configFormat) {
    TMemoryBuffer buffer = new TMemoryBuffer(BUFFER_SIZE);
    TJSONProtocol jsonProtocol = new TJSONProtocol(buffer);
    try {
        configFormat.write(jsonProtocol);
        buffer.flush();
        return buffer.toString("UTF-8");
    } catch (TException te) {
        throw new RuntimeException("Failed to serialize BKDLConfig : ", te);
    } catch (UnsupportedEncodingException uee) {
        throw new RuntimeException("Failed to serialize BKDLConfig : ", uee);
    }
}
/**
 * Deserializes an access control entry from its TJSON byte representation.
 *
 * @param zkPath the ZooKeeper path the bytes came from (for error reporting)
 * @param data the TJSON-encoded entry bytes; empty means "no explicit ACL"
 * @return the decoded entry, or the default entry when {@code data} is empty
 * @throws IOException if the payload cannot be decoded
 */
static AccessControlEntry deserialize(String zkPath, byte[] data) throws IOException {
    // An empty payload falls back to the default access control entry.
    if (data.length == 0) {
        return DEFAULT_ACCESS_CONTROL_ENTRY;
    }
    AccessControlEntry entry = new AccessControlEntry();
    TJSONProtocol jsonProtocol = new TJSONProtocol(new TMemoryInputTransport(data));
    try {
        entry.read(jsonProtocol);
    } catch (TException te) {
        throw new CorruptedAccessControlException(zkPath, te);
    }
    return entry;
}
/**
 * Renders the query plan as a Thrift JSON string.
 * On Thrift serialization failure this is best-effort: the stack trace is
 * printed and the plan's plain {@code toString()} form is returned instead
 * of propagating the error.
 *
 * @return the TJSON-encoded query plan, or {@code toString()} on failure
 * @throws IOException if UTF-8 decoding of the buffer fails
 */
public String toThriftJSONString() throws IOException {
    org.apache.hadoop.hive.ql.plan.api.Query q = getQueryPlan();
    // Heuristic buffer sizing: assumes JSON output fits in ~5x the toString() length
    // (TMemoryBuffer grows as needed, so this is only an initial capacity).
    TMemoryBuffer tmb = new TMemoryBuffer(q.toString().length() * 5);
    TJSONProtocol oprot = new TJSONProtocol(tmb);
    try {
        q.write(oprot);
    } catch (TException e) {
        // Best-effort fallback: swallow the Thrift error and return the plain form.
        // NOTE(review): printStackTrace() bypasses any logging framework — consider a logger.
        e.printStackTrace();
        return q.toString();
    }
    return tmb.toString("UTF-8");
}
public String toThriftJSONString() throws IOException { org.apache.hadoop.hive.ql.plan.api.Query q = getQueryPlan(); TMemoryBuffer tmb = new TMemoryBuffer(q.toString().length() * 5); TJSONProtocol oprot = new TJSONProtocol(tmb); try { q.write(oprot); } catch (TException e) { // TODO Auto-generated catch block e.printStackTrace(); return q.toString(); } return tmb.toString("UTF-8"); }
/**
 * Factory method: builds a {@link TJSONProtocol} over the supplied transport,
 * honoring this factory's field-names-as-string setting.
 *
 * @param trans the transport the protocol will read from / write to
 * @return a new TJSONProtocol instance
 */
public TProtocol getProtocol(TTransport trans) {
    final TJSONProtocol jsonProtocol = new TJSONProtocol(trans, fieldNamesAsString_);
    return jsonProtocol;
}
// NOTE(review): fragment — the enclosing method signature and the catch/return
// that complete this try block lie outside the visible chunk; code left untouched.
// Decodes a BKDLConfigFormat from TJSON-encoded bytes held in `data`.
BKDLConfigFormat configFormat = new BKDLConfigFormat();
TMemoryInputTransport transport = new TMemoryInputTransport(data);
TJSONProtocol protocol = new TJSONProtocol(transport);
try {
    // Populate the config object from the TJSON payload.
    configFormat.read(protocol);
/**
 * Returns a new {@link TJSONProtocol} layered on the given transport.
 *
 * @param trans the transport to wrap
 * @return a freshly constructed TJSONProtocol
 */
public TProtocol getProtocol(TTransport trans) {
    final TProtocol protocol = new TJSONProtocol(trans);
    return protocol;
}
/**
 * Protocol factory hook: wraps the supplied transport in a {@link TJSONProtocol}.
 *
 * @param trans the transport to wrap
 * @return a new TJSONProtocol over {@code trans}
 */
public TProtocol getProtocol(TTransport trans) {
    TJSONProtocol result = new TJSONProtocol(trans);
    return result;
}
/**
 * Creates a {@link TJSONProtocol} over the given transport.
 *
 * @param trans the transport the protocol operates on
 * @return a new TJSONProtocol instance
 */
public TProtocol getProtocol(TTransport trans) { return new TJSONProtocol(trans); }
/**
 * Creates the Thrift protocol for the configured protocol type.
 * (Original Javadoc, translated from Chinese: "Method that creates the thrift
 * channel according to configuration".)
 *
 * @param transport the transport the protocol wraps
 * @return a binary or JSON protocol, chosen by {@code tProtocolType}
 * @throws IllegalStateException for unsupported protocol types; the message
 *         text is Chinese for "unsupported channel type"
 */
private TProtocol createTProtocol(TTransport transport) {
    if (tProtocolType == TProtocolType.BINARY) {
        return new TBinaryProtocol(transport);
    } else if (tProtocolType == TProtocolType.JSON) {
        return new TJSONProtocol(transport);
    }
    throw new IllegalStateException("暂不支持的管道类型:" + tProtocolType);
}
/**
 * Builds the Thrift protocol matching the configured {@code tProtocolType}.
 * (Original Javadoc, translated from Chinese: "Method that creates the thrift
 * channel according to configuration".)
 *
 * @param transport the underlying transport
 * @return a {@code TBinaryProtocol} or {@code TJSONProtocol}
 * @throws IllegalStateException when the type is neither BINARY nor JSON; the
 *         message text is Chinese for "unsupported channel type"
 */
private TProtocol createTProtocol(TTransport transport) {
    if (tProtocolType == TProtocolType.BINARY) {
        return new TBinaryProtocol(transport);
    } else if (tProtocolType == TProtocolType.JSON) {
        return new TJSONProtocol(transport);
    }
    throw new IllegalStateException("暂不支持的管道类型:" + tProtocolType);
}
/**
 * Serializes this server load to the UTF-8 bytes of its TJSON representation.
 *
 * @return the TJSON-encoded server load as UTF-8 bytes
 * @throws IOException if Thrift serialization or UTF-8 encoding fails
 */
public byte[] serialize() throws IOException {
    TMemoryBuffer transport = new TMemoryBuffer(BUFFER_SIZE);
    TJSONProtocol protocol = new TJSONProtocol(transport);
    try {
        toThrift().write(protocol);
        transport.flush();
        return transport.toString(UTF_8.name()).getBytes(UTF_8);
    } catch (TException | UnsupportedEncodingException e) {
        // Both failure modes produced the identical wrapped message, so the two
        // catch arms are collapsed into a single multi-catch (same behavior).
        throw new IOException("Failed to serialize server load : ", e);
    }
}
/**
 * Serializes this stream load to the UTF-8 bytes of its TJSON representation.
 *
 * @return the TJSON-encoded stream load as UTF-8 bytes
 * @throws IOException if Thrift serialization or UTF-8 encoding fails
 */
public byte[] serialize() throws IOException {
    TMemoryBuffer transport = new TMemoryBuffer(BUFFER_SIZE);
    TJSONProtocol protocol = new TJSONProtocol(transport);
    try {
        toThrift().write(protocol);
        transport.flush();
        return transport.toString(UTF_8.name()).getBytes(UTF_8);
    } catch (TException | UnsupportedEncodingException e) {
        // The original's two catch arms threw the identical message; merged
        // into one multi-catch with no behavioral change.
        throw new IOException("Failed to serialize stream load : ", e);
    }
}
/**
 * Reconstructs a {@link StreamLoad} from the TJSON bytes of its Thrift form.
 *
 * @param data TJSON-encoded thrift StreamLoad bytes
 * @return the decoded StreamLoad
 * @throws IOException if the payload cannot be decoded
 */
public static StreamLoad deserialize(byte[] data) throws IOException {
    org.apache.distributedlog.service.placement.thrift.StreamLoad thriftLoad =
        new org.apache.distributedlog.service.placement.thrift.StreamLoad();
    TJSONProtocol jsonProtocol = new TJSONProtocol(new TMemoryInputTransport(data));
    try {
        thriftLoad.read(jsonProtocol);
    } catch (TException te) {
        throw new IOException("Failed to deserialize stream load : ", te);
    }
    return new StreamLoad(thriftLoad.getStream(), thriftLoad.getLoad());
}
public String toThriftJSONString() throws IOException { org.apache.hadoop.hive.ql.plan.api.Query q = getQueryPlan(); TMemoryBuffer tmb = new TMemoryBuffer(q.toString().length() * 5); TJSONProtocol oprot = new TJSONProtocol(tmb); try { q.write(oprot); } catch (TException e) { // TODO Auto-generated catch block e.printStackTrace(); return q.toString(); } return tmb.toString("UTF-8"); }
/**
 * Serializes the query plan to a Thrift JSON string. Failure to write the
 * plan via Thrift is handled best-effort: the stack trace is printed and the
 * plan's plain {@code toString()} is returned instead.
 *
 * @return the TJSON-encoded query plan, or {@code toString()} on failure
 * @throws IOException if UTF-8 decoding of the buffer fails
 */
public String toThriftJSONString() throws IOException {
    org.apache.hadoop.hive.ql.plan.api.Query q = getQueryPlan();
    // Initial capacity heuristic only — TMemoryBuffer grows on demand.
    TMemoryBuffer tmb = new TMemoryBuffer(q.toString().length() * 5);
    TJSONProtocol oprot = new TJSONProtocol(tmb);
    try {
        q.write(oprot);
    } catch (TException e) {
        // NOTE(review): printStackTrace() instead of a logger; fallback appears intentional.
        e.printStackTrace();
        return q.toString();
    }
    return tmb.toString("UTF-8");
}
private static TProtocol constructSyncProtocol(TTransport transport, ThriftExchangeProtocol exchangeProtocol, final ThriftNegotiationType negotiationType, final ThriftCompressionType compressionType) { if (negotiationType == ThriftNegotiationType.SSL) { // If negotiation passed over SSL/TLS the only binary transport is supported return new TBinaryProtocol(transport); } else if (compressionType == ThriftCompressionType.ZLIB) { return new TBinaryProtocol(new TZlibTransport(transport)); } else { switch (exchangeProtocol) { case BINARY: return new TBinaryProtocol(new TFramedTransport(transport)); case JSON: return new TJSONProtocol(new TFramedTransport(transport)); case SJSON: return new TSimpleJSONProtocol(new TFramedTransport(transport)); case COMPACT: return new TCompactProtocol(new TFramedTransport(transport)); default: throw new IllegalArgumentException("Exchange protocol " + exchangeProtocol + " not implemented"); } } }
/**
 * Reconstructs a {@link ServerLoad} (and its per-stream loads, when present)
 * from the TJSON bytes of its Thrift form.
 *
 * @param data TJSON-encoded thrift ServerLoad bytes
 * @return the decoded ServerLoad with any stream loads attached
 * @throws IOException if the payload cannot be decoded
 */
public static ServerLoad deserialize(byte[] data) throws IOException {
    org.apache.distributedlog.service.placement.thrift.ServerLoad thriftServerLoad =
        new org.apache.distributedlog.service.placement.thrift.ServerLoad();
    TJSONProtocol jsonProtocol = new TJSONProtocol(new TMemoryInputTransport(data));
    try {
        thriftServerLoad.read(jsonProtocol);
    } catch (TException te) {
        throw new IOException("Failed to deserialize server load : ", te);
    }
    ServerLoad result = new ServerLoad(thriftServerLoad.getServer());
    // Streams are optional in the thrift struct; copy them over only when set.
    if (thriftServerLoad.isSetStreams()) {
        for (org.apache.distributedlog.service.placement.thrift.StreamLoad s : thriftServerLoad.getStreams()) {
            result.addStream(new StreamLoad(s.getStream(), s.getLoad()));
        }
    }
    return result;
}