/**
 * <code>optional .QueryIdentifierProto query_identifier = 1;</code>
 *
 * Sets query_identifier from a builder value, building the message eagerly.
 * NOTE: protoc-generated code — do not edit by hand; regenerate from the .proto.
 */
public Builder setQueryIdentifier(
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder builderForValue) {
  if (queryIdentifierBuilder_ == null) {
    // No nested field builder in use: store the built message directly and
    // notify the parent that this builder changed.
    queryIdentifier_ = builderForValue.build();
    onChanged();
  } else {
    // Delegate to the field builder, which handles change propagation itself.
    queryIdentifierBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000001; // record that query_identifier is now set
  return this;
}
/**
subBuilder.mergeFrom(queryIdentifier_); queryIdentifier_ = subBuilder.buildPartial();
/**
 * Builds a {@link QueryIdentifierProto} for the given DAG index, taking the
 * application id string and the application attempt number from the current
 * context.
 *
 * @param dagIdentifier index of the DAG within the application
 * @return a fully built query identifier proto
 */
private QueryIdentifierProto constructQueryIdentifierProto(int dagIdentifier) {
  QueryIdentifierProto.Builder idBuilder = QueryIdentifierProto.newBuilder();
  idBuilder.setApplicationIdString(getContext().getCurrentAppIdentifier());
  idBuilder.setDagIndex(dagIdentifier);
  idBuilder.setAppAttemptNumber(getContext().getApplicationAttemptId().getAttemptId());
  return idBuilder.build();
}
subBuilder.mergeFrom(queryIdentifier_); queryIdentifier_ = subBuilder.buildPartial();
subBuilder.mergeFrom(queryIdentifier_); queryIdentifier_ = subBuilder.buildPartial();
subBuilder.mergeFrom(queryIdentifier_); queryIdentifier_ = subBuilder.buildPartial();
QueryIdentifierProto.newBuilder().setApplicationIdString(appId.toString()) .setAppAttemptNumber(333).setDagIndex(tezDagId.getId()).build();
subBuilder.mergeFrom(queryIdentifier_); queryIdentifier_ = subBuilder.buildPartial();
subBuilder.mergeFrom(queryIdentifier_); queryIdentifier_ = subBuilder.buildPartial();
QueryIdentifierProto.newBuilder().setApplicationIdString(appId.toString()) .setAppAttemptNumber(333).setDagIndex(300).build();
.setQueryIdentifier( QueryIdentifierProto.newBuilder() .setApplicationIdString(appId.toString()) .setAppAttemptNumber(0) .setDagIndex(dagId.getId()) .build()) .setVertexIndex(vId.getId()) .setVertexName("MockVertex")
.setQueryIdentifier( LlapDaemonProtocolProtos.QueryIdentifierProto.newBuilder() .setApplicationIdString(appId) .setAppAttemptNumber(0) .setDagIndex(dagId) .build()) .setVertexIndex(vId) .setDagName(dagName)
.setQueryIdentifier( QueryIdentifierProto.newBuilder() .setApplicationIdString(appId) .setDagIndex(dagId) .build()) .build(); containerRunner.registerDag(request);
/**
 * Builds the vertex spec for a task and signs it when a signer is available.
 *
 * @param signer        signer to use; when {@code null} the spec is serialized unsigned
 * @param taskSpec      Tez task spec to convert
 * @param applicationId application the task belongs to
 * @param queryUser     user the query runs as
 * @param queryIdString textual query id
 * @return the (possibly signed) serialized vertex spec
 * @throws IOException if serialization fails
 */
private SignedMessage createSignedVertexSpec(LlapSigner signer, TaskSpec taskSpec,
    ApplicationId applicationId, String queryUser, String queryIdString) throws IOException {
  QueryIdentifierProto queryIdentifierProto = QueryIdentifierProto.newBuilder()
      .setApplicationIdString(applicationId.toString())
      .setDagIndex(taskSpec.getDagIdentifier())
      .setAppAttemptNumber(0)
      .build();
  final SignableVertexSpec.Builder vertexBuilder = Converters.constructSignableVertexSpec(
      taskSpec, queryIdentifierProto, applicationId.toString(), queryUser, queryIdString);
  if (signer == null) {
    // No signer configured (e.g. unsecure cluster): return the bytes unsigned.
    SignedMessage unsigned = new SignedMessage();
    unsigned.message = serializeVertexSpec(vertexBuilder);
    return unsigned;
  }
  // The signer first injects the master key id into the spec, then serializes
  // and signs it; the anonymous Signable wires those two steps to the builder.
  return signer.serializeAndSign(new Signable() {
    @Override
    public void setSignInfo(int masterKeyId) {
      vertexBuilder.setSignatureKeyId(masterKeyId);
    }

    @Override
    public byte[] serialize() throws IOException {
      return serializeVertexSpec(vertexBuilder);
    }
  });
}
/**
 * Builds the vertex spec for an externally submitted task and signs it when a
 * signer is available. The spec is flagged with isExternalSubmission so the
 * daemon can distinguish it from AM-originated work.
 *
 * @param signer        signer to use; when {@code null} the spec is serialized unsigned
 * @param taskSpec      Tez task spec to convert
 * @param applicationId application the task belongs to
 * @param queryUser     user the query runs as
 * @param queryIdString textual query id
 * @return the (possibly signed) serialized vertex spec
 * @throws IOException if serialization fails
 */
private SignedMessage createSignedVertexSpec(LlapSigner signer, TaskSpec taskSpec,
    ApplicationId applicationId, String queryUser, String queryIdString) throws IOException {
  QueryIdentifierProto queryIdentifierProto = QueryIdentifierProto.newBuilder()
      .setApplicationIdString(applicationId.toString())
      .setDagIndex(taskSpec.getDagIdentifier())
      .setAppAttemptNumber(0)
      .build();
  final SignableVertexSpec.Builder vertexBuilder = Converters.constructSignableVertexSpec(
      taskSpec, queryIdentifierProto, applicationId.toString(), queryUser, queryIdString);
  // Mark the spec as coming from outside the AM (external client submission).
  vertexBuilder.setIsExternalSubmission(true);
  if (signer == null) {
    // No signer configured (e.g. unsecure cluster): return the bytes unsigned.
    SignedMessage unsigned = new SignedMessage();
    unsigned.message = serializeVertexSpec(vertexBuilder);
    return unsigned;
  }
  // The signer first injects the master key id into the spec, then serializes
  // and signs it; the anonymous Signable wires those two steps to the builder.
  return signer.serializeAndSign(new Signable() {
    @Override
    public void setSignInfo(int masterKeyId) {
      vertexBuilder.setSignatureKeyId(masterKeyId);
    }

    @Override
    public byte[] serialize() throws IOException {
      return serializeVertexSpec(vertexBuilder);
    }
  });
}
/**
 * <code>optional .QueryIdentifierProto query_identifier = 1;</code>
 *
 * Merges {@code value} into the current query_identifier (field-wise proto merge
 * if already set to a non-default message, plain assignment otherwise).
 * NOTE: protoc-generated code — do not edit by hand; regenerate from the .proto.
 */
public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto value) {
  if (queryIdentifierBuilder_ == null) {
    // No nested field builder in use: merge on the stored message directly.
    if (((bitField0_ & 0x00000001) == 0x00000001) &&
        queryIdentifier_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance()) {
      // Field already holds a non-default message: merge the new value into it.
      queryIdentifier_ =
        org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.newBuilder(queryIdentifier_).mergeFrom(value).buildPartial();
    } else {
      queryIdentifier_ = value;
    }
    onChanged();
  } else {
    // Delegate merging to the field builder, which notifies the parent itself.
    queryIdentifierBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000001; // record that query_identifier is now set
  return this;
}
/**
/**
 * <code>optional .QueryIdentifierProto query_identifier = 1;</code>
 *
 * Merges {@code value} into the current query_identifier (field-wise proto merge
 * if already set to a non-default message, plain assignment otherwise).
 * NOTE: protoc-generated code — do not edit by hand; regenerate from the .proto.
 */
public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto value) {
  if (queryIdentifierBuilder_ == null) {
    // No nested field builder in use: merge on the stored message directly.
    if (((bitField0_ & 0x00000001) == 0x00000001) &&
        queryIdentifier_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance()) {
      // Field already holds a non-default message: merge the new value into it.
      queryIdentifier_ =
        org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.newBuilder(queryIdentifier_).mergeFrom(value).buildPartial();
    } else {
      queryIdentifier_ = value;
    }
    onChanged();
  } else {
    // Delegate merging to the field builder, which notifies the parent itself.
    queryIdentifierBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000001; // record that query_identifier is now set
  return this;
}
/**
/**
 * <code>required .QueryIdentifierProto query_identifier = 2;</code>
 *
 * Merges {@code value} into the current query_identifier (field-wise proto merge
 * if already set to a non-default message, plain assignment otherwise).
 * NOTE: protoc-generated code — do not edit by hand; regenerate from the .proto.
 */
public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto value) {
  if (queryIdentifierBuilder_ == null) {
    // No nested field builder in use: merge on the stored message directly.
    if (((bitField0_ & 0x00000002) == 0x00000002) &&
        queryIdentifier_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance()) {
      // Field already holds a non-default message: merge the new value into it.
      queryIdentifier_ =
        org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.newBuilder(queryIdentifier_).mergeFrom(value).buildPartial();
    } else {
      queryIdentifier_ = value;
    }
    onChanged();
  } else {
    // Delegate merging to the field builder, which notifies the parent itself.
    queryIdentifierBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000002; // record that query_identifier is now set
  return this;
}
/**
/**
 * <code>optional .QueryIdentifierProto query_identifier = 1;</code>
 *
 * Merges {@code value} into the current query_identifier (field-wise proto merge
 * if already set to a non-default message, plain assignment otherwise).
 * NOTE: protoc-generated code — do not edit by hand; regenerate from the .proto.
 */
public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto value) {
  if (queryIdentifierBuilder_ == null) {
    // No nested field builder in use: merge on the stored message directly.
    if (((bitField0_ & 0x00000001) == 0x00000001) &&
        queryIdentifier_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance()) {
      // Field already holds a non-default message: merge the new value into it.
      queryIdentifier_ =
        org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.newBuilder(queryIdentifier_).mergeFrom(value).buildPartial();
    } else {
      queryIdentifier_ = value;
    }
    onChanged();
  } else {
    // Delegate merging to the field builder, which notifies the parent itself.
    queryIdentifierBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000001; // record that query_identifier is now set
  return this;
}
/**
/**
 * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
 *
 * Merges {@code value} into the current query_identifier (field-wise proto merge
 * if already set to a non-default message, plain assignment otherwise).
 * NOTE: protoc-generated code — do not edit by hand; regenerate from the .proto.
 */
public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto value) {
  if (queryIdentifierBuilder_ == null) {
    // No nested field builder in use: merge on the stored message directly.
    if (((bitField0_ & 0x00000004) == 0x00000004) &&
        queryIdentifier_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance()) {
      // Field already holds a non-default message: merge the new value into it.
      queryIdentifier_ =
        org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.newBuilder(queryIdentifier_).mergeFrom(value).buildPartial();
    } else {
      queryIdentifier_ = value;
    }
    onChanged();
  } else {
    // Delegate merging to the field builder, which notifies the parent itself.
    queryIdentifierBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000004; // record that query_identifier is now set
  return this;
}
/**