/** * Submit the work for actual execution. */ public void submitWork(SubmitWorkRequestProto request, String llapHost, int llapPort) { // Register the pending events to be sent for this spec. VertexOrBinary vob = request.getWorkSpec(); assert vob.hasVertexBinary() != vob.hasVertex(); SignableVertexSpec vertex = null; try { vertex = vob.hasVertex() ? vob.getVertex() : SignableVertexSpec.parseFrom(vob.getVertexBinary()); } catch (InvalidProtocolBufferException e) { throw new RuntimeException(e); } QueryIdentifierProto queryIdentifierProto = vertex.getQueryIdentifier(); TezTaskAttemptID attemptId = Converters.createTaskAttemptId(queryIdentifierProto, vertex.getVertexIndex(), request.getFragmentNumber(), request.getAttemptNumber()); final String fragmentId = attemptId.toString(); this.requestInfo = new RequestInfo(request, queryIdentifierProto, fragmentId, llapHost, llapPort); this.tezEvents = Lists.<TezEvent>newArrayList(); registerClient(); // Send out the actual SubmitWorkRequest final LlapTaskUmbilicalExternalClient client = this; communicator.start(); submitWork(); }
// Fragment (enclosing method not visible): wraps the pre-serialized vertex bytes from
// submitWorkInfo into a VertexOrBinary, using the binary form rather than the in-band
// message. The open `if` presumably attaches the vertex signature when one exists —
// TODO(review): confirm against the full method body.
VertexOrBinary.Builder vertexBuilder = VertexOrBinary.newBuilder(); vertexBuilder.setVertexBinary(ByteString.copyFrom(submitWorkInfo.getVertexBinary())); if (submitWorkInfo.getVertexSignature() != null) {
// NOTE(review): protobuf-generated builder code for VertexOrBinary — do not edit by
// hand; regenerate from the .proto definition instead.
// buildPartial() materializes the builder state into a new message: has-bit 0x1 tracks
// the 'vertex' field (copied from the nested builder when one is active), has-bit 0x2
// tracks 'vertexBinary'. Both byte fields are copied unconditionally; only the bitfield
// records presence.
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary buildPartial() { org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } if (vertexBuilder_ == null) { result.vertex_ = vertex_; } else { result.vertex_ = vertexBuilder_.build(); } if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.vertexBinary_ = vertexBinary_; result.bitField0_ = to_bitField0_; onBuilt(); return result; }
// Fragment (duplicate snippet of the same call site): builds a VertexOrBinary from the
// pre-serialized vertex bytes; the open `if` presumably sets the signature when present —
// TODO(review): confirm against the full method body.
VertexOrBinary.Builder vertexBuilder = VertexOrBinary.newBuilder(); vertexBuilder.setVertexBinary(ByteString.copyFrom(submitWorkInfo.getVertexBinary())); if (submitWorkInfo.getVertexSignature() != null) {
// Fragment: sets the work spec using the in-band (unsigned) SignableVertexSpec message,
// not the serialized vertexBinary form. Same code appears inside
// constructSubmitWorkRequest (see the full method elsewhere in this collection).
builder.setWorkSpec(VertexOrBinary.newBuilder().setVertex(Converters.constructSignableVertexSpec( taskSpec, currentQueryIdentifierProto, getTokenIdentifier(), user, hiveQueryId)).build());
/** * Submit the work for actual execution. */ public void submitWork(SubmitWorkRequestProto request, String llapHost, int llapPort) { // Register the pending events to be sent for this spec. VertexOrBinary vob = request.getWorkSpec(); assert vob.hasVertexBinary() != vob.hasVertex(); SignableVertexSpec vertex = null; try { vertex = vob.hasVertex() ? vob.getVertex() : SignableVertexSpec.parseFrom(vob.getVertexBinary()); } catch (InvalidProtocolBufferException e) { throw new RuntimeException(e); } QueryIdentifierProto queryIdentifierProto = vertex.getQueryIdentifier(); TezTaskAttemptID attemptId = Converters.createTaskAttemptId(queryIdentifierProto, vertex.getVertexIndex(), request.getFragmentNumber(), request.getAttemptNumber()); final String fragmentId = attemptId.toString(); this.requestInfo = new RequestInfo(request, queryIdentifierProto, fragmentId, llapHost, llapPort); this.tezEvents = Lists.<TezEvent>newArrayList(); registerClient(); // Send out the actual SubmitWorkRequest final LlapTaskUmbilicalExternalClient client = this; communicator.start(); submitWork(); }
/**
 * Extracts the SignableVertexSpec from the request's work spec, accepting either the
 * in-band message or the serialized {@code vertexBinary} form (but never both).
 * When the token requires signing, the signature is verified against the binary form.
 *
 * @param request   incoming fragment submission
 * @param tokenInfo token info for the caller; drives whether signing is checked
 * @return the decoded vertex spec, never null
 * @throws InvalidProtocolBufferException if vertexBinary cannot be parsed
 * @throws IOException if both or neither of vertex/vertexBinary are set, or the
 *         signature check fails
 */
private SignableVertexSpec extractVertexSpec(SubmitWorkRequestProto request,
    LlapTokenInfo tokenInfo) throws InvalidProtocolBufferException, IOException {
  VertexOrBinary vob = request.getWorkSpec();
  SignableVertexSpec vertex = vob.hasVertex() ? vob.getVertex() : null;
  ByteString vertexBinary = vob.hasVertexBinary() ? vob.getVertexBinary() : null;
  if (vertexBinary != null) {
    if (vertex != null) {
      throw new IOException(
          "Vertex and vertexBinary in VertexOrBinary cannot be set at the same time");
    }
    // Parse from the local we already fetched rather than re-reading the field.
    vertex = SignableVertexSpec.parseFrom(vertexBinary);
  } else if (vertex == null) {
    // Previously this fell through and returned null, deferring the failure to an
    // NPE at some later use; fail fast with an explicit error instead.
    throw new IOException("Neither vertex nor vertexBinary is set in VertexOrBinary");
  }
  if (tokenInfo.isSigningRequired) {
    checkSignature(vertex, vertexBinary, request, tokenInfo.userName);
  }
  return vertex;
}
// Fragment of a protobuf-generated equals(): compares the optional work_spec field by
// presence and then value. Enclosing method not visible; do not edit generated code.
if (hasWorkSpec()) { result = result && getWorkSpec() .equals(other.getWorkSpec());
// Fragment of a builder chain (enclosing expression not visible): populates the
// work spec with an inline SignableVertexSpec built in-band (no vertexBinary).
.setFragmentNumber(fragmentNumber) .setWorkSpec( VertexOrBinary.newBuilder().setVertex( SignableVertexSpec.newBuilder() .setDagName(dagName)
// Fragment of a builder chain (enclosing expression not visible): same pattern as
// elsewhere but with fully-qualified proto types; builds the in-band vertex spec.
.setFragmentNumber(fragmentNumber) .setWorkSpec( LlapDaemonProtocolProtos.VertexOrBinary.newBuilder().setVertex( LlapDaemonProtocolProtos.SignableVertexSpec .newBuilder()
private SubmitWorkRequestProto constructSubmitWorkRequest(ContainerId containerId, TaskSpec taskSpec, FragmentRuntimeInfo fragmentRuntimeInfo, String hiveQueryId) throws IOException { SubmitWorkRequestProto.Builder builder = SubmitWorkRequestProto.newBuilder(); builder.setFragmentNumber(taskSpec.getTaskAttemptID().getTaskID().getId()); builder.setAttemptNumber(taskSpec.getTaskAttemptID().getId()); builder.setContainerIdString(containerId.toString()); builder.setAmHost(getAmHostString()); builder.setAmPort(getAddress().getPort()); Preconditions.checkState(currentQueryIdentifierProto.getDagIndex() == taskSpec.getTaskAttemptID().getTaskID().getVertexID().getDAGId().getId()); builder.setCredentialsBinary( getCredentials(getContext().getCurrentDagInfo().getCredentials())); builder.setWorkSpec(VertexOrBinary.newBuilder().setVertex(Converters.constructSignableVertexSpec( taskSpec, currentQueryIdentifierProto, getTokenIdentifier(), user, hiveQueryId)).build()); // Don't call builder.setWorkSpecSignature() - Tez doesn't sign fragments builder.setFragmentRuntimeInfo(fragmentRuntimeInfo); if (scheduler != null) { // May be null in tests // TODO: see javadoc builder.setIsGuaranteed(scheduler.isInitialGuaranteed(taskSpec.getTaskAttemptID())); } return builder.build(); }
// NOTE(review): protobuf-generated equals() for VertexOrBinary — do not edit by hand.
// Standard generated contract: identity fast-path, type check, then per-field
// presence + value comparison for 'vertex' and 'vertexBinary', and finally the
// unknown-field set.
@java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary)) { return super.equals(obj); } org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary) obj; boolean result = true; result = result && (hasVertex() == other.hasVertex()); if (hasVertex()) { result = result && getVertex() .equals(other.getVertex()); } result = result && (hasVertexBinary() == other.hasVertexBinary()); if (hasVertexBinary()) { result = result && getVertexBinary() .equals(other.getVertexBinary()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; }
// NOTE(review): protobuf-generated builder code — do not edit by hand. mergeWorkSpec()
// follows the standard generated merge pattern: when no sub-builder is active and a
// non-default workSpec is already present, the incoming value is merged into it;
// otherwise it replaces the field. Has-bit 0x1 is set in both paths. The trailing
// "/**" opens the next (cut-off) generated javadoc.
/** * <code>optional .VertexOrBinary work_spec = 1;</code> */ public Builder mergeWorkSpec(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary value) { if (workSpecBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && workSpec_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.getDefaultInstance()) { workSpec_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.newBuilder(workSpec_).mergeFrom(value).buildPartial(); } else { workSpec_ = value; } onChanged(); } else { workSpecBuilder_.mergeFrom(value); } bitField0_ |= 0x00000001; return this; } /**
// NOTE(review): protobuf-generated hashCode() for VertexOrBinary — do not edit by hand.
// Memoized; mixes the descriptor hash, then each present field tagged by its field
// number, then the unknown-field set. Consistent with the generated equals().
@java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasVertex()) { hash = (37 * hash) + VERTEX_FIELD_NUMBER; hash = (53 * hash) + getVertex().hashCode(); } if (hasVertexBinary()) { hash = (37 * hash) + VERTEXBINARY_FIELD_NUMBER; hash = (53 * hash) + getVertexBinary().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; }
/**
 * Pulls the SignableVertexSpec out of the request's work spec. Accepts either the
 * in-band message or the serialized form, rejecting requests that carry both, and
 * verifies the signature when the token demands it.
 *
 * NOTE(review): when NEITHER representation is present this returns null — callers
 * must be prepared for that (or this should fail fast); confirm against call sites.
 */
private SignableVertexSpec extractVertexSpec(SubmitWorkRequestProto request,
    LlapTokenInfo tokenInfo) throws InvalidProtocolBufferException, IOException {
  VertexOrBinary vob = request.getWorkSpec();
  SignableVertexSpec vertex = null;
  ByteString vertexBinary = null;
  if (vob.hasVertex()) {
    vertex = vob.getVertex();
  }
  if (vob.hasVertexBinary()) {
    vertexBinary = vob.getVertexBinary();
  }
  if (vertexBinary != null) {
    if (vertex != null) {
      throw new IOException(
          "Vertex and vertexBinary in VertexOrBinary cannot be set at the same time");
    }
    vertex = SignableVertexSpec.parseFrom(vob.getVertexBinary());
  }
  if (tokenInfo.isSigningRequired) {
    checkSignature(vertex, vertexBinary, request, tokenInfo.userName);
  }
  return vertex;
}
// NOTE(review): protobuf-generated — returns a new Builder pre-populated from this
// message instance. Do not edit by hand.
public Builder toBuilder() { return newBuilder(this); }
// NOTE(review): protobuf-generated factory methods — do not edit by hand. The second
// method (newBuilder(prototype)) is cut off mid-body here; its standard generated body
// merges the prototype into a fresh builder.
public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary prototype) {
// Test helper: constructs a task-runner callable whose collaborators are all Mockito
// mocks / inert defaults, so unit tests can drive completion behavior via the
// canFinish / canFinishQueue / workTime knobs.
// Note: the work spec is read via getWorkSpec().getVertex() — the mock path assumes
// the in-band vertex form, not vertexBinary.
public MockRequest(SubmitWorkRequestProto requestProto, QueryFragmentInfo fragmentInfo, boolean canFinish, boolean canFinishQueue, long workTime, TezEvent initialEvent, boolean isGuaranteed) { super(requestProto, fragmentInfo, new Configuration(), new ExecutionContextImpl("localhost"), null, new Credentials(), 0, mock(AMReporter.class), null, mock( LlapDaemonExecutorMetrics.class), mock(KilledTaskHandler.class), mock( FragmentCompletionHandler.class), new DefaultHadoopShim(), null, requestProto.getWorkSpec().getVertex(), initialEvent, null, mock( SchedulerFragmentCompletingListener.class), mock(SocketFactory.class), isGuaranteed, null); this.workTime = workTime; this.canFinish = canFinish; this.canFinishQueue = canFinishQueue; }
// NOTE(review): protobuf-generated factory methods — do not edit by hand.
// newBuilder(prototype) copies an existing message into a fresh builder via mergeFrom;
// toBuilder() is the instance-side shorthand for the same operation.
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); }
// NOTE(review): protobuf-generated builder code — do not edit by hand. Clears has-bit
// 0x2 and resets vertexBinary to the default instance's value (empty ByteString).
/** * <code>optional bytes vertexBinary = 2;</code> * * <pre> * SignableVertexSpec * </pre> */ public Builder clearVertexBinary() { bitField0_ = (bitField0_ & ~0x00000002); vertexBinary_ = getDefaultInstance().getVertexBinary(); onChanged(); return this; }