@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptorForType().hashCode();
  if (hasUser()) {
    hash = (37 * hash) + USER_FIELD_NUMBER;
    hash = (53 * hash) + getUser().hashCode();
  }
  if (hasQueryIdentifier()) {
    hash = (37 * hash) + QUERY_IDENTIFIER_FIELD_NUMBER;
    hash = (53 * hash) + getQueryIdentifier().hashCode();
  }
  if (hasCredentialsBinary()) {
    hash = (37 * hash) + CREDENTIALS_BINARY_FIELD_NUMBER;
    hash = (53 * hash) + getCredentialsBinary().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto buildPartial() {
  org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto result =
      new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto(this);
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
    to_bitField0_ |= 0x00000001;
  }
  result.user_ = user_;
  if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
    to_bitField0_ |= 0x00000002;
  }
  if (queryIdentifierBuilder_ == null) {
    result.queryIdentifier_ = queryIdentifier_;
  } else {
    result.queryIdentifier_ = queryIdentifierBuilder_.build();
  }
  if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
    to_bitField0_ |= 0x00000004;
  }
  result.credentialsBinary_ = credentialsBinary_;
  result.bitField0_ = to_bitField0_;
  onBuilt();
  return result;
}
public final com.google.protobuf.Message getRequestPrototype(
    com.google.protobuf.Descriptors.MethodDescriptor method) {
  if (method.getService() != getDescriptor()) {
    throw new java.lang.IllegalArgumentException(
        "Service.getRequestPrototype() given method " +
        "descriptor for wrong service type.");
  }
  switch(method.getIndex()) {
    case 0:
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto.getDefaultInstance();
    case 1:
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto.getDefaultInstance();
    case 2:
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto.getDefaultInstance();
    case 3:
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto.getDefaultInstance();
    case 4:
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto.getDefaultInstance();
    case 5:
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UpdateFragmentRequestProto.getDefaultInstance();
    default:
      throw new java.lang.AssertionError("Can't get here.");
  }
}
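// A minimal sketch of how this prototype lookup is typically used for reflective
// dispatch on the server side. The helper below is an illustration, not code from
// this file; the BlockingService handle and raw payload are assumed inputs.
static com.google.protobuf.Message parseRequest(
    com.google.protobuf.BlockingService service,
    com.google.protobuf.Descriptors.MethodDescriptor method,
    byte[] rawBytes) throws com.google.protobuf.InvalidProtocolBufferException {
  // getRequestPrototype picks the right default instance for the method index,
  // so the dispatcher never hard-codes a message type.
  com.google.protobuf.Message prototype = service.getRequestPrototype(method);
  return prototype.newBuilderForType().mergeFrom(rawBytes).build();
}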
RegisterDagRequestProto.Builder builder = RegisterDagRequestProto.newBuilder();
if (currentQueryIdentifierProto == null) {
  return false;
}
TokenCache.setSessionToken(sessionToken, credentials);
RegisterDagRequestProto request = RegisterDagRequestProto.newBuilder()
    .setUser(testUser)
    .setCredentialsBinary(ByteString.copyFrom(LlapTezUtils.serializeCredentials(credentials)))
    .build(); // the original excerpt is truncated; terminating the chain here
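// A quick round-trip sketch for the credentials payload, assuming
// LlapTezUtils.serializeCredentials returns a byte[] (as the ByteString.copyFrom
// call above suggests) and pairing it with LlapUtil.credentialsFromByteArray from
// the server-side handler. Illustration only, not part of the original test.
static void roundTripCredentials() throws java.io.IOException {
  org.apache.hadoop.security.Credentials original = new org.apache.hadoop.security.Credentials();
  byte[] onWire = LlapTezUtils.serializeCredentials(original);
  org.apache.hadoop.security.Credentials restored = LlapUtil.credentialsFromByteArray(onWire);
  // restored should carry the same tokens and secret keys as original
}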
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto other =
      (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto) obj;

  boolean result = true;
  result = result && (hasUser() == other.hasUser());
  if (hasUser()) {
    result = result && getUser().equals(other.getUser());
  }
  result = result && (hasQueryIdentifier() == other.hasQueryIdentifier());
  if (hasQueryIdentifier()) {
    result = result && getQueryIdentifier().equals(other.getQueryIdentifier());
  }
  result = result && (hasCredentialsBinary() == other.hasCredentialsBinary());
  if (hasCredentialsBinary()) {
    result = result && getCredentialsBinary().equals(other.getCredentialsBinary());
  }
  result = result && getUnknownFields().equals(other.getUnknownFields());
  return result;
}
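// A small sketch of the resulting equals/hashCode contract (field values assumed):
// structurally equal messages compare equal and hash identically, and the hash is
// memoized after the first call.
RegisterDagRequestProto a = RegisterDagRequestProto.newBuilder().setUser("hive").build();
RegisterDagRequestProto b = RegisterDagRequestProto.newBuilder().setUser("hive").build();
assert a.equals(b);
assert a.hashCode() == b.hashCode();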
@Override
public RegisterDagResponseProto registerDag(RegisterDagRequestProto request)
    throws IOException {
  QueryIdentifierProto identifier = request.getQueryIdentifier();
  Credentials credentials;
  if (request.hasCredentialsBinary()) {
    credentials = LlapUtil.credentialsFromByteArray(
        request.getCredentialsBinary().toByteArray());
  } else {
    credentials = new Credentials();
  }
  queryTracker.registerDag(identifier.getApplicationIdString(), identifier.getDagIndex(),
      request.getUser(), credentials);
  if (LOG.isInfoEnabled()) {
    LOG.info("Application with id={}, dagId={} registered",
        identifier.getApplicationIdString(), identifier.getDagIndex());
  }
  return RegisterDagResponseProto.newBuilder().build();
}
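// For contrast with the handler above, a hedged sketch of the client side.
// daemonClient, queryIdentifier, user and credentials are assumed to exist; the
// setters and the serializeCredentials helper come from the surrounding excerpts.
RegisterDagRequestProto request = RegisterDagRequestProto.newBuilder()
    .setUser(user)
    .setQueryIdentifier(queryIdentifier)
    .setCredentialsBinary(
        ByteString.copyFrom(LlapTezUtils.serializeCredentials(credentials)))
    .build();
RegisterDagResponseProto response = daemonClient.registerDag(request);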
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto getDefaultInstanceForType() {
  return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto.getDefaultInstance();
}
public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto other) {
  if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto.getDefaultInstance()) return this;
  if (other.hasUser()) {
    bitField0_ |= 0x00000001;
    user_ = other.user_;
    onChanged();
  }
  if (other.hasQueryIdentifier()) {
    mergeQueryIdentifier(other.getQueryIdentifier());
  }
  if (other.hasCredentialsBinary()) {
    setCredentialsBinary(other.getCredentialsBinary());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
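// Merge semantics in brief (values assumed, illustration only): only the fields
// set on `other` overwrite the builder's state.
RegisterDagRequestProto base = RegisterDagRequestProto.newBuilder()
    .setUser("hive").build();
RegisterDagRequestProto overlay = RegisterDagRequestProto.newBuilder()
    .setCredentialsBinary(ByteString.EMPTY).build();
RegisterDagRequestProto merged = base.toBuilder().mergeFrom(overlay).build();
// merged keeps user == "hive" and now has credentials_binary set as well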
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto prototype) {
  return newBuilder().mergeFrom(prototype);
}

public Builder toBuilder() {
  return newBuilder(this);
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  getSerializedSize();
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeBytes(1, getUserBytes());
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    output.writeMessage(2, queryIdentifier_);
  }
  if (((bitField0_ & 0x00000004) == 0x00000004)) {
    output.writeBytes(3, credentialsBinary_);
  }
  getUnknownFields().writeTo(output);
}
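// writeTo is what toByteArray() ultimately drives; a minimal round-trip sketch,
// assuming an already-built request (illustration only).
static RegisterDagRequestProto roundTrip(RegisterDagRequestProto request)
    throws com.google.protobuf.InvalidProtocolBufferException {
  byte[] bytes = request.toByteArray(); // serializes via writeTo() above
  return RegisterDagRequestProto.parseFrom(bytes);
}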
/**
 * <code>optional string user = 1;</code>
 */
public Builder clearUser() {
  bitField0_ = (bitField0_ & ~0x00000001);
  user_ = getDefaultInstance().getUser();
  onChanged();
  return this;
}
/**
 * <code>optional bytes credentials_binary = 3;</code>
 */
public Builder clearCredentialsBinary() {
  bitField0_ = (bitField0_ & ~0x00000004);
  credentialsBinary_ = getDefaultInstance().getCredentialsBinary();
  onChanged();
  return this;
}
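// Both clear methods reset the presence bit along with the value; a quick sketch,
// with the starting request assumed (illustration only).
RegisterDagRequestProto.Builder b = request.toBuilder();
b.clearCredentialsBinary();
assert !b.hasCredentialsBinary();
assert b.getCredentialsBinary().isEmpty(); // back to the default empty ByteString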