@Override
public QueryCompleteResponseProto queryComplete(
    QueryCompleteRequestProto request) throws IOException {
  // Resolve which query this completion notification refers to.
  QueryIdentifier qId =
      new QueryIdentifier(request.getQueryIdentifier().getApplicationIdString(),
          request.getQueryIdentifier().getDagIndex());
  LOG.info("Processing queryComplete notification for {}", qId);
  // Let the tracker tear down query state; it returns the query's info
  // (or null) so we can clean up anything still running.
  QueryInfo queryInfo =
      queryTracker.queryComplete(qId, request.getDeleteDelay(), false);
  if (queryInfo != null) {
    // Kill every fragment still registered against the now-complete query.
    List<QueryFragmentInfo> pending = queryInfo.getRegisteredFragments();
    LOG.info("DBG: Pending fragment count for completed query {} = {}", qId,
        pending.size());
    for (QueryFragmentInfo fragment : pending) {
      LOG.info("Issuing killFragment for completed query {} {}", qId,
          fragment.getFragmentIdentifierString());
      executorService.killFragment(fragment.getFragmentIdentifierString());
    }
    // Notify the AM reporter so heartbeats for this query stop.
    amReporter.queryComplete(qId);
  }
  return QueryCompleteResponseProto.getDefaultInstance();
}
@java.lang.Override
public int hashCode() {
  // A value of 0 means "not yet computed"; anything else is the cached hash.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  // Fold each present field (tag number, then value) into the hash, mirroring
  // the presence checks performed by equals() so equal messages hash equally.
  int h = 41;
  h = (19 * h) + getDescriptorForType().hashCode();
  if (hasQueryIdentifier()) {
    h = (37 * h) + QUERY_IDENTIFIER_FIELD_NUMBER;
    h = (53 * h) + getQueryIdentifier().hashCode();
  }
  if (hasDeleteDelay()) {
    h = (37 * h) + DELETE_DELAY_FIELD_NUMBER;
    h = (53 * h) + hashLong(getDeleteDelay());
  }
  h = (29 * h) + getUnknownFields().hashCode();
  // Cache for subsequent calls; the message is immutable so this is safe.
  memoizedHashCode = h;
  return h;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Fast path: identity.
  if (obj == this) {
    return true;
  }
  // Non-message (or different message type) objects fall back to Object.equals.
  if (!(obj instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto that =
      (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto) obj;
  // Both messages must agree on field presence and, when present, field value.
  if (hasQueryIdentifier() != that.hasQueryIdentifier()) {
    return false;
  }
  if (hasQueryIdentifier() && !getQueryIdentifier().equals(that.getQueryIdentifier())) {
    return false;
  }
  if (hasDeleteDelay() != that.hasDeleteDelay()) {
    return false;
  }
  if (hasDeleteDelay() && getDeleteDelay() != that.getDeleteDelay()) {
    return false;
  }
  // Finally, any unknown fields carried by the messages must match too.
  return getUnknownFields().equals(that.getUnknownFields());
}
@Override
public QueryCompleteResponseProto queryComplete(
    QueryCompleteRequestProto request) throws IOException {
  // Identify the completed query from the request payload.
  QueryIdentifier queryIdentifier = new QueryIdentifier(
      request.getQueryIdentifier().getApplicationIdString(),
      request.getQueryIdentifier().getDagIndex());
  LOG.info("Processing queryComplete notification for {}", queryIdentifier);
  QueryInfo completedQuery =
      queryTracker.queryComplete(queryIdentifier, request.getDeleteDelay(), false);
  // Nothing tracked for this query: acknowledge and return immediately.
  if (completedQuery == null) {
    return QueryCompleteResponseProto.getDefaultInstance();
  }
  // Fragments still registered must be killed now that the query is done.
  List<QueryFragmentInfo> registered = completedQuery.getRegisteredFragments();
  LOG.info("DBG: Pending fragment count for completed query {} = {}",
      queryIdentifier, registered.size());
  for (QueryFragmentInfo info : registered) {
    String fragmentId = info.getFragmentIdentifierString();
    LOG.info("Issuing killFragment for completed query {} {}", queryIdentifier,
        fragmentId);
    executorService.killFragment(fragmentId);
  }
  // Tell the AM reporter the query is finished.
  amReporter.queryComplete(queryIdentifier);
  return QueryCompleteResponseProto.getDefaultInstance();
}
public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto other) {
  // Merging the default instance would copy nothing — bail out early.
  if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto.getDefaultInstance()) {
    return this;
  }
  // Only fields explicitly set on `other` overwrite / merge into this builder.
  if (other.hasQueryIdentifier()) {
    mergeQueryIdentifier(other.getQueryIdentifier());
  }
  if (other.hasDeleteDelay()) {
    setDeleteDelay(other.getDeleteDelay());
  }
  // Carry over any unknown fields so round-tripping preserves them.
  mergeUnknownFields(other.getUnknownFields());
  return this;
}