/**
 * Feeds one fetched row into the root operator of the operator tree.
 *
 * @param row wrapper holding the raw row object ({@code row.o}) and its inspector
 * @throws HiveException if the operator tree fails while processing the row
 */
protected void pushRow(InspectableObject row) throws HiveException {
  // Tag 0: fetch-side rows always enter the tree on the single input.
  operator.process(row.o, 0);
}
/**
 * Pushes a single fetched row (the wrapped object {@code row.o}) into the root
 * operator with input tag 0.
 *
 * @throws HiveException if downstream processing fails
 */
protected void pushRow(InspectableObject row) throws HiveException { operator.process(row.o, 0); }
/**
 * Offers one row to the wrapped operator.
 *
 * @param row the row object to process (tag 0)
 * @return {@code true} if the row was delivered; {@code false} once the operator
 *         has signalled completion (the row is then dropped)
 * @throws HiveException if the operator fails while processing the row
 */
public boolean forward(Object row) throws HiveException {
  // Stop accepting rows once downstream has signalled completion.
  boolean accepting = !op.getDone();
  if (accepting) {
    op.process(row, 0);
  }
  return accepting;
}
// Closes the enclosing (not visible here) class body.
}
/**
 * Offers one row (tag 0) to the wrapped operator {@code op}.
 *
 * @return {@code true} if the row was processed; {@code false} once the operator
 *         reports it is done (the row is silently dropped in that case)
 * @throws HiveException if the operator fails while processing the row
 */
public boolean forward(Object row) throws HiveException { if (op.getDone()) { return false; } op.process(row, 0); return true; }
// NOTE(review): this brace closes the enclosing class, whose header is outside this view.
}
/**
 * Flushes any rows still buffered in {@code deserializerBatch} before delegating
 * to the parent close. The flush is skipped on abort, when there is no root
 * operator, when the root operator is already done, or when reading via the
 * vectorized input file format (which never buffers into the deserializer batch).
 *
 * @param abort whether the operator pipeline is being aborted
 * @throws HiveException if the final batch fails downstream processing
 */
@Override
public void closeOp(boolean abort) throws HiveException {
  final boolean mayFlush =
      !abort
          && oneRootOperator != null
          && !oneRootOperator.getDone()
          && currentReadType != VectorMapOperatorReadType.VECTORIZED_INPUT_FILE_FORMAT;
  if (mayFlush && deserializerBatch.size > 0) {
    // Account for the trailing partial batch, then push it downstream.
    numRows += deserializerBatch.size;
    batchCounter++;
    oneRootOperator.process(deserializerBatch, 0);
    deserializerBatch.size = 0;
  }
  super.closeOp(abort);
}
/**
 * Flushes the trailing, partially-filled {@code deserializerBatch} to the root
 * operator before closing, then delegates to {@code super.closeOp}. Skipped on
 * abort, when the root operator is absent or already done, or when the current
 * read type is the vectorized input file format (no deserializer buffering).
 *
 * @param abort whether the operator pipeline is being aborted
 * @throws HiveException if the final batch fails downstream processing
 */
@Override public void closeOp(boolean abort) throws HiveException {
  if (!abort && oneRootOperator != null && !oneRootOperator.getDone()
      && currentReadType != VectorMapOperatorReadType.VECTORIZED_INPUT_FILE_FORMAT) {
    if (deserializerBatch.size > 0) {
      // Count the remaining rows and the extra batch before forwarding it.
      numRows += deserializerBatch.size;
      batchCounter++;
      oneRootOperator.process(deserializerBatch, 0);
      deserializerBatch.size = 0;
    }
  }
  super.closeOp(abort);
}
/**
 * Forwards the partially-filled {@code deserializerBatch} to the root operator,
 * if it holds any rows, and resets it for reuse.
 *
 * @return {@code true} if the caller may keep feeding rows; {@code false} when
 *         the root operator reported completion (this operator is marked done)
 * @throws HiveException if the batch fails downstream processing
 */
private boolean flushDeserializerBatch() throws HiveException {
  if (deserializerBatch.size == 0) {
    // Nothing buffered; trivially able to accept more input.
    return true;
  }
  batchCounter++;
  oneRootOperator.process(deserializerBatch, 0);
  deserializerBatch.reset();
  if (oneRootOperator.getDone()) {
    setDone(true);
    return false;
  }
  return true;
}
reducer.process(row, tag); } catch (Exception e) { String rowString = null;
@Override public void process(Object row, int tag) throws HiveException { if (LOG.isInfoEnabled()) { cntrs[tag]++; if (cntrs[tag] == nextCntrs[tag]) { LOG.info(id + ", tag=" + tag + ", forwarding " + cntrs[tag] + " rows"); nextCntrs[tag] = getNextCntr(cntrs[tag]); } } int childrenDone = 0; for (int i = 0; i < childOperatorsArray.length; i++) { Operator<? extends OperatorDesc> child = childOperatorsArray[i]; if (child.getDone()) { childrenDone++; } else { if (forward[tag]) { // No need to evaluate, just forward it. child.process(row, tag); } else { // Call the corresponding handler to evaluate this row and // forward the result child.process(handlers[tag].process(row), handlers[tag].getTag()); } } } // if all children are done, this operator is also done if (childrenDone == childOperatorsArray.length) { setDone(true); } }
return; forwardOp.process(row.o, tag);
private void forwardBatch(boolean resetValueColumnsOnly) throws HiveException { reducer.process(batch, 0); if (resetValueColumnsOnly) { // Reset just the value columns and value buffer. for (int i = firstValueColumnOffset; i < batch.numCols; i++) { // Note that reset also resets the data buffer for bytes column vectors. batch.cols[i].reset(); } batch.size = 0; } else { // Reset key and value columns; and batch.size batch.reset(); } batchBytes = 0; incrementRowNumber(); }
/** * @param key key to process * @param value value to process * @return true if it is not done and can take more inputs */ private boolean processRow(Object key, Object value) { // reset the execContext for each new row execContext.resetRow(); try { if (mergeOp.getDone()) { return false; //done } else { row[0] = key; row[1] = value; mergeOp.process(row, 0); } } catch (Throwable e) { setAborted(true); if (e instanceof OutOfMemoryError) { // Don't create a new object if we are already out of memory throw (OutOfMemoryError) e; } else { l4j.error(StringUtils.stringifyException(e)); throw new RuntimeException(e); } } return true; //give me more }
protected void forward(Object row, ObjectInspector rowInspector) throws HiveException { runTimeNumRows++; if (getDone()) { return; } int childrenDone = 0; for (int i = 0; i < childOperatorsArray.length; i++) { Operator<? extends OperatorDesc> o = childOperatorsArray[i]; if (o.getDone()) { childrenDone++; } else { o.process(row, childOperatorsTag[i]); } } // if all children are done, this operator is also done if (childrenDone != 0 && childrenDone == childOperatorsArray.length) { setDone(true); } }
/**
 * Routes one row to the single child selected by its (new) tag, translating the
 * new tag back to the child's original tag. When routing switches to a new child
 * within the current key group, buffered state for earlier children is flushed
 * first via {@code endGroupIfNecessary}.
 *
 * NOTE(review): {@code childrenDone} is not declared here — presumably an
 * instance field accumulating across calls; verify in the enclosing class.
 *
 * @param row the input row
 * @param tag the new (remapped) tag identifying the target child
 * @throws HiveException if group handling or the child operator fails
 */
@Override public void process(Object row, int tag) throws HiveException {
  int currentChildIndex = newTagToChildIndex[tag];
  // Check if we start to forward rows to a new child.
  // If so, in the current key group, rows will not be forwarded
  // to those children which have an index less than the currentChildIndex.
  // We can call flush the buffer of children from lastChildIndex (inclusive)
  // to currentChildIndex (exclusive) and propagate processGroup to those children.
  endGroupIfNecessary(currentChildIndex);
  int oldTag = newTagToOldTag[tag];
  // Progress logging at geometrically spaced row counts per tag.
  if (LOG.isDebugEnabled()) {
    cntrs[tag]++;
    if (cntrs[tag] == nextCntrs[tag]) {
      LOG.debug(id + " (newTag, childIndex, oldTag)=(" + tag + ", " + currentChildIndex + ", "
          + oldTag + "), forwarding " + cntrs[tag] + " rows");
      nextCntrs[tag] = getNextCntr(cntrs[tag]);
    }
  }
  Operator<? extends OperatorDesc> child = childOperatorsArray[currentChildIndex];
  if (child.getDone()) {
    childrenDone++;
  } else {
    // Children receive the row under their original tag.
    child.process(row, oldTag);
  }
  // if all children are done, this operator is also done
  if (childrenDone == childOperatorsArray.length) {
    setDone(true);
  }
}
/**
 * Forwards a row to every child operator that is still active, tagged with the
 * per-child tag. Marks this operator done only when it has at least one child
 * and all children are done (the {@code != 0} check protects leaf operators).
 *
 * @param row the row to forward
 * @param rowInspector inspector for the row (unused in this method body)
 * @throws HiveException if a child operator fails
 */
protected void forward(Object row, ObjectInspector rowInspector) throws HiveException {
  runTimeNumRows++;
  if (getDone()) {
    return;
  }
  int childrenDone = 0;
  for (int i = 0; i < childOperatorsArray.length; i++) {
    Operator<? extends OperatorDesc> o = childOperatorsArray[i];
    if (o.getDone()) {
      childrenDone++;
    } else {
      o.process(row, childOperatorsTag[i]);
    }
  }
  // if all children are done, this operator is also done
  if (childrenDone != 0 && childrenDone == childOperatorsArray.length) {
    setDone(true);
  }
}
/**
 * Delivers a vectorized row batch to the root operator, maintaining row/batch
 * counters and nulling out the ROWID virtual column when the batch context does
 * not supply one.
 *
 * @param value the incoming batch (a {@code VectorizedRowBatch}); may be null,
 *        in which case it is passed through untouched
 * @return {@code true} if more batches can be accepted; {@code false} once the
 *         root operator reports completion (this operator is marked done)
 * @throws HiveException if downstream processing fails
 */
private boolean deliverVectorizedRowBatch(Writable value) throws HiveException {
  batchCounter++;
  if (value != null) {
    VectorizedRowBatch batch = (VectorizedRowBatch) value;
    numRows += batch.size;
    // No ROWID virtual column in this batch's context: blank the identifier.
    if (hasRowIdentifier && batchContext.findVirtualColumnNum(VirtualColumn.ROWID) < 0) {
      setRowIdentiferToNull(batch);
    }
  }
  oneRootOperator.process(value, 0);
  if (!oneRootOperator.getDone()) {
    return true;
  }
  setDone(true);
  return false;
}
break; forwardOp.process(row.o, 0);
/**
 * Fetches the next row and pushes it down the operator tree. When the result
 * was precomputed from statistics, all stats rows are pushed in one call.
 * Currently only used by FetchTask.
 *
 * @return {@code true} if a row (or the stats rows) was pushed; {@code false}
 *         when there is no operator or the input is exhausted
 * @throws IOException if reading the next row fails
 * @throws HiveException if the operator tree fails while processing
 */
public boolean pushRow() throws IOException, HiveException {
  if (operator == null) {
    return false;
  }
  // Stats short-circuit: the whole answer was computed from table statistics.
  if (work.getRowsComputedUsingStats() != null) {
    for (List<Object> statsRow : work.getRowsComputedUsingStats()) {
      operator.process(statsRow, 0);
    }
    flushRow();
    return true;
  }
  InspectableObject next = getNextRow();
  if (next == null) {
    // Input exhausted: flush any buffered output and report no more rows.
    flushRow();
    return false;
  }
  pushRow(next);
  return true;
}
/**
 * Gets the next row and pushes it down the operator tree; when the result was
 * precomputed from statistics, pushes all stats rows instead. Currently only
 * used by FetchTask.
 *
 * @return {@code true} if rows were pushed; {@code false} when no operator is
 *         set or the input is exhausted (output is flushed in that case)
 * @throws IOException if fetching the next row fails
 * @throws HiveException if the operator tree fails while processing
 */
public boolean pushRow() throws IOException, HiveException {
  if (operator == null) {
    return false;
  }
  // Short-circuit: answer derived entirely from table statistics.
  if (work.getRowsComputedUsingStats() != null) {
    for (List<Object> row : work.getRowsComputedUsingStats()) {
      operator.process(row, 0);
    }
    flushRow();
    return true;
  }
  InspectableObject row = getNextRow();
  if (row != null) {
    pushRow(row);
  } else {
    flushRow();
  }
  return row != null;
}
/**
 * Advances the given merge-queue segment to its next joinable row: pulls rows
 * from the segment, runs each through the operator tree, and caches the first
 * surviving row together with its computed join key in {@code keys[current]}.
 *
 * @param current index of the segment to advance
 * @return {@code true} when a row survived the operator tree and its key was
 *         cached; {@code false} when the segment is exhausted (key cleared)
 * @throws IOException if reading from the segment fails
 * @throws HiveException if the operator tree fails while processing a row
 */
private boolean next(Integer current) throws IOException, HiveException {
  if (keyFields == null) {
    byte tag = tagForAlias(alias);
    // joinKeys/joinKeysOI are initialized after making merge queue, so setup lazily at runtime
    keyFields = joinKeys[tag];
    keyFieldOIs = joinKeysObjectInspectors[tag];
  }
  InspectableObject nextRow = segments[current].getNextRow();
  while (nextRow != null) {
    // Clear the sink so we can detect whether this row survives the tree.
    sinkOp.reset();
    if (keys[current] == null) {
      keys[current] = new ObjectPair<List<Object>, InspectableObject>();
    }
    // Pass the row though the operator tree. It is guaranteed that not more than 1 row can
    // be produced from a input row.
    forwardOp.process(nextRow.o, 0);
    nextRow = sinkOp.getResult();
    // It is possible that the row got absorbed in the operator tree.
    if (nextRow.o != null) {
      // todo this should be changed to be evaluated lazily, especially for single segment case
      keys[current].setFirst(JoinUtil.computeKeys(nextRow.o, keyFields, keyFieldOIs));
      keys[current].setSecond(nextRow);
      return true;
    }
    nextRow = segments[current].getNextRow();
  }
  // Segment exhausted: drop its cached key so it no longer competes in the merge.
  keys[current] = null;
  return false;
}
// NOTE(review): this brace closes the enclosing class, whose header is outside this view.
}