return false; ObjectPair tem = new ObjectPair(); tem.setFirst(ReflectionUtils.copy(job, key, tem.getFirst())); tem.setSecond(ReflectionUtils.copy(job, value, tem.getSecond())); buffer.add(tem);
/**
 * Classifies an expression tree by what its column references point at:
 * the parent query, the subquery, or nothing at all.
 *
 * @param expr the AST node to classify
 * @return a pair of the reference type and, when the expression resolves to a
 *         parent-query column, its ColumnInfo (null otherwise)
 */
private ObjectPair<ExprType,ColumnInfo> analyzeExpr(ASTNode expr) {
  // For HAVING clauses, first try resolving the whole expression against the
  // parent query's row resolver.
  if (forHavingClause) {
    try {
      ColumnInfo parentCol = parentQueryRR.getExpression(expr);
      if (parentCol != null) {
        return ObjectPair.create(ExprType.REFERS_PARENT, parentCol);
      }
    } catch (SemanticException ignored) {
      // Best-effort probe: a resolution failure just means the expression is
      // not a parent-query column; fall through to structural analysis.
    }
  }

  int nodeType = expr.getType();
  if (nodeType == HiveParser.DOT) {
    // Qualified column reference: resolve its leftmost DOT subtree against
    // the parent query.
    ColumnInfo parentCol = resolveDot(firstDot(expr));
    if (parentCol != null) {
      return ObjectPair.create(ExprType.REFERS_PARENT, parentCol);
    }
    return ObjectPair.create(ExprType.REFERS_SUBQUERY, null);
  }
  if (nodeType == HiveParser.TOK_TABLE_OR_COL) {
    // Bare column reference: treated as belonging to the subquery.
    return ObjectPair.create(ExprType.REFERS_SUBQUERY, null);
  }

  // Interior node: fold together the classifications of all children.
  ExprType combined = ExprType.REFERS_NONE;
  int childCount = expr.getChildCount();
  for (int idx = 0; idx < childCount; idx++) {
    ASTNode child = (ASTNode) expr.getChild(idx);
    combined = combined.combine(analyzeExpr(child).getFirst());
  }
  return ObjectPair.create(combined, null);
}
/**
 * Compares this pair with another object for equality.
 *
 * @param that the object to compare against, may be null
 * @return true iff {@code that} is an {@code ObjectPair} equal to this one
 *         (as determined by the {@code equals(ObjectPair)} overload)
 */
@Override
@SuppressWarnings("unchecked") // cast is safe: the overload only calls equals() on the elements
public boolean equals(Object that) {
  // instanceof is false for a null operand, so no separate null check is needed.
  if (that instanceof ObjectPair) {
    return this.equals((ObjectPair<F, S>) that);
  }
  return false;
}
/**
 * Static factory for pairs. Unlike the constructor, a method call lets the
 * compiler infer the type arguments, keeping call sites concise.
 *
 * @param f the first element of the pair
 * @param s the second element of the pair
 * @return a new pair holding {@code f} and {@code s}
 */
public static <T1, T2> ObjectPair<T1, T2> create(T1 f, T2 s) {
  return new ObjectPair<T1, T2>(f, s);
}
GenMapRedUtils.setKeyAndValueDesc(reduceWork, rsOp); context.leafOpToFollowingWorkInfo.put(rsOp, ObjectPair.create(edgeProp, reduceWork)); LOG.debug("Removing " + parent + " as parent from " + root); root.removeParent(parent); ObjectPair<SparkEdgeProperty, ReduceWork> childWorkInfo = context. leafOpToFollowingWorkInfo.get(operator); SparkEdgeProperty edgeProp = childWorkInfo.getFirst(); ReduceWork childWork = childWorkInfo.getSecond();
aliasToViewInfo.put(alias, new ObjectPair<String, ReadEntity>(fullViewName, viewInput)); String aliasId = getAliasId(alias, qb); if (aliasId != null) { ReadEntity newParentInput = null; if (wasView) { viewsExpanded.add(aliasToViewInfo.get(alias).getFirst()); newParentInput = aliasToViewInfo.get(alias).getSecond(); } else if (wasCTE) { ctesExpanded.add(sqAliasToCTEName.get(alias));
private boolean next(Integer current) throws IOException, HiveException { if (keyFields == null) { byte tag = tagForAlias(alias); // joinKeys/joinKeysOI are initialized after making merge queue, so setup lazily at runtime keyFields = joinKeys[tag]; keyFieldOIs = joinKeysObjectInspectors[tag]; } InspectableObject nextRow = segments[current].getNextRow(); while (nextRow != null) { sinkOp.reset(); if (keys[current] == null) { keys[current] = new ObjectPair<List<Object>, InspectableObject>(); } // Pass the row though the operator tree. It is guaranteed that not more than 1 row can // be produced from a input row. forwardOp.process(nextRow.o, 0); nextRow = sinkOp.getResult(); // It is possible that the row got absorbed in the operator tree. if (nextRow.o != null) { // todo this should be changed to be evaluated lazily, especially for single segment case keys[current].setFirst(JoinUtil.computeKeys(nextRow.o, keyFields, keyFieldOIs)); keys[current].setSecond(nextRow); return true; } nextRow = segments[current].getNextRow(); } keys[current] = null; return false; } }
/**
 * Returns the next row in merge order, or null once every segment is drained.
 * Also records which segment supplied the row so the priority queue can be
 * re-adjusted on the following call.
 *
 * @return the next merged row, or null when no rows remain
 * @throws IOException on read failure from a segment
 */
public final InspectableObject getNextRow() throws IOException {
  if (currentMinSegment != null) {
    // Re-sift the segment we consumed from last time before picking a new min.
    adjustPriorityQueue(currentMinSegment);
  }
  Integer minSegment = top();
  if (minSegment == null) {
    // Every segment is exhausted; report the total once.
    if (LOG.isInfoEnabled()) {
      LOG.info("MergeQueue forwarded " + counter + " rows");
    }
    return null;
  }
  counter++;
  currentMinSegment = minSegment;
  return keys[minSegment].getSecond();
}
static synchronized public ObjectPair<PrimitiveObjectInspector, ExprNodeEvaluator> prepareExpr( ExprNodeGenericFuncDesc expr, List<String> partColumnNames, List<PrimitiveTypeInfo> partColumnTypeInfos) throws HiveException { // Create the row object List<ObjectInspector> partObjectInspectors = new ArrayList<ObjectInspector>(); for (int i = 0; i < partColumnNames.size(); i++) { partObjectInspectors.add(PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector( partColumnTypeInfos.get(i))); } StructObjectInspector objectInspector = ObjectInspectorFactory .getStandardStructObjectInspector(partColumnNames, partObjectInspectors); ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(expr); ObjectInspector evaluateResultOI = evaluator.initialize(objectInspector); return ObjectPair.create((PrimitiveObjectInspector)evaluateResultOI, evaluator); }
/**
 * Creates the cache with fixed-size in-memory write and read buffers, every
 * slot pre-populated with an empty key/value pair.
 */
public HiveKVResultCache() {
  writeBuffer = new ObjectPair[IN_MEMORY_NUM_ROWS];
  readBuffer = new ObjectPair[IN_MEMORY_NUM_ROWS];
  // Fill both buffers up front so no slot is ever observed as null.
  for (int slot = 0; slot < IN_MEMORY_NUM_ROWS; slot++) {
    writeBuffer[slot] = new ObjectPair<HiveKey, BytesWritable>();
    readBuffer[slot] = new ObjectPair<HiveKey, BytesWritable>();
  }
}
GenMapRedUtils.setKeyAndValueDesc(reduceWork, rsOp); context.leafOpToFollowingWorkInfo.put(rsOp, ObjectPair.create(edgeProp, reduceWork)); LOG.debug("Removing " + parent + " as parent from " + root); root.removeParent(parent); ObjectPair<SparkEdgeProperty, ReduceWork> childWorkInfo = context. leafOpToFollowingWorkInfo.get(operator); SparkEdgeProperty edgeProp = childWorkInfo.getFirst(); ReduceWork childWork = childWorkInfo.getSecond();
aliasToViewInfo.put(alias, new ObjectPair<String, ReadEntity>(fullViewName, viewInput)); String aliasId = getAliasId(alias, qb); if (aliasId != null) { ReadEntity newParentInput = null; if (wasView) { viewsExpanded.add(aliasToViewInfo.get(alias).getFirst()); newParentInput = aliasToViewInfo.get(alias).getSecond(); } else if (wasCTE) { ctesExpanded.add(sqAliasToCTEName.get(alias));
private boolean next(Integer current) throws IOException, HiveException { if (keyFields == null) { byte tag = tagForAlias(alias); // joinKeys/joinKeysOI are initialized after making merge queue, so setup lazily at runtime keyFields = joinKeys[tag]; keyFieldOIs = joinKeysObjectInspectors[tag]; } InspectableObject nextRow = segments[current].getNextRow(); while (nextRow != null) { sinkOp.reset(); if (keys[current] == null) { keys[current] = new ObjectPair<List<Object>, InspectableObject>(); } // Pass the row though the operator tree. It is guaranteed that not more than 1 row can // be produced from a input row. forwardOp.process(nextRow.o, 0); nextRow = sinkOp.getResult(); // It is possible that the row got absorbed in the operator tree. if (nextRow.o != null) { // todo this should be changed to be evaluated lazily, especially for single segment case keys[current].setFirst(JoinUtil.computeKeys(nextRow.o, keyFields, keyFieldOIs)); keys[current].setSecond(nextRow); return true; } nextRow = segments[current].getNextRow(); } keys[current] = null; return false; } }
/**
 * Returns the next row in merge order, or null once all segments are drained.
 * Remembers which segment supplied the row so its position in the priority
 * queue can be re-adjusted on the next call.
 *
 * @return the next merged row, or null when no rows remain
 * @throws IOException on read failure from a segment
 */
public final InspectableObject getNextRow() throws IOException {
  if (currentMinSegment != null) {
    // Re-sift the segment consumed last time before selecting a new minimum.
    adjustPriorityQueue(currentMinSegment);
  }
  Integer nextSegment = top();
  if (nextSegment != null) {
    counter++;
    currentMinSegment = nextSegment;
    return keys[nextSegment].getSecond();
  }
  // All segments exhausted; log the total once.
  if (isLogInfoEnabled) {
    LOG.info("MergeQueue forwarded " + counter + " rows");
  }
  return null;
}