StringBuilder nullOrderString) throws SemanticException { List<PTFExpressionDef> partColList = tabDef.getPartition().getExpressions();
StringBuilder nullOrderString) throws SemanticException { List<PTFExpressionDef> partColList = tabDef.getPartition().getExpressions();
List<String> partitionKeys = new ArrayList<>(); PartitionDef partition = ptfOp.getConf().getFuncDef().getPartition(); if (partition != null && partition.getExpressions() != null) {
protected void setupKeysWrapper(ObjectInspector inputOI) throws HiveException {
  // Build one evaluator per partition-key expression of the first PTF in the
  // chain, then wrap them in a KeyWrapperFactory so partition boundaries can
  // be detected by comparing successive key wrappers.
  PartitionDef partitionDef = conf.getStartOfChain().getPartition();
  List<PTFExpressionDef> partitionExprs = partitionDef.getExpressions();
  final int exprCount = partitionExprs.size();

  ExprNodeEvaluator[] evaluators = new ExprNodeEvaluator[exprCount];
  ObjectInspector[] evalOIs = new ObjectInspector[exprCount];
  ObjectInspector[] standardOIs = new ObjectInspector[exprCount];

  int idx = 0;
  for (PTFExpressionDef exprDef : partitionExprs) {
    // A fresh evaluator is required here rather than reusing the one on the
    // column: on the reduce side that evaluator was initialized against the
    // HiveTable's row OI, not the OI of this operator's reduce-side parent.
    evaluators[idx] = ExprNodeEvaluatorFactory.get(exprDef.getExprNode());
    evalOIs[idx] = evaluators[idx].initialize(inputOI);
    standardOIs[idx] = ObjectInspectorUtils.getStandardObjectInspector(
        evalOIs[idx], ObjectInspectorCopyOption.WRITABLE);
    idx++;
  }

  keyWrapperFactory = new KeyWrapperFactory(evaluators, evalOIs, standardOIs);
  newKeys = keyWrapperFactory.getKeyWrapper();
}
protected void setupKeysWrapper(ObjectInspector inputOI) throws HiveException {
  // Prepare the partition-key machinery: one ExprNodeEvaluator per partition
  // expression of the chain's first PTF, plus the writable "standard" copies
  // of their output inspectors, handed to a KeyWrapperFactory.
  List<PTFExpressionDef> keyExprs =
      conf.getStartOfChain().getPartition().getExpressions();
  final int n = keyExprs.size();

  ExprNodeEvaluator[] keyEvals = new ExprNodeEvaluator[n];
  ObjectInspector[] keyInspectors = new ObjectInspector[n];
  ObjectInspector[] currentKeyInspectors = new ObjectInspector[n];

  for (int pos = 0; pos < n; pos++) {
    // Reusing the evaluator stored on the column is not possible: on the
    // reduce side it was initialized with the HiveTable's row OI instead of
    // the OI of this operator's reduce-side parent, so create a new one.
    ExprNodeDesc exprNode = keyExprs.get(pos).getExprNode();
    keyEvals[pos] = ExprNodeEvaluatorFactory.get(exprNode);
    keyInspectors[pos] = keyEvals[pos].initialize(inputOI);
    currentKeyInspectors[pos] =
        ObjectInspectorUtils.getStandardObjectInspector(
            keyInspectors[pos], ObjectInspectorCopyOption.WRITABLE);
  }

  keyWrapperFactory =
      new KeyWrapperFactory(keyEvals, keyInspectors, currentKeyInspectors);
  newKeys = keyWrapperFactory.getKeyWrapper();
}
List<PTFExpressionDef> partitionExpressions = funcDef.getPartition().getExpressions(); final int partitionKeyCount = partitionExpressions.size(); ExprNodeDesc[] partitionExprNodeDescs = getPartitionExprNodeDescs(partitionExpressions);
StringBuilder orderString) throws SemanticException { List<PTFExpressionDef> partColList = tabDef.getPartition().getExpressions();
protected void setupKeysWrapper(ObjectInspector inputOI) throws HiveException {
  // Initialize the partition-key wrapper for this operator: evaluators and
  // object inspectors for every partition expression of the first PTF in the
  // chain are assembled into a KeyWrapperFactory, and a reusable key wrapper
  // is obtained for subsequent per-row key extraction.
  PartitionDef partition = conf.getStartOfChain().getPartition();
  List<PTFExpressionDef> expressions = partition.getExpressions();
  int size = expressions.size();

  ExprNodeEvaluator[] fieldEvals = new ExprNodeEvaluator[size];
  ObjectInspector[] fieldOIs = new ObjectInspector[size];
  ObjectInspector[] writableOIs = new ObjectInspector[size];

  for (int k = 0; k < size; k++) {
    PTFExpressionDef def = expressions.get(k);
    // Why not use the ExprNodeEvaluator already on the column? On the reduce
    // side it is initialized against the HiveTable's row OI rather than the
    // OI of this operator's reduce-side parent, so a fresh one is needed.
    fieldEvals[k] = ExprNodeEvaluatorFactory.get(def.getExprNode());
    fieldOIs[k] = fieldEvals[k].initialize(inputOI);
    writableOIs[k] = ObjectInspectorUtils.getStandardObjectInspector(
        fieldOIs[k], ObjectInspectorCopyOption.WRITABLE);
  }

  keyWrapperFactory = new KeyWrapperFactory(fieldEvals, fieldOIs, writableOIs);
  newKeys = keyWrapperFactory.getKeyWrapper();
}