/**
 * Converts a parse-time {@link PartitionSpec} into a runtime {@link PartitionDef},
 * translating each partition expression against the given input shape.
 *
 * @param inpShape shape (row schema) the partition expressions are resolved against
 * @param spec     the parsed partition specification; may be null or empty
 * @return the translated PartitionDef, or null when there is nothing to partition on
 * @throws SemanticException if an individual expression fails to translate
 */
private PartitionDef translate(ShapeDetails inpShape, PartitionSpec spec)
    throws SemanticException {
  // A missing or empty spec means "no partitioning", not an error.
  if (spec == null || spec.getExpressions() == null || spec.getExpressions().isEmpty()) {
    return null;
  }
  PartitionDef pDef = new PartitionDef();
  for (PartitionExpression pExpr : spec.getExpressions()) {
    PTFExpressionDef expDef = translate(inpShape, pExpr);
    pDef.addExpression(expDef);
  }
  return pDef;
}
/**
 * Builds the EXPLAIN text for the "partition by" clause: the partition
 * expressions' string forms joined with ", ".
 *
 * @return the comma-separated expression list, or null when no partition is defined
 */
@Explain(displayName = "partition by", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
public String getPartitionExplain() {
  if (partition == null || partition.getExpressions() == null) {
    return null;
  }
  StringBuilder text = new StringBuilder();
  String sep = "";
  for (PTFExpressionDef expr : partition.getExpressions()) {
    text.append(sep).append(expr.getExprNode().getExprString());
    sep = ", ";
  }
  return text.toString();
}
/**
 * Renders the "partition by" clause for EXPLAIN output as a comma-separated
 * list of the partition expressions' string forms.
 *
 * @return the joined expression strings, or null if no partition is present
 */
@Explain(displayName = "partition by", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
public String getPartitionExplain() {
  if (partition == null || partition.getExpressions() == null) {
    return null;
  }
  StringBuilder out = new StringBuilder();
  boolean first = true;
  for (PTFExpressionDef def : partition.getExpressions()) {
    if (!first) {
      out.append(", ");
    }
    out.append(def.getExprNode().getExprString());
    first = false;
  }
  return out.toString();
}
/**
 * Translates a {@link PartitionSpec} from the parse tree into its runtime
 * {@link PartitionDef} form, resolving every expression against the input shape.
 *
 * @param inpShape input shape the expressions are validated/resolved against
 * @param spec     parsed partition spec; null or expression-less specs are legal
 * @return the resulting PartitionDef, or null when the spec carries no expressions
 * @throws SemanticException on a failed expression translation
 */
private PartitionDef translate(ShapeDetails inpShape, PartitionSpec spec)
    throws SemanticException {
  // No spec / no expressions: nothing to partition on, signal with null.
  if (spec == null || spec.getExpressions() == null || spec.getExpressions().isEmpty()) {
    return null;
  }
  PartitionDef pDef = new PartitionDef();
  for (PartitionExpression pExpr : spec.getExpressions()) {
    PTFExpressionDef expDef = translate(inpShape, pExpr);
    pDef.addExpression(expDef);
  }
  return pDef;
}
/**
 * Builds an OrderDef whose order expressions mirror the given partition's
 * expressions (the default "order by" is the partition columns themselves).
 *
 * @param pDef partition definition supplying the expressions; expected non-null
 *             with a non-null expression list
 */
public OrderDef(PartitionDef pDef) {
  pDef.getExpressions().forEach(e -> addExpression(new OrderExpressionDef(e)));
}
/**
 * Turns a parsed {@link PartitionSpec} into a {@link PartitionDef} by
 * translating each partition expression against the supplied input shape.
 *
 * @param inpShape row shape used to resolve the partition expressions
 * @param spec     the partition spec; may be null or empty
 * @return the translated definition, or null when there are no expressions
 * @throws SemanticException if any expression cannot be translated
 */
private PartitionDef translate(ShapeDetails inpShape, PartitionSpec spec)
    throws SemanticException {
  // Absent or empty spec => no partitioning requested.
  if (spec == null || spec.getExpressions() == null || spec.getExpressions().isEmpty()) {
    return null;
  }
  PartitionDef pDef = new PartitionDef();
  for (PartitionExpression pExpr : spec.getExpressions()) {
    PTFExpressionDef expDef = translate(inpShape, pExpr);
    pDef.addExpression(expDef);
  }
  return pDef;
}
/**
 * Constructs an OrderDef from a PartitionDef: each partition expression is
 * wrapped as an OrderExpressionDef and appended in order.
 *
 * @param pDef source partition definition; expected non-null with a non-null
 *             expression list
 */
public OrderDef(PartitionDef pDef) {
  for (PTFExpressionDef partitionExpr : pDef.getExpressions()) {
    OrderExpressionDef orderExpr = new OrderExpressionDef(partitionExpr);
    addExpression(orderExpr);
  }
}
// NOTE(review): fragment — the method's signature begins on a line outside this
// view, so only its tail and first statement are visible. Collects the PTF's
// partition expressions; presumably they feed the null-order string being
// built — TODO confirm against the full method.
StringBuilder nullOrderString) throws SemanticException { List<PTFExpressionDef> partColList = tabDef.getPartition().getExpressions();
// NOTE(review): fragment — opens a null-guarded loop over the partition
// expressions and tests whether each expression is a direct column reference
// (ExprNodeColumnDesc). The handling of the non-column case and the closing
// braces continue past this view.
if (partition != null && partition.getExpressions() != null) { for (PTFExpressionDef expression : partition.getExpressions()) { ExprNodeDesc exprNode = expression.getExprNode(); if (!(exprNode instanceof ExprNodeColumnDesc)) {
// NOTE(review): fragment — tail of a signature plus the first statement; the
// enclosing method starts outside this view. Fetches the partition expression
// list from the table function definition for use further down — verify intent
// against the complete method.
StringBuilder nullOrderString) throws SemanticException { List<PTFExpressionDef> partColList = tabDef.getPartition().getExpressions();
/**
 * Initializes the partition-key wrapper: builds one ExprNodeEvaluator per
 * partition expression of the chain's first PTF and initializes each against
 * the given input ObjectInspector, then creates the KeyWrapperFactory and the
 * reusable key wrapper used to detect partition boundaries.
 *
 * @param inputOI ObjectInspector of this operator's input rows
 * @throws HiveException if an evaluator fails to initialize
 */
protected void setupKeysWrapper(ObjectInspector inputOI) throws HiveException {
  PartitionDef partDef = conf.getStartOfChain().getPartition();
  List<PTFExpressionDef> partExprs = partDef.getExpressions();
  int n = partExprs.size();
  ExprNodeEvaluator[] evaluators = new ExprNodeEvaluator[n];
  ObjectInspector[] evalOIs = new ObjectInspector[n];
  ObjectInspector[] standardOIs = new ObjectInspector[n];
  for (int i = 0; i < n; i++) {
    // Build a fresh evaluator rather than reusing the column's own: on the
    // reduce side that one is initialized against the HiveTable rowOI, not
    // against the OI of this operator's parent.
    evaluators[i] = ExprNodeEvaluatorFactory.get(partExprs.get(i).getExprNode());
    evalOIs[i] = evaluators[i].initialize(inputOI);
    standardOIs[i] = ObjectInspectorUtils.getStandardObjectInspector(
        evalOIs[i], ObjectInspectorCopyOption.WRITABLE);
  }
  keyWrapperFactory = new KeyWrapperFactory(evaluators, evalOIs, standardOIs);
  newKeys = keyWrapperFactory.getKeyWrapper();
}
/**
 * Sets up partition-boundary key evaluation: for each partition expression of
 * the first PTF in the chain, creates and initializes an ExprNodeEvaluator
 * against {@code inputOI}, derives its standard (WRITABLE) ObjectInspector,
 * and wires the results into the KeyWrapperFactory and a fresh key wrapper.
 *
 * @param inputOI ObjectInspector describing this operator's input rows
 * @throws HiveException on evaluator initialization failure
 */
protected void setupKeysWrapper(ObjectInspector inputOI) throws HiveException {
  List<PTFExpressionDef> partitionExprs =
      conf.getStartOfChain().getPartition().getExpressions();
  int count = partitionExprs.size();
  ExprNodeEvaluator[] fieldEvals = new ExprNodeEvaluator[count];
  ObjectInspector[] fieldOIs = new ObjectInspector[count];
  ObjectInspector[] writableOIs = new ObjectInspector[count];
  int i = 0;
  for (PTFExpressionDef exprDef : partitionExprs) {
    // A plain reuse of the column's evaluator is not possible here: on the
    // reduce side it was initialized with the HiveTable rowOI, not with the
    // OI of this operator's parent, so a new evaluator is created per key.
    fieldEvals[i] = ExprNodeEvaluatorFactory.get(exprDef.getExprNode());
    fieldOIs[i] = fieldEvals[i].initialize(inputOI);
    writableOIs[i] = ObjectInspectorUtils.getStandardObjectInspector(
        fieldOIs[i], ObjectInspectorCopyOption.WRITABLE);
    i++;
  }
  keyWrapperFactory = new KeyWrapperFactory(fieldEvals, fieldOIs, writableOIs);
  newKeys = keyWrapperFactory.getKeyWrapper();
}
// NOTE(review): fragment from an enclosing method not fully visible here —
// gathers the PTF's partition expressions, counts them, and extracts their
// ExprNodeDescs via a helper defined elsewhere in the file.
List<PTFExpressionDef> partitionExpressions = funcDef.getPartition().getExpressions(); final int partitionKeyCount = partitionExpressions.size(); ExprNodeDesc[] partitionExprNodeDescs = getPartitionExprNodeDescs(partitionExpressions);
// NOTE(review): fragment — opens a loop over the partition expressions,
// merging each expression's field nodes into the pruned-column set; the loop's
// close lies outside this view.
for (PTFExpressionDef col : tDef.getPartition().getExpressions()) { ExprNodeDesc exprNode = col.getExprNode(); prunedCols = mergeFieldNodesWithDesc(prunedCols, exprNode);
// NOTE(review): fragment — loop over partition expressions accumulating their
// field nodes into prunedCols via a helper; the closing brace continues past
// this view.
for (PTFExpressionDef col : tDef.getPartition().getExpressions()) { ExprNodeDesc exprNode = col.getExprNode(); prunedCols = mergeFieldNodesWithDesc(prunedCols, exprNode);
/**
 * Produces the EXPLAIN string for the "partition by" clause by joining the
 * partition expressions' string forms with ", ".
 *
 * @return the joined list, or null when there is no partition definition
 */
@Explain(displayName = "partition by", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
public String getPartitionExplain() {
  if (partition == null || partition.getExpressions() == null) {
    return null;
  }
  StringBuilder joined = new StringBuilder();
  String delimiter = "";
  for (PTFExpressionDef partExpr : partition.getExpressions()) {
    joined.append(delimiter);
    joined.append(partExpr.getExprNode().getExprString());
    delimiter = ", ";
  }
  return joined.toString();
}
/**
 * Creates an OrderDef that orders by the partition's expressions: every
 * expression of the given PartitionDef is wrapped in an OrderExpressionDef
 * and added, preserving order.
 *
 * @param pDef the partition definition to mirror; expected non-null with a
 *             non-null expression list
 */
public OrderDef(PartitionDef pDef) {
  for (PTFExpressionDef source : pDef.getExpressions()) {
    addExpression(new OrderExpressionDef(source));
  }
}
// NOTE(review): fragment — tail of a method signature plus its first
// statement; the method opens outside this view. Fetches the partition
// expressions, presumably to render them into the order string — TODO confirm
// against the full method.
StringBuilder orderString) throws SemanticException { List<PTFExpressionDef> partColList = tabDef.getPartition().getExpressions();
/**
 * Prepares the key wrapper for partition-change detection: one evaluator per
 * partition expression of the chain's first PTF, each initialized against the
 * given input ObjectInspector, with standard WRITABLE inspectors for the
 * current-key copies.
 *
 * @param inputOI ObjectInspector of the rows this operator receives
 * @throws HiveException if evaluator initialization fails
 */
protected void setupKeysWrapper(ObjectInspector inputOI) throws HiveException {
  PartitionDef partitionDef = conf.getStartOfChain().getPartition();
  List<PTFExpressionDef> exprDefs = partitionDef.getExpressions();
  int size = exprDefs.size();
  ExprNodeEvaluator[] keyEvals = new ExprNodeEvaluator[size];
  ObjectInspector[] keyInspectors = new ObjectInspector[size];
  ObjectInspector[] currentKeyInspectors = new ObjectInspector[size];
  for (int idx = 0; idx < size; idx++) {
    // The column's existing ExprNodeEvaluator cannot be reused: on the reduce
    // side it is initialized from the HiveTable rowOI rather than from the OI
    // of this operator's parent, so a dedicated evaluator is created.
    keyEvals[idx] = ExprNodeEvaluatorFactory.get(exprDefs.get(idx).getExprNode());
    keyInspectors[idx] = keyEvals[idx].initialize(inputOI);
    currentKeyInspectors[idx] = ObjectInspectorUtils.getStandardObjectInspector(
        keyInspectors[idx], ObjectInspectorCopyOption.WRITABLE);
  }
  keyWrapperFactory = new KeyWrapperFactory(keyEvals, keyInspectors, currentKeyInspectors);
  newKeys = keyWrapperFactory.getKeyWrapper();
}
// NOTE(review): fragment — opens a loop merging each partition expression's
// column names (exprNode.getCols()) into the pruned-column list; the loop body
// and closing brace continue past this view.
for (PTFExpressionDef col : tDef.getPartition().getExpressions()) { ExprNodeDesc exprNode = col.getExprNode(); Utilities.mergeUniqElems(prunedCols, exprNode.getCols());