/**
 * Switches the function chain's output ObjectInspector to the evaluator's own
 * output OI when the table function is able to stream (iterate) its output.
 */
public static void alterOutputOIForStreaming(PTFDesc ptfDesc) {
  PartitionedTableFunctionDef funcDef = ptfDesc.getFuncDef();
  TableFunctionEvaluator evaluator = funcDef.getTFunction();
  if (!evaluator.canIterateOutput()) {
    return; // batch-mode evaluator: keep the output shape's existing OI
  }
  funcDef.getOutputShape().setOI(evaluator.getOutputOI());
}
/**
 * If the table function can iterate (stream) its output, replaces the OI on
 * the chain's output shape with the evaluator's own output OI.
 */
public static void alterOutputOIForStreaming(PTFDesc ptfDesc) {
  TableFunctionEvaluator evaluator = ptfDesc.getFuncDef().getTFunction();
  if (evaluator.canIterateOutput()) {
    ptfDesc.getFuncDef().getOutputShape().setOI(evaluator.getOutputOI());
  }
}
/**
 * Evaluates this table function over one input partition and returns the
 * output partition. Map-side invocations short-circuit to the raw-input
 * transformation; reduce-side, the cached output partition is lazily created
 * on first use and reset (reused) on every later call.
 */
public PTFPartition execute(PTFPartition iPart) throws HiveException {
  // Map-side: only the raw-input transformation applies.
  if (ptfDesc.isMapSide()) {
    return transformRawInput(iPart);
  }

  PTFPartitionIterator<Object> rowItr = iPart.iterator();
  PTFOperator.connectLeadLagFunctionsToPartition(ptfDesc.getLlInfo(), rowItr);

  if (outputPartition != null) {
    outputPartition.reset();
  } else {
    outputPartition = PTFPartition.create(
        ptfDesc.getCfg(),
        tableDef.getOutputShape().getSerde(),
        OI,
        tableDef.getOutputShape().getOI());
  }

  execute(rowItr, outputPartition);
  return outputPartition;
}
/**
 * Runs the table function over a single input partition.
 * On the map side this delegates to transformRawInput; otherwise it wires
 * lead/lag functions to the partition iterator, (re)uses the cached output
 * partition, and fills it via the streaming execute overload.
 */
public PTFPartition execute(PTFPartition iPart) throws HiveException {
  if (ptfDesc.isMapSide()) {
    return transformRawInput(iPart); // map-side: raw-input transformation only
  }
  PTFPartitionIterator<Object> partitionItr = iPart.iterator();
  PTFOperator.connectLeadLagFunctionsToPartition(ptfDesc.getLlInfo(), partitionItr);
  if (outputPartition == null) {
    // First reduce-side call: allocate the reusable output partition.
    outputPartition = PTFPartition.create(
        ptfDesc.getCfg(),
        tableDef.getOutputShape().getSerde(),
        OI,
        tableDef.getOutputShape().getOI());
  } else {
    outputPartition.reset();
  }
  execute(partitionItr, outputPartition);
  return outputPartition;
}
// Translate the windowing spec into a PTF plan descriptor, then grab the
// RowResolver of the chain's output shape — presumably used to wire up the
// downstream operator; TODO confirm against the enclosing (unseen) method.
PTFDesc ptfDesc = translator.translate(wSpec, semanticAnalyzer, hiveConf, rr, unparseTranslator); RowResolver ptfOpRR = ptfDesc.getFuncDef().getOutputShape().getRr();
// Translate the windowing spec to a PTFDesc and fetch the output shape's
// RowResolver. NOTE(review): fragment of a larger method — the surrounding
// context defines translator/wSpec/rr and consumes ptfOpRR.
PTFDesc ptfDesc = translator.translate(wSpec, semanticAnalyzer, hiveConf, rr, unparseTranslator); RowResolver ptfOpRR = ptfDesc.getFuncDef().getOutputShape().getRr();
// NOTE(review): syntactically incomplete fragment — the getAndMakeChild(...)
// call continues past this line. Builds the PTF operator with a RowSchema
// derived from the output shape's RowResolver.
RowResolver ptfOpRR = ptfDesc.getFuncDef().getOutputShape().getRr(); input = putOpInsertMap(OperatorFactory.getAndMakeChild(ptfDesc, new RowSchema(ptfOpRR.getColumnInfos()),
// NOTE(review): incomplete fragment (call is cut mid-argument-list). Creates
// the PTF operator as a child of the current plan using the output shape's
// column schema; the trailing arguments are outside this view.
RowResolver ptfOpRR = ptfDesc.getFuncDef().getOutputShape().getRr(); input = putOpInsertMap(OperatorFactory.getAndMakeChild(ptfDesc, new RowSchema(ptfOpRR.getColumnInfos()),
// Translate the windowing spec into a PTFDesc, then attach a PTFOperator to
// the plan: getAndMakeChild builds the operator with the output shape's
// RowSchema, and putOpInsertMap registers it with its RowResolver.
// NOTE(review): fragment — wSpec/rr/input come from the enclosing method.
PTFTranslator translator = new PTFTranslator(); PTFDesc ptfDesc = translator.translate(wSpec, this, conf, rr, unparseTranslator); RowResolver ptfOpRR = ptfDesc.getFuncDef().getOutputShape().getRr(); input = putOpInsertMap(OperatorFactory.getAndMakeChild(ptfDesc, new RowSchema(ptfOpRR.getColumnInfos()), input), ptfOpRR);
// Build and register the PTF operator for one windowing component: translate
// the spec, take the output shape's RowResolver, and chain the new operator
// onto `input`. NOTE(review): statement run from a larger (unseen) method.
PTFTranslator translator = new PTFTranslator(); PTFDesc ptfDesc = translator.translate(wSpec, this, conf, rr, unparseTranslator); RowResolver ptfOpRR = ptfDesc.getFuncDef().getOutputShape().getRr(); input = putOpInsertMap(OperatorFactory.getAndMakeChild(ptfDesc, new RowSchema(ptfOpRR.getColumnInfos()), input), ptfOpRR);
/**
 * Operator initialization: rebuilds the PTF query definition, selects the
 * output ObjectInspector (raw-input shape map-side, final output shape
 * reduce-side), and sets up the key wrapper and streaming function chain.
 */
@Override
protected void initializeOp(Configuration jobConf) throws HiveException {
  super.initializeOp(jobConf);
  hiveConf = jobConf;
  isMapOperator = conf.isMapSide();
  currentKeys = null;

  // Re-materialize the PTF query definition from the descriptor.
  reconstructQueryDef(hiveConf);

  // Map-side emits the raw-input shape of the first function in the chain;
  // reduce-side emits the last function's output shape.
  outputObjInspector = isMapOperator
      ? conf.getStartOfChain().getRawInputShape().getOI()
      : conf.getFuncDef().getOutputShape().getOI();

  setupKeysWrapper(inputObjInspectors[0]);
  ptfInvocation = setupChain();
  ptfInvocation.initializeStreaming(jobConf, isMapOperator);
  firstMapRow = true;
}
/**
 * Initializes this PTF operator: resets per-partition state, reconstructs the
 * query definition, picks the output OI for the current side (map vs reduce),
 * and prepares the key wrapper and the streaming invocation chain.
 */
@Override
protected void initializeOp(Configuration jobConf) throws HiveException {
  super.initializeOp(jobConf);
  hiveConf = jobConf;
  isMapOperator = conf.isMapSide();
  currentKeys = null;
  reconstructQueryDef(hiveConf);
  if (!isMapOperator) {
    // Reduce-side: output is the final function's output shape.
    outputObjInspector = conf.getFuncDef().getOutputShape().getOI();
  } else {
    // Map-side: output is the raw-input shape of the chain's first function.
    outputObjInspector = conf.getStartOfChain().getRawInputShape().getOI();
  }
  setupKeysWrapper(inputObjInspectors[0]);
  ptfInvocation = setupChain();
  ptfInvocation.initializeStreaming(jobConf, isMapOperator);
  firstMapRow = true;
}
// Recomputes the column list of a PTF's output shape after column pruning.
// Clears the stale RowResolver/column names, then (for window functions)
// installs a pruned RowResolver built from the old RowSchema.
// NOTE(review): method is truncated in this view — body continues past here.
private List<ColumnInfo> prunedColumnsList(List<FieldNode> prunedCols, RowSchema oldRS, PartitionedTableFunctionDef pDef) throws SemanticException { pDef.getOutputShape().setRr(null); pDef.getOutputShape().setColumnNames(null); if (pDef instanceof WindowTableFunctionDef) { WindowTableFunctionDef tDef = (WindowTableFunctionDef) pDef; pDef.getOutputShape().setRr(buildPrunedRR(prunedCols, oldRS));
// Column-pruner hook for PTF output shapes: invalidates the cached
// RowResolver and column names, then rebuilds a pruned RowResolver for
// window table functions. NOTE(review): truncated — the `tDef` local and the
// rest of the method lie outside this view.
private List<ColumnInfo> prunedColumnsList(List<FieldNode> prunedCols, RowSchema oldRS, PartitionedTableFunctionDef pDef) throws SemanticException { pDef.getOutputShape().setRr(null); pDef.getOutputShape().setColumnNames(null); if (pDef instanceof WindowTableFunctionDef) { WindowTableFunctionDef tDef = (WindowTableFunctionDef) pDef; pDef.getOutputShape().setRr(buildPrunedRR(prunedCols, oldRS));
// Initialize the function definition's output Shape from the evaluator's
// output ObjectInspector. NOTE(review): single-statement fragment.
initialize(def.getOutputShape(), tEval.getOutputOI());
// Wire the evaluator's output OI into the output Shape of this function
// definition. NOTE(review): fragment — def/tEval defined by enclosing code.
initialize(def.getOutputShape(), tEval.getOutputOI());
/**
 * For streaming-capable table functions, replaces the ObjectInspector on the
 * function chain's output shape with the one produced by the evaluator.
 */
public static void alterOutputOIForStreaming(PTFDesc ptfDesc) {
  final PartitionedTableFunctionDef chainHead = ptfDesc.getFuncDef();
  final TableFunctionEvaluator fnEval = chainHead.getTFunction();
  if (fnEval.canIterateOutput()) {
    chainHead.getOutputShape().setOI(fnEval.getOutputOI());
  }
}
/**
 * Evaluates the table function over an input partition. Map-side calls are
 * delegated to transformRawInput; otherwise lead/lag functions are connected
 * to the partition iterator and results land in the reusable output
 * partition, which is created lazily and reset between calls.
 */
public PTFPartition execute(PTFPartition iPart) throws HiveException {
  if (ptfDesc.isMapSide()) {
    return transformRawInput(iPart); // map-side path
  }

  PTFPartitionIterator<Object> rows = iPart.iterator();
  // Note: this variant passes the whole PTFDesc, not just the lead/lag info.
  PTFOperator.connectLeadLagFunctionsToPartition(ptfDesc, rows);

  if (outputPartition != null) {
    outputPartition.reset();
  } else {
    outputPartition = PTFPartition.create(
        ptfDesc.getCfg(),
        tableDef.getOutputShape().getSerde(),
        OI,
        tableDef.getOutputShape().getOI());
  }

  execute(rows, outputPartition);
  return outputPartition;
}
/**
 * Generates the operator plan for a windowing clause. The spec is first
 * validated and split into components by partitioning; for each component a
 * reduce-sink (shuffle), a PTF operator, and a select-all are chained onto
 * the current plan.
 */
Operator genWindowingPlan(WindowingSpec wSpec, Operator input) throws SemanticException {
  wSpec.validateAndMakeEffective();
  WindowingComponentizer components = new WindowingComponentizer(wSpec);
  RowResolver rr = opParseCtx.get(input).getRowResolver();

  while (components.hasNext()) {
    wSpec = components.next(conf, this, unparseTranslator, rr);

    // Shuffle on this component's partitioning before applying the PTF.
    input = genReduceSinkPlanForWindowing(wSpec, rr, input);
    rr = opParseCtx.get(input).getRowResolver();

    PTFDesc ptfDesc = new PTFTranslator().translate(wSpec, this, conf, rr, unparseTranslator);
    RowResolver ptfOpRR = ptfDesc.getFuncDef().getOutputShape().getRr();
    input = putOpInsertMap(
        OperatorFactory.getAndMakeChild(ptfDesc, new RowSchema(ptfOpRR.getColumnInfos()), input),
        ptfOpRR);

    input = genSelectAllDesc(input);
    rr = ptfOpRR;
  }
  return input;
}
/**
 * Operator initialization (async-aware variant): rebuilds the PTF query
 * definition, selects the output ObjectInspector for the current side, and
 * prepares the key wrapper and streaming invocation chain. Returns the
 * pending-initialization futures produced by the superclass.
 */
@Override
protected Collection<Future<?>> initializeOp(Configuration jobConf) throws HiveException {
  Collection<Future<?>> pending = super.initializeOp(jobConf);
  hiveConf = jobConf;
  isMapOperator = conf.isMapSide();
  reconstructQueryDef(hiveConf);

  // Map-side: raw-input shape of the chain's first function.
  // Reduce-side: output shape of the final function.
  outputObjInspector = isMapOperator
      ? conf.getStartOfChain().getRawInputShape().getOI()
      : conf.getFuncDef().getOutputShape().getOI();

  setupKeysWrapper(inputObjInspectors[0]);
  ptfInvocation = setupChain();
  ptfInvocation.initializeStreaming(jobConf, isMapOperator);
  firstMapRow = true;
  return pending;
}