/** Test-only operator; each instance gets its own fresh compilation context. */
public DummyOperator() {
  super(new CompilationOpContext());
}
/**
 * Creates an operator whose id is the next value drawn from the given
 * compilation context's operator-id sequence.
 *
 * @param cContext compilation context supplying the unique operator id
 */
public Operator(CompilationOpContext cContext) {
  // this(...) must be the first statement, so the id is computed inline.
  this(String.valueOf(cContext.nextOperatorId()), cContext);
}
/**
 * Exposes the column-statistics cache held by the compilation op context.
 *
 * @return map from cache key to its cached column statistics
 */
public Map<String, ColumnStatsList> getColStatsCache() {
  return ctx.getOpContext().getColStatsCache();
}
/**
 * Discards the current compilation context and restarts the id sequencer,
 * so subsequent operators are numbered from scratch.
 */
public void resetOpContext() {
  opContext = new CompilationOpContext();
  sequencer = new AtomicInteger();
}
/**
 * Creates an operator whose id is the next value drawn from the given
 * compilation context's operator-id sequence.
 *
 * @param cContext compilation context supplying the unique operator id
 */
public Operator(CompilationOpContext cContext) {
  // this(...) must be the first statement, so the id is computed inline.
  this(String.valueOf(cContext.nextOperatorId()), cContext);
}
/**
 * Looks up cached column statistics for a pruned partition list.
 *
 * @param partList pruned partition list whose key identifies the cache entry
 * @return the cached statistics, or {@code null} when none are cached for that key
 */
public ColumnStatsList getColStatsCached(PrunedPartitionList partList) {
  Map<String, ColumnStatsList> cache = ctx.getOpContext().getColStatsCache();
  return cache.get(partList.getKey());
}
/**
 * Discards the current compilation context and restarts the id sequencer,
 * so subsequent operators are numbered from scratch.
 */
public void resetOpContext() {
  opContext = new CompilationOpContext();
  sequencer = new AtomicInteger();
}
/**
 * Re-binds this operator to a different compilation context and assigns it a
 * new id drawn from that context. Does nothing when the context is unchanged.
 *
 * @param ctx compilation context to bind to
 */
public void setCompilationOpContext(CompilationOpContext ctx) {
  if (cContext == ctx) {
    // Already bound to this context; keep the existing id.
    return;
  }
  cContext = ctx;
  id = String.valueOf(ctx.nextOperatorId());
  initOperatorId();
}
prunedPartitions.clear(); if (ctx != null) { ctx.getOpContext().getColStatsCache().clear();
this.viewsTokenRewriteStreams = new HashMap<>(); this.rewrittenStatementContexts = new HashSet<>(); this.opContext = new CompilationOpContext();
/**
 * Builds a table-scan operator over a dummy "db.table" whose filter compares
 * the constant {@code i} against column {@code c1} — handy for producing
 * distinguishable scans in tests.
 *
 * @param i constant embedded in the scan's filter expression
 * @return the constructed table-scan operator
 */
private Operator<TableScanDesc> getTsOp(int i) {
  Table tblMetadata = new Table("db", "table");
  TableScanDesc desc = new TableScanDesc("alias_" + cCtx.nextOperatorId(), tblMetadata);

  // Filter children: the integer constant i and column c1 of table alias "aa".
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, Integer.valueOf(i));
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "c1", "aa", false);
  List<ExprNodeDesc> children = Lists.newArrayList(constant, column);
  desc.setFilterExpr(new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, udf, children));

  return OperatorFactory.get(cCtx, desc);
}
/**
 * Get optimized logical plan for the given QB tree in the semAnalyzer.
 *
 * @return the optimized Calcite plan
 * @throws SemanticException if Calcite planning fails
 */
RelNode logicalPlan() throws SemanticException {
  // Lazily create the column-access tracker the planner action records into.
  if (this.columnAccessInfo == null) {
    this.columnAccessInfo = new ColumnAccessInfo();
  }
  // The planner action carries the pruned-partition and column-stats caches so
  // Calcite planning reuses metadata already computed during analysis.
  CalcitePlannerAction calcitePlannerAction = new CalcitePlannerAction(
      prunedPartitions, ctx.getOpContext().getColStatsCache(), this.columnAccessInfo);
  try {
    return Frameworks.withPlanner(calcitePlannerAction,
        Frameworks.newConfigBuilder().typeSystem(new HiveTypeSystemImpl()).build());
  } catch (Exception e) {
    // rethrowCalciteException always throws; the AssertionError is unreachable
    // and only documents that assumption for the compiler.
    rethrowCalciteException(e);
    throw new AssertionError("rethrowCalciteException didn't throw for " + e.getMessage());
  }
}
/**
 * Create a Context with a given executionId. ExecutionId, together with
 * user name and conf, will determine the temporary directory locations.
 */
private Context(Configuration conf, String executionId) {
  this.conf = conf;
  this.executionId = executionId;
  this.rewrittenStatementContexts = new HashSet<>();

  // local & non-local tmp location is configurable. however it is the same across
  // all external file systems
  this.nonLocalScratchPath = new Path(SessionState.getHDFSSessionPath(conf), executionId);
  this.localScratchDir =
      new Path(SessionState.getLocalSessionPath(conf), executionId).toUri().getPath();
  this.scratchDirPermission = HiveConf.getVar(conf, HiveConf.ConfVars.SCRATCHDIRPERMISSION);
  this.stagingDir = HiveConf.getVar(conf, HiveConf.ConfVars.STAGINGDIR);
  this.opContext = new CompilationOpContext();
  this.viewsTokenRewriteStreams = new HashMap<>();
}
ctx.getOpContext().getColStatsCache(), this.columnAccessInfo);
/**
 * Create a Context with a given executionId. ExecutionId, together with
 * user name and conf, will determine the temporary directory locations.
 */
public Context(Configuration conf, String executionId) {
  this.conf = conf;
  this.executionId = executionId;

  // local & non-local tmp location is configurable. however it is the same across
  // all external file systems
  this.nonLocalScratchPath = new Path(SessionState.getHDFSSessionPath(conf), executionId);
  this.localScratchDir =
      new Path(SessionState.getLocalSessionPath(conf), executionId).toUri().getPath();
  this.scratchDirPermission = HiveConf.getVar(conf, HiveConf.ConfVars.SCRATCHDIRPERMISSION);
  this.stagingDir = HiveConf.getVar(conf, HiveConf.ConfVars.STAGINGDIR);
  this.opContext = new CompilationOpContext();
  this.viewsTokenRewriteStreams = new HashMap<>();
}
/** Prepares a fresh map-reduce work object and compilation context for each test. */
@Override
protected void setUp() {
  mr = PlanUtils.getMapRedWork();
  ctx = new CompilationOpContext();
}
/**
 * Wires a fake vectorized data source in front of {@code op} so tests can feed
 * it a fixed sequence of row batches.
 *
 * @param source row batches the fake parent will emit
 * @param op operator under test that receives the batches
 * @return the newly created fake parent operator
 */
public static FakeVectorDataSourceOperator addFakeVectorDataSourceParent(
    Iterable<VectorizedRowBatch> source, Operator<? extends OperatorDesc> op) {
  FakeVectorDataSourceOperator parent =
      new FakeVectorDataSourceOperator(new CompilationOpContext(), source);

  // Link parent -> op in both directions; each side gets its own single-entry list.
  List<Operator<? extends OperatorDesc>> parents = new ArrayList<>(1);
  parents.add(parent);
  op.setParentOperators(parents);

  List<Operator<? extends OperatorDesc>> children = new ArrayList<>(1);
  children.add(op);
  parent.setChildOperators(children);

  return parent;
}
MapredLocalTask localTask = new MapredLocalTask(localWork, job, false); HashTableSinkOperator sink = new TemporaryHashSinkOperator(new CompilationOpContext(), desc); sink.setParentOperators(new ArrayList<Operator<? extends OperatorDesc>>(directWorks));
MapredLocalTask localTask = new MapredLocalTask(localWork, job, false); HashTableSinkOperator sink = new TemporaryHashSinkOperator(new CompilationOpContext(), desc); sink.setParentOperators(new ArrayList<Operator<? extends OperatorDesc>>(directWorks));
/**
 * Verifies that conf vars named in HIVESCRIPT_ENV_BLACKLIST are withheld from
 * the script environment while non-blacklisted vars pass through.
 */
public void testScriptOperatorBlacklistedEnvVarsProcessing() {
  ScriptOperator scriptOperator = new ScriptOperator(new CompilationOpContext());

  // Blacklist "foobar" and set both a blacklisted and a non-blacklisted var.
  Configuration hconf = new JobConf(ScriptOperator.class);
  HiveConf.setVar(hconf, HiveConf.ConfVars.HIVESCRIPT_ENV_BLACKLIST, "foobar");
  hconf.set("foobar", "foobar");
  hconf.set("barfoo", "barfoo");

  Map<String, String> env = new HashMap<String, String>();
  scriptOperator.addJobConfToEnvironment(hconf, env);

  Assert.assertFalse(env.containsKey("foobar"));
  Assert.assertTrue(env.containsKey("barfoo"));
}