/**
 * Returns the compilation-time operator context held by the driver context.
 *
 * @return the {@link CompilationOpContext} for the current compilation
 */
public CompilationOpContext getOpContext() {
  return this.ctx.getOpContext();
}
/**
 * Accessor for the shared operator context of this compilation.
 *
 * @return the {@link CompilationOpContext} obtained from the context
 */
public CompilationOpContext getOpContext() {
  return this.ctx.getOpContext();
}
/**
 * Exposes the per-compilation column statistics cache.
 *
 * @return map from pruned-partition-list key to its cached column statistics
 */
public Map<String, ColumnStatsList> getColStatsCache() {
  return this.ctx.getOpContext().getColStatsCache();
}
/**
 * Looks up cached column statistics for the given pruned partition list.
 *
 * @param partList pruned partition list whose key identifies the cache entry
 * @return the cached column statistics for that key, or {@code null} if absent
 */
public ColumnStatsList getColStatsCached(PrunedPartitionList partList) {
  String cacheKey = partList.getKey();
  return ctx.getOpContext().getColStatsCache().get(cacheKey);
}
// Reset cached planner state: drop the pruned-partition results and, when a
// context exists, the column-statistics cache attached to its op context.
// NOTE(review): fragment — enclosing method body continues outside this view.
prunedPartitions.clear(); if (ctx != null) { ctx.getOpContext().getColStatsCache().clear();
@Override public void resetFetch() throws IOException { if (lDrvState.driverState == DriverState.DESTROYED || lDrvState.driverState == DriverState.CLOSED) { throw new IOException("FAILED: driver has been cancelled, closed or destroyed."); } if (isFetchingTable()) { try { fetchTask.clearFetch(); } catch (Exception e) { throw new IOException("Error closing the current fetch task", e); } // FetchTask should not depend on the plan. fetchTask.initialize(queryState, null, null, ctx.getOpContext()); } else { ctx.resetStream(); resStream = null; } }
public void resetFetch() throws IOException { if (lDrvState.driverState == DriverState.DESTROYED || lDrvState.driverState == DriverState.CLOSED) { throw new IOException("FAILED: driver has been cancelled, closed or destroyed."); } if (isFetchingTable()) { try { fetchTask.clearFetch(); } catch (Exception e) { throw new IOException("Error closing the current fetch task", e); } // FetchTask should not depend on the plan. fetchTask.initialize(queryState, null, null, ctx.getOpContext()); } else { ctx.resetStream(); resStream = null; } }
/**
 * Returns EXPLAIN EXTENDED output for a semantically analyzed query.
 *
 * @param sem semantic analyzer for analyzed query
 * @param plan query plan
 * @param astTree AST tree dump (currently unused by this method)
 * @return the explain output, or {@code null} if generating it failed
 * @throws java.io.IOException
 */
private String getExplainOutput(BaseSemanticAnalyzer sem, QueryPlan plan,
    ASTNode astTree) throws IOException {
  String ret = null;
  ExplainTask task = new ExplainTask();
  task.initialize(queryState, plan, null, ctx.getOpContext());
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  // Fix: use an explicit charset so the output does not depend on the platform
  // default, and close/flush the PrintStream before reading the buffer so no
  // buffered bytes are lost.
  try (PrintStream ps = new PrintStream(baos, false, "UTF-8")) {
    List<Task<?>> rootTasks = sem.getAllRootTasks();
    task.getJSONPlan(ps, rootTasks, sem.getFetchTask(), false, true, true);
    ps.flush();
    ret = baos.toString("UTF-8");
  } catch (Exception e) {
    // Best-effort: explain output is advisory, so log and return null.
    LOG.warn("Exception generating explain output: " + e, e);
  }
  return ret;
}
private void useFetchFromCache(CacheEntry cacheEntry) { // Change query FetchTask to use new location specified in results cache. FetchTask fetchTaskFromCache = (FetchTask) TaskFactory.get(cacheEntry.getFetchWork()); fetchTaskFromCache.initialize(queryState, plan, null, ctx.getOpContext()); plan.setFetchTask(fetchTaskFromCache); cacheUsage = new CacheUsage(CacheUsage.CacheStatus.QUERY_USING_CACHE, cacheEntry); }
// Set up an ExplainTask bound to the current query state and capture its
// output in an in-memory stream.
// NOTE(review): fragment — enclosing method body continues outside this view.
String ret = null; ExplainTask task = new ExplainTask(); task.initialize(queryState, plan, null, ctx.getOpContext()); ByteArrayOutputStream baos = new ByteArrayOutputStream(); PrintStream ps = new PrintStream(baos);
/**
 * Builds the optimized logical (Calcite) plan for the QB tree held by this
 * semantic analyzer.
 *
 * @return the optimized Calcite {@link RelNode} plan
 * @throws SemanticException if Calcite planning fails
 */
RelNode logicalPlan() throws SemanticException {
  if (this.columnAccessInfo == null) {
    this.columnAccessInfo = new ColumnAccessInfo();
  }
  CalcitePlannerAction plannerAction = new CalcitePlannerAction(
      prunedPartitions, ctx.getOpContext().getColStatsCache(), this.columnAccessInfo);
  try {
    return Frameworks.withPlanner(plannerAction,
        Frameworks.newConfigBuilder().typeSystem(new HiveTypeSystemImpl()).build());
  } catch (Exception e) {
    // Translate Calcite failures into the appropriate Hive exception.
    rethrowCalciteException(e);
    throw new AssertionError("rethrowCalciteException didn't throw for " + e.getMessage());
  }
}
// Carry the operator context and explain configuration over from the
// original compilation context into this one.
// NOTE(review): fragment — enclosing method body continues outside this view.
ctx.setOpContext(origCtx.getOpContext()); ctx.setExplainConfig(origCtx.getExplainConfig());
// Announce the job, bind the task to the query/compilation state, then wrap
// it in a TaskRunner for execution.
// NOTE(review): fragment — enclosing method body continues outside this view.
console.printInfo("Launching Job " + cxt.getCurJobNo() + " out of " + jobs); tsk.initialize(queryState, plan, cxt, ctx.getOpContext()); TaskRunner tskRun = new TaskRunner(tsk);
// Announce the job, bind the task to the query/compilation state, and pair
// it with a TaskResult so its outcome can be collected by the runner.
// NOTE(review): fragment — enclosing method body continues outside this view.
console.printInfo("Launching Job " + cxt.getCurJobNo() + " out of " + jobs); tsk.initialize(queryState, plan, cxt, ctx.getOpContext()); TaskResult tskRes = new TaskResult(); TaskRunner tskRun = new TaskRunner(tsk, tskRes);
// (continuation of a call) passes the shared column-stats cache and the
// column access tracker as the remaining arguments.
// NOTE(review): fragment — the call begins outside this view.
ctx.getOpContext().getColStatsCache(), this.columnAccessInfo);
// Route merge output to the configured directory and build the file-merge
// operator against the driver's compilation op context.
// NOTE(review): fragment — enclosing method body continues outside this view.
fmd.setOutputPath(mergeFilesDesc.getOutputDir()); CompilationOpContext opContext = driverContext.getCtx().getOpContext(); Operator<? extends OperatorDesc> mergeOp = OperatorFactory.get(opContext, fmd);
// Set the merge output directory, then create the file-merge operator using
// the op context shared by the driver's compilation.
// NOTE(review): fragment — enclosing method body continues outside this view.
fmd.setOutputPath(mergeFilesDesc.getOutputDir()); CompilationOpContext opContext = driverContext.getCtx().getOpContext(); Operator<? extends OperatorDesc> mergeOp = OperatorFactory.get(opContext, fmd);
// Re-initialize the plan's fetch task against the current query state and
// compilation op context (no DriverContext is supplied here).
// NOTE(review): fragment — surrounding method not visible in this view.
plan.getFetchTask().initialize(queryState, plan, null, ctx.getOpContext());
// When a fetch task exists, prepare its work for fetching with the shared
// compilation op context.
// NOTE(review): fragment — the if-block closes outside this view.
if (fetchTask != null) { fetchTask.getWork().initializeForFetch(ctx.getOpContext());
// Guarded fetch-work initialization using the compilation op context.
// NOTE(review): fragment — the if-block closes outside this view.
if (fetchTask != null) { fetchTask.getWork().initializeForFetch(ctx.getOpContext());