@Override
public void initialize(QueryState queryState, QueryPlan queryPlan,
    DriverContext ctx, CompilationOpContext opContext) {
  super.initialize(queryState, queryPlan, ctx, opContext);
}
@Override
public void initialize(QueryState queryState, QueryPlan queryPlan,
    DriverContext driverContext, CompilationOpContext opContext) {
  super.initialize(queryState, queryPlan, driverContext, opContext);
}
@Override
public void initialize(QueryState queryState, QueryPlan queryPlan,
    DriverContext driverContext, CompilationOpContext opContext) {
  super.initialize(queryState, queryPlan, driverContext, opContext);
  jc = new JobConf(conf);
}
@Override
public void initialize(QueryState queryState, QueryPlan queryPlan,
    DriverContext driverContext, CompilationOpContext opContext) {
  super.initialize(queryState, queryPlan, driverContext, opContext);
  job = new JobConf(conf, ColumnTruncateTask.class);
  jobExecHelper = new HadoopJobExecHelper(job, this.console, this, this);
}
@Override
public void initialize(QueryState queryState, QueryPlan queryPlan,
    DriverContext driverContext, CompilationOpContext opContext) {
  super.initialize(queryState, queryPlan, driverContext, opContext);
  job = new JobConf(conf, MergeFileTask.class);
  jobExecHelper = new HadoopJobExecHelper(job, this.console, this, this);
}
@Override
public void initialize(QueryState queryState, QueryPlan queryPlan,
    DriverContext driverContext, CompilationOpContext opContext) {
  super.initialize(queryState, queryPlan, driverContext, opContext);
  job = new JobConf(conf, PartialScanTask.class);
  jobExecHelper = new HadoopJobExecHelper(job, this.console, this, this);
}
@Override
public void initialize(QueryState queryState, QueryPlan queryPlan,
    DriverContext driverContext, CompilationOpContext opContext) {
  super.initialize(queryState, queryPlan, driverContext, opContext);
  job = new JobConf(conf, ExecDriver.class);
  execContext = new ExecMapperContext(job);
  // we don't use the HadoopJobExecHooks for local tasks
  this.jobExecHelper = new HadoopJobExecHelper(job, console, this, null);
}
@Override
public void initialize(QueryState queryState, QueryPlan queryPlan,
    DriverContext ctx, CompilationOpContext opContext) {
  super.initialize(queryState, queryPlan, ctx, opContext);
  work.initializeForFetch(opContext);
  try {
    JobConf job = new JobConf(conf);
    ftOp = new FetchOperator(work.getfWork(), job);
  } catch (Exception e) {
    // initialize() declares no checked exceptions, so log and rethrow unchecked.
    LOG.error(StringUtils.stringifyException(e));
    throw new RuntimeException(e);
  }
}
@Override
public void initialize(QueryState queryState, QueryPlan queryPlan,
    DriverContext ctx, CompilationOpContext opContext) {
  super.initialize(queryState, queryPlan, ctx, opContext);
  // Pick the formatter to use to display the results. Either the
  // normal human readable output or a json object.
  formatter = MetaDataFormatUtils.getFormatter(conf);
  INTERMEDIATE_ARCHIVED_DIR_SUFFIX =
      HiveConf.getVar(conf, ConfVars.METASTORE_INT_ARCHIVED);
  INTERMEDIATE_ORIGINAL_DIR_SUFFIX =
      HiveConf.getVar(conf, ConfVars.METASTORE_INT_ORIGINAL);
  INTERMEDIATE_EXTRACTED_DIR_SUFFIX =
      HiveConf.getVar(conf, ConfVars.METASTORE_INT_EXTRACTED);
}
/**
 * Initialization when invoked from QL.
 */
@Override
public void initialize(QueryState queryState, QueryPlan queryPlan,
    DriverContext driverContext, CompilationOpContext opContext) {
  super.initialize(queryState, queryPlan, driverContext, opContext);
  job = new JobConf(conf, ExecDriver.class);

  initializeFiles("tmpjars", getResource(conf, SessionState.ResourceType.JAR));
  initializeFiles("tmpfiles", getResource(conf, SessionState.ResourceType.FILE));
  initializeFiles("tmparchives", getResource(conf, SessionState.ResourceType.ARCHIVE));
  conf.stripHiddenConfigurations(job);
  this.jobExecHelper = new HadoopJobExecHelper(job, console, this, this);
}
@Override
public void initialize(QueryState queryState, QueryPlan queryPlan,
    DriverContext ctx, CompilationOpContext opContext) {
  super.initialize(queryState, queryPlan, ctx, opContext);
  if (work.getBasicStatsWork() != null) {
    // Basic stats run first; record whether column stats will follow.
    BasicStatsTask task = new BasicStatsTask(conf, work.getBasicStatsWork());
    task.followedColStats = work.hasColStats();
    processors.add(0, task);
  } else if (work.isFooterScan()) {
    BasicStatsNoJobTask t = new BasicStatsNoJobTask(conf, work.getBasicStatsNoJobWork());
    processors.add(0, t);
  }
  if (work.hasColStats()) {
    processors.add(new ColStatsProcessor(work.getColStats(), conf));
  }
  for (IStatsProcessor p : processors) {
    p.initialize(opContext);
  }
}
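// All of the overrides above share one contract: delegate to
// super.initialize(...) first, so the base Task captures the query state,
// plan, driver context, and compilation-op context (and populates the
// inherited conf field), and only then build per-task execution state such
// as a JobConf derived from conf. The sketch below is a hypothetical
// illustration of that pattern, assuming the Hive 2.x-era Task API;
// SketchTask and SketchWork are made-up names, and the stage type and
// stubbed methods are illustrative, not a real Hive task.
import java.io.Serializable;

import org.apache.hadoop.hive.ql.CompilationOpContext;
import org.apache.hadoop.hive.ql.DriverContext;
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.plan.api.StageType;
import org.apache.hadoop.mapred.JobConf;

public class SketchTask extends Task<SketchTask.SketchWork> {

  // Hypothetical work descriptor; real tasks carry their own *Work class.
  public static class SketchWork implements Serializable {
    private static final long serialVersionUID = 1L;
  }

  private JobConf job;

  @Override
  public void initialize(QueryState queryState, QueryPlan queryPlan,
      DriverContext driverContext, CompilationOpContext opContext) {
    // Step 1: let the base class wire up shared state before touching conf.
    super.initialize(queryState, queryPlan, driverContext, opContext);
    // Step 2: derive per-task execution state from the session configuration.
    job = new JobConf(conf, SketchTask.class);
  }

  @Override
  protected int execute(DriverContext driverContext) {
    return 0; // 0 signals success to the driver
  }

  @Override
  public StageType getType() {
    return StageType.MAPRED; // illustrative choice of stage type
  }

  @Override
  public String getName() {
    return "SKETCH";
  }
}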