/**
 * Builds a CLI driver bound to the current session's configuration, or a
 * fresh {@code Configuration} when no session is active.
 */
public CliDriver() {
  SessionState ss = SessionState.get();
  if (ss != null) {
    conf = ss.getConf();
  } else {
    // No active session (e.g. early startup) — fall back to defaults.
    conf = new Configuration();
  }
  Logger log = LoggerFactory.getLogger("CliDriver");
  if (log.isDebugEnabled()) {
    log.debug("CliDriver inited with classpath {}", System.getProperty("java.class.path"));
  }
  console = new LogHelper(log);
}
/**
 * Initializes the analyzer with the query's state and metastore handle and
 * allocates empty task/entity collections for the compile.
 *
 * @throws SemanticException wrapping any failure during initialization
 */
public BaseSemanticAnalyzer(QueryState queryState, Hive db) throws SemanticException {
  try {
    this.queryState = queryState;
    this.conf = queryState.getConf();
    this.db = db;
    this.rootTasks = new ArrayList<>();
    this.LOG = LoggerFactory.getLogger(this.getClass().getName());
    this.console = new LogHelper(LOG);
    this.idToTableNameMap = new HashMap<>();
    // Linked sets: preserve discovery order of read/write entities.
    this.inputs = new LinkedHashSet<>();
    this.outputs = new LinkedHashSet<>();
  } catch (Exception e) {
    // Surface any setup failure as a SemanticException, preserving the cause.
    throw new SemanticException(e);
  }
}
/**
 * Initializes the analyzer from the query's state and metastore handle,
 * allocating empty task/entity collections and caching the query's
 * transaction manager.
 *
 * @throws SemanticException wrapping any failure during initialization
 */
public BaseSemanticAnalyzer(QueryState queryState, Hive db) throws SemanticException {
  try {
    this.queryState = queryState;
    this.conf = queryState.getConf();
    this.db = db;
    this.rootTasks = new ArrayList<>();
    this.LOG = LoggerFactory.getLogger(this.getClass().getName());
    this.console = new LogHelper(LOG);
    this.idToTableNameMap = new HashMap<>();
    // Linked sets: preserve discovery order of read/write entities.
    this.inputs = new LinkedHashSet<>();
    this.outputs = new LinkedHashSet<>();
    this.txnManager = queryState.getTxnManager();
  } catch (Exception e) {
    // Surface any setup failure as a SemanticException, preserving the cause.
    throw new SemanticException(e);
  }
}
// NOTE(review): mid-method fragment — `ss` and `LOG` are defined outside this
// view. Uses the session's conf when a SessionState exists, otherwise falls
// back to a default Configuration; presumably runs during startup — confirm.
Configuration conf = (ss != null) ? ss.getConf() : new Configuration(); LogHelper console = new LogHelper(LOG);
// Wrap the (out-of-view) LOG in a session console, then read the configured
// history-file directory. NOTE(review): assumes `ss` is a live SessionState —
// defined outside this view; confirm a session is active before this runs.
console = new LogHelper(LOG); String conf_file_loc = ss.getConf().getVar( HiveConf.ConfVars.HIVEHISTORYFILELOC);
// Duplicate of the fragment above: build the console wrapper and read the
// history-file directory. NOTE(review): `ss` and `LOG` come from outside this
// view; verify a SessionState is active at this point.
console = new LogHelper(LOG); String conf_file_loc = ss.getConf().getVar( HiveConf.ConfVars.HIVEHISTORYFILELOC);
/**
 * Builds a local map-reduce task around the given plan and job conf.
 *
 * @param isSilent suppresses console INFO output when true
 * @throws HiveException if registering the work plan fails
 */
public MapredLocalTask(MapredLocalWork plan, JobConf job, boolean isSilent) throws HiveException {
  setWork(plan);
  this.job = job;
  this.console = new LogHelper(LOG, isSilent);
}
// Wires this task to the current query: records the plan, marks the task as
// initialized, and caches the query state, its conf, the driver context, and
// a console wrapper around the task logger. opContext is accepted but unused
// here — presumably consumed by overrides; confirm against subclasses.
public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext driverContext, CompilationOpContext opContext) { this.queryPlan = queryPlan; setInitialized(); this.queryState = queryState; this.conf = queryState.getConf(); this.driverContext = driverContext; console = new LogHelper(LOG); }
// NOTE(review): the next declaration is truncated in this view.
public void setQueryDisplay(QueryDisplay queryDisplay) {
/**
 * Constructor when invoked from QL. Builds a default JobConf keyed to this
 * class and an exec helper that reports through the console.
 */
public ExecDriver() {
  super();
  // job and console are both required by the exec helper below.
  job = new JobConf(ExecDriver.class);
  console = new LogHelper(LOG);
  jobExecHelper = new HadoopJobExecHelper(job, console, this, this);
}
/**
 * Initialize or retrieve the console object for SessionState.
 * Lazily creates the shared console on first use.
 */
public static LogHelper getConsole() {
  if (_console != null) {
    return _console;
  }
  _console = new LogHelper(LoggerFactory.getLogger("SessionState"));
  return _console;
}
/**
 * Constructor/Initialization for invocation as an independent utility.
 *
 * @param isSilent suppresses console INFO output when true
 * @throws HiveException if registering the work plan fails
 */
public ExecDriver(MapredWork plan, JobConf job, boolean isSilent) throws HiveException {
  setWork(plan);
  this.job = job;
  this.console = new LogHelper(LOG, isSilent);
  this.jobExecHelper = new HadoopJobExecHelper(this.job, this.console, this, this);
}
/**
 * Initialize or retrieve the console object for SessionState.
 * The shared instance is created on first call and reused afterwards.
 */
public static LogHelper getConsole() {
  if (_console == null) {
    Logger sessionLog = LoggerFactory.getLogger("SessionState");
    _console = new LogHelper(sessionLog);
  }
  return _console;
}
// Wires this task to the current query: records the plan, marks the task as
// initialized, and caches the query state, driver context and a console
// wrapper around the task logger. Unlike the sibling variant, the conf is
// only adopted from the query state when none was set earlier — a previously
// injected conf wins. opContext is accepted but unused here.
public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext driverContext, CompilationOpContext opContext) { this.queryPlan = queryPlan; setInitialized(); this.queryState = queryState; if (null == this.conf) { this.conf = queryState.getConf(); } this.driverContext = driverContext; console = new LogHelper(LOG); }
// NOTE(review): the next declaration is truncated in this view.
public void setQueryDisplay(QueryDisplay queryDisplay) {
/**
 * Creates a local map-reduce task for the given plan.
 *
 * @param plan     the local work to execute; registered via {@code setWork}
 * @param job      job configuration retained for execution
 * @param isSilent when true, the console suppresses INFO output
 * @throws HiveException if registering the work plan fails
 */
public MapredLocalTask(MapredLocalWork plan, JobConf job, boolean isSilent) throws HiveException {
  // Register the plan first, then capture the job conf and console wrapper.
  setWork(plan);
  this.job = job;
  console = new LogHelper(LOG, isSilent);
}
/**
 * Constructor/Initialization for invocation as an independent utility.
 *
 * @param plan     the map-reduce work to execute; registered via {@code setWork}
 * @param job      job configuration shared with the exec helper
 * @param isSilent when true, the console suppresses INFO output
 * @throws HiveException if registering the work plan fails
 */
public ExecDriver(MapredWork plan, JobConf job, boolean isSilent) throws HiveException {
  setWork(plan);
  this.job = job;
  console = new LogHelper(LOG, isSilent);
  // The helper reports job progress through the same console.
  jobExecHelper = new HadoopJobExecHelper(job, console, this, this);
}
/**
 * Creates a basic-stats task for the given work; dynamic-partition specs
 * start out unset and are filled in later.
 */
public BasicStatsTask(HiveConf conf, BasicStatsWork work) {
  super();
  this.conf = conf;
  this.work = work;
  this.dpPartSpecs = null;
  this.console = new LogHelper(LOG);
}
/**
 * Constructor when invoked from QL. Sets up a class-keyed JobConf, a console
 * wrapper around the task logger, and the Hadoop job exec helper.
 */
public ExecDriver() {
  super();
  this.console = new LogHelper(LOG);
  this.job = new JobConf(ExecDriver.class);
  this.jobExecHelper = new HadoopJobExecHelper(this.job, this.console, this, this);
}
/**
 * Creates a stats task that computes basic statistics without launching a
 * cluster job.
 */
public BasicStatsNoJobTask(HiveConf conf, BasicStatsNoJobWork work) {
  this.work = work;
  this.conf = conf;
  this.console = new LogHelper(LOG);
}
/**
 * Sets up the monitor from the session conf: resolves the job-monitor
 * timeout, decides whether in-place (terminal) progress rendering is
 * possible, and picks the matching update function.
 */
protected SparkJobMonitor(HiveConf hiveConf) {
  this.monitorTimeoutInterval =
      hiveConf.getTimeVar(HiveConf.ConfVars.SPARK_JOB_MONITOR_TIMEOUT, TimeUnit.SECONDS);
  // Short-circuit kept intentionally: the session console is only consulted
  // when in-place rendering is possible at all.
  this.inPlaceUpdate =
      InPlaceUpdate.canRenderInPlace(hiveConf) && !SessionState.getConsole().getIsSilent();
  this.console = new SessionState.LogHelper(LOG);
  this.updateFunction = updateFunction();
}
// Creates a fresh LogHelper around the suite logger before each test.
@Before public void setup() { logHelper = new LogHelper(LOG); }
// NOTE(review): the annotated test method below is truncated in this view;
// it is declared to expect a MapJoinMemoryExhaustionError.
@Test(expected=MapJoinMemoryExhaustionError.class)