/**
 * Builds and runs a {@code MapRedTask} over the field {@code mr} (the
 * MapredWork under test) and fails the calling test if the task does not
 * exit with status 0. The calling test's name is recovered from the stack
 * purely for log-message context.
 *
 * @throws Exception if task initialization or execution throws
 */
private void executePlan() throws Exception {
    // Stack index 1 is the immediate caller, i.e. the test method itself.
    String testName = new Exception().getStackTrace()[1].getMethodName();
    MapRedTask mrtask = new MapRedTask();
    DriverContext dctx = new DriverContext();
    mrtask.setWork(mr);
    mrtask.initialize(queryState, null, dctx, null);
    int exitVal = mrtask.execute(dctx);
    if (exitVal != 0) {
        LOG.error(testName + " execution failed with exit status: " + exitVal);
    }
    // Assert directly on the exit status instead of the original
    // assertEquals(true, false): the failure message now reports the
    // actual exit value instead of an opaque "expected true but was false".
    assertEquals(testName + " expected exit status 0", 0, exitVal);
    LOG.info(testName + " execution completed successfully");
}
DriverContext driverCxt = new DriverContext(); PartialScanTask taskExec = new PartialScanTask(); taskExec.initialize(queryState, null, driverCxt, new CompilationOpContext());
MapredLocalWork plan = SerializationUtilities.deserializePlan(pathData, MapredLocalWork.class); MapredLocalTask ed = new MapredLocalTask(plan, conf, isSilent); ret = ed.executeInProcess(new DriverContext()); ret = ed.execute(new DriverContext());
MapredLocalWork plan = SerializationUtilities.deserializePlan(pathData, MapredLocalWork.class); MapredLocalTask ed = new MapredLocalTask(plan, conf, isSilent); ret = ed.executeInProcess(new DriverContext()); ret = ed.execute(new DriverContext());
/**
 * Verifies that when a MapRedTask is configured to submit via a child JVM
 * (SUBMITVIACHILD = true) while the current UGI uses PROXY authentication,
 * the spawned child process receives HADOOP_PROXY_USER=&lt;current user&gt;
 * in the environment passed to {@code spawn}.
 */
@Test
public void mrTaskSumbitViaChildWithImpersonation() throws IOException, LoginException {
    // Force proxy-style authentication on the current UGI so the task
    // takes the impersonation code path.
    Utils.getUGI().setAuthenticationMethod(PROXY);
    // Mocked Context supplying a real, writable local tmp path.
    Context ctx = Mockito.mock(Context.class);
    when(ctx.getLocalTmpPath()).thenReturn(new Path(System.getProperty("java.io.tmpdir")));
    DriverContext dctx = new DriverContext(ctx);
    QueryState queryState = new QueryState.Builder().build();
    HiveConf conf= queryState.getConf();
    // Route execution through a child JVM rather than in-process.
    conf.setBoolVar(HiveConf.ConfVars.SUBMITVIACHILD, true);
    MapredWork mrWork = new MapredWork();
    mrWork.setMapWork(Mockito.mock(MapWork.class));
    // Spy so the real execute() runs but spawn() invocations can be verified.
    MapRedTask mrTask = Mockito.spy(new MapRedTask());
    mrTask.setWork(mrWork);
    mrTask.initialize(queryState, null, dctx, null);
    // Stub the job-exec helper so the "child" reports success without
    // actually running a Hadoop job.
    mrTask.jobExecHelper = Mockito.mock(HadoopJobExecHelper.class);
    when(mrTask.jobExecHelper.progressLocal(Mockito.any(Process.class), Mockito.anyString())).thenReturn(0);
    mrTask.execute(dctx);
    // Capture the environment array handed to spawn() — the third argument.
    ArgumentCaptor<String[]> captor = ArgumentCaptor.forClass(String[].class);
    verify(mrTask).spawn(Mockito.anyString(), Mockito.anyString(), captor.capture());
    String expected = "HADOOP_PROXY_USER=" + Utils.getUGI().getUserName();
    Assert.assertTrue(Arrays.asList(captor.getValue()).contains(expected));
}
DriverContext driverCxt = new DriverContext(); Task<PartialScanWork> psTask = TaskFactory.get(scanWork, parseCtx.getConf()); psTask.initialize(parseCtx.getQueryState(), null, driverCxt, op.getCompilationOpContext());
truncateWork.setListBucketingCtx(truncateTableDesc.getLbCtx()); truncateWork.setMapperCannotSpanPartns(true); DriverContext driverCxt = new DriverContext(); ColumnTruncateTask taskExec = new ColumnTruncateTask(); taskExec.initialize(queryState, null, driverCxt, null);
DriverContext driverCxt = new DriverContext(); Task<PartialScanWork> partialScanTask = TaskFactory.get(scanWork, parseContext.getConf()); partialScanTask.initialize(parseContext.getQueryState(), null, driverCxt,
createTableTask.execute(new DriverContext(new Context(conf))); newTable = db.getTable(newTableName); } catch(IOException|HiveException ex) {
DriverContext driverCxt = new DriverContext();
aliasToWork.put(mergeFilesDesc.getInputDir().toString(), mergeOp); mergeWork.setAliasToWork(aliasToWork); DriverContext driverCxt = new DriverContext(); Task<?> task; if (conf.getVar(ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")) {
aliasToWork.put(mergeFilesDesc.getInputDir().toString(), mergeOp); mergeWork.setAliasToWork(aliasToWork); DriverContext driverCxt = new DriverContext(); Task task; if (conf.getVar(ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")) {
truncateWork.setListBucketingCtx(truncateTableDesc.getLbCtx()); truncateWork.setMapperCannotSpanPartns(true); DriverContext driverCxt = new DriverContext(); ColumnTruncateTask taskExec = new ColumnTruncateTask(); taskExec.initialize(queryState, null, driverCxt, null);
return handleInterruption("before running tasks."); DriverContext driverCxt = new DriverContext(ctx); driverCxt.prepare(plan);
DriverContext driverCxt = new DriverContext(ctx); driverCxt.prepare(plan);
DriverContext driverCxt = new DriverContext(); PartialScanTask taskExec = new PartialScanTask(); taskExec.initialize(hiveConf, null, driverCxt);
DriverContext driverCxt = new DriverContext(); Task<PartialScanWork> psTask = TaskFactory.get(scanWork, parseCtx.getConf()); psTask.initialize(parseCtx.getConf(), null, driverCxt);
/**
 * Handle a partial scan command: rewires the task graph so that a
 * PartialScanTask (built from the table scan's input paths) replaces the
 * current root task and feeds the given StatsTask.
 *
 * It is composed of PartialScanTask followed by StatsTask.
 *
 * @param tableScan    operator whose stats aggregation prefix and input
 *                     paths seed the partial scan
 * @param parseContext supplies the HiveConf and QueryState
 * @param statsWork    stats work to mark as a partial-scan analyze command
 * @param context      proc context whose rootTasks list is rewritten
 * @param statsTask    task made dependent on the new partial scan task
 */
private void handlePartialScanCommand(TableScanOperator tableScan, ParseContext parseContext,
        StatsWork statsWork, GenSparkProcContext context, Task<StatsWork> statsTask)
        throws SemanticException {
    String aggregationKey = tableScan.getConf().getStatsAggPrefix();
    // getInputPathsForPartialScan may append to the buffer in place, so the
    // key is re-read from the buffer afterwards.
    StringBuffer aggregationKeyBuffer = new StringBuffer(aggregationKey);
    List<Path> inputPaths = GenMapRedUtils.getInputPathsForPartialScan(tableScan, aggregationKeyBuffer);
    aggregationKey = aggregationKeyBuffer.toString();
    // scan work
    PartialScanWork scanWork = new PartialScanWork(inputPaths);
    scanWork.setMapperCannotSpanPartns(true);
    scanWork.setAggKey(aggregationKey);
    // stats work
    statsWork.setPartialScanAnalyzeCommand(true);
    // partial scan task
    DriverContext driverCxt = new DriverContext();
    @SuppressWarnings("unchecked")
    Task<PartialScanWork> partialScanTask = TaskFactory.get(scanWork, parseContext.getConf());
    partialScanTask.initialize(parseContext.getConf(), null, driverCxt);
    partialScanTask.setWork(scanWork);
    statsWork.setSourceTask(partialScanTask);
    // task dependency: the partial scan task takes the current task's place
    // as a root and the stats task runs after it.
    context.rootTasks.remove(context.currentTask);
    context.rootTasks.add(partialScanTask);
    partialScanTask.addDependentTask(statsTask);
}
DriverContext driverCxt = new DriverContext(); Task<PartialScanWork> partialScanTask = TaskFactory.get(scanWork, parseContext.getConf()); partialScanTask.initialize(parseContext.getConf(), null, driverCxt);
truncateWork.setListBucketingCtx(truncateTableDesc.getLbCtx()); truncateWork.setMapperCannotSpanPartns(true); DriverContext driverCxt = new DriverContext(); ColumnTruncateTask taskExec = new ColumnTruncateTask(); taskExec.initialize(db.getConf(), null, driverCxt);