private void prepareReturnValues(List<String> values, String schema) throws SemanticException { LOG.debug("prepareReturnValues : " + schema); for (String s : values) { LOG.debug(" > " + s); } ctx.setResFile(ctx.getLocalTmpPath()); Utils.writeOutput(values, ctx.getResFile(), conf); } }
private void prepareReturnValues(List<String> values, String schema) throws SemanticException { LOG.debug("prepareReturnValues : " + schema); for (String s : values) { LOG.debug(" > " + s); } ctx.setResFile(ctx.getLocalTmpPath()); writeOutput(values, ctx.getResFile()); }
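// The two variants above share one pattern: point the query's result file at a
// session-local temp path, write one line per value, and let a later fetch step
// read the file back. A minimal standalone sketch of that round trip, using plain
// java.nio in place of Hive's Context/Utils helpers (all names below are
// illustrative, not Hive API):
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

public class LocalTmpResultSketch {
  // Analogue of ctx.setResFile(ctx.getLocalTmpPath()) followed by writeOutput:
  // allocate a unique file under a local scratch location, one value per line.
  static Path writeReturnValues(List<String> values) throws IOException {
    Path resFile = Files.createTempFile("hive-resfile-", ".txt");
    Files.write(resFile, values);
    return resFile;
  }

  public static void main(String[] args) throws IOException {
    Path resFile = writeReturnValues(List.of("last_repl_id\t42"));
    // A fetch task would later stream this file back to the client.
    Files.readAllLines(resFile).forEach(System.out::println);
  }
}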
    + String.valueOf(eventTo) + " maxEventLimit " + String.valueOf(maxEventLimit));
try {
  ctx.setResFile(ctx.getLocalTmpPath());
  Task<ReplDumpWork> replDumpWorkTask = TaskFactory.get(new ReplDumpWork(
    throws IOException {
  HiveConf tempConf = new HiveConf();
  Path hConfFilePath = new Path(ctx.getLocalTmpPath(), JOBCONF_FILENAME);
  OutputStream out = null;
conf.setVar(ConfVars.HIVEADDEDJARS, Utilities.getResourceFiles(conf, SessionState.ResourceType.JAR));
Path planPath = new Path(ctx.getLocalTmpPath(), "plan.xml");
MapredLocalWork plan = getWork();
LOG.info("Generating plan file " + planPath.toString());
cmdLine = cmdLine + " -files " + files;
workDir = ctx.getLocalTmpPath().toUri().getPath();
@Test
public void mrTaskSubmitViaChildWithImpersonation() throws IOException, LoginException {
  Utils.getUGI().setAuthenticationMethod(PROXY);

  Context ctx = Mockito.mock(Context.class);
  when(ctx.getLocalTmpPath()).thenReturn(new Path(System.getProperty("java.io.tmpdir")));
  DriverContext dctx = new DriverContext(ctx);

  QueryState queryState = new QueryState.Builder().build();
  HiveConf conf = queryState.getConf();
  conf.setBoolVar(HiveConf.ConfVars.SUBMITVIACHILD, true);

  MapredWork mrWork = new MapredWork();
  mrWork.setMapWork(Mockito.mock(MapWork.class));

  MapRedTask mrTask = Mockito.spy(new MapRedTask());
  mrTask.setWork(mrWork);
  mrTask.initialize(queryState, null, dctx, null);
  mrTask.jobExecHelper = Mockito.mock(HadoopJobExecHelper.class);
  when(mrTask.jobExecHelper.progressLocal(Mockito.any(Process.class), Mockito.anyString())).thenReturn(0);

  mrTask.execute(dctx);

  ArgumentCaptor<String[]> captor = ArgumentCaptor.forClass(String[].class);
  verify(mrTask).spawn(Mockito.anyString(), Mockito.anyString(), captor.capture());

  String expected = "HADOOP_PROXY_USER=" + Utils.getUGI().getUserName();
  Assert.assertTrue(Arrays.asList(captor.getValue()).contains(expected));
}
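// The test above hinges on one seam: getLocalTmpPath() is stubbed to the JVM temp
// dir so the task can materialize its plan file without a real Hive session. A
// minimal sketch of just that stubbing pattern (assumes mockito-core on the
// classpath; a sketch, not the full test harness):
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.Context;

public class StubLocalTmpPathSketch {
  public static void main(String[] args) {
    Context ctx = mock(Context.class);
    // Redirect the session-local scratch dir to java.io.tmpdir.
    when(ctx.getLocalTmpPath())
        .thenReturn(new Path(System.getProperty("java.io.tmpdir")));
    System.out.println(ctx.getLocalTmpPath());
  }
}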
@SuppressWarnings("unchecked") @Override public void analyzeInternal(ASTNode ast) throws SemanticException { ctx.setExplainConfig(new ExplainConfiguration()); // Create a semantic analyzer for the query ASTNode input = (ASTNode) ast.getChild(0); SemanticAnalyzer sem = (SemanticAnalyzer) SemanticAnalyzerFactory.get(queryState, input); sem.analyze(input, ctx); sem.validate(); ctx.setResFile(ctx.getLocalTmpPath()); ExplainSQRewriteWork work = new ExplainSQRewriteWork(ctx.getResFile().toString(), sem.getQB(), input, ctx ); ExplainSQRewriteTask explTask = (ExplainSQRewriteTask) TaskFactory.get(work); fieldList = explTask.getResultSchema(); rootTasks.add(explTask); }
Path planPath = new Path(ctx.getLocalTmpPath(), "plan.xml");
MapredWork plan = getWork();
LOG.info("Generating plan file " + planPath.toString());
cmdLine = cmdLine + " -files " + files;
workDir = ctx.getLocalTmpPath().toUri().getPath();
@SuppressWarnings("unchecked") @Override public void analyzeInternal(ASTNode ast) throws SemanticException { ctx.setExplainConfig(new ExplainConfiguration()); // Create a semantic analyzer for the query ASTNode input = (ASTNode) ast.getChild(0); SemanticAnalyzer sem = (SemanticAnalyzer) SemanticAnalyzerFactory.get(queryState, input); sem.analyze(input, ctx); sem.validate(); ctx.setResFile(ctx.getLocalTmpPath()); ExplainSQRewriteWork work = new ExplainSQRewriteWork(ctx.getResFile().toString(), sem.getQB(), input, ctx ); ExplainSQRewriteTask explTask = (ExplainSQRewriteTask) TaskFactory.get(work, conf); fieldList = explTask.getResultSchema(); rootTasks.add(explTask); }
Path path = new Path(ctx.getLocalTmpPath(), EximUtil.METADATA_NAME);
EximUtil.createExportDump(
    FileSystem.getLocal(conf),
private void analyzeSetShowRole(ASTNode ast) throws SemanticException {
  switch (ast.getChildCount()) {
  case 0:
    ctx.setResFile(ctx.getLocalTmpPath());
    rootTasks.add(hiveAuthorizationTaskFactory.createShowCurrentRoleTask(
        getInputs(), getOutputs(), ctx.getResFile()));
    setFetchTask(createFetchTask(RoleDDLDesc.getRoleNameSchema()));
    break;
  case 1:
    rootTasks.add(hiveAuthorizationTaskFactory.createSetRoleTask(
        BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText()),
        getInputs(), getOutputs()));
    break;
  default:
    throw new SemanticException("Internal error. ASTNode expected to have 0 or 1 child. " + ast.dump());
  }
}
Path tmpPath = Utilities.generateTmpPath(ctx.getLocalTmpPath(), currTask.getId());
localwork.setTmpPath(tmpPath);
mapredWork.getMapWork().setTmpHDFSPath(Utilities.generateTmpPath(
  break;
case HiveParser.TOK_DESCTABLE:
  ctx.setResFile(ctx.getLocalTmpPath());
  analyzeDescribeTable(ast);
  break;
case HiveParser.TOK_SHOWDATABASES:
  ctx.setResFile(ctx.getLocalTmpPath());
  analyzeShowDatabases(ast);
  break;
case HiveParser.TOK_SHOWTABLES:
  ctx.setResFile(ctx.getLocalTmpPath());
  analyzeShowTables(ast);
  break;
case HiveParser.TOK_SHOWCOLUMNS:
  ctx.setResFile(ctx.getLocalTmpPath());
  analyzeShowColumns(ast);
  break;
case HiveParser.TOK_SHOW_TABLESTATUS:
  ctx.setResFile(ctx.getLocalTmpPath());
  analyzeShowTableStatus(ast);
  break;
case HiveParser.TOK_SHOW_TBLPROPERTIES:
  ctx.setResFile(ctx.getLocalTmpPath());
  analyzeShowTableProperties(ast);
  break;
case HiveParser.TOK_SHOWFUNCTIONS:
  ctx.setResFile(ctx.getLocalTmpPath());
  analyzeShowFunctions(ast);
  break;
sem.validate();
ctx.setResFile(ctx.getLocalTmpPath());
List<Task<? extends Serializable>> tasks = sem.getAllRootTasks();
if (tasks == null) {
outputs = sem.getOutputs();
ctx.setResFile(ctx.getLocalTmpPath());
List<Task<?>> tasks = sem.getAllRootTasks();
if (tasks == null) {
    resourcePlan, rpName, validate, isEnableActivate, false, isReplace);
if (validate) {
  ctx.setResFile(ctx.getLocalTmpPath());
  desc.setResFile(ctx.getResFile().toString());
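// Across all of these call sites the contract is the same: getLocalTmpPath()
// returns a per-session scratch directory on the local file system, and callers
// derive per-task files under it (plan.xml, _metadata, result files), often via
// Utilities.generateTmpPath(base, taskId). A hedged standalone sketch of that
// derivation, assuming a simple scratch-dir holder rather than the real Hive
// Context (which also handles cleanup):
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.UUID;

final class ScratchDirsSketch {
  private final Path localTmp;

  ScratchDirsSketch() throws IOException {
    // One unique local scratch dir per "session".
    this.localTmp = Files.createTempDirectory("hive-local-scratch-");
  }

  // Stand-in for Context.getLocalTmpPath().
  Path getLocalTmpPath() {
    return localTmp;
  }

  // Stand-in for Utilities.generateTmpPath(base, taskId): a unique child per task.
  Path generateTmpPath(String taskId) {
    return localTmp.resolve(taskId + "-" + UUID.randomUUID());
  }

  public static void main(String[] args) throws IOException {
    ScratchDirsSketch dirs = new ScratchDirsSketch();
    System.out.println(dirs.getLocalTmpPath().resolve("plan.xml"));
    System.out.println(dirs.generateTmpPath("Stage-1"));
  }
}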