// Callable task body (anonymous class; declaration and closing of the enclosing
// class are outside this view). Runs the configured command through `exec`,
// streaming its output into `patternedLogger`, and returns the executor's
// (exitCode, output) pair. Any exception is logged and converted into a
// Pair of (-1, message) rather than propagated, so the caller always receives
// a result. NOTE(review): e.getMessage() may be null for some exception types —
// confirm downstream consumers tolerate a null second element.
@Override public Pair<Integer, String> call() throws Exception { Pair<Integer, String> result; try { result = exec.execute(cmd, patternedLogger); } catch (Exception e) { logger.error("error run spark job:", e); result = new Pair<>(-1, e.getMessage()); } return result; } };
/**
 * Executes a shell command, echoing its output to stdout.
 *
 * Convenience overload that delegates to the two-argument
 * {@code execute(command, logger)} with a default {@code SoutLogger}.
 *
 * @param command the shell command to run
 * @return a pair of (exit code, captured output)
 * @throws IOException if the command cannot be executed
 */
public Pair<Integer, String> execute(String command) throws IOException {
    final SoutLogger stdoutLogger = new SoutLogger();
    return execute(command, stdoutLogger);
}
/**
 * Runs a shell command and writes its console output into a file under the
 * export directory. Best effort: any failure is logged as a warning and
 * swallowed, so diagnostics collection never aborts the caller.
 *
 * @param cmd      shell command to execute
 * @param destDir  sub-directory of {@code exportDir} for the output file;
 *                 empty/null means {@code exportDir} itself
 * @param filename name of the file that receives the command output
 */
private void addShellOutput(String cmd, String destDir, String filename) {
    try {
        // Resolve the target directory, creating the sub-directory on demand.
        File targetDir;
        if (StringUtils.isEmpty(destDir)) {
            targetDir = exportDir;
        } else {
            targetDir = new File(exportDir, destDir);
            FileUtils.forceMkdir(targetDir);
        }
        String commandOutput = cmdExecutor.execute(cmd).getSecond();
        FileUtils.writeStringToFile(new File(targetDir, filename), commandOutput, Charset.defaultCharset());
    } catch (Exception e) {
        // Deliberate best-effort: log and continue.
        logger.warn("Failed to run command: " + cmd + ".", e);
    }
}
}
/**
 * Kills the given YARN application by shelling out to
 * {@code yarn application -kill <appId>}.
 *
 * @param appId the YARN application id to kill
 * @throws IOException          if the command cannot be executed
 * @throws InterruptedException if the executing thread is interrupted
 */
private void killApp(String appId) throws IOException, InterruptedException {
    final String killCmd = String.format(Locale.ROOT, "yarn application -kill %s", appId);
    final CliCommandExecutor executor = KylinConfig.getInstanceFromEnv().getCliCommandExecutor();
    executor.execute(killCmd);
}
/**
 * Resolves the Hadoop MapReduce classpath by invoking {@code mapred classpath}
 * and converting the ':'-separated result into a ','-separated list.
 *
 * @return the comma-separated classpath, or an empty string when the command
 *         fails (the failure is logged, not thrown)
 */
private String getDefaultMapRedClasspath() {
    try {
        CliCommandExecutor executor = KylinConfig.getInstanceFromEnv().getCliCommandExecutor();
        // Second element of the pair holds the command's console output.
        return executor.execute("mapred classpath").getSecond().trim().replace(':', ',');
    } catch (IOException e) {
        logger.error("Failed to run: 'mapred classpath'.", e);
        return "";
    }
}
/**
 * Reports whether a YARN application is still running or has finished
 * successfully. Shells out to {@code yarn application -status <id>} and parses
 * the "Key : Value" lines of its output.
 *
 * @param applicationId the YARN application id to inspect
 * @return true when State is RUNNING or Final-State is SUCCEEDED
 * @throws IOException if the yarn command cannot be executed
 */
private boolean isYarnAppSucc(String applicationId) throws IOException {
    final String statusOutput = kylinConfig.getCliCommandExecutor()
            .execute("yarn application -status " + applicationId).getSecond();
    // Collect "key : value" lines; lines without a ':' separator are skipped.
    final Map<String, String> statusFields = Maps.newHashMap();
    for (String line : StringUtil.split(statusOutput, "\n")) {
        String[] kv = StringUtil.split(line, ":");
        if (kv.length >= 2) {
            statusFields.put(kv[0].trim(), kv[1].trim());
        }
    }
    for (Map.Entry<String, String> field : statusFields.entrySet()) {
        logger.info(field.getKey() + ":" + field.getValue());
    }
    // Constant-first equals is null-safe when a key is absent; values in the
    // map are never null, so this is equivalent to containsKey + equals.
    return "RUNNING".equals(statusFields.get("State"))
            || "SUCCEEDED".equals(statusFields.get("Final-State"));
}
/**
 * Extracts the YARN application log of a job step into {@code destDir}.
 * Does nothing when the step has no MR job id; when {@code onlyFail} is set,
 * logs are skipped for applications that are running or succeeded.
 *
 * @param taskId   id of the executable whose output info is inspected
 * @param destDir  directory to write "<applicationId>.log" into (created if missing)
 * @param onlyFail when true, only collect logs for non-successful applications
 * @throws Exception if job info lookup or the success check fails
 */
private void extractJobLog(String taskId, File destDir, boolean onlyFail) throws Exception {
    final Map<String, String> jobInfo = executableDao.getJobOutput(taskId).getInfo();
    FileUtils.forceMkdir(destDir);
    if (!jobInfo.containsKey(ExecutableConstants.MR_JOB_ID)) {
        return; // no MR job id recorded — nothing to extract
    }
    // MR reports "job_..." ids; the yarn CLI wants "application_...".
    String applicationId = jobInfo.get(ExecutableConstants.MR_JOB_ID).replace("job", "application");
    if (onlyFail && isYarnAppSucc(applicationId)) {
        return; // caller only wants logs of failed applications
    }
    File destFile = new File(destDir, applicationId + ".log");
    String yarnCmd = "yarn logs -applicationId " + applicationId + " > " + destFile.getAbsolutePath();
    logger.debug(yarnCmd);
    try {
        kylinConfig.getCliCommandExecutor().execute(yarnCmd);
    } catch (Exception ex) {
        // Best-effort log collection — record the failure and move on.
        logger.warn("Failed to get yarn logs. ", ex);
    }
}
/**
 * Queries the state of a YARN application via {@code yarn application -status}.
 * The command output is funneled through a PatternedLogger, which extracts
 * recognized key/value pairs (including the application state).
 *
 * @param appId the YARN application id
 * @return the extracted state value, or null when none was matched
 * @throws IOException if the command cannot be executed
 */
private String getAppState(String appId) throws IOException {
    final PatternedLogger outputCollector = new PatternedLogger(logger);
    final String stateCmd = String.format(Locale.ROOT, "yarn application -status %s", appId);
    KylinConfig.getInstanceFromEnv().getCliCommandExecutor().execute(stateCmd, outputCollector);
    return outputCollector.getInfo().get(ExecutableConstants.YARN_APP_STATE);
}
/**
 * Runs the diag.sh diagnosis script with the given arguments.
 *
 * @param args arguments appended verbatim to the script invocation
 * @throws IOException         if the command cannot be executed
 * @throws BadRequestException when the script is missing or exits non-zero
 */
private void runDiagnosisCLI(String[] args) throws IOException {
    Message msg = MsgPicker.getMsg();
    File cwd = new File("");
    logger.debug("Current path: " + cwd.getAbsolutePath());
    logger.debug("DiagnosisInfoCLI args: " + Arrays.toString(args));
    File diagScript = new File(KylinConfig.getKylinHome() + File.separator + "bin", "diag.sh");
    if (!diagScript.exists()) {
        throw new BadRequestException(
                String.format(Locale.ROOT, msg.getDIAG_NOT_FOUND(), diagScript.getAbsolutePath()));
    }
    String diagCmd = diagScript.getAbsolutePath() + " " + StringUtils.join(args, " ");
    CliCommandExecutor executor = KylinConfig.getInstanceFromEnv().getCliCommandExecutor();
    Pair<Integer, String> cmdOutput = executor.execute(diagCmd);
    // First element of the pair is the process exit code.
    if (cmdOutput.getFirst() != 0) {
        throw new BadRequestException(msg.getGENERATE_DIAG_PACKAGE_FAIL());
    }
}
// Test override: replaces the real CLI executor with a Mockito mock whose
// execute(String) always returns null, so tests never shell out to the OS.
// The trailing '}' closes the enclosing (test) class, whose declaration is
// outside this view.
@Override protected CliCommandExecutor getCliCommandExecutor() throws IOException { CliCommandExecutor mockCli = mock(CliCommandExecutor.class); when(mockCli.execute((String) notNull())).thenReturn(null); return mockCli; } }
/**
 * Drops the intermediate Hive flat tables and deletes their external HDFS
 * data, unless Kylin is configured to keep the flat table.
 *
 * Fix: the hive command execution and the "Path ... is deleted" summary line
 * are now inside the keep-flat-table guard. Previously, when the flat table
 * was kept, an empty hive command was still executed and deletion was
 * reported in the summary even though rmdirOnHDFS was never called.
 *
 * @param config Kylin configuration providing the hive database and executor
 * @return a human-readable summary of the cleanup actions performed
 * @throws IOException if the hive command cannot be executed
 */
private String cleanUpIntermediateFlatTable(KylinConfig config) throws IOException {
    // Local, single-threaded accumulation: StringBuilder suffices.
    StringBuilder output = new StringBuilder();
    if (!config.isHiveKeepFlatTable()) {
        final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
        for (String hiveTable : this.getIntermediateTables()) {
            if (StringUtils.isNotEmpty(hiveTable)) {
                hiveCmdBuilder.addStatement("USE " + config.getHiveDatabaseForIntermediateTable() + ";");
                hiveCmdBuilder.addStatement("DROP TABLE IF EXISTS `" + hiveTable + "`;");
                output.append("Hive table " + hiveTable + " is dropped. \n");
            }
        }
        rmdirOnHDFS(getExternalDataPaths());
        config.getCliCommandExecutor().execute(hiveCmdBuilder.build());
        output.append("Path " + getExternalDataPaths() + " is deleted. \n");
    }
    return output.toString();
}
/**
 * Validates a SQL statement by handing it to hive and checking the exit code.
 *
 * @param query the SQL text to validate
 * @throws IllegalArgumentException with hive's output when validation fails
 * @throws Exception                if the command cannot be executed
 */
@Override
public void validateSQL(String query) throws Exception {
    final HiveCmdBuilder builder = new HiveCmdBuilder();
    builder.addStatement(query);
    final Pair<Integer, String> result = KylinConfig.getInstanceFromEnv().getCliCommandExecutor()
            .execute(builder.toString());
    if (result.getFirst() != 0) {
        // Non-zero exit: surface hive's console output to the caller.
        throw new IllegalArgumentException(result.getSecond());
    }
}
/**
 * Executes the pre-built sqoop command stored in the "cmd" parameter,
 * streaming output through the step logger and recording extracted job info.
 *
 * @param config Kylin configuration providing the CLI executor
 * @throws IOException      if the command cannot be executed
 * @throws RuntimeException when the command exits with a non-zero code
 */
protected void sqoopFlatHiveTable(KylinConfig config) throws IOException {
    final String command = getParam("cmd");
    stepLogger.log(String.format(Locale.ROOT, "exe cmd:%s", command));
    final Pair<Integer, String> response = config.getCliCommandExecutor().execute(command, stepLogger);
    getManager().addJobInfo(getId(), stepLogger.getInfo());
    if (response.getFirst() != 0) {
        throw new RuntimeException("Failed to create flat hive table, error code " + response.getFirst());
    }
}
/**
 * Builds and executes the sqoop import command for the flat hive table:
 * the sqoop launcher prefix, the generated config arguments, and the stored
 * "cmd" parameter.
 *
 * Fix: previously generateSqoopConfigArgString() and the stored command were
 * concatenated into the String.format FORMAT string, so any '%' they
 * contained was (mis)interpreted as a format specifier and could corrupt the
 * command or throw. Only the fixed prefix is formatted now; the rest is
 * appended verbatim.
 *
 * @param config Kylin configuration providing sqoop home and the CLI executor
 * @throws IOException      if the command cannot be executed
 * @throws RuntimeException when the command exits with a non-zero code
 */
protected void sqoopFlatHiveTable(KylinConfig config) throws IOException {
    String cmd = getParam("cmd");
    cmd = String.format(Locale.ROOT,
            "%s/bin/sqoop import -Dorg.apache.sqoop.splitter.allow_text_splitter=true ",
            config.getSqoopHome()) + generateSqoopConfigArgString() + cmd;
    stepLogger.log(String.format(Locale.ROOT, "exe cmd:%s", cmd));
    Pair<Integer, String> response = config.getCliCommandExecutor().execute(cmd, stepLogger);
    getManager().addJobInfo(getId(), stepLogger.getInfo());
    if (response.getFirst() != 0) {
        throw new RuntimeException("Failed to create flat hive table, error code " + response.getFirst());
    }
}
/**
 * Executes an HQL statement via the hive CLI.
 * Only used by Deploy Util.
 *
 * @param hql the HQL statement to run
 * @throws IOException              if the command cannot be executed
 * @throws IllegalArgumentException when hive exits with a non-zero code,
 *                                  carrying hive's error output
 */
@Override
public void executeHQL(String hql) throws IOException {
    final HiveCmdBuilder builder = new HiveCmdBuilder();
    builder.addStatement(hql);
    final Pair<Integer, String> response = KylinConfig.getInstanceFromEnv().getCliCommandExecutor()
            .execute(builder.toString());
    if (response.getFirst() != 0) {
        throw new IllegalArgumentException(
                "Failed to execute hql [" + hql + "], error message is: " + response.getSecond());
    }
}
/**
 * Runs a shell command through the configured CLI command executor.
 *
 * @param cmd the shell command to execute
 * @throws IOException if the command cannot be executed
 */
private static void execCliCommand(String cmd) throws IOException {
    final CliCommandExecutor executor = config().getCliCommandExecutor();
    executor.execute(cmd);
}
protected void createFlatHiveTable(KylinConfig config) throws IOException { final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder(); hiveCmdBuilder.overwriteHiveProps(config.getHiveConfigOverride()); hiveCmdBuilder.addStatement(getInitStatement()); hiveCmdBuilder.addStatement(getCreateTableStatement()); final String cmd = hiveCmdBuilder.toString(); stepLogger.log("Create and distribute table, cmd: "); stepLogger.log(cmd); Pair<Integer, String> response = config.getCliCommandExecutor().execute(cmd, stepLogger); Map<String, String> info = stepLogger.getInfo(); //get the flat Hive table size Matcher matcher = HDFS_LOCATION.matcher(cmd); if (matcher.find()) { String hiveFlatTableHdfsUrl = matcher.group(1); long size = getFileSize(hiveFlatTableHdfsUrl); info.put(ExecutableConstants.HDFS_BYTES_WRITTEN, "" + size); logger.info("HDFS_Bytes_Writen: " + size); } getManager().addJobInfo(getId(), info); if (response.getFirst() != 0) { throw new RuntimeException("Failed to create flat hive table, error code " + response.getFirst()); } }
/**
 * Runs the configured shell command as this executable's work unit. Output is
 * captured through a PatternedLogger whose extracted key/values are attached
 * to the job info. Exit code 0 maps to SUCCEED; any other code to a failed
 * result wrapping a ShellException; an IOException yields an error result.
 *
 * @param context execution context supplying the Kylin config
 * @return the execution outcome (never throws for command failures)
 */
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    try {
        logger.info("executing:" + getCmd());
        final PatternedLogger patternedLogger = new PatternedLogger(logger);
        final Pair<Integer, String> result = context.getConfig().getCliCommandExecutor()
                .execute(getCmd(), patternedLogger);
        getManager().addJobInfo(getId(), patternedLogger.getInfo());
        if (result.getFirst() == 0) {
            return new ExecuteResult(ExecuteResult.State.SUCCEED, result.getSecond());
        }
        return ExecuteResult.createFailed(new ShellException(result.getSecond()));
    } catch (IOException e) {
        logger.error("job:" + getId() + " execute finished with exception", e);
        return ExecuteResult.createError(e);
    }
}
/**
 * Creates the flat hive table by running the stored hive command (with any
 * configured hive property overrides), logging progress to the step logger.
 *
 * @param config Kylin configuration providing hive overrides and the executor
 * @throws IOException      if the command cannot be executed
 * @throws RuntimeException when the command exits with a non-zero code
 */
protected void createFlatHiveTable(KylinConfig config) throws IOException {
    final HiveCmdBuilder builder = new HiveCmdBuilder();
    builder.overwriteHiveProps(config.getHiveConfigOverride());
    builder.addStatement(getCmd());
    final String hiveCmd = builder.toString();
    stepLogger.log("cmd: ");
    stepLogger.log(hiveCmd);
    final Pair<Integer, String> response = config.getCliCommandExecutor().execute(hiveCmd, stepLogger);
    getManager().addJobInfo(getId(), stepLogger.getInfo());
    if (response.getFirst() != 0) {
        throw new RuntimeException("Failed to create flat hive table, error code " + response.getFirst());
    }
}
/**
 * Redistributes the flat hive table across the given number of reducers so
 * downstream steps see evenly sized files. Forces the reducer count and
 * disables map-side file merging before running the redistribute statement.
 *
 * @param config      Kylin configuration providing hive overrides and the executor
 * @param numReducers the reducer count to force via mapreduce.job.reduces
 * @throws IOException      if the command cannot be executed
 * @throws RuntimeException when the command exits with a non-zero code
 */
private void redistributeTable(KylinConfig config, int numReducers) throws IOException {
    final HiveCmdBuilder builder = new HiveCmdBuilder();
    builder.overwriteHiveProps(config.getHiveConfigOverride());
    builder.addStatement(getInitStatement());
    builder.addStatement("set mapreduce.job.reduces=" + numReducers + ";\n");
    builder.addStatement("set hive.merge.mapredfiles=false;\n");
    builder.addStatement(getRedistributeDataStatement());
    final String redistributeCmd = builder.toString();
    stepLogger.log("Redistribute table, cmd: ");
    stepLogger.log(redistributeCmd);
    final Pair<Integer, String> response = config.getCliCommandExecutor().execute(redistributeCmd, stepLogger);
    getManager().addJobInfo(getId(), stepLogger.getInfo());
    if (response.getFirst() != 0) {
        throw new RuntimeException("Failed to redistribute flat hive table");
    }
}