/**
 * Creates a {@link CliCommandExecutor}; when remote execution is configured,
 * the executor is pointed at the remote Hadoop CLI host (host, port, user, password).
 *
 * @return a freshly constructed executor, possibly configured for remote execution
 * @throws IOException declared for callers; construction itself is local
 */
public CliCommandExecutor getCliCommandExecutor() throws IOException {
    final CliCommandExecutor executor = new CliCommandExecutor();
    if (!getRunAsRemoteCommand()) {
        return executor;
    }
    executor.setRunAtRemote(getRemoteHadoopCliHostname(), getRemoteHadoopCliPort(),
            getRemoteHadoopCliUsername(), getRemoteHadoopCliPassword());
    return executor;
}
// Fragment from an enclosing method (not fully visible here): sets up a local
// command executor and wraps the logger so command output can be pattern-matched.
// NOTE(review): assumes `logger` is the enclosing class's SLF4J/Kylin logger — confirm in context.
CliCommandExecutor exec = new CliCommandExecutor(); PatternedLogger patternedLogger = new PatternedLogger(logger);
/**
 * Verifies that HiveCmdBuilder in "beeline" mode writes the statements to a temp
 * HQL file (one per line) and builds a beeline invocation referencing that file.
 *
 * Fix: the original obtained the line separator via the JDK-internal
 * sun.security.action.GetPropertyAction under AccessController, which is not
 * accessible on modern JDKs (strong encapsulation); System.lineSeparator() is
 * the supported equivalent. Also removed the unused local holding the result
 * of execute().
 */
@Test
public void testBeeline() throws IOException {
    String lineSeparator = System.lineSeparator();
    System.setProperty("kylin.source.hive.client", "beeline");
    System.setProperty("kylin.source.hive.beeline-shell", "/spark-client/bin/beeline");
    System.setProperty("kylin.source.hive.beeline-params", "-u jdbc_url");

    HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
    hiveCmdBuilder.addStatement("USE default;");
    hiveCmdBuilder.addStatement("DROP TABLE `test`;");
    hiveCmdBuilder.addStatement("SHOW TABLES;");

    String cmd = hiveCmdBuilder.build();
    // The generated command ends with "-f <hqlFile>;exit $ret_code" — extract the file path.
    String hqlFile = cmd.substring(cmd.lastIndexOf("-f ") + 3).trim();
    hqlFile = hqlFile.substring(0, hqlFile.length() - ";exit $ret_code".length());
    // The heredoc that creates the HQL file ends at the second "EOL\n" marker.
    String createFileCmd = cmd.substring(0, cmd.indexOf("EOL\n", cmd.indexOf("EOL\n") + 1) + 3);

    CliCommandExecutor cliCommandExecutor = new CliCommandExecutor();
    cliCommandExecutor.execute(createFileCmd);

    String hqlStatement = FileUtils.readFileToString(new File(hqlFile), Charset.defaultCharset());
    assertEquals(
            "USE default;" + lineSeparator + "DROP TABLE `test`;" + lineSeparator + "SHOW TABLES;" + lineSeparator,
            hqlStatement);
    assertBeelineCmd(cmd);
    FileUtils.forceDelete(new File(hqlFile));
}
// Fragment (anonymous ILogListener body continues beyond this view): logs the
// command, then prepares a single-thread executor and a remote-capable command
// executor; PatternedLogger forwards matched log lines to the listener callback.
logger.info("cmd: " + cmd); final ExecutorService executorService = Executors.newSingleThreadExecutor(); final CliCommandExecutor exec = new CliCommandExecutor(); final PatternedLogger patternedLogger = new PatternedLogger(logger, new PatternedLogger.ILogListener() { @Override
/**
 * Builds a shell command wrapper that runs {@code executeCmd} on the given remote
 * host via an internally created {@link CliCommandExecutor}, streaming output to
 * {@code out}; {@code async} selects asynchronous execution.
 */
protected ShellCmd(String executeCmd, ICommandOutput out, String host, String user, String password,
        boolean async) {
    this.executeCommand = executeCmd;
    this.output = out;
    this.isAsync = async;
    // Executor is always configured for remote execution against the supplied host.
    cliCommandExecutor = new CliCommandExecutor();
    cliCommandExecutor.setRunAtRemote(host, user, password);
}
/**
 * Creates a {@link CliCommandExecutor}; when remote execution is configured,
 * the executor targets the remote Hadoop CLI host with the configured credentials.
 *
 * @return a new executor, remote-configured when applicable
 * @throws IOException declared for callers; construction itself is local
 */
public CliCommandExecutor getCliCommandExecutor() throws IOException {
    final CliCommandExecutor executor = new CliCommandExecutor();
    if (getRunAsRemoteCommand()) {
        executor.setRunAtRemote(getRemoteHadoopCliHostname(), getRemoteHadoopCliUsername(),
                getRemoteHadoopCliPassword());
    }
    return executor;
}
protected ShellCmd(String executeCmd, ICommandOutput out, String host, String user, String password, boolean async) { this.executeCommand = executeCmd; this.output = out; cliCommandExecutor = new CliCommandExecutor(); cliCommandExecutor.setRunAtRemote(host, user, password); this.isAsync = async; }
/**
 * Returns a fresh {@link CliCommandExecutor}. If remote command execution is
 * enabled, the executor is directed at the remote Hadoop CLI host using the
 * configured username and password.
 *
 * @throws IOException declared for callers; construction itself is local
 */
public CliCommandExecutor getCliCommandExecutor() throws IOException {
    final CliCommandExecutor executor = new CliCommandExecutor();
    if (!getRunAsRemoteCommand()) {
        return executor;
    }
    executor.setRunAtRemote(getRemoteHadoopCliHostname(), getRemoteHadoopCliUsername(),
            getRemoteHadoopCliPassword());
    return executor;
}
/**
 * Returns a fresh {@link CliCommandExecutor}. When remote execution is enabled,
 * it is configured with the remote Hadoop CLI hostname, port, and credentials.
 *
 * @throws IOException declared for callers; construction itself is local
 */
public CliCommandExecutor getCliCommandExecutor() throws IOException {
    final CliCommandExecutor executor = new CliCommandExecutor();
    if (getRunAsRemoteCommand()) {
        executor.setRunAtRemote(getRemoteHadoopCliHostname(), getRemoteHadoopCliPort(),
                getRemoteHadoopCliUsername(), getRemoteHadoopCliPassword());
    }
    return executor;
}
// Fragment (anonymous ILogListener body continues beyond this view): logs the
// command, then prepares a single-thread executor and a remote-capable command
// executor; PatternedLogger forwards matched log lines to the listener callback.
logger.info("cmd: " + cmd); final ExecutorService executorService = Executors.newSingleThreadExecutor(); final CliCommandExecutor exec = new CliCommandExecutor(); final PatternedLogger patternedLogger = new PatternedLogger(logger, new PatternedLogger.ILogListener() { @Override