/** Returns the fully rendered Hive command line; equivalent to calling {@link #build()}. */
@Override
public String toString() {
    return this.build();
}
// Log which Hive table is being removed, then run the accumulated DROP statements
// through the CLI executor. NOTE(review): fragment of a larger method — `delHive`,
// `logger` and `hiveCmdBuilder` are defined in surrounding code not visible here.
logger.info("Deleting Hive table " + delHive); getCliCommandExecutor().execute(hiveCmdBuilder.build());
/**
 * Drops the intermediate Hive flat tables and removes their external HDFS paths,
 * unless the configuration asks to keep the flat table.
 *
 * Fix: previously the Hive command was executed and "Path ... is deleted" was
 * appended to the output even when {@code isHiveKeepFlatTable()} was true — an
 * empty Hive invocation was run and the report claimed a deletion that never
 * happened. Both now occur only inside the cleanup branch.
 *
 * @param config Kylin configuration providing the intermediate database name and CLI executor
 * @return a human-readable report of what was dropped/deleted (empty if nothing was cleaned up)
 * @throws IOException if removing the HDFS paths or executing the Hive command fails
 */
private String cleanUpIntermediateFlatTable(KylinConfig config) throws IOException {
    StringBuilder output = new StringBuilder();
    final List<String> hiveTables = this.getIntermediateTables();
    if (!config.isHiveKeepFlatTable()) {
        final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
        for (String hiveTable : hiveTables) {
            if (StringUtils.isNotEmpty(hiveTable)) {
                hiveCmdBuilder.addStatement("USE " + config.getHiveDatabaseForIntermediateTable() + ";");
                hiveCmdBuilder.addStatement("DROP TABLE IF EXISTS `" + hiveTable + "`;");
                output.append("Hive table " + hiveTable + " is dropped. \n");
            }
        }
        rmdirOnHDFS(getExternalDataPaths());
        config.getCliCommandExecutor().execute(hiveCmdBuilder.build());
        output.append("Path " + getExternalDataPaths() + " is deleted. \n");
    }
    return output.toString();
}
@Test
public void testBeeline() throws IOException {
    // Fix: the previous AccessController + sun.security.action.GetPropertyAction dance
    // relied on a JDK-internal class (unavailable/illegal on JDK 9+); the public
    // System.lineSeparator() returns the same value.
    String lineSeparator = System.lineSeparator();
    System.setProperty("kylin.source.hive.client", "beeline");
    System.setProperty("kylin.source.hive.beeline-shell", "/spark-client/bin/beeline");
    System.setProperty("kylin.source.hive.beeline-params", "-u jdbc_url");
    try {
        HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
        hiveCmdBuilder.addStatement("USE default;");
        hiveCmdBuilder.addStatement("DROP TABLE `test`;");
        hiveCmdBuilder.addStatement("SHOW TABLES;");
        String cmd = hiveCmdBuilder.build();
        // The command ends with "-f <hqlFile>;exit $ret_code" — recover the HQL file path.
        String hqlFile = cmd.substring(cmd.lastIndexOf("-f ") + 3).trim();
        hqlFile = hqlFile.substring(0, hqlFile.length() - ";exit $ret_code".length());
        // Everything up to and including the second "EOL" heredoc marker is the shell
        // snippet that writes the HQL file to disk.
        String createFileCmd = cmd.substring(0, cmd.indexOf("EOL\n", cmd.indexOf("EOL\n") + 1) + 3);
        CliCommandExecutor cliCommandExecutor = new CliCommandExecutor();
        // Executed for its side effect only (materializes the HQL file); the returned
        // Pair was previously stored in an unused local.
        cliCommandExecutor.execute(createFileCmd);
        String hqlStatement = FileUtils.readFileToString(new File(hqlFile), Charset.defaultCharset());
        assertEquals(
                "USE default;" + lineSeparator + "DROP TABLE `test`;" + lineSeparator + "SHOW TABLES;" + lineSeparator,
                hqlStatement);
        assertBeelineCmd(cmd);
        FileUtils.forceDelete(new File(hqlFile));
    } finally {
        // Fix: clear the properties so this test cannot leak configuration into others.
        System.clearProperty("kylin.source.hive.client");
        System.clearProperty("kylin.source.hive.beeline-shell");
        System.clearProperty("kylin.source.hive.beeline-params");
    }
}
@Test
public void testSparkSqlForTableOps() throws IOException {
    System.setProperty("kylin.source.hive.enable-sparksql-for-table-ops", "true");
    System.setProperty("kylin.source.hive.sparksql-beeline-shell", "/spark-client/bin/beeline");
    System.setProperty("kylin.source.hive.sparksql-beeline-params", "-u jdbc_url");
    try {
        HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
        hiveCmdBuilder.addStatement("USE default;");
        hiveCmdBuilder.addStatement("DROP TABLE `test`;");
        hiveCmdBuilder.addStatement("SHOW TABLES;");
        String cmd = hiveCmdBuilder.build();
        assertBeelineCmd(cmd);
    } finally {
        // Fix: previously these properties were never cleared, so enabling SparkSQL
        // table ops here silently changed the behavior of every test that ran after.
        System.clearProperty("kylin.source.hive.enable-sparksql-for-table-ops");
        System.clearProperty("kylin.source.hive.sparksql-beeline-shell");
        System.clearProperty("kylin.source.hive.sparksql-beeline-params");
    }
}
@Test
public void testHiveCLI() {
    System.setProperty("kylin.source.hive.client", "cli");
    try {
        // Base props set the engine to "mr"; the overwrite map must win with "tez",
        // as the expected command string below asserts.
        Map<String, String> hiveProps = new HashMap<>();
        hiveProps.put("hive.execution.engine", "mr");
        Map<String, String> hivePropsOverwrite = new HashMap<>();
        hivePropsOverwrite.put("hive.execution.engine", "tez");
        HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
        hiveCmdBuilder.addStatement("USE default;");
        hiveCmdBuilder.addStatement("DROP TABLE `test`;");
        hiveCmdBuilder.addStatement("SHOW\n TABLES;");
        hiveCmdBuilder.setHiveConfProps(hiveProps);
        hiveCmdBuilder.overwriteHiveProps(hivePropsOverwrite);
        assertEquals(
                "hive -e \"USE default;\nDROP TABLE \\`test\\`;\nSHOW\n TABLES;\n\" --hiveconf hive.execution.engine=tez",
                hiveCmdBuilder.build());
    } finally {
        // Fix: clear the client selection so it cannot leak into subsequent tests.
        System.clearProperty("kylin.source.hive.client");
    }
}
// Attach the fully rendered Hive command to the step and hand it back to the caller.
// NOTE(review): fragment of a larger method — `step` and `hiveCmdBuilder` are built
// by surrounding code not visible here.
step.setCmd(hiveCmdBuilder.build()); return step;
/** Human-readable form of this builder: the exact command {@link #build()} would produce. */
@Override
public String toString() {
    return this.build();
}
/**
 * Drops the intermediate Hive flat tables and removes their external HDFS paths,
 * unless the configuration asks to keep the flat table.
 *
 * Fix: previously the Hive command was executed and "Path ... is deleted" was
 * appended to the output even when {@code isHiveKeepFlatTable()} was true — an
 * empty Hive invocation was run and the report claimed a deletion that never
 * happened. Both now occur only inside the cleanup branch.
 *
 * @param config Kylin configuration providing the intermediate database name and CLI executor
 * @return a human-readable report of what was dropped/deleted (empty if nothing was cleaned up)
 * @throws IOException if removing the HDFS paths or executing the Hive command fails
 */
private String cleanUpIntermediateFlatTable(KylinConfig config) throws IOException {
    StringBuilder output = new StringBuilder();
    final List<String> hiveTables = this.getIntermediateTables();
    if (!config.isHiveKeepFlatTable()) {
        final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
        for (String hiveTable : hiveTables) {
            if (StringUtils.isNotEmpty(hiveTable)) {
                hiveCmdBuilder.addStatement("USE " + config.getHiveDatabaseForIntermediateTable() + ";");
                hiveCmdBuilder.addStatement("DROP TABLE IF EXISTS `" + hiveTable + "`;");
                output.append("Hive table " + hiveTable + " is dropped. \n");
            }
        }
        rmdirOnHDFS(getExternalDataPaths());
        config.getCliCommandExecutor().execute(hiveCmdBuilder.build());
        output.append("Path " + getExternalDataPaths() + " is deleted. \n");
    }
    return output.toString();
}
// Attach the fully rendered Hive command to the step and hand it back to the caller.
// NOTE(review): fragment of a larger method — `step` and `hiveCmdBuilder` are built
// by surrounding code not visible here.
step.setCmd(hiveCmdBuilder.build()); return step;