// Fragment: the enclosing method's signature and closing braces are outside this view; code unchanged.
// Switch to the working database, then drop every stale Hive table queued for
// deletion, logging each drop as it is queued.
hiveCmdBuilder.addStatement(useDatabaseHql);
for (String delHive : allHiveTablesNeedToBeDeleted) {
    hiveCmdBuilder.addStatement("drop table if exists " + delHive + "; ");
    logger.info("Deleting Hive table " + delHive);
/**
 * Verifies that enabling SparkSQL-for-table-ops makes the builder emit a
 * beeline-style command (checked by {@code assertBeelineCmd}).
 */
@Test
public void testSparkSqlForTableOps() throws IOException {
    System.setProperty("kylin.source.hive.enable-sparksql-for-table-ops", "true");
    System.setProperty("kylin.source.hive.sparksql-beeline-shell", "/spark-client/bin/beeline");
    System.setProperty("kylin.source.hive.sparksql-beeline-params", "-u jdbc_url");

    final HiveCmdBuilder builder = new HiveCmdBuilder();
    for (String statement : new String[] { "USE default;", "DROP TABLE `test`;", "SHOW TABLES;" }) {
        builder.addStatement(statement);
    }
    assertBeelineCmd(builder.build());
}
/**
 * Drops the intermediate flat Hive table(s) and removes their external HDFS
 * data, unless the configuration asks to keep the flat table.
 *
 * @param config Kylin configuration supplying the intermediate database name and CLI executor
 * @return a human-readable summary of what was cleaned up (empty when the flat table is kept)
 * @throws IOException if the Hive command or the HDFS deletion fails
 */
private String cleanUpIntermediateFlatTable(KylinConfig config) throws IOException {
    // StringBuilder: local accumulator, no synchronization needed (was StringBuffer).
    StringBuilder output = new StringBuilder();
    final List<String> hiveTables = this.getIntermediateTables();
    if (!config.isHiveKeepFlatTable()) {
        final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
        for (String hiveTable : hiveTables) {
            if (StringUtils.isNotEmpty(hiveTable)) {
                hiveCmdBuilder.addStatement("USE " + config.getHiveDatabaseForIntermediateTable() + ";");
                hiveCmdBuilder.addStatement("DROP TABLE IF EXISTS `" + hiveTable + "`;");
                output.append("Hive table " + hiveTable + " is dropped. \n");
            }
        }
        rmdirOnHDFS(getExternalDataPaths());
        // Previously the (possibly empty) Hive command was executed and the
        // "is deleted" message appended even when the flat table was kept;
        // both now happen only when cleanup actually occurs.
        config.getCliCommandExecutor().execute(hiveCmdBuilder.build());
        output.append("Path " + getExternalDataPaths() + " is deleted. \n");
    }
    return output.toString();
}
// Fragment: the enclosing method's signature and closing braces are outside this view; code unchanged.
// Lists the intermediate tables matching the kylin_intermediate_* prefix,
// then drops the tables collected in allHiveTablesNeedToBeDeleted.
hiveCmdBuilder.addStatement(useDatabaseHql);
// NOTE(review): the single-quoted glob presumably relies on Hive's
// "show tables 'pattern'" syntax — confirm against the Hive dialect in use.
hiveCmdBuilder.addStatement("show tables " + "\'" + MetadataConstants.KYLIN_INTERMEDIATE_PREFIX + "*\'" + "; ");
hiveCmdBuilder.addStatement(useDatabaseHql);
for (String delHive : allHiveTablesNeedToBeDeleted) {
    hiveCmdBuilder.addStatement("drop table if exists " + delHive + "; ");
    logger.info("Remove " + delHive + " from hive tables.");
/**
 * Redistributes the flat Hive table by re-running the distribute statement
 * with a fixed reducer count.
 *
 * @param config      Kylin configuration supplying Hive overrides and the CLI executor
 * @param numReducers number of reduce tasks to use for the redistribution
 * @throws IOException      if the shell command cannot be executed
 * @throws RuntimeException if the Hive command exits with a non-zero code
 */
private void redistributeTable(KylinConfig config, int numReducers) throws IOException {
    final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
    hiveCmdBuilder.overwriteHiveProps(config.getHiveConfigOverride());
    hiveCmdBuilder.addStatement(getInitStatement());
    // Pin the reducer count for the redistribute job.
    hiveCmdBuilder.addStatement("set mapreduce.job.reduces=" + numReducers + ";\n");
    // Disable map-reduce file merging so the reducer count is not undone by a merge step.
    hiveCmdBuilder.addStatement("set hive.merge.mapredfiles=false;\n");
    hiveCmdBuilder.addStatement(getRedistributeDataStatement());
    final String cmd = hiveCmdBuilder.toString();

    stepLogger.log("Redistribute table, cmd: ");
    stepLogger.log(cmd);

    Pair<Integer, String> response = config.getCliCommandExecutor().execute(cmd, stepLogger);
    getManager().addJobInfo(getId(), stepLogger.getInfo());
    if (response.getFirst() != 0) {
        // Include the exit code for diagnostics, consistent with createFlatHiveTable.
        throw new RuntimeException("Failed to redistribute flat hive table, error code " + response.getFirst());
    }
}
/**
 * Validates the given query by running it through the Hive command line;
 * a non-zero exit code is surfaced as an {@link IllegalArgumentException}
 * carrying Hive's output.
 */
@Override
public void validateSQL(String query) throws Exception {
    final HiveCmdBuilder builder = new HiveCmdBuilder();
    builder.addStatement(query);

    final Pair<Integer, String> result = KylinConfig.getInstanceFromEnv().getCliCommandExecutor()
            .execute(builder.toString());
    final int exitCode = result.getFirst();
    if (exitCode != 0) {
        throw new IllegalArgumentException(result.getSecond());
    }
}
/**
 * Verifies that in beeline mode the builder writes the statements to a temp
 * HQL file (one per line) and produces a beeline invocation referencing it.
 */
@Test
public void testBeeline() throws IOException {
    // Use the public System.lineSeparator() instead of the JDK-internal
    // sun.security.action.GetPropertyAction (inaccessible under JEP 260
    // strong encapsulation); the value is identical.
    String lineSeparator = System.lineSeparator();
    System.setProperty("kylin.source.hive.client", "beeline");
    System.setProperty("kylin.source.hive.beeline-shell", "/spark-client/bin/beeline");
    System.setProperty("kylin.source.hive.beeline-params", "-u jdbc_url");

    HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
    hiveCmdBuilder.addStatement("USE default;");
    hiveCmdBuilder.addStatement("DROP TABLE `test`;");
    hiveCmdBuilder.addStatement("SHOW TABLES;");
    String cmd = hiveCmdBuilder.build();

    // The command ends with "-f <hqlFile>;exit $ret_code" — recover the temp file path.
    String hqlFile = cmd.substring(cmd.lastIndexOf("-f ") + 3).trim();
    hqlFile = hqlFile.substring(0, hqlFile.length() - ";exit $ret_code".length());

    // Everything up to (and including) the second "EOL" marker is the heredoc
    // snippet that writes the HQL file; run just that part to create the file.
    String createFileCmd = cmd.substring(0, cmd.indexOf("EOL\n", cmd.indexOf("EOL\n") + 1) + 3);
    CliCommandExecutor cliCommandExecutor = new CliCommandExecutor();
    cliCommandExecutor.execute(createFileCmd); // unused local result removed

    String hqlStatement = FileUtils.readFileToString(new File(hqlFile), Charset.defaultCharset());
    assertEquals(
            "USE default;" + lineSeparator + "DROP TABLE `test`;" + lineSeparator + "SHOW TABLES;" + lineSeparator,
            hqlStatement);
    assertBeelineCmd(cmd);
    FileUtils.forceDelete(new File(hqlFile));
}
/**
 * Verifies CLI mode: statements are inlined into a single {@code hive -e}
 * command, backticks are escaped, and overwritten hiveconf props win.
 */
@Test
public void testHiveCLI() {
    System.setProperty("kylin.source.hive.client", "cli");

    // Base props plus an overwrite; the overwrite must win in the final command.
    Map<String, String> baseProps = new HashMap<>();
    baseProps.put("hive.execution.engine", "mr");
    Map<String, String> overrideProps = new HashMap<>();
    overrideProps.put("hive.execution.engine", "tez");

    HiveCmdBuilder builder = new HiveCmdBuilder();
    builder.addStatement("USE default;");
    builder.addStatement("DROP TABLE `test`;");
    builder.addStatement("SHOW\n TABLES;");
    builder.setHiveConfProps(baseProps);
    builder.overwriteHiveProps(overrideProps);

    String expected = "hive -e \"USE default;\nDROP TABLE \\`test\\`;\nSHOW\n TABLES;\n\" --hiveconf hive.execution.engine=tez";
    assertEquals(expected, builder.build());
}
/**
 * Runs the given HQL statement through the Hive command line.
 * Only used by Deploy Util.
 *
 * @param hql the statement to execute
 * @throws IOException              if the shell command cannot be executed
 * @throws IllegalArgumentException if Hive exits with a non-zero code; the
 *                                  message includes the hql and Hive's output
 */
@Override
public void executeHQL(String hql) throws IOException {
    final HiveCmdBuilder builder = new HiveCmdBuilder();
    builder.addStatement(hql);

    final Pair<Integer, String> result = KylinConfig.getInstanceFromEnv().getCliCommandExecutor()
            .execute(builder.toString());
    final int exitCode = result.getFirst();
    if (exitCode != 0) {
        throw new IllegalArgumentException("Failed to execute hql [" + hql + "], error message is: " + result.getSecond());
    }
}
protected void createFlatHiveTable(KylinConfig config) throws IOException { final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder(); hiveCmdBuilder.overwriteHiveProps(config.getHiveConfigOverride()); hiveCmdBuilder.addStatement(getInitStatement()); hiveCmdBuilder.addStatement(getCreateTableStatement()); final String cmd = hiveCmdBuilder.toString(); stepLogger.log("Create and distribute table, cmd: "); stepLogger.log(cmd); Pair<Integer, String> response = config.getCliCommandExecutor().execute(cmd, stepLogger); Map<String, String> info = stepLogger.getInfo(); //get the flat Hive table size Matcher matcher = HDFS_LOCATION.matcher(cmd); if (matcher.find()) { String hiveFlatTableHdfsUrl = matcher.group(1); long size = getFileSize(hiveFlatTableHdfsUrl); info.put(ExecutableConstants.HDFS_BYTES_WRITTEN, "" + size); logger.info("HDFS_Bytes_Writen: " + size); } getManager().addJobInfo(getId(), info); if (response.getFirst() != 0) { throw new RuntimeException("Failed to create flat hive table, error code " + response.getFirst()); } }
// Fragment: the enclosing method's signature and closing braces are outside this view; code unchanged.
// For each lookup view, build the HQL that materializes it into an
// intermediate table named from the job uuid, queue the statement, and
// remember the intermediate table for later cleanup.
hiveCmdBuilder.addStatement(hiveInitStatements);
for (TableDesc lookUpTableDesc : lookupViewsTables) {
    String identity = lookUpTableDesc.getIdentityQuoted("`");
    String intermediate = lookUpTableDesc.getMaterializedName(uuid);
    String materializeViewHql = materializeViewHql(intermediate, identity, jobWorkingDir);
    hiveCmdBuilder.addStatement(materializeViewHql);
    intermediateTables.add(intermediate);
/**
 * Builds the flat-table creation command from {@code getCmd()} (with Hive
 * config overrides applied) and runs it through the CLI executor, recording
 * step info on the job and failing on a non-zero exit code.
 */
protected void createFlatHiveTable(KylinConfig config) throws IOException {
    final HiveCmdBuilder builder = new HiveCmdBuilder();
    builder.overwriteHiveProps(config.getHiveConfigOverride());
    builder.addStatement(getCmd());

    final String hiveCmd = builder.toString();
    stepLogger.log("cmd: ");
    stepLogger.log(hiveCmd);

    final Pair<Integer, String> result = config.getCliCommandExecutor().execute(hiveCmd, stepLogger);
    getManager().addJobInfo(getId(), stepLogger.getInfo());
    if (result.getFirst() != 0) {
        throw new RuntimeException("Failed to create flat hive table, error code " + result.getFirst());
    }
}
/**
 * Drops the intermediate flat Hive table(s) and removes their external HDFS
 * data, unless the configuration asks to keep the flat table.
 *
 * @param config Kylin configuration supplying the intermediate database name and CLI executor
 * @return a human-readable summary of what was cleaned up (empty when the flat table is kept)
 * @throws IOException if the Hive command or the HDFS deletion fails
 */
private String cleanUpIntermediateFlatTable(KylinConfig config) throws IOException {
    // StringBuilder: local accumulator, no synchronization needed (was StringBuffer).
    StringBuilder output = new StringBuilder();
    final List<String> hiveTables = this.getIntermediateTables();
    if (!config.isHiveKeepFlatTable()) {
        final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
        for (String hiveTable : hiveTables) {
            if (StringUtils.isNotEmpty(hiveTable)) {
                hiveCmdBuilder.addStatement("USE " + config.getHiveDatabaseForIntermediateTable() + ";");
                hiveCmdBuilder.addStatement("DROP TABLE IF EXISTS `" + hiveTable + "`;");
                output.append("Hive table " + hiveTable + " is dropped. \n");
            }
        }
        rmdirOnHDFS(getExternalDataPaths());
        // Previously the (possibly empty) Hive command was executed and the
        // "is deleted" message appended even when the flat table was kept;
        // both now happen only when cleanup actually occurs.
        config.getCliCommandExecutor().execute(hiveCmdBuilder.build());
        output.append("Path " + getExternalDataPaths() + " is deleted. \n");
    }
    return output.toString();
}
// Fragment: the enclosing method's signature and closing braces are outside this view; code unchanged.
// Lists the intermediate tables matching the kylin_intermediate_* prefix,
// then drops the tables collected in allHiveTablesNeedToBeDeleted.
hiveCmdBuilder.addStatement(useDatabaseHql);
// NOTE(review): the single-quoted glob presumably relies on Hive's
// "show tables 'pattern'" syntax — confirm against the Hive dialect in use.
hiveCmdBuilder.addStatement("show tables " + "\'" + MetadataConstants.KYLIN_INTERMEDIATE_PREFIX + "*\'" + "; ");
hiveCmdBuilder.addStatement(useDatabaseHql);
for (String delHive : allHiveTablesNeedToBeDeleted) {
    hiveCmdBuilder.addStatement("drop table if exists " + delHive + "; ");
    logger.info("Remove " + delHive + " from hive tables.");
/**
 * Redistributes the flat Hive table by re-running the distribute statement
 * with a fixed reducer count.
 *
 * @param config      Kylin configuration supplying Hive overrides and the CLI executor
 * @param numReducers number of reduce tasks to use for the redistribution
 * @throws IOException      if the shell command cannot be executed
 * @throws RuntimeException if the Hive command exits with a non-zero code
 */
private void redistributeTable(KylinConfig config, int numReducers) throws IOException {
    final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
    hiveCmdBuilder.overwriteHiveProps(config.getHiveConfigOverride());
    hiveCmdBuilder.addStatement(getInitStatement());
    // Pin the reducer count for the redistribute job.
    hiveCmdBuilder.addStatement("set mapreduce.job.reduces=" + numReducers + ";\n");
    // Disable map-reduce file merging so the reducer count is not undone by a merge step.
    hiveCmdBuilder.addStatement("set hive.merge.mapredfiles=false;\n");
    hiveCmdBuilder.addStatement(getRedistributeDataStatement());
    final String cmd = hiveCmdBuilder.toString();

    stepLogger.log("Redistribute table, cmd: ");
    stepLogger.log(cmd);

    Pair<Integer, String> response = config.getCliCommandExecutor().execute(cmd, stepLogger);
    getManager().addJobInfo(getId(), stepLogger.getInfo());
    if (response.getFirst() != 0) {
        // Include the exit code for diagnostics, consistent with createFlatHiveTable.
        throw new RuntimeException("Failed to redistribute flat hive table, error code " + response.getFirst());
    }
}
/**
 * Validates the given query by running it through the Hive command line;
 * a non-zero exit code is surfaced as an {@link IllegalArgumentException}
 * carrying Hive's output.
 */
@Override
public void validateSQL(String query) throws Exception {
    final HiveCmdBuilder builder = new HiveCmdBuilder();
    builder.addStatement(query);

    final Pair<Integer, String> result = KylinConfig.getInstanceFromEnv().getCliCommandExecutor()
            .execute(builder.toString());
    final int exitCode = result.getFirst();
    if (exitCode != 0) {
        throw new IllegalArgumentException(result.getSecond());
    }
}
/**
 * Runs the given HQL statement through the Hive command line.
 * Only used by Deploy Util.
 *
 * @param hql the statement to execute
 * @throws IOException              if the shell command cannot be executed
 * @throws IllegalArgumentException if Hive exits with a non-zero code; the
 *                                  message includes the hql and Hive's output
 */
@Override
public void executeHQL(String hql) throws IOException {
    final HiveCmdBuilder builder = new HiveCmdBuilder();
    builder.addStatement(hql);

    final Pair<Integer, String> result = KylinConfig.getInstanceFromEnv().getCliCommandExecutor()
            .execute(builder.toString());
    final int exitCode = result.getFirst();
    if (exitCode != 0) {
        throw new IllegalArgumentException("Failed to execute hql [" + hql + "], error message is: " + result.getSecond());
    }
}
protected void createFlatHiveTable(KylinConfig config) throws IOException { final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder(); hiveCmdBuilder.overwriteHiveProps(config.getHiveConfigOverride()); hiveCmdBuilder.addStatement(getInitStatement()); hiveCmdBuilder.addStatement(getCreateTableStatement()); final String cmd = hiveCmdBuilder.toString(); stepLogger.log("Create and distribute table, cmd: "); stepLogger.log(cmd); Pair<Integer, String> response = config.getCliCommandExecutor().execute(cmd, stepLogger); Map<String, String> info = stepLogger.getInfo(); //get the flat Hive table size Matcher matcher = HDFS_LOCATION.matcher(cmd); if (matcher.find()) { String hiveFlatTableHdfsUrl = matcher.group(1); long size = getFileSize(hiveFlatTableHdfsUrl); info.put(ExecutableConstants.HDFS_BYTES_WRITTEN, "" + size); logger.info("HDFS_Bytes_Writen: " + size); } getManager().addJobInfo(getId(), info); if (response.getFirst() != 0) { throw new RuntimeException("Failed to create flat hive table, error code " + response.getFirst()); } }
// Fragment: the enclosing method's signature and closing braces are outside this view; code unchanged.
// For each lookup view, build the HQL that materializes it into an
// intermediate table named from the job uuid, queue the statement, and
// remember the intermediate table for later cleanup.
hiveCmdBuilder.addStatement(hiveInitStatements);
for (TableDesc lookUpTableDesc : lookupViewsTables) {
    String identity = lookUpTableDesc.getIdentityQuoted("`");
    String intermediate = lookUpTableDesc.getMaterializedName(uuid);
    String materializeViewHql = materializeViewHql(intermediate, identity, jobWorkingDir);
    hiveCmdBuilder.addStatement(materializeViewHql);
    intermediateTables.add(intermediate);