/**
 * Runs every configured HQL statement through a {@link HiveClient} built from
 * this step's configuration map.
 *
 * @param context execution context supplied by the scheduler (unused here)
 * @return a SUCCEED result when all statements ran, otherwise an ERROR result
 *         carrying the exception's localized message
 */
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    try {
        final HiveClient hiveClient = new HiveClient(getConfiguration());
        for (final String statement : getHqls()) {
            hiveClient.executeHQL(statement);
        }
        return new ExecuteResult(ExecuteResult.State.SUCCEED);
    } catch (Exception e) {
        logger.error("error run hive query:" + getHqls(), e);
        return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage());
    }
}
/**
 * Looks up the HDFS storage location of the given Hive table.
 *
 * @param database  Hive database name
 * @param tableName Hive table name
 * @return the location URI recorded in the table's storage descriptor
 * @throws Exception if the metastore lookup fails
 */
public String getHiveTableLocation(String database, String tableName) throws Exception {
    return getHiveTable(database, tableName).getSd().getLocation();
}
// Builds a client with the default configuration first, then overlays the
// caller-supplied key/value pairs on top of it via appendConfiguration.
public HiveClient(Map<String, String> configMap) {
    this();
    appendConfiguration(configMap);
}
/**
 * Resolves the HDFS location backing this Hive table.
 *
 * An explicit override from Kylin config takes precedence; otherwise the
 * location is read from the Hive metastore. When {@code needFilePath} is set,
 * the single data file under that directory is located and its full path
 * returned instead of the directory itself.
 *
 * @param needFilePath when true, return the path of the directory's only file
 * @return the table's HDFS directory, or its single file's path
 * @throws IOException if the metastore lookup or file-system access fails
 */
private String computeHDFSLocation(boolean needFilePath) throws IOException {
    // A configured override wins over whatever the metastore reports.
    String override = KylinConfig.getInstanceFromEnv().getOverrideHiveTableLocation(hiveTable);
    if (override != null) {
        logger.debug("Override hive table location " + hiveTable + " -- " + override);
        return override;
    }

    String hdfsDir;
    try {
        HiveClient hiveClient = new HiveClient();
        hdfsDir = hiveClient.getHiveTableLocation(database, hiveTable);
    } catch (Exception e) {
        // Use the logger rather than printStackTrace(); the cause is still
        // preserved in the wrapping IOException for callers.
        logger.error("Failed to get Hive table location for " + hiveTable, e);
        throw new IOException(e);
    }

    if (needFilePath) {
        FileSystem fs = HadoopUtil.getFileSystem(hdfsDir);
        FileStatus file = findOnlyFile(hdfsDir, fs);
        return file.getPath().toString();
    }
    return hdfsDir;
}
// NOTE(review): incomplete fragment — the try/catch and the trailing `if`
// are not closed within this view, so the braces below are unbalanced.
for (String tableName : tables) {
    Table table = null;
    HiveClient hiveClient = new HiveClient();
    List<FieldSchema> partitionFields = null;
    List<FieldSchema> fields = null;
    try {
        // Pull the table definition, its partition keys, and its column
        // schema from the Hive metastore.
        table = hiveClient.getHiveTable(database, tableName);
        partitionFields = table.getPartitionKeys();
        fields = hiveClient.getHiveTableFields(database, tableName);
    } catch (Exception e) {
        // NOTE(review): the exception is only printed, and the statements
        // that follow sit inside the catch in this view — if the lookup
        // failed, `table` is still null and the stat calls below would NPE.
        // Confirm brace placement against the full file.
        e.printStackTrace();
        long tableSize = hiveClient.getFileSizeForTable(table);
        long tableFileNum = hiveClient.getFileNumberForTable(table);
        TableDesc tableDesc = metaMgr.getTableDesc(database + "." + tableName);
        if (tableDesc == null) {
/**
 * Lazily creates and caches the shared {@link HiveClient}.
 *
 * @return the cached client, constructing it on first use
 */
public HiveClient getHiveClient() {
    if (hiveClient != null) {
        return hiveClient;
    }
    hiveClient = new HiveClient();
    return hiveClient;
}
/**
 * Executes each HQL statement in order; stops at the first failure.
 *
 * @param hqls statements to run sequentially
 * @throws CommandNeedRetryException if Hive asks for a command to be retried
 * @throws IOException if any statement returns a non-zero response code
 */
public void executeHQL(String[] hqls) throws CommandNeedRetryException, IOException {
    for (int i = 0; i < hqls.length; i++) {
        executeHQL(hqls[i]);
    }
}
/**
 * Reads the aggregate file size (the {@code totalSize} basic stat) recorded
 * for the given metastore table.
 */
public long getFileSizeForTable(Table table) {
    org.apache.hadoop.hive.ql.metadata.Table qlTable = new org.apache.hadoop.hive.ql.metadata.Table(table);
    return getBasicStatForTable(qlTable, StatsSetupConst.TOTAL_SIZE);
}
/**
 * Runs a single HQL statement through the Hive driver.
 *
 * @param hql the statement to execute
 * @throws CommandNeedRetryException if Hive asks for the command to be retried
 * @throws IOException if the driver reports a non-zero response code
 */
public void executeHQL(String hql) throws CommandNeedRetryException, IOException {
    final CommandProcessorResponse response = getDriver().run(hql);
    if (response.getResponseCode() != 0) {
        throw new IOException("Failed to execute hql [" + hql + "], error message is: " + response.getErrorMessage());
    }
}
// NOTE(review): incomplete fragment — the try/catch and the trailing `if`
// are not closed within this view, so the braces below are unbalanced.
for (String tableName : tables) {
    Table table = null;
    HiveClient hiveClient = new HiveClient();
    List<FieldSchema> partitionFields = null;
    List<FieldSchema> fields = null;
    try {
        // Pull the table definition, its partition keys, and its column
        // schema from the Hive metastore.
        table = hiveClient.getHiveTable(database, tableName);
        partitionFields = table.getPartitionKeys();
        fields = hiveClient.getHiveTableFields(database, tableName);
    } catch (Exception e) {
        // NOTE(review): the exception is only printed, and the statements
        // that follow sit inside the catch in this view — if the lookup
        // failed, `table` is still null and the stat calls below would NPE.
        // Confirm brace placement against the full file.
        e.printStackTrace();
        long tableSize = hiveClient.getFileSizeForTable(table);
        long tableFileNum = hiveClient.getFileNumberForTable(table);
        TableDesc tableDesc = metaMgr.getTableDesc(database + "." + tableName);
        if (tableDesc == null) {
/**
 * Executes each HQL statement in order; stops at the first failure.
 *
 * @param hqls statements to run sequentially
 * @throws CommandNeedRetryException if Hive asks for a command to be retried
 * @throws IOException if any statement returns a non-zero response code
 */
public void executeHQL(String[] hqls) throws CommandNeedRetryException, IOException {
    for (int i = 0; i < hqls.length; i++) {
        executeHQL(hqls[i]);
    }
}
/**
 * Reads the file count (the {@code numFiles} basic stat) recorded for the
 * given metastore table.
 */
public long getFileNumberForTable(Table table) {
    org.apache.hadoop.hive.ql.metadata.Table qlTable = new org.apache.hadoop.hive.ql.metadata.Table(table);
    return getBasicStatForTable(qlTable, StatsSetupConst.NUM_FILES);
}
/**
 * Runs a single HQL statement through the Hive driver.
 *
 * @param hql the statement to execute
 * @throws org.apache.hadoop.hive.ql.CommandNeedRetryException if Hive asks for
 *         the command to be retried
 * @throws java.io.IOException if the driver reports a non-zero response code
 */
public void executeHQL(String hql) throws CommandNeedRetryException, IOException {
    final CommandProcessorResponse response = getDriver().run(hql);
    if (response.getResponseCode() != 0) {
        throw new IOException("Failed to execute hql [" + hql + "], error message is: " + response.getErrorMessage());
    }
}
// NOTE(review): fragment of a sample-data bootstrap routine — the enclosing
// method declaration lies outside this view.
temp.delete();
HiveClient hiveClient = new HiveClient();
// Make sure the target database exists before creating tables in it.
hiveClient.executeHQL("CREATE DATABASE IF NOT EXISTS EDW");
// Create each sample table from its Kylin table descriptor.
hiveClient.executeHQL(generateCreateTableHql(metaMgr.getTableDesc(TABLE_CAL_DT.toUpperCase())));
hiveClient.executeHQL(generateCreateTableHql(metaMgr.getTableDesc(TABLE_CATEGORY_GROUPINGS.toUpperCase())));
hiveClient.executeHQL(generateCreateTableHql(metaMgr.getTableDesc(TABLE_KYLIN_FACT.toUpperCase())));
hiveClient.executeHQL(generateCreateTableHql(metaMgr.getTableDesc(TABLE_SELLER_TYPE_DIM.toUpperCase())));
hiveClient.executeHQL(generateCreateTableHql(metaMgr.getTableDesc(TABLE_SITES.toUpperCase())));
// Load the sample data files from tableFileDir into the tables just created.
hiveClient.executeHQL(generateLoadDataHql(TABLE_CAL_DT, tableFileDir));
hiveClient.executeHQL(generateLoadDataHql(TABLE_CATEGORY_GROUPINGS, tableFileDir));
hiveClient.executeHQL(generateLoadDataHql(TABLE_KYLIN_FACT, tableFileDir));
hiveClient.executeHQL(generateLoadDataHql(TABLE_SELLER_TYPE_DIM, tableFileDir));
hiveClient.executeHQL(generateLoadDataHql(TABLE_SITES, tableFileDir));
/**
 * Looks up the HDFS storage location of the given Hive table.
 *
 * @param database  Hive database name
 * @param tableName Hive table name
 * @return the location URI recorded in the table's storage descriptor
 * @throws Exception if the metastore lookup fails
 */
public String getHiveTableLocation(String database, String tableName) throws Exception {
    return getHiveTable(database, tableName).getSd().getLocation();
}
/**
 * Reads the file count (the {@code numFiles} basic stat) recorded for the
 * given metastore table.
 */
public long getFileNumberForTable(Table table) {
    org.apache.hadoop.hive.ql.metadata.Table qlTable = new org.apache.hadoop.hive.ql.metadata.Table(table);
    return getBasicStatForTable(qlTable, StatsSetupConst.NUM_FILES);
}
// Builds a client with the default configuration first, then overlays the
// caller-supplied key/value pairs on top of it via appendConfiguration.
public HiveClient(Map<String, String> configMap) {
    this();
    appendConfiguration(configMap);
}
/**
 * Runs every configured HQL statement through a {@link HiveClient} built from
 * this step's configuration map.
 *
 * @param context execution context supplied by the scheduler (unused here)
 * @return a SUCCEED result when all statements ran, otherwise an ERROR result
 *         carrying the exception's localized message
 */
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    try {
        final HiveClient hiveClient = new HiveClient(getConfiguration());
        for (final String statement : getHqls()) {
            hiveClient.executeHQL(statement);
        }
        return new ExecuteResult(ExecuteResult.State.SUCCEED);
    } catch (Exception e) {
        logger.error("error run hive query:" + getHqls(), e);
        return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage());
    }
}
/**
 * Reads the aggregate file size (the {@code totalSize} basic stat) recorded
 * for the given metastore table.
 */
public long getFileSizeForTable(Table table) {
    org.apache.hadoop.hive.ql.metadata.Table qlTable = new org.apache.hadoop.hive.ql.metadata.Table(table);
    return getBasicStatForTable(qlTable, StatsSetupConst.TOTAL_SIZE);
}