@Override public void configureInputJobProperties( TableDesc tableDesc, Map<String, String> jobProperties) { //Input this.configureInputJobProps = true; configureTableJobProperties(tableDesc, jobProperties); }
// NOTE(review): garbled fragment of a larger method (likely configureTableJobProperties):
// the `} catch` has no visible `try`, and the `if (isHBaseGenerateHFiles(...))` placed
// after the `throw` inside the catch block would be unreachable. Braces do not balance
// within this view -- reconcile against the full file before changing anything here.
Configuration jobConf = getJobConf(); addHBaseResources(jobConf, jobProperties); addHBaseDelegationToken(jobConf); } catch (IOException | MetaException e) { throw new IllegalStateException("Error while configuring input job properties", e); if (isHBaseGenerateHFiles(jobConf)) {
/**
 * Delegates filter-predicate decomposition to the shared HBase storage-handler logic,
 * splitting the predicate into a pushed-down part and a residual part.
 *
 * @param jobConf job configuration
 * @param deserializer the table's SerDe; cast to {@link HBaseSerDe}
 * @param predicate the filter predicate to decompose
 * @return the decomposed predicate
 */
@Override
public DecomposedPredicate decomposePredicate(JobConf jobConf, Deserializer deserializer,
    ExprNodeDesc predicate) {
  HBaseSerDe hbaseSerDe = (HBaseSerDe) deserializer;
  return HBaseStorageHandler.decomposePredicate(jobConf, hbaseSerDe, predicate);
}
}
@Override public void rollbackCreateTable(Table table) throws MetaException { boolean isExternal = MetaStoreUtils.isExternalTable(table); String tableName = getHBaseTableName(table); try { if (!isExternal && getHBaseAdmin().tableExists(tableName)) { // we have created an HBase table, so we delete it to roll back; if (getHBaseAdmin().isTableEnabled(tableName)) { getHBaseAdmin().disableTable(tableName); } getHBaseAdmin().deleteTable(tableName); } } catch (IOException ie) { throw new MetaException(StringUtils.stringifyException(ie)); } }
/**
 * Selects the output format for writes: the HFile bulk-load format when HFile
 * generation is enabled in the job configuration, otherwise the direct HBase
 * table output format.
 *
 * @return the output format class to use
 */
@Override
public Class<? extends OutputFormat> getOutputFormatClass() {
  return isHBaseGenerateHFiles(jobConf)
      ? HiveHFileOutputFormat.class
      : HiveHBaseTableOutputFormat.class;
}
// NOTE(review): fragment with no visible enclosing method (presumably delegation-token
// setup). Builds a scratch JobConf from getConf(), lets the HBase mapred utility
// populate it with HBase credentials, then merges those credentials into the real
// jobConf via the Hadoop shims. Confirm surrounding control flow in the full file.
JobConf hbaseJobConf = new JobConf(getConf()); org.apache.hadoop.hbase.mapred.TableMapReduceUtil.initCredentials(hbaseJobConf); ShimLoader.getHadoopShims().mergeCredentials(jobConf, hbaseJobConf);
/**
 * Commits a DROP TABLE: deletes the backing HBase table only when data deletion was
 * requested and the table is managed by Hive; external tables are left untouched.
 *
 * @param tbl the Hive metastore table being dropped
 * @param deleteData whether the underlying data should be removed
 * @throws MetaException wrapping any HBase I/O failure
 */
@Override
public void commitDropTable(Table tbl, boolean deleteData) throws MetaException {
  try {
    String tableName = getHBaseTableName(tbl);
    boolean isManaged = !MetaStoreUtils.isExternalTable(tbl);
    if (deleteData && isManaged) {
      // Hive owns the data: disable the HBase table if needed, then delete it.
      if (getHBaseAdmin().isTableEnabled(tableName)) {
        getHBaseAdmin().disableTable(tableName);
      }
      getHBaseAdmin().deleteTable(tableName);
    }
  } catch (IOException ie) {
    throw new MetaException(StringUtils.stringifyException(ie));
  }
}
/**
 * Returns the output format class for this table.
 * HFile generation mode routes writes through the bulk-load output format;
 * otherwise writes go directly to the HBase table.
 *
 * @return the output format class to use
 */
@Override
public Class<? extends OutputFormat> getOutputFormatClass() {
  Class<? extends OutputFormat> formatClass = HiveHBaseTableOutputFormat.class;
  if (isHBaseGenerateHFiles(jobConf)) {
    // Bulk-load path: emit HFiles rather than writing through the HBase API.
    formatClass = HiveHFileOutputFormat.class;
  }
  return formatClass;
}
// NOTE(review): duplicate of an earlier fragment; no enclosing method visible here.
// Creates a temporary JobConf, has HBase's mapred TableMapReduceUtil obtain HBase
// credentials into it, then merges those credentials into `jobConf` through the
// Hadoop shim layer. Confirm surrounding try/catch in the full file.
JobConf hbaseJobConf = new JobConf(getConf()); org.apache.hadoop.hbase.mapred.TableMapReduceUtil.initCredentials(hbaseJobConf); ShimLoader.getHadoopShims().mergeCredentials(jobConf, hbaseJobConf);
// NOTE(review): fragment -- the `if` opens here but its body and closing brace are not
// in this view. Fetches the job Configuration, adds HBase resources to the job
// properties, acquires an HBase delegation token, then branches on whether HFile
// generation is enabled. Confirm the branch contents in the full file.
Configuration jobConf = getJobConf(); addHBaseResources(jobConf, jobProperties); addHBaseDelegationToken(jobConf); if (isHBaseGenerateHFiles(jobConf)) {
@Override public void rollbackCreateTable(Table table) throws MetaException { boolean isExternal = MetaStoreUtils.isExternalTable(table); String tableName = getHBaseTableName(table); try { if (!isExternal && getHBaseAdmin().tableExists(tableName)) { // we have created an HBase table, so we delete it to roll back; if (getHBaseAdmin().isTableEnabled(tableName)) { getHBaseAdmin().disableTable(tableName); } getHBaseAdmin().deleteTable(tableName); } } catch (IOException ie) { throw new MetaException(StringUtils.stringifyException(ie)); } }
@Override public void configureOutputJobProperties( TableDesc tableDesc, Map<String, String> jobProperties) { //Output this.configureInputJobProps = false; configureTableJobProperties(tableDesc, jobProperties); }
/**
 * Splits the given predicate into HBase-pushable and residual parts by delegating to
 * the centralized logic in {@code HBaseStorageHandler}.
 *
 * @param jobConf job configuration
 * @param deserializer the table's SerDe; for an HBase-backed table this is an HBaseSerDe
 * @param predicate the filter predicate to decompose
 * @return the decomposed predicate
 */
@Override
public DecomposedPredicate decomposePredicate(JobConf jobConf, Deserializer deserializer,
    ExprNodeDesc predicate) {
  return HBaseStorageHandler.decomposePredicate(jobConf, (HBaseSerDe) deserializer, predicate);
}
}
/**
 * Finalizes a DROP TABLE. The backing HBase table is removed only for managed tables
 * when {@code deleteData} is set; external tables keep their HBase data.
 *
 * @param tbl the Hive metastore table being dropped
 * @param deleteData whether the underlying data should be removed
 * @throws MetaException wrapping any HBase I/O failure
 */
@Override
public void commitDropTable(Table tbl, boolean deleteData) throws MetaException {
  try {
    String hbaseTableName = getHBaseTableName(tbl);
    boolean external = MetaStoreUtils.isExternalTable(tbl);
    boolean dropBackingTable = deleteData && !external;
    if (dropBackingTable) {
      // Disable first if necessary -- HBase refuses to delete an enabled table.
      if (getHBaseAdmin().isTableEnabled(hbaseTableName)) {
        getHBaseAdmin().disableTable(hbaseTableName);
      }
      getHBaseAdmin().deleteTable(hbaseTableName);
    }
  } catch (IOException ie) {
    throw new MetaException(StringUtils.stringifyException(ie));
  }
}
/**
 * Populates output-side job properties by delegating to the common
 * table-property configuration routine.
 *
 * @param tableDesc descriptor of the Hive table being written
 * @param jobProperties mutable map of job properties to populate
 */
@Override
public void configureOutputJobProperties(TableDesc tableDesc, Map<String, String> jobProperties) {
  configureTableJobProperties(tableDesc, jobProperties);
}
// NOTE(review): fragment -- the `else` branch is cut off at the end of this span, and
// `tableDesc` is presumably an HTableDescriptor declared earlier in the enclosing
// method (confirm against the full file). Reads the HBase column mapping from the
// SerDe parameters, then either creates the HBase table (when it does not exist) or
// replaces tableDesc with the existing table's descriptor.
String tableName = getHBaseTableName(tbl); Map<String, String> serdeParam = tbl.getSd().getSerdeInfo().getParameters(); String hbaseColumnsMapping = serdeParam.get(HBaseSerDe.HBASE_COLUMNS_MAPPING); if (!getHBaseAdmin().tableExists(tableName)) { getHBaseAdmin().createTable(tableDesc); } else { tableDesc = getHBaseAdmin().getTableDescriptor(Bytes.toBytes(tableName));
/**
 * Populates input-side job properties by delegating to the common
 * table-property configuration routine.
 *
 * @param tableDesc descriptor of the Hive table being read
 * @param jobProperties mutable map of job properties to populate
 */
@Override
public void configureInputJobProperties(TableDesc tableDesc, Map<String, String> jobProperties) {
  configureTableJobProperties(tableDesc, jobProperties);
}
// NOTE(review): duplicate fragment; its `else` branch runs past this span, and
// `tableDesc` appears to be declared earlier in the enclosing method -- confirm in the
// full file. Looks up the HBase column mapping from SerDe parameters, creates the
// HBase table if missing, otherwise loads the existing descriptor by table name.
String tableName = getHBaseTableName(tbl); Map<String, String> serdeParam = tbl.getSd().getSerdeInfo().getParameters(); String hbaseColumnsMapping = serdeParam.get(HBaseSerDe.HBASE_COLUMNS_MAPPING); if (!getHBaseAdmin().tableExists(tableName)) { getHBaseAdmin().createTable(tableDesc); } else { tableDesc = getHBaseAdmin().getTableDescriptor(Bytes.toBytes(tableName));
@Override public void configureInputJobProperties( TableDesc tableDesc, Map<String, String> jobProperties) { //Input this.configureInputJobProps = true; configureTableJobProperties(tableDesc, jobProperties); }
@Override public void configureOutputJobProperties( TableDesc tableDesc, Map<String, String> jobProperties) { //Output this.configureInputJobProps = false; configureTableJobProperties(tableDesc, jobProperties); }