ShimLoader.getHadoopShims().mergeCredentials(jobConf, hbaseJobConf); } catch (Exception e) { throw new RuntimeException(e);
/**
 * Prepares the given {@link JobConf} for a job that reads through this storage
 * handler: ships the required dependency jars (including the jar containing
 * {@code PhoenixStorageHandler}) and merges HBase credentials into the job's
 * credential set via the Hadoop shim layer.
 *
 * @param tableDesc
 *          descriptor of the table being configured (not consulted here)
 * @param jobConf
 *          job configuration to receive the jars and merged credentials
 * @throws RuntimeException
 *           wrapping any {@link IOException} raised while adding jars or
 *           initializing/merging credentials
 */
@Override
public void configureJobConf(TableDesc tableDesc, JobConf jobConf) {
    try {
        // Make the dependency jars available on the cluster classpath.
        TableMapReduceUtil.addDependencyJars(jobConf);
        org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.addDependencyJars(
                jobConf, PhoenixStorageHandler.class);

        // Acquire HBase delegation tokens in a scratch conf, then fold them
        // into the job's credentials through the shim.
        JobConf credentialsConf = new JobConf(getConf());
        org.apache.hadoop.hbase.mapred.TableMapReduceUtil.initCredentials(credentialsConf);
        ShimLoader.getHadoopShims().mergeCredentials(jobConf, credentialsConf);
    } catch (IOException ioe) {
        throw new RuntimeException(ioe);
    }
}
/** * Merge the provided <code>Token</code> into the JobConf. * * @param jobConf * JobConf to merge token into * @param accumuloToken * The Token * @throws IOException * If the merging fails */ public void mergeTokenIntoJobConf(JobConf jobConf, Token<?> accumuloToken) throws IOException { JobConf accumuloJobConf = new JobConf(jobConf); accumuloJobConf.getCredentials().addToken(accumuloToken.getService(), accumuloToken); // Merge them together. ShimLoader.getHadoopShims().mergeCredentials(jobConf, accumuloJobConf); }
/** * Merge the provided <code>Token</code> into the JobConf. * * @param jobConf * JobConf to merge token into * @param accumuloToken * The Token * @throws IOException * If the merging fails */ public void mergeTokenIntoJobConf(JobConf jobConf, Token<?> accumuloToken) throws IOException { JobConf accumuloJobConf = new JobConf(jobConf); accumuloJobConf.getCredentials().addToken(accumuloToken.getService(), accumuloToken); // Merge them together. ShimLoader.getHadoopShims().mergeCredentials(jobConf, accumuloJobConf); }
ShimLoader.getHadoopShims().mergeCredentials(jobConf, hbaseJobConf); } catch (Exception e) { throw new RuntimeException(e);