@Override
public boolean next(K key, V value) throws IOException {
  // Pull the next record from the current underlying reader; whenever the
  // reader is exhausted (or not yet created) advance to the next one.
  for (;;) {
    if (curReader != null
        && doNextWithExceptionHandler((K) ((CombineHiveKey) key).getKey(), value)) {
      return true;
    }
    // No more underlying readers left: the combined split is exhausted.
    if (!initNextRecordReader(key)) {
      return false;
    }
  }
}
public void restoreOriginalTracker() {
  // Nothing to restore unless a tracker address was saved earlier.
  if (originalTracker == null) {
    return;
  }
  // Put the saved job-launcher RPC address back and clear the saved copy so
  // a second call is a no-op.
  ShimLoader.getHadoopShims().setJobLauncherRpcAddress(conf, originalTracker);
  originalTracker = null;
}
/**
 * Builds a JobContext for the given job via the HCatalog shim.
 * The Progressable is narrowed to a Reporter, which the shim API expects.
 */
public static JobContext createJobContext(JobConf conf, org.apache.hadoop.mapreduce.JobID id, Progressable progressable) {
  Reporter reporter = (Reporter) progressable;
  return ShimLoader.getHadoopShims().getHCatShim().createJobContext(conf, id, reporter);
}
}
/**
 * Creates a TaskAttemptContext for the given task attempt via the HCatalog
 * shim. The Progressable is cast to Reporter, which the shim call expects.
 */
public static TaskAttemptContext createTaskAttemptContext(JobConf conf, TaskAttemptID id, Progressable progressable) {
  return ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(conf, id, (Reporter) progressable);
}

// Builds a mapreduce TaskAttemptID through the shim layer.
// NOTE(review): this definition continues beyond the visible excerpt.
public static org.apache.hadoop.mapreduce.TaskAttemptID createTaskAttemptID(JobID jobId, boolean isMap, int taskId, int id) {
/**
 * Looks up the metastore JDBC password through the Hadoop shim, using the
 * METASTOREPWD configuration variable's name as the lookup key.
 */
private static String getMetastoreJdbcPasswd(HiveConf conf) throws IOException {
  String passwordKey = HiveConf.ConfVars.METASTOREPWD.varname;
  return ShimLoader.getHadoopShims().getPassword(conf, passwordKey);
}
}
/**
 * No-arg constructor; delegates to the main constructor with a fresh input
 * split shim obtained from the Hadoop shims.
 * NOTE(review): presumably required for Writable-style reflective
 * instantiation — confirm against the framework's usage.
 */
public CombineHiveInputSplit() throws IOException {
  this(ShimLoader.getHadoopShims().getCombineFileInputFormat().getInputSplitShim());
}
/**
 * Failure hook: aborts the underlying Hadoop job through the HCatalog shim.
 * The {@code location} argument is not used by this implementation.
 */
@Override
public void cleanupOnFailure(String location, Job job) throws IOException {
  ShimLoader.getHadoopShims().getHCatShim().abortJob(getOutputFormat(), job);
}
}
/**
 * Reduces a full Kerberos principal to its short name (realm and host
 * stripped) using the shim's KerberosName support.
 *
 * @throws HttpAuthenticationException if the principal cannot be parsed
 */
private String getPrincipalWithoutRealmAndHost(String fullPrincipal) throws HttpAuthenticationException {
  try {
    KerberosNameShim kerberosName = ShimLoader.getHadoopShims().getKerberosNameShim(fullPrincipal);
    return kerberosName.getShortName();
  } catch (IOException e) {
    // Surface parse/resolution failures as an authentication error, keeping
    // the original cause attached.
    throw new HttpAuthenticationException(e);
  }
}
}
// Builds a mapreduce TaskAttemptID through the shim layer so the code works
// uniformly across Hadoop versions.
public static org.apache.hadoop.mapreduce.TaskAttemptID createTaskAttemptID(JobID jobId, boolean isMap, int taskId, int id) {
  return ShimLoader.getHadoopShims().newTaskAttemptID(jobId, isMap, taskId, id);
}

// Wraps a mapreduce JobContext as a mapred JobContext.
// NOTE(review): this definition continues beyond the visible excerpt.
public static org.apache.hadoop.mapred.JobContext createJobContext(org.apache.hadoop.mapreduce.JobContext context) {
@Override
public void beforeClass(HiveTestEnvContext ctx) throws Exception {
  // Start a local (in-process) Tez mini cluster and wire its settings into
  // the test HiveConf. The boolean flag presumably enables LLAP mode —
  // confirm against getLocalMiniTezCluster's signature.
  mr1 = ShimLoader.getHadoopShims().getLocalMiniTezCluster(ctx.hiveConf, true);
  mr1.setupConfiguration(ctx.hiveConf);
}
/**
 * Loads the fair-scheduler queue for the given user when a user name is
 * supplied and the fair scheduler is in use; otherwise does nothing.
 */
@Override
public void refreshDefaultQueue(Configuration conf, String userName) throws IOException {
  // Guard clause: blank user or a non-fair scheduler means nothing to do.
  // (StringUtils.isBlank is the exact negation of isNotBlank.)
  if (StringUtils.isBlank(userName) || !isFairScheduler(conf)) {
    return;
  }
  ShimLoader.getSchedulerShims().refreshDefaultQueue(conf, userName);
}
static boolean hasEquivalentEncryption(HadoopShims.HdfsEncryptionShim encryptionShim, Path path1, Path path2) throws IOException { // Assumes these are both qualified paths are in the same FileSystem if (encryptionShim.isPathEncrypted(path1) || encryptionShim.isPathEncrypted(path2)) { if (!encryptionShim.arePathsOnSameEncryptionZone(path1, path2)) { return false; } } return true; } }
/**
 * Hands the combined split straight to Hadoop's generic combine-file record
 * reader, which iterates the per-chunk readers of class {@code rrClass}.
 */
@Override
public RecordReader getRecordReader(JobConf job, CombineFileSplit split, Reporter reporter, Class<RecordReader<K, V>> rrClass) throws IOException {
  return new CombineFileRecordReader(job, split, reporter, rrClass);
}
}
/**
 * Builds a shim wrapping a MiniMrCluster for tests.
 */
@Override
public MiniMrShim getMiniMrCluster(Configuration conf, int numberOfTaskTrackers, String nameNode, int numDir) throws IOException {
  MiniMrShim cluster = new MiniMrShim(conf, numberOfTaskTrackers, nameNode, numDir);
  return cluster;
}
/**
 * Lazily creates and caches the HCatalog shim instance.
 * NOTE(review): the lazy init is unsynchronized, so concurrent first calls
 * could each create an instance — benign only if HCatHadoopShims23 is
 * stateless; confirm before relying on single-instance semantics.
 */
@Override
public HCatHadoopShims getHCatShim() {
  if (hcatShimInstance == null) {
    hcatShimInstance = new HCatHadoopShims23();
  }
  return hcatShimInstance;
}

// HCatalog shim implementation for this Hadoop version.
// NOTE(review): the class body continues beyond the visible excerpt.
private final class HCatHadoopShims23 implements HCatHadoopShims {
/**
 * Builds a shim wrapping a MiniSparkOnYARNCluster for tests.
 */
@Override
public MiniMrShim getMiniSparkCluster(Configuration conf, int numberOfTaskTrackers, String nameNode, int numDir) throws IOException {
  MiniSparkShim cluster = new MiniSparkShim(conf, numberOfTaskTrackers, nameNode, numDir);
  return cluster;
}
/**
 * Two paths are considered on the same encryption zone when the zones
 * resolved for each path compare as equivalent.
 */
@Override
public boolean arePathsOnSameEncryptionZone(Path path1, Path path2) throws IOException {
  return equivalentEncryptionZones(getEncryptionZoneForPath(path1),
      getEncryptionZoneForPath(path2));
}
public MiniTezLocalShim(Configuration conf, boolean usingLlap) {
  // Record the mode and configuration first; both fields are in place before
  // the configuration is populated below.
  this.isLlap = usingLlap;
  this.conf = conf;
  setupConfiguration(conf);
}
/**
 * Validates output specs by delegating to the wrapped OutputFormat with a
 * job context created through the HCatalog shim (no progressable, hence the
 * null). The FileSystem argument is ignored, as its name indicates.
 */
@Override
public void checkOutputSpecs(final FileSystem ignored, final JobConf job) throws IOException {
  realOutputFormat.checkOutputSpecs(ShimLoader.getHadoopShims().getHCatShim().createJobContext(job, null));
}
/**
 * Creates a mapreduce TaskAttemptContext for the given attempt id via the
 * HCatalog shim.
 */
public static org.apache.hadoop.mapreduce.TaskAttemptContext createTaskAttemptContext(Configuration conf, org.apache.hadoop.mapreduce.TaskAttemptID id) {
  org.apache.hadoop.mapreduce.TaskAttemptContext context =
      ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(conf, id);
  return context;
}