/**
 * Copy constructor: initializes this object with every token and secret
 * held by {@code credentials}.
 *
 * @param credentials the credentials to copy
 */
public Credentials(Credentials credentials) {
  addAll(credentials);
}
/**
 * Non-destructive merge: copies every secret and token from {@code other}
 * into this object, leaving any entry that already exists here untouched.
 *
 * @param other the credentials to copy
 */
public void mergeAll(Credentials other) {
  this.addAll(other, false);
}
/**
 * Destructive merge: copies every secret and token from {@code other}
 * into this object, overwriting any entry that already exists here.
 *
 * @param other the credentials to copy
 */
public void addAll(Credentials other) {
  this.addAll(other, true);
}
/**
 * Adds the given Credentials (tokens and secrets) to this user.
 *
 * <p>Synchronizes on {@code subject} so the merge into the user's backing
 * credential store is atomic with respect to other credential updates.
 *
 * @param credentials tokens and secrets to add
 */
public void addCredentials(Credentials credentials) {
  synchronized (subject) {
    Credentials store = getCredentialsInternal();
    store.addAll(credentials);
  }
}
/** * Configure the job with the multiple output formats added. This method * should be called after all the output formats have been added and * configured and before the job submission. */ public void configure() { StringBuilder aliases = new StringBuilder(); Configuration jobConf = job.getConfiguration(); for (Entry<String, Job> entry : outputConfigs.entrySet()) { // Copy credentials job.getCredentials().addAll(entry.getValue().getCredentials()); String alias = entry.getKey(); aliases.append(alias).append(COMMA_DELIM); // Store the differing configuration for each alias in the job // as a setting. setAliasConf(alias, job, entry.getValue()); } aliases.delete(aliases.length() - COMMA_DELIM.length(), aliases.length()); jobConf.set(MO_ALIASES, aliases.toString()); }
/**
 * Custom Java deserialization: restores the wrapped mapred OutputFormat by
 * class name, rebuilds the JobConf from the stream, and re-injects both this
 * object's credentials and the current user's credentials into the JobConf.
 *
 * @param in the stream to read this object from
 * @throws IOException if the stream cannot be read
 * @throws ClassNotFoundException if the recorded output-format class is absent
 */
@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
  super.read(in);
  String hadoopOutputFormatName = in.readUTF();
  if (jobConf == null) {
    jobConf = new JobConf();
  }
  jobConf.readFields(in);
  try {
    // Class.newInstance() is deprecated (it propagates the constructor's
    // checked exceptions unchecked); invoke the no-arg constructor explicitly.
    this.mapredOutputFormat =
        (org.apache.hadoop.mapred.OutputFormat<K, V>)
            Class.forName(
                    hadoopOutputFormatName,
                    true,
                    Thread.currentThread().getContextClassLoader())
                .getDeclaredConstructor()
                .newInstance();
  } catch (Exception e) {
    throw new RuntimeException("Unable to instantiate the hadoop output format", e);
  }
  ReflectionUtils.setConf(mapredOutputFormat, jobConf);
  jobConf.getCredentials().addAll(this.credentials);
  Credentials currentUserCreds = getCredentialsFromUGI(UserGroupInformation.getCurrentUser());
  if (currentUserCreds != null) {
    jobConf.getCredentials().addAll(currentUserCreds);
  }
}
}
/**
 * Computes the Hadoop input splits and wraps each one in a HadoopInputSplit.
 *
 * @param minNumSplits the minimum number of splits, passed to Hadoop as the
 *     minimum file-input split size
 * @return one HadoopInputSplit per underlying mapreduce InputSplit
 * @throws IOException if split computation fails or is interrupted
 */
@Override
public HadoopInputSplit[] createInputSplits(int minNumSplits) throws IOException {
  configuration.setInt("mapreduce.input.fileinputformat.split.minsize", minNumSplits);
  JobContext jobContext = new JobContextImpl(configuration, new JobID());
  jobContext.getCredentials().addAll(this.credentials);
  Credentials userCreds = getCredentialsFromUGI(UserGroupInformation.getCurrentUser());
  if (userCreds != null) {
    jobContext.getCredentials().addAll(userCreds);
  }
  final List<org.apache.hadoop.mapreduce.InputSplit> rawSplits;
  try {
    rawSplits = this.mapreduceInputFormat.getSplits(jobContext);
  } catch (InterruptedException e) {
    throw new IOException("Could not get Splits.", e);
  }
  final int splitCount = rawSplits.size();
  HadoopInputSplit[] wrapped = new HadoopInputSplit[splitCount];
  for (int idx = 0; idx < splitCount; idx++) {
    wrapped[idx] = new HadoopInputSplit(idx, rawSplits.get(idx), jobContext);
  }
  return wrapped;
}
@Override public void finalizeGlobal(int parallelism) throws IOException { JobContext jobContext; TaskAttemptContext taskContext; try { TaskAttemptID taskAttemptID = TaskAttemptID.forName("attempt__0000_r_" + String.format("%" + (6 - Integer.toString(1).length()) + "s", " ").replace(" ", "0") + Integer.toString(1) + "_0"); jobContext = new JobContextImpl(this.configuration, new JobID()); taskContext = new TaskAttemptContextImpl(this.configuration, taskAttemptID); this.outputCommitter = this.mapreduceOutputFormat.getOutputCommitter(taskContext); } catch (Exception e) { throw new RuntimeException(e); } jobContext.getCredentials().addAll(this.credentials); Credentials currentUserCreds = getCredentialsFromUGI(UserGroupInformation.getCurrentUser()); if (currentUserCreds != null) { jobContext.getCredentials().addAll(currentUserCreds); } // finalize HDFS output format if (this.outputCommitter != null) { this.outputCommitter.commitJob(jobContext); } }
/**
 * Serializes the given credentials into Hadoop's token-storage wire format.
 *
 * @param credentials the credentials to serialize
 * @return a ByteBuffer wrapping exactly the serialized bytes
 * @throws IOException if writing the token storage stream fails
 */
private ByteBuffer serializeCredentials(Credentials credentials) throws IOException {
  Credentials containerCredentials = new Credentials();
  containerCredentials.addAll(credentials);
  // Renamed from containerTokens_dob: camelCase for Java naming convention,
  // consistent with the static serializeCredentials variant in this codebase.
  DataOutputBuffer containerTokensDob = new DataOutputBuffer();
  containerCredentials.writeTokenStorageToStream(containerTokensDob);
  return ByteBuffer.wrap(containerTokensDob.getData(), 0, containerTokensDob.getLength());
}
/**
 * Serializes the given credentials into Hadoop's token-storage wire format.
 *
 * @param credentials the credentials to serialize
 * @return a ByteBuffer wrapping exactly the serialized bytes
 * @throws IOException if writing the token storage stream fails
 */
public static ByteBuffer serializeCredentials(Credentials credentials) throws IOException {
  Credentials snapshot = new Credentials();
  snapshot.addAll(credentials);
  DataOutputBuffer buffer = new DataOutputBuffer();
  snapshot.writeTokenStorageToStream(buffer);
  return ByteBuffer.wrap(buffer.getData(), 0, buffer.getLength());
}
}
// Merge cred into the UGI's credentials (Credentials.addAll overwrites
// entries that already exist under the same key).
ugi.getCredentials().addAll(cred);
@Override public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException { for (String alias : getOutputFormatAliases(context)) { LOGGER.debug("Calling checkOutputSpecs for alias: " + alias); JobContext aliasContext = getJobContext(alias, context); OutputFormat<?, ?> outputFormat = getOutputFormatInstance(aliasContext); outputFormat.checkOutputSpecs(aliasContext); // Copy credentials and any new config added back to JobContext context.getCredentials().addAll(aliasContext.getCredentials()); setAliasConf(alias, context, aliasContext); } }
// Load serialized tokens/secrets from the file named by CREDENTIALS_LOCATION
// and merge them into the job's credentials (existing entries overwritten).
// NOTE(review): fileLoc is null when CREDENTIALS_LOCATION is unset, which
// would make new File(fileLoc) throw NPE — confirm callers guarantee the
// setting is present before this runs.
String fileLoc = conf.get(CREDENTIALS_LOCATION);
Credentials cred = Credentials.readTokenStorageFile(new File(fileLoc), conf);
job.getCredentials().addAll(cred);
// Copy all tokens and secrets from crd into the job's credentials.
job.getCredentials().addAll(crd);
/**
 * Copies every token and secret from {@code other} into this instance,
 * replacing any entry already stored under the same key.
 *
 * @param other the credentials to copy
 */
public void addAll(Credentials other) {
  addAll(other, true);
}
/**
 * Creates an {@link Credentials} by copying the {@link Credentials} of the
 * current user. On failure to resolve the current user, returns an empty
 * Credentials object and logs a warning instead of propagating the error.
 */
private Credentials createCredentials() {
  final Credentials copy = new Credentials();
  try {
    UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
    copy.addAll(currentUser.getCredentials());
  } catch (IOException e) {
    LOG.warn("Failed to get current user UGI. Current user credentials not added.", e);
  }
  return copy;
}
@Override public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException { for (String alias : getOutputFormatAliases(context)) { LOGGER.debug("Calling checkOutputSpecs for alias: " + alias); JobContext aliasContext = getJobContext(alias, context); OutputFormat<?, ?> outputFormat = getOutputFormatInstance(aliasContext); outputFormat.checkOutputSpecs(aliasContext); // Copy credentials and any new config added back to JobContext context.getCredentials().addAll(aliasContext.getCredentials()); setAliasConf(alias, context, aliasContext); } }