/**
 * Adapter constructor: wraps the given Hadoop mapreduce OutputFormat for use through this
 * class. The job's credentials are handed to the superclass, and the global Hadoop
 * configuration is merged into the job's configuration via {@code HadoopUtils.mergeHadoopConf}.
 *
 * @param mapreduceOutputFormat the Hadoop mapreduce output format to delegate to
 * @param job the job supplying credentials and configuration
 */
public HadoopOutputFormatBase(org.apache.hadoop.mapreduce.OutputFormat<K, V> mapreduceOutputFormat, Job job) {
    super(job.getCredentials());
    this.configuration = job.getConfiguration();
    this.mapreduceOutputFormat = mapreduceOutputFormat;
    HadoopUtils.mergeHadoopConf(this.configuration);
}
/**
 * Job-based security entry point. Simply unwraps the job's credentials and forwards to the
 * credentials-based {@code handleSecurity} overload; all actual security handling happens
 * there.
 *
 * @param job the job whose credentials are used
 * @param outputJobInfo output job description passed through unchanged
 * @param client metastore client passed through unchanged
 * @param conf configuration passed through unchanged
 * @param harRequested whether HAR archiving was requested; passed through unchanged
 * @throws Exception propagated from the delegate overload
 */
void handleSecurity(
        Job job,
        OutputJobInfo outputJobInfo,
        IMetaStoreClient client,
        Configuration conf,
        boolean harRequested)
        throws IOException, MetaException, TException, Exception {
    handleSecurity(job.getCredentials(), outputJobInfo, client, conf, harRequested);
}
/**
 * Job-based convenience overload: extracts the configuration and credentials from the job
 * and delegates to the configuration/credentials variant.
 *
 * @see org.apache.hive.hcatalog.mapreduce.HCatOutputFormat#setOutput(org.apache.hadoop.conf.Configuration, Credentials, OutputJobInfo)
 */
public static void setOutput(Job job, OutputJobInfo outputJobInfo) throws IOException {
    setOutput(job.getConfiguration(), job.getCredentials(), outputJobInfo);
}
/** * Configure the job with the multiple output formats added. This method * should be called after all the output formats have been added and * configured and before the job submission. */ public void configure() { StringBuilder aliases = new StringBuilder(); Configuration jobConf = job.getConfiguration(); for (Entry<String, Job> entry : outputConfigs.entrySet()) { // Copy credentials job.getCredentials().addAll(entry.getValue().getCredentials()); String alias = entry.getKey(); aliases.append(alias).append(COMMA_DELIM); // Store the differing configuration for each alias in the job // as a setting. setAliasConf(alias, job, entry.getValue()); } aliases.delete(aliases.length() - COMMA_DELIM.length(), aliases.length()); jobConf.set(MO_ALIASES, aliases.toString()); }
/**
 * Adapter constructor: wraps the given Hadoop mapreduce InputFormat together with its key and
 * value classes. All arguments are null-checked; the job's credentials are passed to the
 * superclass and the global Hadoop configuration is merged into the job's configuration.
 *
 * @param mapreduceInputFormat the Hadoop mapreduce input format to delegate to; must not be null
 * @param key the key class produced by the input format; must not be null
 * @param value the value class produced by the input format; must not be null
 * @param job the job supplying credentials and configuration; must not be null
 */
public HadoopInputFormatBase(org.apache.hadoop.mapreduce.InputFormat<K, V> mapreduceInputFormat, Class<K> key, Class<V> value, Job job) {
    super(Preconditions.checkNotNull(job, "Job can not be null").getCredentials());
    this.keyClass = Preconditions.checkNotNull(key);
    this.valueClass = Preconditions.checkNotNull(value);
    this.mapreduceInputFormat = Preconditions.checkNotNull(mapreduceInputFormat);
    this.configuration = job.getConfiguration();
    HadoopUtils.mergeHadoopConf(this.configuration);
}
/**
 * If secure metastore access is enabled, builds a metastore delegation token for {@code user},
 * registers it in the job's credentials under the HCat service key, and returns the token's
 * URL-encoded string form. Returns null when secure access is disabled.
 */
private String addHMSToken(Job job, String user) throws IOException, InterruptedException, TException {
    if(!secureMetastoreAccess) {
        // Insecure metastore: no token needed.
        return null;
    }
    Token<org.apache.hadoop.hive.metastore.security.DelegationTokenIdentifier> hiveToken = new Token<org.apache.hadoop.hive.metastore.security.DelegationTokenIdentifier>();
    String metastoreTokenStrForm = buildHcatDelegationToken(user);
    hiveToken.decodeFromUrlString(metastoreTokenStrForm);
    // Key the token by the HCat service name so downstream tasks can look it up.
    job.getCredentials().addToken(new Text(SecureProxySupport.HCAT_SERVICE), hiveToken);
    return metastoreTokenStrForm;
}
// NOTE(review): the following declaration is truncated in this view; its body is not visible.
private String buildHcatDelegationToken(String user) throws IOException, InterruptedException,
// Obtain HDFS delegation tokens for both sides of the copy: one for the source namenode
// (inputRoot under srcConf) and one for the destination namenode (outputRoot under destConf).
TokenCache.obtainTokensForNamenodes(job.getCredentials(), new Path[] { inputRoot }, srcConf);
// The destination may be a different cluster; build its configuration from the settings
// scoped by CONF_DEST_PREFIX.
Configuration destConf = HBaseConfiguration.createClusterConf(conf, null, CONF_DEST_PREFIX);
TokenCache.obtainTokensForNamenodes(job.getCredentials(), new Path[] { outputRoot }, destConf);
// Register the MapReduce delegation token in the job's credentials under a fixed alias.
job.getCredentials().addToken(new Text("mr token"), mrdt);
// NOTE(review): fragment — stores the obtained token in the job's credentials keyed by the
// cluster id; the enclosing statement and try block are not fully visible in this view.
user.getName() + " on cluster " + clusterId.toString()); job.getCredentials().addToken(clusterId, token); } catch (IOException ioe) { throw ioe;
// Merge the supplied credentials into the job, then cache the job's (now merged) credentials
// under this storer's signature key, and mark the storer location as set.
job.getCredentials().addAll(crd);
jobCredentials.put(INNER_SIGNATURE_PREFIX + "_" + sign, job.getCredentials());
udfProps.put(HCatConstants.HCAT_PIG_STORER_LOCATION_SET, true);
// Load serialized credentials from the file named by CREDENTIALS_LOCATION and merge them
// into the job's credentials.
String fileLoc = conf.get(CREDENTIALS_LOCATION);
Credentials cred = Credentials.readTokenStorageFile(new File(fileLoc), conf);
job.getCredentials().addAll(cred);
// Two-way merge: fold crd into the job's credentials, then fold the job's complete credential
// set back into crd before caching crd under the signature key.
job.getCredentials().addAll(crd);
crd.addAll(job.getCredentials());
jobCredentials.put(INNER_SIGNATURE_PREFIX + "_" + signature, crd);
/**
 * Ensures the given map reduce job carries an HBase authentication token for the given user.
 * An existing token is reused when one is found; otherwise a fresh token is obtained from the
 * cluster. The resulting token is stored in the job's credentials keyed by its service name.
 *
 * @param conn The HBase cluster connection
 * @param user The user for whom to obtain the token
 * @param job The job instance in which the token should be stored
 * @throws IOException If making a remote call to the authentication service fails
 * @throws InterruptedException If executing as the given user is interrupted
 */
public static void addTokenForJob(final Connection conn, User user, Job job)
        throws IOException, InterruptedException {
    Token<AuthenticationTokenIdentifier> cached = getAuthToken(conn.getConfiguration(), user);
    // Fall back to obtaining a fresh token only when no cached one exists.
    Token<AuthenticationTokenIdentifier> token = (cached != null) ? cached : obtainToken(conn, user);
    job.getCredentials().addToken(token.getService(), token);
}
/**
 * Verifies that credentials attached to the login UGI propagate into a newly constructed Job:
 * a token and a secret key added to the login user must be retrievable (as the same object
 * instances) from the job's credentials.
 */
@Test
public void testUGICredentialsPropogation() throws Exception {
    Credentials creds = new Credentials();
    Token<?> token = mock(Token.class);
    Text tokenService = new Text("service");
    Text secretName = new Text("secret");
    byte secret[] = new byte[]{};
    creds.addToken(tokenService, token);
    creds.addSecretKey(secretName, secret);
    // Attach to the login user; Job construction is expected to pick these up.
    UserGroupInformation.getLoginUser().addCredentials(creds);
    JobConf jobConf = new JobConf();
    // NOTE(review): new Job(jobConf) is deprecated in favor of Job.getInstance(jobConf);
    // kept as-is here since the test may deliberately exercise this constructor.
    Job job = new Job(jobConf);
    assertSame(token, job.getCredentials().getToken(tokenService));
    assertSame(secret, job.getCredentials().getSecretKey(secretName));
}
}
// Store the Hadoop delegation token in the job's credentials, keyed by its service name.
job.getCredentials().addToken(hadoopToken.getService(), hadoopToken);
// Store the Hadoop delegation token in the job's credentials, keyed by its service name.
job.getCredentials().addToken(hadoopToken.getService(), hadoopToken);
/**
 * Delegating overload that unwraps the job into its configuration and credentials before
 * calling the primary variant.
 *
 * @see org.apache.hive.hcatalog.mapreduce.HCatOutputFormat#setOutput(org.apache.hadoop.conf.Configuration, Credentials, OutputJobInfo)
 */
public static void setOutput(Job job, OutputJobInfo outputJobInfo) throws IOException {
    setOutput(job.getConfiguration(), job.getCredentials(), outputJobInfo);
}
/**
 * Constructs the wrapper for a Hadoop mapreduce OutputFormat. Credentials come from the job;
 * the job's configuration is kept and enriched with the global Hadoop configuration through
 * {@code HadoopUtils.mergeHadoopConf}.
 *
 * @param mapreduceOutputFormat the wrapped Hadoop mapreduce output format
 * @param job the job providing credentials and configuration
 */
public HadoopOutputFormatBase(org.apache.hadoop.mapreduce.OutputFormat<K, V> mapreduceOutputFormat, Job job) {
    super(job.getCredentials());
    this.mapreduceOutputFormat = mapreduceOutputFormat;
    this.configuration = job.getConfiguration();
    // Merge the global Hadoop configuration into the job's configuration in place.
    HadoopUtils.mergeHadoopConf(this.configuration);
}
/**
 * Thin wrapper: extracts the credentials from the job and hands all arguments to the
 * credentials-based {@code handleSecurity} overload, which performs the real work.
 *
 * @param job job whose credentials are extracted
 * @param outputJobInfo forwarded unchanged
 * @param client forwarded unchanged
 * @param conf forwarded unchanged
 * @param harRequested forwarded unchanged
 * @throws Exception whatever the delegate overload throws
 */
void handleSecurity(
        Job job,
        OutputJobInfo outputJobInfo,
        IMetaStoreClient client,
        Configuration conf,
        boolean harRequested)
        throws IOException, MetaException, TException, Exception {
    handleSecurity(job.getCredentials(), outputJobInfo, client, conf, harRequested);
}
/**
 * Constructs the wrapper for a Hadoop mapreduce InputFormat with its key/value classes.
 * Every argument is validated as non-null; the superclass receives the job's credentials and
 * the global Hadoop configuration is merged into the job's configuration.
 *
 * @param mapreduceInputFormat the wrapped Hadoop mapreduce input format; non-null
 * @param key key class of the records produced; non-null
 * @param value value class of the records produced; non-null
 * @param job the job providing credentials and configuration; non-null
 */
public HadoopInputFormatBase(org.apache.hadoop.mapreduce.InputFormat<K, V> mapreduceInputFormat, Class<K> key, Class<V> value, Job job) {
    super(Preconditions.checkNotNull(job, "Job can not be null").getCredentials());
    this.mapreduceInputFormat = Preconditions.checkNotNull(mapreduceInputFormat);
    this.configuration = job.getConfiguration();
    this.keyClass = Preconditions.checkNotNull(key);
    this.valueClass = Preconditions.checkNotNull(value);
    // Fold the global Hadoop configuration into the job configuration in place.
    HadoopUtils.mergeHadoopConf(this.configuration);
}