@Override public Collection<String> getStringCollection(String name) { return encapsulated.getStringCollection(name); }
@Override public Collection<String> getStringCollection(String name) { return conf.getStringCollection(name); }
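For orientation, here is a minimal, self-contained sketch of the behavior these delegating overrides expose: Hadoop's Configuration.getStringCollection splits a comma-separated property value into a collection of strings. The property names demo.schemes and demo.missing are made up for illustration.

import java.util.Collection;
import org.apache.hadoop.conf.Configuration;

public class GetStringCollectionDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration(false); // skip loading default resources
    conf.set("demo.schemes", "s3a,gs,wasb");       // hypothetical key and value
    Collection<String> schemes = conf.getStringCollection("demo.schemes");
    System.out.println(schemes); // prints [s3a, gs, wasb]
    // an unset key yields an empty collection, not null
    System.out.println(conf.getStringCollection("demo.missing").isEmpty()); // true
  }
}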
public static boolean isBlobStorageScheme(final Configuration conf, final String scheme) {
  Collection<String> supportedBlobStoreSchemes =
      conf.getStringCollection(HiveConf.ConfVars.HIVE_BLOBSTORE_SUPPORTED_SCHEMES.varname);
  return supportedBlobStoreSchemes.contains(scheme);
}
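A hedged usage sketch for the helper above, assuming it lives in Hive's BlobStorageUtils and that hive.blobstore.supported.schemes carries its usual default of "s3a":

HiveConf conf = new HiveConf();
boolean onBlobStore = BlobStorageUtils.isBlobStorageScheme(conf, "s3a"); // true with the default scheme list
boolean onHdfs = BlobStorageUtils.isBlobStorageScheme(conf, "hdfs");     // false unless "hdfs" is added to the list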
public static void addOutputFormat(Configuration cfg, Class<? extends OutputFormat>... formats) {
  Collection<String> of = cfg.getStringCollection(CFG_FIELD);
  for (Class<? extends OutputFormat> format : formats) {
    of.add(format.getName());
  }
  cfg.setStrings(CFG_FIELD, StringUtils.join(of, ","));
}
/**
 * Adds the AvroSerialization scheme to the configuration, so SerializationFactory
 * instances constructed from the given configuration will be aware of it.
 *
 * @param conf The configuration to add AvroSerialization to.
 */
public static void addToConfiguration(Configuration conf) {
  Collection<String> serializations = conf.getStringCollection("io.serializations");
  if (!serializations.contains(AvroSerialization.class.getName())) {
    serializations.add(AvroSerialization.class.getName());
    conf.setStrings("io.serializations",
        serializations.toArray(new String[serializations.size()]));
  }
}
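Usage sketch for the registration helper above, assuming it is AvroSerialization.addToConfiguration as in Avro's Hadoop support: call it before constructing a SerializationFactory from the same configuration, e.g. during job setup. The ordering is the point; the surrounding code is illustrative.

Configuration conf = new Configuration();
AvroSerialization.addToConfiguration(conf);
SerializationFactory factory = new SerializationFactory(conf);
// serializations listed under io.serializations, now including
// AvroSerialization, are visible to the factory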
public Object run() throws IOException, URISyntaxException {
  Credentials creds = new Credentials();
  // get tokens for the default FS; not all FSs support delegation tokens, e.g. WASB
  collectTokens(FileSystem.get(conf), twrapper, creds, ugi.getShortUserName());
  // get tokens for all other known FSs, since Hive tables may resolve to different ones;
  // passing "creds" prevents duplicate tokens from being added
  Collection<String> URIs = conf.getStringCollection("mapreduce.job.hdfs-servers");
  for (String uri : URIs) {
    LOG.debug("Getting tokens for " + uri);
    collectTokens(FileSystem.get(new URI(uri), conf), twrapper, creds, ugi.getShortUserName());
  }
  return null;
}
/**
 * Returns a classpath string built from the content of the "tmpjars" value in {@code conf}.
 * Also exposed to shell scripts via `bin/hbase mapredcp`.
 */
public static String buildDependencyClasspath(Configuration conf) {
  if (conf == null) {
    throw new IllegalArgumentException("Must provide a configuration object.");
  }
  Set<String> paths = new HashSet<>(conf.getStringCollection("tmpjars"));
  if (paths.isEmpty()) {
    throw new IllegalArgumentException("Configuration contains no tmpjars.");
  }
  StringBuilder sb = new StringBuilder();
  for (String s : paths) {
    // entries can take the form 'file:/path/to/file.jar'
    int idx = s.indexOf(":");
    if (idx != -1) s = s.substring(idx + 1);
    if (sb.length() > 0) sb.append(File.pathSeparator);
    sb.append(s);
  }
  return sb.toString();
}
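To make the transformation concrete, a small driver for the helper above (in HBase this method lives on TableMapReduceUtil; a static import is assumed here). Entries lose everything up to the first colon, i.e. their URI scheme, and are joined with the platform path separator; ordering is unspecified because the entries pass through a HashSet.

Configuration conf = new Configuration(false);
conf.set("tmpjars", "file:/opt/lib/a.jar,file:/opt/lib/b.jar");
// on Linux prints the two paths joined with ':', e.g. /opt/lib/a.jar:/opt/lib/b.jar
System.out.println(buildDependencyClasspath(conf));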
public static List<KeyProvider> getProviders(Configuration conf) throws IOException {
  List<KeyProvider> result = new ArrayList<KeyProvider>();
  for (String path : conf.getStringCollection(KEY_PROVIDER_PATH)) {
    try {
      URI uri = new URI(path);
      KeyProvider kp = get(uri, conf);
      if (kp != null) {
        result.add(kp);
      } else {
        throw new IOException("No KeyProviderFactory for " + uri + " in " + KEY_PROVIDER_PATH);
      }
    } catch (URISyntaxException error) {
      throw new IOException("Bad configuration of " + KEY_PROVIDER_PATH + " at " + path, error);
    }
  }
  return result;
}
@SuppressWarnings("SameParameterValue") static void addDependencyJars(Configuration conf, Class<?>... classes) throws IOException { FileSystem localFs = FileSystem.getLocal(conf); Set<String> jars = new HashSet<>(conf.getStringCollection("tmpjars")); for (Class<?> clazz : classes) { if (clazz == null) { continue; } final String path = Utilities.jarFinderGetJar(clazz); if (path == null) { throw new RuntimeException("Could not find jar for class " + clazz + " in order to ship it to the cluster."); } if (!localFs.exists(new Path(path))) { throw new RuntimeException("Could not validate jar file " + path + " for class " + clazz); } jars.add(path); } if (jars.isEmpty()) { return; } //noinspection ToArrayCallWithZeroLengthArrayArgument conf.set("tmpjars", StringUtils.arrayToString(jars.toArray(new String[jars.size()]))); }
public static void addDependencyJars(Configuration conf, List<Class<?>> classes) throws IOException {
  FileSystem localFs = FileSystem.getLocal(conf);
  Set<String> jars = new HashSet<String>();
  // Add jars that are already in the tmpjars variable
  jars.addAll(conf.getStringCollection("tmpjars"));
  // Add jars as we find them to a map of contents jar name so that we can
  // avoid creating new jars for classes that have already been packaged.
  Map<String, String> packagedClasses = new HashMap<String, String>();
  // Add jars containing the specified classes
  for (Class<?> clazz : classes) {
    if (clazz == null) {
      continue;
    }
    Path path = findOrCreateJar(clazz, localFs, packagedClasses);
    if (path == null) {
      log.warn("Could not find jar for class " + clazz + " in order to ship it to the cluster.");
      continue;
    }
    if (!localFs.exists(path)) {
      log.warn("Could not validate jar file " + path + " for class " + clazz);
      continue;
    }
    jars.add(path.toString());
  }
  if (!jars.isEmpty()) {
    conf.set("tmpjars", StringUtils.join(jars, ","));
  }
}
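Usage sketch for the variant above: callers accumulate dependency jars into "tmpjars", which the MapReduce client ships to the cluster and adds to the task classpath. The listed classes are hypothetical placeholders.

Configuration conf = new Configuration();
addDependencyJars(conf, Arrays.asList(MyMapper.class, ThirdPartyCodec.class)); // hypothetical classes
// conf.get("tmpjars") now holds a comma-separated list of local jar paths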
@Test
public void testAppendsObserver() {
  conf.set(MASTER_COPROCESSOR_CONF_KEY, AccessController.class.getName());
  master.updateConfigurationForQuotasObserver(conf);
  Set<String> coprocs = new HashSet<>(conf.getStringCollection(MASTER_COPROCESSOR_CONF_KEY));
  assertEquals(2, coprocs.size());
  assertTrue("Observed coprocessors were: " + coprocs,
      coprocs.contains(AccessController.class.getName()));
  assertTrue("Observed coprocessors were: " + coprocs,
      coprocs.contains(MasterQuotasObserver.class.getName()));
}
@Override
public void init(FileSystem fs, Path path, Configuration conf, boolean overwritable,
    long blocksize) throws IOException, CommonFSUtils.StreamLacksCapabilityException {
  Collection<String> operations = conf.getStringCollection(ALLOWED_OPERATIONS);
  if (operations.isEmpty() || operations.contains(AllowedOperations.all.name())) {
    doAppends = doSyncs = true;
  } else if (operations.contains(AllowedOperations.none.name())) {
    doAppends = doSyncs = false;
  } else {
    doAppends = operations.contains(AllowedOperations.append.name());
    doSyncs = operations.contains(AllowedOperations.sync.name());
  }
  LOG.info("IOTestWriter initialized with appends " + (doAppends ? "enabled" : "disabled")
      + " and syncs " + (doSyncs ? "enabled" : "disabled"));
  super.init(fs, path, conf, overwritable, blocksize);
}
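A configuration sketch for the writer above. ALLOWED_OPERATIONS (its literal key name is not shown in the snippet) takes a comma-separated subset of the AllowedOperations enum values; the alternatives below are shown one per line.

Configuration conf = new Configuration();
conf.set(ALLOWED_OPERATIONS, "append");       // appends enabled, syncs disabled
conf.set(ALLOWED_OPERATIONS, "append,sync");  // both enabled
conf.set(ALLOWED_OPERATIONS, "none");         // both disabled
// leaving the key unset, or setting it to "all", enables both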
@Test
public void testNoCredentialProviderWithPassword() throws Exception {
  setupConfigs(false, false, true, false);
  Assert.assertTrue(StringUtils.isBlank(jobConf.get(HADOOP_CREDENTIAL_PROVIDER_PATH_CONFIG)));
  Assert.assertNull(getValueFromJobConf(jobConf.get(JobConf.MAPRED_MAP_TASK_ENV),
      HADOOP_CREDENTIAL_PASSWORD_ENVVAR));
  Assert.assertNull(getValueFromJobConf(jobConf.get(JobConf.MAPRED_REDUCE_TASK_ENV),
      HADOOP_CREDENTIAL_PASSWORD_ENVVAR));
  Assert.assertNull(getValueFromJobConf(jobConf.get(MRJobConfig.MR_AM_ADMIN_USER_ENV),
      HADOOP_CREDENTIAL_PASSWORD_ENVVAR));
  REDACTED_PROPERTIES.forEach(property -> Assert.assertFalse(
      jobConf.getStringCollection(MRJobConfig.MR_JOB_REDACTED_PROPERTIES).contains(property)));
}
@Test
public void testNoCredentialProvider() throws Exception {
  setupConfigs(false, false, false, false);
  assertTrue(StringUtils.isBlank(jobConf.get(HADOOP_CREDENTIAL_PROVIDER_PATH_CONFIG)));
  assertNull(getValueFromJobConf(jobConf.get(JobConf.MAPRED_MAP_TASK_ENV),
      HADOOP_CREDENTIAL_PASSWORD_ENVVAR));
  assertNull(getValueFromJobConf(jobConf.get(JobConf.MAPRED_REDUCE_TASK_ENV),
      HADOOP_CREDENTIAL_PASSWORD_ENVVAR));
  assertNull(getValueFromJobConf(jobConf.get(MRJobConfig.MR_AM_ADMIN_USER_ENV),
      HADOOP_CREDENTIAL_PASSWORD_ENVVAR));
  REDACTED_PROPERTIES.forEach(property -> Assert.assertFalse(
      jobConf.getStringCollection(MRJobConfig.MR_JOB_REDACTED_PROPERTIES).contains(property)));
}
@Test
public void testCredentialProviderWithNoPasswords() throws Exception {
  setupConfigs(true, false, false, true);
  HiveConfUtil.updateJobCredentialProviders(jobConf);
  Assert.assertEquals(JOB_CREDSTORE_LOCATION, jobConf.get(HADOOP_CREDENTIAL_PROVIDER_PATH_CONFIG));
  Assert.assertNull(jobConf.get(JobConf.MAPRED_MAP_TASK_ENV));
  Assert.assertNull(jobConf.get(JobConf.MAPRED_REDUCE_TASK_ENV));
  Assert.assertNull(jobConf.get(MRJobConfig.MR_AM_ADMIN_USER_ENV));
  REDACTED_PROPERTIES.forEach(property -> Assert.assertFalse(
      jobConf.getStringCollection(MRJobConfig.MR_JOB_REDACTED_PROPERTIES).contains(property)));

  resetConfig();
  setupConfigs(true, false, false, false);
  HiveConfUtil.updateJobCredentialProviders(jobConf);
  Assert.assertEquals(HADOOP_CREDSTORE_LOCATION,
      jobConf.get(HADOOP_CREDENTIAL_PROVIDER_PATH_CONFIG));
  Assert.assertNull(jobConf.get(JobConf.MAPRED_MAP_TASK_ENV));
  Assert.assertNull(jobConf.get(JobConf.MAPRED_REDUCE_TASK_ENV));
  Assert.assertNull(jobConf.get(MRJobConfig.MR_AM_ADMIN_USER_ENV));
  REDACTED_PROPERTIES.forEach(property -> Assert.assertFalse(
      jobConf.getStringCollection(MRJobConfig.MR_JOB_REDACTED_PROPERTIES).contains(property)));
}
@Test
public void testJobCredentialProviderUnset() throws Exception {
  setupConfigs(true, true, false, false);
  HiveConfUtil.updateJobCredentialProviders(jobConf);
  assertEquals(HADOOP_CREDSTORE_LOCATION, jobConf.get(HADOOP_CREDENTIAL_PROVIDER_PATH_CONFIG));
  assertEquals(HADOOP_CREDSTORE_PASSWORD_ENVVAR_VAL, getValueFromJobConf(
      jobConf.get(JobConf.MAPRED_MAP_TASK_ENV), HADOOP_CREDENTIAL_PASSWORD_ENVVAR));
  assertEquals(HADOOP_CREDSTORE_PASSWORD_ENVVAR_VAL, getValueFromJobConf(
      jobConf.get(JobConf.MAPRED_REDUCE_TASK_ENV), HADOOP_CREDENTIAL_PASSWORD_ENVVAR));
  Assert.assertEquals(HADOOP_CREDSTORE_PASSWORD_ENVVAR_VAL, getValueFromJobConf(
      jobConf.get(MRJobConfig.MR_AM_ADMIN_USER_ENV), HADOOP_CREDENTIAL_PASSWORD_ENVVAR));
  Assert.assertTrue(jobConf.getStringCollection(MRJobConfig.MR_JOB_REDACTED_PROPERTIES)
      .containsAll(REDACTED_PROPERTIES));
}
@Test
public void testJobCredentialProviderWithDefaultPassword() throws Exception {
  setupConfigs(false, true, false, true);
  HiveConfUtil.updateJobCredentialProviders(jobConf);
  Assert.assertEquals(JOB_CREDSTORE_LOCATION, jobConf.get(HADOOP_CREDENTIAL_PROVIDER_PATH_CONFIG));
  Assert.assertEquals(HADOOP_CREDSTORE_PASSWORD_ENVVAR_VAL, getValueFromJobConf(
      jobConf.get(JobConf.MAPRED_MAP_TASK_ENV), HADOOP_CREDENTIAL_PASSWORD_ENVVAR));
  Assert.assertEquals(HADOOP_CREDSTORE_PASSWORD_ENVVAR_VAL, getValueFromJobConf(
      jobConf.get(JobConf.MAPRED_REDUCE_TASK_ENV), HADOOP_CREDENTIAL_PASSWORD_ENVVAR));
  Assert.assertEquals(HADOOP_CREDSTORE_PASSWORD_ENVVAR_VAL, getValueFromJobConf(
      jobConf.get(MRJobConfig.MR_AM_ADMIN_USER_ENV), HADOOP_CREDENTIAL_PASSWORD_ENVVAR));
  Assert.assertTrue(jobConf.getStringCollection(MRJobConfig.MR_JOB_REDACTED_PROPERTIES)
      .containsAll(REDACTED_PROPERTIES));
}
@Test
public void testHadoopCredentialProvider() throws Exception {
  setupConfigs(true, true, true, false);
  HiveConfUtil.updateJobCredentialProviders(jobConf);
  Assert.assertEquals(HADOOP_CREDSTORE_LOCATION,
      jobConf.get(HADOOP_CREDENTIAL_PROVIDER_PATH_CONFIG));
  // make sure MAP task environment points to HADOOP_CREDSTORE_PASSWORD
  Assert.assertEquals(HADOOP_CREDSTORE_PASSWORD_ENVVAR_VAL, getValueFromJobConf(
      jobConf.get(JobConf.MAPRED_MAP_TASK_ENV), HADOOP_CREDENTIAL_PASSWORD_ENVVAR));
  // make sure REDUCE task environment points to HADOOP_CREDSTORE_PASSWORD
  Assert.assertEquals(HADOOP_CREDSTORE_PASSWORD_ENVVAR_VAL, getValueFromJobConf(
      jobConf.get(JobConf.MAPRED_REDUCE_TASK_ENV), HADOOP_CREDENTIAL_PASSWORD_ENVVAR));
  Assert.assertEquals(HADOOP_CREDSTORE_PASSWORD_ENVVAR_VAL, getValueFromJobConf(
      jobConf.get(MRJobConfig.MR_AM_ADMIN_USER_ENV), HADOOP_CREDENTIAL_PASSWORD_ENVVAR));
  Assert.assertTrue(jobConf.getStringCollection(MRJobConfig.MR_JOB_REDACTED_PROPERTIES)
      .containsAll(REDACTED_PROPERTIES));
}
@Test
public void testJobCredentialProvider() throws Exception {
  setupConfigs(true, true, true, true);
  HiveConfUtil.updateJobCredentialProviders(jobConf);
  // make sure credential provider path points to HIVE_SERVER2_JOB_CREDSTORE_LOCATION
  Assert.assertEquals(JOB_CREDSTORE_LOCATION, jobConf.get(HADOOP_CREDENTIAL_PROVIDER_PATH_CONFIG));
  // make sure MAP task environment points to HIVE_JOB_CREDSTORE_PASSWORD
  Assert.assertEquals(HIVE_JOB_CREDSTORE_PASSWORD_ENVVAR_VAL, getValueFromJobConf(
      jobConf.get(JobConf.MAPRED_MAP_TASK_ENV), HADOOP_CREDENTIAL_PASSWORD_ENVVAR));
  // make sure REDUCE task environment points to HIVE_JOB_CREDSTORE_PASSWORD
  Assert.assertEquals(HIVE_JOB_CREDSTORE_PASSWORD_ENVVAR_VAL, getValueFromJobConf(
      jobConf.get(JobConf.MAPRED_REDUCE_TASK_ENV), HADOOP_CREDENTIAL_PASSWORD_ENVVAR));
  Assert.assertEquals(HIVE_JOB_CREDSTORE_PASSWORD_ENVVAR_VAL, getValueFromJobConf(
      jobConf.get(MRJobConfig.MR_AM_ADMIN_USER_ENV), HADOOP_CREDENTIAL_PASSWORD_ENVVAR));
  Assert.assertTrue(jobConf.getStringCollection(MRJobConfig.MR_JOB_REDACTED_PROPERTIES)
      .containsAll(REDACTED_PROPERTIES));
}