private void unCacheDataNucleusClassLoaders() {
  try {
    // Only an embedded (local) metastore holds a DataNucleus PMF inside this process.
    boolean isLocalMetastore = HiveConfUtil.isEmbeddedMetaStore(
        MetastoreConf.getVar(sessionConf, MetastoreConf.ConfVars.THRIFT_URIS));
    if (isLocalMetastore) {
      String rawStoreImpl =
          MetastoreConf.getVar(sessionConf, MetastoreConf.ConfVars.RAW_STORE_IMPL);
      // CachedStore wraps the real RawStore; resolve the implementation it delegates to.
      String realStoreImpl;
      if (rawStoreImpl.equals(CachedStore.class.getName())) {
        realStoreImpl =
            MetastoreConf.getVar(sessionConf, MetastoreConf.ConfVars.CACHED_RAW_STORE_IMPL);
      } else {
        realStoreImpl = rawStoreImpl;
      }
      Class<?> clazz = Class.forName(realStoreImpl);
      if (ObjectStore.class.isAssignableFrom(clazz)) {
        PersistenceManagerProvider.clearOutPmfClassLoaderCache();
      }
    }
  } catch (Exception e) {
    LOG.info("Failed to remove classloaders from DataNucleus", e);
  }
}
private void logConfigurations(JobConf localJobConf) {
  if (LOG.isDebugEnabled()) {
    LOG.debug("Logging job configuration: ");
    StringBuilder outWriter = new StringBuilder();
    // redact sensitive information before logging
    HiveConfUtil.dumpConfig(localJobConf, outWriter);
    LOG.debug(outWriter.toString());
  }
}
String credstorePassword = getJobCredentialProviderPassword(jobConf);
if (credstorePassword != null) {
  String execEngine = jobConf.get(ConfVars.HIVE_EXECUTION_ENGINE.varname);
  addKeyValuePair(jobConf, property, Constants.HADOOP_CREDENTIAL_PASSWORD_ENVVAR,
      credstorePassword);
  redactedProperties.add(property);
/**
 * Searches the given configuration object and replaces all the configuration values for keys
 * defined in hive.conf.hidden.list with the empty String.
 *
 * @param conf - Configuration object which needs to be modified to remove sensitive keys
 */
public static void stripConfigurations(Configuration conf) {
  Set<String> hiddenSet = getHiddenSet(conf);
  stripConfigurations(conf, hiddenSet);
}
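A minimal sketch of the one-argument overload in use, assuming getHiddenSet derives the key set from hive.conf.hidden.list on the same Configuration; the property key below is just an illustrative secret, not a key this code requires:

// imports: org.apache.hadoop.conf.Configuration, org.apache.hadoop.hive.conf.HiveConfUtil
Configuration conf = new Configuration(false);
conf.set("hive.conf.hidden.list", "javax.jdo.option.ConnectionPassword");
conf.set("javax.jdo.option.ConnectionPassword", "s3cret");

HiveConfUtil.stripConfigurations(conf);
// The key is kept but its value is now the empty String, so the conf is safe to print or dump.
assert conf.get("javax.jdo.option.ConnectionPassword").isEmpty();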
private SparkJobRef submit(final DriverContext driverContext, final SparkWork sparkWork)
    throws Exception {
  final Context ctx = driverContext.getCtx();
  final HiveConf hiveConf = (HiveConf) ctx.getConf();
  refreshLocalResources(sparkWork, hiveConf);
  final JobConf jobConf = new JobConf(hiveConf);

  // update the credential provider location in the jobConf
  HiveConfUtil.updateJobCredentialProviders(jobConf);

  // Create temporary scratch dir
  final Path emptyScratchDir = ctx.getMRTmpPath();
  FileSystem fs = emptyScratchDir.getFileSystem(jobConf);
  fs.mkdirs(emptyScratchDir);

  // make sure NullScanFileSystem can be loaded - HIVE-18442
  jobConf.set("fs." + NullScanFileSystem.getBaseScheme() + ".impl",
      NullScanFileSystem.class.getCanonicalName());

  byte[] jobConfBytes = KryoSerializer.serializeJobConf(jobConf);
  byte[] scratchDirBytes = KryoSerializer.serialize(emptyScratchDir);
  byte[] sparkWorkBytes = KryoSerializer.serialize(sparkWork);

  JobStatusJob job = new JobStatusJob(jobConfBytes, scratchDirBytes, sparkWorkBytes);
  if (driverContext.isShutdown()) {
    throw new HiveException("Operation is cancelled.");
  }

  JobHandle<Serializable> jobHandle = remoteClient.submit(job);
  RemoteSparkJobStatus sparkJobStatus = new RemoteSparkJobStatus(remoteClient, jobHandle,
      sparkClientTimtout);
  return new RemoteSparkJobRef(hiveConf, jobHandle, sparkJobStatus);
}
if (HiveConfUtil.isEmbeddedMetaStore(msUri)) {
  setLoadMetastoreConfig(true);
  hiddenSet.addAll(HiveConfUtil.getHiddenSet(this));
  setupRSCList();
/**
 * Strips hidden config entries from configuration.
 */
public void stripHiddenConfigurations(Configuration conf) {
  HiveConfUtil.stripConfigurations(conf, hiddenSet);
}
String password = HiveConfUtil.getJobCredentialProviderPassword(hiveConf);
if (password != null) {
  addCredentialProviderPassword(sparkConf, password);
private SparkJobRef submit(final DriverContext driverContext, final SparkWork sparkWork)
    throws Exception {
  final Context ctx = driverContext.getCtx();
  final HiveConf hiveConf = (HiveConf) ctx.getConf();
  refreshLocalResources(sparkWork, hiveConf);
  final JobConf jobConf = new JobConf(hiveConf);

  // update the credential provider location in the jobConf
  HiveConfUtil.updateJobCredentialProviders(jobConf);

  // Create temporary scratch dir
  final Path emptyScratchDir = ctx.getMRTmpPath();
  FileSystem fs = emptyScratchDir.getFileSystem(jobConf);
  fs.mkdirs(emptyScratchDir);

  byte[] jobConfBytes = KryoSerializer.serializeJobConf(jobConf);
  byte[] scratchDirBytes = KryoSerializer.serialize(emptyScratchDir);
  byte[] sparkWorkBytes = KryoSerializer.serialize(sparkWork);

  JobStatusJob job = new JobStatusJob(jobConfBytes, scratchDirBytes, sparkWorkBytes);
  if (driverContext.isShutdown()) {
    throw new HiveException("Operation is cancelled.");
  }

  JobHandle<Serializable> jobHandle = remoteClient.submit(job);
  RemoteSparkJobStatus sparkJobStatus = new RemoteSparkJobStatus(remoteClient, jobHandle,
      sparkClientTimtout);
  return new RemoteSparkJobRef(hiveConf, jobHandle, sparkJobStatus);
}
public static void dumpConfig(Configuration originalConf, StringBuilder sb) {
  Set<String> hiddenSet = getHiddenSet(originalConf);
  sb.append("Values omitted for security reasons if present: ").append(hiddenSet).append("\n");
  // Work on a copy so the caller's configuration is not mutated by the redaction.
  Configuration conf = new Configuration(originalConf);
  stripConfigurations(conf, hiddenSet);
  List<Map.Entry<String, String>> configVals = new ArrayList<>();
  for (Map.Entry<String, String> entry : conf) {
    configVals.add(entry);
  }
  configVals.sort(Map.Entry.comparingByKey());
  for (Map.Entry<String, String> entry : configVals) {
    // use get() to make sure variable substitution works
    if (entry.getKey().toLowerCase().contains("path")) {
      // Path-like values are split so each classpath/pathlist element gets its own line.
      StringTokenizer st = new StringTokenizer(conf.get(entry.getKey()), File.pathSeparator);
      sb.append(entry.getKey()).append("=\n");
      while (st.hasMoreTokens()) {
        sb.append(" ").append(st.nextToken()).append(File.pathSeparator).append('\n');
      }
    } else {
      sb.append(entry.getKey()).append('=').append(conf.get(entry.getKey())).append('\n');
    }
  }
}
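A short usage sketch: hidden values are blanked before anything is appended, and keys containing "path" are split one element per line. The two property keys below are only illustrative; any key named in hive.conf.hidden.list would be stripped the same way:

// imports: org.apache.hadoop.conf.Configuration, org.apache.hadoop.hive.conf.HiveConfUtil
Configuration conf = new Configuration(false);
conf.set("hive.conf.hidden.list", "fs.s3a.secret.key");
conf.set("fs.s3a.secret.key", "abc123"); // blanked in the dump
conf.set("mapreduce.application.classpath", "/a.jar:/b.jar"); // split on File.pathSeparator

StringBuilder sb = new StringBuilder();
HiveConfUtil.dumpConfig(conf, sb);
System.out.println(sb); // safe to log: the secret value has been stripped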
/**
 * Guts of the servlet - extracted for easy testing.
 */
static void writeResponse(Configuration conf, Writer out, String format)
    throws IOException, BadFormatException {
  // redact the sensitive information from the configuration values
  Configuration hconf = new Configuration(conf);
  HiveConfUtil.stripConfigurations(hconf);
  if (FORMAT_JSON.equals(format)) {
    Configuration.dumpConfiguration(hconf, out);
  } else if (FORMAT_XML.equals(format)) {
    hconf.writeXml(out);
  } else {
    throw new BadFormatException("Bad format: " + format);
  }
}
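Because writeResponse is package-private precisely so it can be tested, a minimal test-style sketch might look like the following. It assumes FORMAT_JSON is the literal "json", as in Hadoop's ConfServlet, and that the caller sits in the servlet's package:

// imports: java.io.StringWriter, org.apache.hadoop.conf.Configuration
Configuration conf = new Configuration(false);
conf.set("hive.conf.hidden.list", "javax.jdo.option.ConnectionPassword");
conf.set("javax.jdo.option.ConnectionPassword", "s3cret");

StringWriter out = new StringWriter();
writeResponse(conf, out, "json");
// The secret value must not survive into the dumped JSON.
assert !out.toString().contains("s3cret");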
@Test
public void testCredentialProviderWithNoPasswords() throws Exception {
  setupConfigs(true, false, false, true);
  HiveConfUtil.updateJobCredentialProviders(jobConf);
  Assert.assertEquals(JOB_CREDSTORE_LOCATION,
      jobConf.get(HADOOP_CREDENTIAL_PROVIDER_PATH_CONFIG));
  Assert.assertNull(jobConf.get(JobConf.MAPRED_MAP_TASK_ENV));
  Assert.assertNull(jobConf.get(JobConf.MAPRED_REDUCE_TASK_ENV));
  Assert.assertNull(jobConf.get(MRJobConfig.MR_AM_ADMIN_USER_ENV));
  REDACTED_PROPERTIES.forEach(property -> Assert.assertFalse(
      jobConf.getStringCollection(MRJobConfig.MR_JOB_REDACTED_PROPERTIES)
          .contains(property)));

  resetConfig();
  setupConfigs(true, false, false, false);
  HiveConfUtil.updateJobCredentialProviders(jobConf);
  Assert.assertEquals(HADOOP_CREDSTORE_LOCATION,
      jobConf.get(HADOOP_CREDENTIAL_PROVIDER_PATH_CONFIG));
  Assert.assertNull(jobConf.get(JobConf.MAPRED_MAP_TASK_ENV));
  Assert.assertNull(jobConf.get(JobConf.MAPRED_REDUCE_TASK_ENV));
  Assert.assertNull(jobConf.get(MRJobConfig.MR_AM_ADMIN_USER_ENV));
  REDACTED_PROPERTIES.forEach(property -> Assert.assertFalse(
      jobConf.getStringCollection(MRJobConfig.MR_JOB_REDACTED_PROPERTIES)
          .contains(property)));
}
localMetaStore = HiveConfUtil.isEmbeddedMetaStore(msUri);
if (localMetaStore) {
/**
 * Dumps all env and config state. Should be called once on WebHCat startup to facilitate
 * support/debugging. Later it may be worth adding a REST call which will return this data.
 */
private String dumpEnvironment() {
  StringBuilder sb = TempletonUtils.dumpPropMap("========WebHCat System.getenv()========",
      System.getenv());
  sb.append("START========WebHCat AppConfig.iterator()========: \n");
  HiveConfUtil.dumpConfig(this, sb);
  sb.append("END========WebHCat AppConfig.iterator()========: \n");
  sb.append(TempletonUtils.dumpPropMap("========WebHCat System.getProperties()========",
      System.getProperties()));
  sb.append(HiveConfUtil.dumpConfig(new HiveConf()));
  return sb.toString();
}
String credStorePassword = getJobCredentialProviderPassword(jobConf);
if (credStorePassword != null) {
  // Propagate the credential store password to map tasks, reduce tasks, and the MR AM.
  addKeyValuePair(jobConf, JobConf.MAPRED_MAP_TASK_ENV,
      Constants.HADOOP_CREDENTIAL_PASSWORD_ENVVAR, credStorePassword);
  addKeyValuePair(jobConf, JobConf.MAPRED_REDUCE_TASK_ENV,
      Constants.HADOOP_CREDENTIAL_PASSWORD_ENVVAR, credStorePassword);
  addKeyValuePair(jobConf, MRJobConfig.MR_AM_ADMIN_USER_ENV,
      Constants.HADOOP_CREDENTIAL_PASSWORD_ENVVAR, credStorePassword);
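The addKeyValuePair helper itself is not among these snippets, so here is a hedged sketch only of the behavior that testExistingConfiguration below pins down: set KEY=value inside the comma-separated env property, replacing any stale KEY entry while leaving the other entries intact. The body is an assumption, not the real implementation:

// import: org.apache.hadoop.mapred.JobConf
static void addKeyValuePair(JobConf jobConf, String property, String key, String value) {
  String existing = jobConf.get(property);
  String pair = key + "=" + value;
  if (existing == null || existing.isEmpty()) {
    jobConf.set(property, pair);
    return;
  }
  StringBuilder sb = new StringBuilder();
  for (String entry : existing.split(",")) {
    String trimmed = entry.trim();
    if (!trimmed.startsWith(key + "=")) { // drop a pre-existing KEY=... entry, if any
      if (sb.length() > 0) {
        sb.append(',');
      }
      sb.append(trimmed);
    }
  }
  if (sb.length() > 0) {
    sb.append(',');
  }
  sb.append(pair);
  jobConf.set(property, sb.toString());
}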
@Test
public void testHideStringVar() throws Exception {
  Assert.assertEquals("aaa", conf.get("dummy"));
  HiveConfUtil.stripConfigurations(conf, Sets.newHashSet("dummy"));
  Assert.assertEquals("", conf.get("dummy"));
}
@Test
public void testExistingConfiguration() throws Exception {
  jobConf.set(JobConf.MAPRED_MAP_TASK_ENV, "k1=v1, k2=v2, HADOOP_CREDSTORE_PASSWORD=test");
  setupConfigs(false, true, false, true);
  HiveConfUtil.updateJobCredentialProviders(jobConf);
  assertEquals("v1", getValueFromJobConf(jobConf.get(JobConf.MAPRED_MAP_TASK_ENV), "k1"));
  assertEquals("v2", getValueFromJobConf(jobConf.get(JobConf.MAPRED_MAP_TASK_ENV), "k2"));

  resetConfig();
  jobConf.set(JobConf.MAPRED_MAP_TASK_ENV, "k1=v1, HADOOP_CREDSTORE_PASSWORD=test, k2=v2");
  setupConfigs(false, true, false, true);
  HiveConfUtil.updateJobCredentialProviders(jobConf);
  assertEquals("v1", getValueFromJobConf(jobConf.get(JobConf.MAPRED_MAP_TASK_ENV), "k1"));
  assertEquals("v2", getValueFromJobConf(jobConf.get(JobConf.MAPRED_MAP_TASK_ENV), "k2"));

  resetConfig();
  jobConf.set(JobConf.MAPRED_MAP_TASK_ENV, "HADOOP_CREDSTORE_PASSWORD=test, k1=v1, k2=v2");
  setupConfigs(false, true, false, true);
  HiveConfUtil.updateJobCredentialProviders(jobConf);
  assertEquals("v1", getValueFromJobConf(jobConf.get(JobConf.MAPRED_MAP_TASK_ENV), "k1"));
  assertEquals("v2", getValueFromJobConf(jobConf.get(JobConf.MAPRED_MAP_TASK_ENV), "k2"));
}