if (flinkConfig != null) { LOG.debug("Loading Hadoop configuration for swift native file system"); hadoopConfig = HadoopUtils.getHadoopConfiguration(flinkConfig);
/** * Creates a new Recoverable writer. * @param fs The Hadoop file system on which the writer operates. */ public HadoopRecoverableWriter(org.apache.hadoop.fs.FileSystem fs) { this.fs = checkNotNull(fs); // This writer is only supported on a subset of file systems, and on // specific versions. We check these schemes and versions eagerly for // better error messages. if (!"hdfs".equalsIgnoreCase(fs.getScheme()) || !HadoopUtils.isMinHadoopVersion(2, 7)) { throw new UnsupportedOperationException( "Recoverable writers on Hadoop are only supported for HDFS and for Hadoop version 2.7 or newer"); } }
if (!HadoopUtils.hasHDFSDelegationToken()) { LOG.warn("Hadoop security is enabled but current login user does not have Kerberos credentials");
if (!HadoopUtils.hasHDFSDelegationToken()) { LOG.warn("Hadoop security is enabled but current login user does not have Kerberos credentials");
hadoopConfig = HadoopUtils.getHadoopConfiguration(flinkConfig); this.hadoopConfig = hadoopConfig;
if (!HadoopUtils.hasHDFSDelegationToken()) { LOG.warn("Hadoop security is enabled but current login user does not have Kerberos credentials");
/** Skips the enclosing test class when the Hadoop version on the classpath is below 2.7. */
@BeforeClass
public static void testHadoopVersion() {
    final boolean isAtLeastHadoop27 = HadoopUtils.isMinHadoopVersion(2, 7);
    Assume.assumeTrue(isAtLeastHadoop27);
}
null, new File(fileLocation), HadoopUtils.getHadoopConfiguration(flinkConfig));
/**
 * Creates a new Recoverable writer.
 *
 * <p>Validates the target file system eagerly: only the "hdfs" scheme on
 * Hadoop 2.7 or newer is accepted — presumably because recoverable writes
 * depend on HDFS-specific APIs introduced in that version (TODO confirm).
 *
 * @param fs The Hadoop file system on which the writer operates; must not be null.
 * @throws UnsupportedOperationException if the file system scheme is not "hdfs"
 *     or the Hadoop version is older than 2.7.
 */
public HadoopRecoverableWriter(org.apache.hadoop.fs.FileSystem fs) {
    this.fs = checkNotNull(fs);

    // This writer is only supported on a subset of file systems, and on
    // specific versions. We check these schemes and versions eagerly for
    // better error messages.
    if (!"hdfs".equalsIgnoreCase(fs.getScheme()) || !HadoopUtils.isMinHadoopVersion(2, 7)) {
        throw new UnsupportedOperationException(
                "Recoverable writers on Hadoop are only supported for HDFS and for Hadoop version 2.7 or newer");
    }
}
/**
 * Verifies that, with an empty Flink configuration, the Hadoop configuration
 * is loaded from the defaults found on the classpath.
 */
@Test
public void loadFromClasspathByDefault() {
    final org.apache.hadoop.conf.Configuration conf =
            HadoopUtils.getHadoopConfiguration(new Configuration());

    assertEquals(IN_CP_CONFIG_VALUE, conf.get(IN_CP_CONFIG_KEY, null));
}
@Test public void loadFromHadoopConfEntry() throws Exception { final String k1 = "singing?"; final String v1 = "rain!"; final String k2 = "dancing?"; final String v2 = "shower!"; final File confDir = tempFolder.newFolder(); final File file1 = new File(confDir, "core-site.xml"); final File file2 = new File(confDir, "hdfs-site.xml"); printConfig(file1, k1, v1); printConfig(file2, k2, v2); final Configuration cfg = new Configuration(); cfg.setString(ConfigConstants.PATH_HADOOP_CONFIG, confDir.getAbsolutePath()); org.apache.hadoop.conf.Configuration hadoopConf = HadoopUtils.getHadoopConfiguration(cfg); // contains extra entries assertEquals(v1, hadoopConf.get(k1, null)); assertEquals(v2, hadoopConf.get(k2, null)); // also contains classpath defaults assertEquals(IN_CP_CONFIG_VALUE, hadoopConf.get(IN_CP_CONFIG_KEY, null)); }
@Test public void loadFromLegacyConfigEntries() throws Exception { final String k1 = "shipmate"; final String v1 = "smooth sailing"; final String k2 = "pirate"; final String v2 = "Arrg, yer scurvy dog!"; final File file1 = tempFolder.newFile("core-site.xml"); final File file2 = tempFolder.newFile("hdfs-site.xml"); printConfig(file1, k1, v1); printConfig(file2, k2, v2); final Configuration cfg = new Configuration(); cfg.setString(ConfigConstants.HDFS_DEFAULT_CONFIG, file1.getAbsolutePath()); cfg.setString(ConfigConstants.HDFS_SITE_CONFIG, file2.getAbsolutePath()); org.apache.hadoop.conf.Configuration hadoopConf = HadoopUtils.getHadoopConfiguration(cfg); // contains extra entries assertEquals(v1, hadoopConf.get(k1, null)); assertEquals(v2, hadoopConf.get(k2, null)); // also contains classpath defaults assertEquals(IN_CP_CONFIG_VALUE, hadoopConf.get(IN_CP_CONFIG_KEY, null)); }
try { CommonTestUtils.setEnv(newEnv); hadoopConf = HadoopUtils.getHadoopConfiguration(new Configuration());
/**
 * Creates the Hadoop security module for the given security configuration.
 *
 * <p>Returns {@code null} (meaning "no module") instead of throwing when Hadoop
 * is not usable, so that deployments without Hadoop on the classpath keep working.
 *
 * @param securityConfig the security configuration whose Flink config is used to
 *     build the Hadoop configuration
 * @return a new {@link HadoopModule}, or {@code null} if Hadoop is absent or broken
 */
@Override
public SecurityModule createModule(SecurityConfiguration securityConfig) {
    // First check if we have Hadoop in the ClassPath. If not, we simply don't do anything.
    // initialize=false: only probe for the class, do not run its static initializers.
    try {
        Class.forName(
            "org.apache.hadoop.conf.Configuration",
            false,
            HadoopModule.class.getClassLoader());
    } catch (ClassNotFoundException e) {
        LOG.info("Cannot create Hadoop Security Module because Hadoop cannot be found in the Classpath.");
        return null;
    }

    try {
        Configuration hadoopConfiguration = HadoopUtils.getHadoopConfiguration(securityConfig.getFlinkConfig());
        return new HadoopModule(securityConfig, hadoopConfiguration);
    } catch (LinkageError e) {
        // LinkageError (not Exception) is caught on purpose: it signals an
        // incomplete/incompatible Hadoop on the classpath despite the probe above.
        LOG.error("Cannot create Hadoop Security Module.", e);
        return null;
    }
}
}
@Override public SecurityModule createModule(SecurityConfiguration securityConfig) { // First check if we have Hadoop in the ClassPath. If not, we simply don't do anything. try { Class.forName( "org.apache.hadoop.conf.Configuration", false, HadoopModule.class.getClassLoader()); } catch (ClassNotFoundException e) { LOG.info("Cannot create Hadoop Security Module because Hadoop cannot be found in the Classpath."); return null; } try { Configuration hadoopConfiguration = HadoopUtils.getHadoopConfiguration(securityConfig.getFlinkConfig()); return new HadoopModule(securityConfig, hadoopConfiguration); } catch (LinkageError e) { LOG.error("Cannot create Hadoop Security Module.", e); return null; } } }
@Override public SecurityModule createModule(SecurityConfiguration securityConfig) { // First check if we have Hadoop in the ClassPath. If not, we simply don't do anything. try { Class.forName( "org.apache.hadoop.conf.Configuration", false, HadoopModule.class.getClassLoader()); } catch (ClassNotFoundException e) { LOG.info("Cannot create Hadoop Security Module because Hadoop cannot be found in the Classpath."); return null; } try { Configuration hadoopConfiguration = HadoopUtils.getHadoopConfiguration(securityConfig.getFlinkConfig()); return new HadoopModule(securityConfig, hadoopConfiguration); } catch (LinkageError e) { LOG.error("Cannot create Hadoop Security Module.", e); return null; } } }
hadoopConfig = HadoopUtils.getHadoopConfiguration(flinkConfig); this.hadoopConfig = hadoopConfig;
hadoopConfig = HadoopUtils.getHadoopConfiguration(flinkConfig); this.hadoopConfig = hadoopConfig;
null, new File(fileLocation), HadoopUtils.getHadoopConfiguration(flinkConfig));
null, new File(fileLocation), HadoopUtils.getHadoopConfiguration(flinkConfig));