/**
 * Resolves the list of configuration XML files from the processor context and
 * installs the resulting Hadoop Configuration as the process-wide default.
 *
 * @param context the NiFi process context supplying the CONF_XML_FILES property
 * @throws IOException if the configuration files cannot be read
 */
@OnScheduled
protected void setDefaultConfiguration(ProcessContext context) throws IOException {
    String configFiles =
        context.getProperty(CONF_XML_FILES).evaluateAttributeExpressions().getValue();
    DefaultConfiguration.set(getConfiguration(configFiles));
}
private static void addToConfiguration(Properties hadoopConfiguration) { // base the new Configuration on the current defaults Configuration conf = new Configuration(DefaultConfiguration.get()); // add all of the properties as config settings for (String key : hadoopConfiguration.stringPropertyNames()) { String value = hadoopConfiguration.getProperty(key); conf.set(key, value); } // replace the original Configuration DefaultConfiguration.set(conf); addedConf = true; }
/** * Starts the services in order, passing the previous service's modified * Configuration object to the next. * * @throws IOException */ public void start() throws IOException, InterruptedException { for (Service service : services) { service.configure(serviceConfig); logger.info("Running Minicluster Service: " + service.getClass().getName()); service.start(); serviceConfig.setHadoopConf(service.getHadoopConf()); // set the default configuration so that the minicluster is used DefaultConfiguration.set(serviceConfig.getHadoopConf()); } logger.info("All Minicluster Services running."); }
/**
 * Builds the Kite view to read from, based on the job Configuration: resolves
 * the entity class from KITE_TYPE, installs the Configuration as the default,
 * applies an optional reader schema projection, and loads KITE_INPUT_URI.
 *
 * @param conf the job configuration carrying the Kite input settings
 * @return the view of the input dataset, projected and typed as configured
 * @throws TypeNotFoundException if the configured entity class is not on the classpath
 */
@SuppressWarnings({"deprecation", "unchecked"})
private static <E> View<E> load(Configuration conf) {
    Class<E> type;
    try {
        type = (Class<E>) conf.getClass(KITE_TYPE, GenericData.Record.class);
    } catch (RuntimeException e) {
        // Configuration wraps a missing class in a RuntimeException; surface
        // it as a domain-specific error while preserving the original cause.
        if (e.getCause() instanceof ClassNotFoundException) {
            throw new TypeNotFoundException(String.format(
                "The Java class %s for the entity type could not be found",
                conf.get(KITE_TYPE)), e.getCause());
        }
        throw e;
    }
    DefaultConfiguration.set(conf);
    String schemaStr = conf.get(KITE_READER_SCHEMA);
    Schema projection =
        (schemaStr == null) ? null : new Schema.Parser().parse(schemaStr);
    String inputUri = conf.get(KITE_INPUT_URI);
    if (projection == null) {
        return Datasets.load(inputUri, type);
    }
    return Datasets.load(inputUri).asSchema(projection).asType(type);
}
// Restore the Hadoop Configuration that was in place before this test class
// ran, so later test classes are not affected by changes made here.
// NOTE(review): assumes 'original' was captured in a @BeforeClass hook — confirm.
@AfterClass public static void restoreDefaultConfiguration() { DefaultConfiguration.set(original); }
/**
 * Reads the CONF_XML_FILES property (with attribute expressions evaluated)
 * and makes the Hadoop Configuration built from those files the default.
 *
 * @param context the NiFi process context to read the property from
 * @throws IOException if the configuration files cannot be read
 */
@OnScheduled
protected void setDefaultConfiguration(ProcessContext context) throws IOException {
    PropertyValue confProperty =
        context.getProperty(CONF_XML_FILES).evaluateAttributeExpressions();
    DefaultConfiguration.set(getConfiguration(confProperty.getValue()));
}
// NOTE(review): this snippet is truncated — the method body continues beyond
// this view, so only the visible prefix is documented. When a Configuration
// has been injected via Tool.getConf(), publish it as the process-wide
// default before the command logic runs.
@Override public int run(String[] args) throws Exception { if (getConf() != null) { DefaultConfiguration.set(getConf());
@Test public void testFindsHDFS() throws Exception { // set the default configuration that the loader will use Configuration existing = DefaultConfiguration.get(); DefaultConfiguration.set(getConfiguration()); FileSystemDataset<GenericRecord> dataset = Datasets.load("dataset:hdfs:/tmp/datasets/ns/strings"); Assert.assertNotNull("Dataset should be found", dataset); Assert.assertEquals("Dataset should be located in HDFS", "hdfs", dataset.getFileSystem().getUri().getScheme()); // replace the original config so the other tests are not affected DefaultConfiguration.set(existing); }
private synchronized void loadConfiguration() { if(Services.get() != null) { KiteConfigurationService kiteService = Services.get().get(KiteConfigurationService.class); if(kiteService != null) { Configuration kiteConf = kiteService.getKiteConf(); if(kiteConf != null) { DefaultConfiguration.set(kiteConf); } else { // kite conf was null LOG.warn("Configuration for Kite not loaded, Kite configuration service config was null."); } } else { // service was null LOG.warn("Configuration for Kite not loaded, Kite configuration service was not available."); } } else { // services were null LOG.warn("Configuration for Kite not loaded, oozie services were not available."); } }