static void print(PrintWriter writer, DirectDataSourceProfile profile, int indent) {
    writer.printf("%sID: %s%n", indent(indent), profile.getId());
    writer.printf("%sbase-path: %s%n", indent(indent), BasePath.of(profile.getPath()));
    writer.printf("%sclass: %s%n", indent(indent), profile.getTargetClass().getName());
    writer.printf("%sattributes:%n", indent(indent));
    profile.getAttributes().forEach((k, v) -> writer.printf("%s- %s: %s%n", indent(indent), k, v));
}
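// A minimal sketch of the indent(int) helper that print(...) above assumes; it is not
// shown in this section, but from its use it only needs to return an indentation prefix
// of the requested width.
private static String indent(int level) {
    StringBuilder buf = new StringBuilder();
    for (int i = 0; i < level; i++) {
        buf.append(' '); // one space per indent level
    }
    return buf.toString();
}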
@Override
public DirectDataSource newInstance() throws IOException, InterruptedException {
    try {
        AbstractDirectDataSource instance = profile.getTargetClass().getConstructor().newInstance();
        if (instance instanceof Configurable) {
            ((Configurable) instance).setConf(configuration);
        }
        instance.configure(profile);
        return instance;
    } catch (Exception e) {
        throw new IOException(MessageFormat.format(
                "Failed to create data source instance: {0} ({1})",
                PREFIX + profile.getId(),
                profile.getTargetClass().getName()), e);
    }
}
}
@Override
public void configure(DirectDataSourceProfile profile) throws IOException, InterruptedException {
    if (conf == null) {
        throw new IllegalStateException();
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug(MessageFormat.format(
                "Start configuring Hadoop data source (id={0}, path={1})", //$NON-NLS-1$
                profile.getId(),
                profile.getPath()));
    }
    HadoopDataSourceProfile hProfile = HadoopDataSourceProfile.convert(profile, conf);
    this.core = new HadoopDataSourceCore(hProfile);
    if (hProfile.getKeepAliveInterval() > 0) {
        this.core = new KeepAliveDataSource(core, hProfile.getKeepAliveInterval());
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug(MessageFormat.format(
                "Finish configuring Hadoop data source: {0}", //$NON-NLS-1$
                hProfile));
    }
}
/**
 * Loads a simple profile.
 */
@Test
public void loadProfiles_simple() {
    Configuration conf = new Configuration();
    conf.set(key("root"), MockHadoopDataSource.class.getName());
    conf.set(key("root", "path"), "/");
    List<DirectDataSourceProfile> profiles = HadoopDataSourceUtil.loadProfiles(conf);
    assertThat(profiles.size(), is(1));

    DirectDataSourceProfile profile = find(profiles, "");
    assertThat(profile.getTargetClass(), equalTo((Object) MockHadoopDataSource.class));
    assertThat(profile.getAttributes(), is(map()));
}
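// A minimal sketch of the key(...) helper that the tests in this section assume; the
// prefix value below is a hypothetical placeholder, not taken from this section.
private static final String KEY_PREFIX = "com.asakusafw.directio."; // assumed prefix
private static String key(String id) {
    return KEY_PREFIX + id;
}
private static String key(String id, String name) {
    return key(id) + '.' + name;
}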
if (conf == null) {
    throw new IllegalArgumentException("conf must not be null"); //$NON-NLS-1$
}
Map<String, String> attributes = new HashMap<>(profile.getAttributes());
Path fsPath = takeFsPath(profile, attributes, conf);
if (fsPath == null) {
    throw new IOException(MessageFormat.format(
            "The directio configuration \"{0} ({1})\" does not have \"{2}\"",
            profile.getId(),
            profile.getPath().isEmpty() ? ROOT_REPRESENTATION : profile.getPath(),
            fqn(profile, KEY_PATH)));
}
// (resolution of tempPath is elided in this excerpt)
HadoopDataSourceProfile result = new HadoopDataSourceProfile(
        conf, profile.getId(), profile.getPath(), fsPath, tempPath);
long minFragment = takeMinFragment(profile, attributes, conf);
// (applying minFragment and the other extracted settings to result is elided in this excerpt)
// any keys left unconsumed in attributes are unknown
if (attributes.isEmpty() == false) {
    throw new IOException(MessageFormat.format(
            "Unknown attributes in \"{0}\": {1}",
            profile.getId(),
            new TreeSet<>(attributes.keySet())));
}
results.add(new DirectDataSourceProfile(key, aClass, path, config));
@Override
public String getPath() {
    return profile.getPath();
}
@Override
public String getId() {
    return profile.getId();
}
/**
 * Loads a profile with path.
 */
@Test
public void loadProfiles_path() {
    Configuration conf = new Configuration();
    conf.set(key("root"), MockHadoopDataSource.class.getName());
    conf.set(key("root", "path"), "example/path");
    List<DirectDataSourceProfile> profiles = HadoopDataSourceUtil.loadProfiles(conf);
    assertThat(profiles.size(), is(1));

    DirectDataSourceProfile profile = find(profiles, "example/path");
    assertThat(profile.getTargetClass(), equalTo((Object) MockHadoopDataSource.class));
    assertThat(profile.getAttributes(), is(map()));
}
@Override
public DirectDataSource newInstance() throws IOException, InterruptedException {
    MockDirectDataSource ds = new MockDirectDataSource();
    ds.configure(new DirectDataSourceProfile(
            id, MockDirectDataSource.class, path, Collections.emptyMap()));
    return ds;
}
}
private DirectDataSourceProfile find(List<DirectDataSourceProfile> profiles, String path) {
    for (DirectDataSourceProfile p : profiles) {
        if (p.getPath().equals(path)) {
            return p;
        }
    }
    throw new AssertionError(path);
}
private static Object fqn(DirectDataSourceProfile profile, String key) {
    assert profile != null;
    assert key != null;
    return MessageFormat.format(
            "{0}.{1}", //$NON-NLS-1$
            profile.getId(),
            key);
}
/**
 * Loads a profile with attributes.
 */
@Test
public void loadProfiles_attribute() {
    Configuration conf = new Configuration();
    conf.set(key("root"), MockHadoopDataSource.class.getName());
    conf.set(key("root", "path"), "/");
    conf.set(key("root", "hello1"), "world1");
    conf.set(key("root", "hello2"), "world2");
    conf.set(key("root", "hello3"), "world3");
    List<DirectDataSourceProfile> profiles = HadoopDataSourceUtil.loadProfiles(conf);
    assertThat(profiles.size(), is(1));

    DirectDataSourceProfile profile = find(profiles, "");
    assertThat(profile.getTargetClass(), equalTo((Object) MockHadoopDataSource.class));
    assertThat(profile.getAttributes(), is(map(
            "hello1", "world1",
            "hello2", "world2",
            "hello3", "world3")));
}
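// A minimal sketch of the map(...) helper used in the assertions above; it is assumed to
// build the expected Map from alternating key/value arguments (empty when called with none).
private static Map<String, String> map(String... keyValuePairs) {
    assert keyValuePairs.length % 2 == 0;
    Map<String, String> results = new LinkedHashMap<>();
    for (int i = 0; i < keyValuePairs.length; i += 2) {
        results.put(keyValuePairs[i], keyValuePairs[i + 1]);
    }
    return results;
}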
/**
 * the preferred fragment size is invalid.
 * @throws Exception if failed
 */
@Test(expected = IOException.class)
public void convert_prefSize_zero() throws Exception {
    Configuration conf = new Configuration();
    Map<String, String> attributes = new HashMap<>();
    attributes.put(KEY_PREF_FRAGMENT, "0");
    DirectDataSourceProfile profile = new DirectDataSourceProfile(
            "testing", HadoopDataSource.class, "context", attributes);
    HadoopDataSourceProfile.convert(profile, conf);
}
/**
 * create simple repository.
 * @throws Exception if failed
 */
@Test
public void loadRepository() throws Exception {
    Configuration conf = new Configuration();
    conf.set(key("testing"), MockHadoopDataSource.class.getName());
    conf.set(key("testing", "path"), "testing");
    conf.set(key("testing", "hello"), "world");
    DirectDataSourceRepository repo = HadoopDataSourceUtil.loadRepository(conf);

    DirectDataSource ds = repo.getRelatedDataSource("testing");
    assertThat(ds, instanceOf(MockHadoopDataSource.class));
    MockHadoopDataSource mock = (MockHadoopDataSource) ds;
    assertThat(mock.conf, is(notNullValue()));
    assertThat(mock.profile.getPath(), is("testing"));
}
@Override
public void run() {
    LOG.debug("starting {}", getClass().getSimpleName());
    Configuration conf = configurationParameter.getConfiguration();
    List<DirectDataSourceProfile> profiles = HadoopDataSourceUtil.loadProfiles(conf);
    DirectDataSourceProfile spec = null;
    if (id != null) {
        spec = profiles.stream()
                .filter(it -> Objects.equal(it.getId(), id))
                .findAny()
                .orElseThrow(() -> new CommandConfigurationException(MessageFormat.format(
                        "data source \"{0}\" not found (available data sources: {1})",
                        id,
                        profiles.stream()
                                .map(DirectDataSourceProfile::getId)
                                .collect(Collectors.joining(", ")))));
    }
    try (PrintWriter writer = outputParameter.open()) {
        if (spec == null) {
            verboseParameter.printf(writer, "total %,d%n", profiles.size());
            profiles.forEach(it -> {
                writer.printf("%s%n", it.getId());
                verboseParameter.ifRequired(() -> print(writer, it, 4));
            });
        } else {
            print(writer, spec, 0);
        }
    }
}
/**
 * Loads multiple profiles.
 */
@Test
public void loadProfiles_multiple() {
    Configuration conf = new Configuration();
    conf.set(key("a"), MockHadoopDataSource.class.getName());
    conf.set(key("a", "path"), "aaa");
    conf.set(key("b"), MockHadoopDataSource.class.getName());
    conf.set(key("b", "path"), "bbb");
    conf.set(key("c"), MockHadoopDataSource.class.getName());
    conf.set(key("c", "path"), "ccc");
    List<DirectDataSourceProfile> profiles = HadoopDataSourceUtil.loadProfiles(conf);
    assertThat(profiles.size(), is(3));

    DirectDataSourceProfile a = find(profiles, "aaa");
    assertThat(a.getTargetClass(), equalTo((Object) MockHadoopDataSource.class));
    assertThat(a.getAttributes(), is(map()));

    DirectDataSourceProfile b = find(profiles, "bbb");
    assertThat(b.getTargetClass(), equalTo((Object) MockHadoopDataSource.class));
    assertThat(b.getAttributes(), is(map()));

    DirectDataSourceProfile c = find(profiles, "ccc");
    assertThat(c.getTargetClass(), equalTo((Object) MockHadoopDataSource.class));
    assertThat(c.getAttributes(), is(map()));
}
/**
 * the preferred fragment size is invalid.
 * @throws Exception if failed
 */
@Test(expected = IOException.class)
public void convert_prefSize_notInt() throws Exception {
    Configuration conf = new Configuration();
    Map<String, String> attributes = new HashMap<>();
    attributes.put(KEY_PREF_FRAGMENT, "INVALID");
    DirectDataSourceProfile profile = new DirectDataSourceProfile(
            "testing", HadoopDataSource.class, "context", attributes);
    HadoopDataSourceProfile.convert(profile, conf);
}
/**
 * the minimum fragment size is invalid.
 * @throws Exception if failed
 */
@Test(expected = IOException.class)
public void convert_minSize_notInt() throws Exception {
    Configuration conf = new Configuration();
    Map<String, String> attributes = new HashMap<>();
    attributes.put(KEY_MIN_FRAGMENT, "INVALID");
    DirectDataSourceProfile profile = new DirectDataSourceProfile(
            "testing", HadoopDataSource.class, "context", attributes);
    HadoopDataSourceProfile.convert(profile, conf);
}
/**
 * the minimum fragment size is invalid.
 * @throws Exception if failed
 */
@Test(expected = IOException.class)
public void convert_minSize_zero() throws Exception {
    Configuration conf = new Configuration();
    Map<String, String> attributes = new HashMap<>();
    attributes.put(KEY_MIN_FRAGMENT, "0");
    DirectDataSourceProfile profile = new DirectDataSourceProfile(
            "testing", HadoopDataSource.class, "context", attributes);
    HadoopDataSourceProfile.convert(profile, conf);
}