results.add(new DirectDataSourceProfile(key, aClass, path, config));
/**
 * Creates a fresh {@link MockDirectDataSource} and configures it with a profile
 * built from the enclosing declaration's {@code id} and {@code path}
 * (both defined outside this chunk) and no extra attributes.
 * @return the configured mock data source
 * @throws IOException if configuration fails
 * @throws InterruptedException if interrupted while configuring
 */
@Override public DirectDataSource newInstance() throws IOException, InterruptedException { MockDirectDataSource ds = new MockDirectDataSource(); ds.configure(new DirectDataSourceProfile( id, MockDirectDataSource.class, path, Collections.emptyMap())); return ds; } } // NOTE(review): trailing brace closes a declaration that begins outside this view
/**
 * Converting must fail when the preferred fragment size cannot be parsed as a number.
 * @throws Exception if failed
 */
@Test(expected = IOException.class)
public void convert_prefSize_notInt() throws Exception {
    Map<String, String> attrs = new HashMap<>();
    attrs.put(KEY_PREF_FRAGMENT, "INVALID");
    DirectDataSourceProfile source = new DirectDataSourceProfile(
            "testing", HadoopDataSource.class, "context", attrs);
    // the malformed size must be rejected during conversion
    HadoopDataSourceProfile.convert(source, new Configuration());
}
/**
 * Converting must fail when the preferred fragment size is zero (it must be positive).
 * @throws Exception if failed
 */
@Test(expected = IOException.class)
public void convert_prefSize_zero() throws Exception {
    Map<String, String> attrs = new HashMap<>();
    attrs.put(KEY_PREF_FRAGMENT, "0");
    DirectDataSourceProfile source = new DirectDataSourceProfile(
            "testing", HadoopDataSource.class, "context", attrs);
    // a non-positive size must be rejected during conversion
    HadoopDataSourceProfile.convert(source, new Configuration());
}
/**
 * Converting must fail when the minimum fragment size cannot be parsed as a number.
 * @throws Exception if failed
 */
@Test(expected = IOException.class)
public void convert_minSize_notInt() throws Exception {
    Map<String, String> attrs = new HashMap<>();
    attrs.put(KEY_MIN_FRAGMENT, "INVALID");
    DirectDataSourceProfile source = new DirectDataSourceProfile(
            "testing", HadoopDataSource.class, "context", attrs);
    // the malformed size must be rejected during conversion
    HadoopDataSourceProfile.convert(source, new Configuration());
}
/**
 * Converting must fail when the minimum fragment size is zero (it must be positive).
 * @throws Exception if failed
 */
@Test(expected = IOException.class)
public void convert_minSize_zero() throws Exception {
    Map<String, String> attrs = new HashMap<>();
    attrs.put(KEY_MIN_FRAGMENT, "0");
    DirectDataSourceProfile source = new DirectDataSourceProfile(
            "testing", HadoopDataSource.class, "context", attrs);
    // a non-positive size must be rejected during conversion
    HadoopDataSourceProfile.convert(source, new Configuration());
}
/**
 * Converting must fail when the mandatory path attribute is absent.
 * @throws Exception if failed
 */
@Test(expected = IOException.class)
public void convert_nopath() throws Exception {
    Configuration conf = new Configuration();
    // intentionally empty: no KEY_PATH entry
    DirectDataSourceProfile source = new DirectDataSourceProfile(
            "testing", HadoopDataSource.class, "context", new HashMap<>());
    HadoopDataSourceProfile.convert(source, conf);
}
/**
 * the profile contains an unrecognized attribute.
 * (Fixed javadoc: it previously said "the minimum fragment size is invalid",
 * copy-pasted from the minSize tests, but this test verifies that an unknown
 * attribute key is rejected.)
 * @throws Exception if failed
 */
@Test(expected = IOException.class)
public void convert_unknown_properties() throws Exception {
    Configuration conf = new Configuration();
    Map<String, String> attributes = new HashMap<>();
    attributes.put(KEY_PATH, folder.getRoot().getCanonicalFile().toURI().toString());
    // an attribute key the converter does not understand must cause a failure
    attributes.put("__INVALID__", "value");
    DirectDataSourceProfile profile = new DirectDataSourceProfile(
            "testing", HadoopDataSource.class, "context", attributes);
    HadoopDataSourceProfile.convert(profile, conf);
}
/**
 * Converting must fail when the production path and the temporary path
 * live on different file systems.
 * @throws Exception if failed
 */
@Test(expected = IOException.class)
public void convert_inconsistent_fs() throws Exception {
    Configuration conf = new Configuration();
    conf.setClass("fs.mock.impl", MockFs.class, FileSystem.class);
    String rootUri = folder.getRoot().toURI().toString();
    Map<String, String> attrs = new HashMap<>();
    attrs.put(KEY_PATH, rootUri);
    // temp path uses the "mock" scheme, so its fs differs from the production fs
    attrs.put(KEY_TEMP, "mock://" + rootUri);
    DirectDataSourceProfile source = new DirectDataSourceProfile(
            "testing", HadoopDataSource.class, "context", attrs);
    HadoopDataSourceProfile.convert(source, conf);
}
/**
 * Converting a profile whose path is relative: the result must be qualified
 * against the default file system's working directory.
 * @throws Exception if failed
 */
@Test
public void convert_relpath() throws Exception {
    Configuration conf = new Configuration();
    Map<String, String> attrs = new HashMap<>();
    attrs.put(KEY_PATH, "relative");
    DirectDataSourceProfile source = new DirectDataSourceProfile(
            "testing", HadoopDataSource.class, "context", attrs);
    HadoopDataSourceProfile result = HadoopDataSourceProfile.convert(source, conf);
    FileSystem fs = FileSystem.get(conf);
    Path expected = fs.makeQualified(new Path(fs.getWorkingDirectory(), "relative"));
    assertThat(result.getFileSystem().getCanonicalServiceName(), is(fs.getCanonicalServiceName()));
    assertThat(result.getFileSystemPath(), is(expected));
}
Map<String, String> attributes = new HashMap<>(); attributes.put(KEY_PATH, folder.getRoot().getCanonicalFile().toURI().toString()); DirectDataSourceProfile profile = new DirectDataSourceProfile( "testing", HadoopDataSource.class,
attributes.put(KEY_KEEPALIVE_INTERVAL, "12345"); attributes.put(KEY_ROLLFORWARD_THREADS, "23"); DirectDataSourceProfile profile = new DirectDataSourceProfile( "testing", HadoopDataSource.class,