private static boolean runImport(String[] args, Configuration configuration)
    throws IOException, InterruptedException, ClassNotFoundException {
  // Need to copy the configuration to make sure different temp dirs are used.
  GenericOptionsParser opts =
      new GenericOptionsParser(new Configuration(configuration), args);
  Configuration newConf = opts.getConfiguration();
  args = opts.getRemainingArgs();
  Job job = Import.createSubmittableJob(newConf, args);
  job.waitForCompletion(false);
  return job.isSuccessful();
}
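A hedged sketch of a call site for the helper above; the argument layout (one -D generic option followed by HBase Import's table name and input directory) and all values are assumptions, not taken from the original snippet:

// Hypothetical call site. GenericOptionsParser consumes the -D option;
// "myTable" and "/input/exported-data" remain for Import.createSubmittableJob.
String[] importArgs = new String[] {
    "-Dmapreduce.job.queuename=default",
    "myTable",
    "/input/exported-data"
};
boolean ok = runImport(importArgs, new Configuration());
if (!ok) {
  System.err.println("import failed");
}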
public static void main(String args[]) throws Exception {
  StringUtils.startupShutdownMessage(DFSZKFailoverController.class, args, LOG);
  if (DFSUtil.parseHelpArgument(args, ZKFailoverController.USAGE, System.out, true)) {
    System.exit(0);
  }

  GenericOptionsParser parser = new GenericOptionsParser(
      new HdfsConfiguration(), args);
  DFSZKFailoverController zkfc = DFSZKFailoverController.create(
      parser.getConfiguration());
  try {
    System.exit(zkfc.run(parser.getRemainingArgs()));
  } catch (Throwable t) {
    LOG.error("DFSZKFailoverController exiting due to earlier exception " + t);
    terminate(1, t);
  }
}
private void addDistributedCacheFile(File file, Configuration conf) throws IOException {
  String HADOOP_TMP_FILES = "tmpfiles"; // see Hadoop's GenericOptionsParser
  String tmpFiles = conf.get(HADOOP_TMP_FILES, "");
  if (tmpFiles.length() > 0) { // already present?
    tmpFiles = tmpFiles + ",";
  }
  GenericOptionsParser parser = new GenericOptionsParser(
      new Configuration(conf),
      new String[] { "--files", file.getCanonicalPath() });
  String additionalTmpFiles = parser.getConfiguration().get(HADOOP_TMP_FILES);
  assert additionalTmpFiles != null;
  assert additionalTmpFiles.length() > 0;
  tmpFiles += additionalTmpFiles;
  conf.set(HADOOP_TMP_FILES, tmpFiles);
}
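A minimal usage sketch for the helper above, assuming it lives in the same driver class; the file name is a placeholder:

// Hypothetical usage: register a local file under Hadoop's "tmpfiles" key so
// a later Job submission ships it to the tasks via the distributed cache.
Configuration conf = new Configuration();
File lookupTable = File.createTempFile("lookup", ".dat"); // placeholder file
addDistributedCacheFile(lookupTable, conf);
// conf.get("tmpfiles") now includes lookupTable's canonical path.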
public void testGenericOptionsParser() throws Exception {
  GenericOptionsParser parser = new GenericOptionsParser(
      new Configuration(), new String[] {"-jt"});
  assertEquals(0, parser.getRemainingArgs().length);
  // test if -D accepts -Dx=y=z
  parser = new GenericOptionsParser(new Configuration(),
      new String[] {"-Dx=y=z"});
  assertEquals("y=z", parser.getConfiguration().get("x"));
}
@Override
public int run(String[] realArgs) throws Exception {
  GenericOptionsParser gop = new GenericOptionsParser(getConf(), realArgs);
  Configuration conf = gop.getConfiguration();
  String[] args = gop.getRemainingArgs();

  Job validate = new Job(conf);
  validate.setJobName("Chukwa Test pattern validator");
  validate.setJarByClass(this.getClass());

  validate.setInputFormatClass(SequenceFileInputFormat.class);

  validate.setMapperClass(MapClass.class);
  validate.setMapOutputKeyClass(ByteRange.class);
  validate.setMapOutputValueClass(NullWritable.class);

  validate.setReducerClass(ReduceClass.class);
  validate.setOutputFormatClass(TextOutputFormat.class);

  FileInputFormat.setInputPaths(validate, new Path(args[0]));
  FileOutputFormat.setOutputPath(validate, new Path(args[1]));

  validate.submit();
  return 0;
}
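The run() above does by hand what Hadoop's ToolRunner normally does: apply GenericOptionsParser and hand the leftover arguments to the Tool. A minimal sketch of the ToolRunner route, assuming the enclosing Tool class is named TestValidator (the class names here are guesses):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ToolRunner;

public class ValidatorDriver {
  public static void main(String[] args) throws Exception {
    // ToolRunner strips generic options (-D, -conf, -fs, -jt, -files,
    // -libjars, -archives) and invokes TestValidator.run() with the rest.
    int exitCode = ToolRunner.run(new Configuration(), new TestValidator(), args);
    System.exit(exitCode);
  }
}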
public OperationsParams(GenericOptionsParser parser, boolean autodetectShape) {
  super(parser.getConfiguration());
  initialize(parser.getRemainingArgs());
  if (autodetectShape) {
    TextSerializable shape = getShape("shape");
    if (shape != null) {
      // In case this class is in a third-party jar file, add it to the path
      SpatialSite.addClassToPath(this, shape.getClass());
    }
  }
}
public DumpExtractor(String[] args) throws Exception {
  GenericOptionsParser gop = new GenericOptionsParser(args);
  conf = gop.getConfiguration();
  //outputFileSystem = FileSystem.get(conf);
  this.args = gop.getRemainingArgs();
  configure();
}
final Configuration conf = parser.getConfiguration();
public static void main(String args[]) throws Exception {
  if (DFSUtil.parseHelpArgument(args, ZKFailoverController.USAGE, System.out, true)) {
    System.exit(0);
  }

  GenericOptionsParser parser = new GenericOptionsParser(
      new HdfsConfiguration(), args);
  DFSZKFailoverController zkfc = DFSZKFailoverController.create(
      parser.getConfiguration());
  int retCode = 0;
  try {
    retCode = zkfc.run(parser.getRemainingArgs());
  } catch (Throwable t) {
    LOG.fatal("Got a fatal error, exiting now", t);
  }
  System.exit(retCode);
}
public void testGenericOptionsParser() throws Exception {
  GenericOptionsParser parser = new GenericOptionsParser(
      new Configuration(), new String[] {"-jt"});
  assertEquals(0, parser.getRemainingArgs().length);
  // test if -D accepts -Dx=y=z
  parser = new GenericOptionsParser(new Configuration(),
      new String[] {"-Dx=y=z"});
  assertEquals("Options parser gets entire ='s expression", "y=z",
      parser.getConfiguration().get("x"));
}
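A small extension of the same idea, assuming JUnit 4 asserts: options that GenericOptionsParser understands are consumed, and everything else is preserved verbatim in getRemainingArgs():

// Sketch: -D sets a property on the Configuration; "input"/"output" pass through.
GenericOptionsParser p = new GenericOptionsParser(
    new Configuration(), new String[] {"-Dmy.key=my.value", "input", "output"});
assertEquals("my.value", p.getConfiguration().get("my.key"));
assertArrayEquals(new String[] {"input", "output"}, p.getRemainingArgs());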
@Test
public void test() throws Exception {
  Configuration conf = new Configuration();
  String testHome =
      HADOOP_UTIL.setupAndGetTestDir(ITExportCsv.class.getName(), conf).getAbsolutePath();

  // Create a table with one empty tablet and 3 tablets of 3 rows each.
  createFourTabletsTableWithNineRows(harness.getAsyncClient(), TABLE_NAME, DEFAULT_SLEEP);

  String[] args = new String[] {
      "-D" + CommandLineParser.MASTER_ADDRESSES_KEY + "=" + harness.getMasterAddressesAsString(),
      "*", TABLE_NAME, testHome + "/exportdata"};

  GenericOptionsParser parser = new GenericOptionsParser(conf, args);
  Job job = ExportCsv.createSubmittableJob(parser.getConfiguration(), parser.getRemainingArgs());
  assertTrue("Test job did not end properly", job.waitForCompletion(true));

  String csvContent = readCsvFile(new File(testHome + "/exportdata/part-m-00001"));
  assertEquals(3, csvContent.split("\n").length);
  assertEquals("a string", csvContent.split("\n", -1)[0].split("\t", -1)[3]);
}
@Test
public void test() throws Exception {
  Configuration conf = new Configuration();
  String testHome =
      HADOOP_UTIL.setupAndGetTestDir(ITImportCsv.class.getName(), conf).getAbsolutePath();

  // Create a 4-record Parquet input file.
  Path data = new Path(testHome, "data.parquet");
  writeParquetFile(data, conf);

  StringBuilder sb = new StringBuilder();
  for (ColumnSchema col : schema.getColumns()) {
    sb.append(col.getName());
    sb.append(",");
  }
  sb.deleteCharAt(sb.length() - 1);

  String[] args = new String[] {
      "-D" + CommandLineParser.MASTER_ADDRESSES_KEY + "=" + harness.getMasterAddressesAsString(),
      TABLE_NAME, data.toString()};

  thrown.expect(IllegalArgumentException.class);
  thrown.expectMessage("The column column1_i does not exist in Parquet schema");

  GenericOptionsParser parser = new GenericOptionsParser(conf, args);
  Job job = ImportParquet.createSubmittableJob(parser.getConfiguration(), parser.getRemainingArgs());
  job.waitForCompletion(true);

  KuduTable openTable = harness.getClient().openTable(TABLE_NAME);
  assertEquals(0, countRowsInScan(harness.getAsyncClient().newScannerBuilder(openTable).build()));
}
@Test
public void test() throws Exception {
  Configuration conf = new Configuration();
  String testHome =
      HADOOP_UTIL.setupAndGetTestDir(ITImportCsv.class.getName(), conf).getAbsolutePath();

  // Create a 4-record Parquet input file.
  Path data = new Path(testHome, "data.parquet");
  writeParquetFile(data, conf);

  StringBuilder sb = new StringBuilder();
  for (ColumnSchema col : schema.getColumns()) {
    sb.append(col.getName());
    sb.append(",");
  }
  sb.deleteCharAt(sb.length() - 1);

  String[] args = new String[] {
      "-D" + CommandLineParser.MASTER_ADDRESSES_KEY + "=" + harness.getMasterAddressesAsString(),
      TABLE_NAME, data.toString()};

  GenericOptionsParser parser = new GenericOptionsParser(conf, args);
  Job job = ImportParquet.createSubmittableJob(parser.getConfiguration(), parser.getRemainingArgs());
  assertTrue("Test job did not end properly", job.waitForCompletion(true));

  KuduTable openTable = harness.getClient().openTable(TABLE_NAME);
  assertEquals(4, countRowsInScan(
      harness.getAsyncClient().newScannerBuilder(openTable).build()));
  assertEquals("INT32 key=1, INT32 column1_i=3, DOUBLE column2_d=2.3, STRING column3_s=some string, " +
      "BOOL column4_b=true", scanTableToStrings(openTable).get(0));
}
@Test
public void test() throws Exception {
  Configuration conf = new Configuration();
  HADOOP_UTIL.setupAndGetTestDir(ITRowCounter.class.getName(), conf).getAbsolutePath();

  createFourTabletsTableWithNineRows(harness.getAsyncClient(), TABLE_NAME, DEFAULT_SLEEP);

  String[] args = new String[] {
      "-D" + CommandLineParser.MASTER_ADDRESSES_KEY + "=" + harness.getMasterAddressesAsString(),
      TABLE_NAME};

  GenericOptionsParser parser = new GenericOptionsParser(conf, args);
  Job job = RowCounter.createSubmittableJob(parser.getConfiguration(), parser.getRemainingArgs());
  assertTrue("Job did not end properly", job.waitForCompletion(true));

  assertEquals(9, job.getCounters().findCounter(RowCounter.Counters.ROWS).getValue());
}
@Test
public void test() throws Exception {
  Configuration conf = new Configuration();
  String testHome =
      HADOOP_UTIL.setupAndGetTestDir(ITImportCsv.class.getName(), conf).getAbsolutePath();

  // Create a 2-line input file.
  File data = new File(testHome, "data.csv");
  writeCsvFile(data);

  StringBuilder sb = new StringBuilder();
  for (ColumnSchema col : schema.getColumns()) {
    sb.append(col.getName());
    sb.append(",");
  }
  sb.deleteCharAt(sb.length() - 1);

  String[] args = new String[] {
      "-D" + CommandLineParser.MASTER_ADDRESSES_KEY + "=" + harness.getMasterAddressesAsString(),
      sb.toString(), TABLE_NAME, data.toString()};

  GenericOptionsParser parser = new GenericOptionsParser(conf, args);
  Job job = ImportCsv.createSubmittableJob(parser.getConfiguration(), parser.getRemainingArgs());
  assertTrue("Test job did not end properly", job.waitForCompletion(true));

  KuduTable openTable = harness.getClient().openTable(TABLE_NAME);
  assertEquals(1, job.getCounters().findCounter(ImportCsv.Counters.BAD_LINES).getValue());
  assertEquals(3, countRowsInScan(harness.getAsyncClient().newScannerBuilder(openTable).build()));
  assertEquals("INT32 key=1, INT32 column1_i=3, DOUBLE column2_d=2.3, STRING column3_s=some " +
      "string, BOOL column4_b=true", scanTableToStrings(openTable).get(0));
}
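The Kudu tests above all follow one pattern: split args with GenericOptionsParser, then build the MapReduce job from the parsed Configuration plus the remaining arguments. A hedged sketch of that pattern in isolation; createSubmittableJob here stands in for the ExportCsv/ImportCsv/ImportParquet/RowCounter factory methods:

// Shared pattern (sketch): generic options configure the job, the remaining
// args name the table/paths; waitForCompletion(true) logs progress as it runs.
Configuration conf = new Configuration();
GenericOptionsParser parser = new GenericOptionsParser(conf, args);
Job job = createSubmittableJob(parser.getConfiguration(), parser.getRemainingArgs());
boolean succeeded = job.waitForCompletion(true);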