protected void walToHFiles(List<String> dirPaths, List<String> tableList) throws IOException {
  Tool player = new WALPlayer();
  // Player reads all files in an arbitrary directory structure and creates
  // a Map task for each file. We use ';' as the separator
  // because WAL file names contain ','.
  String dirs = StringUtils.join(dirPaths, ';');
  String jobname = "Incremental_Backup-" + backupId;
  Path bulkOutputPath = getBulkOutputDir();
  conf.set(WALPlayer.BULK_OUTPUT_CONF_KEY, bulkOutputPath.toString());
  conf.set(WALPlayer.INPUT_FILES_SEPARATOR_KEY, ";");
  conf.setBoolean(WALPlayer.MULTI_TABLES_SUPPORT, true);
  conf.set(JOB_NAME_CONF_KEY, jobname);
  String[] playerArgs = { dirs, StringUtils.join(tableList, ",") };
  try {
    player.setConf(conf);
    int result = player.run(playerArgs);
    if (result != 0) {
      throw new IOException("WAL Player failed");
    }
    conf.unset(WALPlayer.INPUT_FILES_SEPARATOR_KEY);
    conf.unset(JOB_NAME_CONF_KEY);
  } catch (IOException e) {
    throw e;
  } catch (Exception ee) {
    throw new IOException("Can not convert from directory " + dirs
        + " (check Hadoop, HBase and WALPlayer M/R job logs) ", ee);
  }
}
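The HFiles that WALPlayer writes under bulkOutputPath still have to be bulk-loaded into the target tables. A minimal follow-up sketch, assuming HBase's stock LoadIncrementalHFiles tool; the table name below is illustrative, not taken from the method above:

// Hypothetical follow-up step: bulk-load the HFiles produced above.
// LoadIncrementalHFiles is itself a Tool, so it is driven the same way;
// the table name here is a placeholder.
Tool loader = new LoadIncrementalHFiles(conf);
int rc = loader.run(new String[] { bulkOutputPath.toString(), "my_table" });
if (rc != 0) {
  throw new IOException("Bulk load of " + bulkOutputPath + " failed");
}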
public static int runMRJob(Tool tool, String[] args) throws Exception {
  Configuration conf = tool.getConf();
  if (conf == null) {
    conf = new Configuration();
  }
  GenericOptionsParser parser = getParser(conf, args);
  // set the configuration back, so that Tool can configure itself
  tool.setConf(conf);
  // get the args w/o generic hadoop args
  String[] toolArgs = parser.getRemainingArgs();
  return tool.run(toolArgs);
}
/**
 * Runs the <code>Tool</code> with its <code>Configuration</code>.
 *
 * Equivalent to <code>run(tool.getConf(), tool, args)</code>.
 *
 * @param tool <code>Tool</code> to run.
 * @param args command-line arguments to the tool.
 * @return exit code of the {@link Tool#run(String[])} method.
 */
public static int run(Tool tool, String[] args) throws Exception {
  return run(tool.getConf(), tool, args);
}
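As context for this helper, the usual pattern is a Tool that extends Configured and hands control to ToolRunner from main, so generic Hadoop options (-D, -conf, -fs, ...) are parsed into the Configuration before run() sees the remaining arguments. A minimal, hypothetical example:

import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

// Hypothetical minimal Tool: echoes the non-generic arguments it receives.
public class EchoTool extends Configured implements Tool {
  @Override
  public int run(String[] args) throws Exception {
    for (String arg : args) {
      System.out.println(arg);
    }
    return 0;
  }

  public static void main(String[] args) throws Exception {
    // ToolRunner parses generic options into the Configuration first.
    System.exit(ToolRunner.run(new EchoTool(), args));
  }
}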
@Override
public int run(String[] args) throws Exception {
  if (args.length != 2) {
    System.err.printf("Usage: %s [generic options] <input> <output>\n", getClass().getName());
    ToolRunner.printGenericCommandUsage(System.err);
    return -1;
  }
  Configuration configuration = getConf();
  boolean overrideOutput = configuration.getBoolean(OPTION_OVERRIDE_OUTPUT,
      OPTION_OVERRIDE_OUTPUT_DEFAULT);
  FileSystem fs = FileSystem.get(new Path(args[1]).toUri(), configuration);
  if (overrideOutput) {
    fs.delete(new Path(args[1]), true);
  }
  Tool driver = new Freebase2RDFDriver(configuration);
  // Propagate the driver's exit code instead of discarding it.
  return driver.run(new String[] { args[0], args[1] });
}
void generateAndLoad(final TableName table) throws Exception {
  LOG.info("Running test testGenerateAndLoad.");
  String cf = "d";
  Path hfiles = new Path(util.getDataTestDirOnTestFS(table.getNameAsString()), "hfiles");
  Map<String, String> args = new HashMap<>();
  args.put(ImportTsv.BULK_OUTPUT_CONF_KEY, hfiles.toString());
  args.put(ImportTsv.COLUMNS_CONF_KEY,
      format("HBASE_ROW_KEY,HBASE_TS_KEY,%s:c1,%s:c2", cf, cf));
  // configure the test harness to NOT delete the HFiles after they're
  // generated. We need those for doLoadIncrementalHFiles
  args.put(TestImportTsv.DELETE_AFTER_LOAD_CONF, "false");
  // run the job, complete the load.
  util.createTable(table, new String[] { cf });
  Tool t = TestImportTsv.doMROnTableTest(util, table, cf, simple_tsv, args);
  doLoadIncrementalHFiles(hfiles, table);
  // validate post-conditions
  validateDeletedPartitionsFile(t.getConf());
  // clean up after ourselves.
  util.deleteTable(table);
  util.cleanupDataTestDirOnTestFS(table.getNameAsString());
  LOG.info("testGenerateAndLoad completed successfully.");
}
System.setOut(out);
System.setErr(out);
ret = tool.run(cmds);
System.out.flush();
System.err.flush();
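When the streams are redirected like this, the originals are usually saved and restored so later output is not swallowed. A hedged completion sketch; the surrounding declarations of out, tool, cmds, and ret are assumed from context:

// Hypothetical fuller form: restore the original streams in a finally
// block so a failing tool.run() cannot leave stdout/stderr redirected.
PrintStream oldOut = System.out;
PrintStream oldErr = System.err;
try {
  System.setOut(out);
  System.setErr(out);
  ret = tool.run(cmds);
  System.out.flush();
  System.err.flush();
} finally {
  System.setOut(oldOut);
  System.setErr(oldErr);
}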
@Override
public int run(String[] args) throws Exception {
  if (args.length == 0) {
    printUsage();
    return AbstractHBaseTool.EXIT_FAILURE;
  }
  Tool tool;
  switch (args[0]) {
    case VALIDATE_CP_NAME:
      tool = new CoprocessorValidator();
      break;
    case VALIDATE_DBE_NAME:
      tool = new DataBlockEncodingValidator();
      break;
    case VALIDATE_HFILE:
      tool = new HFileContentValidator();
      break;
    case "-h":
      printUsage();
      return AbstractHBaseTool.EXIT_FAILURE;
    default:
      System.err.println("Unknown command: " + args[0]);
      printUsage();
      return AbstractHBaseTool.EXIT_FAILURE;
  }
  tool.setConf(getConf());
  return tool.run(Arrays.copyOfRange(args, 1, args.length));
}
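A hypothetical entry point for this dispatcher; the enclosing class name is an assumption based on the VALIDATE_* constants it references:

// Hypothetical main() for the dispatcher above; the PreUpgradeValidator
// class name is an assumption, not confirmed by the snippet.
public static void main(String[] args) throws Exception {
  int ret = ToolRunner.run(HBaseConfiguration.create(), new PreUpgradeValidator(), args);
  System.exit(ret);
}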
@Override
public Job getJob(final Tool tool) throws IOException {
  return new Job(tool.getConf());
}
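Note that the Job(Configuration) constructor is deprecated in current Hadoop; an equivalent using the factory method:

// Same behavior without the deprecated constructor.
@Override
public Job getJob(final Tool tool) throws IOException {
  return Job.getInstance(tool.getConf());
}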
public static int run(Configuration conf, Tool tool, String[] args) throws Exception {
  if (conf == null) {
    conf = new Configuration();
  }
  GenericOptionsParser parser = new GenericOptionsParser(conf, args);
  // set the configuration back, so that Tool can configure itself
  tool.setConf(conf);
  // get the args w/o generic hadoop args
  String[] toolArgs = parser.getRemainingArgs();
  return tool.run(toolArgs);
}
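To make the generic-option handling concrete, here is a small standalone check of what GenericOptionsParser consumes versus what the tool receives:

// Demonstration: -D options are folded into the Configuration, so the
// Tool only ever sees the remaining application arguments.
Configuration conf = new Configuration();
String[] args = { "-D", "mapreduce.job.reduces=4", "input", "output" };
GenericOptionsParser parser = new GenericOptionsParser(conf, args);
String[] toolArgs = parser.getRemainingArgs();
// conf.get("mapreduce.job.reduces") is now "4";
// toolArgs is { "input", "output" }.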
@Override
public boolean runDistCp(Path src, Path dst, Configuration conf) throws IOException {
  int rc;
  // Creates the command-line parameters for distcp
  String[] params = { "-update", "-skipcrccheck", src.toString(), dst.toString() };
  try {
    Class<?> clazzDistCp = Class.forName("org.apache.hadoop.tools.distcp2");
    Constructor<?> c = clazzDistCp.getConstructor();
    c.setAccessible(true);
    Tool distcp = (Tool) c.newInstance();
    distcp.setConf(conf);
    rc = distcp.run(params);
  } catch (ClassNotFoundException e) {
    throw new IOException("Cannot find DistCp class package: " + e.getMessage());
  } catch (NoSuchMethodException e) {
    throw new IOException("Cannot get DistCp constructor: " + e.getMessage());
  } catch (Exception e) {
    throw new IOException("Cannot execute DistCp process: " + e, e);
  }
  return rc == 0;
}
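A hypothetical call site for this shim; the shims handle and both paths are illustrative:

// Hypothetical usage of the shim above; paths and the shims handle
// are placeholders, not taken from the snippet.
Path src = new Path("hdfs:///staging/part-00000");
Path dst = new Path("hdfs:///warehouse/t/part-00000");
if (!shims.runDistCp(src, dst, new Configuration())) {
  throw new IOException("DistCp from " + src + " to " + dst + " failed");
}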