/**
 * Runs the given Sqoop tool with the shim's class loader installed as the
 * thread context class loader, restoring the caller's loader afterwards.
 *
 * @param args command-line arguments for the tool
 * @param c configuration to convert and pass through to Sqoop
 * @return the tool's exit status
 */
@Override
public int runTool(String[] args, Configuration c) {
  final Thread currentThread = Thread.currentThread();
  final ClassLoader callerLoader = currentThread.getContextClassLoader();
  // Swap in this class's loader so Sqoop resolves classes from the shim.
  currentThread.setContextClassLoader(getClass().getClassLoader());
  try {
    return Sqoop.runTool(args, ShimUtils.asConfiguration(c));
  } finally {
    // Always restore the caller's loader, even if the tool throws.
    currentThread.setContextClassLoader(callerLoader);
  }
}
/** * Run a MapReduce-based export (using the argv provided to control * execution). * * @return the generated jar filename */ protected List<String> runExport(String[] argv) throws IOException { // run the tool through the normal entry-point. int ret; List<String> generatedJars = null; try { ExportTool exporter = new ExportTool(); Sqoop sqoop = new Sqoop(exporter); String username = MSSQLTestUtils.getDBUserName(); String password = MSSQLTestUtils.getDBPassWord(); sqoop.getOptions().setUsername(username); sqoop.getOptions().setPassword(password); ret = Sqoop.runSqoop(sqoop, argv); generatedJars = exporter.getGeneratedJarFiles(); } catch (Exception e) { LOG.error("Got exception running Sqoop: " + StringUtils.stringifyException(e)); ret = 1; } // expect a successful return. if (0 != ret) { throw new IOException("Failure during job; return status " + ret); } return generatedJars; }
/**
 * An incremental lastmodified import into an already-existing output
 * directory must fail unless --merge-key or --append is supplied.
 */
@Test
public void testLastModifiedImportWithExistingOutputDirectoryFails() throws Exception {
  final String tableName = "failWithExistingOutputDirectory";
  // Pre-create the output directory so the import sees it as existing.
  createDir(tableName);
  // Seed the table with rows timestamped slightly in the past.
  Timestamp pastTimestamp = new Timestamp(System.currentTimeMillis() - 100);
  createTimestampTable(tableName, 10, pastTimestamp);
  List<String> arguments = getArgListForTable(tableName, true, false);
  SqoopOptions sqoopOptions = new SqoopOptions(newConf());
  sqoopOptions.setThrowOnError(true);
  thrown.expectMessage("--merge-key or --append is required when using --incremental lastmodified and the output directory exists.");
  Sqoop sqoop = new Sqoop(new ImportTool(), sqoopOptions.getConf(), sqoopOptions);
  ToolRunner.run(sqoop.getConf(), sqoop, arguments.toArray(new String[0]));
}
/** JobTool must fail when metastore auto-connect is explicitly disabled. */
@Test
public void testJobToolWithAutoConnectDisabledFails() throws IOException {
  String[] arguments = new ArgumentArrayBuilder()
      .withProperty("sqoop.metastore.client.enable.autoconnect", "false")
      .build();
  assertEquals(STATUS_FAILURE, Sqoop.runSqoop(sqoop, arguments));
}
// Creates a fresh Sqoop instance wrapping a JobTool before each test so
// state does not leak between test cases.
@Before public void before() { sqoop = new Sqoop(new JobTool()); }
/**
 * Runs the JobTool "list" action with an auto-connect URL and valid
 * credentials supplied via configuration properties.
 *
 * @return the Sqoop exit status
 */
private int runJobToolWithAutoConnectUrlAndCorrectUsernamePasswordSpecified() {
  ArgumentArrayBuilder argumentBuilder = new ArgumentArrayBuilder();
  argumentBuilder.withProperty("sqoop.metastore.client.autoconnect.url", HsqldbTestServer.getUrl());
  argumentBuilder.withProperty("sqoop.metastore.client.autoconnect.username", TEST_USER);
  argumentBuilder.withProperty("sqoop.metastore.client.autoconnect.password", TEST_PASSWORD);
  argumentBuilder.withOption("list");
  return Sqoop.runSqoop(sqoop, argumentBuilder.build());
}
/** Creates an incremental Sqoop job in the metastore, storing its password. */
private void createJob() {
  Configuration configuration = new Configuration();
  // Persist the password with the saved job so a later exec does not prompt.
  configuration.set(org.apache.sqoop.SqoopOptions.METASTORE_PASSWORD_KEY, "true");
  org.apache.sqoop.Sqoop createSqoop =
      new org.apache.sqoop.Sqoop(new JobTool(), configuration);
  String[] createArguments = getIncrementalJob(metaConnectString, metaUser, metaPass);
  org.apache.sqoop.Sqoop.runSqoop(createSqoop, createArguments);
}
// Callable body: runs the Sqoop tool with the shim's class loader installed
// as the thread context loader, restoring the caller's loader in `finally`.
// NOTE(review): `args` and `c` are captured from an enclosing scope that is
// not visible here — confirm they outlive this Callable's execution.
@Override public Integer call() throws Exception { ClassLoader cl = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader( getClass().getClassLoader() ); try { return Sqoop.runTool( args, ShimUtils.asConfiguration( c ) ); } finally { Thread.currentThread().setContextClassLoader( cl ); } } } );
protected void runImport(SqoopTool tool, String[] argv) throws IOException { // run the tool through the normal entry-point. int ret; try { Configuration conf = getConf(); SqoopOptions opts = getSqoopOptions(conf); Sqoop sqoop = new Sqoop(tool, conf, opts); ret = Sqoop.runSqoop(sqoop, argv); } catch (Exception e) { LOG.error("Got exception running import: " + e.toString()); e.printStackTrace(); ret = 1; } if (0 != ret) { throw new IOException("Import failure; return status " + ret); } }
/**
 * Executes the configured Sqoop tool with the assembled argument list,
 * forcing the configured MapReduce framework.
 *
 * @param args unused command-line arguments from the runner
 * @throws Exception propagated from argument assembly
 */
@Override
public void run(String... args) throws Exception {
  List<String> toolArguments = createFinalArguments();
  logger.info("Running Sqoop tool with arguments: " + toolArguments);
  Configuration sqoopConfiguration = new Configuration(hadoopConfiguration);
  logger.info("Setting mapreduce.framework.name to " + mapreduceFramework);
  sqoopConfiguration.set("mapreduce.framework.name", mapreduceFramework);
  int exitCode = Sqoop.runTool(toolArguments.toArray(new String[0]), sqoopConfiguration);
  logger.info("Sqoop tool completed with return code: " + exitCode);
  if (exitCode != 0) {
    throw new RuntimeException("Sqoop job failed - return code " + exitCode);
  }
}
private void runImportAll(SqoopTool tool,String [] argv) throws IOException { // run the tool through the normal entry-point. int ret; try { Configuration conf = getConf(); SqoopOptions opts = getSqoopOptions(conf); Sqoop sqoop = new Sqoop(tool, conf, opts); ret = Sqoop.runSqoop(sqoop, argv); //ret = tool.run(opts); } catch (Exception e) { LOG.error("Got exception running Sqoop: " + e.toString()); ret = 1; } // expect a successful return. if (0 != ret) { throw new IOException("Failure during job; return status " + ret); } }
@Test public void testPlugin() { // Register the plugin with SqoopTool. Configuration pluginConf = new Configuration(); pluginConf.set(SqoopTool.TOOL_PLUGINS_KEY, PluginClass.class.getName()); SqoopTool.loadPlugins(pluginConf); ArrayList<String> args = new ArrayList<String>(); args.add("fooTool"); args.add("--username"); args.add("bob"); args.add("--connect"); args.add("anywhere"); int ret = Sqoop.runTool(args.toArray(new String[0])); assertEquals("Expected tool run success", 0, ret); String actualUser = FooTool.getLastUser(); assertEquals("Failed to set username correctly.", "bob", actualUser); }
/** Executes the previously stored Sqoop job and asserts that it succeeds. */
private void execJob() {
  org.apache.sqoop.Sqoop execSqoop = new org.apache.sqoop.Sqoop(new JobTool());
  String[] execArguments = getExecJob(metaConnectString, metaUser, metaPass);
  assertEquals("Sqoop Job did not execute properly", 0,
      org.apache.sqoop.Sqoop.runSqoop(execSqoop, execArguments));
}
// Delegate to Sqoop's tool entry point with the assembled arguments and
// configuration; the tool's exit status is propagated to the caller.
return Sqoop.runTool(sqoopArgs.toArray(new String[sqoopArgs.size()]), sqoopConf);
@Test public void testExecJob() throws IOException { Configuration conf = new Configuration(); //creates the job JobTool jobToolCreate = new JobTool(); Sqoop sqoopCreate = new Sqoop(jobToolCreate, conf); String[] argsCreate = getCreateJob(metaConnectString, metaUser, metaPass); Sqoop.runSqoop(sqoopCreate, argsCreate); //executes the job JobTool jobToolExec = new JobTool(); Sqoop sqoopExec = new Sqoop(jobToolExec); String[] argsExec = getExecJob(metaConnectString, metaUser, metaPass); assertEquals("Error executing Sqoop Job", 0, Sqoop.runSqoop(sqoopExec, argsExec)); }
// Run the Sqoop tool with the assembled arguments (the exit status is not
// checked here), then initialize the imported-row counter.
Sqoop.runTool(sqoopArgs.toArray(new String[sqoopArgs.size()]), sqoopConf); int rowsImported = 0;
@Test public void testDeleteJob() throws IOException { Configuration conf = new Configuration(); //Creates the job JobTool jobToolCreate = new JobTool(); Sqoop sqoopCreate = new Sqoop(jobToolCreate, conf); String[] argsCreate = getCreateJob(metaConnectString, metaUser, metaPass); Sqoop.runSqoop(sqoopCreate, argsCreate); //Deletes the job JobTool jobToolDelete = new JobTool(); Sqoop sqoopExec = new Sqoop(jobToolDelete); String[] argsDelete = getDeleteJob(metaConnectString, metaUser, metaPass); assertEquals("Error deleting Sqoop Job", 0, Sqoop.runSqoop(sqoopExec, argsDelete)); } }
/**
 * Runs an import with the given options and arguments, failing the calling
 * test on any exception or non-zero exit status.
 *
 * @param options Sqoop options (supplies the configuration)
 * @param args command-line arguments for the import
 */
public void runImport(SqoopOptions options, List<String> args) {
  try {
    Sqoop importJob = new Sqoop(new ImportTool(), options.getConf(), options);
    int status = Sqoop.runSqoop(importJob, args.toArray(new String[0]));
    assertEquals("Failure during job", 0, status);
  } catch (Exception e) {
    LOG.error("Got exception running Sqoop: " + StringUtils.stringifyException(e));
    throw new RuntimeException(e);
  }
}
/** Verifies that a Sqoop job can be created in the metastore. */
@Test
public void testCreateJob() throws IOException {
  org.apache.sqoop.Sqoop createSqoop =
      new Sqoop(new org.apache.sqoop.tool.JobTool());
  String[] createArguments = getCreateJob(metaConnectString, metaUser, metaPass);
  assertEquals("Error creating Sqoop Job", 0, Sqoop.runSqoop(createSqoop, createArguments));
}