private int runSqoopTool(SqoopTool tool, String [] argv, SqoopOptions sqoopOptions) { Configuration conf = getConf(); //Need to disable OraOop for existing tests conf.set("oraoop.disabled", "true"); Sqoop sqoop = new Sqoop(tool, conf, sqoopOptions); return Sqoop.runSqoop(sqoop, argv); }
/**
 * Creates an incremental Sqoop job in the metastore, with the metastore
 * configured to record the password.
 */
private void createJob() {
  Configuration conf = new Configuration();
  conf.set(org.apache.sqoop.SqoopOptions.METASTORE_PASSWORD_KEY, "true");
  org.apache.sqoop.Sqoop jobCreator =
      new org.apache.sqoop.Sqoop(new JobTool(), conf);
  org.apache.sqoop.Sqoop.runSqoop(
      jobCreator, getIncrementalJob(metaConnectString, metaUser, metaPass));
}
protected void runImport(SqoopTool tool, String[] argv) throws IOException { // run the tool through the normal entry-point. int ret; try { Configuration conf = getConf(); SqoopOptions opts = getSqoopOptions(conf); Sqoop sqoop = new Sqoop(tool, conf, opts); ret = Sqoop.runSqoop(sqoop, argv); } catch (Exception e) { LOG.error("Got exception running import: " + e.toString()); e.printStackTrace(); ret = 1; } if (0 != ret) { throw new IOException("Import failure; return status " + ret); } }
private void runImportAll(SqoopTool tool,String [] argv) throws IOException { // run the tool through the normal entry-point. int ret; try { Configuration conf = getConf(); SqoopOptions opts = getSqoopOptions(conf); Sqoop sqoop = new Sqoop(tool, conf, opts); ret = Sqoop.runSqoop(sqoop, argv); //ret = tool.run(opts); } catch (Exception e) { LOG.error("Got exception running Sqoop: " + e.toString()); ret = 1; } // expect a successful return. if (0 != ret) { throw new IOException("Failure during job; return status " + ret); } }
/** the same than ImportJobTestCase but without removing tabledir. */ protected void runUncleanImport(String[] argv) throws IOException { // run the tool through the normal entry-point. int ret; try { Configuration conf = getConf(); SqoopOptions opts = getSqoopOptions(conf); Sqoop sqoop = new Sqoop(new ImportTool(), conf, opts); ret = Sqoop.runSqoop(sqoop, argv); } catch (Exception e) { LOG.error("Got exception running Sqoop: " + e.toString()); e.printStackTrace(); ret = 1; } // expect a successful return. if (0 != ret) { throw new IOException("Failure during job; return status " + ret); } }
/**
 * Runs an import with the given options and argument list; fails the
 * surrounding test if the job does not finish successfully.
 *
 * @param options pre-built Sqoop options for the run
 * @param args command-line arguments for the import
 */
public void runImport(SqoopOptions options, List<String> args) {
  try {
    String[] argv = args.toArray(new String[0]);
    Sqoop importJob = new Sqoop(new ImportTool(), options.getConf(), options);
    assertEquals("Failure during job", 0, Sqoop.runSqoop(importJob, argv));
  } catch (Exception e) {
    LOG.error("Got exception running Sqoop: "
        + StringUtils.stringifyException(e));
    throw new RuntimeException(e);
  }
}
/**
 * Create a job with the specified name, where the job performs
 * an import configured with 'jobArgs', using the provided configuration
 * as defaults.
 */
private void createJob(String jobName, List<String> jobArgs, Configuration conf) {
  try {
    SqoopOptions options = new SqoopOptions();
    options.setConf(conf);
    // Build: --create <name> -- import <jobArgs...>
    List<String> cmd = new ArrayList<String>();
    cmd.add("--create");
    cmd.add(jobName);
    cmd.add("--");
    cmd.add("import");
    cmd.addAll(jobArgs);
    Sqoop jobCreator = new Sqoop(new JobTool(), conf, options);
    int status = Sqoop.runSqoop(jobCreator, cmd.toArray(new String[0]));
    assertEquals("Failure to create job", 0, status);
  } catch (Exception e) {
    LOG.error("Got exception running Sqoop to create job: "
        + StringUtils.stringifyException(e));
    throw new RuntimeException(e);
  }
}
/**
 * Run the specified job.
 */
private void runJob(String jobName, Configuration conf) {
  try {
    SqoopOptions options = new SqoopOptions();
    options.setConf(conf);
    // Build: --exec <name>
    List<String> cmd = new ArrayList<String>();
    cmd.add("--exec");
    cmd.add(jobName);
    Sqoop jobRunner = new Sqoop(new JobTool(), conf, options);
    int status = Sqoop.runSqoop(jobRunner, cmd.toArray(new String[0]));
    assertEquals("Failure to run job", 0, status);
  } catch (Exception e) {
    LOG.error("Got exception running Sqoop to run job: "
        + StringUtils.stringifyException(e));
    throw new RuntimeException(e);
  }
}
@Test public void testExecJob() throws IOException { Configuration conf = new Configuration(); //creates the job JobTool jobToolCreate = new JobTool(); Sqoop sqoopCreate = new Sqoop(jobToolCreate, conf); String[] argsCreate = getCreateJob(metaConnectString, metaUser, metaPass); Sqoop.runSqoop(sqoopCreate, argsCreate); //executes the job JobTool jobToolExec = new JobTool(); Sqoop sqoopExec = new Sqoop(jobToolExec); String[] argsExec = getExecJob(metaConnectString, metaUser, metaPass); assertEquals("Error executing Sqoop Job", 0, Sqoop.runSqoop(sqoopExec, argsExec)); }
@Test public void testFailedImportDueToIOException() throws IOException { // Make sure that if a MapReduce job to do the import fails due // to an IOException, we tell the user about it. // Create a table to attempt to import. createTableForColType("VARCHAR(32)", "'meep'"); Configuration conf = new Configuration(); LogFactory.getLog(getClass()).info( " getWarehouseDir() " + getWarehouseDir()); // Make the output dir exist so we know the job will fail via IOException. Path outputPath = new Path(new Path(getWarehouseDir()), getTableName()); FileSystem fs = FileSystem.getLocal(conf); fs.mkdirs(outputPath); assertTrue(fs.exists(outputPath)); String[] argv = getArgv(true, new String[] { "DATA_COL0" }, conf); Sqoop importer = new Sqoop(new ImportTool()); try { int ret = Sqoop.runSqoop(importer, argv); assertTrue("Expected ImportException running this job.", 1==ret); } catch (Exception e) { // In debug mode, IOException is wrapped in RuntimeException. LOG.info("Got exceptional return (expected: ok). msg is: " + e); } }
@Test public void testDeleteJob() throws IOException { Configuration conf = new Configuration(); //Creates the job JobTool jobToolCreate = new JobTool(); Sqoop sqoopCreate = new Sqoop(jobToolCreate, conf); String[] argsCreate = getCreateJob(metaConnectString, metaUser, metaPass); Sqoop.runSqoop(sqoopCreate, argsCreate); //Deletes the job JobTool jobToolDelete = new JobTool(); Sqoop sqoopExec = new Sqoop(jobToolDelete); String[] argsDelete = getDeleteJob(metaConnectString, metaUser, metaPass); assertEquals("Error deleting Sqoop Job", 0, Sqoop.runSqoop(sqoopExec, argsDelete)); } }
@Test public void testFailedNoColumns() throws IOException { // Make sure that if a MapReduce job to do the import fails due // to an IOException, we tell the user about it. // Create a table to attempt to import. createTableForColType("VARCHAR(32)", "'meep'"); Configuration conf = new Configuration(); // Make the output dir exist so we know the job will fail via IOException. Path outputPath = new Path(new Path(getWarehouseDir()), getTableName()); FileSystem fs = FileSystem.getLocal(conf); fs.mkdirs(outputPath); assertTrue(fs.exists(outputPath)); String [] argv = getArgv(true, new String [] { "" }, conf); Sqoop importer = new Sqoop(new ImportTool()); try { int ret = Sqoop.runSqoop(importer, argv); assertTrue("Expected job to fail due to no colnames.", 1==ret); } catch (Exception e) { // In debug mode, IOException is wrapped in RuntimeException. LOG.info("Got exceptional return (expected: ok). msg is: " + e); } }
/** Executes the previously created Sqoop job and asserts it succeeds. */
private void execJob() {
  org.apache.sqoop.Sqoop jobExecutor =
      new org.apache.sqoop.Sqoop(new JobTool());
  String[] execArgs = getExecJob(metaConnectString, metaUser, metaPass);
  int status = org.apache.sqoop.Sqoop.runSqoop(jobExecutor, execArgs);
  assertEquals("Sqoop Job did not execute properly", 0, status);
}
@Test public void testDuplicateColumns() throws IOException { // Make sure that if a MapReduce job to do the import fails due // to an IOException, we tell the user about it. // Create a table to attempt to import. createTableForColType("VARCHAR(32)", "'meep'"); Configuration conf = new Configuration(); // Make the output dir exist so we know the job will fail via IOException. Path outputPath = new Path(new Path(getWarehouseDir()), getTableName()); FileSystem fs = FileSystem.getLocal(conf); fs.mkdirs(outputPath); assertTrue(fs.exists(outputPath)); String[] argv = getArgv(true, new String[] { "DATA_COL0,DATA_COL0" }, conf); Sqoop importer = new Sqoop(new ImportTool()); try { int ret = Sqoop.runSqoop(importer, argv); assertTrue("Expected job to fail!", 1 == ret); } catch (Exception e) { // In debug mode, ImportException is wrapped in RuntimeException. LOG.info("Got exceptional return (expected: ok). msg is: " + e); } }
@Test public void testFailedIllegalColumns() throws IOException { // Make sure that if a MapReduce job to do the import fails due // to an IOException, we tell the user about it. // Create a table to attempt to import. createTableForColType("VARCHAR(32)", "'meep'"); Configuration conf = new Configuration(); // Make the output dir exist so we know the job will fail via IOException. Path outputPath = new Path(new Path(getWarehouseDir()), getTableName()); FileSystem fs = FileSystem.getLocal(conf); fs.mkdirs(outputPath); assertTrue(fs.exists(outputPath)); // DATA_COL0 ok, by zyzzyva not good String [] argv = getArgv(true, new String [] { "DATA_COL0", "zyzzyva" }, conf); Sqoop importer = new Sqoop(new ImportTool()); try { int ret = Sqoop.runSqoop(importer, argv); assertTrue("Expected job to fail due bad colname.", 1==ret); } catch (Exception e) { // In debug mode, IOException is wrapped in RuntimeException. LOG.info("Got exceptional return (expected: ok). msg is: " + e); } }
/** Verifies a Sqoop job can be created in the metastore. */
@Test
public void testCreateJob() throws IOException {
  org.apache.sqoop.tool.JobTool jobTool = new org.apache.sqoop.tool.JobTool();
  org.apache.sqoop.Sqoop jobCreator = new Sqoop(jobTool);
  String[] createArgs = getCreateJob(metaConnectString, metaUser, metaPass);
  int status = Sqoop.runSqoop(jobCreator, createArgs);
  assertEquals("Error creating Sqoop Job", 0, status);
}
/** With autoconnect disabled and no explicit metastore, the job tool must fail. */
@Test
public void testJobToolWithAutoConnectDisabledFails() throws IOException {
  String[] arguments = new ArgumentArrayBuilder()
      .withProperty("sqoop.metastore.client.enable.autoconnect", "false")
      .build();
  assertEquals(STATUS_FAILURE, Sqoop.runSqoop(sqoop, arguments));
}
private void runSqoopImport(String[] importCols) { Configuration conf = getConf(); SqoopOptions opts = getSqoopOptions(conf); String username = MSSQLTestUtils.getDBUserName(); String password = MSSQLTestUtils.getDBPassWord(); opts.setUsername(username); opts.setPassword(password); // run the tool through the normal entry-point. int ret; try { Sqoop importer = new Sqoop(new ImportTool(), conf, opts); ret = Sqoop.runSqoop(importer, getArgv(true, importCols, conf)); } catch (Exception e) { LOG.error("Got exception running Sqoop: " + e.toString()); throw new RuntimeException(e); } // expect a successful return. assertEquals("Failure during job", 0, ret); }
/**
 * Imports TABLE_NAME into the given target directory with the requested
 * file layout, failing the test if the import does not succeed.
 *
 * @param targetDir warehouse-relative output directory
 * @param fileLayout storage format for the imported data
 */
private void importData(String targetDir, SqoopOptions.FileLayout fileLayout) {
  SqoopOptions options = getSqoopOptions(newConf());
  options.setTableName(TABLE_NAME);
  options.setNumMappers(1);
  options.setFileLayout(fileLayout);
  options.setDeleteMode(true);
  options.setTargetDir(
      new Path(new Path(getWarehouseDir()), targetDir).toString());
  Sqoop importJob = new Sqoop(new ImportTool(), options.getConf(), options);
  int status = Sqoop.runSqoop(importJob, new String[0]);
  if (status != 0) {
    fail("Initial import failed with exit code " + status);
  }
}
/**
 * Runs the job tool's --list command against the autoconnect metastore
 * configured with the correct credentials, returning the exit status.
 */
private int runJobToolWithAutoConnectUrlAndCorrectUsernamePasswordSpecified() {
  String[] arguments = new ArgumentArrayBuilder()
      .withProperty("sqoop.metastore.client.autoconnect.url",
          HsqldbTestServer.getUrl())
      .withProperty("sqoop.metastore.client.autoconnect.username", TEST_USER)
      .withProperty("sqoop.metastore.client.autoconnect.password", TEST_PASSWORD)
      .withOption("list")
      .build();
  return Sqoop.runSqoop(sqoop, arguments);
}