/**
 * Extends the additional arguments with the Oracle test credentials and
 * delegates the rest of the argument construction to the superclass.
 */
@Override
protected String[] getArgv(boolean includeHadoopFlags, int rowsPerStatement,
    int statementsPerTx, String... additionalArgv) {
  String[] argsWithCredentials = newStrArray(additionalArgv,
      "--username", OracleUtils.ORACLE_USER_NAME,
      "--password", OracleUtils.ORACLE_USER_PASS);
  return super.getArgv(includeHadoopFlags, rowsPerStatement, statementsPerTx,
      argsWithCredentials);
}
/**
 * Appends the credentials and the {@code --direct} flag to the caller's
 * arguments, then delegates to the superclass implementation.
 */
@Override
protected String[] getArgv(boolean includeHadoopFlags, int rowsPerStatement,
    int statementsPerTx, String... additionalArgv) {
  ArrayList<String> argList = new ArrayList<String>(Arrays.asList(additionalArgv));
  argList.addAll(Arrays.asList(
      "--username", getUserName(),
      "--password", PASSWORD,
      "--direct"));
  return super.getArgv(includeHadoopFlags, rowsPerStatement, statementsPerTx,
      argList.toArray(new String[argList.size()]));
}
/**
 * Adds the pg_bulkload-specific options (binary path, connection manager,
 * staging table) plus the test credentials before delegating to the
 * superclass.
 */
@Override
protected String[] getArgv(boolean includeHadoopFlags, int rowsPerStatement,
    int statementsPerTx, String... additionalArgv) {
  ArrayList<String> argList = new ArrayList<String>(Arrays.asList(additionalArgv));
  argList.addAll(Arrays.asList(
      "-D", "pgbulkload.bin=" + PG_BULKLOAD,
      "--username", getUserName(),
      "--connection-manager", "org.apache.sqoop.manager.PGBulkloadManager",
      "--staging-table", "dummy",
      "--clear-staging-table"));
  return super.getArgv(includeHadoopFlags, rowsPerStatement, statementsPerTx,
      argList.toArray(new String[argList.size()]));
}
@Override protected String[] getArgv(boolean includeHadoopFlags, int rowsPerStmt, int statementsPerTx, String... additionalArgv) { // we need different class names per test, or the classloader will // just use the old class definition even though we've compiled a // new one! String[] args = newStrArray(additionalArgv, "--" + ExportTool.CALL_ARG, PROCEDURE_NAME, "--" + ExportTool.CLASS_NAME_ARG, name.getMethodName(), "--" + ExportTool.CONN_MANAGER_CLASS_NAME, GenericJdbcManager.class.getName(), "--" + ExportTool.DRIVER_ARG, Driver.class.getName()); return super .getArgv(includeHadoopFlags, rowsPerStmt, statementsPerTx, args); }
/**
 * Injects the MySQL credentials and the {@code --direct} flag into the
 * argument list, then delegates to the superclass implementation.
 */
@Override
protected String[] getArgv(boolean includeHadoopFlags, int rowsPerStatement,
    int statementsPerTx, String... additionalArgv) {
  String[] directArgs = newStrArray(
      mySQLTestUtils.addUserNameAndPasswordToArgs(additionalArgv),
      "--direct");
  return super.getArgv(includeHadoopFlags, rowsPerStatement, statementsPerTx,
      directArgs);
}
/**
 * Injects the MySQL credentials into the argument list and delegates to the
 * superclass implementation.
 */
@Override
protected String[] getArgv(boolean includeHadoopFlags, int rowsPerStatement,
    int statementsPerTx, String... additionalArgv) {
  String[] argsWithCredentials =
      newStrArray(mySqlTestUtils.addUserNameAndPasswordToArgs(additionalArgv));
  return super.getArgv(includeHadoopFlags, rowsPerStatement, statementsPerTx,
      argsWithCredentials);
}
/**
 * Appends the CUBRID test credentials to the caller-supplied arguments and
 * delegates to the superclass implementation.
 */
@Override
protected String[] getArgv(boolean includeHadoopFlags, int rowsPerStatement,
    int statementsPerTx, String... additionalArgv) {
  String[] argsWithCredentials = newStrArray(additionalArgv,
      "--username", CubridTestUtils.getCurrentUser(),
      "--password", CubridTestUtils.getPassword());
  return super.getArgv(includeHadoopFlags, rowsPerStatement, statementsPerTx,
      argsWithCredentials);
}
/** Export 10 rows from gzipped text files. */
@Test
public void testGzipExport() throws IOException, SQLException {
  LOG.info("Beginning gzip export test");

  final int totalRecords = 10;
  // Write one gzip-compressed input file, then export it into a fresh table.
  createTextFile(0, totalRecords, true);
  createTable();
  runExport(getArgv(true, 10, 10));
  verifyExport(totalRecords);

  LOG.info("Complete gzip export test");
}
/**
 * Runs an export over several input files and verifies that every record
 * from every file landed in the table.
 *
 * @param numFiles      number of input files to generate
 * @param recordsPerMap records written into each file
 * @param numMaps       number of map tasks to request via {@code -m}
 * @param argv          extra command-line arguments for the export
 */
protected void multiFileTest(int numFiles, int recordsPerMap, int numMaps,
    String... argv) throws IOException, SQLException {
  final int expectedTotal = numFiles * recordsPerMap;
  try {
    LOG.info("Beginning test: numFiles=" + numFiles + "; recordsPerMap="
        + recordsPerMap + "; numMaps=" + numMaps);

    for (int fileNum = 0; fileNum < numFiles; fileNum++) {
      createTextFile(fileNum, recordsPerMap, false);
    }
    createTable();
    runExport(getArgv(true, 10, 10, newStrArray(argv, "-m", "" + numMaps)));
    verifyExport(expectedTotal);
  } finally {
    LOG.info("multi-file test complete");
  }
}
/**
 * Builds the base arguments via the superclass, appends the Netezza test
 * credentials, and finally appends the caller-supplied extra arguments.
 *
 * @return the combined argument array in the order: base args, credentials,
 *         then {@code additionalArgv}
 */
@Override
protected String[] getArgv(boolean includeHadoopFlags, int rowsPerStatement,
    int statementsPerTx, String... additionalArgv) {
  String[] argV = super.getArgv(includeHadoopFlags, rowsPerStatement,
      statementsPerTx);
  String[] subArgV = newStrArray(argV,
      "--username", NetezzaTestUtils.getNZUser(),
      "--password", NetezzaTestUtils.getNZPassword());
  // Concatenate with System.arraycopy instead of hand-rolled copy loops.
  String[] newArgV = new String[subArgV.length + additionalArgv.length];
  System.arraycopy(subArgV, 0, newArgV, 0, subArgV.length);
  System.arraycopy(additionalArgv, 0, newArgV, subArgV.length,
      additionalArgv.length);
  return newArgV;
}
/** Run 2 mappers, make sure all records load in correctly. */
@Test
public void testMultiMapTextExport() throws IOException, SQLException {
  final int recordsPerMap = 10;
  final int numFiles = 2;

  // One uncompressed input file per mapper.
  for (int fileNum = 0; fileNum < numFiles; fileNum++) {
    createTextFile(fileNum, recordsPerMap, false);
  }
  createTable();
  runExport(getArgv(true, 10, 10));
  verifyExport(recordsPerMap * numFiles);
}
/**
 * Exports 10 records into a table with the given name and verifies that all
 * of them arrived.
 *
 * @param tableName name to use for the target table
 */
protected void testExportToTableWithName(String tableName)
    throws IOException, SQLException {
  final int totalRecords = 10;
  setCurTableName(tableName);
  createTextFile(0, totalRecords, false);
  createTable();
  runExport(getArgv(true, 10, 10));
  verifyExport(totalRecords);
}
/**
 * Ensure that we use multiple transactions in a single mapper.
 */
@Test
public void testMultiTransaction() throws IOException, SQLException {
  final int totalRecords = 20;
  createTextFile(0, totalRecords, true);
  createTable();
  // 5 rows per statement, 2 statements per transaction -> forces several
  // commits over the 20 records.
  runExport(getArgv(true, 5, 2));
  verifyExport(totalRecords);
}
/**
 * Ensure that when we don't force a commit with a statement cap,
 * it happens anyway.
 */
@Test
public void testUnlimitedTransactionSize() throws IOException, SQLException {
  final int totalRecords = 20;
  createTextFile(0, totalRecords, true);
  createTable();
  // -1 disables the statements-per-transaction cap entirely.
  runExport(getArgv(true, 5, -1));
  verifyExport(totalRecords);
}
/**
 * Ensure that we use multiple statements in a transaction.
 */
@Test
public void testMultiStatement() throws IOException, SQLException {
  final int totalRecords = 20;
  createTextFile(0, totalRecords, true);
  createTable();
  // 10 rows per statement over 20 records -> at least two statements.
  runExport(getArgv(true, 10, 10));
  verifyExport(totalRecords);
}
/**
 * Exercises the testMultiTransaction test with staging table specified.
 * @throws IOException
 * @throws SQLException
 */
@Test
public void testMultiTransactionWithStaging()
    throws IOException, SQLException {
  final int totalRecords = 20;
  createTextFile(0, totalRecords, true);
  createTable();
  createStagingTable();
  runExport(getArgv(true, 5, 2, "--staging-table", getStagingTableName()));
  verifyExport(totalRecords);
}
/**
 * Run 2 mappers with staging enabled,
 * make sure all records load in correctly.
 */
@Test
public void testMultiMapTextExportWithStaging()
    throws IOException, SQLException {
  final int recordsPerMap = 10;
  final int numFiles = 2;

  // One uncompressed input file per mapper.
  for (int fileNum = 0; fileNum < numFiles; fileNum++) {
    createTextFile(fileNum, recordsPerMap, false);
  }
  createTable();
  createStagingTable();
  runExport(getArgv(true, 10, 10, "--staging-table", getStagingTableName()));
  verifyExport(recordsPerMap * numFiles);
}
@Test public void testIntCol() throws IOException, SQLException { final int TOTAL_RECORDS = 10; // generate a column equivalent to rownum. ColumnGenerator gen = new ColumnGenerator() { public String getExportText(int rowNum) { return "" + rowNum; } public String getVerifyText(int rowNum) { return "" + rowNum; } public String getType() { return "INTEGER"; } }; createTextFile(0, TOTAL_RECORDS, false, gen); createTable(gen); runExport(getArgv(true, 10, 10)); verifyExport(TOTAL_RECORDS); assertColMinAndMax(forIdx(0), gen); }
@Test public void testBigIntCol() throws IOException, SQLException { final int TOTAL_RECORDS = 10; // generate a column that won't fit in a normal int. ColumnGenerator gen = new ColumnGenerator() { public String getExportText(int rowNum) { long val = (long) rowNum * 1000000000; return "" + val; } public String getVerifyText(int rowNum) { long val = (long) rowNum * 1000000000; return "" + val; } public String getType() { return getBigIntType(); } }; createTextFile(0, TOTAL_RECORDS, false, gen); createTable(gen); runExport(getArgv(true, 10, 10)); verifyExport(TOTAL_RECORDS); assertColMinAndMax(forIdx(0), gen); }
/** Exports date and time columns and checks their min/max values. */
@Test
public void testDatesAndTimes() throws IOException, SQLException {
  final int totalRecords = 10;

  ColumnGenerator dateGen = getDateColumnGenerator();
  ColumnGenerator timeGen = getTimeColumnGenerator();

  createTextFile(0, totalRecords, false, dateGen, timeGen);
  createTable(dateGen, timeGen);
  runExport(getArgv(true, 10, 10));
  verifyExport(totalRecords);
  assertColMinAndMax(forIdx(0), dateGen);
  assertColMinAndMax(forIdx(1), timeGen);
}