/**
 * Builds a unique temporary directory path under {@code NAME} in the current
 * filesystem's working directory.
 *
 * @param withDirCreated when true the unique directory is created on the
 *        filesystem; when false only the (not-yet-existing) path is returned
 * @return path of the unique temp directory
 * @throws IOException if the filesystem cannot be obtained or a directory
 *         cannot be created
 */
private Path generateUniqTempDir(boolean withDirCreated) throws IOException {
  FileSystem fs = FSUtils.getCurrentFileSystem(getConf());
  Path dir = new Path(fs.getWorkingDirectory(), NAME);
  // Fail fast instead of ignoring mkdirs' boolean result: a silently missing
  // parent would otherwise surface later as a confusing downstream error.
  if (!fs.exists(dir) && !fs.mkdirs(dir)) {
    throw new IOException("Failed to create temp directory " + dir);
  }
  // A random UUID keeps concurrent jobs from colliding on the same dir name.
  Path newDir = new Path(dir, UUID.randomUUID().toString());
  if (withDirCreated) {
    if (!fs.mkdirs(newDir)) {
      throw new IOException("Failed to create temp directory " + newDir);
    }
  }
  return newDir;
}
// NOTE(review): fragment — the enclosing method head and the closing braces are
// outside this view.
// Log the equivalent shell invocation, then bulk-load the generated HFiles into
// the destination table.
LOG.info("command: ./bin/hbase org.apache.hadoop.hbase.tool.LoadIncrementalHFiles {} {}",
    this.bulkloadDir.toString(), this.dstTableName);
code = new LoadIncrementalHFiles(this.getConf())
    .run(new String[] { this.bulkloadDir.toString(), this.dstTableName });
if (code == 0) {
  // Delete the staging dir only after a successful load, so a failed
  // LoadIncrementalHFiles run can be retried against the same HFiles.
  FileSystem fs = FSUtils.getCurrentFileSystem(getConf());
  if (!fs.delete(this.bulkloadDir, true)) {
    LOG.error("Deleting folder " + bulkloadDir + " failed!");
// NOTE(review): fragment — the method head and the `if` branch matching the
// `} else {` below are outside this view.
// Create the MR job; the job name may be overridden via JOB_NAME_CONF_KEY.
Job job = Job.getInstance(getConf(), getConf().get(JOB_NAME_CONF_KEY, jobName));
job.setJarByClass(CopyTable.class);
Scan scan = new Scan();
scan.setCaching(cacheRow);
} else {
  // Fall back to the configured client scanner caching (default 100).
  scan.setCaching(getConf().getInt(HConstants.HBASE_CLIENT_SCANNER_CACHING, 100));
LOG.info("HFiles will be stored at " + this.bulkloadDir);
HFileOutputFormat2.setOutputPath(job, bulkloadDir);
// try-with-resources closes the connection/admin once the incremental-load
// map output has been configured.
try (Connection conn = ConnectionFactory.createConnection(getConf());
    Admin admin = conn.getAdmin()) {
  HFileOutputFormat2.configureIncrementalLoadMap(job,
@Override public int run(String[] args) throws Exception { Job job = createSubmittableJob(args); if (job == null) return 1; if (!job.waitForCompletion(true)) { LOG.info("Map-reduce job failed!"); if (bulkload) { LOG.info("Files are not bulkloaded!"); } return 1; } int code = 0; if (bulkload) { code = new LoadIncrementalHFiles(this.getConf()).run(new String[]{this.bulkloadDir.toString(), this.dstTableName}); if (code == 0) { // bulkloadDir is deleted only LoadIncrementalHFiles was successful so that one can rerun // LoadIncrementalHFiles. FileSystem fs = FileSystem.get(this.getConf()); if (!fs.delete(this.bulkloadDir, true)) { LOG.error("Deleting folder " + bulkloadDir + " failed!"); code = 1; } } } return code; } }
@Override public int run(String[] args) throws Exception { Job job = createSubmittableJob(args); if (job == null) return 1; if (!job.waitForCompletion(true)) { LOG.info("Map-reduce job failed!"); if (bulkload) { LOG.info("Files are not bulkloaded!"); } return 1; } int code = 0; if (bulkload) { code = new LoadIncrementalHFiles(this.getConf()) .run(new String[] { this.bulkloadDir.toString(), this.dstTableName }); if (code == 0) { // bulkloadDir is deleted only LoadIncrementalHFiles was successful so that one can rerun // LoadIncrementalHFiles. FileSystem fs = FSUtils.getCurrentFileSystem(getConf()); if (!fs.delete(this.bulkloadDir, true)) { LOG.error("Deleting folder " + bulkloadDir + " failed!"); code = 1; } } } return code; } }
@Override public int run(String[] args) throws Exception { String[] otherArgs = new GenericOptionsParser(getConf(), args).getRemainingArgs(); Job job = createSubmittableJob(otherArgs); if (job == null) return 1; if (!job.waitForCompletion(true)) { LOG.info("Map-reduce job failed!"); if (bulkload) { LOG.info("Files are not bulkloaded!"); } return 1; } int code = 0; if (bulkload) { code = new LoadIncrementalHFiles(this.getConf()).run(new String[]{this.bulkloadDir.toString(), this.dstTableName}); if (code == 0) { // bulkloadDir is deleted only LoadIncrementalHFiles was successful so that one can rerun // LoadIncrementalHFiles. FileSystem fs = FileSystem.get(this.getConf()); if (!fs.delete(this.bulkloadDir, true)) { LOG.error("Deleting folder " + bulkloadDir + " failed!"); code = 1; } } } return code; } }
// NOTE(review): fragment — the enclosing method and the remainder of the try
// block are outside this view.
// Create the MR job; the job name may be overridden via JOB_NAME_CONF_KEY.
Job job = Job.getInstance(getConf(), getConf().get(JOB_NAME_CONF_KEY, NAME + "_" + tableName));
FileSystem fs = FileSystem.get(getConf());
// presumably `rand` is used further down to build a unique output dir under
// `root` — TODO confirm against the rest of the method
Random rand = new Random();
Path root = new Path(fs.getWorkingDirectory(), "copytable");
// try-with-resources closes the connection/table after configuring the
// incremental-load map output against the destination table.
try (Connection conn = ConnectionFactory.createConnection(getConf());
    Table htable = conn.getTable(TableName.valueOf(dstTableName))) {
  HFileOutputFormat2.configureIncrementalLoadMap(job, htable);
// NOTE(review): fragment — the method head and the `if` branch matching the
// `} else {` below are outside this view.
// Create the MR job; the job name may be overridden via JOB_NAME_CONF_KEY.
Job job = Job.getInstance(getConf(), getConf().get(JOB_NAME_CONF_KEY, NAME + "_" + tableName));
job.setJarByClass(CopyTable.class);
Scan scan = new Scan();
scan.setCaching(cacheRow);
} else {
  // Fall back to the configured client scanner caching (default 100).
  scan.setCaching(getConf().getInt(HConstants.HBASE_CLIENT_SCANNER_CACHING, 100));
FileSystem fs = FileSystem.get(getConf());
// presumably `rand` is used further down to build a unique output dir under
// `root` — TODO confirm against the rest of the method
Random rand = new Random();
Path root = new Path(fs.getWorkingDirectory(), "copytable");
// try-with-resources closes the connection/admin after configuring the
// incremental-load map output.
try (Connection conn = ConnectionFactory.createConnection(getConf());
    Admin admin = conn.getAdmin()) {
  HFileOutputFormat2.configureIncrementalLoadMap(job,
// NOTE(review): fragment — the method head and the `if` branch matching the
// `} else {` below are outside this view.
// Create the MR job; the job name may be overridden via JOB_NAME_CONF_KEY.
Job job = Job.getInstance(getConf(), getConf().get(JOB_NAME_CONF_KEY, NAME + "_" + tableName));
job.setJarByClass(CopyTable.class);
Scan scan = new Scan();
scan.setCaching(cacheRow);
} else {
  // Fall back to the configured client scanner caching (default 100).
  scan.setCaching(getConf().getInt(HConstants.HBASE_CLIENT_SCANNER_CACHING, 100));
FileSystem fs = FSUtils.getCurrentFileSystem(getConf());
// presumably `rand` is used further down to build a unique output dir under
// `root` — TODO confirm against the rest of the method
Random rand = new Random();
Path root = new Path(fs.getWorkingDirectory(), "copytable");
// try-with-resources closes the connection/admin after configuring the
// incremental-load map output.
try (Connection conn = ConnectionFactory.createConnection(getConf());
    Admin admin = conn.getAdmin()) {
  HFileOutputFormat2.configureIncrementalLoadMap(job,