/**
 * Runs the bulk load against either a directory of HFiles or an explicit
 * family-to-files map, then converts the returned keys from the deprecated
 * tool's {@code LoadQueueItem} type to this class's {@code LoadQueueItem}.
 * Exactly one delegate overload is invoked: {@code dirPath} wins when non-null,
 * otherwise {@code map} is used.
 *
 * @param dirPath   directory containing the HFiles to load, or null to use {@code map}
 * @param map       mapping of column family to HFile paths (consulted only when
 *                  {@code dirPath} is null)
 * @param tableName destination table
 * @return the delegate's result re-keyed with this class's {@code LoadQueueItem}
 * @throws IOException propagated from the delegate load
 */
public Map<LoadQueueItem, ByteBuffer> run(String dirPath, Map<byte[], List<Path>> map, TableName tableName) throws IOException {
  final Map<org.apache.hadoop.hbase.tool.LoadIncrementalHFiles.LoadQueueItem, ByteBuffer> toolResult =
      (dirPath != null) ? run(dirPath, tableName) : run(map, tableName);
  final Map<LoadQueueItem, ByteBuffer> converted = new HashMap<>();
  for (Map.Entry<org.apache.hadoop.hbase.tool.LoadIncrementalHFiles.LoadQueueItem, ByteBuffer> entry : toolResult.entrySet()) {
    org.apache.hadoop.hbase.tool.LoadIncrementalHFiles.LoadQueueItem item = entry.getKey();
    converted.put(new LoadQueueItem(item.getFamily(), item.getFilePath()), entry.getValue());
  }
  return converted;
} }
@Override public int run(String[] args) throws Exception { String[] otherArgs = new GenericOptionsParser(getConf(), args).getRemainingArgs(); Job job = createSubmittableJob(otherArgs); if (job == null) return 1; if (!job.waitForCompletion(true)) { LOG.info("Map-reduce job failed!"); if (bulkload) { LOG.info("Files are not bulkloaded!"); } return 1; } int code = 0; if (bulkload) { code = new LoadIncrementalHFiles(this.getConf()).run(new String[]{this.bulkloadDir.toString(), this.dstTableName}); if (code == 0) { // bulkloadDir is deleted only LoadIncrementalHFiles was successful so that one can rerun // LoadIncrementalHFiles. FileSystem fs = FileSystem.get(this.getConf()); if (!fs.delete(this.bulkloadDir, true)) { LOG.error("Deleting folder " + bulkloadDir + " failed!"); code = 1; } } } return code; } }