loader.doBulkLoad(new Path(outFile), hDataTable);
/**
 * Tool entry point: bulk-loads pre-generated HFiles from a directory into an HBase
 * table, creating the table first if it does not already exist.
 *
 * @param args args[0] = directory containing the HFiles, args[1] = target table name
 * @return 0 on success, -1 on bad arguments
 * @throws Exception if table creation or the bulk load fails
 */
@Override
public int run(String[] args) throws Exception {
    if (args.length != 2) {
        usage();
        return -1;
    }
    String dirPath = args[0];
    String tableName = args[1];
    if (!this.doesTableExist(tableName)) {
        this.createTable(tableName, dirPath);
    }
    Path hfofDir = new Path(dirPath);
    // try-with-resources: HTable holds a client connection and must be closed;
    // the original version leaked it on every run.
    try (HTable table = new HTable(this.cfg, tableName)) {
        doBulkLoad(hfofDir, table);
    }
    return 0;
}
/**
 * Completes a bulk import: loads the staged HFiles at {@code _hfilePath} into the
 * target table, then removes the staging directory. The staging directory is only
 * deleted after a successful load, preserving the original failure behavior.
 *
 * @throws Exception if the bulk load or the staging-path cleanup fails
 */
public void completeImport() throws Exception {
    LoadIncrementalHFiles loader = new LoadIncrementalHFiles(getConfiguration());
    // Close the HTable when done — the original version leaked its connection.
    try (HTable table = new HTable(getConfiguration(), _tableName)) {
        loader.doBulkLoad(_hfilePath, table);
    }
    FileSystem fs = _hfilePath.getFileSystem(getConfiguration());
    fs.delete(_hfilePath, true);
}
@Override public void commitJob(JobContext jobContext) throws IOException { try { baseOutputCommitter.commitJob(jobContext); Configuration conf = jobContext.getConfiguration(); try { //import hfiles new LoadIncrementalHFiles(conf) .doBulkLoad(HFileOutputFormat.getOutputPath(jobContext), new HTable(conf, conf.get(HBaseConstants.PROPERTY_OUTPUT_TABLE_NAME_KEY))); } catch (Exception e) { throw new IOException("BulkLoad failed.", e); } } finally { cleanupScratch(jobContext); } }
loader.doBulkLoad(outputPath, admin, table, regionLocator);
loader.doBulkLoad(path, htable); LOG.info("Successfully loaded: " + path.toString());
/**
 * Bulk-loads the HFiles under {@code path} into the configured table, unless bulk
 * loading has been disabled via the {@code doBulkLoad} flag. Any failure is wrapped
 * in a {@link RuntimeException}.
 *
 * @param path directory of HFiles produced by the sink
 */
@VisibleForTesting
void loadIncrementalHFiles(Path path) {
    // Guard clause: honor the opt-out flag before touching HBase at all.
    if (!doBulkLoad) {
        LOG.info("Skipping bulkloading by request.");
        return;
    }
    try {
        LOG.info("Bulk loading path {}", path);
        Configuration conf = rawSink.getConfiguration();
        setPermissionsForHbaseUser(conf, path);
        LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
        TableName target = TableName.valueOf(tableName);
        // All four HBase handles are closed in reverse order by try-with-resources.
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Table table = conn.getTable(target);
             RegionLocator locator = conn.getRegionLocator(target);
             Admin admin = conn.getAdmin()) {
            loader.doBulkLoad(path, admin, table, locator);
        }
    } catch (Exception ex) {
        throw new RuntimeException(ex);
    }
}
HalyardTableUtils.truncateTable(hTable).close(); new LoadIncrementalHFiles(getConf()).doBulkLoad(new Path(workdir), hTable); LOG.info("Bulk Load Completed.."); return 0;
loader.doBulkLoad(dir, table);
TableMapReduceUtil.addDependencyJars(job); if (job.waitForCompletion(true)) { new LoadIncrementalHFiles(getConf()).doBulkLoad(new Path(cmd.getOptionValue('f')), hTable); LOG.info("Bulk Delete Completed.."); return 0;
new LoadIncrementalHFiles(getConf()).doBulkLoad(outPath, hTable); LOG.log(Level.INFO, "Stage #{0} of {1} completed..", new Object[]{stage, stages}); } else {
throw new IOException(e.getMessage(), e); loadIncrementalHFiles.doBulkLoad(stagingResultDir, htable);
throw new IOException(e.getMessage(), e); loadIncrementalHFiles.doBulkLoad(stagingResultDir, htable);
/**
 * Tool entry point: bulk-loads HFiles from a directory into an HBase table. When the
 * table is missing it is either created (if {@code CREATE_TABLE_CONF_KEY} is "yes",
 * the default) or a {@link TableNotFoundException} is thrown.
 *
 * @param args args[0] = HFile directory, args[1] = target table name
 * @return 0 on success, -1 on bad usage
 * @throws Exception if initialization, table creation, or the bulk load fails
 */
@Override
public int run(String[] args) throws Exception {
    if (args.length != 2) {
        usage();
        return -1;
    }
    initialize();
    String dirPath = args[0];
    TableName tableName = TableName.valueOf(args[1]);
    if (!this.doesTableExist(tableName)) {
        boolean autoCreate =
            "yes".equalsIgnoreCase(getConf().get(CREATE_TABLE_CONF_KEY, "yes"));
        if (!autoCreate) {
            String errorMsg = format("Table '%s' does not exist.", tableName);
            LOG.error(errorMsg);
            throw new TableNotFoundException(errorMsg);
        }
        this.createTable(tableName, dirPath);
    }
    Path hfofDir = new Path(dirPath);
    // Connection and table are released automatically by try-with-resources.
    try (Connection connection = ConnectionFactory.createConnection(getConf());
         HTable table = (HTable) connection.getTable(tableName)) {
        doBulkLoad(hfofDir, table);
    }
    return 0;
}
new LoadIncrementalHFiles(conf).doBulkLoad(hfileOutputPath, destHTable); } catch (Exception e) { throw new IOException("Bulkloader couldn't run", e);
new LoadIncrementalHFiles(conf).doBulkLoad(testDir, table);
loader.doBulkLoad(dir, table);
doBulkLoad(hfofDir, admin, t, rl);
new LoadIncrementalHFiles(conf).doBulkLoad(testDir, table);