protected void setupOutput(final Job job, final AddElementsFromHdfs operation) {
    // Write Accumulo RFiles to the directory configured on the operation.
    job.setOutputFormatClass(AccumuloFileOutputFormat.class);
    final Path output = new Path(operation.getOutputPath());
    FileOutputFormat.setOutputPath(job, output);
}
/**
 * Verifies the HDFS directories are in a usable state before the job runs.
 * The output directory must either not exist, or exist but be empty (in which
 * case it is deleted so the MapReduce job can recreate it).
 *
 * @throws IOException              if HDFS cannot be queried
 * @throws IllegalArgumentException if the output directory exists and is not empty
 */
private void checkHdfsDirectories(final AddElementsFromHdfs operation, final AccumuloStore store) throws IOException {
    // The tool is constructed here only to obtain a fully-configured Hadoop Configuration.
    final AddElementsFromHdfsTool hdfsTool = new AddElementsFromHdfsTool(new AccumuloAddElementsFromHdfsJobFactory(), operation, store);
    LOGGER.info("Checking that the correct HDFS directories exist");
    final FileSystem fileSystem = FileSystem.get(hdfsTool.getConfig());

    final Path output = new Path(operation.getOutputPath());
    LOGGER.info("Ensuring output directory {} doesn't exist", output);
    if (fileSystem.exists(output)) {
        if (fileSystem.listFiles(output, true).hasNext()) {
            // Refuse to clobber pre-existing job output — fail fast instead.
            LOGGER.error("Output directory exists and is not empty: {}", output);
            throw new IllegalArgumentException("Output directory exists and is not empty: " + output);
        }
        LOGGER.info("Output directory exists and is empty so deleting: {}", output);
        fileSystem.delete(output, true);
    }
}
// NOTE(review): this extra closing brace was present in the original line and
// appears to close the enclosing class — confirm against the full file.
}
/**
 * Runs the {@link ImportElementsToAccumuloTool} to bulk-import the previously
 * generated files into Accumulo.
 *
 * @throws OperationException if the tool throws, or exits with a non-success response code
 */
private void importElements(final AddElementsFromHdfs operation, final AccumuloStore store) throws OperationException {
    final ImportElementsToAccumuloTool importTool = new ImportElementsToAccumuloTool(
            operation.getOutputPath(), operation.getFailurePath(), store, operation.getOptions());

    final int response;
    try {
        LOGGER.info("Running import job");
        response = ToolRunner.run(importTool, new String[0]);
        LOGGER.info("Finished running import job");
    } catch (final Exception e) {
        // Re-throw with the original exception preserved as the cause.
        LOGGER.error("Failed to import elements into Accumulo: {}", e.getMessage());
        throw new OperationException("Failed to import elements into Accumulo", e);
    }

    // The tool signals failure via its exit code rather than an exception.
    if (response != ImportElementsToAccumuloTool.SUCCESS_RESPONSE) {
        LOGGER.error("Failed to import elements into Accumulo. Response code was {}", response);
        throw new OperationException("Failed to import elements into Accumulo. Response code was: " + response);
    }
}
/**
 * Configures the given MapReduce job for adding elements from HDFS:
 * jar, job name, mapper/combiner/reducer, output format, and — unless
 * {@link NoPartitioner} was requested — the Gaffer key-range partitioner.
 *
 * @throws IOException if job configuration fails
 */
@Override
public void setupJob(final Job job, final AddElementsFromHdfs operation, final String mapperGenerator, final Store store) throws IOException {
    job.setJarByClass(getClass());
    job.setJobName(getJobName(mapperGenerator, operation.getOutputPath()));

    setupMapper(job);
    setupCombiner(job);
    setupReducer(job);
    setupOutput(job, operation);

    // NoPartitioner is an explicit opt-out; anything else (including null) is
    // replaced with the GafferKeyRangePartitioner.
    if (!NoPartitioner.class.equals(operation.getPartitioner())) {
        if (null != operation.getPartitioner()) {
            // BUG FIX: warn BEFORE overwriting the partitioner, otherwise the
            // message reports GafferKeyRangePartitioner replacing itself and the
            // user's original choice is lost from the logs.
            LOGGER.warn("Partitioner class " + operation.getPartitioner().getName() + " will be replaced with " + GafferKeyRangePartitioner.class.getName());
        }
        // BUG FIX: set the partitioner unconditionally here. Previously this was
        // only reached when a partitioner was already set, so a null partitioner
        // was never replaced even though setupPartitioner still ran.
        operation.setPartitioner(GafferKeyRangePartitioner.class);
        setupPartitioner(job, operation, (AccumuloStore) store);
    }
}