/**
 * Configures the output for the job.
 *
 * <p>Fails fast when no job output has been specified on this builder.</p>
 *
 * @param job The job to configure.
 * @throws IOException If there is an error.
 */
protected void configureOutput(Job job) throws IOException {
  if (mJobOutput == null) {
    throw new JobConfigurationException("Must specify job output.");
  }
  mJobOutput.configure(job);
}
/**
 * Creates a new builder for Fiji transform jobs.
 *
 * @return a new Fiji transform job builder.
 */
public static FijiMapReduceJobBuilder create() {
  final FijiMapReduceJobBuilder builder = new FijiMapReduceJobBuilder();
  return builder;
}
/** Loads {@code mHFile} into the target table; returns null per the {@code Callable<Void>} contract. */
// NOTE(review): the trailing "};" closes an enclosing anonymous class whose header is outside this view — confirm.
public Void call() throws Exception { hFileLoader.load(mHFile, table); return null; } };
/** {@inheritDoc} */
@Override
public <T> void put(long timestamp, T value) throws IOException {
  // Fail fast with actionable messages if the producer output was not fully configured.
  // (The entity-id check previously had no message, unlike the qualifier check below.)
  Preconditions.checkNotNull(mEntityId, "Produced entity id has not been set.");
  Preconditions.checkNotNull(mQualifier,
      "Producer output configured for a map-type family, use put(qualifier, timestamp, value)");
  mTableContext.put(mEntityId, mFamily, mQualifier, timestamp, value);
}
/** {@inheritDoc} */
@Override
protected FijiMapReduceJobBuilder createJobBuilder() {
  final FijiMapReduceJobBuilder jobBuilder = FijiMapReduceJobBuilder.create();
  return jobBuilder;
}
/**
 * Configures the input for the job.
 *
 * <p>Delegates to the job input returned by {@code getJobInput()}.</p>
 *
 * @param job The job to configure.
 * @throws IOException If there is an error.
 */
protected void configureInput(Job job) throws IOException {
  getJobInput().configure(job);
}
/**
 * {@inheritDoc}
 * Cleans up job resources.
 * User overridden cleanup methods must contain super.cleanup().
 */
@Override
protected void cleanup(Context hadoopContext) throws IOException, InterruptedException {
  // Added a message so a double-close (or a setup() that never ran) is diagnosable.
  Preconditions.checkState(mTableContext != null,
      "Table context is null: either setup() did not run or cleanup() was called twice.");
  mTableContext.close();
  mTableContext = null; // Guard against reuse after close.
  super.cleanup(hadoopContext);
}
/**
 * Adds the jars from a directory into the distributed cache of a job.
 *
 * <p>Resolves the directory string to a fully-qualified path, then delegates to the
 * path-based overload.</p>
 *
 * @param job The job to configure.
 * @param jarDirectory A path to a directory of jar files.
 * @throws IOException on I/O error.
 */
public static void addJarsToDistributedCache(Job job, String jarDirectory) throws IOException {
  addJarsToDistributedCache(
      job, qualifiedPathFromString(jarDirectory, job.getConfiguration()));
}
/** {@inheritDoc} */
@Override
protected FijiMapReduceJob build(Job job) {
  final FijiMapReduceJob mrJob = FijiMapReduceJob.create(job);
  return mrJob;
}
/**
 * Creates a new HFile loader.
 *
 * @param conf The configuration to be used by the new loader.
 * @return A new loader that can be used to add HFiles to HBase tables.
 */
public static HFileLoader create(Configuration conf) {
  final HFileLoader loader = new HFileLoader(conf);
  return loader;
}
/**
 * Creates a new <code>FijiMapReduceJob</code>.
 *
 * @param job is a Hadoop {@link Job} that interacts with Fiji and will be wrapped by the new
 *     <code>FijiMapReduceJob</code>.
 * @return A new <code>FijiMapReduceJob</code> backed by a Hadoop {@link Job}.
 */
public static FijiMapReduceJob create(Job job) {
  final FijiMapReduceJob wrapped = new FijiMapReduceJob(job);
  return wrapped;
}
/**
 * Configures the output for a MapReduce job.
 *
 * <p>Installs this output's format class on the job.</p>
 *
 * @param job The job to configure.
 * @throws IOException If there is an error.
 */
public void configure(Job job) throws IOException {
  job.setOutputFormatClass(getOutputFormatClass());
}
/** {@inheritDoc} */ @Override protected final void reduce(K key, Iterable<V> values, Context hadoopContext) throws IOException, InterruptedException { // Implements the Hadoop reduce function: Preconditions.checkState(mTableContext != null, "KjiiTableContext is null because setup() " + "failed to execute. If you overrode setup(), did you call super.setup()?"); reduce(key, values, mTableContext); }
/** {@inheritDoc} Applies the superclass configuration, then registers {@code mPaths} as the job's file input paths. */
// NOTE(review): the final "}" closes an enclosing class whose header is outside this view — confirm.
@Override public void configure(Job job) throws IOException { super.configure(job); FileInputFormat.setInputPaths(job, mPaths); } }
/** {@inheritDoc} */
@Override
protected FijiMapReduceJob build(Job job) {
  final FijiMapReduceJob result = FijiMapReduceJob.create(job);
  return result;
}
/** {@inheritDoc} */
@Override
protected FijiMapReduceJob build(Job job) {
  final FijiMapReduceJob built = FijiMapReduceJob.create(job);
  return built;
}
/** {@inheritDoc} */
@Override
protected FijiMapReduceJob build(Job job) {
  // Wrap the fully-configured Hadoop job in a FijiMapReduceJob.
  return FijiMapReduceJob.create(job);
}
/** {@inheritDoc} */
@Override
protected FijiMapReduceJob build(Job job) {
  final FijiMapReduceJob jobWrapper = FijiMapReduceJob.create(job);
  return jobWrapper;
}