/**
 * Configures the given job to run with the supplied HBase scan.
 *
 * <p>Serializes the scan into the job configuration under
 * {@link TableInputFormat#SCAN} and ships all required dependency jars
 * with the job.
 *
 * @param scan the scan to serialize into the job configuration
 * @param job the job to configure
 * @throws IOException if the scan cannot be serialized or the dependency
 *     jars cannot be added
 */
public static void initTableScan(Scan scan, Job job) throws IOException {
  // Shipping dependency jars and setting the scan property are independent
  // configuration steps; order does not matter.
  addAllDependencyJars(job);
  job.getConfiguration().set(TableInputFormat.SCAN, convertScanToString(scan));
}
/**
 * Like {@code TableMapReduceUtil.initTableMapperJob}, but accepts arbitrary
 * classes for the map output key and value rather than only Writables. This
 * allows the job to use AvroSerialization instead of being limited to
 * WritableSerialization.
 *
 * @param table the HBase table to read from
 * @param scan the scan describing which rows/columns to read
 * @param mapper the table mapper class to run
 * @param outputKeyClass the map output key class, or {@code null} to leave
 *     the job's default unchanged
 * @param outputValueClass the map output value class, or {@code null} to
 *     leave the job's default unchanged
 * @param job the job to configure
 * @throws IOException if the scan cannot be serialized or the dependency
 *     jars cannot be added
 */
public static void initGenericTableMapperJob(String table, Scan scan,
    Class<? extends TableMapper<?, ?>> mapper, Class<?> outputKeyClass,
    Class<?> outputValueClass, Job job) throws IOException {
  job.setMapperClass(mapper);
  // Null means "keep the job's existing default" for either class.
  if (outputKeyClass != null) {
    job.setMapOutputKeyClass(outputKeyClass);
  }
  if (outputValueClass != null) {
    job.setMapOutputValueClass(outputValueClass);
  }
  job.getConfiguration().set(TableInputFormat.INPUT_TABLE, table);
  job.getConfiguration().set(TableInputFormat.SCAN, convertScanToString(scan));
  addAllDependencyJars(job);
}