/**
 * Configure and return an existing {@link org.apache.hadoop.conf.Configuration}.
 *
 * @param conf a {@link org.apache.hadoop.conf.Configuration} object to configure.
 * @param projectId a {@link java.lang.String} object.
 * @param instanceId a {@link java.lang.String} object.
 * @return the modified {@link org.apache.hadoop.conf.Configuration} object.
 */
public static Configuration configure(Configuration conf, String projectId, String instanceId) {
  // Resolve the connection implementation first, then wire all three keys.
  String connectionImpl = getConnectionClass().getCanonicalName();
  conf.set(HBASE_CLIENT_CONNECTION_IMPL, connectionImpl);
  conf.set(BigtableOptionsFactory.PROJECT_ID_KEY, projectId);
  conf.set(BigtableOptionsFactory.INSTANCE_ID_KEY, instanceId);
  return conf;
}
/**
 * Configure and return an existing {@link org.apache.hadoop.conf.Configuration}.
 *
 * @param conf a {@link org.apache.hadoop.conf.Configuration} object to configure.
 * @param projectId a {@link java.lang.String} object.
 * @param instanceId a {@link java.lang.String} object.
 * @return the modified {@link org.apache.hadoop.conf.Configuration} object.
 */
public static Configuration configure(Configuration conf, String projectId, String instanceId) {
  // Point the HBase client at the Bigtable-backed Connection implementation
  // and record the target project/instance.
  conf.set(BigtableOptionsFactory.PROJECT_ID_KEY, projectId);
  conf.set(BigtableOptionsFactory.INSTANCE_ID_KEY, instanceId);
  Class<? extends Connection> connectionClass = getConnectionClass();
  conf.set(HBASE_CLIENT_CONNECTION_IMPL, connectionClass.getCanonicalName());
  return conf;
}
/**
 * Create a new {@link org.apache.hadoop.hbase.client.Connection} using the
 * Bigtable connection implementation reported by {@code getConnectionClass()}.
 *
 * @param conf a {@link org.apache.hadoop.conf.Configuration} object.
 * @return a {@link org.apache.hadoop.hbase.client.Connection} object.
 * @throws IllegalStateException if the connection class cannot be reflectively
 *     instantiated via a {@code (Configuration)} constructor.
 */
public static Connection connect(Configuration conf) {
  Class<? extends Connection> connectionClass = getConnectionClass();
  try {
    return connectionClass.getConstructor(Configuration.class).newInstance(conf);
  } catch (Exception e) {
    // Fix: report the class actually used for instantiation (connectionClass),
    // not the static CONNECTION_CLASS field, so the diagnostic stays accurate
    // if getConnectionClass() ever resolves a different implementation.
    throw new IllegalStateException(
        "Could not find an appropriate constructor for " + connectionClass.getCanonicalName(), e);
  }
}
}
/**
 * Create a new {@link org.apache.hadoop.hbase.client.Connection} using the
 * Bigtable connection implementation reported by {@code getConnectionClass()}.
 *
 * @param conf a {@link org.apache.hadoop.conf.Configuration} object.
 * @return a {@link org.apache.hadoop.hbase.client.Connection} object.
 * @throws IllegalStateException if the connection class cannot be reflectively
 *     instantiated via a {@code (Configuration)} constructor.
 */
public static Connection connect(Configuration conf) {
  Class<? extends Connection> connectionClass = getConnectionClass();
  try {
    return connectionClass.getConstructor(Configuration.class).newInstance(conf);
  } catch (Exception e) {
    // Fix: report the class actually used for instantiation (connectionClass),
    // not the static CONNECTION_CLASS field, so the diagnostic stays accurate
    // if getConnectionClass() ever resolves a different implementation.
    throw new IllegalStateException(
        "Could not find an appropriate constructor for " + connectionClass.getCanonicalName(), e);
  }
}
}
/**
 * Configure and return an existing {@link org.apache.hadoop.conf.Configuration}.
 *
 * @param conf a {@link org.apache.hadoop.conf.Configuration} object to configure.
 * @param projectId a {@link java.lang.String} object.
 * @param instanceId a {@link java.lang.String} object.
 * @param appProfileId a {@link java.lang.String} object.
 * @return the modified {@link org.apache.hadoop.conf.Configuration} object.
 */
public static Configuration configure(
    Configuration conf, String projectId, String instanceId, String appProfileId) {
  // Resolve the connection implementation up front, then set every key.
  String connectionImpl = getConnectionClass().getCanonicalName();
  conf.set(HBASE_CLIENT_CONNECTION_IMPL, connectionImpl);
  conf.set(BigtableOptionsFactory.PROJECT_ID_KEY, projectId);
  conf.set(BigtableOptionsFactory.INSTANCE_ID_KEY, instanceId);
  conf.set(BigtableOptionsFactory.APP_PROFILE_ID_KEY, appProfileId);
  return conf;
}
/**
 * Configure and return an existing {@link org.apache.hadoop.conf.Configuration}.
 *
 * @param conf a {@link org.apache.hadoop.conf.Configuration} object to configure.
 * @param projectId a {@link java.lang.String} object.
 * @param instanceId a {@link java.lang.String} object.
 * @param appProfileId a {@link java.lang.String} object.
 * @return the modified {@link org.apache.hadoop.conf.Configuration} object.
 */
public static Configuration configure(
    Configuration conf, String projectId, String instanceId, String appProfileId) {
  // Record project/instance/app-profile, then point the HBase client at the
  // Bigtable-backed Connection implementation.
  conf.set(BigtableOptionsFactory.PROJECT_ID_KEY, projectId);
  conf.set(BigtableOptionsFactory.INSTANCE_ID_KEY, instanceId);
  conf.set(BigtableOptionsFactory.APP_PROFILE_ID_KEY, appProfileId);
  Class<? extends Connection> connectionClass = getConnectionClass();
  conf.set(HBASE_CLIENT_CONNECTION_IMPL, connectionClass.getCanonicalName());
  return conf;
}
/**
 * Sets up the actual job.
 *
 * <p>Builds a map-only export job that reads {@code args[0]} (a table name) via a
 * {@link Scan} and writes {@link Result} records as sequence files to {@code args[1]}
 * (an output path). Defaults the HBase connection implementation to the Bigtable
 * one and sets a 60s RPC timeout unless the caller already configured them.
 *
 * @param conf The current configuration.
 * @param args The command line parameters. {@code args[0]} is the table name,
 *     {@code args[1]} the output directory; later entries are consumed by
 *     {@code getConfiguredScanForJob} — TODO confirm, its body is not visible here.
 * @return The newly created job.
 * @throws java.io.IOException When setting up the job fails.
 */
public static Job createSubmittableJob(Configuration conf, String[] args) throws IOException {
  // setIfUnset: honor any connection impl / timeout the caller configured explicitly.
  conf.setIfUnset("hbase.client.connection.impl", BigtableConfiguration.getConnectionClass().getName());
  conf.setIfUnset(BigtableOptionsFactory.BIGTABLE_RPC_TIMEOUT_MS_KEY, "60000");
  conf.setBoolean(TableInputFormat.SHUFFLE_MAPS, true);
  String tableName = args[0];
  Path outputDir = new Path(args[1]);
  Job job = Job.getInstance(conf, NAME + "_" + tableName);
  job.setJobName(NAME + "_" + tableName);
  job.setJarByClass(Export.class);
  // Set optional scan parameters
  Scan s = getConfiguredScanForJob(conf, args);
  // Identity mapper: each row is emitted unchanged as (row key, Result).
  TableMapReduceUtil.initTableMapperJob(tableName, s, IdentityTableMapper.class, ImmutableBytesWritable.class, Result.class, job, false);
  // No reducers. Just write straight to output files.
  job.setNumReduceTasks(0);
  job.setOutputFormatClass(SequenceFileOutputFormat.class);
  job.setOutputKeyClass(ImmutableBytesWritable.class);
  job.setOutputValueClass(Result.class);
  FileOutputFormat.setOutputPath(job, outputDir); // job conf doesn't contain the conf so doesn't have a default fs.
  return job;
}
throws IOException { conf.setIfUnset("hbase.client.connection.impl", BigtableConfiguration.getConnectionClass().getName()); conf.setIfUnset(BigtableOptionsFactory.BIGTABLE_RPC_TIMEOUT_MS_KEY, "60000");