/**
 * A service endpoint to write to the {@link FakeDataset}.
 */
public class DatasetWriterService extends AbstractHttpServiceHandler {
  public static final String NAME = DatasetWriterService.class.getSimpleName();

  // Gson is thread-safe; cache a single instance instead of allocating one per request.
  private static final Gson GSON = new Gson();

  @UseDataSet(FakeApp.DS_NAME)
  private FakeDataset fakeDataset;

  /**
   * Writes every entry of the JSON map in the request body into the dataset.
   * The body must be a UTF-8 encoded JSON object of string keys to string values.
   */
  @Path("/write")
  @POST
  public void write(HttpServiceRequest request, HttpServiceResponder responder) {
    Map<String, String> req = GSON.fromJson(
        StandardCharsets.UTF_8.decode(request.getContent()).toString(),
        new TypeToken<Map<String, String>>() { }.getType());
    req.forEach((k, v) -> fakeDataset.put(Bytes.toBytes(k), Bytes.toBytes(v)));
    responder.sendStatus(200);
  }
}
@Override
public void visit(Object instance, Type inspectType, Type declareType, Field field) {
  // Only fields whose type is a Dataset can carry a usable @UseDataSet annotation.
  if (!Dataset.class.isAssignableFrom(field.getType())) {
    return;
  }
  UseDataSet annotation = field.getAnnotation(UseDataSet.class);
  if (annotation != null && !annotation.value().isEmpty()) {
    // Record the dataset name declared on the field.
    dataSets.add(annotation.value());
  }
}
}
/**
 * Handler that records each requested name under the key "hi" and echoes it back.
 */
public static class CustomHandler extends AbstractHttpServiceHandler {

  @UseDataSet(CUSTOM_TABLE)
  private KeyValueTable table;

  @GET
  @Path("{name}")
  public void sayHi(HttpServiceRequest request, HttpServiceResponder responder,
                    @PathParam("name") String name) {
    table.write("hi", name);
    responder.sendString(name);
  }
}
}
@Override
public void visit(Object instance, Type inspectType, Type declareType, Field field) throws Exception {
  // Inject the named dataset into every @UseDataSet-annotated Dataset field.
  if (!Dataset.class.isAssignableFrom(field.getType())) {
    return;
  }
  UseDataSet annotation = field.getAnnotation(UseDataSet.class);
  if (annotation == null || annotation.value().isEmpty()) {
    return;
  }
  field.set(instance, dataSetContext.getDataset(annotation.value()));
}
}
/**
 * HTTP handler exposing the injected {@link Cube} dataset to the base handler.
 */
public static final class CubeHandler extends AbstractCubeHttpHandler {

  // Injected by the runtime via the @UseDataSet annotation.
  @UseDataSet(CUBE_NAME)
  private Cube cube;

  @Override
  protected Cube getCube() {
    return cube;
  }
}
}
@Override
public void visit(Object instance, Type inspectType, Type declareType, Field field) throws Exception {
  // Only Dataset-typed fields are candidates for injection.
  if (Dataset.class.isAssignableFrom(field.getType())) {
    UseDataSet annotation = field.getAnnotation(UseDataSet.class);
    boolean injectable = annotation != null && !annotation.value().isEmpty();
    if (injectable) {
      // Resolve the dataset by its declared name and assign it to the field.
      field.set(instance, dataSetContext.getDataset(annotation.value()));
    }
  }
}
}
/**
 * A mapper that emits, for each input key, the value read from the "foo" table.
 */
public static class DummyMapper extends Mapper<byte[], byte[], Text, Text> {

  @UseDataSet("foo")
  TrackingTable table;

  @Override
  protected void map(byte[] key, byte[] value, Context context)
    throws IOException, InterruptedException {
    // NOTE(review): assumes every input key exists in "foo" — a missing key would
    // make table.read return null and new Text(null) throw; confirm with callers.
    byte[] lookedUp = table.read(key);
    context.write(new Text(key), new Text(lookedUp));
  }
}
/**
 * Handler that logs each call and writes a fixed marker entry into the dataset.
 */
public class NoOpHandler extends AbstractHttpServiceHandler {

  @UseDataSet(DATASET_NAME)
  private KeyValueTable table;

  @Path(ENDPOINT)
  @GET
  public void handler(HttpServiceRequest request, HttpServiceResponder responder) {
    LOG.info("Endpoint {} called in service {}", ENDPOINT, NAME);
    // Re-fetches the dataset from the context on every call, overwriting the
    // injected field — kept as-is to preserve the original behavior.
    table = getContext().getDataset(DATASET_NAME);
    table.write("no-op-service", "no-op-service");
    responder.sendStatus(200);
  }
}
}
/**
 * Handler that serves stored values by key and declares its backing dataset.
 */
public static final class NoOpHandler extends AbstractHttpServiceHandler {

  @UseDataSet(DATASET_NAME)
  private KeyValueTable table;

  // Returns the value stored under {key} as JSON.
  @GET
  @Path("ping/{key}")
  public void ping(HttpServiceRequest request, HttpServiceResponder responder,
                   @PathParam("key") String key) throws IOException {
    byte[] stored = table.read(key);
    responder.sendJson(Bytes.toString(stored));
  }

  @Override
  protected void configure() {
    createDataset(DATASET_NAME, KeyValueTable.class);
  }
}
/**
 * Handler that writes to simpleInputDataset.
 */
public static final class SimpleWriteHandler extends AbstractHttpServiceHandler {

  @UseDataSet("simpleInputDataset")
  private KeyValueTable input;

  // Stores the request body (suffixed with the loadTestClasses() result) under {key}.
  @PUT
  @Path("/put/{key}")
  public void put(HttpServiceRequest request, HttpServiceResponder responder,
                  @PathParam("key") String key) {
    String body = Bytes.toString(request.getContent());
    input.write(key, body + loadTestClasses());
    responder.sendStatus(200);
  }
}
}
/**
 * Greeting Service handler.
 */
public static final class GreetingHandler extends AbstractHttpServiceHandler {

  @UseDataSet("whom")
  private KeyValueTable whom;

  private Metrics metrics;

  @Path("greet")
  @GET
  public void greet(HttpServiceRequest request, HttpServiceResponder responder) {
    // The name to greet is stored under the empty key; fall back to "World".
    byte[] stored = whom.read("");
    String toGreet = (stored == null) ? "World" : new String(stored, Charsets.UTF_8);
    if ("Jane Doe".equals(toGreet)) {
      metrics.count("greetings.count.jane_doe", 1);
    }
    responder.sendString(String.format("Hello %s!", toGreet));
  }
}
/**
 * A handler.
 */
public static class DummyHandler extends AbstractHttpServiceHandler {

  @UseDataSet("foo")
  TrackingTable table;

  // Returns the value stored under {key} as JSON.
  @GET
  @Path("{key}")
  public void handle(HttpServiceRequest request, HttpServiceResponder responder,
                     @PathParam("key") String key) {
    byte[] stored = table.read(Bytes.toBytes(key));
    responder.sendJson(Bytes.toString(stored));
  }

  // Stores the key as its own value.
  @PUT
  @Path("{key}")
  public void put(HttpServiceRequest request, HttpServiceResponder responder,
                  @PathParam("key") String key) {
    table.write(Bytes.toBytes(key), Bytes.toBytes(key));
    responder.sendStatus(200);
  }
}
/**
 * Queries simpleOutputDataset.
 */
public static class SimpleGetOutput extends AbstractHttpServiceHandler {

  private static final Logger LOG = LoggerFactory.getLogger(SimpleGetOutput.class);

  @UseDataSet("simpleOutputDataset")
  private KeyValueTable output;

  @GET
  @Path("get/{key}")
  public void get(HttpServiceRequest request, HttpServiceResponder responder,
                  @PathParam("key") String key) throws IOException, InterruptedException {
    LOG.info("Hello " + loadTestClasses());
    // A missing key is reported as the literal string "null".
    String stored = Bytes.toString(output.read(Bytes.toBytes(key)));
    responder.sendJson(ImmutableMap.of(key, stored == null ? "null" : stored));
  }
}
/**
 * Handler that maintains per-record-type counters in the "records" table.
 */
public static final class RecordHandler extends AbstractHttpServiceHandler {

  @UseDataSet("records")
  private KeyValueTable records;

  /**
   * Increments the counter stored under the record type parsed from the path.
   */
  @POST
  @Path("/increment/{type}")
  public void increment(HttpServiceRequest request, HttpServiceResponder responder,
                        @PathParam("type") String type) {
    records.increment(Bytes.toBytes(new Record(type).getType().name()), 1L);
    responder.sendStatus(200);
  }

  /**
   * Returns the current count for the given record type, or 404 if the type
   * has never been incremented.
   */
  @GET
  @Path("/query")
  public void query(HttpServiceRequest request, HttpServiceResponder responder,
                    @QueryParam("type") Record record) {
    byte[] stored = records.read(Bytes.toBytes(record.getType().name()));
    if (stored == null) {
      // Previously this dereferenced a null read and failed with an NPE (HTTP 500).
      responder.sendStatus(404);
      return;
    }
    responder.sendString(200, Long.toString(Bytes.toLong(stored)), Charsets.UTF_8);
  }
}
}
/**
 * A map/reduce job.
 */
public static class DummyBatch extends AbstractMapReduce {

  @UseDataSet("foo")
  private TrackingTable table;

  @Override
  public void configure() {
    setName("dummy-batch");
    setDescription("batch job that copies from foo to bar");
  }

  @Override
  public void initialize() throws Exception {
    // Wire the Hadoop job: read from dataset "foo", write into dataset "bar".
    MapReduceContext context = getContext();
    Job hadoopJob = context.getHadoopJob();
    hadoopJob.setMapperClass(DummyMapper.class);
    hadoopJob.setReducerClass(DummyReducer.class);
    context.addInput(Input.ofDataset("foo"));
    context.addOutput(Output.ofDataset("bar"));
  }
}
/**
 * Queries simpleInputDataset.
 */
public static class SimpleGetInput extends AbstractHttpServiceHandler {

  private static final Logger LOG = LoggerFactory.getLogger(SimpleGetInput.class);

  @UseDataSet("simpleInputDataset")
  private KeyValueTable input;

  @GET
  @Path("get/{key}")
  public void get(HttpServiceRequest request, HttpServiceResponder responder,
                  @PathParam("key") String key) throws IOException, InterruptedException {
    LOG.info("Hello " + loadTestClasses());
    // A missing key is reported as the literal string "null".
    String stored = Bytes.toString(input.read(Bytes.toBytes(key)));
    responder.sendJson(ImmutableMap.of(key, stored == null ? "null" : stored));
  }
}
/**
 * Handler backed by the custom key-value dataset, injected via @UseDataSet.
 */
public static class MyHandlerWithUseDataSetAnnotation extends AbstractHttpServiceHandler {

  @UseDataSet("myTable")
  private KeyValueTableDefinition.KeyValueTable table;

  // Stores the request body under {key}.
  @PUT
  @Path("{key}")
  public void set(HttpServiceRequest request, HttpServiceResponder responder,
                  @PathParam("key") String key) throws Exception {
    table.put(key, Bytes.toString(request.getContent()));
    responder.sendJson("OK");
  }

  // Returns the value stored under {key}.
  @GET
  @Path("{key}")
  public void get(HttpServiceRequest request, HttpServiceResponder responder,
                  @PathParam("key") String key) throws Exception {
    responder.sendJson(table.get(key));
  }
}
/**
 * Simple get/set handler over the "myTable" key-value dataset.
 */
public static class MyHandler extends AbstractHttpServiceHandler {

  @UseDataSet("myTable")
  private KeyValueTableDefinition.KeyValueTable table;

  @PUT
  @Path("{key}")
  public void set(HttpServiceRequest request, HttpServiceResponder responder,
                  @PathParam("key") String key) throws Exception {
    // The raw request body becomes the stored value.
    String body = Bytes.toString(request.getContent());
    table.put(key, body);
    responder.sendJson("OK");
  }

  @GET
  @Path("{key}")
  public void get(HttpServiceRequest request, HttpServiceResponder responder,
                  @PathParam("key") String key) throws Exception {
    String stored = table.get(key);
    responder.sendJson(stored);
  }
}
/** * Mapper for cluster name test. */ public static final class ClusterNameMapper extends Mapper<LongWritable, Text, Text, LongWritable> implements ProgramLifecycle<MapReduceTaskContext<Text, LongWritable>> { @UseDataSet(CLUSTER_NAME_TABLE) private KeyValueTable clusterNameTable; private String clusterName; private String prefix; @Override public void initialize(MapReduceTaskContext<Text, LongWritable> context) throws Exception { clusterName = context.getClusterName(); WorkflowInfo workflowInfo = context.getWorkflowInfo(); prefix = workflowInfo == null ? "" : workflowInfo.getName() + "."; } @Override protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException { Text output = new Text(); LongWritable one = new LongWritable(1L); for (String word : value.toString().split("\\w+")) { output.set(word); context.write(output, one); } clusterNameTable.write(prefix + "mapper.cluster.name", clusterName); } @Override public void destroy() { // no-op } }
/**
 * Classic word-count map/reduce whose input/output paths are looked up from
 * the "jobConfig" key-value table at launch time.
 */
public static final class ClassicWordCount extends AbstractMapReduce {

  static final int MEMORY_MB = 1024;

  @UseDataSet("jobConfig")
  private KeyValueTable table;

  @Override
  protected void configure() {
    createDataset("jobConfig", KeyValueTable.class);
    setDriverResources(new Resources(MEMORY_MB));
  }

  @Override
  public void initialize() throws Exception {
    // Resolve the HDFS paths stored under "inputPath"/"outputPath".
    String in = Bytes.toString(table.read(Bytes.toBytes("inputPath")));
    String out = Bytes.toString(table.read(Bytes.toBytes("outputPath")));
    Job job = getContext().getHadoopJob();
    WordCount.configureJob(job, in, out);
    job.setPartitionerClass(SimplePartitioner.class);
    job.setNumReduceTasks(2);
    job.setGroupingComparatorClass(SimpleComparator.class);
    job.setSortComparatorClass(SimpleComparator.class);
    job.setCombinerKeyGroupingComparatorClass(SimpleComparator.class);
  }
}