// Fragment of an anonymous Function<PartitionKey.Builder, PartitionKey.Builder>
// (the enclosing class header is outside this view): appends a long field "z" = 1L
// to the key under construction and returns the same builder for chaining.
// NOTE(review): 'builder' is declared @Nullable but is dereferenced without a null
// check — confirm the caller never invokes this with null.
@Nullable @Override public PartitionKey.Builder apply(@Nullable PartitionKey.Builder builder) { return builder.addLongField("z", 1L); } });
/**
 * Builds the runtime arguments used to write a snapshot partition.
 *
 * @param snapshotTime the snapshot timestamp recorded as the {@code SNAPSHOT_FIELD}
 *                     value of the output partition key
 * @param otherProperties additional arguments copied into the result unchanged
 * @return a new, mutable map containing {@code otherProperties} plus the encoded
 *         output partition key
 */
public Map<String, String> getOutputArguments(long snapshotTime, Map<String, String> otherProperties) {
  // Copy-construct so the caller's map is never mutated.
  Map<String, String> arguments = new HashMap<>(otherProperties);
  PartitionedFileSetArguments.setOutputPartitionKey(
      arguments,
      PartitionKey.builder().addLongField(SNAPSHOT_FIELD, snapshotTime).build());
  return arguments;
}
// Fragment of an anonymous Function<PartitionKey.Builder, PartitionKey.Builder>
// (the enclosing class header is outside this view): appends a long field "x" = 1L
// to the key under construction and returns the same builder for chaining.
// NOTE(review): 'builder' is declared @Nullable but is dereferenced without a null
// check — confirm the caller never invokes this with null.
@Nullable @Override public PartitionKey.Builder apply(@Nullable PartitionKey.Builder builder) { return builder.addLongField("x", 1L); } });
// Statement fragment (enclosing method is outside this view): builds the key for
// the given 'partition' timestamp, looks up the partitioned file set named by
// 'dataset' from the program context, and obtains a PartitionOutput handle for
// writing files into that partition.
PartitionKey partitionKey = PartitionKey.builder().addLongField("time", partition).build(); PartitionedFileSet pfs = getContext().getDataset(dataset); final PartitionOutput partitionOutput = pfs.getPartitionOutput(partitionKey);
// NOTE(review): two declarations of a local named 'key', identical except for the
// "s" field ("partitionKeys1" vs "partitionKeys2"). As written in a single scope
// this is a duplicate-local-variable compile error — presumably these lines were
// collapsed from two separate scopes/iterations; verify against the original file.
PartitionKey key = PartitionKey.builder() .addIntField("i", i) .addLongField("l", 17L) .addStringField("s", "partitionKeys1") .build(); PartitionKey key = PartitionKey.builder() .addIntField("i", i) .addLongField("l", 17L) .addStringField("s", "partitionKeys2") .build();
// Statement fragment (enclosing test is outside this view): fetches the partition
// written at time=1 and asserts the first captured notification references the
// same dataset and partition key. The first lookup passes the int literal 1
// (widened to long); the second builds the key with 1L — both denote the same key.
PartitionDetail partition = pfs.getPartition(PartitionKey.builder().addLongField("time", 1).build()); verifyDataNotification(notifications.get(0), NamespaceId.DEFAULT.dataset(FileUploadApp.PFS_NAME), Collections.singletonList(PartitionKey.builder().addLongField("time", 1L).build()));
// Statement fragment (enclosing method is outside this view): obtains the
// "clean records" partitioned file set from its manager, creates an output handle
// for a new time=5000 partition, and resolves the filesystem location that files
// for that partition should be written under.
PartitionedFileSet cleanRecords = cleanRecordsManager.get(); PartitionKey outputPartition = PartitionKey.builder().addLongField("time", 5000).build(); PartitionOutput partitionOutput = cleanRecords.getPartitionOutput(outputPartition); Location partitionLocation = partitionOutput.getLocation();
// Mid-expression fragment of a PartitionKey builder chain — the builder() call and
// any preceding field(s) are outside this view. The "nonexistent" value suggests a
// key built for a negative lookup — confirm against the enclosing test.
.addLongField("l", 17L) .addStringField("s", "nonexistent") .build();
// Transactional body (the enclosing anonymous-class header is outside this view).
// 1) Reads every input partition's file fully into memory and tallies
//    whitespace-separated token counts into 'wordCounts'.
// 2) Writes each tally into the "counts" IncrementingKeyValueTable.
// 3) Creates a new partition in "outputLines" keyed by the current wall-clock time,
//    writes the tallies (values only, newline-joined) into a single file named
//    "file", and registers the partition via addPartition().
// NOTE(review): Bytes.toString(Bytes.toBytes(content)) round-trips ByteBuffer ->
// byte[] -> String — presumably UTF-8; confirm the Bytes helper's charset.
// NOTE(review): wordCounts holds only this run's tallies; the dataset name
// suggests the table itself increments on write — verify that semantic.
@Override public void run(DatasetContext context) throws Exception { Map<String, Long> wordCounts = new HashMap<>(); for (PartitionDetail partition : partitions) { ByteBuffer content; Location location = partition.getLocation(); content = ByteBuffer.wrap(ByteStreams.toByteArray(location.getInputStream())); String string = Bytes.toString(Bytes.toBytes(content)); for (String token : string.split(" ")) { Long count = Objects.firstNonNull(wordCounts.get(token), 0L); wordCounts.put(token, count + 1); } } IncrementingKeyValueTable counts = context.getDataset("counts"); for (Map.Entry<String, Long> entry : wordCounts.entrySet()) { counts.write(Bytes.toBytes(entry.getKey()), entry.getValue()); } PartitionedFileSet outputLines = context.getDataset("outputLines"); PartitionKey partitionKey = PartitionKey.builder().addLongField("time", System.currentTimeMillis()).build(); PartitionOutput outputPartition = outputLines.getPartitionOutput(partitionKey); Location partitionDir = outputPartition.getLocation(); partitionDir.mkdirs(); Location outputLocation = partitionDir.append("file"); outputLocation.createNew(); try (OutputStream outputStream = outputLocation.getOutputStream()) { outputStream.write(Bytes.toBytes(Joiner.on("\n").join(wordCounts.values()))); } outputPartition.addPartition(); } });
@Test
public void testBuilderGetter() {
  // Exercise every builder entry point: the generic addField overloads
  // (String, long, int, boolean) plus the typed addIntField/addLongField/addStringField.
  PartitionKey partitionKey = PartitionKey.builder()
      .addField("a", "value")
      .addField("b", 1L)
      .addField("c", -17)
      .addField("d", true)
      .addIntField("e", 42)
      .addLongField("f", 15)
      .addStringField("g", "ghijk")
      .build();
  // Every field must read back with its original (boxed) value.
  Assert.assertEquals("value", partitionKey.getField("a"));
  Assert.assertEquals(1L, partitionKey.getField("b"));
  Assert.assertEquals(-17, partitionKey.getField("c"));
  Assert.assertEquals(true, partitionKey.getField("d"));
  Assert.assertEquals(42, partitionKey.getField("e"));
  Assert.assertEquals(15L, partitionKey.getField("f"));
  Assert.assertEquals("ghijk", partitionKey.getField("g"));
}
/**
 * Writes the request body into a new partition of the "lines" dataset, keyed by the
 * {@code time} query parameter, and registers the partition. Responds 200 on success,
 * 400 when the parameter is missing or the write fails.
 */
@PUT
@Path("lines")
public void write(HttpServiceRequest request, HttpServiceResponder responder,
                  @QueryParam("time") Long time) {
  // A missing ?time= arrives as null and would otherwise NPE inside addLongField;
  // fail fast with a client error instead.
  if (time == null) {
    responder.sendError(400, "Missing required query parameter 'time'");
    return;
  }
  PartitionKey key = PartitionKey.builder().addLongField("time", time).build();
  PartitionOutput partitionOutput = lines.getPartitionOutput(key);
  Location location = partitionOutput.getLocation();
  try {
    try (WritableByteChannel channel = Channels.newChannel(location.getOutputStream())) {
      channel.write(request.getContent());
    }
    // Only register the partition after its file has been fully written.
    partitionOutput.addPartition();
  } catch (IOException e) {
    responder.sendError(400, String.format("Unable to write path '%s'", location));
    return;
  }
  responder.sendStatus(200);
}
@Override
public void apply() throws Exception {
  // Key and metadata for the partition under test.
  PartitionKey key = PartitionKey.builder()
      .addIntField("i", 42)
      .addLongField("l", 17L)
      .addStringField("s", "x")
      .build();
  ImmutableMap<String, String> expectedMetadata =
      ImmutableMap.of("key1", "value", "key2", "value2", "key3", "value2");
  // Create the partition with the metadata attached before registration.
  PartitionOutput output = dataset.getPartitionOutput(key);
  output.setMetadata(expectedMetadata);
  output.addPartition();
  // Reading the partition back must yield exactly the metadata that was written.
  PartitionDetail detail = dataset.getPartition(key);
  Assert.assertNotNull(detail);
  Assert.assertEquals(expectedMetadata, detail.getMetadata().asMap());
} });
/**
 * Looks up the partition for the most recently recorded snapshot.
 *
 * @return the latest snapshot partition, or {@code null} if no snapshot has been
 *         recorded yet
 * @throws IOException if reading the snapshot state fails
 * @throws IllegalStateException if the state file names a snapshot whose partition
 *         no longer exists (e.g. files were deleted without updating the state file)
 */
private PartitionDetail getLatestPartition() throws IOException {
  Long latestSnapshotTime = getLatestSnapshot();
  if (latestSnapshotTime == null) {
    return null;
  }
  PartitionDetail detail = files.getPartition(
      PartitionKey.builder().addLongField(SNAPSHOT_FIELD, latestSnapshotTime).build());
  if (detail != null) {
    return detail;
  }
  throw new IllegalStateException(String.format(
      "No snapshot files found for latest recorded snapshot from '%d'. "
          + "This can happen if files are deleted manually without updating the state file. "
          + "Please fix the state file to contain the latest snapshot, or delete the file "
          + "and write another snapshot.", latestSnapshotTime));
}
@Override
public void initialize() throws Exception {
  MapReduceContext context = getContext();
  // Consume "lines" partitions incrementally; consumer progress is persisted in
  // the "consumingState" KV table under the key "state.key".
  batchPartitionCommitter = PartitionBatchInput.setInput(
      context, "lines", new KVTableStatePersistor("consumingState", "state.key"));
  // Output partition is keyed by this run's logical start time.
  Map<String, String> outputArguments = new HashMap<>();
  PartitionedFileSetArguments.setOutputPartitionKey(
      outputArguments,
      PartitionKey.builder().addLongField("time", context.getLogicalStartTime()).build());
  context.addOutput(Output.ofDataset("outputLines", outputArguments));
  context.addOutput(Output.ofDataset("counts"));
  // Classic word-count wiring with a single reducer.
  Job job = context.getHadoopJob();
  job.setMapperClass(Tokenizer.class);
  job.setReducerClass(Counter.class);
  job.setNumReduceTasks(1);
}
/**
 * Returns a partition key no previous call has produced: "i" uses a monotonically
 * increasing counter and "s" a fresh random UUID; "l" is a fixed constant.
 */
private PartitionKey generateUniqueKey() {
  PartitionKey.Builder builder = PartitionKey.builder();
  builder = builder.addIntField("i", counter++);
  builder = builder.addLongField("l", 17L);
  builder = builder.addStringField("s", UUID.randomUUID().toString());
  return builder.build();
}
@Test
public void testSetGetOutputPartitionKey() throws Exception {
  Map<String, String> runtimeArguments = new HashMap<>();
  PartitionKey expected = PartitionKey.builder()
      .addIntField("i", 42)
      .addLongField("l", 17L)
      .addStringField("s", "x")
      .build();
  // A key serialized into the arguments must deserialize back to an equal key.
  PartitionedFileSetArguments.setOutputPartitionKey(runtimeArguments, expected);
  Assert.assertEquals(
      expected, PartitionedFileSetArguments.getOutputPartitionKey(runtimeArguments, PARTITIONING));
}
/**
 * Returns a partition key no previous call has produced: "i" uses a monotonically
 * increasing counter and "s" a fresh random UUID; "l" is a fixed constant.
 */
private PartitionKey generateUniqueKey() {
  PartitionKey.Builder builder = PartitionKey.builder();
  builder = builder.addIntField("i", counter++);
  builder = builder.addLongField("l", 17L);
  builder = builder.addStringField("s", UUID.randomUUID().toString());
  return builder.build();
}
}
/**
 * Returns a fresh partition key. Uniqueness comes solely from the random UUID in
 * the "s" field — "i" and "l" are fixed constants in this variant.
 */
private PartitionKey generateUniqueKey() {
  PartitionKey.Builder builder = PartitionKey.builder();
  builder = builder.addIntField("i", 1);
  builder = builder.addLongField("l", 17L);
  builder = builder.addStringField("s", UUID.randomUUID().toString());
  return builder.build();
}
}
/**
 * Returns a partition key no previous call has produced: "i" uses a monotonically
 * increasing counter and "s" a fresh random UUID; "l" is a fixed constant.
 */
private PartitionKey generateUniqueKey() {
  PartitionKey.Builder builder = PartitionKey.builder();
  builder = builder.addIntField("i", counter++);
  builder = builder.addLongField("l", 17L);
  builder = builder.addStringField("s", UUID.randomUUID().toString());
  return builder.build();
}
}
/**
 * Returns a partition key no previous call has produced: "i" uses a monotonically
 * increasing counter and "s" a fresh random UUID; "l" is a fixed constant.
 */
private PartitionKey generateUniqueKey() {
  PartitionKey.Builder builder = PartitionKey.builder();
  builder = builder.addIntField("i", counter++);
  builder = builder.addLongField("l", 17L);
  builder = builder.addStringField("s", UUID.randomUUID().toString());
  return builder.build();
}