/**
 * Derives the {@link TaskName} for a standby replica of the given active task.
 * The generated name is: STANDBY_TASKNAME_PREFIX + separator + active task name
 * + separator + replica number.
 *
 * <p>Note: modifier order fixed to the canonical {@code static final}; the
 * {@code final} keyword is dropped because it is redundant on a static method
 * (static methods cannot be overridden).
 *
 * @param activeTaskName name of the active task being replicated
 * @param replicaNum zero-based index of the standby replica
 * @return the standby task's name
 */
private static TaskName getStandbyTaskName(TaskName activeTaskName, int replicaNum) {
  // String '+' is clearer than a concat() chain and compiles to the same result
  // for non-null operands (all operands here are non-null constants/fields).
  return new TaskName(STANDBY_TASKNAME_PREFIX + TASKNAME_SEPARATOR
      + activeTaskName.getTaskName() + TASKNAME_SEPARATOR + replicaNum);
}
}
/**
 * Reads all task-mode assignments from the task-mode metadata store.
 *
 * <p>Entries whose serialized value deserializes to {@code null} are skipped
 * (but still logged), so the returned map only contains resolvable modes.
 *
 * @return an unmodifiable map from task name to its assigned {@link TaskMode}
 */
public Map<TaskName, TaskMode> readTaskModes() {
  Map<TaskName, TaskMode> taskModeMap = new HashMap<>();
  taskModeMappingMetadataStore.all().forEach((taskName, valueBytes) -> {
    String taskMode = taskModeSerde.fromBytes(valueBytes);
    if (taskMode != null) {
      taskModeMap.put(new TaskName(taskName), TaskMode.valueOf(taskMode));
    }
    LOG.debug("Task mode assignment for task {}: {}", taskName, taskMode);
  });
  // taskModeMap is a local that never escapes this method, so wrapping it
  // directly is safe; the previous extra new HashMap<>(taskModeMap) copy was redundant.
  return Collections.unmodifiableMap(taskModeMap);
}
/**
 * Creates a builder for a mocked task side-input storage manager.
 *
 * @param taskName logical name of the task under test
 * @param storeBaseDir base directory used for the task's store files
 */
public MockTaskSideInputStorageManagerBuilder(String taskName, String storeBaseDir) {
  this.storeBaseDir = storeBaseDir;
  this.taskName = new TaskName(taskName);
  initializeMocks();
}
/**
 * Reads the current JSON text node and wraps its string value in a {@link TaskName}.
 */
@Override
public TaskName deserialize(JsonParser jsonParser, DeserializationContext context) throws IOException, JsonProcessingException {
  JsonNode node = jsonParser.getCodec().readTree(jsonParser);
  return new TaskName(node.getTextValue());
}
}
/**
 * Deserializes a JSON text value into a {@link TaskName}.
 */
@Override
public TaskName deserialize(JsonParser jsonParser, DeserializationContext context) throws IOException, JsonProcessingException {
  JsonNode root = jsonParser.getCodec().readTree(jsonParser);
  return new TaskName(root.getTextValue());
}
}
/**
 * Converts the parser's current JSON node (expected to be text) to a {@link TaskName}.
 */
@Override
public TaskName deserialize(JsonParser jsonParser, DeserializationContext context) throws IOException, JsonProcessingException {
  JsonNode jsonNode = jsonParser.getCodec().readTree(jsonParser);
  return new TaskName(jsonNode.getTextValue());
}
}
/**
 * Builds a {@link TaskName} from the text content of the parsed JSON node.
 */
@Override
public TaskName deserialize(JsonParser jsonParser, DeserializationContext context) throws IOException, JsonProcessingException {
  JsonNode treeNode = jsonParser.getCodec().readTree(jsonParser);
  return new TaskName(treeNode.getTextValue());
}
}
/**
 * Builds a fixed set of three container models for tests:
 * container "0" with partitions 0-1, container "1" with partitions 2-3,
 * and container "2" with partition 4 only.
 */
private static Set<ContainerModel> getContainerMap() {
  Map<TaskName, TaskModel> container0Tasks = new HashMap<>();
  container0Tasks.put(new TaskName("Partition 0"), getTaskModel(0));
  container0Tasks.put(new TaskName("Partition 1"), getTaskModel(1));

  Map<TaskName, TaskModel> container1Tasks = new HashMap<>();
  container1Tasks.put(new TaskName("Partition 2"), getTaskModel(2));
  container1Tasks.put(new TaskName("Partition 3"), getTaskModel(3));

  Set<ContainerModel> containers = new HashSet<>();
  containers.add(new ContainerModel("0", container0Tasks));
  containers.add(new ContainerModel("1", container1Tasks));
  containers.add(new ContainerModel("2",
      Collections.singletonMap(new TaskName("Partition 4"), getTaskModel(4))));
  return containers;
}
/**
 * Verifies that broadcast SSPs (StreamA#0 and StreamB#1) are added to every
 * task's assignment in addition to its own partition-grouped SSPs.
 */
@Test
public void testBroadcastStreamsGroupedCorrectly() {
  Config config = new MapConfig(ImmutableMap.of("task.broadcast.inputs", "SystemA.StreamA#0, SystemA.StreamB#1"));
  GroupByPartition grouper = new GroupByPartition(config);

  Map<TaskName, Set<SystemStreamPartition>> actual =
      grouper.group(ImmutableSet.of(aa0, aa1, aa2, ab1, ab2, ac0));

  Map<TaskName, Set<SystemStreamPartition>> expected = ImmutableMap.of(
      new TaskName("Partition 0"), ImmutableSet.of(aa0, ac0, ab1),
      new TaskName("Partition 1"), ImmutableSet.of(aa1, aa0, ab1),
      new TaskName("Partition 2"), ImmutableSet.of(aa2, aa0, ab2, ab1));

  assertEquals(expected, actual);
}
/**
 * Checks that TaskModel survives a JSON round trip, and that legacy JSON
 * without a task-mode field still deserializes (defaulting to an Active task).
 */
@Test
public void testSerializeTaskModel() throws IOException {
  // Round trip: serialize then deserialize a standby task model.
  TaskModel standbyModel =
      new TaskModel(new TaskName("Standby Partition 0"), new HashSet<>(), new Partition(0), TaskMode.Standby);
  String json = this.samzaObjectMapper.writeValueAsString(standbyModel);
  TaskModel roundTripped = this.samzaObjectMapper.readValue(json, TaskModel.class);
  assertEquals(standbyModel, roundTripped);

  // Backwards compatibility: older payloads carry no task-mode field.
  String legacyJson = "{\"task-name\":\"Partition 0\",\"system-stream-partitions\":[],\"changelog-partition\":0}";
  TaskModel legacyModel = this.samzaObjectMapper.readValue(legacyJson, TaskModel.class);
  TaskModel expectedLegacy =
      new TaskModel(new TaskName("Partition 0"), new HashSet<>(), new Partition(0), TaskMode.Active);
  assertEquals(expectedLegacy, legacyModel);
}
/**
 * Verifies that the broadcast SSP (StreamA#0) is attached to every task while
 * all other SSPs each get their own task under GroupBySystemStreamPartition.
 */
@Test
public void testBroadcastStreamGroupedCorrectly() {
  Config config = new MapConfig(ImmutableMap.of("task.broadcast.inputs", "SystemA.StreamA#0"));
  SystemStreamPartitionGrouper grouper = new GroupBySystemStreamPartition(config);

  Map<TaskName, Set<SystemStreamPartition>> actual = grouper.group(ImmutableSet.of(aa0, aa1, aa2, ac0));

  Map<TaskName, Set<SystemStreamPartition>> expected = ImmutableMap.of(
      new TaskName(aa1.toString()), ImmutableSet.of(aa1, aa0),
      new TaskName(aa2.toString()), ImmutableSet.of(aa2, aa0),
      new TaskName(ac0.toString()), ImmutableSet.of(ac0, aa0));

  assertEquals(expected, actual);
}
@Test public void testBinaryCompatibility() { KafkaCheckpointLogKey logKey1 = new KafkaCheckpointLogKey(KafkaCheckpointLogKey.CHECKPOINT_KEY_TYPE, new TaskName("Partition 0"), GroupByPartitionFactory.class.getCanonicalName()); KafkaCheckpointLogKeySerde checkpointSerde = new KafkaCheckpointLogKeySerde(); byte[] bytes = ("{\"systemstreampartition-grouper-factory\"" + ":\"org.apache.samza.container.grouper.stream.GroupByPartitionFactory\",\"taskName\":\"Partition 0\"," + "\"type\":\"checkpoint\"}").getBytes(); // test that the checkpoints returned by the Serde are byte-wise identical to an actual checkpoint in Kafka Assert.assertEquals(true, Arrays.equals(bytes, checkpointSerde.toBytes(logKey1))); }
/**
 * Verifies that no task is created solely for broadcast SSPs: when the only
 * non-broadcast SSP is ab2, a single task receives it plus both broadcasts.
 */
@Test
public void testNoTaskOnlyContainsBroadcastStreams() {
  Config config = new MapConfig(ImmutableMap.of("task.broadcast.inputs", "SystemA.StreamA#0, SystemA.StreamB#1"));
  GroupByPartition grouper = new GroupByPartition(config);

  Map<TaskName, Set<SystemStreamPartition>> actual = grouper.group(ImmutableSet.of(aa0, ab1, ab2));

  Map<TaskName, Set<SystemStreamPartition>> expected =
      ImmutableMap.of(new TaskName("Partition 2"), ImmutableSet.of(aa0, ab1, ab2));

  assertEquals(expected, actual);
}
/**
 * Writes two task-locality entries to ZooKeeper and verifies both are
 * visible through a subsequent read.
 */
@Test
public void testReadAfterWriteTaskLocality() {
  TaskName task1 = new TaskName("task-1");
  TaskName task2 = new TaskName("task-2");
  zkUtils.writeTaskLocality(task1, new LocationId("LocationId-1"));
  zkUtils.writeTaskLocality(task2, new LocationId("LocationId-2"));

  Map<TaskName, LocationId> expectedLocality = ImmutableMap.of(
      task1, new LocationId("LocationId-1"),
      task2, new LocationId("LocationId-2"));

  Assert.assertEquals(expectedLocality, zkUtils.readTaskLocality());
}
/**
 * Verifies that writing locality for an already-known task overwrites the
 * previously stored location rather than accumulating a second entry.
 */
@Test
public void testWriteTaskLocalityShouldUpdateTheExistingValue() {
  TaskName task = new TaskName("task-1");

  // Initial write is readable back.
  zkUtils.writeTaskLocality(task, new LocationId("LocationId-1"));
  Assert.assertEquals(ImmutableMap.of(task, new LocationId("LocationId-1")), zkUtils.readTaskLocality());

  // A second write for the same task replaces the stored location.
  zkUtils.writeTaskLocality(task, new LocationId("LocationId-2"));
  Assert.assertEquals(ImmutableMap.of(task, new LocationId("LocationId-2")), zkUtils.readTaskLocality());
}
@Test public void testSerde() { KafkaCheckpointLogKey key = new KafkaCheckpointLogKey(KafkaCheckpointLogKey.CHECKPOINT_KEY_TYPE, new TaskName("Partition 0"), GroupByPartitionFactory.class.getCanonicalName()); KafkaCheckpointLogKeySerde checkpointSerde = new KafkaCheckpointLogKeySerde(); // test that deserialize(serialize(k)) == k Assert.assertEquals(key, checkpointSerde.fromBytes(checkpointSerde.toBytes(key))); } }
@Before public void setup() { this.context = new MockContext(); // individual tests can override this config if necessary when(this.context.getJobContext().getConfig()).thenReturn(mock(Config.class)); TaskModel taskModel = mock(TaskModel.class); when(taskModel.getTaskName()).thenReturn(new TaskName("task 0")); when(this.context.getTaskContext().getTaskModel()).thenReturn(taskModel); when(this.context.getTaskContext().getTaskMetricsRegistry()).thenReturn(new MetricsRegistryMap()); when(this.context.getContainerContext().getContainerMetricsRegistry()).thenReturn(new MetricsRegistryMap()); }
/**
 * Builds a single-SSP task model for the given partition number, with the
 * changelog partition matching the input partition.
 */
private static TaskModel getTaskModel(int partitionNum) {
  TaskName taskName = new TaskName("Partition " + partitionNum);
  SystemStreamPartition ssp =
      new SystemStreamPartition("test-system", "test-stream", new Partition(partitionNum));
  return new TaskModel(taskName, Collections.singleton(ssp), new Partition(partitionNum));
}
}
/**
 * Verifies that callback sequence numbers for a task start at 0 and
 * increment with each subsequently created callback.
 */
@Test
public void testCreateCallback() {
  TaskName taskName = new TaskName("Partition 0");

  TaskCallbackImpl firstCallback = callbackManager.createCallback(taskName, null, null);
  assertTrue(firstCallback.matchSeqNum(0));

  TaskCallbackImpl secondCallback = callbackManager.createCallback(taskName, null, null);
  assertTrue(secondCallback.matchSeqNum(1));
}
/**
 * Builds a minimal one-container, one-task JobModel and the shared Samza
 * ObjectMapper used by the serialization tests.
 */
@Before
public void setup() {
  Config config = new MapConfig(ImmutableMap.of("a", "b"));

  TaskName taskName = new TaskName("test");
  SystemStreamPartition ssp = new SystemStreamPartition("foo", "bar", new Partition(1));
  TaskModel taskModel = new TaskModel(taskName, ImmutableSet.of(ssp), new Partition(2));

  ContainerModel containerModel = new ContainerModel("1", ImmutableMap.of(taskName, taskModel));
  this.jobModel = new JobModel(config, ImmutableMap.of("1", containerModel));
  this.samzaObjectMapper = SamzaObjectMapper.getObjectMapper();
}