/**
 * Builds the {@link TaskName} for the given partition id.
 *
 * @param partitionId the partition id to derive the task name from
 * @return a task name of the form {@code "Partition <partitionId>"}
 */
public static TaskName getTaskName(int partitionId) {
  String name = "Partition " + partitionId;
  return new TaskName(name);
}
/**
 * Creates and returns a File pointing to the directory for the given store and task,
 * given a particular base directory. Spaces in the store/task path are replaced with
 * underscores so the result is a filesystem-safe directory name.
 *
 * @param storeBaseDir the base directory to use
 * @param storeName the store name to use
 * @param taskName the task name which is referencing the store
 * @return the partition directory for the store
 */
public static File getStorePartitionDir(File storeBaseDir, String storeName, TaskName taskName) {
  String relativePath = storeName + File.separator + taskName.toString();
  return new File(storeBaseDir, relativePath.replace(' ', '_'));
}
}
/**
 * Write the taskName to partition mapping.
 *
 * @param changelogEntries the entries that need to be written to the coordinator
 *                         stream; maps each taskName to its corresponding changelog
 *                         partition
 */
public void writePartitionMapping(Map<TaskName, Integer> changelogEntries) {
  LOG.debug("Updating changelog information with: ");
  changelogEntries.forEach((task, partition) -> {
    LOG.debug("TaskName: {} to Partition: {}", task.getTaskName(), partition);
    coordinatorStreamManager.send(new SetChangelogMapping(SOURCE, task.getTaskName(), partition));
  });
}
/**
 * Derives the standby task's name for the given active task and replica number by
 * joining the standby prefix, the active task's name, and the replica number with the
 * configured separator.
 *
 * @param activeTaskName the active task this standby replicates
 * @param replicaNum the replica number of the standby task
 * @return the standby task's name
 */
private static final TaskName getStandbyTaskName(TaskName activeTaskName, int replicaNum) {
  String standbyName = String.join(TASKNAME_SEPARATOR,
      STANDBY_TASKNAME_PREFIX,
      activeTaskName.getTaskName(),
      String.valueOf(replicaNum));
  return new TaskName(standbyName);
}
}
/**
 * Computes the hash code from the grouperFactory class name, taskName and type —
 * the same three fields compared by {@link #equals(Object)}, keeping the
 * equals/hashCode contract intact.
 * (The previous Javadoc was copy-pasted from equals and described equality, not hashing.)
 */
@Override
public int hashCode() {
  int result = grouperFactoryClassName.hashCode();
  result = 31 * result + taskName.hashCode();
  result = 31 * result + type.hashCode();
  return result;
}
/**
 * Two {@link KafkaCheckpointLogKey}s are equal iff their grouperFactory class,
 * taskName and type are equal.
 */
@Override
public boolean equals(Object o) {
  if (this == o) {
    return true;
  }
  if (o == null || getClass() != o.getClass()) {
    return false;
  }
  KafkaCheckpointLogKey other = (KafkaCheckpointLogKey) o;
  return grouperFactoryClassName.equals(other.grouperFactoryClassName)
      && taskName.equals(other.taskName)
      && type.equals(other.type);
}
/**
 * Orders {@link TaskModel}s by task name.
 * NOTE(review): ordering uses only taskName while equals also compares other fields,
 * so compareTo may not be consistent with equals — confirm no sorted-set callers rely
 * on that consistency.
 */
public int compareTo(TaskModel other) {
  return taskName.compareTo(other.getTaskName());
}
}
/**
 * Computes the hash code from the grouperFactory class name, taskName and type —
 * the same three fields compared by {@link #equals(Object)}, keeping the
 * equals/hashCode contract intact.
 * (The previous Javadoc was copy-pasted from equals and described equality, not hashing.)
 */
@Override
public int hashCode() {
  int result = grouperFactoryClassName.hashCode();
  result = 31 * result + taskName.hashCode();
  result = 31 * result + type.hashCode();
  return result;
}
/**
 * Two {@link KafkaCheckpointLogKey}s are equal iff their grouperFactory class,
 * taskName and type are equal.
 */
@Override
public boolean equals(Object o) {
  if (this == o) {
    return true;
  }
  if (o == null || getClass() != o.getClass()) {
    return false;
  }
  KafkaCheckpointLogKey other = (KafkaCheckpointLogKey) o;
  return grouperFactoryClassName.equals(other.grouperFactoryClassName)
      && taskName.equals(other.taskName)
      && type.equals(other.type);
}
/**
 * Orders {@link TaskModel}s by task name.
 * NOTE(review): ordering uses only taskName while equals also compares other fields,
 * so compareTo may not be consistent with equals — confirm no sorted-set callers rely
 * on that consistency.
 */
public int compareTo(TaskModel other) {
  return taskName.compareTo(other.getTaskName());
}
}
/**
 * Deserializes the given JSON bytes into a {@link KafkaCheckpointLogKey}, rejecting
 * keys whose type field does not match the expected checkpoint key type.
 *
 * @param bytes serialized key bytes
 * @return the deserialized checkpoint log key
 * @throws SamzaException if the bytes cannot be deserialized or the key type is
 *         invalid (the internal IllegalArgumentException is wrapped by the catch)
 */
@Override
public KafkaCheckpointLogKey fromBytes(byte[] bytes) {
  try {
    LinkedHashMap<String, String> keyMap = mapper.readValue(bytes, LinkedHashMap.class);
    String keyType = keyMap.get(TYPE_FIELD);
    if (!KafkaCheckpointLogKey.CHECKPOINT_KEY_TYPE.equals(keyType)) {
      throw new IllegalArgumentException(String.format("Invalid key detected. Type of the key is %s", keyType));
    }
    TaskName taskName = new TaskName(keyMap.get(TASK_NAME_FIELD));
    return new KafkaCheckpointLogKey(keyType, taskName, keyMap.get(SSP_GROUPER_FACTORY_FIELD));
  } catch (Exception e) {
    throw new SamzaException(String.format("Exception in de-serializing checkpoint bytes: %s", Arrays.toString(bytes)), e);
  }
}
}
/**
 * Returns the directory under the store base directory holding this task's data for
 * the given store; spaces in the relative path are replaced with underscores.
 *
 * @param storeName name of the store
 * @return the store directory for this task
 */
@VisibleForTesting
File getStoreLocation(String storeName) {
  String relativePath = (storeName + File.separator + taskName.toString()).replace(' ', '_');
  return new File(storeBaseDir, relativePath);
}
/**
 * Write the taskName to partition mapping.
 *
 * @param changelogEntries the entries that need to be written to the coordinator
 *                         stream; maps each taskName to its corresponding changelog
 *                         partition
 */
public void writePartitionMapping(Map<TaskName, Integer> changelogEntries) {
  LOG.debug("Updating changelog information with: ");
  changelogEntries.forEach((task, partition) -> {
    LOG.debug("TaskName: {} to Partition: {}", task.getTaskName(), partition);
    coordinatorStreamManager.send(new SetChangelogMapping(SOURCE, task.getTaskName(), partition));
  });
}
/**
 * Computes the hash code from the grouperFactory class name, taskName and type —
 * the same three fields compared by {@link #equals(Object)}, keeping the
 * equals/hashCode contract intact.
 * (The previous Javadoc was copy-pasted from equals and described equality, not hashing.)
 */
@Override
public int hashCode() {
  int result = grouperFactoryClassName.hashCode();
  result = 31 * result + taskName.hashCode();
  result = 31 * result + type.hashCode();
  return result;
}
/**
 * Two {@link KafkaCheckpointLogKey}s are equal iff their grouperFactory class,
 * taskName and type are equal.
 */
@Override
public boolean equals(Object o) {
  if (this == o) {
    return true;
  }
  if (o == null || getClass() != o.getClass()) {
    return false;
  }
  KafkaCheckpointLogKey other = (KafkaCheckpointLogKey) o;
  return grouperFactoryClassName.equals(other.grouperFactoryClassName)
      && taskName.equals(other.taskName)
      && type.equals(other.type);
}
/**
 * Deserializes the given JSON bytes into a {@link KafkaCheckpointLogKey}, rejecting
 * keys whose type field does not match the expected checkpoint key type.
 *
 * @param bytes serialized key bytes
 * @return the deserialized checkpoint log key
 * @throws SamzaException if the bytes cannot be deserialized or the key type is
 *         invalid (the internal IllegalArgumentException is wrapped by the catch)
 */
@Override
public KafkaCheckpointLogKey fromBytes(byte[] bytes) {
  try {
    LinkedHashMap<String, String> keyMap = mapper.readValue(bytes, LinkedHashMap.class);
    String keyType = keyMap.get(TYPE_FIELD);
    if (!KafkaCheckpointLogKey.CHECKPOINT_KEY_TYPE.equals(keyType)) {
      throw new IllegalArgumentException(String.format("Invalid key detected. Type of the key is %s", keyType));
    }
    TaskName taskName = new TaskName(keyMap.get(TASK_NAME_FIELD));
    return new KafkaCheckpointLogKey(keyType, taskName, keyMap.get(SSP_GROUPER_FACTORY_FIELD));
  } catch (Exception e) {
    throw new SamzaException(String.format("Exception in de-serializing checkpoint bytes: %s", Arrays.toString(bytes)), e);
  }
}
}
/**
 * Returns the directory under the store base directory holding this task's data for
 * the given store; spaces in the relative path are replaced with underscores.
 *
 * @param storeName name of the store
 * @return the store directory for this task
 */
@VisibleForTesting
File getStoreLocation(String storeName) {
  String relativePath = (storeName + File.separator + taskName.toString()).replace(' ', '_');
  return new File(storeBaseDir, relativePath);
}
/**
 * Write the taskName to partition mapping.
 *
 * @param changelogEntries the entries that need to be written to the coordinator
 *                         stream; maps each taskName to its corresponding changelog
 *                         partition
 */
public void writePartitionMapping(Map<TaskName, Integer> changelogEntries) {
  LOG.debug("Updating changelog information with: ");
  changelogEntries.forEach((task, partition) -> {
    LOG.debug("TaskName: {} to Partition: {}", task.getTaskName(), partition);
    coordinatorStreamManager.send(new SetChangelogMapping(SOURCE, task.getTaskName(), partition));
  });
}
/**
 * Computes the hash code from the taskName, system stream partitions and changelog
 * partition — the same fields compared by {@link #equals(Object)}.
 */
@Override
public int hashCode() {
  int hash = taskName.hashCode();
  hash = hash * 31 + systemStreamPartitions.hashCode();
  hash = hash * 31 + changelogPartition.hashCode();
  return hash;
}
/**
 * Two {@link TaskModel}s are equal iff their taskName, systemStreamPartitions and
 * changelogPartition are all equal.
 */
@Override
public boolean equals(Object o) {
  if (this == o) {
    return true;
  }
  if (o == null || getClass() != o.getClass()) {
    return false;
  }
  TaskModel other = (TaskModel) o;
  return taskName.equals(other.taskName)
      && systemStreamPartitions.equals(other.systemStreamPartitions)
      && changelogPartition.equals(other.changelogPartition);
}