/**
 * Jackson creation entry point: builds a topic description together with its
 * attached schema string. All topic attributes are forwarded to the superclass
 * constructor; the derived {@code topic} view is then materialized via
 * {@code convertToTopic()} and the raw schema is stored as-is.
 */
@JsonCreator
public TopicWithSchema(@JsonProperty("schema") String schema,
                       @JsonProperty("name") String qualifiedName,
                       @JsonProperty("description") String description,
                       @JsonProperty("owner") OwnerId owner,
                       @JsonProperty("retentionTime") RetentionTime retentionTime,
                       @JsonProperty("jsonToAvroDryRun") boolean jsonToAvroDryRunEnabled,
                       @JsonProperty("ack") Ack ack,
                       @JsonProperty("trackingEnabled") boolean trackingEnabled,
                       @JsonProperty("migratedFromJsonType") boolean migratedFromJsonType,
                       @JsonProperty("schemaVersionAwareSerializationEnabled") boolean schemaVersionAwareSerializationEnabled,
                       @JsonProperty("contentType") ContentType contentType,
                       @JsonProperty("maxMessageSize") Integer maxMessageSize,
                       @JsonProperty("auth") PublishingAuth publishingAuth,
                       @JsonProperty("subscribingRestricted") boolean subscribingRestricted,
                       @JsonProperty("offlineStorage") TopicDataOfflineStorage offlineStorage) {
    super(qualifiedName, description, owner, retentionTime, jsonToAvroDryRunEnabled, ack,
            trackingEnabled, migratedFromJsonType, schemaVersionAwareSerializationEnabled,
            contentType, maxMessageSize, publishingAuth, subscribingRestricted, offlineStorage);
    this.topic = convertToTopic();
    this.schema = schema;
}
/**
 * Applies a patch to an existing topic and propagates the change.
 *
 * Flow: validate group and patched topic, then — only if anything actually
 * changed — push a retention-time change to all broker DCs, persist the topic,
 * run the JSON→Avro content-type migration hand-off when the patch flipped
 * the migration flag, and finally audit and refresh the owner cache.
 *
 * @param topicName  fully qualified name of the topic to update
 * @param patch      set of field changes to apply on top of the stored topic
 * @param modifiedBy identity recorded in the audit trail
 */
public void updateTopic(TopicName topicName, PatchData patch, String modifiedBy) {
    groupService.checkGroupExists(topicName.getGroupName());

    Topic retrieved = getTopicDetails(topicName);
    Topic modified = Patch.apply(retrieved, patch);
    topicValidator.ensureUpdatedTopicIsValid(modified, retrieved);

    if (!retrieved.equals(modified)) {
        // Captured before any repository/broker write so migrated subscriptions
        // are notified about everything that happened from this point on.
        Instant beforeMigrationInstant = clock.instant();

        // BUGFIX: compare retention times by value, not reference. Patch.apply can
        // return a new-but-equal RetentionTime instance, and `!=` on objects would
        // then trigger a needless broker update (or miss a real change if instances
        // were ever shared). Objects.equals is also null-safe. Fully qualified to
        // avoid touching the (unseen) import block.
        if (!java.util.Objects.equals(retrieved.getRetentionTime(), modified.getRetentionTime())) {
            multiDCAwareService.manageTopic(brokerTopicManagement ->
                    brokerTopicManagement.updateTopic(modified)
            );
        }
        topicRepository.updateTopic(modified);

        if (!retrieved.wasMigratedFromJsonType() && modified.wasMigratedFromJsonType()) {
            logger.info("Waiting until all subscriptions have consumers assigned during topic {} content type migration...", topicName.qualifiedName());
            topicContentTypeMigrationService.waitUntilAllSubscriptionsHasConsumersAssigned(modified,
                    Duration.ofSeconds(topicProperties.getSubscriptionsAssignmentsCompletedTimeoutSeconds()));
            logger.info("Notifying subscriptions' consumers about changes in topic {} content type...", topicName.qualifiedName());
            topicContentTypeMigrationService.notifySubscriptions(modified, beforeMigrationInstant);
        }
        auditor.objectUpdated(modifiedBy, retrieved, modified);
        topicOwnerCache.onUpdatedTopic(retrieved, modified);
    }
}
/**
 * Creates the Kafka topic(s) backing the given Hermes topic.
 *
 * The name mapper may yield more than one physical Kafka topic for a single
 * Hermes topic; each one is created in ZooKeeper with the same partition
 * count, replication factor and per-topic config.
 */
@Override
public void createTopic(Topic topic) {
    // Build the per-topic Kafka config once and reuse it for every mapped name.
    Properties config = createTopicConfig(topic.getRetentionTime().getDuration(), topicProperties, topic);

    kafkaNamesMapper.toKafkaTopics(topic).forEach(kafkaTopic ->
            adminZkClient.createTopic(
                    kafkaTopic.name().asString(),
                    topicProperties.getPartitions(),
                    topicProperties.getReplicationFactor(),
                    config,
                    kafka.admin.RackAwareMode.Enforced$.MODULE$
            )
    );
}
/**
 * Updates the Kafka-side configuration of an existing Hermes topic.
 *
 * When the update is part of a migration to a new Kafka topic name, the
 * primary topic is created from scratch; otherwise only its config is
 * changed. A secondary Kafka topic, if present, always gets the config
 * change (it is assumed to already exist — NOTE(review): confirm).
 */
@Override
public void updateTopic(Topic topic) {
    Properties config = createTopicConfig(topic.getRetentionTime().getDuration(), topicProperties, topic);
    KafkaTopics kafkaTopics = kafkaNamesMapper.toKafkaTopics(topic);
    String primaryName = kafkaTopics.getPrimary().name().asString();

    if (isMigrationToNewKafkaTopic(kafkaTopics)) {
        // Primary does not exist yet under the new name — create it with full settings.
        adminZkClient.createTopic(
                primaryName,
                topicProperties.getPartitions(),
                topicProperties.getReplicationFactor(),
                config,
                kafka.admin.RackAwareMode.Enforced$.MODULE$
        );
    } else {
        adminZkClient.changeTopicConfig(primaryName, config);
    }

    kafkaTopics.getSecondary().ifPresent(secondaryTopic ->
            adminZkClient.changeTopicConfig(secondaryTopic.name().asString(), config)
    );
}