/**
 * Tags the given span with the Kafka Streams application id and task id
 * taken from the supplied processor context.
 *
 * @param processorContext source of the application/task identifiers
 * @param result           span customizer that receives the tags
 */
static void addTags(ProcessorContext processorContext, SpanCustomizer result) {
    final String applicationId = processorContext.applicationId();
    final String taskId = processorContext.taskId().toString();
    result.tag(KafkaStreamsTags.KAFKA_STREAMS_APPLICATION_ID_TAG, applicationId);
    result.tag(KafkaStreamsTags.KAFKA_STREAMS_TASK_ID_TAG, taskId);
}
/**
 * Get or create the directory for the provided {@link TaskId}.
 *
 * @return directory for the {@link TaskId}
 * @throws ProcessorStateException if the task directory does not exist and could not be created
 */
public File directoryForTask(final TaskId taskId) {
    final File taskDir = new File(stateDir, taskId.toString());
    // File.mkdir() returns false both on a genuine failure AND when the directory
    // already exists. Re-check existence after a failed mkdir so a concurrent
    // creation by another thread does not raise a spurious exception (TOCTOU race).
    if (!taskDir.exists() && !taskDir.mkdir() && !taskDir.exists()) {
        throw new ProcessorStateException(
            String.format("task directory [%s] doesn't exist and couldn't be created", taskDir.getPath()));
    }
    return taskDir;
}
/**
 * Rebuilds this thread's published {@code threadMetadata} snapshot from the
 * currently assigned active and standby tasks.
 *
 * @param activeTasks  active tasks keyed by task id
 * @param standbyTasks standby tasks keyed by task id
 */
private void updateThreadMetadata(final Map<TaskId, StreamTask> activeTasks,
                                  final Map<TaskId, StandbyTask> standbyTasks) {
    // Capture (task id, partitions) for every active task.
    final Set<TaskMetadata> activeTasksMetadata = new HashSet<>();
    for (final Map.Entry<TaskId, StreamTask> entry : activeTasks.entrySet()) {
        activeTasksMetadata.add(new TaskMetadata(entry.getKey().toString(), entry.getValue().partitions()));
    }

    // Capture the same information for every standby task.
    final Set<TaskMetadata> standbyTasksMetadata = new HashSet<>();
    for (final Map.Entry<TaskId, StandbyTask> entry : standbyTasks.entrySet()) {
        standbyTasksMetadata.add(new TaskMetadata(entry.getKey().toString(), entry.getValue().partitions()));
    }

    // Publish a fresh snapshot including the thread name and current state.
    threadMetadata = new ThreadMetadata(this.getName(), this.state().name(), activeTasksMetadata, standbyTasksMetadata);
}
// NOTE(review): fragment of a larger method — the enclosing definition is not visible here.
// The task id string doubles as the scope name for task-level metrics.
final String taskName = context.taskId().toString();
// Tags identifying this specific processor node within the task.
final Map<String, String> tagMap = metrics.tagMap("task-id", context.taskId().toString(), "processor-node-id", processorNodeName);
// Tags for the roll-up sensor covering all processor nodes of the task.
final Map<String, String> allTagMap = metrics.tagMap("task-id", context.taskId().toString(), "processor-node-id", "all");
/**
 * Creates (or fetches) the node-level sensor that counts records dropped for
 * arriving after their window has closed, registering rate and count metrics
 * under the processor-node metrics group.
 *
 * @param context processor context supplying task id, current node, and metrics
 * @return the "late-record-drop" sensor for the current processor node
 */
public static Sensor lateRecordDropSensor(final InternalProcessorContext context) {
    final StreamsMetricsImpl streamsMetrics = context.metrics();
    final String taskId = context.taskId().toString();
    final String nodeName = context.currentNode().name();

    final Sensor sensor = streamsMetrics.nodeLevelSensor(
        taskId,
        nodeName,
        "late-record-drop",
        Sensor.RecordingLevel.INFO);

    StreamsMetricsImpl.addInvocationRateAndCount(
        sensor,
        "stream-processor-node-metrics",
        streamsMetrics.tagMap("task-id", taskId, "processor-node-id", nodeName),
        "late-record-drop");

    return sensor;
}
/**
 * Wires this caching store into the processor context: resolves serdes
 * (falling back to the context defaults when none were configured), attaches
 * to the thread cache under a task-scoped namespace, and registers a flush
 * listener that forwards evicted dirty entries downstream.
 *
 * @param context the processor context this store is initialized with
 */
@SuppressWarnings("unchecked")
private void initInternal(final ProcessorContext context) {
    this.context = (InternalProcessorContext) context;

    final String changelogTopic = ProcessorStateManager.storeChangelogTopic(context.applicationId(), underlying.name());
    this.serdes = new StateSerdes<>(changelogTopic,
                                    keySerde == null ? (Serde<K>) context.keySerde() : keySerde,
                                    valueSerde == null ? (Serde<V>) context.valueSerde() : valueSerde);

    this.cache = this.context.getCache();
    this.cacheName = ThreadCache.nameSpaceFromTaskIdAndStore(context.taskId().toString(), underlying.name());

    // When the cache evicts dirty entries, push each one through the store
    // (and possibly forward it to downstream processors).
    cache.addDirtyEntryFlushListener(cacheName, new ThreadCache.DirtyEntryFlushListener() {
        @Override
        public void apply(final List<ThreadCache.DirtyEntry> dirtyEntries) {
            for (final ThreadCache.DirtyEntry dirtyEntry : dirtyEntries) {
                putAndMaybeForward(dirtyEntry, (InternalProcessorContext) context);
            }
        }
    });
}
@Override public void init(final ProcessorContext context, final StateStore root) { this.context = (InternalProcessorContext) context; final StreamsMetricsImpl metrics = this.context.metrics(); final String taskName = context.taskId().toString(); expiredRecordSensor = metrics.storeLevelSensor( taskName, name(), "expired-window-record-drop", Sensor.RecordingLevel.INFO ); addInvocationRateAndCount( expiredRecordSensor, "stream-" + metricScope + "-metrics", metrics.tagMap("task-id", taskName, metricScope + "-id", name()), "expired-window-record-drop" ); keySchema.init(ProcessorStateManager.storeChangelogTopic(context.applicationId(), root.name())); segments.openExisting(this.context); bulkLoadSegments = new HashSet<>(segments.allSegments()); // register and possibly restore the state from the logs context.register(root, new RocksDBSegmentsBatchingRestoreCallback()); open = true; }
id.toString(), logContext, productionExceptionHandler,
@SuppressWarnings("unchecked") @Override public void init(final ProcessorContext context, final StateStore root) { this.context = context; this.serdes = new StateSerdes<>(ProcessorStateManager.storeChangelogTopic(context.applicationId(), name()), keySerde == null ? (Serde<K>) context.keySerde() : keySerde, valueSerde == null ? (Serde<V>) context.valueSerde() : valueSerde); this.metrics = (StreamsMetricsImpl) context.metrics(); taskName = context.taskId().toString(); final String metricsGroup = "stream-" + metricScope + "-metrics"; final Map<String, String> taskTags = metrics.tagMap("task-id", taskName, metricScope + "-id", "all"); final Map<String, String> storeTags = metrics.tagMap("task-id", taskName, metricScope + "-id", name()); putTime = createTaskAndStoreLatencyAndThroughputSensors(DEBUG, "put", metrics, metricsGroup, taskName, name(), taskTags, storeTags); fetchTime = createTaskAndStoreLatencyAndThroughputSensors(DEBUG, "fetch", metrics, metricsGroup, taskName, name(), taskTags, storeTags); flushTime = createTaskAndStoreLatencyAndThroughputSensors(DEBUG, "flush", metrics, metricsGroup, taskName, name(), taskTags, storeTags); final Sensor restoreTime = createTaskAndStoreLatencyAndThroughputSensors(DEBUG, "restore", metrics, metricsGroup, taskName, name(), taskTags, storeTags); // register and possibly restore the state from the logs final long startNs = time.nanoseconds(); try { inner.init(context, root); } finally { this.metrics.recordLatency( restoreTime, startNs, time.nanoseconds() ); } }
@SuppressWarnings("unchecked") @Override public void init(final ProcessorContext context, final StateStore root) { //noinspection unchecked this.serdes = new StateSerdes<>(ProcessorStateManager.storeChangelogTopic(context.applicationId(), name()), keySerde == null ? (Serde<K>) context.keySerde() : keySerde, valueSerde == null ? (Serde<V>) context.valueSerde() : valueSerde); this.metrics = (StreamsMetricsImpl) context.metrics(); taskName = context.taskId().toString(); final String metricsGroup = "stream-" + metricScope + "-metrics"; final Map<String, String> taskTags = metrics.tagMap("task-id", taskName, metricScope + "-id", "all"); final Map<String, String> storeTags = metrics.tagMap("task-id", taskName, metricScope + "-id", name()); putTime = createTaskAndStoreLatencyAndThroughputSensors(DEBUG, "put", metrics, metricsGroup, taskName, name(), taskTags, storeTags); fetchTime = createTaskAndStoreLatencyAndThroughputSensors(DEBUG, "fetch", metrics, metricsGroup, taskName, name(), taskTags, storeTags); flushTime = createTaskAndStoreLatencyAndThroughputSensors(DEBUG, "flush", metrics, metricsGroup, taskName, name(), taskTags, storeTags); removeTime = createTaskAndStoreLatencyAndThroughputSensors(DEBUG, "remove", metrics, metricsGroup, taskName, name(), taskTags, storeTags); final Sensor restoreTime = createTaskAndStoreLatencyAndThroughputSensors(DEBUG, "restore", metrics, metricsGroup, taskName, name(), taskTags, storeTags); // register and possibly restore the state from the logs final long startNs = time.nanoseconds(); try { inner.init(context, root); } finally { this.metrics.recordLatency( restoreTime, startNs, time.nanoseconds() ); } }
// NOTE(review): fragment of a larger init method — the enclosing definition is not visible here.
this.metrics = (StreamsMetricsImpl) context.metrics();
// The task id string doubles as the scope name for task-level metrics.
taskName = context.taskId().toString();
final String metricsGroup = "stream-" + metricScope + "-metrics";
// "all" tags aggregate over every store of this scope within the task.
final Map<String, String> taskTags = metrics.tagMap("task-id", taskName, metricScope + "-id", "all");
// NOTE(review): this constructor is truncated in the visible source — its body
// continues (and closes) beyond this view; code left byte-identical.
TaskMetrics(final TaskId id, final StreamsMetricsImpl metrics) {
    // The task id string is reused as the sensor-scope name for task-level sensors.
    taskName = id.toString();
    this.metrics = metrics;
    final String group = "stream-task-metrics";