/**
 * Copies the Kafka Streams application id and task id from the processor
 * context onto the given span as tags.
 */
static void addTags(ProcessorContext processorContext, SpanCustomizer result) {
    final String applicationId = processorContext.applicationId();
    final String taskId = processorContext.taskId().toString();
    result.tag(KafkaStreamsTags.KAFKA_STREAMS_APPLICATION_ID_TAG, applicationId);
    result.tag(KafkaStreamsTags.KAFKA_STREAMS_TASK_ID_TAG, taskId);
}
/**
 * Deserialization failure handler: logs the failing record's coordinates,
 * records the error with {@code StreamsErrorCollector}, and tells Streams to
 * skip the record and continue processing.
 */
@Override
public DeserializationHandlerResponse handle(
        final ProcessorContext context,
        final ConsumerRecord<byte[], byte[]> record,
        final Exception exception
) {
    log.warn(
            "Exception caught during Deserialization, taskId: {}, topic: {}, partition: {}, offset: {}",
            context.taskId(),
            record.topic(),
            record.partition(),
            record.offset(),
            exception
    );
    StreamsErrorCollector.recordError(context.applicationId(), record.topic());
    return DeserializationHandlerResponse.CONTINUE;
}
// Pass-through: returns the task id of the wrapped (delegate) context.
@Override public TaskId taskId() { return delegate.taskId(); }
StoreChangeLogger(final String storeName, final ProcessorContext context, final StateSerdes<K, V> serialization) { this(storeName, context, context.taskId().partition, serialization); }
// Convenience constructor: derives the changelog partition from the context's
// task id and delegates to the full constructor.
StoreChangeFlushingLogger(String storeName, ProcessorContext context, StateSerdes<K, V> serialization) { this(storeName, context, context.taskId().partition, serialization); }
/**
 * Fail-fast deserialization handler: logs the failing record's task, topic,
 * partition and offset at error level, then instructs Streams to stop.
 */
@Override
public DeserializationHandlerResponse handle(final ProcessorContext context,
                                             final ConsumerRecord<byte[], byte[]> record,
                                             final Exception exception) {
    log.error(
            "Exception caught during Deserialization, taskId: {}, topic: {}, partition: {}, offset: {}",
            context.taskId(),
            record.topic(),
            record.partition(),
            record.offset(),
            exception);
    return DeserializationHandlerResponse.FAIL;
}
/**
 * Log-and-continue deserialization handler: warns with the failing record's
 * coordinates and tells Streams to skip the record and keep going.
 */
@Override
public DeserializationHandlerResponse handle(final ProcessorContext context,
                                             final ConsumerRecord<byte[], byte[]> record,
                                             final Exception exception) {
    log.warn(
            "Exception caught during Deserialization, taskId: {}, topic: {}, partition: {}, offset: {}",
            context.taskId(),
            record.topic(),
            record.partition(),
            record.offset(),
            exception);
    return DeserializationHandlerResponse.CONTINUE;
}
/**
 * Captures everything needed to log changes for the given store: the
 * changelog topic name, the owning task's id/partition, the serializers,
 * and the record collector used to emit changelog records.
 */
private StoreChangeFlushingLogger(String storeName,
                                  ProcessorContext context,
                                  int partition,
                                  StateSerdes<K, V> serialization) {
    this.context = context;
    this.partition = partition;
    this.serialization = serialization;
    this.topic = ProcessorStateManager.storeChangelogTopic(context.applicationId(), storeName);
    this.keySerializer = serialization.keySerializer();
    this.valueSerializer = serialization.valueSerializer();
    // Twitter Added: remember the task id and grab the collector for emitting changelog records.
    this.taskId = context.taskId();
    this.collector = ((RecordCollector.Supplier) context).recordCollector();
}
// NOTE(review): statement fragment — enclosing method not visible in this view.
// Builds metric tag maps keyed by task id and processor-node id.
final String taskName = context.taskId().toString();
// Tags for sensors scoped to this specific processor node.
final Map<String, String> tagMap = metrics.tagMap("task-id", context.taskId().toString(), "processor-node-id", processorNodeName);
// Tags for sensors aggregated across all processor nodes of the task ("all").
final Map<String, String> allTagMap = metrics.tagMap("task-id", context.taskId().toString(), "processor-node-id", "all");
/**
 * Wires this caching layer into the task's thread cache: builds serdes for
 * the store's changelog topic (falling back to the context's default serdes)
 * and registers a flush listener that forwards dirty entries via
 * {@code putAndMaybeForward} when the cache evicts/flushes them.
 */
@SuppressWarnings("unchecked")
private void initInternal(final ProcessorContext context) {
    final InternalProcessorContext internalContext = (InternalProcessorContext) context;
    this.context = internalContext;

    final String changelogTopic =
            ProcessorStateManager.storeChangelogTopic(context.applicationId(), underlying.name());
    final Serde<K> keys = keySerde == null ? (Serde<K>) context.keySerde() : keySerde;
    final Serde<V> values = valueSerde == null ? (Serde<V>) context.valueSerde() : valueSerde;
    this.serdes = new StateSerdes<>(changelogTopic, keys, values);

    this.cache = internalContext.getCache();
    // Cache namespace is unique per (task, store) pair.
    this.cacheName =
            ThreadCache.nameSpaceFromTaskIdAndStore(context.taskId().toString(), underlying.name());
    cache.addDirtyEntryFlushListener(cacheName, new ThreadCache.DirtyEntryFlushListener() {
        @Override
        public void apply(final List<ThreadCache.DirtyEntry> entries) {
            for (final ThreadCache.DirtyEntry dirtyEntry : entries) {
                putAndMaybeForward(dirtyEntry, internalContext);
            }
        }
    });
}
// Initializes the segmented (windowed) store: creates the expired-record-drop
// sensor, initializes the key schema against the changelog topic, opens any
// existing segments, and registers the restore callback. Ordering matters:
// registration must happen after segments are opened so restore can batch-load.
@Override
public void init(final ProcessorContext context, final StateStore root) {
    this.context = (InternalProcessorContext) context;

    final StreamsMetricsImpl metrics = this.context.metrics();
    final String taskName = context.taskId().toString();
    // Counts window records dropped because they arrived after expiry.
    expiredRecordSensor = metrics.storeLevelSensor(
        taskName,
        name(),
        "expired-window-record-drop",
        Sensor.RecordingLevel.INFO
    );
    addInvocationRateAndCount(
        expiredRecordSensor,
        "stream-" + metricScope + "-metrics",
        metrics.tagMap("task-id", taskName, metricScope + "-id", name()),
        "expired-window-record-drop"
    );

    keySchema.init(ProcessorStateManager.storeChangelogTopic(context.applicationId(), root.name()));

    segments.openExisting(this.context);

    // Segments present at startup are bulk-loaded during restoration.
    bulkLoadSegments = new HashSet<>(segments.allSegments());

    // register and possibly restore the state from the logs
    context.register(root, new RocksDBSegmentsBatchingRestoreCallback());

    open = true;
}
/**
 * Wires this caching window store into the task's thread cache. Builds both
 * typed and raw-byte serdes for the changelog topic and registers a flush
 * listener that, for each dirty entry, reconstructs the windowed key, forwards
 * the change downstream (maybeForward) and then writes it to the underlying
 * store — in that order.
 */
@SuppressWarnings("unchecked")
private void initInternal(final ProcessorContext context) {
    final InternalProcessorContext internalContext = (InternalProcessorContext) context;
    this.context = internalContext;

    final String topic =
            ProcessorStateManager.storeChangelogTopic(context.applicationId(), underlying.name());
    final Serde<K> keys = keySerde == null ? (Serde<K>) context.keySerde() : keySerde;
    final Serde<V> values = valueSerde == null ? (Serde<V>) context.valueSerde() : valueSerde;
    serdes = new StateSerdes<>(topic, keys, values);
    bytesSerdes = new StateSerdes<>(topic, Serdes.Bytes(), Serdes.ByteArray());

    // Cache namespace is unique per (task, store) pair.
    name = context.taskId() + "-" + underlying.name();
    cache = internalContext.getCache();
    cache.addDirtyEntryFlushListener(name, new ThreadCache.DirtyEntryFlushListener() {
        @Override
        public void apply(final List<ThreadCache.DirtyEntry> entries) {
            for (final ThreadCache.DirtyEntry dirtyEntry : entries) {
                final byte[] binaryWindowKey = cacheFunction.key(dirtyEntry.key()).get();
                final long timestamp = WindowKeySchema.extractStoreTimestamp(binaryWindowKey);
                final Windowed<K> windowedKey =
                        WindowKeySchema.fromStoreKey(binaryWindowKey, windowSize, serdes);
                final Bytes rawKey =
                        Bytes.wrap(WindowKeySchema.extractStoreKeyBytes(binaryWindowKey));
                // Forward first, then persist to the underlying store.
                maybeForward(dirtyEntry, rawKey, windowedKey, internalContext);
                underlying.put(rawKey, dirtyEntry.newValue(), timestamp);
            }
        }
    });
}
// Initializes the metered store wrapper: builds changelog serdes, creates
// per-task ("all") and per-store latency/throughput sensors for put/fetch/flush,
// then initializes (and possibly restores) the inner store, recording the
// restore latency even if init throws.
@SuppressWarnings("unchecked")
@Override
public void init(final ProcessorContext context, final StateStore root) {
    this.context = context;
    // Serdes target the store's changelog topic; fall back to the context's
    // default serdes when none were supplied at construction time.
    this.serdes = new StateSerdes<>(ProcessorStateManager.storeChangelogTopic(context.applicationId(), name()), keySerde == null ? (Serde<K>) context.keySerde() : keySerde, valueSerde == null ? (Serde<V>) context.valueSerde() : valueSerde);
    this.metrics = (StreamsMetricsImpl) context.metrics();

    taskName = context.taskId().toString();
    final String metricsGroup = "stream-" + metricScope + "-metrics";
    // "all" aggregates across every store of this scope in the task; storeTags are per-store.
    final Map<String, String> taskTags = metrics.tagMap("task-id", taskName, metricScope + "-id", "all");
    final Map<String, String> storeTags = metrics.tagMap("task-id", taskName, metricScope + "-id", name());

    putTime = createTaskAndStoreLatencyAndThroughputSensors(DEBUG, "put", metrics, metricsGroup, taskName, name(), taskTags, storeTags);
    fetchTime = createTaskAndStoreLatencyAndThroughputSensors(DEBUG, "fetch", metrics, metricsGroup, taskName, name(), taskTags, storeTags);
    flushTime = createTaskAndStoreLatencyAndThroughputSensors(DEBUG, "flush", metrics, metricsGroup, taskName, name(), taskTags, storeTags);
    final Sensor restoreTime = createTaskAndStoreLatencyAndThroughputSensors(DEBUG, "restore", metrics, metricsGroup, taskName, name(), taskTags, storeTags);

    // register and possibly restore the state from the logs
    final long startNs = time.nanoseconds();
    try {
        inner.init(context, root);
    } finally {
        this.metrics.recordLatency(restoreTime, startNs, time.nanoseconds());
    }
}
// Initializes this metered store wrapper: builds changelog serdes, creates
// per-task ("all") and per-store latency/throughput sensors for
// put/fetch/flush/remove, then initializes (and possibly restores) the inner
// store, recording the restore latency even if init throws.
@SuppressWarnings("unchecked")
@Override
public void init(final ProcessorContext context, final StateStore root) {
    //noinspection unchecked
    // Serdes target the store's changelog topic; fall back to the context's
    // default serdes when none were supplied at construction time.
    this.serdes = new StateSerdes<>(ProcessorStateManager.storeChangelogTopic(context.applicationId(), name()), keySerde == null ? (Serde<K>) context.keySerde() : keySerde, valueSerde == null ? (Serde<V>) context.valueSerde() : valueSerde);
    this.metrics = (StreamsMetricsImpl) context.metrics();

    taskName = context.taskId().toString();
    final String metricsGroup = "stream-" + metricScope + "-metrics";
    // "all" aggregates across every store of this scope in the task; storeTags are per-store.
    final Map<String, String> taskTags = metrics.tagMap("task-id", taskName, metricScope + "-id", "all");
    final Map<String, String> storeTags = metrics.tagMap("task-id", taskName, metricScope + "-id", name());

    putTime = createTaskAndStoreLatencyAndThroughputSensors(DEBUG, "put", metrics, metricsGroup, taskName, name(), taskTags, storeTags);
    fetchTime = createTaskAndStoreLatencyAndThroughputSensors(DEBUG, "fetch", metrics, metricsGroup, taskName, name(), taskTags, storeTags);
    flushTime = createTaskAndStoreLatencyAndThroughputSensors(DEBUG, "flush", metrics, metricsGroup, taskName, name(), taskTags, storeTags);
    removeTime = createTaskAndStoreLatencyAndThroughputSensors(DEBUG, "remove", metrics, metricsGroup, taskName, name(), taskTags, storeTags);
    final Sensor restoreTime = createTaskAndStoreLatencyAndThroughputSensors(DEBUG, "restore", metrics, metricsGroup, taskName, name(), taskTags, storeTags);

    // register and possibly restore the state from the logs
    final long startNs = time.nanoseconds();
    try {
        inner.init(context, root);
    } finally {
        this.metrics.recordLatency(restoreTime, startNs, time.nanoseconds());
    }
}
// NOTE(review): statement fragment — enclosing method not visible in this view.
// Captures the Streams metrics registry and builds the task-level ("all") tag map.
this.metrics = (StreamsMetricsImpl) context.metrics();
taskName = context.taskId().toString();
final String metricsGroup = "stream-" + metricScope + "-metrics";
// "all" aggregates across every store of this scope within the task.
final Map<String, String> taskTags = metrics.tagMap("task-id", taskName, metricScope + "-id", "all");