/**
 * Loads the connector's persistent offset (if present) via the given loader.
 *
 * @param loader supplies the source partition and converts the stored map into an {@link OffsetContext}
 * @return the previously committed offset context, or {@code null} if nothing was stored
 */
@Override
protected OffsetContext getPreviousOffset(OffsetContext.Loader loader) {
    Map<String, ?> partition = loader.getPartition();
    Map<String, Object> storedOffset = context.offsetStorageReader()
            .offsets(Collections.singleton(partition))
            .get(partition);
    // Guard clause: nothing persisted for this partition yet.
    if (storedOffset == null) {
        return null;
    }
    OffsetContext previous = loader.load(storedOffset);
    LOGGER.info("Found previous offset {}", previous);
    return previous;
}
/**
 * Loads the connector's persistent offset (if present) via the given loader.
 *
 * @param loader supplies the source partition and converts the stored map into an {@link OffsetContext}
 * @return the previously committed offset context, or {@code null} if nothing was stored
 */
protected OffsetContext getPreviousOffset(OffsetContext.Loader loader) {
    Map<String, ?> partition = loader.getPartition();
    Map<String, Object> storedOffset = context.offsetStorageReader()
            .offsets(Collections.singleton(partition))
            .get(partition);
    // Guard clause: nothing persisted for this partition yet.
    if (storedOffset == null) {
        return null;
    }
    OffsetContext previous = loader.load(storedOffset);
    LOGGER.info("Found previous offset {}", previous);
    return previous;
}
}
// Bulk-read the stored offsets for all partitions and hand each
// (partition, offset) pair to the source. NOTE(review): fragment — the
// enclosing method and the semantics of setOffsetFor are not visible here.
context.offsetStorageReader().offsets(partitions).forEach(source::setOffsetFor);
/**
 * Loads the current saved offsets.
 * Builds one source partition per configured database (keyed by "mongodb")
 * and bulk-reads their committed offsets from the Connect offset store.
 */
private void loadOffsets() {
    List<Map<String, String>> partitions = new ArrayList<>();
    for (String dbName : databases) {
        partitions.add(Collections.singletonMap("mongodb", dbName));
    }
    offsets.putAll(context.offsetStorageReader().offsets(partitions));
}
}
/**
 * Resolves the saved offsets for a comma-separated list of Jenkins job URLs.
 * One source partition is built per job (keyed by the decoded job name) and
 * all partitions are looked up in a single bulk read.
 *
 * @param reader  Connect's offset storage reader
 * @param jobUrls comma-separated job URLs to look up
 * @return map from source partition to its stored offset map
 */
private Map<Map<String, String>, Map<String, Object>> loadAndGetOffsets(OffsetStorageReader reader, String jobUrls) {
    String[] urls = jobUrls.split(",");
    logger.debug("Total jobs: {}. Loading offsets from Connect.", urls.length);
    Collection<Map<String, String>> partitions = new ArrayList<>(urls.length);
    for (String url : urls) {
        String jobName = urlDecode(extractJobName(url));
        partitions.add(Collections.singletonMap(JenkinsSourceTask.JOB_NAME, jobName));
    }
    return reader.offsets(partitions);
}
}
/**
 * Loads the connector's persistent offset (if present) via the given loader.
 *
 * @param loader supplies the source partition and converts the stored map into an {@link OffsetContext}
 * @return the previously committed offset context, or {@code null} if nothing was stored
 */
protected OffsetContext getPreviousOffset(OffsetContext.Loader loader) {
    Map<String, ?> partition = loader.getPartition();
    Map<String, Object> storedOffset = context.offsetStorageReader()
            .offsets(Collections.singleton(partition))
            .get(partition);
    // Guard clause: nothing persisted for this partition yet.
    if (storedOffset == null) {
        return null;
    }
    OffsetContext previous = loader.load(storedOffset);
    LOGGER.info("Found previous offset {}", previous);
    return previous;
}
}
// NOTE(review): fragment — converts partition ids to their map form and
// bulk-reads stored offsets; a null result aborts early (the reason for the
// early return is not visible from this fragment — confirm against the caller).
partitionIds.stream().map(TopicPartitionSerDe::asMap).collect(Collectors.toList()); Map<Map<String, Object>, Map<String, Object>> offsets = context.offsetStorageReader().offsets(partitionMaps); if (offsets == null) { return;
// NOTE(review): fragment — collects partition-string lookup keys, bulk-reads
// their stored offsets, then filters out null entries and entries missing the
// topic-partition key. The downstream mapping to Map&lt;String, Long&gt; is cut off here.
leaderTopicPartition.toTopicPartitionString())) .collect(Collectors.toList()); Map<String, Long> topicPartitionStringsOffsets = context.offsetStorageReader().offsets(offsetLookupPartitions) .entrySet().stream() .filter(e -> e != null && e.getKey() != null && e.getKey().get(TOPIC_PARTITION_KEY) != null
// NOTE(review): fragment — after the preceding loop exits, bulk-load the stored
// offsets for the collected partitions in a single offset-store read.
break; offsets = context.offsetStorageReader().offsets(partitions);
/**
 * Records the EasyMock/PowerMock expectations for consumer start-up when no
 * offsets are stored: the task looks up the end offset for the single
 * topic-partition, assigns it, and seeks to that offset.
 * Callers must invoke replayAll() after this method and before exercising
 * the object under test.
 */
private void mockConsumerInitialization() throws Exception {
    TopicPartition firstTopicPartition = new TopicPartition(FIRST_TOPIC, FIRST_PARTITION);
    Collection<TopicPartition> topicPartitions = new ArrayList<>();
    topicPartitions.add(firstTopicPartition);
    Map<TopicPartition, Long> endOffsets = Collections.singletonMap(firstTopicPartition, FIRST_OFFSET);

    // The Connect offset store reports no previously committed offsets.
    EasyMock.expect(context.offsetStorageReader()).andReturn(offsetStorageReader);
    EasyMock.expect(offsetStorageReader.offsets(EasyMock.<List<Map<String, String>>>anyObject()))
            .andReturn(new HashMap<>());

    // Intercept the KafkaConsumer construction so the task receives our mock.
    PowerMock.expectNew(KafkaConsumer.class, new Class[] { Properties.class }, config.getKafkaConsumerProperties())
            .andReturn(consumer);

    // With nothing stored, the task falls back to the partition's end offset.
    EasyMock.expect(consumer.endOffsets(topicPartitions)).andReturn(endOffsets);
    consumer.assign(topicPartitions);
    EasyMock.expectLastCall();
    consumer.seek(firstTopicPartition, FIRST_OFFSET);
    EasyMock.expectLastCall();
}
/**
 * Verifies start() when Connect has no stored offsets and the consumer resets
 * to the end: the task must look up the end offset for the partition, assign
 * it, and seek to that end offset.
 */
@Test
public void testStartNoStoredPartitionsStartEnd() throws Exception {
    TopicPartition firstTopicPartition = new TopicPartition(FIRST_TOPIC, FIRST_PARTITION);
    Collection<TopicPartition> topicPartitions = new ArrayList<>();
    topicPartitions.add(firstTopicPartition);
    Map<TopicPartition, Long> endOffsets = Collections.singletonMap(firstTopicPartition, FIRST_OFFSET);

    // The Connect offset store reports no previously committed offsets.
    EasyMock.expect(context.offsetStorageReader()).andReturn(offsetStorageReader);
    EasyMock.expect(offsetStorageReader.offsets(EasyMock.<List<Map<String, String>>>anyObject()))
            .andReturn(new HashMap<>());

    // Intercept the KafkaConsumer construction so the task receives our mock.
    PowerMock.expectNew(KafkaConsumer.class, new Class[] { Properties.class }, config.getKafkaConsumerProperties())
            .andReturn(consumer);

    // With nothing stored, the task falls back to the partition's end offset.
    EasyMock.expect(consumer.endOffsets(topicPartitions)).andReturn(endOffsets);
    consumer.assign(topicPartitions);
    EasyMock.expectLastCall();
    consumer.seek(firstTopicPartition, FIRST_OFFSET);
    EasyMock.expectLastCall();

    replayAll();

    objectUnderTest.start(opts);

    verifyAll();
}
/**
 * Verifies start() when the partition already has a stored offset: the task
 * must skip the end/beginning-offset lookup entirely and seek straight to the
 * stored offset.
 */
@Test
public void testStartAllStoredPartitions() throws Exception {
    TopicPartition firstTopicPartition = new TopicPartition(FIRST_TOPIC, FIRST_PARTITION);
    Collection<TopicPartition> topicPartitions = new ArrayList<>();
    topicPartitions.add(firstTopicPartition);
    // Stored entry: { TOPIC_PARTITION_KEY -> "topic:partition" } -> { OFFSET_KEY -> FIRST_OFFSET }.
    Map<Map<String, String>, Map<String, Object>> storedOffsets = Collections.singletonMap(
            Collections.singletonMap(TOPIC_PARTITION_KEY, String.format("%s:%d", FIRST_TOPIC, FIRST_PARTITION)),
            Collections.singletonMap(OFFSET_KEY, FIRST_OFFSET));

    EasyMock.expect(context.offsetStorageReader()).andReturn(offsetStorageReader);
    EasyMock.expect(offsetStorageReader.offsets(EasyMock.<List<Map<String, String>>>anyObject()))
            .andReturn(storedOffsets);

    // Intercept the KafkaConsumer construction so the task receives our mock.
    PowerMock.expectNew(KafkaConsumer.class, new Class[] { Properties.class }, config.getKafkaConsumerProperties())
            .andReturn(consumer);

    // No endOffsets()/beginningOffsets() expectation — the stored offset wins.
    consumer.assign(topicPartitions);
    EasyMock.expectLastCall();
    consumer.seek(firstTopicPartition, FIRST_OFFSET);
    EasyMock.expectLastCall();

    replayAll();

    objectUnderTest.start(opts);

    verifyAll();
}
/**
 * Verifies start() when Connect has no stored offsets and auto.offset.reset is
 * "earliest": the task must look up the beginning offset for the partition,
 * assign it, and seek to that beginning offset.
 */
@Test
public void testStartNoStoredPartitionsStartBeginning() throws Exception {
    // Reconfigure the connector for an "earliest" reset before deriving consumer properties.
    opts.put(KafkaSourceConnectorConfig.CONSUMER_AUTO_OFFSET_RESET_CONFIG, "earliest");
    config = new KafkaSourceConnectorConfig(opts);
    props = new Properties();
    props.putAll(config.allWithPrefix(KafkaSourceConnectorConfig.CONSUMER_PREFIX));

    TopicPartition firstTopicPartition = new TopicPartition(FIRST_TOPIC, FIRST_PARTITION);
    Collection<TopicPartition> topicPartitions = new ArrayList<>();
    topicPartitions.add(firstTopicPartition);
    Map<TopicPartition, Long> endOffsets = Collections.singletonMap(firstTopicPartition, FIRST_OFFSET);

    // The Connect offset store reports no previously committed offsets.
    EasyMock.expect(context.offsetStorageReader()).andReturn(offsetStorageReader);
    EasyMock.expect(offsetStorageReader.offsets(EasyMock.<List<Map<String, String>>>anyObject()))
            .andReturn(new HashMap<>());

    // Intercept the KafkaConsumer construction so the task receives our mock.
    PowerMock.expectNew(KafkaConsumer.class, new Class[] { Properties.class }, config.getKafkaConsumerProperties())
            .andReturn(consumer);

    // "earliest" reset: seek to the partition's beginning offset.
    EasyMock.expect(consumer.beginningOffsets(topicPartitions)).andReturn(endOffsets);
    consumer.assign(topicPartitions);
    EasyMock.expectLastCall();
    consumer.seek(firstTopicPartition, FIRST_OFFSET);
    EasyMock.expectLastCall();

    replayAll();

    objectUnderTest.start(opts);

    verifyAll();
}
// NOTE(review): fragment — the offset store returns the prepared storedOffsets
// for any partition list, and KafkaConsumer construction is intercepted so the
// mock is used. Surrounding expectations are not visible from this fragment.
EasyMock.expect(offsetStorageReader.offsets(EasyMock.<List<Map<String, String>>>anyObject())) .andReturn(storedOffsets); PowerMock.expectNew(KafkaConsumer.class, new Class[] { Properties.class }, config.getKafkaConsumerProperties())
// NOTE(review): fragment — the offset store returns no committed offsets, and
// KafkaConsumer construction is intercepted so the mock is used. Surrounding
// expectations are not visible from this fragment.
EasyMock.expect(offsetStorageReader.offsets(EasyMock.<List<Map<String, String>>>anyObject())) .andReturn(new HashMap<>()); PowerMock.expectNew(KafkaConsumer.class, new Class[] { Properties.class }, config.getKafkaConsumerProperties())