/**
 * Equality is based on the down-conversion target magic, the first offset,
 * the topic-partition, and the underlying records.
 * Note: uses {@code instanceof}, so subclasses may compare equal to this type.
 */
@Override
public boolean equals(Object o) {
    if (!(o instanceof LazyDownConversionRecords)) {
        return false;
    }
    LazyDownConversionRecords other = (LazyDownConversionRecords) o;
    return toMagic == other.toMagic
        && firstOffset == other.firstOffset
        && topicPartition.equals(other.topicPartition)
        && records.equals(other.records);
}
/**
 * Two partition states are equal when they refer to the same topic-partition
 * and hold equal values. Uses {@code getClass()} so subclasses never compare equal.
 */
@Override
public boolean equals(Object o) {
    if (o == this) {
        return true;
    }
    if (o == null || o.getClass() != getClass()) {
        return false;
    }
    PartitionState<?> other = (PartitionState<?>) o;
    return topicPartition.equals(other.topicPartition) && value.equals(other.value);
}
/**
 * Equality is based solely on the wrapped topic-partition.
 *
 * <p>Fix: the comparison is now null-safe. The previous code dereferenced
 * {@code topicPartition} unconditionally and would throw a NullPointerException
 * when it was null; this also makes the method consistent with the null-tolerant
 * variant of this class elsewhere in the codebase.
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    KafkaTridentSpoutTopicPartition that = (KafkaTridentSpoutTopicPartition) o;
    // Null-safe: two instances with null topic-partitions are equal.
    return topicPartition != null
        ? topicPartition.equals(that.topicPartition)
        : that.topicPartition == null;
}
/**
 * Message IDs are equal when they reference the same offset within the same
 * topic-partition.
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    KafkaSpoutMessageId other = (KafkaSpoutMessageId) o;
    // Compare the cheap primitive first, then the topic-partition.
    return offset == other.offset && topicPart.equals(other.topicPart);
}
/**
 * Proposals are equal when they target the same topic-partition and agree on
 * the old leader, old replica list, and new replica list.
 */
@Override
public boolean equals(Object other) {
    // Identity short-circuit; safe to hoist before the type check because
    // `this` is always an ExecutionProposal.
    if (this == other) {
        return true;
    }
    if (!(other instanceof ExecutionProposal)) {
        return false;
    }
    ExecutionProposal that = (ExecutionProposal) other;
    return _tp.equals(that._tp)
        && _oldLeader == that._oldLeader
        && _oldReplicas.equals(that._oldReplicas)
        && _newReplicas.equals(that._newReplicas);
}
/**
 * Partition entities are equal when they wrap the same topic-partition.
 *
 * <p>Fix: dropped the redundant {@code other != null} test — the
 * {@code instanceof} operator already evaluates to {@code false} for null
 * operands (JLS §15.20.2), so the extra check was dead code.
 */
@Override
public boolean equals(Object other) {
    return other instanceof PartitionEntity && _tp.equals(((PartitionEntity) other).tp());
}
}
/**
 * Orders watermarks by their low-water mark.
 *
 * <p>Precondition: {@code o} must be a {@code KafkaWatermark} for the same
 * topic-partition — comparing watermarks across partitions is a programming
 * error and fails fast via {@code Preconditions.checkArgument}.
 */
@Override
public int compareTo(CheckpointableWatermark o) {
    Preconditions.checkArgument(o instanceof KafkaWatermark);
    KafkaWatermark other = (KafkaWatermark) o;
    // Cross-partition comparison is meaningless; reject it up front.
    Preconditions.checkArgument(_topicPartition.equals(other._topicPartition));
    return _lwm.compareTo(other._lwm);
}
/**
 * Add follower to the partition.
 *
 * @param follower Follower replica.
 * @param index the index the follower should be at.
 */
void addFollower(Replica follower, int index) {
    // A replica that reports itself as leader cannot be inserted into the
    // follower list — that would make the partition's leadership state
    // self-contradictory.
    if (follower.isLeader()) {
        throw new IllegalArgumentException("Inconsistent leadership information. Trying to add follower replica "
            + follower + " while it is a leader.");
    }
    // The replica must actually belong to this partition (_tp); adding a
    // replica of a different topic-partition would corrupt the replica list.
    if (!follower.topicPartition().equals(_tp)) {
        throw new IllegalArgumentException("Inconsistent topic partition. Trying to add follower replica "
            + follower + " to partition " + _tp + ".");
    }
    // Add follower to the list of followers.
    // NOTE(review): insertion at `index` implies the follower list is
    // position-sensitive — presumably replica order matters; confirm against callers.
    _replicas.add(index, follower);
}
@Test public void testMissingTopicBytesInMetric() throws UnknownVersionException { CruiseControlMetricsProcessor processor = new CruiseControlMetricsProcessor(); Set<CruiseControlMetric> metrics = getCruiseControlMetrics(); Set<RawMetricType> metricTypeToExclude = new HashSet<>(Arrays.asList(TOPIC_BYTES_IN, TOPIC_BYTES_OUT, TOPIC_REPLICATION_BYTES_IN, TOPIC_REPLICATION_BYTES_OUT)); for (CruiseControlMetric metric : metrics) { if (metricTypeToExclude.contains(metric.rawMetricType())) { TopicMetric tm = (TopicMetric) metric; if (tm.brokerId() == BROKER_ID_0 && tm.topic().equals(TOPIC1)) { continue; } } processor.addMetric(metric); } MetricSampler.Samples samples = processor.process(getCluster(), Arrays.asList(T1P0, T1P1, T2P0, T2P1), MetricSampler.SamplingMode.ALL); assertEquals(4, samples.partitionMetricSamples().size()); assertEquals(2, samples.brokerMetricSamples().size()); for (PartitionMetricSample sample : samples.partitionMetricSamples()) { if (sample.entity().tp().equals(T1P0)) { // T1P0 should not have any IO or CPU usage. validatePartitionMetricSample(sample, _time.milliseconds() + 2, 0.0, 0.0, 0.0, 100.0); } } }
if (sample.entity().tp().equals(T1P0)) { validatePartitionMetricSample(sample, _time.milliseconds() + 2, 1.27610208, 20.0, 80.0, 100.0); } else if (sample.entity().tp().equals(T1P1)) { validatePartitionMetricSample(sample, _time.milliseconds() + 2, 18.5758513, 500.0, 500.0, 300.0); } else if (sample.entity().tp().equals(T2P0)) { validatePartitionMetricSample(sample, _time.milliseconds() + 2, 20.0116009, 400.0, 650.0, 200.0); } else if (sample.entity().tp().equals(T2P1)) { validatePartitionMetricSample(sample, _time.milliseconds() + 2, 20.0116009, 400.0, 650.0, 500.0); } else {
/**
 * Equality is based solely on the wrapped topic-partition; null-safe, so two
 * instances with null topic-partitions compare equal.
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    KafkaTridentSpoutTopicPartition other = (KafkaTridentSpoutTopicPartition) o;
    if (topicPartition == null) {
        // Both null => equal; one null => not equal.
        return other.topicPartition == null;
    }
    return topicPartition.equals(other.topicPartition);
}
/**
 * Message IDs are equal when both the offset and the topic-partition match.
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (o == null) {
        return false;
    }
    if (getClass() != o.getClass()) {
        return false;
    }
    KafkaSpoutMessageId that = (KafkaSpoutMessageId) o;
    if (offset != that.offset) {
        return false;
    }
    return topicPart.equals(that.topicPart);
}
partition.equals(tp), "checkpointed partition %s and assigned partition %s don't match", partition,
if (meta.getTopicPartition().equals(tp)) { kc.resumePartition(tp); } else {
if (meta.getTopicPartition().equals(tp)) { kc.resumePartition(tp); } else {
partition.equals(tp), "checkpointed partition %s and assigned partition %s don't match", partition,