/**
 * Reassigns all partitions of {@code topic} across the given brokers via ZooKeeper.
 * <p>
 * Computes a fresh replica assignment with Kafka's {@code AdminUtils}, logs both the
 * current and the newly computed assignment as JSON, then submits the new assignment
 * through {@link KafkaZkClient#createPartitionReassignment}.
 * <p>
 * NOTE(review): this only creates the reassignment znode; it does not wait for the
 * reassignment to complete — callers presumably poll for completion elsewhere (verify).
 *
 * @param zkClient          ZooKeeper client used to read the current assignment and submit the new one
 * @param brokers           brokers eligible to receive replicas
 * @param topic             topic whose partitions are reassigned
 * @param partitionCount    number of partitions to assign
 * @param replicationFactor replicas per partition
 */
private static void reassignPartitions(KafkaZkClient zkClient, Collection<Broker> brokers, String topic, int partitionCount, int replicationFactor) {
  // Convert the Java broker collection into the Scala ArrayBuffer AdminUtils expects.
  // $plus$eq is the Java spelling of Scala's += append operator.
  scala.collection.mutable.ArrayBuffer<BrokerMetadata> brokersMetadata = new scala.collection.mutable.ArrayBuffer<>(brokers.size());
  for (Broker broker : brokers) {
    brokersMetadata.$plus$eq(new BrokerMetadata(broker.id(), broker.rack()));
  }
  // Let Kafka compute a (rack-aware, given BrokerMetadata carries rack) replica placement.
  // The two trailing zeros are the fixedStartIndex / startPartitionId arguments.
  scala.collection.Map<Object, Seq<Object>> assignedReplicas = AdminUtils.assignReplicasToBrokers(brokersMetadata, partitionCount, replicationFactor, 0, 0);
  // Re-key the partitionId -> replicas map as TopicPartition -> replicas, which is the
  // shape createPartitionReassignment requires. Scala immutable maps are rebuilt via $plus (+).
  scala.collection.immutable.Map<TopicPartition, Seq<Object>> newAssignment = new scala.collection.immutable.HashMap<>();
  scala.collection.Iterator<scala.Tuple2<Object, scala.collection.Seq<Object>>> it = assignedReplicas.iterator();
  while (it.hasNext()) {
    scala.Tuple2<Object, scala.collection.Seq<Object>> scalaTuple = it.next();
    TopicPartition tp = new TopicPartition(topic, (Integer) scalaTuple._1);
    newAssignment = newAssignment.$plus(new scala.Tuple2<>(tp, scalaTuple._2));
  }
  // Fetch the assignment currently in ZooKeeper so it can be logged alongside the new one.
  scala.collection.immutable.Set<String> topicList = new scala.collection.immutable.Set.Set1<>(topic);
  scala.collection.Map<Object, scala.collection.Seq<Object>> currentAssignment = zkClient.getPartitionAssignmentForTopics(topicList).apply(topic);
  String currentAssignmentJson = formatAsReassignmentJson(topic, currentAssignment);
  String newAssignmentJson = formatAsReassignmentJson(topic, assignedReplicas);
  LOG.info("Reassign partitions for topic " + topic);
  LOG.info("Current partition replica assignment " + currentAssignmentJson);
  LOG.info("New partition replica assignment " + newAssignmentJson);
  // Side effect: writes the reassignment request to ZooKeeper.
  zkClient.createPartitionReassignment(newAssignment);
}
/**
 * Builds a {@link PartitionInfo} for every partition of {@code topic} from the
 * assignment stored in ZooKeeper.
 * <p>
 * The leader is looked up per partition; when no leader is known the leader field is
 * {@code null}. Replica nodes are constructed from broker ids only — host is left empty
 * and port is -1, since only the id is available from the assignment.
 *
 * @param zkClient ZooKeeper client used to read assignment and leadership
 * @param topic    topic to describe
 * @return one {@link PartitionInfo} per partition (in-sync replicas are not populated)
 */
private static List<PartitionInfo> getPartitionInfo(KafkaZkClient zkClient, String topic) {
  scala.collection.immutable.Set<String> singleTopic = new scala.collection.immutable.Set.Set1<>(topic);
  scala.collection.Map<Object, scala.collection.Seq<Object>> assignments =
      zkClient.getPartitionAssignmentForTopics(singleTopic).apply(topic);
  List<PartitionInfo> result = new ArrayList<>();
  scala.collection.Iterator<scala.Tuple2<Object, scala.collection.Seq<Object>>> entries = assignments.iterator();
  while (entries.hasNext()) {
    scala.Tuple2<Object, scala.collection.Seq<Object>> entry = entries.next();
    Integer partitionId = (Integer) entry._1();
    // Resolve the current leader, if any; absent leader maps to null.
    scala.Option<Object> leaderId = zkClient.getLeaderForPartition(new TopicPartition(topic, partitionId));
    Node leaderNode = leaderId.isEmpty() ? null : new Node((Integer) leaderId.get(), "", -1);
    // Materialize the replica list as Node[] keyed by broker id only.
    scala.collection.Seq<Object> replicaIds = entry._2();
    Node[] replicaNodes = new Node[replicaIds.size()];
    for (int idx = 0; idx < replicaNodes.length; idx++) {
      replicaNodes[idx] = new Node((Integer) replicaIds.apply(idx), "", -1);
    }
    result.add(new PartitionInfo(topic, partitionId, leaderNode, replicaNodes, null));
  }
  return result;
}
/**
 * Generates a Stream that traverses the key-value pairs of a scala.collection.Map.
 * <p>
 * Only sequential operations will be efficient.
 * For efficient parallel operation, use the streamAccumulated method instead, but
 * note that this creates a new collection containing the Map's key-value pairs.
 *
 * @param coll The Map to traverse
 * @return A Stream view of the collection which, by default, executes sequentially.
 */
public static <K, V> Stream<scala.Tuple2<K, V>> stream(scala.collection.Map<K, V> coll) {
  // Wrap the Scala iterator in a stepper and expose it as a sequential Java stream.
  scala.collection.Iterator<scala.Tuple2<K, V>> entries = coll.iterator();
  return StreamSupport.stream(new StepsAnyIterator<scala.Tuple2<K, V>>(entries), false);
}
/**
 * Generates a Stream that traverses the key-value pairs of a scala.collection.Map.
 * <p>
 * Only sequential operations will be efficient.
 * For efficient parallel operation, use the streamAccumulated method instead, but
 * note that this creates a new collection containing the Map's key-value pairs.
 *
 * @param coll The Map to traverse
 * @return A Stream view of the collection which, by default, executes sequentially.
 */
public static <K, V> Stream<scala.Tuple2<K, V>> stream(scala.collection.Map<K, V> coll) {
  // Adapt the map's Scala iterator into a sequential (parallel = false) Java stream.
  StepsAnyIterator<scala.Tuple2<K, V>> stepper = new StepsAnyIterator<scala.Tuple2<K, V>>(coll.iterator());
  return StreamSupport.stream(stepper, false);
}
/**
 * Generates a Stream that traverses the key-value pairs of a scala.collection.Map.
 * <p>
 * Only sequential operations will be efficient.
 * For efficient parallel operation, use the streamAccumulated method instead, but
 * note that this creates a new collection containing the Map's key-value pairs.
 *
 * @param coll The Map to traverse
 * @return A Stream view of the collection which, by default, executes sequentially.
 */
public static <K, V> Stream<scala.Tuple2<K, V>> stream(scala.collection.Map<K, V> coll) {
  // Build a stepper over the map's entries and hand it to StreamSupport as a
  // non-parallel stream source.
  return StreamSupport.stream(
      new StepsAnyIterator<scala.Tuple2<K, V>>(coll.iterator()),
      false);
}
/**
 * Resets the shared metrics buffer and refills it from the given stats summary.
 * <p>
 * Counters, gauges and distribution metrics are each walked in turn and handed to the
 * converter, which serializes them into the OpenTSDB buffer. Gauge values are narrowed
 * from double to float because the converter's gauge API takes a float.
 *
 * @param summary   snapshot of counters, gauges and distribution metrics
 * @param epochSecs timestamp (seconds since epoch) attached to every emitted point
 */
private void fillMetricsBuffer(StatsSummary summary, int epochSecs) {
  buffer.reset();
  OpenTsdbClient.MetricsBuffer out = buffer;

  // Counters: the double cast launders the summary's wildcard value type to Long.
  Map<String, Long> counters = (Map<String, Long>) (Map<String, ?>) summary.counters();
  for (Iterator<Tuple2<String, Long>> it = counters.iterator(); it.hasNext(); ) {
    Tuple2<String, Long> entry = it.next();
    converter.convertCounter(entry._1(), epochSecs, entry._2(), out);
  }

  // Gauges: same laundering cast; values are narrowed to float for the converter.
  Map<String, Double> gauges = (Map<String, Double>) (Map<String, ?>) summary.gauges();
  for (Iterator<Tuple2<String, Double>> it = gauges.iterator(); it.hasNext(); ) {
    Tuple2<String, Double> entry = it.next();
    converter.convertGauge(entry._1(), epochSecs, (float) entry._2().doubleValue(), out);
  }

  // Distribution metrics pass through as-is.
  Map<String, Distribution> metrics = summary.metrics();
  for (Iterator<Tuple2<String, Distribution>> it = metrics.iterator(); it.hasNext(); ) {
    Tuple2<String, Distribution> entry = it.next();
    converter.convertMetric(entry._1(), epochSecs, entry._2(), out);
  }
}
/**
 * Clears the metrics buffer, then serializes every counter, gauge and distribution
 * metric from {@code summary} into it via the converter.
 * <p>
 * Gauge doubles are cast down to float to match the converter's gauge signature.
 *
 * @param summary   the stats snapshot to serialize
 * @param epochSecs epoch-seconds timestamp stamped onto each data point
 */
private void fillMetricsBuffer(StatsSummary summary, int epochSecs) {
  buffer.reset();
  OpenTsdbClient.MetricsBuffer sink = buffer;

  // The (Map<String, ?>) intermediate cast bridges the summary's value type to Long.
  Map<String, Long> counterMap = (Map<String, Long>) (Map<String, ?>) summary.counters();
  Iterator<Tuple2<String, Long>> counterIt = counterMap.iterator();
  while (counterIt.hasNext()) {
    Tuple2<String, Long> counter = counterIt.next();
    converter.convertCounter(counter._1(), epochSecs, counter._2(), sink);
  }

  Map<String, Double> gaugeMap = (Map<String, Double>) (Map<String, ?>) summary.gauges();
  Iterator<Tuple2<String, Double>> gaugeIt = gaugeMap.iterator();
  while (gaugeIt.hasNext()) {
    Tuple2<String, Double> gauge = gaugeIt.next();
    // Narrow to float: that's the type convertGauge accepts.
    converter.convertGauge(gauge._1(), epochSecs, (float) gauge._2().doubleValue(), sink);
  }

  Map<String, Distribution> metricMap = summary.metrics();
  Iterator<Tuple2<String, Distribution>> metricIt = metricMap.iterator();
  while (metricIt.hasNext()) {
    Tuple2<String, Distribution> metric = metricIt.next();
    converter.convertMetric(metric._1(), epochSecs, metric._2(), sink);
  }
}