Map<String, List<Integer>> brokersByRack = new HashMap<>();
for (BrokerMetadata bm : JavaConversions.seqAsJavaList(
    AdminUtils.getBrokerMetadatas(zkUtils, RackAwareMode.Enforced$.MODULE$, Option.empty()))) {
  // Brokers with no rack configured fall back to their broker id as a pseudo-rack.
  String rack = bm.rack().isEmpty() ? String.valueOf(bm.id()) : bm.rack().get();
  brokersByRack.putIfAbsent(rack, new ArrayList<>());
  brokersByRack.get(rack).add(bm.id());
}
// newAssignedReplica (a java.util.List of replica ids built by surrounding code not shown here)
// is converted back to a Scala Seq before being handed to the reassignment API.
Seq<Integer> newAssignedReplicaSeq =
    JavaConverters.asScalaIteratorConverter(newAssignedReplica.iterator()).asScala().toSeq();
public static Optional<File> compile(File source, File sourceDirectory, File generatedDirectory,
    String formatterType, Collection<String> additionalImports, List<String> constructorAnnotations,
    Codec codec, boolean inclusiveDot) {
  Seq<String> scalaAdditionalImports =
      JavaConverters$.MODULE$.asScalaBufferConverter(new ArrayList<String>(additionalImports)).asScala();
  Seq<String> scalaConstructorAnnotations =
      JavaConverters$.MODULE$.asScalaBufferConverter(constructorAnnotations).asScala();
  Option<File> option = play.twirl.compiler.TwirlCompiler.compile(source, sourceDirectory,
      generatedDirectory, formatterType, scalaAdditionalImports, scalaConstructorAnnotations,
      codec, inclusiveDot);
  return Optional.ofNullable(option.nonEmpty() ? option.get() : null);
}
@Deprecated
@SuppressWarnings("unchecked")
public static void configure(Job job, Map<String, String> dataStoreParams, String featureTypeName,
    String filter, String[] transform) {
  Object m = JavaConverters.mapAsScalaMapConverter(dataStoreParams).asScala();
  scala.collection.immutable.Map<String, String> scalaParams =
      ((scala.collection.mutable.Map<String, String>) m).toMap(Predef.<Tuple2<String, String>>conforms());
  Option<String> f = Option.apply(filter);
  Option<String[]> t = Option.apply(transform);
  GeoMesaAccumuloInputFormat$.MODULE$.configure(job, scalaParams, featureTypeName, f, t);
}
TaskInstance createTaskInstance(AsyncStreamTask task, TaskName taskName, SystemStreamPartition ssp,
    OffsetManager manager, SystemConsumers consumers) {
  TaskModel taskModel = mock(TaskModel.class);
  when(taskModel.getTaskName()).thenReturn(taskName);
  TaskInstanceMetrics taskInstanceMetrics = new TaskInstanceMetrics("task", new MetricsRegistryMap());
  scala.collection.immutable.Set<SystemStreamPartition> sspSet =
      JavaConverters.asScalaSetConverter(Collections.singleton(ssp)).asScala().toSet();
  return new TaskInstance(task, taskModel, taskInstanceMetrics, null, consumers,
      mock(TaskInstanceCollector.class), manager, null, null, null, sspSet,
      new TaskInstanceExceptionHandler(taskInstanceMetrics, new scala.collection.immutable.HashSet<String>()),
      null, null, null, new scala.collection.immutable.HashSet<>(), null,
      mock(JobContext.class), mock(ContainerContext.class),
      Option.apply(null), Option.apply(null), Option.apply(null));
}
@Override
public Object deserialize(JsonNode n, ObjectMapper mapper) {
  HashMap<String, Object> o = new HashMap<String, Object>();
  try {
    logger.debug("using custom map deserializer");
    Iterator<Map.Entry<String, JsonNode>> e = n.fields();
    while (e.hasNext()) {
      Map.Entry<String, JsonNode> ee = e.next();
      o.put(ee.getKey(), parent.deserialize(ee.getValue(), mapper));
    }
  } catch (Exception e) {
    logger.error("exception deserializing Map {}", e.getMessage());
    o = null;
  }
  if (o != null) {
    return JavaConverters.mapAsScalaMapConverter(o).asScala()
        .toMap(Predef.<Tuple2<String, Object>>conforms());
  }
  return null;
}
Map<SystemStream, SystemStreamMetadata> metadata = JavaConverters.mapAsJavaMapConverter(
    streamMetadataCache.getStreamMetadata(
        JavaConverters.asScalaSetConverter(systemStreamToSsp.keySet()).asScala().toSet(),
        false))
    .asJava();
@Test
public void combineByKey() {
  JavaRDD<Integer> originalRDD = sc.parallelize(Arrays.asList(1, 2, 3, 4, 5, 6));
  Function<Integer, Integer> keyFunction = v1 -> v1 % 3;
  Function<Integer, Integer> createCombinerFunction = v1 -> v1;
  Function2<Integer, Integer, Integer> mergeValueFunction = (v1, v2) -> v1 + v2;

  JavaPairRDD<Integer, Integer> combinedRDD = originalRDD.keyBy(keyFunction)
      .combineByKey(createCombinerFunction, mergeValueFunction, mergeValueFunction);
  Map<Integer, Integer> results = combinedRDD.collectAsMap();
  ImmutableMap<Integer, Integer> expected = ImmutableMap.of(0, 9, 1, 5, 2, 7);
  assertEquals(expected, results);

  Partitioner defaultPartitioner = Partitioner.defaultPartitioner(
      combinedRDD.rdd(),
      JavaConverters.collectionAsScalaIterableConverter(Collections.<RDD<?>>emptyList()).asScala().toSeq());
  combinedRDD = originalRDD.keyBy(keyFunction)
      .combineByKey(createCombinerFunction, mergeValueFunction, mergeValueFunction,
          defaultPartitioner, false, new KryoSerializer(new SparkConf()));
  results = combinedRDD.collectAsMap();
  assertEquals(expected, results);
}
/**
 * Converts a Java List to a Scala Seq.
 *
 * @param list the Java list.
 * @param <T> the element type.
 * @return the converted Seq.
 */
public static <T> scala.collection.Seq<T> toSeq(java.util.List<T> list) {
  return scala.collection.JavaConverters.asScalaBufferConverter(list).asScala();
}
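A minimal usage sketch for the toSeq helper above (the call site and element values are hypothetical):

// Hypothetical call site for toSeq; the values are placeholders.
java.util.List<String> names = java.util.Arrays.asList("alpha", "beta", "gamma");
scala.collection.Seq<String> nameSeq = toSeq(names);
System.out.println(nameSeq.size());          // 3
System.out.println(nameSeq.mkString(", "));  // alpha, beta, gamma

Because asScala returns a wrapper rather than a copy, the resulting Seq is a live view of the underlying Java list.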
private List<TopicAndPartition> getTopicPartitions(String t) {
  List<TopicAndPartition> tpList = new ArrayList<>();
  List<String> l = Arrays.asList(t);
  java.util.Map<String, Seq<Object>> tpMap = JavaConverters.mapAsJavaMapConverter(
      zkUtils.getPartitionsForTopics(
          JavaConverters.asScalaIteratorConverter(l.iterator()).asScala().toSeq()))
      .asJava();
  if (tpMap != null) {
    ArrayList<Object> partitionLists =
        new ArrayList<>(JavaConverters.seqAsJavaListConverter(tpMap.get(t)).asJava());
    tpList = partitionLists.stream()
        .map(p -> new TopicAndPartition(t, (Integer) p))
        .collect(toList());
  }
  return tpList;
}
public Map<TopicAndPartition, Integer> checkReassignStatus(String reassignStr) {
  Map<TopicAndPartition, Seq<Object>> partitionsToBeReassigned =
      JavaConverters.mapAsJavaMapConverter(zkUtils.parsePartitionReassignmentData(reassignStr)).asJava();
  Map<TopicAndPartition, ?> partitionsBeingReassigned =
      JavaConverters.mapAsJavaMapConverter(zkUtils.getPartitionsBeingReassigned()).asJava();
  Map<TopicAndPartition, Integer> reassignStatus = new HashMap<>();
  // checkPartitionStatus is a stand-in for the status-check helper whose name is missing from this
  // snippet; each in-flight partition is checked with its reassignment entry and both maps
  // converted back to Scala maps.
  for (Map.Entry<TopicAndPartition, ?> pbr : partitionsBeingReassigned.entrySet()) {
    reassignStatus.put(pbr.getKey(), checkPartitionStatus(
        pbr.getKey(), pbr.getValue(),
        JavaConverters.mapAsScalaMapConverter(partitionsToBeReassigned).asScala(),
        JavaConverters.mapAsScalaMapConverter(partitionsBeingReassigned).asScala()));
  }
  return reassignStatus;
}
private static UnsafeProjection projection(Schema finalSchema, Schema readSchema) {
  StructType struct = convert(readSchema);

  List<AttributeReference> refs = seqAsJavaListConverter(struct.toAttributes()).asJava();
  List<Attribute> attrs = Lists.newArrayListWithExpectedSize(struct.fields().length);
  List<org.apache.spark.sql.catalyst.expressions.Expression> exprs =
      Lists.newArrayListWithExpectedSize(struct.fields().length);

  for (AttributeReference ref : refs) {
    attrs.add(ref.toAttribute());
  }

  for (Types.NestedField field : finalSchema.columns()) {
    int indexInReadSchema = struct.fieldIndex(field.name());
    exprs.add(refs.get(indexInReadSchema));
  }

  return UnsafeProjection.create(
      asScalaBufferConverter(exprs).asScala().toSeq(),
      asScalaBufferConverter(attrs).asScala().toSeq());
}
public static <T> Set<T> toScalaSet(@NonNull final java.util.Set<T> javaSet) {
  return JavaConverters.asScalaSetConverter(javaSet).asScala().<T>toSet();
}
/**
 * This convenience method should only be called in test code.
 */
@VisibleForTesting
public void write(Iterator<Product2<K, V>> records) throws IOException {
  write(JavaConverters.asScalaIteratorConverter(records).asScala());
}
/**
 * Creates a {@link SparkConf} with {@link org.apache.spark.serializer.KryoSerializer}, registering
 * the default/user-supplied serializable classes and user-supplied Avro schemas.
 * Once the {@link SparkContext} is created, serialization classes and Avro schemas can no longer be registered.
 */
public SparkConf createSparkConf(@NonNull final SparkArgs sparkArgs) {
  // By registering classes with Kryo explicitly, the full class name of each object is not stored
  // during serialization, which reduces storage space.
  final SparkConf sparkConf = new SparkConf();
  sparkConf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");

  final List<Class> serializableClasses = getDefaultSerializableClasses();
  serializableClasses.addAll(sparkArgs.getUserSerializationClasses());
  sparkConf.registerKryoClasses(serializableClasses.toArray(new Class[0]));

  if (sparkArgs.getAvroSchemas().isPresent()) {
    sparkConf.registerAvroSchemas(
        JavaConverters
            .iterableAsScalaIterableConverter(sparkArgs.getAvroSchemas().get())
            .asScala()
            .toSeq());
  }

  // Override Spark properties supplied by the caller.
  final Map<String, String> sparkProps = sparkArgs.getOverrideSparkProperties();
  for (Entry<String, String> entry : sparkProps.entrySet()) {
    log.info("Setting spark key:val {} : {}", entry.getKey(), entry.getValue());
    sparkConf.set(entry.getKey(), entry.getValue());
  }
  return sparkConf;
}
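A self-contained sketch of the same pattern without the SparkArgs wrapper; the class name, app name, registered class, and Avro schema below are hypothetical placeholders:

// Minimal sketch: configure Kryo serialization and register classes/schemas up front,
// before any SparkContext is created.
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.spark.SparkConf;
import scala.collection.JavaConverters;

public class KryoAvroConfSketch {
  public static SparkConf build() {
    SparkConf conf = new SparkConf()
        .setAppName("kryo-avro-conf-sketch") // hypothetical app name
        .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");

    // Registering classes keeps Kryo from writing full class names into every serialized record.
    conf.registerKryoClasses(new Class<?>[] {java.util.ArrayList.class});

    // registerAvroSchemas is a Scala varargs method, so from Java it receives a Scala Seq.
    Schema schema = SchemaBuilder.record("Example").fields().requiredString("name").endRecord();
    conf.registerAvroSchemas(
        JavaConverters.iterableAsScalaIterableConverter(java.util.Collections.singletonList(schema))
            .asScala()
            .toSeq());
    return conf;
  }
}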
/**
 * Convert a Java LinkedList to a Scala Iterable.
 *
 * @param linkedList Java LinkedList to convert
 * @return Scala Iterable
 */
public static scala.collection.Iterable<?> linkedListToScalaIterable(LinkedList<?> linkedList) {
  return JavaConverters.collectionAsScalaIterableConverter(linkedList).asScala();
}
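And a quick usage sketch for the LinkedList helper above (the element values are placeholders):

// Hypothetical call site for linkedListToScalaIterable.
LinkedList<Integer> numbers = new LinkedList<>(java.util.Arrays.asList(1, 2, 3));
scala.collection.Iterable<?> scalaNumbers = linkedListToScalaIterable(numbers);
System.out.println(scalaNumbers.mkString("[", ", ", "]"));  // [1, 2, 3]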
@SuppressWarnings("unchecked") @Override public Object deserialize(JsonNode n, ObjectMapper mapper) { org.apache.commons.lang3.tuple.Pair<String, Object> deserializeObject = TableDisplayDeSerializer.getDeserializeObject(parent, n, mapper); String subtype = deserializeObject.getLeft(); if (subtype != null && subtype.equals(TableDisplay.DICTIONARY_SUBTYPE)) { return JavaConverters.mapAsScalaMapConverter((Map<String, Object>) deserializeObject.getRight()).asScala().toMap(Predef.<Tuple2<String, Object>>conforms()); } else if (subtype != null && subtype.equals(TableDisplay.LIST_OF_MAPS_SUBTYPE)) { List<Map<String, Object>> rows = (List<Map<String, Object>>) deserializeObject.getRight(); List<Object> oo = new ArrayList<Object>(); for (Map<String, Object> row : rows) { oo.add(JavaConverters.mapAsScalaMapConverter(row).asScala().toMap(Predef.<Tuple2<String, Object>>conforms())); } return scala.collection.JavaConversions.collectionAsScalaIterable(oo); } else if (subtype != null && subtype.equals(TableDisplay.MATRIX_SUBTYPE)) { List<List<?>> matrix = (List<List<?>>) deserializeObject.getRight(); ArrayList<Object> ll = new ArrayList<Object>(); for (List<?> ob : matrix) { ll.add(scala.collection.JavaConversions.asScalaBuffer(ob).toList()); } return scala.collection.JavaConversions.asScalaBuffer(ll).toList(); } return deserializeObject.getRight(); }
@Override
public Void run(SentryGenericServiceClient client) throws Exception {
  for (String role : roles) {
    final Set<TSentryPrivilege> rolePrivileges = client.listPrivilegesByRoleName(
        requestorName, role, COMPONENT_NAME, instanceName);
    final scala.collection.immutable.Set<TSentryPrivilege> rolePrivilegesScala =
        scala.collection.JavaConverters.asScalaSetConverter(rolePrivileges).asScala().toSet();
    rolePrivilegesMap.put(role, rolePrivilegesScala);
  }
  return null;
}