/**
 * Deserializes a JSON object node into an immutable Scala map, delegating each
 * entry value to the parent deserializer.
 *
 * @param n the JSON node to deserialize; expected to be an object node
 * @param mapper the Jackson mapper used for nested value deserialization
 * @return an immutable scala.collection Map of the entries, or {@code null}
 *         if deserialization of any entry fails
 */
@Override
public Object deserialize(JsonNode n, ObjectMapper mapper) {
  HashMap<String, Object> result = new HashMap<String, Object>();
  try {
    logger.debug("using custom map deserializer");
    Iterator<Map.Entry<String, JsonNode>> entries = n.fields();
    while (entries.hasNext()) {
      Map.Entry<String, JsonNode> entry = entries.next();
      result.put(entry.getKey(), parent.deserialize(entry.getValue(), mapper));
    }
  } catch (Exception e) {
    // Pass the Throwable itself so the full stack trace is preserved,
    // instead of logging only e.getMessage() (which loses the cause chain).
    logger.error("exception deserializing Map", e);
    result = null;
  }
  if (result != null) {
    return JavaConverters.mapAsScalaMapConverter(result).asScala()
        .toMap(Predef.<Tuple2<String, Object>>conforms());
  }
  return null;
}
/**
 * Deserializes a table-display JSON node into the Scala collection type that
 * matches its subtype: a map for dictionaries, an iterable of maps for row
 * lists, a list of lists for matrices, or the raw value otherwise.
 *
 * @param n the JSON node to deserialize
 * @param mapper the Jackson mapper used for nested deserialization
 * @return the Scala-converted value for the detected subtype
 */
@SuppressWarnings("unchecked")
@Override
public Object deserialize(JsonNode n, ObjectMapper mapper) {
  org.apache.commons.lang3.tuple.Pair<String, Object> deserialized =
      TableDisplayDeSerializer.getDeserializeObject(parent, n, mapper);
  String subtype = deserialized.getLeft();
  Object value = deserialized.getRight();
  // Constant-first equals() makes the null-subtype case fall through naturally.
  if (TableDisplay.DICTIONARY_SUBTYPE.equals(subtype)) {
    return JavaConverters.mapAsScalaMapConverter((Map<String, Object>) value)
        .asScala().toMap(Predef.<Tuple2<String, Object>>conforms());
  }
  if (TableDisplay.LIST_OF_MAPS_SUBTYPE.equals(subtype)) {
    List<Map<String, Object>> rows = (List<Map<String, Object>>) value;
    List<Object> convertedRows = new ArrayList<Object>();
    for (Map<String, Object> row : rows) {
      convertedRows.add(JavaConverters.mapAsScalaMapConverter(row)
          .asScala().toMap(Predef.<Tuple2<String, Object>>conforms()));
    }
    return scala.collection.JavaConversions.collectionAsScalaIterable(convertedRows);
  }
  if (TableDisplay.MATRIX_SUBTYPE.equals(subtype)) {
    List<List<?>> matrix = (List<List<?>>) value;
    ArrayList<Object> scalaRows = new ArrayList<Object>();
    for (List<?> row : matrix) {
      scalaRows.add(scala.collection.JavaConversions.asScalaBuffer(row).toList());
    }
    return scala.collection.JavaConversions.asScalaBuffer(scalaRows).toList();
  }
  return value;
}
JavaConverters.asScalaIteratorConverter(newAssignedReplica.iterator()).asScala().toSeq());
/**
 * Converts the message varargs into an immutable Scala {@code Seq}, wrapping
 * them in an intermediate list first when necessary.
 *
 * @param args the message arguments
 * @return a Scala sequence suitable for message processing
 */
private static Seq<Object> convertArgsToScalaBuffer(final Object... args) {
  return scala.collection.JavaConverters
      .asScalaBufferConverter(wrapArgsToListIfNeeded(args))
      .asScala()
      .toList();
}
/**
 * Converts the message varargs into a Scala {@code Buffer}, wrapping them in
 * an intermediate list first when necessary.
 *
 * @param args the message arguments
 * @return a Scala buffer suitable for message processing
 */
private static Buffer<Object> convertArgsToScalaBuffer(final Object... args) {
  return scala.collection.JavaConverters
      .asScalaBufferConverter(wrapArgsToListIfNeeded(args))
      .asScala();
}
/**
 * Converts a Java {@link Collection} into an immutable Scala {@code Seq}.
 *
 * @param javaCollection the Java collection to convert
 * @param <A> the element type
 * @return an immutable Scala sequence containing the same elements
 */
public static <A> scala.collection.immutable.Seq<A> asScala(Collection<A> javaCollection) {
  return scala.collection.JavaConverters
      .collectionAsScalaIterableConverter(javaCollection)
      .asScala()
      .toList();
}
/**
 * Converts a Java {@link java.util.List} into an immutable Scala {@code Seq}.
 *
 * @param list the Java list to convert
 * @param <T> the element type
 * @return an immutable Scala sequence containing the same elements
 */
public static <T> scala.collection.Seq<T> toSeq(java.util.List<T> list) {
  return scala.collection.JavaConverters
      .asScalaBufferConverter(list)
      .asScala()
      .toList();
}
/**
 * Wraps a Java {@link java.util.List} as a mutable Scala {@code Seq} view.
 *
 * @param list the Java list to wrap
 * @return the wrapping Scala sequence
 * @param <T> the element type
 */
public static <T> scala.collection.Seq<T> toSeq(java.util.List<T> list) {
  return scala.collection.JavaConverters
      .asScalaBufferConverter(list)
      .asScala();
}
/**
 * Wraps a Java {@link java.util.List} as a mutable Scala {@code Seq} view.
 *
 * @param list the Java list to wrap
 * @return the wrapping Scala sequence
 * @param <T> the element type
 */
public static <T> scala.collection.Seq<T> toSeq(java.util.List<T> list) {
  return scala.collection.JavaConverters
      .asScalaBufferConverter(list)
      .asScala();
}
/**
 * Bridges the Java-facing bindings into the Scala API: converts each Java
 * binding to its Scala counterpart and returns them as an immutable Scala Seq.
 */
@Override
public final Seq<play.api.inject.Binding<?>> bindings(
    final play.api.Environment environment, final play.api.Configuration configuration) {
  List<play.api.inject.Binding<?>> scalaBindings =
      bindings(environment.asJava(), configuration.underlying()).stream()
          .map(binding -> binding.asScala())
          .collect(Collectors.toList());
  return JavaConverters.collectionAsScalaIterableConverter(scalaBindings).asScala().toList();
}
/**
 * Converts a Java {@link Collection} into an immutable Scala {@code Seq}.
 *
 * @param javaCollection the Java collection to convert
 * @param <A> the element type
 * @return an immutable Scala sequence containing the same elements
 */
public static <A> scala.collection.immutable.Seq<A> asScala(Collection<A> javaCollection) {
  return scala.collection.JavaConverters
      .collectionAsScalaIterableConverter(javaCollection)
      .asScala()
      .toList();
}
/**
 * Converts a Java {@link Map} into an immutable Scala map.
 *
 * @param javaMap the Java map to convert
 * @param <A> the key type
 * @param <B> the value type
 * @return an immutable Scala map with the same entries
 */
public static <A, B> scala.collection.immutable.Map<A, B> asScala(Map<A, B> javaMap) {
  // Build the immutable map from the entry sequence of the wrapped Java map.
  return play.utils.Conversions.newMap(
      scala.collection.JavaConverters.mapAsScalaMapConverter(javaMap).asScala().toSeq());
}
/**
 * Converts the message varargs into an immutable Scala {@code Seq}, wrapping
 * them in an intermediate list first when necessary.
 *
 * @param args the message arguments
 * @return a Scala sequence suitable for message processing
 */
private static Seq<Object> convertArgsToScalaBuffer(final Object... args) {
  return scala.collection.JavaConverters
      .asScalaBufferConverter(wrapArgsToListIfNeeded(args))
      .asScala()
      .toList();
}
/**
 * Wraps a Java {@link java.util.List} as a mutable Scala {@code Seq} view.
 *
 * @param list the Java list to wrap
 * @return the wrapping Scala sequence
 * @param <T> the element type
 */
public static <T> scala.collection.Seq<T> toSeq(java.util.List<T> list) {
  return scala.collection.JavaConverters
      .asScalaBufferConverter(list)
      .asScala();
}
/** Verifies combineByKey with both the default partitioner and an explicit one. */
@Test
public void combineByKey() {
  JavaRDD<Integer> source = sc.parallelize(Arrays.asList(1, 2, 3, 4, 5, 6));
  Function<Integer, Integer> keyFn = n -> n % 3;
  Function<Integer, Integer> createCombiner = n -> n;
  Function2<Integer, Integer, Integer> merge = (a, b) -> a + b;
  JavaPairRDD<Integer, Integer> combined =
      source.keyBy(keyFn).combineByKey(createCombiner, merge, merge);
  Map<Integer, Integer> results = combined.collectAsMap();
  ImmutableMap<Integer, Integer> expected = ImmutableMap.of(0, 9, 1, 5, 2, 7);
  assertEquals(expected, results);
  // Repeat with an explicit partitioner and serializer; the result must match.
  Partitioner partitioner =
      Partitioner.defaultPartitioner(
          combined.rdd(),
          JavaConverters.collectionAsScalaIterableConverter(Collections.<RDD<?>>emptyList())
              .asScala()
              .toSeq());
  combined =
      source.keyBy(keyFn)
          .combineByKey(
              createCombiner, merge, merge, partitioner, false,
              new KryoSerializer(new SparkConf()));
  results = combined.collectAsMap();
  assertEquals(expected, results);
}
/** Verifies combineByKey with both the default partitioner and an explicit one. */
@Test
public void combineByKey() {
  JavaRDD<Integer> source = sc.parallelize(Arrays.asList(1, 2, 3, 4, 5, 6));
  Function<Integer, Integer> keyFn = n -> n % 3;
  Function<Integer, Integer> createCombiner = n -> n;
  Function2<Integer, Integer, Integer> merge = (a, b) -> a + b;
  JavaPairRDD<Integer, Integer> combined =
      source.keyBy(keyFn).combineByKey(createCombiner, merge, merge);
  Map<Integer, Integer> results = combined.collectAsMap();
  ImmutableMap<Integer, Integer> expected = ImmutableMap.of(0, 9, 1, 5, 2, 7);
  assertEquals(expected, results);
  // Repeat with an explicit partitioner and serializer; the result must match.
  Partitioner partitioner =
      Partitioner.defaultPartitioner(
          combined.rdd(),
          JavaConverters.collectionAsScalaIterableConverter(Collections.<RDD<?>>emptyList())
              .asScala()
              .toSeq());
  combined =
      source.keyBy(keyFn)
          .combineByKey(
              createCombiner, merge, merge, partitioner, false,
              new KryoSerializer(new SparkConf()));
  results = combined.collectAsMap();
  assertEquals(expected, results);
}
/** Verifies combineByKey with both the default partitioner and an explicit one. */
@Test
public void combineByKey() {
  JavaRDD<Integer> source = sc.parallelize(Arrays.asList(1, 2, 3, 4, 5, 6));
  Function<Integer, Integer> keyFn = n -> n % 3;
  Function<Integer, Integer> createCombiner = n -> n;
  Function2<Integer, Integer, Integer> merge = (a, b) -> a + b;
  JavaPairRDD<Integer, Integer> combined =
      source.keyBy(keyFn).combineByKey(createCombiner, merge, merge);
  Map<Integer, Integer> results = combined.collectAsMap();
  ImmutableMap<Integer, Integer> expected = ImmutableMap.of(0, 9, 1, 5, 2, 7);
  assertEquals(expected, results);
  // Repeat with an explicit partitioner and serializer; the result must match.
  Partitioner partitioner =
      Partitioner.defaultPartitioner(
          combined.rdd(),
          JavaConverters.collectionAsScalaIterableConverter(Collections.<RDD<?>>emptyList())
              .asScala()
              .toSeq());
  combined =
      source.keyBy(keyFn)
          .combineByKey(
              createCombiner, merge, merge, partitioner, false,
              new KryoSerializer(new SparkConf()));
  results = combined.collectAsMap();
  assertEquals(expected, results);
}
/**
 * Test-only convenience overload: wraps the Java iterator as a Scala iterator
 * and delegates to the Scala-facing write method.
 */
@VisibleForTesting
public void write(Iterator<Product2<K, V>> records) throws IOException {
  scala.collection.Iterator<Product2<K, V>> scalaRecords =
      JavaConverters.asScalaIteratorConverter(records).asScala();
  write(scalaRecords);
}
/**
 * Test-only convenience overload: wraps the Java iterator as a Scala iterator
 * and delegates to the Scala-facing write method.
 */
@VisibleForTesting
public void write(Iterator<Product2<K, V>> records) throws IOException {
  scala.collection.Iterator<Product2<K, V>> scalaRecords =
      JavaConverters.asScalaIteratorConverter(records).asScala();
  write(scalaRecords);
}
/**
 * Test-only convenience overload: wraps the Java iterator as a Scala iterator
 * and delegates to the Scala-facing write method.
 */
@VisibleForTesting
public void write(Iterator<Product2<K, V>> records) throws IOException {
  scala.collection.Iterator<Product2<K, V>> scalaRecords =
      JavaConverters.asScalaIteratorConverter(records).asScala();
  write(scalaRecords);
}