/**
 * Util method that checks that the output schema of the pipeline's last transformer
 * contains exactly one field, and returns that field's name.
 *
 * <p>NOTE(review): the previous javadoc described the *input* schema and a double-type
 * check; this method only validates that the output is scalar (single field) — the
 * type of that field is not checked here.
 *
 * @param mdl Pipeline model.
 * @return Name of the single output field.
 * @throws IllegalArgumentException if the output schema has more than one field.
 */
private String checkAndGetOutputSchema(PipelineModel mdl) {
    Transformer lastTransformer = mdl.transformers().last();
    StructType outputSchema = lastTransformer.outputSchema();
    List<StructField> output =
        new ArrayList<>(JavaConverters.seqAsJavaListConverter(outputSchema.fields()).asJava());

    // Only scalar (single-field) outputs are supported by this parser.
    if (output.size() != 1)
        throw new IllegalArgumentException("Parser supports only scalar outputs");

    return output.get(0).name();
}
/**
 * Generates the Kafka bootstrap connection string from the metadata stored in Zookeeper.
 * Allows for backwards compatibility of the zookeeperConnect configuration.
 */
private String lookupBootstrap(String zookeeperConnect, SecurityProtocol securityProtocol) {
    try (KafkaZkClient zkClient = KafkaZkClient.apply(zookeeperConnect,
            JaasUtils.isZkSecurityEnabled(), ZK_SESSION_TIMEOUT, ZK_CONNECTION_TIMEOUT, 10,
            Time.SYSTEM, "kafka.server", "SessionExpireListener")) {
        // Resolve the listener once; every broker endpoint is looked up against it.
        ListenerName listenerName = ListenerName.forSecurityProtocol(securityProtocol);
        List<Broker> brokers =
            JavaConverters.seqAsJavaListConverter(zkClient.getAllBrokersInCluster()).asJava();

        // Collect "host:port" strings for each broker registered in Zookeeper.
        List<String> connections = new ArrayList<>();
        for (Broker broker : brokers) {
            BrokerEndPoint endPoint = broker.brokerEndPoint(listenerName);
            connections.add(endPoint.connectionString());
        }
        return StringUtils.join(connections, ',');
    }
}
/**
 * Util method that checks that the input schema of the pipeline's first transformer
 * contains only double types and returns the list of input field names.
 *
 * <p>NOTE(review): the previous javadoc described the *output* schema; this method
 * inspects the first transformer's input schema.
 *
 * @param mdl Pipeline model.
 * @return List of input field names.
 * @throws IllegalArgumentException if any input field is not of double type.
 */
private List<String> checkAndGetInputSchema(PipelineModel mdl) {
    Transformer firstTransformer = mdl.transformers().head();
    StructType inputSchema = firstTransformer.inputSchema();
    List<StructField> input =
        new ArrayList<>(JavaConverters.seqAsJavaListConverter(inputSchema.fields()).asJava());

    List<String> schema = new ArrayList<>();

    for (StructField field : input) {
        String fieldName = field.name();
        // Reuse the extracted name instead of calling field.name() twice.
        schema.add(fieldName);
        if (!ScalarType.Double().base().equals(field.dataType().base()))
            throw new IllegalArgumentException("Parser supports only double types [name="
                + fieldName + ",type=" + field.dataType() + "]");
    }

    return schema;
}
}
/**
 * Converts a Scala List to Java.
 *
 * @param scalaList the scala list.
 * @return the java list
 * @param <T> the return type.
 */
public static <T> java.util.List<T> asJava(scala.collection.Seq<T> scalaList) {
    return scala.collection.JavaConverters.seqAsJavaListConverter(scalaList).asJava();
}
/**
 * Bridges a Scala sequence into the Java collections world.
 *
 * @param scalaList the Scala sequence to convert.
 * @return the Java list view of {@code scalaList}.
 * @param <T> the element type.
 */
public static <T> java.util.List<T> asJava(scala.collection.Seq<T> scalaList) {
    java.util.List<T> javaView =
        scala.collection.JavaConverters.seqAsJavaListConverter(scalaList).asJava();
    return javaView;
}
/**
 * Exposes a Scala sequence as a Java list.
 *
 * @param scalaList the Scala sequence to convert.
 * @return the Java list backed by {@code scalaList}.
 * @param <T> the element type.
 */
public static <T> java.util.List<T> asJava(scala.collection.Seq<T> scalaList) {
    java.util.List<T> converted =
        scala.collection.JavaConverters.seqAsJavaListConverter(scalaList).asJava();
    return converted;
}
/**
 * Adapts a Scala sequence to the Java {@code List} interface.
 *
 * @param scalaList the Scala sequence to adapt.
 * @return the adapted Java list.
 * @param <T> the element type.
 */
public static <T> java.util.List<T> asJava(scala.collection.Seq<T> scalaList) {
    java.util.List<T> result =
        scala.collection.JavaConverters.seqAsJavaListConverter(scalaList).asJava();
    return result;
}
/**
 * Converts a Scala List to Java.
 *
 * @param scalaList the scala list.
 * @return the java list
 * @param <T> the return type.
 */
public static <T> java.util.List<T> asJava(scala.collection.Seq<T> scalaList) {
    return scala.collection.JavaConverters.seqAsJavaListConverter(scalaList).asJava();
}
/**
 * Converts a Scala map of string sequences into a Java map of string lists,
 * converting each value's Scala {@code Seq} to a Java {@code List} along the way.
 */
private static java.util.Map<String, List<String>> asJava(
        scala.collection.Map<String, Seq<String>> scalaMap) {
    return ScalaStreamSupport.stream(scalaMap)
        .collect(toMap(
            entry -> entry._1(),
            entry -> seqAsJavaListConverter(entry._2()).asJava()));
}
/**
 * Looks up all partitions of the given topic via Zookeeper.
 *
 * @param t the topic name.
 * @return the topic's partitions as {@code TopicAndPartition}s; an empty list when
 *         the topic is unknown.
 */
private List<TopicAndPartition> getTopicPartitions(String t) {
    List<String> topics = Arrays.asList(t);
    java.util.Map<String, Seq<Object>> tpMap = JavaConverters.mapAsJavaMapConverter(
        zkUtils.getPartitionsForTopics(
            JavaConverters.asScalaIteratorConverter(topics.iterator()).asScala().toSeq()))
        .asJava();

    // Guard the per-topic lookup, not only the map itself: get(t) is null when the
    // topic is absent, and passing a null Seq to seqAsJavaListConverter would NPE.
    if (tpMap == null || tpMap.get(t) == null) {
        return new ArrayList<>();
    }

    return new ArrayList<>(JavaConverters.seqAsJavaListConverter(tpMap.get(t)).asJava())
        .stream()
        .map(p -> new TopicAndPartition(t, (Integer) p))
        .collect(toList());
}
/**
 * Turns a Scala {@code Map[String, Seq[String]]} into a Java map keyed the same way,
 * with each Scala sequence value converted to a Java list.
 */
private static java.util.Map<String, List<String>> asJava(
        scala.collection.Map<String, Seq<String>> scalaMap) {
    return ScalaStreamSupport.stream(scalaMap)
        .collect(toMap(
            pair -> pair._1(),
            pair -> seqAsJavaListConverter(pair._2()).asJava()));
}
/**
 * Instantiates the configured {@code MetricsReporter}s for the given container.
 *
 * @param config the metrics configuration listing reporter names and factory classes.
 * @param containerName the name of the container the reporters are created for.
 * @return a map from reporter name to its instantiated reporter.
 * @throws SamzaException if a configured reporter has no .class config.
 */
public static Map<String, MetricsReporter> getMetricsReporters(MetricsConfig config, String containerName) {
    Map<String, MetricsReporter> metricsReporters = new HashMap<>();
    for (String metricsReporterName : JavaConverters.seqAsJavaListConverter(config.getMetricReporterNames()).asJava()) {
        // getMetricsFactoryClass returns an Option-like holder; calling get() on an empty
        // one throws NoSuchElementException BEFORE the old null check could ever fire,
        // losing the intended SamzaException. Check emptiness first instead.
        if (config.getMetricsFactoryClass(metricsReporterName).isEmpty()) {
            throw new SamzaException(String.format("Metrics reporter %s missing .class config", metricsReporterName));
        }
        String metricsFactoryClassName = config.getMetricsFactoryClass(metricsReporterName).get();
        MetricsReporterFactory metricsReporterFactory = Util.getObj(metricsFactoryClassName, MetricsReporterFactory.class);
        metricsReporters.put(metricsReporterName, metricsReporterFactory.getMetricsReporter(metricsReporterName, containerName, config));
    }
    return metricsReporters;
}
}
/**
 * Instantiates the configured {@code MetricsReporter}s for the given container.
 *
 * @param config the metrics configuration listing reporter names and factory classes.
 * @param containerName the name of the container the reporters are created for.
 * @return a map from reporter name to its instantiated reporter.
 * @throws SamzaException if a configured reporter has no .class config.
 */
public static Map<String, MetricsReporter> getMetricsReporters(MetricsConfig config, String containerName) {
    Map<String, MetricsReporter> metricsReporters = new HashMap<>();
    for (String metricsReporterName : JavaConverters.seqAsJavaListConverter(config.getMetricReporterNames()).asJava()) {
        // getMetricsFactoryClass returns an Option-like holder; calling get() on an empty
        // one throws NoSuchElementException BEFORE the old null check could ever fire,
        // losing the intended SamzaException. Check emptiness first instead.
        if (config.getMetricsFactoryClass(metricsReporterName).isEmpty()) {
            throw new SamzaException(String.format("Metrics reporter %s missing .class config", metricsReporterName));
        }
        String metricsFactoryClassName = config.getMetricsFactoryClass(metricsReporterName).get();
        MetricsReporterFactory metricsReporterFactory = Util.getObj(metricsFactoryClassName, MetricsReporterFactory.class);
        metricsReporters.put(metricsReporterName, metricsReporterFactory.getMetricsReporter(metricsReporterName, containerName, config));
    }
    return metricsReporters;
}
}
/**
 * Instantiates the configured {@code MetricsReporter}s for the given container.
 *
 * @param config the metrics configuration listing reporter names and factory classes.
 * @param containerName the name of the container the reporters are created for.
 * @return a map from reporter name to its instantiated reporter.
 * @throws SamzaException if a configured reporter has no .class config.
 */
public static Map<String, MetricsReporter> getMetricsReporters(MetricsConfig config, String containerName) {
    Map<String, MetricsReporter> metricsReporters = new HashMap<>();
    for (String metricsReporterName : JavaConverters.seqAsJavaListConverter(config.getMetricReporterNames()).asJava()) {
        // getMetricsFactoryClass returns an Option-like holder; calling get() on an empty
        // one throws NoSuchElementException BEFORE the old null check could ever fire,
        // losing the intended SamzaException. Check emptiness first instead.
        if (config.getMetricsFactoryClass(metricsReporterName).isEmpty()) {
            throw new SamzaException(String.format("Metrics reporter %s missing .class config", metricsReporterName));
        }
        String metricsFactoryClassName = config.getMetricsFactoryClass(metricsReporterName).get();
        MetricsReporterFactory metricsReporterFactory = Util.getObj(metricsFactoryClassName, MetricsReporterFactory.class);
        metricsReporters.put(metricsReporterName, metricsReporterFactory.getMetricsReporter(metricsReporterName, containerName, config));
    }
    return metricsReporters;
}
}
/**
 * Instantiates the configured {@code MetricsReporter}s for the given container.
 *
 * @param config the metrics configuration listing reporter names and factory classes.
 * @param containerName the name of the container the reporters are created for.
 * @return a map from reporter name to its instantiated reporter.
 * @throws SamzaException if a configured reporter has no .class config.
 */
public static Map<String, MetricsReporter> getMetricsReporters(MetricsConfig config, String containerName) {
    Map<String, MetricsReporter> metricsReporters = new HashMap<>();
    for (String metricsReporterName : JavaConverters.seqAsJavaListConverter(config.getMetricReporterNames()).asJava()) {
        // getMetricsFactoryClass returns an Option-like holder; calling get() on an empty
        // one throws NoSuchElementException BEFORE the old null check could ever fire,
        // losing the intended SamzaException. Check emptiness first instead.
        if (config.getMetricsFactoryClass(metricsReporterName).isEmpty()) {
            throw new SamzaException(String.format("Metrics reporter %s missing .class config", metricsReporterName));
        }
        String metricsFactoryClassName = config.getMetricsFactoryClass(metricsReporterName).get();
        MetricsReporterFactory metricsReporterFactory = Util.getObj(metricsFactoryClassName, MetricsReporterFactory.class);
        metricsReporters.put(metricsReporterName, metricsReporterFactory.getMetricsReporter(metricsReporterName, containerName, config));
    }
    return metricsReporters;
}
}
/**
 * Converts a Scala {@code Seq} to a Java {@code List}, propagating {@code null}.
 *
 * <p>Generified to avoid the raw {@code List} return type; raw-type callers and
 * wildcard callers remain source-compatible via capture conversion.
 *
 * @param val the Scala sequence, possibly {@code null}.
 * @return the Java list view, or {@code null} when {@code val} is {@code null}.
 * @param <T> the element type.
 */
private static <T> List<T> toList(Seq<T> val) {
    return val == null ? null : seqAsJavaListConverter(val).asJava();
}
// Compare the bean's int[] stored under key "hello" against the Scala buffer
// converted back to a Java list and then to an int[].
Assert.assertArrayEquals(
    bean.getC().get("hello"),
    Ints.toArray(JavaConverters.seqAsJavaListConverter(outputBuffer).asJava()));
// Column 3 is read back as a Scala Seq; only its length is asserted here,
// not element-wise equality.
Seq<String> d = first.getAs(3);
Assert.assertEquals(bean.getD().size(), d.length());
// Compare the bean's int[] stored under key "hello" against the Scala buffer
// converted back to a Java list and then to an int[].
Assert.assertArrayEquals(
    bean.getC().get("hello"),
    Ints.toArray(JavaConverters.seqAsJavaListConverter(outputBuffer).asJava()));
// Column 3 is read back as a Scala Seq; only its length is asserted here,
// not element-wise equality.
Seq<String> d = first.getAs(3);
Assert.assertEquals(bean.getD().size(), d.length());
// Compare the bean's int[] stored under key "hello" against the Scala buffer
// converted back to a Java list and then to an int[].
Assert.assertArrayEquals(
    bean.getC().get("hello"),
    Ints.toArray(JavaConverters.seqAsJavaListConverter(outputBuffer).asJava()));
// Column 3 is read back as a Scala Seq; only its length is asserted here,
// not element-wise equality.
Seq<String> d = first.getAs(3);
Assert.assertEquals(bean.getD().size(), d.length());
@Override public void setUp() throws IOException { super.setUp(); int nPoints = 3; // The following coefficients and xMean/xVariance are computed from iris dataset with // lambda=0.2. // As a result, we are drawing samples from probability distribution of an actual model. double[] coefficients = { -0.57997, 0.912083, -0.371077, -0.819866, 2.688191, -0.16624, -0.84355, -0.048509, -0.301789, 4.170682}; double[] xMean = {5.843, 3.057, 3.758, 1.199}; double[] xVariance = {0.6856, 0.1899, 3.116, 0.581}; List<LabeledPoint> points = JavaConverters.seqAsJavaListConverter( generateMultinomialLogisticInput(coefficients, xMean, xVariance, true, nPoints, 42) ).asJava(); datasetRDD = jsc.parallelize(points, 2); dataset = spark.createDataFrame(datasetRDD, LabeledPoint.class); }