/**
 * Records the bidirectional job/stage mappings when a Spark job starts:
 * each stage id maps to the owning job id, and the job id maps to the
 * full array of its stage ids.
 */
@Override
public synchronized void onJobStart(SparkListenerJobStart jobStart) {
    final int jobId = jobStart.jobId();
    final int stageCount = jobStart.stageIds().size();
    final int[] stages = new int[stageCount];
    for (int idx = 0; idx < stageCount; idx++) {
        Integer stageId = (Integer) jobStart.stageIds().apply(idx);
        stages[idx] = stageId;
        stageIdToJobId.put(stageId, jobId);
    }
    jobIdToStageId.put(jobId, stages);
}
/**
 * Checks that the input schema of the pipeline's first transformer contains only
 * double-typed fields and returns the list of field names.
 *
 * @param mdl Pipeline model.
 * @return List of input field names, in schema order.
 * @throws IllegalArgumentException if any input field is not of a double base type.
 */
private List<String> checkAndGetInputSchema(PipelineModel mdl) {
    Transformer firstTransformer = mdl.transformers().head();
    StructType inputSchema = firstTransformer.inputSchema();
    List<StructField> input =
        new ArrayList<>(JavaConverters.seqAsJavaListConverter(inputSchema.fields()).asJava());

    List<String> schema = new ArrayList<>();
    for (StructField field : input) {
        String fieldName = field.name();
        // Reuse the local instead of calling field.name() a second time.
        schema.add(fieldName);
        if (!ScalarType.Double().base().equals(field.dataType().base()))
            throw new IllegalArgumentException("Parser supports only double types [name="
                + fieldName + ",type=" + field.dataType() + "]");
    }
    return schema;
}
}
/**
 * Converts a Scala {@code Seq} to a Java array.
 *
 * @param clazz     Component type of the resulting array.
 * @param scalaList Scala sequence to copy.
 * @return A new {@code T[]} containing the sequence's elements in order.
 */
// Safe: Array.newInstance(clazz, n) returns an array whose component type is exactly T,
// so the cast to T[] cannot fail at runtime. Suppressed at the smallest possible scope.
@SuppressWarnings("unchecked")
public static <T> T[] asArray(Class<T> clazz, scala.collection.Seq<T> scalaList) {
    T[] arr = (T[]) Array.newInstance(clazz, scalaList.length());
    scalaList.copyToArray(arr);
    return arr;
}
// Clamp the sample-store retention to the configured minimum for the broker sample topic.
brokerSampleRetentionMs = Math.max(_minBrokerSampleStoreTopicRetentionTimeMs, brokerSampleRetentionMs);
int numberOfBrokersInCluster = zkUtils.getAllBrokersInCluster().size();
// NOTE(review): fragment is cut off mid-statement below; presumably the exception message
// explains that more than one broker is required — confirm against the full file.
if (numberOfBrokersInCluster <= 1) { throw new IllegalStateException(
/**
 * Returns a comma-separated list of broker connection strings for the cluster behind
 * {@code zkUrl}, using the endpoint registered for {@code securityProtocol}.
 * Returns {@code null} when the cluster has no brokers (same contract as the
 * original {@code reduce(null, ...)} seed).
 */
public static String getBrokers(String zkUrl, SecurityProtocol securityProtocol) {
    ZkUtils zkUtils = getZkUtils(zkUrl);
    Seq<Broker> brokersSeq = zkUtils.getAllBrokersInCluster();
    Broker[] brokers = new Broker[brokersSeq.size()];
    brokersSeq.copyToArray(brokers);
    StringBuilder joined = null;
    for (Broker broker : brokers) {
        String endpoint = broker.brokerEndPoint(
            ListenerName.forSecurityProtocol(securityProtocol)).connectionString();
        if (joined == null) {
            joined = new StringBuilder(endpoint);
        } else {
            joined.append(',').append(endpoint);
        }
    }
    return joined == null ? null : joined.toString();
}
// Classify each production of this sort: a 3-item production supplies the list
// separator/element info, a single-Terminal production supplies the terminator.
for (Sentence s : x.getValue()) {
    Production p = (Production) s;
    if (p.items().size() == 3) {
        // Assumed shape: NonTerminal Terminal NonTerminal — the middle Terminal is the
        // separator. TODO(review): confirm against the grammar this fragment belongs to.
        Terminal t = (Terminal) p.items().tail().head();
        ul.separator = t.value();
        ul.klabel = p.klabel().get();
        ul.childSort = ((NonTerminal) p.items().head()).sort();
        ul.pList = p;
    } else if (p.items().size() == 1 && p.items().head() instanceof Terminal) {
        // Single-terminal production: records the klabel of the empty-list terminator.
        ul.terminatorKLabel = p.klabel().get();
        ul.pTerminator = p;
/**
 * Executes {@code proposalsToExecute} through a freshly constructed {@link Executor} and
 * then verifies, for every proposal in {@code proposalsToCheck}, that (1) the replication
 * factor is unchanged, (2) each proposed new replica is present, and (3) leadership moved
 * to the proposed leader.
 *
 * @param zkUtils            ZooKeeper handle used to read partition state.
 * @param proposalsToExecute Proposals submitted to the executor.
 * @param proposalsToCheck   Proposals whose effects are asserted (may differ from the above).
 */
private void executeAndVerifyProposals(ZkUtils zkUtils,
                                       Collection<ExecutionProposal> proposalsToExecute,
                                       Collection<ExecutionProposal> proposalsToCheck) {
    KafkaCruiseControlConfig configs = new KafkaCruiseControlConfig(getExecutorProperties());
    // NOTE(review): 86400000 ms = 24h and 43200000 ms = 12h — presumably execution-history
    // retention settings for the Executor; confirm against the Executor constructor.
    Executor executor = new Executor(configs, new SystemTime(), new MetricRegistry(), 86400000L, 43200000L);
    executor.setExecutionMode(false);
    executor.executeProposals(proposalsToExecute, Collections.emptySet(), null,
                              EasyMock.mock(LoadMonitor.class), null, null, null);
    // Snapshot replication factors BEFORE execution finishes so the later assertion checks
    // that the executor preserved them.
    Map<TopicPartition, Integer> replicationFactors = new HashMap<>();
    for (ExecutionProposal proposal : proposalsToCheck) {
        int replicationFactor = zkUtils.getReplicasForPartition(proposal.topic(), proposal.partitionId()).size();
        replicationFactors.put(new TopicPartition(proposal.topic(), proposal.partitionId()), replicationFactor);
    }
    waitUntilExecutionFinishes(executor);
    for (ExecutionProposal proposal : proposalsToCheck) {
        TopicPartition tp = new TopicPartition(proposal.topic(), proposal.partitionId());
        int expectedReplicationFactor = replicationFactors.get(tp);
        // Replication factor must be preserved by the rebalance.
        assertEquals("Replication factor for partition " + tp + " should be " + expectedReplicationFactor,
                     expectedReplicationFactor, zkUtils.getReplicasForPartition(tp.topic(), tp.partition()).size());
        if (proposal.hasReplicaAction()) {
            // Every proposed replica must now host the partition.
            for (int brokerId : proposal.newReplicas()) {
                assertTrue("The partition should have moved for " + tp,
                           zkUtils.getReplicasForPartition(tp.topic(), tp.partition()).contains(brokerId));
            }
        }
        // Leadership must have moved to the proposed leader.
        assertEquals("The leader should have moved for " + tp,
                     proposal.newLeader(), zkUtils.getLeaderForPartition(tp.topic(), tp.partition()).get());
    }
}
/**
 * Shuts down the given embedded Kafka server and removes its on-disk log data.
 *
 * @param serverStartable The Kafka server wrapper to stop and clean up.
 */
public static void stopServer(KafkaServerStartable serverStartable) {
    serverStartable.shutdown();
    // shutdown() only initiates the shutdown; wait for it to complete before deleting
    // log directories, otherwise the broker may still be flushing into them and the
    // quiet delete can race and leave files behind.
    serverStartable.awaitShutdown();
    // The original deleted only logDirs().apply(0); clean up every configured log dir.
    scala.collection.Iterator<String> logDirs = serverStartable.serverConfig().logDirs().iterator();
    while (logDirs.hasNext()) {
        FileUtils.deleteQuietly(new File(logDirs.next()));
    }
}
/**
 * Returns the names of columns to be selected from the table.
 */
@SuppressWarnings("RedundantCast")
public String[] selectedColumnNames() {
    // Passing an explicit ClassTag makes Scala's toArray produce a String[] rather
    // than an Object[]; the cast is kept for the Java compiler's benefit.
    return (String[]) rdd().selectedColumnNames().toArray(getClassTag(String.class));
}
/** Verifies that freqItems with support 0.2 reports 1 as a frequent value of column "a". */
@Test
public void testFrequentItems() {
    Dataset<Row> data = spark.table("testData2");
    Dataset<Row> freq = data.stat().freqItems(new String[] {"a"}, 0.2);
    Row firstRow = freq.collectAsList().get(0);
    Assert.assertTrue(firstRow.getSeq(0).contains(1));
}
/**
 * Applies stemming to every token that has a stem, leaving the rest untouched,
 * and returns the result as a Scala Seq in the original order.
 */
@Override
protected Seq<KoreanToken> perform(Seq<KoreanToken> tokens) {
    KoreanToken[] out = new KoreanToken[tokens.length()];
    Iterator<KoreanToken> it = tokens.iterator();
    for (int idx = 0; it.hasNext(); idx++) {
        KoreanToken token = it.next();
        out[idx] = token.stem().nonEmpty() ? stem(token) : token;
    }
    return JavaConverters.asScalaBuffer(Arrays.asList(out)).toSeq();
}
private static void checkCircularModuleImports(Module mainModule, scala.collection.Seq<Module> visitedModules) { if (visitedModules.contains(mainModule)) { String msg = "Found circularity in module imports: "; for (Module m : mutable(visitedModules)) { // JavaConversions.seqAsJavaList(visitedModules) msg += m.getName() + " < "; } msg += visitedModules.head().getName(); throw KEMException.compilerError(msg); } }
/**
 * Copies the elements of a Scala {@code Seq} into a new Java {@link List},
 * preserving iteration order.
 */
public static <V> List<V> scalaToJavaList(scala.collection.Seq<V> scalaList) {
    List<V> result = Lists.newArrayList();
    for (scala.collection.Iterator<V> it = scalaList.iterator(); it.hasNext(); ) {
        result.add(it.next());
    }
    return result;
}
}
/**
 * Increments a dropped-message counter for every prefix of the cause's stringified
 * exception chain, unwrapping the sender's wrapper exception first.
 */
@Override
public void incrementMessagesDropped(Throwable cause) {
    // Unwrap into a local rather than reassigning the parameter.
    Throwable root = cause;
    if (root instanceof FinagleSender.WrappedException) {
        root = root.getCause();
    }
    Seq<Traversable<String>> prefixes = Throwables.mkString(root).inits().toSeq();
    Iterator<Traversable<String>> it = prefixes.iterator();
    while (it.hasNext()) {
        messagesDropped.counter(it.next().toSeq()).incr();
    }
}
Production p = (Production) s;
// The production is expected to be bracketed by (regex) terminals on both ends —
// presumably the cell's opening and closing tags. TODO(review): confirm.
assert p.items().head() instanceof Terminal || p.items().head() instanceof RegexTerminal;
assert p.items().last() instanceof Terminal || p.items().last() instanceof RegexTerminal;
final ProductionItem body;
if (cfgInfo.isLeafCell(p.sort())) {
    // Leaf cell: keep its single content item (the one between the two terminals).
    body = p.items().tail().head();
} else {
    // Non-leaf cell: its contents become a Bag.
    body = NonTerminal(Sorts.Bag());
}
// Rebuild the production with optional-dots items around the body.
Seq<ProductionItem> pi = Seq(p.items().head(), optDots, body, optDots, p.items().last());
Production p1 = Production(p.klabel().get(), p.sort(), pi, p.att());
// NOTE(review): p2 wraps the sort as a Cell; its use is cut off in this fragment.
Production p2 = Production(Sorts.Cell(), Seq(NonTerminal(p.sort())));
@Override public void run(ResultIterator resultIterator) throws Exception { ScalaRootScope rootScope = ((ScalaParserResult) resultIterator.getParserResult()).rootScope(); if (rootScope == null) { return; } rootScope.visibleDfns(ElementKind.CLASS); scala.collection.Seq<AstDfn> tmpls = rootScope.visibleDfns(ElementKind.CLASS); if (!tmpls.isEmpty()) { scala.collection.Iterator itr = tmpls.iterator(); while (itr.hasNext()) { AstDfn tmpl = (AstDfn) itr.next(); if (classes[0].length() > 0) { classes[0] = classes[0] + " "; // NOI18N } classes[0] = classes[0] + tmpl.getName().toString().replace('.', '/') + "*.class"; // NOI18N } } } });
/**
 * Counts the brokers currently registered in the cluster.
 *
 * @param zkUrl zookeeper connection url
 * @return number of brokers in this cluster
 */
public static int getBrokerCount(String zkUrl) {
    ZkUtils zk = ZkUtils.apply(zkUrl, ZK_SESSION_TIMEOUT_MS, ZK_CONNECTION_TIMEOUT_MS,
                               JaasUtils.isZkSecurityEnabled());
    try {
        return zk.getAllBrokersInCluster().size();
    } finally {
        // Always release the ZooKeeper connection, even if the lookup throws.
        zk.close();
    }
}
/**
 * Returns a comma-separated list of broker connection strings for the cluster behind
 * {@code zkUrl}, using the endpoint registered for {@code securityProtocol}.
 * Returns {@code null} when no brokers are registered (preserving the original
 * {@code reduce(null, ...)} contract).
 *
 * @param zkUrl            zookeeper connection url
 * @param securityProtocol protocol whose listener endpoint is reported
 */
public static String getBrokers(String zkUrl, SecurityProtocol securityProtocol) {
    ZkUtils zkUtils = getZkUtils(zkUrl);
    Seq<Broker> brokersSeq = zkUtils.getAllBrokersInCluster();
    Broker[] brokers = new Broker[brokersSeq.size()];
    brokersSeq.copyToArray(brokers);
    // String.join is the idiomatic replacement for the null-seeded reduce, which
    // rebuilt partial strings on every step (quadratic concatenation).
    String[] endpoints = Arrays.stream(brokers)
        .map(b -> b.brokerEndPoint(
            ListenerName.forSecurityProtocol(securityProtocol)).connectionString())
        .toArray(String[]::new);
    return endpoints.length == 0 ? null : String.join(",", endpoints);
}