Refine search
/** Wraps a single metric name in the immutable Scala {@code Seq} the gauge API expects. */
private static Seq<String> gaugeName(String name) {
  java.util.List<String> singleName = Arrays.asList(name);
  return scala.collection.JavaConversions.asScalaBuffer(singleName).toList();
}
Map<Integer, String> rackByBroker = new HashMap<>(); for (BrokerMetadata bm : JavaConversions.seqAsJavaList(AdminUtils.getBrokerMetadatas(zkUtils, RackAwareMode.Enforced$.MODULE$, Option.empty()))) {
/**
 * Accepts only when expansion is requested and {@code obj} is a non-empty Scala
 * Seq of Seqs whose inner elements are all primitive types (a primitive matrix).
 */
@Override
public boolean canBeUsed(Object obj, boolean expand) {
  if (!expand || !(obj instanceof scala.collection.immutable.Seq<?>)) {
    return false;
  }
  Collection<?> rows = scala.collection.JavaConversions.asJavaCollection((Iterable<?>) obj);
  if (rows.isEmpty()) {
    return false;
  }
  for (Object row : rows) {
    if (!(row instanceof scala.collection.immutable.Seq)) {
      return false;
    }
    Collection<?> cells = scala.collection.JavaConversions.asJavaCollection((Iterable<?>) row);
    for (Object cell : cells) {
      // Every cell must be a primitive-like type for the matrix rendering to apply.
      if (!parent.isPrimitiveType(cell.getClass().getName())) {
        return false;
      }
    }
  }
  return true;
}
@Override public boolean writeObject(Object obj, JsonGenerator jgen, boolean expand) throws JsonProcessingException, IOException { logger.debug("list of maps"); // convert this 'on the fly' to a datatable Collection<?> col = scala.collection.JavaConversions.asJavaCollection((Iterable<?>) obj); List<Map<String, Object>> tab = new ArrayList<Map<String, Object>>(); for (Object o : col) { Map<String, Object> row = scala.collection.JavaConversions.mapAsJavaMap((scala.collection.Map<String, Object>) o); tab.add(row); } TableDisplay t = new TableDisplay(tab, parent); jgen.writeObject(t); return true; } }
/**
 * Reconstructs a Scala value from a serialized TableDisplay payload.
 *
 * <p>Dispatches on the payload subtype: a dictionary becomes an immutable Scala Map,
 * a list of maps becomes a Scala Iterable of immutable Maps, a matrix becomes a Scala
 * List of Lists, and anything else is returned as the raw deserialized Java object.
 */
@SuppressWarnings("unchecked") @Override public Object deserialize(JsonNode n, ObjectMapper mapper) {
  // Parent helper yields (subtype, payload); subtype may be null for plain objects.
  org.apache.commons.lang3.tuple.Pair<String, Object> deserializeObject = TableDisplayDeSerializer.getDeserializeObject(parent, n, mapper);
  String subtype = deserializeObject.getLeft();
  if (subtype != null && subtype.equals(TableDisplay.DICTIONARY_SUBTYPE)) {
    // Dictionary -> immutable scala.collection.Map (Predef.conforms supplies the key/value evidence).
    return JavaConverters.mapAsScalaMapConverter((Map<String, Object>) deserializeObject.getRight()).asScala().toMap(Predef.<Tuple2<String, Object>>conforms());
  } else if (subtype != null && subtype.equals(TableDisplay.LIST_OF_MAPS_SUBTYPE)) {
    // List of maps -> Scala Iterable of immutable Maps, converting each row separately.
    List<Map<String, Object>> rows = (List<Map<String, Object>>) deserializeObject.getRight();
    List<Object> oo = new ArrayList<Object>();
    for (Map<String, Object> row : rows) {
      oo.add(JavaConverters.mapAsScalaMapConverter(row).asScala().toMap(Predef.<Tuple2<String, Object>>conforms()));
    }
    return scala.collection.JavaConversions.collectionAsScalaIterable(oo);
  } else if (subtype != null && subtype.equals(TableDisplay.MATRIX_SUBTYPE)) {
    // Matrix -> Scala List of Scala Lists (row-major, mirroring the Java nesting).
    List<List<?>> matrix = (List<List<?>>) deserializeObject.getRight();
    ArrayList<Object> ll = new ArrayList<Object>();
    for (List<?> ob : matrix) {
      ll.add(scala.collection.JavaConversions.asScalaBuffer(ob).toList());
    }
    return scala.collection.JavaConversions.asScalaBuffer(ll).toList();
  }
  // Unknown or absent subtype: hand back the raw deserialized object unchanged.
  return deserializeObject.getRight();
}
.fetchTopicMetadataFromZk(JavaConversions.asScalaSet(Collections.singleton(topicName)), zkClient); if (topicMetadatas != null && topicMetadatas.size() > 0) { return JavaConversions.asJavaSet(topicMetadatas).iterator().next().partitionsMetadata().size(); } else { throw new IllegalStateException("Failed to get metadata for topic " + topicName);
/**
 * Pushes a single UTF-8 message onto one of the simulated consumer streams,
 * choosing the target stream round-robin via the {@code nextStream} counter.
 */
public void pushToStream(String message) {
  // Round-robin stream selection; nextStream grows monotonically across calls.
  int streamNo = (int) this.nextStream.incrementAndGet() % this.queues.size();
  AtomicLong offset = this.offsets.get(streamNo);
  BlockingQueue<FetchedDataChunk> queue = this.queues.get(streamNo);
  // Advance the shared per-stream offset and snapshot the new value for this message.
  AtomicLong thisOffset = new AtomicLong(offset.incrementAndGet());
  List<Message> seq = Lists.newArrayList();
  seq.add(new Message(message.getBytes(Charsets.UTF_8)));
  // NOTE(review): the message set is built against the shared `offset` counter while the
  // chunk metadata uses the snapshot `thisOffset` — presumably intentional for this
  // helper, but worth confirming under concurrent pushes to the same stream.
  ByteBufferMessageSet messageSet = new ByteBufferMessageSet(NoCompressionCodec$.MODULE$, offset, JavaConversions.asScalaBuffer(seq));
  FetchedDataChunk chunk = new FetchedDataChunk(messageSet,
      new PartitionTopicInfo("topic", streamNo, queue, thisOffset, thisOffset, new AtomicInteger(1), "clientId"),
      thisOffset.get());
  queue.add(chunk);
}
return JavaConversions.asScalaBuffer(result).toSeq(); return JavaConversions.mapAsScalaMap(result);
/**
 * Builds a directed dependency graph for the given sentence: nodes are token
 * indices, edges are governor -> dependent labelled with the lower-cased
 * dependency type, and roots are the indices of ROOT-governed tokens.
 *
 * <p>Fix: root collection was previously done as a side effect inside
 * {@code Stream.peek}, which the JDK documents as a debugging aid with no
 * guarantee of execution; roots are now gathered in an explicit pass.
 *
 * @param key the sentence whose dependency graph is built
 * @return the dependency graph for the sentence
 */
private DirectedGraph<String> getDependencies(uk.gov.dstl.baleen.types.language.Sentence key) {
  List<WordToken> tokens = ImmutableList.copyOf(indexWords.get(key));

  // Roots: indices of tokens governed by a ROOT dependency.
  // NOTE(review): List.indexOf yields -1 for a governor absent from `tokens` —
  // presumably impossible for a well-formed parse, but worth confirming.
  Set<Object> roots = new HashSet<>();
  indexDependency.get(key).stream()
      .filter(d -> MaltParser.ROOT.equals(d.getDependencyType()))
      .forEach(d -> roots.add(tokens.indexOf(d.getGovernor())));

  // Edges: governor index -> dependent index, labelled with the dependency type.
  List<Edge<String>> edges =
      indexDependency.get(key).stream()
          .map(
              d ->
                  new Edge<>(
                      tokens.indexOf(d.getGovernor()),
                      tokens.indexOf(d.getDependent()),
                      d.getDependencyType().toLowerCase()))
          .collect(toList());

  return new DirectedGraph<>(
      JavaConversions.asScalaBuffer(edges).toList(), JavaConversions.asScalaSet(roots).toSet());
  }
}
/**
 * Appends the given messages to the Kafka-backed journal as a single message set.
 *
 * @param messages messages to write; an empty list is a no-op
 * @param payloadSize total payload size in bytes (used for debug logging only)
 * @return the log offset of the last message written, or -1 if nothing was written
 */
private long flushMessages(List<Message> messages, long payloadSize) {
  if (messages.isEmpty()) {
    LOG.debug("No messages to flush, not trying to write an empty message set.");
    return -1L;
  }
  final ByteBufferMessageSet messageSet = new ByteBufferMessageSet(JavaConversions.asScalaBuffer(messages).toSeq());
  if (LOG.isDebugEnabled()) {
    LOG.debug("Trying to write ByteBufferMessageSet with size of {} bytes to journal", messageSet.sizeInBytes());
  }
  // NOTE(review): the boolean argument to append() is presumably assignOffsets —
  // confirm against the kafka.log.Log API version in use.
  final LogAppendInfo appendInfo = kafkaLog.append(messageSet, true);
  long lastWriteOffset = appendInfo.lastOffset();
  if (LOG.isDebugEnabled()) {
    LOG.debug("Wrote {} messages to journal: {} bytes (payload {} bytes), log position {} to {}",
        messages.size(), messageSet.sizeInBytes(), payloadSize, appendInfo.firstOffset(), lastWriteOffset);
  }
  // Metrics: count messages actually handed to the journal.
  writtenMessages.mark(messages.size());
  return lastWriteOffset;
}
/**
 * Prints every path that transitively depends on the given MMT URI
 * (via depends-on, has-meta, or includes relations, reflexively closed).
 */
public void getDependents(String MMTUri) {
  List<RelationExp> relations = Arrays.<RelationExp>asList(
      new ToSubject(DependsOn$.MODULE$),
      new ToSubject(HasMeta$.MODULE$),
      new ToSubject(Includes$.MODULE$),
      Reflexive$.MODULE$);
  // Transitive closure over the union (Choice) of the relations above.
  Transitive query = new Transitive(new Choice(JavaConversions.asScalaBuffer(relations)));
  Path start = Path.parse(MMTUri);
  for (Path dependent : JavaConversions.asJavaCollection(controller.depstore().queryList(start, query))) {
    System.out.println(dependent);
  }
}
/**
 * Serializes a Scala map of primitives as a two-column (Key/Value)
 * TableDisplay JSON object.
 */
@Override
public boolean writeObject(Object obj, JsonGenerator jgen, boolean expand)
    throws JsonProcessingException, IOException {
  logger.debug("primitive type map");
  List<String> columnNames = new ArrayList<String>();
  columnNames.add("Key");
  columnNames.add("Value");
  Map<?, ?> javaMap = scala.collection.JavaConversions.mapAsJavaMap((scala.collection.Map<?, ?>) obj);
  // One [key, value] row per map entry; keys are rendered as strings.
  List<List<?>> rows = new ArrayList<List<?>>();
  for (Map.Entry<?, ?> entry : javaMap.entrySet()) {
    List<Object> row = new ArrayList<Object>();
    row.add(entry.getKey().toString());
    row.add(entry.getValue());
    rows.add(row);
  }
  jgen.writeStartObject();
  jgen.writeObjectField("type", "TableDisplay");
  jgen.writeObjectField("columnNames", columnNames);
  jgen.writeObjectField("values", rows);
  jgen.writeObjectField("subtype", TableDisplay.DICTIONARY_SUBTYPE);
  jgen.writeEndObject();
  return true;
  }
}
/**
 * Ensures {@code topic} exists with the desired retention, replication factor and
 * partition count: creates it if absent; otherwise updates its config and
 * (best-effort) grows replication factor / partition count.
 *
 * <p>Fix: the catch block previously logged only {@code re.getMessage()}, dropping
 * the stack trace; the throwable is now passed to the logger so the cause is
 * diagnosable.
 *
 * @param zkUtils ZooKeeper utilities handle
 * @param allTopics topics that currently exist
 * @param topic topic to create or update
 * @param retentionMs retention to apply, in milliseconds
 * @param replicationFactor desired replication factor
 * @param partitionCount desired partition count
 */
private void ensureTopicCreated(ZkUtils zkUtils, Set<String> allTopics, String topic, long retentionMs,
    int replicationFactor, int partitionCount) {
  Properties props = new Properties();
  props.setProperty(LogConfig.RetentionMsProp(), Long.toString(retentionMs));
  props.setProperty(LogConfig.CleanupPolicyProp(), DEFAULT_CLEANUP_POLICY);
  if (!allTopics.contains(topic)) {
    AdminUtils.createTopic(zkUtils, topic, partitionCount, replicationFactor, props, RackAwareMode.Safe$.MODULE$);
    return;
  }
  try {
    AdminUtils.changeTopicConfig(zkUtils, topic, props);
    MetadataResponse.TopicMetadata topicMetadata = AdminUtils.fetchTopicMetadataFromZk(
        JavaConversions.asScalaSet(Collections.singleton(topic)), zkUtils,
        ListenerName.forSecurityProtocol(SecurityProtocol.PLAINTEXT)).head();
    maybeIncreaseTopicReplicationFactor(zkUtils, topicMetadata, replicationFactor, topic);
    maybeIncreaseTopicPartitionCount(zkUtils, topic, topicMetadata, partitionCount);
  } catch (RuntimeException re) {
    // Best-effort update: skip this topic but keep the full stack trace in the log.
    LOG.error("Skip updating topic " + topic + " configuration due to failure:" + re.getMessage() + ".", re);
  }
}
/**
 * Returns the declared sort of the {@code n}-th argument of {@code klabel},
 * preferring the first signature whose n-th argument sort is not K; if every
 * candidate is K, the last one seen is returned (null if no signature has an
 * n-th argument).
 *
 * @throws IllegalArgumentException if no signature exists for {@code klabel}
 */
private Sort nthArgSort(KLabel klabel, int n) {
  java.util.Set<Tuple2<Seq<Sort>, Sort>> signatures =
      mutable(JavaConversions.mapAsJavaMap(module.signatureFor()).get(klabel));
  if (signatures == null) {
    throw new IllegalArgumentException("Not found signature for label: " + klabel);
  }
  Sort candidate = null;
  for (Tuple2<Seq<Sort>, Sort> signature : signatures) {
    List<Sort> argSorts = JavaConversions.seqAsJavaList(signature._1());
    if (n < argSorts.size()) {
      candidate = argSorts.get(n);
      // A non-K sort is specific enough to return immediately.
      if (!candidate.equals(Sorts.K())) {
        return candidate;
      }
    }
  }
  return candidate;
  }
}
/** Bridges a Java {@link java.util.Map} to a mutable Scala map view of it. */
static <A, B> scala.collection.mutable.Map<A, B> mapAsScalaMap(java.util.Map<A, B> javaMap) {
  return JavaConversions.mapAsScalaMap(javaMap);
  }
}
val = JavaConversions.mapAsScalaMap(convertedMap); val = JavaConverters.asScalaBufferConverter(childValues).asScala().toSeq();
/**
 * Reads the number of partitions for the given topic from ZooKeeper.
 *
 * @param zkUrl zookeeper connection url
 * @param topic topic name
 * @return the number of partitions of the given topic, or 0 if the topic is absent
 */
public static int getPartitionNumForTopic(String zkUrl, String topic) {
  ZkUtils zkUtils = ZkUtils.apply(zkUrl, ZK_SESSION_TIMEOUT_MS, ZK_CONNECTION_TIMEOUT_MS, JaasUtils.isZkSecurityEnabled());
  try {
    Seq<String> topicSeq = scala.collection.JavaConversions.asScalaBuffer(Arrays.asList(topic));
    return zkUtils.getPartitionsForTopics(topicSeq).apply(topic).size();
  } catch (NoSuchElementException e) {
    // Topic not registered in ZooKeeper.
    return 0;
  } finally {
    zkUtils.close();
  }
}
return JavaConversions.asScalaBuffer(result).toSeq(); return JavaConversions.mapAsScalaMap(result);
/**
 * Builds a directed dependency graph for the given sentence: nodes are token
 * indices, edges are governor -> dependent labelled with the lower-cased
 * dependency type, and roots are the indices of ROOT-governed tokens.
 *
 * <p>Fix: root collection was previously done as a side effect inside
 * {@code Stream.peek}, which the JDK documents as a debugging aid with no
 * guarantee of execution; roots are now gathered in an explicit pass.
 *
 * @param key the sentence whose dependency graph is built
 * @return the dependency graph for the sentence
 */
private DirectedGraph<String> getDependencies(uk.gov.dstl.baleen.types.language.Sentence key) {
  List<WordToken> tokens = ImmutableList.copyOf(indexWords.get(key));

  // Roots: indices of tokens governed by a ROOT dependency.
  // NOTE(review): List.indexOf yields -1 for a governor absent from `tokens` —
  // presumably impossible for a well-formed parse, but worth confirming.
  Set<Object> roots = new HashSet<>();
  indexDependency.get(key).stream()
      .filter(d -> MaltParser.ROOT.equals(d.getDependencyType()))
      .forEach(d -> roots.add(tokens.indexOf(d.getGovernor())));

  // Edges: governor index -> dependent index, labelled with the dependency type.
  List<Edge<String>> edges =
      indexDependency.get(key).stream()
          .map(
              d ->
                  new Edge<>(
                      tokens.indexOf(d.getGovernor()),
                      tokens.indexOf(d.getDependent()),
                      d.getDependencyType().toLowerCase()))
          .collect(toList());

  return new DirectedGraph<>(
      JavaConversions.asScalaBuffer(edges).toList(), JavaConversions.asScalaSet(roots).toSet());
  }
}
/**
 * Creates a new {@code Spool} containing the given elements.
 *
 * @param elems elements to seed the spool with (copied defensively)
 * @return a spool over a snapshot of {@code elems}
 */
public static <T> Spool<T> newSpool(Collection<T> elems) {
  ArrayList<T> snapshot = new ArrayList<T>(elems);
  Seq<T> scalaSeq = JavaConversions.asScalaBuffer(snapshot).toSeq();
  return new Spool.ToSpool<T>(scalaSeq).toSpool();
}