/**
 * Just an example of fetching status information from the graph updater manager
 * to expose it in a web service. More useful stuff should be added later.
 */
public Map<Integer, String> getUpdaterDescriptions() {
    Map<Integer, String> ret = Maps.newTreeMap();
    int i = 0;
    for (GraphUpdater updater : updaterList) {
        ret.put(i++, updater.toString());
    }
    return ret;
}
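For readers unfamiliar with the Guava factory used throughout these results: Maps.newTreeMap() simply builds a java.util.TreeMap with natural key ordering, so iteration is always in ascending key order. A minimal, self-contained sketch of that behavior (assumes Guava on the classpath; the sample data is illustrative):

import java.util.Map;

import com.google.common.collect.Maps;

public class SortedDescriptions {
    public static void main(String[] args) {
        // Maps.newTreeMap() returns a plain java.util.TreeMap using the keys'
        // natural ordering, so entries iterate in ascending key order.
        Map<Integer, String> ret = Maps.newTreeMap();
        ret.put(2, "third updater");
        ret.put(0, "first updater");
        ret.put(1, "second updater");

        System.out.println(ret.keySet()); // prints [0, 1, 2]
    }
}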
private Map<String, JsonNode> parseAsJson(byte[] value) throws IOException {
    JsonNode document = mapper.readValue(value, JsonNode.class);
    // Hive column names are case insensitive.
    Map<String, JsonNode> documentMap = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
    document.fields().forEachRemaining(
            field -> documentMap.put(field.getKey().toLowerCase(), field.getValue()));
    return documentMap;
}
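The comparator argument in the snippet above is what makes lookups case-insensitive; the explicit toLowerCase() is then only a normalization of the stored keys. A minimal sketch of the pattern with plain java.util.TreeMap (class name and sample columns are illustrative):

import java.util.TreeMap;

public class CaseInsensitiveColumns {
    public static void main(String[] args) {
        // With String.CASE_INSENSITIVE_ORDER, "UserId", "USERID" and "userid"
        // all denote the same key, so lookups ignore case.
        TreeMap<String, String> columns = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
        columns.put("UserId", "bigint");
        columns.put("userid", "string"); // replaces the value stored above

        System.out.println(columns.get("USERID")); // prints "string"
        System.out.println(columns.size());        // prints 1
    }
}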
private void listThreads() {
    final Set<Thread> threads = Thread.getAllStackTraces().keySet();
    final Map<String, Thread> sorted = Maps.newTreeMap();
    for (final Thread t : threads) {
        final ThreadGroup tg = t.getThreadGroup();
        if (t.isAlive() && (tg == null || !tg.getName().equals("system"))) {
            sorted.put(t.getName(), t);
        }
    }
    log.info("= THREADS " + Strings.repeat("=", 70));
    for (final Thread t : sorted.values()) {
        final ThreadGroup tg = t.getThreadGroup();
        log.info("{}: \"{}\" ({}{})", t.getId(), t.getName(),
                (tg == null ? "" : tg.getName() + " "),
                (t.isDaemon() ? "daemon" : ""));
    }
    log.info(Strings.repeat("=", 80));
}
/**
 * Helper function for {@link #coalesceIncrements} to increment a counter
 * value in the passed data structure.
 *
 * @param counters Nested data structure containing the counters.
 * @param row Row key to increment.
 * @param family Column family to increment.
 * @param qualifier Column qualifier to increment.
 * @param count Amount to increment by.
 */
private void incrementCounter(
        Map<byte[], Map<byte[], NavigableMap<byte[], Long>>> counters,
        byte[] row, byte[] family, byte[] qualifier, Long count) {
    Map<byte[], NavigableMap<byte[], Long>> families = counters.get(row);
    if (families == null) {
        families = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
        counters.put(row, families);
    }
    NavigableMap<byte[], Long> qualifiers = families.get(family);
    if (qualifiers == null) {
        qualifiers = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
        families.put(family, qualifiers);
    }
    Long existingValue = qualifiers.get(qualifier);
    if (existingValue == null) {
        qualifiers.put(qualifier, count);
    } else {
        qualifiers.put(qualifier, existingValue + count);
    }
}
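byte[] keys need special care: arrays are not Comparable and their equals() is reference identity, which is why the snippet above passes HBase's Bytes.BYTES_COMPARATOR. A JDK-only sketch of the same idea, using Arrays::compare (Java 9+) in place of the HBase comparator; names and data are illustrative:

import java.util.Arrays;
import java.util.TreeMap;

public class ByteKeyedCounters {
    public static void main(String[] args) {
        // Without a comparator, the first put would throw ClassCastException,
        // because byte[] does not implement Comparable.
        TreeMap<byte[], Long> counters = new TreeMap<>(Arrays::compare);

        byte[] key = {1, 2, 3};
        counters.merge(key, 1L, Long::sum);
        // A different array instance with equal contents hits the same entry:
        // TreeMap lookups go through the comparator, not equals()/hashCode().
        counters.merge(new byte[] {1, 2, 3}, 1L, Long::sum);

        System.out.println(counters.get(key)); // prints 2
    }
}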
public DateHistogramResult(HistogramAggregation result, String originalQuery, String builtQuery,
                           Searches.DateHistogramInterval interval, long tookMs) {
    super(originalQuery, builtQuery, tookMs);
    this.result = Maps.newTreeMap();
    for (HistogramAggregation.Histogram histogram : result.getBuckets()) {
        final DateTime keyAsDate = new DateTime(histogram.getKey());
        this.result.put(keyAsDate.getMillis() / 1000L, histogram.getCount());
    }
    this.interval = interval;
}
private void findAvailableXMLFiles() {
    if (contributingPluginKeyToClassLoader == null) {
        contributingPluginKeyToClassLoader = Maps.newTreeMap();
        // Add default model
        contributingPluginKeyToClassLoader.put(DEFAULT_MODEL, getClass().getClassLoader());
        for (PluginInfo pluginInfo : pluginRepository.getPluginInfos()) {
            String pluginKey = pluginInfo.getKey();
            Plugin plugin = pluginRepository.getPluginInstance(pluginKey);
            ClassLoader classLoader = plugin.getClass().getClassLoader();
            if (classLoader.getResource(getXMLFilePath(pluginKey)) != null) {
                contributingPluginKeyToClassLoader.put(pluginKey, classLoader);
            }
        }
    }
    contributingPluginKeyToClassLoader = Collections.unmodifiableMap(contributingPluginKeyToClassLoader);
}
/**
 * Constructs DAGNodes for each Hive MR task
 */
private void createNodeIdToDAGNode() {
    // creates DAGNodes: each node represents a MR job
    nodeIdToDAGNode = Maps.newTreeMap();
    for (Task<MapredWork> task : allTasks) {
        DAGNode<Job> dagNode = asDAGNode(task);
        nodeIdToDAGNode.put(dagNode.getName(), dagNode);
    }
    // get job dependencies
    Map<String, List<String>> nodeIdToDependencies = getNodeIdToDependencies();
    // wire DAGNodes
    for (Map.Entry<String, List<String>> entry : nodeIdToDependencies.entrySet()) {
        String nodeId = entry.getKey();
        List<String> successorIds = entry.getValue();
        DAGNode<Job> dagNode = nodeIdToDAGNode.get(nodeId);
        List<DAGNode<? extends Job>> dagSuccessors = Lists.newArrayListWithCapacity(successorIds.size());
        for (String sId : successorIds) {
            DAGNode<Job> successor = nodeIdToDAGNode.get(sId);
            dagSuccessors.add(successor);
        }
        dagNode.setSuccessors(dagSuccessors);
    }
}
public static Map<String, Integer> getValidEncodings() {
    if (factoryMap == null)
        initFactoryMap();
    Map<String, Integer> result = Maps.newTreeMap();
    for (Pair<String, Integer> p : factoryMap.keySet()) {
        if (result.containsKey(p.getFirst())) {
            if (result.get(p.getFirst()) > p.getSecond()) {
                continue; // skip small versions
            }
        }
        result.put(p.getFirst(), p.getSecond());
    }
    result.put(DictionaryDimEnc.ENCODING_NAME, 1);
    return result;
}
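The containsKey/get/continue sequence above keeps the largest version number seen per encoding name. As a side note, Map.merge can express the same "keep the maximum" logic in one call; a sketch under that assumption (the data is illustrative, not Kylin's encoding table):

import java.util.Map;
import java.util.TreeMap;

public class MaxVersionPerName {
    public static void main(String[] args) {
        Map<String, Integer> result = new TreeMap<>();

        // merge() keeps the larger of the stored and offered versions,
        // mirroring the "skip small versions" branch above.
        String[][] encodings = {{"dict", "1"}, {"int", "2"}, {"int", "1"}};
        for (String[] p : encodings) {
            result.merge(p[0], Integer.valueOf(p[1]), Integer::max);
        }

        System.out.println(result); // prints {dict=1, int=2}
    }
}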
public TermsHistogramResult(@Nullable DateHistogramAggregation result, String originalQuery, String builtQuery,
                            long size, long tookMs, Searches.DateHistogramInterval interval, List<String> fields) {
    super(originalQuery, builtQuery, tookMs);
    this.size = size;
    this.interval = interval;
    this.result = Maps.newTreeMap();
    this.terms = new HashSet<>();
    if (result != null) {
        for (DateHistogramAggregation.DateHistogram histogram : result.getBuckets()) {
            final DateTime keyAsDate = new DateTime(histogram.getKey());
            final TermsAggregation termsAggregation =
                    histogram.getFilterAggregation(Searches.AGG_FILTER).getTermsAggregation(Searches.AGG_TERMS);
            final MissingAggregation missingAggregation = histogram.getMissingAggregation("missing");
            final TermsResult termsResult = new TermsResult(termsAggregation, missingAggregation.getMissing(),
                    histogram.getCount(), "", "", tookMs, fields);
            this.terms.addAll(termsResult.getTerms().keySet());
            this.result.put(keyAsDate.getMillis() / 1000L, termsResult);
        }
    }
}
Map<Property<?>, Object> valueMap = Maps.newTreeMap(Comparator.comparing(Property::getName));
BlockState stateMaker = new BlockState(blockType);
for (int i = 0; i < valueList.size(); i++) {
    Property<?> property = properties.get(i);
    Object value = valueList.get(i);
    valueMap.put(property, value);
    stateMaker.setState(property, value);
}
stateMap.put(valueMap, stateMaker);

stateMap.put(new LinkedHashMap<>(), new BlockState(blockType));
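One subtlety in the snippet above: because the TreeMap compares keys only by Property::getName, two distinct properties whose names compare equal collapse onto a single entry, regardless of their equals()/hashCode(). A small sketch demonstrating the effect (the Prop record is an illustrative stand-in; requires Java 16+):

import java.util.Comparator;
import java.util.TreeMap;

public class ComparatorKeyedMap {
    // Illustrative stand-in for a block-state Property.
    record Prop(String name, Class<?> type) {}

    public static void main(String[] args) {
        TreeMap<Prop, Object> values = new TreeMap<>(Comparator.comparing(Prop::name));

        values.put(new Prop("facing", String.class), "north");
        // A different Prop with the same name compares as equal, so this
        // put overwrites the first entry rather than adding a second one.
        values.put(new Prop("facing", Integer.class), 3);

        System.out.println(values.size()); // prints 1
    }
}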
final Map<Long, Map<String, Number>> results = Maps.newTreeMap();
for (HistogramAggregation.Histogram b : histogramAggregation.getBuckets()) {
    final ImmutableMap.Builder<String, Number> resultMap = ImmutableMap.builder();

    results.put(timestamp, resultMap.build());
@Test
public void onlyRelevantValidatorsAreBuilt() throws ProcessingException {
    final Map<String, JsonNode> digests = Maps.newTreeMap();
    digests.put(K1, JacksonUtils.nodeFactory().nullNode());
    final SchemaDigest digest = new SchemaDigest(null, digests);
    final ProcessingReport report = mock(ProcessingReport.class);
    final ValidatorList context = validatorBuilder.process(report, digest);
    final List<KeywordValidator> list = Lists.newArrayList(context);
    assertEquals(list.size(), 1);
    assertSame(list.get(0).getClass(), Keyword1.class);
}
fetchDirs.put(node.getId(), executor.submit(new Callable<String>() {

Map<Node, Response> fetchResponseMap = Maps.newTreeMap();
boolean fetchErrors = false;

try {
    String response = val.get();
    fetchResponseMap.put(node, new Response(response));
} catch (Exception e) {
    if (e.getCause() instanceof UnauthorizedStoreException) {
        fetchResponseMap.put(node, new Response(e));
        failedNodes.add(node);
@Test
public void allRelevantValidatorsAreBuilt() throws ProcessingException {
    final Map<String, JsonNode> digests = Maps.newTreeMap();
    digests.put(K1, JacksonUtils.nodeFactory().nullNode());
    digests.put(K2, JacksonUtils.nodeFactory().nullNode());
    final SchemaDigest digest = new SchemaDigest(null, digests);
    final ProcessingReport report = mock(ProcessingReport.class);
    final ValidatorList context = validatorBuilder.process(report, digest);
    final List<KeywordValidator> list = Lists.newArrayList(context);
    assertEquals(list.size(), 2);
    assertSame(list.get(0).getClass(), Keyword1.class);
    assertSame(list.get(1).getClass(), Keyword2.class);
}
@Test
public void challengedConstructorRaisesAnException() {
    final Map<String, JsonNode> digests = Maps.newTreeMap();
    digests.put(K1, JacksonUtils.nodeFactory().nullNode());
    digests.put(CHALLENGED, JacksonUtils.nodeFactory().nullNode());
    final SchemaDigest digest = new SchemaDigest(null, digests);
    final ProcessingReport report = mock(ProcessingReport.class);
    try {
        validatorBuilder.process(report, digest);
        fail("No exception thrown??");
    } catch (ProcessingException ignored) {
    }
}
MessageImpl(String topic, BatchMessageIdImpl batchMessageIdImpl, MessageMetadata msgMetadata,
            PulsarApi.SingleMessageMetadata singleMessageMetadata, ByteBuf payload,
            Optional<EncryptionContext> encryptionCtx, ClientCnx cnx, Schema<T> schema, int redeliveryCount) {
    this.msgMetadataBuilder = MessageMetadata.newBuilder(msgMetadata);
    this.messageId = batchMessageIdImpl;
    this.topic = topic;
    this.cnx = cnx;
    this.redeliveryCount = redeliveryCount;
    this.payload = Unpooled.copiedBuffer(payload);
    this.encryptionCtx = encryptionCtx;

    if (singleMessageMetadata.getPropertiesCount() > 0) {
        Map<String, String> properties = Maps.newTreeMap();
        for (KeyValue entry : singleMessageMetadata.getPropertiesList()) {
            properties.put(entry.getKey(), entry.getValue());
        }
        this.properties = Collections.unmodifiableMap(properties);
    } else {
        properties = Collections.emptyMap();
    }

    if (singleMessageMetadata.hasPartitionKey()) {
        msgMetadataBuilder.setPartitionKeyB64Encoded(singleMessageMetadata.getPartitionKeyB64Encoded());
        msgMetadataBuilder.setPartitionKey(singleMessageMetadata.getPartitionKey());
    }

    if (singleMessageMetadata.hasEventTime()) {
        msgMetadataBuilder.setEventTime(singleMessageMetadata.getEventTime());
    }

    this.schema = schema;
}
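The properties branch above uses a common publish pattern: fill a mutable TreeMap, then expose it through Collections.unmodifiableMap. Since Guava is already on the classpath, ImmutableSortedMap.copyOf is one alternative that copies once and is immutable outright; a sketch of that option, not a claim about how Pulsar does it:

import java.util.Map;

import com.google.common.collect.ImmutableSortedMap;

public class FrozenProperties {
    public static void main(String[] args) {
        Map<String, String> staging = Map.of("key", "k1", "producer", "p1");

        // Copies the entries once into a sorted, genuinely immutable map,
        // instead of wrapping a still-mutable TreeMap.
        ImmutableSortedMap<String, String> properties = ImmutableSortedMap.copyOf(staging);

        System.out.println(properties.firstKey()); // prints "key"
    }
}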
set = Sets.newHashSet();
set.add(real);
models.put(m, set);
costs.put(m, cost);
} else {
    set.add(real);
    RealizationCost curCost = costs.get(m);
    if (cost.compareTo(curCost) < 0)
        costs.put(m, cost);

TreeMap<DataModelDesc, Set<IRealization>> result = Maps.newTreeMap(new Comparator<DataModelDesc>() {
    @Override
    public int compare(DataModelDesc o1, DataModelDesc o2) {
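The tail of this snippet shows the pre-lambda idiom of handing Maps.newTreeMap an anonymous Comparator over domain objects. A sketch of the same idea in modern Java, ordering keys by an externally computed cost with a tie-break so equal-cost keys do not collapse (names and costs are illustrative):

import java.util.Comparator;
import java.util.Map;
import java.util.TreeMap;

public class CostOrderedModels {
    public static void main(String[] args) {
        // Illustrative cost lookup; lower cost should sort first.
        Map<String, Integer> cost = Map.of("modelA", 30, "modelB", 10, "modelC", 20);

        // Tie-break on the key itself so distinct keys with equal cost
        // are not collapsed onto a single entry by the comparator.
        Comparator<String> byCost =
                Comparator.<String, Integer>comparing(cost::get).thenComparing(Comparator.naturalOrder());

        TreeMap<String, Integer> ordered = new TreeMap<>(byCost);
        ordered.putAll(cost);

        System.out.println(ordered.keySet()); // prints [modelB, modelC, modelA]
    }
}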
exprFields.put(r.toString(), RelOptUtil.InputFinder.bits(r));
allExprsDigests.add(r.toString());

exprFields.put(r.toString(), RelOptUtil.InputFinder.bits(r));
allExprsDigests.add(r.toString());

equivalence = Maps.newTreeMap();
equalityPredicates = new HashSet<>();
for (int i = 0; i < nSysFields + nFieldsLeft + nFieldsRight; i++) {
TreeMap<Node, AdminStoreSwapper.Response> toSwap = Maps.newTreeMap();

for (int nodeId = 0; nodeId < NUM_NODES; nodeId++) {
    if (nodeId != 1) {
        versionToNode.put(nodeId,
                adminClient.readonlyOps.getROCurrentVersion(nodeId, Lists.newArrayList(STORE_NAME))
Map<Integer, MutationMap> mutationMaps = Maps.newTreeMap();
for (long ts : Ordering.natural().immutableSortedCopy(cellVersions.getValue())) {
    if (!mutationMaps.containsKey(mapIndex)) {
        mutationMaps.put(mapIndex, new MutationMap());