/**
 * Returns the widgets of every column.
 *
 * <p>NOTE(review): this returns the live {@code values()} view of
 * {@code widgetsByColumn}, not a copy — mutations by the caller write through
 * to the map, and later map changes are visible through the returned
 * collection. Consider an unmodifiable wrapper if callers should not mutate.
 */
public Collection<Widget> getWidgets() { return widgetsByColumn.values(); }
/**
 * Returns every input currently bound to some bind URI.
 *
 * <p>NOTE(review): this is the live {@code values()} view of
 * {@code uriBoundInputs} — removals through it modify the underlying multimap
 * (see {@code setBinds}, which relies on exactly that). Callers that only
 * need to read should not mutate the result.
 */
public Collection<Input> getBoundInputs() { return uriBoundInputs.values(); }
/**
 * Returns an immutable snapshot of every registered command mapping.
 *
 * <p>Synchronized so the snapshot is taken consistently with concurrent
 * registration and removal of commands.
 */
@Override
public synchronized Set<CommandMapping> getCommands() {
    ImmutableSet<CommandMapping> snapshot = ImmutableSet.copyOf(this.commands.values());
    return snapshot;
}
@Override public void initialize() { // throw them all in a set to remove duplicates Collection<FacetProvider> facetProviders = new LinkedHashSet<>(facetProviderChains.values()); facetProviders.forEach(FacetProvider::initialize); worldRasterizers.forEach(WorldRasterizer::initialize); entityProviders.forEach(EntityProvider::initialize); } }
/**
 * Returns the leaf qualifiers of the relation tree: those qualifiers that
 * appear as a child but have no children of their own.
 *
 * @return an unmodifiable list of leaf qualifiers
 */
public List<String> getLeaves() {
    List<String> leaves = relations.values()
        .stream()
        .filter(candidate -> relations.get(candidate).isEmpty())
        .collect(Collectors.toList());
    return unmodifiableList(leaves);
}
/**
 * Checks whether the given mapping is registered.
 *
 * <p>{@code synchronized} added for consistency with the other accessors of
 * {@code this.commands} (e.g. {@code getCommands}, {@code getAliases},
 * {@code removeMapping}), which all hold the monitor while touching the map.
 *
 * @param mapping the mapping to look for; must not be null
 * @return true if an equal mapping is registered
 */
@Override
public synchronized boolean containsMapping(CommandMapping mapping) {
    checkNotNull(mapping, "mapping");
    // Collection.contains performs the same equals() scan the original
    // hand-written loop did.
    return this.commands.values().contains(mapping);
}
/**
 * Returns every alias (primary and secondary) of every registered command.
 *
 * @return an unmodifiable set of all aliases
 */
@Override
public synchronized Set<String> getAliases() {
    Set<String> allAliases = new HashSet<>();
    this.commands.values().forEach(mapping -> allAliases.addAll(mapping.getAllAliases()));
    return Collections.unmodifiableSet(allAliases);
}
/**
 * Returns the primary alias of every registered command.
 *
 * @return an unmodifiable set of primary aliases
 */
@Override
public synchronized Set<String> getPrimaryAliases() {
    Set<String> primaries = new HashSet<>();
    this.commands.values().forEach(mapping -> primaries.add(mapping.getPrimaryAlias()));
    return Collections.unmodifiableSet(primaries);
}
/** * Sets the inputs for a given bind, replacing any previous inputs * */ public void setBinds(SimpleUri bindUri, Iterable<Input> inputs) { Set<Input> uniqueInputs = Sets.newLinkedHashSet(inputs); // Clear existing usages of the given inputs Iterator<Input> iterator = uriBoundInputs.values().iterator(); while (iterator.hasNext()) { Input i = iterator.next(); if (uniqueInputs.contains(i)) { iterator.remove(); } } uriBoundInputs.replaceValues(bindUri, uniqueInputs); }
/**
 * Remove all mappings contained with the given collection.
 *
 * @param mappings The collection
 * @return Whether the at least one command was removed
 */
public synchronized boolean removeMappings(Collection<?> mappings) {
    checkNotNull(mappings, "mappings");
    // removeIf does exactly what the original iterator loop did, and its
    // return value is the original "found" flag (true iff anything was removed).
    return this.commands.values().removeIf(mappings::contains);
}
/**
 * Remove a command identified by the given mapping.
 *
 * <p>Every registered mapping equal to the argument is removed (the scan
 * deliberately does not stop at the first hit); the last one removed is the
 * one returned.
 *
 * @param mapping The mapping
 * @return The previous mapping associated with the alias, if one was found
 */
public synchronized Optional<CommandMapping> removeMapping(CommandMapping mapping) {
    checkNotNull(mapping, "mapping");
    CommandMapping removed = null;
    for (Iterator<CommandMapping> it = this.commands.values().iterator(); it.hasNext(); ) {
        CommandMapping candidate = it.next();
        if (candidate.equals(mapping)) {
            it.remove();
            removed = candidate;
        }
    }
    return Optional.ofNullable(removed);
}
/**
 * Builds the tree, first locating its root: the type whose qualifier never
 * appears as a child in the relations multimap.
 */
public ResourceTypeTree build() {
    Collection<String> allChildren = relations.values();
    for (ResourceType candidate : types) {
        // A type that is nobody's child is the root.
        if (!allChildren.contains(candidate.getQualifier())) {
            root = candidate;
            break;
        }
    }
    return new ResourceTypeTree(this);
}
}
/**
 * Checks whether the source is permitted to use at least one of the
 * registered commands.
 *
 * @param source the source to test
 * @return true if any registered command's callable grants permission
 */
@Override
public boolean testPermission(CommandSource source) {
    boolean permitted = false;
    for (CommandMapping mapping : this.commands.values()) {
        if (mapping.getCallable().testPermission(source)) {
            permitted = true;
            break;
        }
    }
    return permitted;
}
/** * Commits the DB transaction and adds the issues to Elasticsearch index. * <p> * If indexing fails, then the recovery daemon will retry later and this * method successfully returns. Meanwhile these issues will be "eventually * consistent" when requesting the index. */ public void commitAndIndexIssues(DbSession dbSession, Collection<IssueDto> issues) { ListMultimap<String, EsQueueDto> itemsByIssueKey = ArrayListMultimap.create(); issues.stream() .map(issue -> createQueueDto(issue.getKey(), ID_TYPE_ISSUE_KEY, issue.getProjectUuid())) // a mutable ListMultimap is needed for doIndexIssueItems, so MoreCollectors.index() is // not used .forEach(i -> itemsByIssueKey.put(i.getDocId(), i)); dbClient.esQueueDao().insert(dbSession, itemsByIssueKey.values()); dbSession.commit(); doIndexIssueItems(dbSession, itemsByIssueKey); }
// Verifies that get()/values() return live views of a linked multimap:
// they reflect insertion order, and clearing the multimap empties the
// previously obtained views in place.
public void testLinkedClear() {
    ListMultimap<String, Integer> map = create();
    map.put("foo", 1);
    map.put("foo", 2);
    map.put("bar", 3);
    // Capture the views BEFORE clearing — the point of the test.
    List<Integer> foos = map.get("foo");
    Collection<Integer> values = map.values();
    assertEquals(asList(1, 2), foos);
    assertThat(values).containsExactly(1, 2, 3).inOrder();
    map.clear();
    // Views write through: after clear() they must observe the empty map.
    assertEquals(Collections.emptyList(), foos);
    assertThat(values).isEmpty();
    assertEquals("[]", map.entries().toString());
    assertEquals("{}", map.toString());
}
return; indicesToCalculate = indexSets.values().size();
); libraries.addAll(bundleLibs.values()); libraries.addAll(configLibs.values()); libraries.addAll(dependencyLibs.values());
/**
 * Indexes the issues referenced by the given es_queue items (one item per
 * issue key) and deletes from the index the keys that no longer exist in db.
 *
 * @param itemsByIssueKey queue items keyed by issue key; MUTATED: handled
 *                        keys are removed so only missing issues remain
 * @return the result reported by the bulk indexer
 */
private IndexingResult doIndexIssueItems(DbSession dbSession, ListMultimap<String, EsQueueDto> itemsByIssueKey) {
    if (itemsByIssueKey.isEmpty()) {
        return new IndexingResult();
    }
    // One-to-one: each queue item maps to exactly one issue doc, so items can
    // be marked done individually as docs are indexed.
    IndexingListener listener = new OneToOneResilientIndexingListener(dbClient, dbSession, itemsByIssueKey.values());
    BulkIndexer bulkIndexer = createBulkIndexer(Size.REGULAR, listener);
    bulkIndexer.start();
    try (IssueIterator issues = issueIteratorFactory.createForIssueKeys(itemsByIssueKey.keySet())) {
        while (issues.hasNext()) {
            IssueDoc issue = issues.next();
            bulkIndexer.add(newIndexRequest(issue));
            // Safe to mutate the multimap here: we iterate the db result, not
            // the multimap itself. Removing the key leaves only unmatched items.
            itemsByIssueKey.removeAll(issue.getId());
        }
    }
    // the remaining uuids reference issues that don't exist in db. They must
    // be deleted from index.
    itemsByIssueKey.values().forEach(
        item -> bulkIndexer.addDeletion(INDEX_TYPE_ISSUE, item.getDocId(), item.getDocRouting()));
    return bulkIndexer.stop();
}
int maxTop = 0; int maxBottom = 0; for (FacetProvider facetProvider : providerChains.values()) {
private IndexingResult doIndexProjectItems(DbSession dbSession, ListMultimap<String, EsQueueDto> itemsByProjectUuid) { if (itemsByProjectUuid.isEmpty()) { return new IndexingResult(); } // one project, referenced by es_queue.doc_id = many issues IndexingListener listener = new OneToManyResilientIndexingListener(dbClient, dbSession, itemsByProjectUuid.values()); BulkIndexer bulkIndexer = createBulkIndexer(Size.REGULAR, listener); bulkIndexer.start(); for (String projectUuid : itemsByProjectUuid.keySet()) { // TODO support loading of multiple projects in a single SQL request try (IssueIterator issues = issueIteratorFactory.createForProject(projectUuid)) { if (issues.hasNext()) { do { IssueDoc doc = issues.next(); bulkIndexer.add(newIndexRequest(doc)); } while (issues.hasNext()); } else { // project does not exist or has no issues. In both case // all the documents related to this project are deleted. addProjectDeletionToBulkIndexer(bulkIndexer, projectUuid); } } } return bulkIndexer.stop(); }