@Override
public IndexingResult index(DbSession dbSession, Collection<EsQueueDto> items) {
  // Test double: remember the batch for later inspection and report that
  // nothing was indexed (zero requests, zero successes).
  this.calledItems = items;
  IndexingResult emptyResult = new IndexingResult();
  return emptyResult;
}
/**
 * Delegates the given queue items to the indexer registered for their type.
 * Items with no registered indexer are logged and dropped (empty result).
 */
private IndexingResult doIndex(DbSession dbSession, IndexType type, Collection<EsQueueDto> typeItems) {
  LOGGER.trace(LOG_PREFIX + "processing {} {}", typeItems.size(), type);
  ResilientIndexer indexer = indexersByType.get(type);
  if (indexer != null) {
    return indexer.index(dbSession, typeItems);
  }
  // No indexer registered for this doc type: skip the items rather than fail.
  LOGGER.error(LOG_PREFIX + "ignore {} items with unsupported type {}", typeItems.size(), type);
  return new IndexingResult();
}
/**
 * Recovers queued rule-related items by grouping them per index type and
 * indexing rules and rule extensions separately, aggregating both results.
 */
@Override
public IndexingResult index(DbSession dbSession, Collection<EsQueueDto> items) {
  IndexingResult result = new IndexingResult();
  if (items.isEmpty()) {
    return result;
  }
  ListMultimap<IndexType, EsQueueDto> itemsByType = groupItemsByType(items);
  result.add(doIndexRules(dbSession, itemsByType.get(INDEX_TYPE_RULE)));
  result.add(doIndexRuleExtensions(dbSession, itemsByType.get(INDEX_TYPE_RULE_EXTENSION)));
  return result;
}
@Override
public IndexingResult index(DbSession dbSession, Collection<EsQueueDto> items) {
  // Test double: record the batch, then report one request per item with
  // zero successes.
  called.add(items);
  IndexingResult result = new IndexingResult();
  for (EsQueueDto item : items) {
    result.incrementRequests();
  }
  return result;
}
}
/** * This is based on the fact that a WebService is only calling {@link ViewIndexer#delete(DbSession, Collection)} * So the resiliency is only taking in account a deletion of view component * A safety check is done by not deleting any component that still exist in database. * * This should not occur but prevent any misuse on this resiliency */ @Override public IndexingResult index(DbSession dbSession, Collection<EsQueueDto> items) { if (items.isEmpty()) { return new IndexingResult(); } Set<String> views = items .stream() .map(EsQueueDto::getDocId) .collect(toHashSet(items.size())); BulkIndexer bulkIndexer = newBulkIndexer(Size.REGULAR, new OneToOneResilientIndexingListener(dbClient, dbSession, items)); bulkIndexer.start(); // Safety check to remove all views that may not have been deleted views.removeAll(dbClient.componentDao().selectExistingUuids(dbSession, views)); views.forEach(v -> bulkIndexer.addDeletion(INDEX_TYPE_VIEW, v)); return bulkIndexer.stop(); }
/**
 * Recovers queued issue items. An item can reference either a single issue
 * (by issue key) or a whole project (by project uuid); each group is indexed
 * by its dedicated routine and the results are aggregated. Items with any
 * other doc-id type are logged and skipped.
 */
@Override
public IndexingResult index(DbSession dbSession, Collection<EsQueueDto> items) {
  ListMultimap<String, EsQueueDto> itemsByIssueKey = ArrayListMultimap.create();
  ListMultimap<String, EsQueueDto> itemsByProjectKey = ArrayListMultimap.create();
  for (EsQueueDto item : items) {
    String docIdType = item.getDocIdType();
    if (ID_TYPE_ISSUE_KEY.equals(docIdType)) {
      itemsByIssueKey.put(item.getDocId(), item);
    } else if (ID_TYPE_PROJECT_UUID.equals(docIdType)) {
      itemsByProjectKey.put(item.getDocId(), item);
    } else {
      LOGGER.error("Unsupported es_queue.doc_id_type for issues. Manual fix is required: " + item);
    }
  }
  IndexingResult result = new IndexingResult();
  result.add(doIndexIssueItems(dbSession, itemsByIssueKey));
  result.add(doIndexProjectItems(dbSession, itemsByProjectKey));
  return result;
}
/**
 * Recovers queued authorization items: re-indexes permissions of the projects
 * referenced by the queue items into every supported index type, and deletes
 * from those indices the projects that no longer exist in database.
 */
@Override
public IndexingResult index(DbSession dbSession, Collection<EsQueueDto> items) {
  IndexingResult result = new IndexingResult();
  // One BulkIndexer per distinct, supported index type present in the items.
  // Types not contained in 'indexTypes' are silently filtered out.
  List<BulkIndexer> bulkIndexers = items.stream()
    .map(EsQueueDto::getDocType)
    .distinct()
    .map(IndexType::parse)
    .filter(indexTypes::contains)
    .map(indexType -> new BulkIndexer(esClient, indexType, Size.REGULAR, new OneToOneResilientIndexingListener(dbClient, dbSession, items)))
    .collect(Collectors.toList());
  if (bulkIndexers.isEmpty()) {
    return result;
  }
  bulkIndexers.forEach(BulkIndexer::start);
  PermissionIndexerDao permissionIndexerDao = new PermissionIndexerDao();
  Set<String> remainingProjectUuids = items.stream().map(EsQueueDto::getDocId).collect(MoreCollectors.toHashSet());
  // Each project found in db is indexed into every bulk indexer and removed
  // from the remaining set as a side effect of this forEach.
  permissionIndexerDao.selectByUuids(dbClient, dbSession, remainingProjectUuids).forEach(p -> {
    remainingProjectUuids.remove(p.getProjectUuid());
    bulkIndexers.forEach(bi -> bi.add(newIndexRequest(p, bi.getIndexType())));
  });
  // the remaining references on projects that don't exist in db. They must
  // be deleted from index.
  remainingProjectUuids.forEach(projectUuid -> bulkIndexers.forEach(bi -> bi.addDeletion(bi.getIndexType(), projectUuid, projectUuid)));
  bulkIndexers.forEach(b -> result.add(b.stop()));
  return result;
}
@Override public IndexingResult index(DbSession dbSession, Collection<EsQueueDto> items) { if (items.isEmpty()) { return new IndexingResult(); } OneToOneResilientIndexingListener listener = new OneToOneResilientIndexingListener(dbClient, dbSession, items); BulkIndexer bulkIndexer = createBulkIndexer(Size.REGULAR, listener); bulkIndexer.start(); List<String> projectUuids = items.stream().map(EsQueueDto::getDocId).collect(MoreCollectors.toArrayList(items.size())); Iterator<String> it = projectUuids.iterator(); while (it.hasNext()) { String projectUuid = it.next(); try (ProjectMeasuresIndexerIterator rowIt = ProjectMeasuresIndexerIterator.create(dbSession, projectUuid)) { while (rowIt.hasNext()) { bulkIndexer.add(newIndexRequest(toProjectMeasuresDoc(rowIt.next()))); it.remove(); } } } // the remaining uuids reference projects that don't exist in db. They must // be deleted from index. projectUuids.forEach(projectUuid -> bulkIndexer.addDeletion(INDEX_TYPE_PROJECT_MEASURES, projectUuid, projectUuid)); return bulkIndexer.stop(); }
/**
 * Recovers queued component items: re-indexes all components of each
 * referenced branch, and deletes from the index the projects that could not
 * be found in database.
 */
@Override
public IndexingResult index(DbSession dbSession, Collection<EsQueueDto> items) {
  if (items.isEmpty()) {
    return new IndexingResult();
  }
  OneToManyResilientIndexingListener listener = new OneToManyResilientIndexingListener(dbClient, dbSession, items);
  BulkIndexer bulkIndexer = new BulkIndexer(esClient, INDEX_TYPE_COMPONENT, Size.REGULAR, listener);
  bulkIndexer.start();
  Set<String> branchUuids = items.stream().map(EsQueueDto::getDocId).collect(MoreCollectors.toHashSet(items.size()));
  Set<String> remaining = new HashSet<>(branchUuids);
  for (String branchUuid : branchUuids) {
    // TODO allow scrolling multiple projects at the same time
    dbClient.componentDao().scrollForIndexing(dbSession, branchUuid, context -> {
      ComponentDto dto = context.getResultObject();
      bulkIndexer.add(newIndexRequest(toDocument(dto)));
      // NOTE(review): the set was built from doc ids named "branchUuids" but
      // entries are removed by dto.projectUuid() — presumably these values
      // coincide for the branches being indexed; confirm, otherwise existing
      // branches could be left in 'remaining' and wrongly deleted below.
      remaining.remove(dto.projectUuid());
    });
  }
  // the remaining uuids reference projects that don't exist in db. They must
  // be deleted from index.
  remaining.forEach(projectUuid -> addProjectDeletionToBulkIndexer(bulkIndexer, projectUuid));
  return bulkIndexer.stop();
}
@Override
public IndexingResult index(DbSession dbSession, Collection<EsQueueDto> items) {
  // Test double: simulate that only the first 'success' items were indexed.
  called.addAll(items);
  int success = successfulReturns.next();
  IndexingResult result = new IndexingResult();
  int processed = 0;
  for (EsQueueDto item : items) {
    if (processed >= success) {
      break;
    }
    // Successfully "indexed" items are removed from the queue table.
    db.getDbClient().esQueueDao().delete(dbSession, item);
    result.incrementSuccess();
    indexed.add(item);
    processed++;
  }
  // Every item counts as one indexing request, successful or not.
  for (int i = 0; i < items.size(); i++) {
    result.incrementRequests();
  }
  dbSession.commit();
  return result;
}
@Override
public IndexingResult index(DbSession dbSession, Collection<EsQueueDto> items) {
  // Test double: every item is reported as both requested and successfully
  // indexed, and all queue rows are deleted.
  called.add(items);
  IndexingResult result = new IndexingResult();
  for (EsQueueDto item : items) {
    result.incrementSuccess().incrementRequests();
  }
  db.getDbClient().esQueueDao().delete(dbSession, items);
  dbSession.commit();
  return result;
}
}
@Test
public void onFinish_must_not_throw_any_exception_if_no_failure() {
  // One request matched by one success -> no failures.
  IndexingResult result = new IndexingResult();
  result.incrementRequests();
  result.incrementSuccess();
  // Must complete normally.
  IndexingListener.FAIL_ON_ERROR.onFinish(result);
}
}
@Override
public IndexingResult index(DbSession dbSession, Collection<EsQueueDto> items) {
  // Test double: every item but the designated 'failing' one is indexed
  // successfully and removed from the queue.
  IndexingResult result = new IndexingResult();
  for (EsQueueDto item : items) {
    result.incrementRequests();
    boolean isFailingItem = item.getUuid().equals(failing.getUuid());
    if (isFailingItem) {
      continue;
    }
    result.incrementSuccess();
    db.getDbClient().esQueueDao().delete(dbSession, item);
    dbSession.commit();
  }
  return result;
}
private IndexingResult doIndexRuleProfiles(DbSession dbSession, Map<String, EsQueueDto> ruleProfileItems) { IndexingResult result = new IndexingResult(); for (Map.Entry<String, EsQueueDto> entry : ruleProfileItems.entrySet()) { String ruleProfileUUid = entry.getKey(); EsQueueDto item = entry.getValue(); IndexingResult profileResult; RulesProfileDto profile = dbClient.qualityProfileDao().selectRuleProfile(dbSession, ruleProfileUUid); if (profile == null) { // profile does not exist anymore in db --> related documents must be deleted from index rules/activeRule SearchRequestBuilder search = esClient.prepareSearch(INDEX_TYPE_ACTIVE_RULE) .setQuery(QueryBuilders.boolQuery().must(termQuery(FIELD_ACTIVE_RULE_PROFILE_UUID, ruleProfileUUid))); profileResult = BulkIndexer.delete(esClient, INDEX_TYPE_ACTIVE_RULE, search); } else { BulkIndexer bulkIndexer = createBulkIndexer(Size.REGULAR, IndexingListener.FAIL_ON_ERROR); bulkIndexer.start(); dbClient.activeRuleDao().scrollByRuleProfileForIndexing(dbSession, ruleProfileUUid, i -> bulkIndexer.add(newIndexRequest(i))); profileResult = bulkIndexer.stop(); } if (profileResult.isSuccess()) { deleteQueueDto(dbSession, item); } result.add(profileResult); } return result; }
@Test
public void onFinish_must_throw_ISE_when_an_error_is_present() {
  // One request and zero successes -> one unrecoverable failure.
  IndexingResult result = new IndexingResult();
  result.incrementRequests();
  expectedException.expect(IllegalStateException.class);
  expectedException.expectMessage("Unrecoverable indexation failures");
  IndexingListener.FAIL_ON_ERROR.onFinish(result);
}
@Test
public void onSuccess_deletes_rows_from_ES_QUEUE_table() {
  EsQueueDto item1 = insertInQueue(INDEX_TYPE_ISSUE, "foo");
  EsQueueDto item2 = insertInQueue(INDEX_TYPE_ISSUE, "bar");
  EsQueueDto item3 = insertInQueue(INDEX_TYPE_ISSUE, "baz");
  db.commit();
  IndexingListener underTest = newListener(asList(item1, item2, item3));
  // no docs acknowledged yet: nothing is removed from the queue table
  underTest.onSuccess(emptyList());
  assertThatEsTableContainsOnly(item1, item2, item3);
  // acknowledging docs removes only the matching queue rows
  underTest.onSuccess(asList(toDocId(item1), toDocId(item3)));
  assertThatEsTableContainsOnly(item2);
  // onFinish does nothing
  underTest.onFinish(new IndexingResult());
  assertThatEsTableContainsOnly(item2);
}
private IndexingResult doIndexIssueItems(DbSession dbSession, ListMultimap<String, EsQueueDto> itemsByIssueKey) { if (itemsByIssueKey.isEmpty()) { return new IndexingResult(); } IndexingListener listener = new OneToOneResilientIndexingListener(dbClient, dbSession, itemsByIssueKey.values()); BulkIndexer bulkIndexer = createBulkIndexer(Size.REGULAR, listener); bulkIndexer.start(); try (IssueIterator issues = issueIteratorFactory.createForIssueKeys(itemsByIssueKey.keySet())) { while (issues.hasNext()) { IssueDoc issue = issues.next(); bulkIndexer.add(newIndexRequest(issue)); itemsByIssueKey.removeAll(issue.getId()); } } // the remaining uuids reference issues that don't exist in db. They must // be deleted from index. itemsByIssueKey.values().forEach( item -> bulkIndexer.addDeletion(INDEX_TYPE_ISSUE, item.getDocId(), item.getDocRouting())); return bulkIndexer.stop(); }
private IndexingResult doIndexProjectItems(DbSession dbSession, ListMultimap<String, EsQueueDto> itemsByProjectUuid) { if (itemsByProjectUuid.isEmpty()) { return new IndexingResult(); } // one project, referenced by es_queue.doc_id = many issues IndexingListener listener = new OneToManyResilientIndexingListener(dbClient, dbSession, itemsByProjectUuid.values()); BulkIndexer bulkIndexer = createBulkIndexer(Size.REGULAR, listener); bulkIndexer.start(); for (String projectUuid : itemsByProjectUuid.keySet()) { // TODO support loading of multiple projects in a single SQL request try (IssueIterator issues = issueIteratorFactory.createForProject(projectUuid)) { if (issues.hasNext()) { do { IssueDoc doc = issues.next(); bulkIndexer.add(newIndexRequest(doc)); } while (issues.hasNext()); } else { // project does not exist or has no issues. In both case // all the documents related to this project are deleted. addProjectDeletionToBulkIndexer(bulkIndexer, projectUuid); } } } return bulkIndexer.stop(); }
@Test
public void ES_QUEUE_rows_are_not_deleted_on_partial_error() {
  EsQueueDto item1 = insertInQueue(INDEX_TYPE_ISSUE, "P1");
  EsQueueDto item2 = insertInQueue(INDEX_TYPE_ISSUE, "P2");
  EsQueueDto outOfScopeItem = insertInQueue(ComponentIndexDefinition.INDEX_TYPE_COMPONENT, "P1");
  db.commit();
  // does not contain outOfScopeItem
  IndexingListener underTest = newListener(asList(item1, item2));
  DocId issue1 = newDocId(INDEX_TYPE_ISSUE, "I1");
  DocId issue2 = newDocId(INDEX_TYPE_ISSUE, "I2");
  // one-to-many listener: per-doc acknowledgements do not delete queue rows
  underTest.onSuccess(asList(issue1, issue2));
  assertThatEsTableContainsOnly(item1, item2, outOfScopeItem);
  // one failure among the 2 indexing requests of issues
  IndexingResult result = new IndexingResult();
  result.incrementSuccess().incrementRequests();
  result.incrementRequests();
  // partial failure -> no queue row may be deleted
  underTest.onFinish(result);
  assertThatEsTableContainsOnly(item1, item2, outOfScopeItem);
}
@Test
public void ES_QUEUE_rows_are_deleted_when_all_docs_are_successfully_indexed() {
  EsQueueDto item1 = insertInQueue(INDEX_TYPE_ISSUE, "P1");
  EsQueueDto item2 = insertInQueue(INDEX_TYPE_ISSUE, "P2");
  EsQueueDto outOfScopeItem = insertInQueue(ComponentIndexDefinition.INDEX_TYPE_COMPONENT, "P1");
  db.commit();
  // does not contain outOfScopeItem
  IndexingListener underTest = newListener(asList(item1, item2));
  DocId issue1 = newDocId(INDEX_TYPE_ISSUE, "I1");
  DocId issue2 = newDocId(INDEX_TYPE_ISSUE, "I2");
  // one-to-many listener: per-doc acknowledgements do not delete queue rows
  underTest.onSuccess(asList(issue1, issue2));
  assertThatEsTableContainsOnly(item1, item2, outOfScopeItem);
  // onFinish deletes all items
  IndexingResult result = new IndexingResult();
  result.incrementSuccess().incrementRequests();
  result.incrementSuccess().incrementRequests();
  underTest.onFinish(result);
  // only the item outside the listener's scope survives
  assertThatEsTableContainsOnly(outOfScopeItem);
}