/**
 * Compose the name of the node index for the given parameters using the schema version of this container.
 *
 * @param projectUuid
 *            Uuid of the project to which the index belongs
 * @param branchUuid
 *            Uuid of the branch to which the index belongs
 * @param type
 *            Type of the containers (e.g. draft, published) which are stored in the index
 * @return Name of the index
 */
default String getIndexName(String projectUuid, String branchUuid, ContainerType type) {
	String schemaVersionUuid = getSchemaContainerVersion().getUuid();
	return composeIndexName(projectUuid, branchUuid, schemaVersionUuid, type);
}
/**
 * Assert that the container uses the given schema version. Both the version number and the version uuid are compared.
 *
 * @param schemaVersion
 *            Expected schema container version
 * @return Fluent API
 */
public NodeGraphFieldContainerAssert isOf(SchemaContainerVersion schemaVersion) {
	SchemaContainerVersion actualVersion = actual.getSchemaContainerVersion();
	assertThat(actualVersion.getVersion()).as("Schema version").isEqualTo(schemaVersion.getVersion());
	assertThat(actualVersion.getUuid()).as("Schema version Uuid").isEqualTo(schemaVersion.getUuid());
	return this;
}
/**
 * Create a Mockito mock of a node using the given parent, project, creator, language and tags.
 * The mock is wired up with a mocked "content" schema container and a mocked draft field
 * container for the given language.
 *
 * @param parentNode
 *            Node which will be returned as the parent (for any branch uuid)
 * @param project
 *            Project which will be returned for the node
 * @param user
 *            User which will be returned as the creator
 * @param languageTag
 *            Language of the mocked draft field container
 * @param tagA
 *            First tag assigned to the node
 * @param tagB
 *            Second tag assigned to the node
 * @return Mocked node
 */
public static Node mockNode(Node parentNode, Project project, User user, String languageTag, Tag tagA, Tag tagB) {
	Node node = mock(Node.class);
	when(node.getParentNode(anyString())).thenReturn(parentNode);
	when(node.getProject()).thenReturn(project);
	// The explicit type witness on Mockito.when is needed because getTags returns a wildcard traversal
	TraversalResult<? extends Tag> tagResult = new TraversalResult<>(Arrays.asList(tagA, tagB));
	Mockito.<TraversalResult<? extends Tag>>when(node.getTags(any(Branch.class))).thenReturn(tagResult);
	SchemaContainer schemaContainer = mockSchemaContainer("content", user);
	SchemaContainerVersion latestVersion = schemaContainer.getLatestVersion();
	when(latestVersion.getUuid()).thenReturn(UUID_2);
	when(node.getSchemaContainer()).thenReturn(schemaContainer);
	when(node.getCreator()).thenReturn(user);
	when(node.getUuid()).thenReturn(NODE_DELOREAN_UUID);
	// No roles are granted read permissions on the mocked node
	when(node.getRolesWithPerm(GraphPermission.READ_PERM)).thenReturn(createEmptyTraversal());
	when(node.getRolesWithPerm(GraphPermission.READ_PUBLISHED_PERM)).thenReturn(createEmptyTraversal());
	// Wire up the draft field container for the given language
	NodeGraphFieldContainer container = mockContainer(languageTag, user);
	when(container.getSchemaContainerVersion()).thenReturn(latestVersion);
	when(container.getParentNode()).thenReturn(node);
	when(container.getElementVersion()).thenReturn(UUID_5);
	when(node.getLatestDraftFieldContainer(languageTag)).thenReturn(container);
	when(node.getElementVersion()).thenReturn(UUID_4);
	Mockito.<Iterable<? extends NodeGraphFieldContainer>> when(node.getDraftGraphFieldContainers()).thenReturn(createEmptyTraversal());
	return node;
}
/**
 * Add a new node index to the search database and construct the index name using the provided values. See
 * {@link NodeContainerEntry#composeIndexName(String, String, String, ContainerType)} for details.
 *
 * @param project
 *            Project to which the index belongs
 * @param branch
 *            Branch to which the index belongs
 * @param version
 *            Schema version of the containers which will be stored in the index
 * @param type
 *            Type of the containers (e.g. draft, published)
 * @return Fluent API
 */
default SearchQueueBatch addNodeIndex(Project project, Branch branch, SchemaContainerVersion version, ContainerType type) {
	String projectUuid = project.getUuid();
	String branchUuid = branch.getUuid();
	return createNodeIndex(projectUuid, branchUuid, version.getUuid(), type, version.getSchema());
}
@Override public Set<String> filterUnknownIndices(Set<String> indices) { Set<String> activeIndices = new HashSet<>(); db.tx(() -> { for (Project currentProject : boot.meshRoot().getProjectRoot().findAll()) { for (Branch branch : currentProject.getBranchRoot().findAll()) { for (SchemaContainerVersion version : branch.findActiveSchemaVersions()) { Arrays.asList(ContainerType.DRAFT, ContainerType.PUBLISHED).forEach(type -> { activeIndices .add(NodeGraphFieldContainer.composeIndexName(currentProject.getUuid(), branch.getUuid(), version.getUuid(), type)); }); } } } }); if (log.isDebugEnabled()) { for (String name : activeIndices) { log.debug("Active index: {" + name + "}"); } } return indices.stream() // Only handle indices of the handler's type .filter(i -> i.startsWith(getType())) // Filter out indices which are active .filter(i -> !activeIndices.contains(i)) .collect(Collectors.toSet()); }
/**
 * Assert that the node was stored in the index for the given languages and both the DRAFT and PUBLISHED versions.
 *
 * @param node
 *            Node which should have been stored
 * @param project
 *            Project of the node
 * @param branch
 *            Branch of the node
 * @param languages
 *            Languages for which a store operation must have been recorded
 * @return Fluent API
 */
public DummySearchProviderAssert storedAllContainers(Node node, Project project, Branch branch, String... languages) {
	// These values do not depend on the loop variables. Compute them once instead of per iteration.
	String projectUuid = project.getUuid();
	String branchUuid = branch.getUuid();
	String schemaVersionUuid = node.getSchemaContainer().getLatestVersion().getUuid();
	String nodeUuid = node.getUuid();
	for (ContainerType type : Arrays.asList(DRAFT, PUBLISHED)) {
		// The index name only depends on the container type, not on the language
		String indexName = NodeGraphFieldContainer.composeIndexName(projectUuid, branchUuid, schemaVersionUuid, type);
		for (String lang : languages) {
			assertThat(actual).hasStore(indexName, NodeGraphFieldContainer.composeDocumentId(nodeUuid, lang));
		}
	}
	return this;
}
@Override public Set<String> getSelectedIndices(InternalActionContext ac) { return db.tx(() -> { Set<String> indices = new HashSet<>(); Project project = ac.getProject(); if (project != null) { Branch branch = ac.getBranch(); // Locate all schema versions which need to be taken into consideration when choosing the indices for (SchemaContainerVersion version : branch.findActiveSchemaVersions()) { indices.add(NodeGraphFieldContainer.composeIndexName(project.getUuid(), branch.getUuid(), version.getUuid(), ContainerType .forVersion(ac.getVersioningParameters().getVersion()))); } } else { // The project was not specified. Maybe a global search wants to know which indices must be searched. // In that case we just iterate over all projects and collect index names per branch. for (Project currentProject : boot.meshRoot().getProjectRoot().findAll()) { for (Branch branch : currentProject.getBranchRoot().findAll()) { for (SchemaContainerVersion version : branch.findActiveSchemaVersions()) { indices.add(NodeGraphFieldContainer.composeIndexName(currentProject.getUuid(), branch.getUuid(), version.getUuid(), ContainerType.forVersion(ac.getVersioningParameters().getVersion()))); } } } } return indices; }); }
private Completable diffAndSync(Project project, Branch branch, SchemaContainerVersion version, ContainerType type, SyncMetric metric) throws HttpErrorException { String indexName = NodeGraphFieldContainer.composeIndexName(project.getUuid(), branch.getUuid(), version.getUuid(), type); metric.incUpdate(needUpdate.size()); String versionUuid = version.getUuid(); String projectUuid = project.getUuid(); String branchUuid = branch.getUuid();
/**
 * Convert the given move entry into two bulk entries: a delete entry which removes the document from the
 * index of the old container and an index entry which adds the document to the index of the new container.
 *
 * @param entry
 *            Move entry to be processed
 * @return Observable which emits the index and delete bulk entries
 */
public Observable<? extends BulkEntry> moveForBulk(MoveDocumentEntry entry) {
	MoveEntryContext context = entry.getContext();
	ContainerType type = context.getContainerType();
	String branchUuid = context.getBranchUuid();

	// Remove the document from the index of the source container
	NodeGraphFieldContainer source = context.getOldContainer();
	String sourceProjectUuid = source.getParentNode().getProject().getUuid();
	String sourceIndex = NodeGraphFieldContainer.composeIndexName(sourceProjectUuid, branchUuid,
		source.getSchemaContainerVersion().getUuid(), type);
	String sourceDocumentId = NodeGraphFieldContainer.composeDocumentId(source.getParentNode().getUuid(), source.getLanguageTag());
	DeleteBulkEntry deleteEntry = new DeleteBulkEntry(sourceIndex, sourceDocumentId);

	// Add the document to the index of the target container
	NodeGraphFieldContainer target = context.getNewContainer();
	String targetProjectUuid = target.getParentNode().getProject().getUuid();
	String targetIndex = NodeGraphFieldContainer.composeIndexName(targetProjectUuid, branchUuid,
		target.getSchemaContainerVersion().getUuid(), type);
	String targetDocumentId = NodeGraphFieldContainer.composeDocumentId(target.getParentNode().getUuid(), target.getLanguageTag());
	JsonObject doc = transformer.toDocument(target, branchUuid, type);
	IndexBulkEntry addEntry = new IndexBulkEntry(targetIndex, targetDocumentId, doc, searchProvider.hasIngestPipelinePlugin());

	return Observable.fromArray(addEntry, deleteEntry);
}
/**
 * Transform the given container into an Elasticsearch document and store it in the search index.
 *
 * @param container
 *            Container to be stored
 * @param branchUuid
 *            Uuid of the branch for which the document should be stored
 * @param type
 *            Type of the container (e.g. draft, published)
 * @return Single with the name of the affected index
 */
public Single<String> storeContainer(NodeGraphFieldContainer container, String branchUuid, ContainerType type) {
	JsonObject doc = transformer.toDocument(container, branchUuid, type);
	Node parentNode = container.getParentNode();
	String indexName = NodeGraphFieldContainer.composeIndexName(parentNode.getProject().getUuid(), branchUuid,
		container.getSchemaContainerVersion().getUuid(), type);
	if (log.isDebugEnabled()) {
		log.debug("Storing node {" + parentNode.getUuid() + "} into index {" + indexName + "}");
	}
	String documentId = NodeGraphFieldContainer.composeDocumentId(parentNode.getUuid(), container.getLanguageTag());
	return searchProvider.storeDocument(indexName, documentId, doc).andThen(Single.just(indexName));
}
/**
 * Transform the given container into an Elasticsearch document and wrap it in an index bulk entry.
 *
 * @param container
 *            Container to be stored
 * @param branchUuid
 *            Uuid of the branch for which the document should be created
 * @param type
 *            Type of the container (e.g. draft, published)
 * @return Single with the bulk entry
 */
public Single<IndexBulkEntry> storeContainerForBulk(NodeGraphFieldContainer container, String branchUuid, ContainerType type) {
	JsonObject doc = transformer.toDocument(container, branchUuid, type);
	Node parentNode = container.getParentNode();
	String indexName = NodeGraphFieldContainer.composeIndexName(parentNode.getProject().getUuid(), branchUuid,
		container.getSchemaContainerVersion().getUuid(), type);
	if (log.isDebugEnabled()) {
		log.debug("Storing node {" + parentNode.getUuid() + "} into index {" + indexName + "}");
	}
	String documentId = NodeGraphFieldContainer.composeDocumentId(parentNode.getUuid(), container.getLanguageTag());
	IndexBulkEntry bulkEntry = new IndexBulkEntry(indexName, documentId, doc, searchProvider.hasIngestPipelinePlugin());
	return Single.just(bulkEntry);
}