/** * Use the given node to populate the parent node fields within the source map. * * @param document * @param parentNode */ private void addParentNodeInfo(JsonObject document, Node parentNode) { JsonObject info = new JsonObject(); info.put(UUID_KEY, parentNode.getUuid()); // TODO check whether nesting of nested elements would also work // TODO FIXME MIGRATE: How to add this reference info? The schema is now linked to the node. Should we add another reference: // (n:Node)->(sSchemaContainer) ? // parentNodeInfo.put("schema.name", parentNode.getSchemaContainer().getName()); // parentNodeInfo.put("schema.uuid", parentNode.getSchemaContainer().getUuid()); document.put("parentNode", info); }
/**
 * Create a minimal node mock which only provides the delorean uuid and a mocked schema container.
 *
 * @param schemaType
 *            Schema type which will be used for the mocked schema container
 * @param user
 *            User which will be referenced by the mocked schema container
 * @return Mocked node
 */
public static Node mockNodeBasic(String schemaType, User user) {
	Node mockedNode = mock(Node.class);
	when(mockedNode.getUuid()).thenReturn(NODE_DELOREAN_UUID);
	when(mockedNode.getSchemaContainer()).thenReturn(mockSchemaContainer(schemaType, user));
	return mockedNode;
}
/**
 * Assert that the response contains the given node.
 *
 * @param node
 *            Node to compare the response against
 * @return Fluent API
 */
public NodeResponseAssert is(Node node) {
	String expectedUuid = node.getUuid();
	assertThat(actual.getUuid()).as("Uuid").isEqualTo(expectedUuid);
	return this;
}
/**
 * Load the version strings of all matching field containers from the graph.
 *
 * @param branch
 *            Branch whose containers should be inspected
 * @param version
 *            Schema container version that the containers must reference
 * @param type
 *            Container type (e.g. draft/published) that the containers must have in the branch
 * @return Map of "nodeUuid-languageTag" keys to generated version strings
 */
private Map<String, String> loadVersionsFromGraph(Branch branch, SchemaContainerVersion version, ContainerType type) {
	Map<String, String> versionsByKey = new HashMap<>();
	final String branchUuid = branch.getUuid();
	version.getFieldContainers(branchUuid)
		.filter(container -> container.getSchemaContainerVersion().equals(version))
		.filter(container -> container.isType(type, branchUuid))
		.forEach(container -> {
			String key = container.getParentNode().getUuid() + "-" + container.getLanguageTag();
			versionsByKey.put(key, generateVersion(container, branchUuid, type));
		});
	return versionsByKey;
}
/**
 * Return the document id for the container.
 *
 * @return Document id composed of the parent node uuid and the container language tag
 */
default String getDocumentId() {
	String nodeUuid = getParentNode().getUuid();
	String languageTag = getLanguageTag();
	return composeDocumentId(nodeUuid, languageTag);
}
/**
 * Create a fully wired node mock including parent node, project, tags, schema, permissions and a draft field container.
 *
 * @param parentNode
 *            Node which the mock will return as its parent
 * @param project
 *            Project which the mock will return
 * @param user
 *            User used as creator and for the mocked schema/container references
 * @param languageTag
 *            Language of the mocked draft field container
 * @param tagA
 *            First tag of the node
 * @param tagB
 *            Second tag of the node
 * @return Mocked node
 */
public static Node mockNode(Node parentNode, Project project, User user, String languageTag, Tag tagA, Tag tagB) {
	Node mockedNode = mock(Node.class);

	// Basic references
	when(mockedNode.getUuid()).thenReturn(NODE_DELOREAN_UUID);
	when(mockedNode.getElementVersion()).thenReturn(UUID_4);
	when(mockedNode.getParentNode(anyString())).thenReturn(parentNode);
	when(mockedNode.getProject()).thenReturn(project);
	when(mockedNode.getCreator()).thenReturn(user);

	// Tags
	TraversalResult<? extends Tag> tags = new TraversalResult<>(Arrays.asList(tagA, tagB));
	Mockito.<TraversalResult<? extends Tag>>when(mockedNode.getTags(any(Branch.class))).thenReturn(tags);

	// Schema
	SchemaContainer schema = mockSchemaContainer("content", user);
	SchemaContainerVersion latestSchemaVersion = schema.getLatestVersion();
	when(latestSchemaVersion.getUuid()).thenReturn(UUID_2);
	when(mockedNode.getSchemaContainer()).thenReturn(schema);

	// Permissions - no roles grant read permissions on the mock
	when(mockedNode.getRolesWithPerm(GraphPermission.READ_PERM)).thenReturn(createEmptyTraversal());
	when(mockedNode.getRolesWithPerm(GraphPermission.READ_PUBLISHED_PERM)).thenReturn(createEmptyTraversal());

	// Draft field container for the requested language
	NodeGraphFieldContainer container = mockContainer(languageTag, user);
	when(container.getSchemaContainerVersion()).thenReturn(latestSchemaVersion);
	when(container.getParentNode()).thenReturn(mockedNode);
	when(container.getElementVersion()).thenReturn(UUID_5);
	when(mockedNode.getLatestDraftFieldContainer(languageTag)).thenReturn(container);
	Mockito.<Iterable<? extends NodeGraphFieldContainer>>when(mockedNode.getDraftGraphFieldContainers()).thenReturn(createEmptyTraversal());

	return mockedNode;
}
/** * Transform the user to the document which can be stored in ES. * * @param user * @param withVersion * Whether to include the version number. * @return */ @Override public JsonObject toDocument(User user) { JsonObject document = new JsonObject(); addBasicReferences(document, user); document.put(USERNAME_KEY, user.getUsername()); document.put(EMAIL_KEY, user.getEmailAddress()); document.put(FIRSTNAME_KEY, user.getFirstname()); document.put(LASTNAME_KEY, user.getLastname()); addGroups(document, user.getGroups()); addPermissionInfo(document, user); // TODO add disabled / enabled flag Node referencedNode = user.getReferencedNode(); if (referencedNode != null) { document.put(NODEREFERECE_KEY, referencedNode.getUuid()); } document.put(VERSION_KEY, generateVersion(user)); return document; }
public NodeAssert matches(NodeCreateRequest request) { assertNotNull(request); assertNotNull(actual); // for (Entry<String, String> entry : request.getProperties().entrySet()) { // // Language language = languageService.findByLanguageTag(languageTag); // String propValue = node.getI18nProperties(language).getProperty(entry.getKey()); // assertEquals("The property {" + entry.getKey() + "} did not match with the response object property", entry.getValue(), propValue); // } assertNotNull(actual.getUuid()); assertNotNull(actual.getCreator()); return this; }
/**
 * Step 3 - Check whether we need to handle all languages.
 *
 * Invoke store for the possible set of containers. Utilise the given context settings as much as possible.
 *
 * @param obs
 *            Set which collects the store operations
 * @param node
 *            Node whose containers should be stored
 * @param branchUuid
 *            Branch to load containers for
 * @param type
 *            Container type to store
 * @param context
 *            Context which may restrict the operation to a single language
 */
private void store(Set<Single<String>> obs, Node node, String branchUuid, ContainerType type, GenericEntryContext context) {
	String languageTag = context.getLanguageTag();
	if (languageTag == null) {
		// No language restriction - store a document for every container of the given type
		for (NodeGraphFieldContainer container : node.getGraphFieldContainersIt(branchUuid, type)) {
			obs.add(storeContainer(container, branchUuid, type));
		}
		return;
	}
	NodeGraphFieldContainer container = node.getGraphFieldContainer(languageTag, branchUuid, type);
	if (container == null) {
		log.warn("Node {" + node.getUuid() + "} has no language container for languageTag {" + languageTag
			+ "}. I can't store the search index document. This may be normal in cases if mesh is handling an outdated search queue batch entry.");
	} else {
		obs.add(storeContainer(container, branchUuid, type));
	}
}
/**
 * Build the list of filters which can be applied to node contents.
 *
 * @return List of node content filters
 */
@Override
protected List<FilterField<NodeContent, ?>> getFilters() {
	List<FilterField<NodeContent, ?>> nodeFilters = new ArrayList<>();
	// Node based filters
	nodeFilters.add(new MappedFilter<>("uuid", "Filters by uuid", StringFilter.filter(), content -> content.getNode().getUuid()));
	nodeFilters
		.add(new MappedFilter<>("schema", "Filters by schema", SchemaFilter.filter(context), content -> content.getNode().getSchemaContainer()));
	nodeFilters.add(new MappedFilter<>("created", "Filters by node creation timestamp", DateFilter.filter(),
		content -> content.getNode().getCreationTimestamp()));
	nodeFilters.add(new MappedFilter<>("creator", "Filters by creator", UserFilter.filter(), content -> content.getNode().getCreator()));
	// Container based filters
	nodeFilters.add(new MappedFilter<>("edited", "Filters by node update timestamp", DateFilter.filter(),
		content -> content.getContainer().getLastEditedTimestamp()));
	nodeFilters.add(new MappedFilter<>("editor", "Filters by editor", UserFilter.filter(), content -> content.getContainer().getEditor()));
	nodeFilters.add(new MappedFilter<>("fields", "Filters by fields", createAllFieldFilters(), Function.identity()));
	return nodeFilters;
}
/**
 * Assert that the node was stored in the index for given languages and DRAFT and PUBLISHED versions.
 *
 * @param node
 *            Node which should have been stored
 * @param project
 *            Project of the node
 * @param branch
 *            Branch of the node
 * @param languages
 *            Languages for which a stored document is expected
 * @return Fluent API
 */
public DummySearchProviderAssert storedAllContainers(Node node, Project project, Branch branch, String... languages) {
	// These values do not change per type/language - compute them once instead of inside the inner loop
	String projectUuid = project.getUuid();
	String branchUuid = branch.getUuid();
	String schemaVersionUuid = node.getSchemaContainer().getLatestVersion().getUuid();
	String nodeUuid = node.getUuid();
	for (ContainerType type : Arrays.asList(DRAFT, PUBLISHED)) {
		String indexName = NodeGraphFieldContainer.composeIndexName(projectUuid, branchUuid, schemaVersionUuid, type);
		for (String lang : languages) {
			assertThat(actual).hasStore(indexName, NodeGraphFieldContainer.composeDocumentId(nodeUuid, lang));
		}
	}
	return this;
}
/**
 * Step 3 - Check whether we need to handle all languages.
 *
 * Invoke store for the possible set of containers. Utilise the given context settings as much as possible.
 *
 * @param node
 *            Node whose containers should be stored
 * @param branchUuid
 *            Branch to load containers for
 * @param type
 *            Container type to store
 * @param context
 *            Context which may restrict the operation to a single language
 * @return Observable of the created bulk entries; empty when no matching container could be found
 */
private Observable<IndexBulkEntry> storeForBulk(Node node, String branchUuid, ContainerType type, GenericEntryContext context) {
	String languageTag = context.getLanguageTag();
	if (languageTag == null) {
		// No language restriction - create a bulk entry for every container of the given type
		Set<Observable<IndexBulkEntry>> entries = new HashSet<>();
		for (NodeGraphFieldContainer container : node.getGraphFieldContainersIt(branchUuid, type)) {
			entries.add(storeContainerForBulk(container, branchUuid, type).toObservable());
		}
		return Observable.merge(entries);
	}
	NodeGraphFieldContainer container = node.getGraphFieldContainer(languageTag, branchUuid, type);
	if (container == null) {
		log.warn("Node {" + node.getUuid() + "} has no language container for languageTag {" + languageTag
			+ "}. I can't store the search index document. This may be normal in cases if mesh is handling an outdated search queue batch entry.");
		return Observable.empty();
	}
	return storeContainerForBulk(container, branchUuid, type).toObservable();
}
/**
 * Generate an elasticsearch document object from the given container and stores it in the search index.
 *
 * @param container
 *            Container to transform and store
 * @param branchUuid
 *            Branch which determines the target index
 * @param type
 *            Container type which determines the target index
 * @return Single with affected index name
 */
public Single<String> storeContainer(NodeGraphFieldContainer container, String branchUuid, ContainerType type) {
	JsonObject document = transformer.toDocument(container, branchUuid, type);
	Node parentNode = container.getParentNode();
	String nodeUuid = parentNode.getUuid();
	String indexName = NodeGraphFieldContainer.composeIndexName(parentNode.getProject().getUuid(), branchUuid,
		container.getSchemaContainerVersion().getUuid(), type);
	if (log.isDebugEnabled()) {
		log.debug("Storing node {" + nodeUuid + "} into index {" + indexName + "}");
	}
	String documentId = NodeGraphFieldContainer.composeDocumentId(nodeUuid, container.getLanguageTag());
	return searchProvider.storeDocument(indexName, documentId, document).andThen(Single.just(indexName));
}
/**
 * Generate an elasticsearch document object from the given container and stores it in the search index.
 *
 * @param container
 *            Container to transform
 * @param branchUuid
 *            Branch which determines the target index
 * @param type
 *            Container type which determines the target index
 * @return Single with the bulk entry
 */
public Single<IndexBulkEntry> storeContainerForBulk(NodeGraphFieldContainer container, String branchUuid, ContainerType type) {
	JsonObject document = transformer.toDocument(container, branchUuid, type);
	Node parentNode = container.getParentNode();
	String nodeUuid = parentNode.getUuid();
	String indexName = NodeGraphFieldContainer.composeIndexName(parentNode.getProject().getUuid(), branchUuid,
		container.getSchemaContainerVersion().getUuid(), type);
	if (log.isDebugEnabled()) {
		log.debug("Storing node {" + nodeUuid + "} into index {" + indexName + "}");
	}
	String documentId = NodeGraphFieldContainer.composeDocumentId(nodeUuid, container.getLanguageTag());
	return Single.just(new IndexBulkEntry(indexName, documentId, document, searchProvider.hasIngestPipelinePlugin()));
}
/**
 * Assert that the project response matches the given project.
 *
 * @param project
 *            Project to compare the response against
 * @return Fluent API
 */
public ProjectResponseAssert matches(Project project) {
	assertGenericNode(project, actual);
	assertEquals(project.getName(), actual.getName());
	// The root node reference must be set and must point to the base node of the project.
	// (The original asserted getRootNode() non-null twice; the duplicate was removed.)
	assertNotNull(actual.getRootNode());
	assertEquals(project.getBaseNode().getUuid(), actual.getRootNode().getUuid());
	return this;
}
// Trace which node/language/type combination a link is being resolved for to aid debugging of link rendering.
log.debug("Resolving link to " + node.getUuid() + " in language " + Arrays.toString(languageTags) + " with type " + type.name());
// Use the container's parent node as the source of the document uuid.
Node node = container.getParentNode();
JsonObject document = new JsonObject();
document.put("uuid", node.getUuid());
// Add the editor reference and the last-edit timestamp (formatted as ISO-8601) of the container.
addUser(document, "editor", container.getEditor());
document.put("edited", toISO8601(container.getLastEditedTimestamp()));
/**
 * Create the bulk entries which move a document between indices: a delete entry for the old location
 * and an index entry for the new location.
 *
 * @param entry
 *            Move entry which contains the old and new container
 * @return Observable emitting the index entry followed by the delete entry
 */
public Observable<? extends BulkEntry> moveForBulk(MoveDocumentEntry entry) {
	MoveEntryContext context = entry.getContext();
	ContainerType type = context.getContainerType();
	String branchUuid = context.getBranchUuid();

	// Delete the document from its old location
	NodeGraphFieldContainer oldContainer = context.getOldContainer();
	String oldIndexName = NodeGraphFieldContainer.composeIndexName(oldContainer.getParentNode().getProject().getUuid(), branchUuid,
		oldContainer.getSchemaContainerVersion().getUuid(), type);
	String oldDocumentId = NodeGraphFieldContainer.composeDocumentId(oldContainer.getParentNode().getUuid(), oldContainer.getLanguageTag());
	DeleteBulkEntry deleteEntry = new DeleteBulkEntry(oldIndexName, oldDocumentId);

	// Index the document at its new location
	NodeGraphFieldContainer newContainer = context.getNewContainer();
	String newIndexName = NodeGraphFieldContainer.composeIndexName(newContainer.getParentNode().getProject().getUuid(), branchUuid,
		newContainer.getSchemaContainerVersion().getUuid(), type);
	String newDocumentId = NodeGraphFieldContainer.composeDocumentId(newContainer.getParentNode().getUuid(), newContainer.getLanguageTag());
	JsonObject document = transformer.toDocument(newContainer, branchUuid, type);
	IndexBulkEntry addEntry = new IndexBulkEntry(newIndexName, newDocumentId, document, searchProvider.hasIngestPipelinePlugin());

	return Observable.fromArray(addEntry, deleteEntry);
}