@Override
public DocumentWriter removeChild( String id ) {
    // Remove the first child entry whose KEY field matches the given id, if any.
    EditableArray children = federatedDocument.getArray(DocumentTranslator.CHILDREN);
    if (children == null) {
        return this;
    }
    final int count = children.size();
    for (int index = 0; index < count; index++) {
        Object entry = children.get(index);
        if (!(entry instanceof Document)) {
            continue;
        }
        Document child = (Document)entry;
        if (child.getString(DocumentTranslator.KEY).equals(id)) {
            children.remove(index);
            break; // only the first match is removed
        }
    }
    return this;
}
@Override
public DocumentWriter removeChild( String id ) {
    // Scan the CHILDREN array and drop the first document entry keyed by 'id'.
    EditableArray children = federatedDocument.getArray(DocumentTranslator.CHILDREN);
    if (children != null) {
        int position = 0;
        while (position < children.size()) {
            Object value = children.get(position);
            if (value instanceof Document
                && ((Document)value).getString(DocumentTranslator.KEY).equals(id)) {
                children.remove(position);
                return this; // stop after the first match
            }
            position++;
        }
    }
    return this;
}
@Override
public DocumentWriter addChild( String id, String name ) {
    // Lazily create the CHILDREN array the first time a child is added.
    EditableArray children = federatedDocument.getArray(DocumentTranslator.CHILDREN);
    if (children == null) {
        children = DocumentFactory.newArray();
        federatedDocument.setArray(DocumentTranslator.CHILDREN, children);
    }
    // Each child entry is a small {KEY: id, NAME: name} document.
    children.addDocument(DocumentFactory.newDocument(DocumentTranslator.KEY, id,
                                                     DocumentTranslator.NAME, name));
    return this;
}
@Override
public DocumentWriter addChild( String id, String name ) {
    // Append a {KEY: id, NAME: name} entry, creating the CHILDREN array on first use.
    EditableArray existing = federatedDocument.getArray(DocumentTranslator.CHILDREN);
    if (existing == null) {
        EditableArray created = DocumentFactory.newArray();
        federatedDocument.setArray(DocumentTranslator.CHILDREN, created);
        existing = created;
    }
    existing.addDocument(DocumentFactory.newDocument(DocumentTranslator.KEY, id, DocumentTranslator.NAME, name));
    return this;
}
protected void removeAllBucketsFromUnorderedCollection( NodeKey parentDocKey ) { // should already have been loaded into the cache and the parent locked EditableDocument parentDoc = documentStore.edit(parentDocKey.toString(), false); assert parentDoc != null; EditableArray bucketsIds = parentDoc.getArray(BUCKETS); if (bucketsIds == null || bucketsIds.isEmpty()) { return; } for (Object bucketId : bucketsIds) { String bucketKey = bucketKey(parentDocKey.toString(), bucketId.toString()); documentStore.remove(bucketKey); } }
protected void removeAllBucketsFromUnorderedCollection( NodeKey parentDocKey ) { // should already have been loaded into the cache and the parent locked EditableDocument parentDoc = documentStore.edit(parentDocKey.toString(), false); assert parentDoc != null; EditableArray bucketsIds = parentDoc.getArray(BUCKETS); if (bucketsIds == null || bucketsIds.isEmpty()) { return; } for (Object bucketId : bucketsIds) { String bucketKey = bucketKey(parentDocKey.toString(), bucketId.toString()); documentStore.remove(bucketKey); } }
/**
 * Adds a federated segment entry ({KEY: externalNodeKey, NAME: name}) to the given
 * document's FEDERATED_SEGMENTS array, creating the array on first use.
 * <p>
 * Fix: the original created (and set on the document) an empty FEDERATED_SEGMENTS
 * array even when {@code externalNodeKey} was blank and nothing would be added.
 * {@code removeFederatedSegments} explicitly strips an empty FEDERATED_SEGMENTS
 * field, so an empty array should never be introduced in the first place; we now
 * bail out before touching the document.
 *
 * @param document the document to update; may not be null
 * @param externalNodeKey the key of the external node; ignored when blank
 * @param name the segment name stored alongside the key
 */
protected void addFederatedSegment( EditableDocument document,
                                    String externalNodeKey,
                                    String name ) {
    if (StringUtil.isBlank(externalNodeKey)) {
        // Nothing to add; avoid the side effect of persisting an empty segments array.
        return;
    }
    EditableArray federatedSegmentsArray = document.getArray(FEDERATED_SEGMENTS);
    if (federatedSegmentsArray == null) {
        federatedSegmentsArray = Schematic.newArray();
        document.set(FEDERATED_SEGMENTS, federatedSegmentsArray);
    }
    EditableDocument federatedSegment = DocumentFactory.newDocument(KEY, externalNodeKey, NAME, name);
    federatedSegmentsArray.add(federatedSegment);
}
protected void addFederatedSegment( EditableDocument document,
                                    String externalNodeKey,
                                    String name ) {
    // Make sure the FEDERATED_SEGMENTS array exists on the document.
    EditableArray segments = document.getArray(FEDERATED_SEGMENTS);
    if (segments == null) {
        segments = Schematic.newArray();
        document.set(FEDERATED_SEGMENTS, segments);
    }
    // Only a non-blank external key produces an actual segment entry.
    if (!StringUtil.isBlank(externalNodeKey)) {
        EditableDocument segment = DocumentFactory.newDocument(KEY, externalNodeKey, NAME, name);
        segments.add(segment);
    }
}
private void insertParentWithChildren( String rootKey, int childrenForEachThread ) {
    // One fresh random key per child; the root key is locked together with them.
    List<String> keys = IntStream.range(0, childrenForEachThread)
                                 .mapToObj(i -> UUID.randomUUID().toString())
                                 .collect(Collectors.toList());
    keys.add(rootKey);
    runInTransaction(() -> {
        if (!localStore.lockDocuments(keys.toArray(new String[keys.size()]))) {
            fail("Should've obtained key by now");
        } else {
            keys.remove(rootKey);
            EditableDocument rootDoc = localStore.edit(rootKey, false);
            EditableArray children = rootDoc.getArray("children");
            for (String childKey : keys) {
                // Record each new child both inside the parent and as its own document.
                EditableDocument child = Schematic.newDocument("name", Thread.currentThread().getName(),
                                                               "key", childKey);
                children.add(child);
                localStore.put(childKey, child);
            }
        }
    });
}
/**
 * Removes from the document's FEDERATED_SEGMENTS array every segment whose key is in
 * {@code externalNodeKeys}; if that empties the array, the field is removed entirely.
 * <p>
 * Fix: the original incremented {@code i} even after {@code remove(i)}, skipping the
 * element that removal shifted into slot {@code i} — two consecutive matching segments
 * left the second one behind. We now re-check the same index after a removal.
 *
 * @param federatedDocument the document to update; may not be null
 * @param externalNodeKeys the keys of the segments to remove
 */
protected void removeFederatedSegments( EditableDocument federatedDocument,
                                        Set<String> externalNodeKeys ) {
    if (!federatedDocument.containsField(FEDERATED_SEGMENTS)) {
        return;
    }
    EditableArray federatedSegments = federatedDocument.getArray(FEDERATED_SEGMENTS);
    for (int i = 0; i < federatedSegments.size(); i++) {
        Object federatedSegment = federatedSegments.get(i);
        assert federatedSegment instanceof Document;
        String segmentKey = getKey((Document)federatedSegment);
        if (externalNodeKeys.contains(segmentKey)) {
            federatedSegments.remove(i);
            i--; // removal shifted the next element into slot i; revisit it
        }
    }
    if (federatedSegments.isEmpty()) {
        // Don't leave an empty segments array on the document.
        federatedDocument.remove(FEDERATED_SEGMENTS);
    }
}
/**
 * Removes from the document's FEDERATED_SEGMENTS array every segment whose key is in
 * {@code externalNodeKeys}; if that empties the array, the field is removed entirely.
 * <p>
 * Fix: the original's forward scan incremented the index after {@code remove(i)},
 * skipping the element shifted into the freed slot, so adjacent matching segments
 * were not all removed. Iterating backwards makes removals safe because they never
 * shift elements that are still to be visited.
 *
 * @param federatedDocument the document to update; may not be null
 * @param externalNodeKeys the keys of the segments to remove
 */
protected void removeFederatedSegments( EditableDocument federatedDocument,
                                        Set<String> externalNodeKeys ) {
    if (!federatedDocument.containsField(FEDERATED_SEGMENTS)) {
        return;
    }
    EditableArray federatedSegments = federatedDocument.getArray(FEDERATED_SEGMENTS);
    for (int i = federatedSegments.size() - 1; i >= 0; i--) {
        Object federatedSegment = federatedSegments.get(i);
        assert federatedSegment instanceof Document;
        String segmentKey = getKey((Document)federatedSegment);
        if (externalNodeKeys.contains(segmentKey)) {
            federatedSegments.remove(i);
        }
    }
    if (federatedSegments.isEmpty()) {
        // Don't leave an empty segments array on the document.
        federatedDocument.remove(FEDERATED_SEGMENTS);
    }
}
protected void persistBucketRemovalChanges( NodeKey parentKey, Map<BucketId, Set<NodeKey>> removalsPerBucket ) { EditableDocument parentDoc = documentStore.edit(parentKey.toString(), false); // for each bucket, get the corresponding document (locking it) and make the children changes for (Map.Entry<BucketId, Set<NodeKey>> entry : removalsPerBucket.entrySet()) { BucketId bucketId = entry.getKey(); Set<NodeKey> removalsFromBucket = entry.getValue(); String bucketIdString = bucketId.toString(); String bucketKey = bucketKey(parentKey.toString(), bucketIdString); // we don't worry about locking the bucket keys because the parent key should've already been locked, acting therefore // as a monitor for all the buckets EditableDocument bucketDoc = documentStore.edit(bucketKey, false); assert bucketDoc != null; for (NodeKey toRemove : removalsFromBucket) { // keys are stored directly in the bucket bucketDoc.remove(toRemove.toString()); } if (bucketDoc.isEmpty()) { documentStore.remove(bucketKey); parentDoc.getArray(BUCKETS).remove((Object)bucketIdString); } } }
/**
 * Persists, per bucket, the removal of child node keys from an unordered collection's
 * bucket documents; buckets that become empty are deleted and unlinked from the parent.
 * <p>
 * NOTE(review): assumes the parent key was locked by the caller (the in-code comment
 * below relies on that) — confirm at call sites.
 *
 * @param parentKey the key of the parent document owning the buckets
 * @param removalsPerBucket for each bucket id, the set of node keys to remove from it
 */
protected void persistBucketRemovalChanges( NodeKey parentKey,
                                            Map<BucketId, Set<NodeKey>> removalsPerBucket ) {
    EditableDocument parentDoc = documentStore.edit(parentKey.toString(), false);
    // for each bucket, get the corresponding document (locking it) and make the children changes
    for (Map.Entry<BucketId, Set<NodeKey>> entry : removalsPerBucket.entrySet()) {
        BucketId bucketId = entry.getKey();
        Set<NodeKey> removalsFromBucket = entry.getValue();
        String bucketIdString = bucketId.toString();
        String bucketKey = bucketKey(parentKey.toString(), bucketIdString);
        // we don't worry about locking the bucket keys because the parent key should've already been locked, acting therefore
        // as a monitor for all the buckets
        EditableDocument bucketDoc = documentStore.edit(bucketKey, false);
        assert bucketDoc != null;
        for (NodeKey toRemove : removalsFromBucket) {
            // keys are stored directly in the bucket
            bucketDoc.remove(toRemove.toString());
        }
        if (bucketDoc.isEmpty()) {
            // the bucket has no children left: delete its document and unlink its id
            // from the parent's BUCKETS array (Object cast forces remove-by-value)
            documentStore.remove(bucketKey);
            parentDoc.getArray(BUCKETS).remove((Object)bucketIdString);
        }
    }
}
EditableArray array = urlProps.getArray(localName); for (Object value : values) { value = valueToDocument(value, null, null);
EditableArray array = urlProps.getArray(localName); for (Object value : values) { value = valueToDocument(value, null, null);
// NOTE(review): fragment of a larger test (enclosing method not visible here) —
// asserts that sequencer "A" exposes exactly one path expression, with the exact
// fixture value below. The string literal is runtime data; left byte-identical.
List<?> exprs = sequencerA.getArray(FieldName.PATH_EXPRESSIONS);
assertThat(exprs.size(), is(1));
assertThat((String)exprs.get(0), is("default://(*.cnd)/jcr:content[@jcr:data]"));
@Test
public void shouldNotSplitDocumentWithChildReferenceBlocksThatAreAlreadyTooSmall() throws Exception {
    NodeKey docKey = new NodeKey("source1works1-childB");
    transactions().begin();
    EditableDocument document = workspaceCache.documentStore().edit(docKey.toString(), true);
    EditableArray childRefs = document.getArray(DocumentTranslator.CHILDREN);
    String nextBlockKey = document.getDocument(DocumentTranslator.CHILDREN_INFO)
                                  .getString(DocumentTranslator.NEXT_BLOCK);
    // With these split parameters the optimizer should report that nothing changed.
    boolean wasChanged = optimizer.splitChildren(docKey, document, childRefs, 100, 50, true, nextBlockKey);
    transactions().commit();
    assertThat(wasChanged, is(false));
}
@Test public void shouldMergeDocumentWithTooSmallChildReferencesSegmentInFirstBlock() throws Exception { NodeKey key = new NodeKey("source1works1-childB"); transactions().begin(); EditableDocument doc = workspaceCache.documentStore().edit(key.toString(), true); EditableArray children = doc.getArray(DocumentTranslator.CHILDREN); String nextBlock = doc.getDocument(DocumentTranslator.CHILDREN_INFO).getString(DocumentTranslator.NEXT_BLOCK); optimizer.mergeChildren(key, doc, children, true, nextBlock); transactions().commit(); // Refetch the document, which should no longer be segmented ... transactions().begin(); doc = workspaceCache.documentStore().edit(key.toString(), true); assertInfo(key.toString(), 2, null, null, true, 0); children = doc.getArray(DocumentTranslator.CHILDREN); transactions().commit(); assertThat(children.size(), is(2)); assertChildren(doc, name("childC"), name("childD")); print(false); print(doc); }
// NOTE(review): fragment (enclosing method not visible here) — appends a fourth
// projection expression to the mock source's "projections" array. The projection
// string is test fixture data read at runtime; left byte-identical.
externalSources.getDocument("mock-source").getArray("projections").add("default:/projection4=> /doc1");
.getArray(RepositoryConfiguration.FieldName.PREDEFINED); predefinedWs.add("ws3"); predefinedWs.add("ws4");