/**
 * Writes the monitoring-enabled flag into the "monitoring" section of the repository
 * configuration document, creating that section if it does not yet exist.
 *
 * @param enableMonitoring true to enable repository monitoring, false to disable it
 * @param configDoc the editable repository configuration document; may not be null
 */
private void enableMonitoring( boolean enableMonitoring,
                               EditableDocument configDoc ) {
    EditableDocument monitoring = configDoc.getOrCreateDocument(FieldName.MONITORING);
    monitoring.set(FieldName.MONITORING_ENABLED, enableMonitoring);
}
// NOTE(review): this brace closes an enclosing type whose opening is not visible in this chunk.
}
protected void addSequencer( EditableDocument doc, String desc, String type, String... pathExpressions ) { EditableDocument sequencing = doc.getOrCreateDocument(FieldName.SEQUENCING); EditableDocument sequencers = sequencing.getOrCreateDocument(FieldName.SEQUENCERS); // Create the sequencer doc ... String name = desc; EditableDocument sequencer = Schematic.newDocument(); sequencer.set(FieldName.NAME, name); sequencer.set(FieldName.CLASSNAME, type); sequencer.setArray(FieldName.PATH_EXPRESSIONS, (Object[])pathExpressions); // Set it on the 'sequencers' doc ... sequencers.set(name, sequencer); }
/**
 * Records paging metadata on this writer's document: the total child count, the block
 * size, and a pointer to the next page of child references.
 *
 * @param parentId the identifier of the parent document
 * @param nextPageOffset the offset of the next page within the parent's children
 * @param blockSize the number of children held per block
 * @param totalChildCount the total number of children of the parent
 * @return this writer, for method chaining
 */
@Override
public DocumentWriter addPage( String parentId,
                               String nextPageOffset,
                               long blockSize,
                               long totalChildCount ) {
    EditableDocument info = document().getOrCreateDocument(DocumentTranslator.CHILDREN_INFO);
    info.setNumber(DocumentTranslator.COUNT, totalChildCount);
    info.setNumber(DocumentTranslator.BLOCK_SIZE, blockSize);
    // The next-block pointer is the string form of the page key for the following page ...
    info.setString(DocumentTranslator.NEXT_BLOCK, new PageKey(parentId, nextPageOffset, blockSize).toString());
    return this;
}
/**
 * Writes the clustering settings into the "clustering" section of the configuration
 * document, creating the section if needed. The cluster name is always written; the
 * configuration and locking values are written only when non-blank.
 *
 * @param clusterName the name of the cluster
 * @param clusterConfig the clustering configuration; may be blank
 * @param clusterLocking the cluster locking setting; may be blank
 * @param configDoc the editable repository configuration document; may not be null
 */
private void parseClustering( String clusterName,
                              String clusterConfig,
                              String clusterLocking,
                              EditableDocument configDoc ) {
    EditableDocument clusteringDoc = configDoc.getOrCreateDocument(FieldName.CLUSTERING);
    clusteringDoc.setString(FieldName.CLUSTER_NAME, clusterName);
    // Optional values: only set when the caller supplied something meaningful ...
    if (!StringUtil.isBlank(clusterConfig)) {
        clusteringDoc.setString(FieldName.CLUSTER_CONFIGURATION, clusterConfig);
    }
    if (!StringUtil.isBlank(clusterLocking)) {
        clusteringDoc.setString(FieldName.CLUSTER_LOCKING, clusterLocking);
    }
}
Set<BinaryKey> usedBinaryKeys) { EditableDocument properties = document.getDocument(PROPERTIES); if (properties == null) { properties = document.setDocument(PROPERTIES); EditableDocument urlProps = properties.getDocument(namespaceUri); if (urlProps == null) { urlProps = properties.setDocument(namespaceUri); Object oldValue = urlProps.get(localName); decrementBinaryReferenceCount(oldValue, unusedBinaryKeys, usedBinaryKeys); urlProps.setArray(localName); } else if (property.isMultiple()) { EditableArray values = Schematic.newArray(property.size()); values.add(valueToDocument(v, unusedBinaryKeys, usedBinaryKeys)); urlProps.setArray(localName, values); } else { assert property.isSingle(); Object value = valueToDocument(property.getFirstValue(), unusedBinaryKeys, usedBinaryKeys); if (value == null) { urlProps.remove(localName); } else { urlProps.set(localName, value);
rootDoc.setArray(DocumentTranslator.CHILDREN, Schematic.newArray(systemChildRefDoc)); EditableDocument childInfo = rootDoc.getOrCreateDocument(DocumentTranslator.CHILDREN_INFO); childInfo.setNumber(DocumentTranslator.COUNT, 1); String parent = systemDoc.getString(DocumentTranslator.PARENT); EditableArray parents = systemDoc.getOrCreateArray(DocumentTranslator.PARENT); if (parent != null) { parents.add(parent);
/**
 * Parses the workspace-related attributes from the subsystem model and writes them into
 * the "workspaces" section of the repository configuration document.
 * <p>
 * Bug fix: the handling of {@code WORKSPACES_INITIAL_CONTENT} was previously nested inside
 * the {@code PREDEFINED_WORKSPACE_NAMES} branch, so per-workspace initial content was
 * silently ignored unless predefined workspace names were also configured. It is now an
 * independent top-level check.
 *
 * @param context the operation context used to resolve attribute values
 * @param model the model node holding the workspace settings
 * @param configDoc the editable repository configuration document
 * @param additionalClasspathEntries list of extra classpath entries (currently unused here,
 *        kept for interface compatibility)
 * @return the editable "workspaces" document that was populated
 * @throws OperationFailedException if an attribute cannot be resolved
 */
private EditableDocument parseWorkspaces( OperationContext context,
                                          ModelNode model,
                                          EditableDocument configDoc,
                                          List<String> additionalClasspathEntries ) throws OperationFailedException {
    EditableDocument workspacesDoc = configDoc.getOrCreateDocument(FieldName.WORKSPACES);
    boolean allowWorkspaceCreation = attribute(context, model, ModelAttributes.ALLOW_WORKSPACE_CREATION).asBoolean();
    String defaultWorkspaceName = attribute(context, model, ModelAttributes.DEFAULT_WORKSPACE).asString();
    workspacesDoc.set(FieldName.ALLOW_CREATION, allowWorkspaceCreation);
    workspacesDoc.set(FieldName.DEFAULT, defaultWorkspaceName);
    if (model.hasDefined(ModelKeys.WORKSPACES_CACHE_SIZE)) {
        workspacesDoc.set(FieldName.WORKSPACE_CACHE_SIZE, model.get(ModelKeys.WORKSPACES_CACHE_SIZE).asInt());
    }
    if (model.hasDefined(ModelKeys.PREDEFINED_WORKSPACE_NAMES)) {
        for (ModelNode name : model.get(ModelKeys.PREDEFINED_WORKSPACE_NAMES).asList()) {
            workspacesDoc.getOrCreateArray(FieldName.PREDEFINED).add(name.asString());
        }
    }
    // FIXED: evaluate per-workspace initial content independently of predefined workspace names.
    if (model.hasDefined(ModelKeys.WORKSPACES_INITIAL_CONTENT)) {
        EditableDocument initialContentDocument = workspacesDoc.getOrCreateDocument(FieldName.INITIAL_CONTENT);
        List<ModelNode> workspacesInitialContent = model.get(ModelKeys.WORKSPACES_INITIAL_CONTENT).asList();
        for (ModelNode initialContent : workspacesInitialContent) {
            Property initialContentProperty = initialContent.asProperty();
            initialContentDocument.set(initialContentProperty.getName(), initialContentProperty.getValue().asString());
        }
    }
    if (model.hasDefined(ModelKeys.DEFAULT_INITIAL_CONTENT)) {
        EditableDocument initialContentDocument = workspacesDoc.getOrCreateDocument(FieldName.INITIAL_CONTENT);
        initialContentDocument.set(FieldName.DEFAULT_INITIAL_CONTENT, model.get(ModelKeys.DEFAULT_INITIAL_CONTENT).asString());
    }
    return workspacesDoc;
}
EditableDocument properties = document.getDocument(PROPERTIES); if (properties == null) { EditableDocument urlProps = properties.getDocument(namespaceUri); if (urlProps == null) { Object propValue = urlProps.get(localName); decrementBinaryReferenceCount(propValue, unusedBinaryKeys, usedBinaryKeys); EditableArray array = urlProps.getArray(localName); for (Object value : values) { value = valueToDocument(value, null, null); if (value.equals(propValue)) { urlProps.remove(localName); break; if (urlProps.isEmpty()) { properties.remove(namespaceUri);
EditableDocument referrers = document.getDocument(REFERRERS); List<NodeKey> strongAdded = changes.getAddedReferrers(ReferenceType.STRONG); List<NodeKey> weakAdded = changes.getAddedReferrers(ReferenceType.WEAK); referrers = document.setDocument(REFERRERS); if (!strongAdded.isEmpty()) { Set<NodeKey> strongAddedSet = new HashSet<NodeKey>(strongAdded); EditableDocument strong = referrers.setDocument(STRONG); for (NodeKey key : strongAddedSet) { strong.set(key.toString(), Collections.frequency(strongAdded, key)); EditableDocument weak = referrers.setDocument(WEAK); for (NodeKey key : weakAddedSet) { weak.set(key.toString(), Collections.frequency(weakAdded, key)); Map<NodeKey, Integer> strongCount = computeReferrersCountDelta(strongAdded, strongRemoved); if (!strongCount.isEmpty()) { EditableDocument strong = referrers.getOrCreateDocument(STRONG); updateReferrers(strong, strongCount); Map<NodeKey, Integer> weakCount = computeReferrersCountDelta(weakAdded, weakRemoved); if (!weakCount.isEmpty()) { EditableDocument weak = referrers.getOrCreateDocument(WEAK); updateReferrers(weak, weakCount);
config.getOrCreateDocument(FieldName.STORAGE).setDocument(FieldName.BINARY_STORAGE, binaryConfig); if (config.containsField(FieldName.JOURNALING)) { if (StringUtil.isBlank(this.journalRelativeTo)) { this.journalRelativeTo = getDataDirectoryPathInjector().getValue(); config.getDocument(FieldName.JOURNALING).setString(FieldName.JOURNAL_LOCATION, finalJournalLocation);
EditableArray children = document.getArray(CHILDREN); if (children == null) { EditableDocument info = document.getDocument(CHILDREN_INFO); boolean selfContained = true; if (info != null) { selfContained = !info.containsField(NEXT_BLOCK); NodeKey docKey = key; while (doc != null) { EditableDocument docInfo = doc.getDocument(CHILDREN_INFO); String nextKey = docInfo != null ? docInfo.getString(NEXT_BLOCK) : null; children = doc.getArray(CHILDREN); int count = children.size(); boolean isFirst = doc == document; info.setString(LAST_BLOCK, docKey.toString());
Integer bucketIdLength = parentDoc.getInteger(BUCKET_ID_LENGTH); assert bucketIdLength != null; String parentKey = getKey(parentDoc); bucketDoc.setString(key, name); parentDoc.getOrCreateArray(BUCKETS).add(bucketId.toString()); Long currentSize = parentDoc.getLong(SIZE); if (currentSize == null) { parentDoc.setNumber(SIZE, totalAdditions); } else { parentDoc.setNumber(SIZE, currentSize + totalAdditions);
EditableDocument configDoc ) throws OperationFailedException { if (model.hasDefined(ModelKeys.JOURNALING)) { EditableDocument journaling = configDoc.getOrCreateDocument(FieldName.JOURNALING); journaling.setBoolean(FieldName.JOURNAL_ENABLED, enabled); journaling.setNumber(FieldName.MAX_DAYS_TO_KEEP_RECORDS, maxDaysToKeepRecords); journaling.setBoolean(FieldName.ASYNC_WRITES_ENABLED, asyncWrites); journaling.setString(FieldName.THREAD_POOL, gcThreadPool); journaling.setString(FieldName.INITIAL_TIME, gcInitialTime);
/**
 * Applies a set of referrer-count deltas to the given referrers document. For each key,
 * the delta is added to any existing count; entries whose resulting count drops to zero
 * or below are removed, and new entries are created only for positive deltas.
 *
 * @param owningDocument the editable document holding referrer counts keyed by node-key string
 * @param referrersCountDelta map of node key to the signed change in its referrer count
 */
private void updateReferrers( EditableDocument owningDocument,
                              Map<NodeKey, Integer> referrersCountDelta ) {
    // Iterate entries directly instead of keySet()+get() to avoid a second lookup per key.
    for (Map.Entry<NodeKey, Integer> entry : referrersCountDelta.entrySet()) {
        String keyString = entry.getKey().toString();
        int delta = entry.getValue();
        Integer existingCount = (Integer)owningDocument.get(keyString);
        if (existingCount != null) {
            int actualCount = existingCount + delta;
            if (actualCount <= 0) {
                // No referrers left; drop the entry entirely ...
                owningDocument.remove(keyString);
            } else {
                owningDocument.set(keyString, actualCount);
            }
        } else if (delta > 0) {
            // Only create a new entry for a net-positive count ...
            owningDocument.set(keyString, delta);
        }
    }
}
EditableDocument childrenInfo = writer.document().getDocument(DocumentTranslator.CHILDREN_INFO); if (childrenInfo != null) { String nextBlockKey = childrenInfo.getString(DocumentTranslator.NEXT_BLOCK); if (!StringUtil.isBlank(nextBlockKey)) { childrenInfo.setString(DocumentTranslator.NEXT_BLOCK, documentIdToNodeKeyString(sourceName, nextBlockKey)); String lastBlockKey = childrenInfo.getString(DocumentTranslator.LAST_BLOCK); if (!StringUtil.isBlank(lastBlockKey)) { childrenInfo.setString(DocumentTranslator.LAST_BLOCK, documentIdToNodeKeyString(sourceName, lastBlockKey));
EditableDocument extractors = textExtracting.getOrCreateDocument(pathToContainer.get(2)); for (String configuredExtractorName : extractors.keySet()) { EditableDocument extractor = (EditableDocument)extractors.get(configuredExtractorName); extractor.set(fieldName, rawValue); break;
EditableDocument sequencers = sequencing.getOrCreateArray(pathToContainer.get(1)); for (String configuredSequencerName : sequencers.keySet()) { EditableDocument sequencer = (EditableDocument)sequencers.get(configuredSequencerName); sequencer.set(fieldName, rawValue); break;
/**
 * Verifies that the optimizer leaves a document untouched when its child-reference
 * blocks are already smaller than the configured split threshold.
 */
@Test
public void shouldNotSplitDocumentWithChildReferenceBlocksThatAreAlreadyTooSmall() throws Exception {
    NodeKey nodeKey = new NodeKey("source1works1-childB");
    transactions().begin();
    // Open the document for editing and collect the pieces splitChildren() needs ...
    EditableDocument document = workspaceCache.documentStore().edit(nodeKey.toString(), true);
    EditableArray childRefs = document.getArray(DocumentTranslator.CHILDREN);
    String nextBlockKey = document.getDocument(DocumentTranslator.CHILDREN_INFO).getString(DocumentTranslator.NEXT_BLOCK);
    boolean wasChanged = optimizer.splitChildren(nodeKey, document, childRefs, 100, 50, true, nextBlockKey);
    transactions().commit();
    // Blocks are already under the threshold, so nothing should have been split ...
    assertThat(wasChanged, is(false));
}
/**
 * Applies all outstanding upgrades to this repository and records the result in the
 * repository-info document of the local document store (timestamp, last applied upgrade
 * id, and removal of the in-progress upgrader marker). Any failure is logged and added
 * to the supplied problems collector rather than propagated, so the repository can still
 * come online.
 *
 * @param resources the upgrade context providing resources and the problems collector
 */
private void doUpgrade(final Upgrades.Context resources) {
    try {
        LOGGER.debug("Upgrading repository '{0}'", name);
        // Run every upgrade newer than the last recorded one; returns the new high-water mark ...
        lastUpgradeId = upgrades.applyUpgradesSince(lastUpgradeId, resources);
        LOGGER.debug("Recording upgrade completion in repository '{0}'", name);
        LocalDocumentStore store = documentStore().localStore();
        EditableDocument editor = store.edit(REPOSITORY_INFO_KEY, true);
        DateTime now = context().getValueFactories().getDateFactory().create();
        editor.setDate(REPOSITORY_UPGRADED_AT_FIELD_NAME, now.toDate());
        editor.setNumber(REPOSITORY_UPGRADE_ID_FIELD_NAME, lastUpgradeId);
        // Clear the "upgrader in progress" marker now that the upgrade has completed ...
        editor.remove(REPOSITORY_UPGRADER_FIELD_NAME);
        LOGGER.debug("Repository '{0}' is fully upgraded", name);
    } catch (Throwable err) {
        // We do NOT want an error during upgrade to prevent the repository from coming online.
        // Therefore, we need to catch any exceptions here and log them, but continue ...
        // NOTE(review): both 'LOGGER' and 'logger' are referenced in this method — confirm both fields exist.
        logger.error(err, JcrI18n.failureDuringUpgradeOperation, getName(), err);
        resources.getProblems().addError(err, JcrI18n.failureDuringUpgradeOperation, getName(), err);
    }
}