/**
 * Get the kind of binary store that has been configured.
 *
 * @return the configured binary store type; never {@code null} because a transient
 *         store is assumed when no type is explicitly set
 */
public String getType() {
    // fall back to the transient (in-memory) store when the 'type' field is absent
    String storeType = binaryStorage.getString(FieldName.TYPE, FieldValue.BINARY_STORAGE_TYPE_TRANSIENT);
    return storeType;
}
/**
 * Read the named property from the document and interpret it as a boolean.
 *
 * @param document the document to read from; may not be null
 * @param propertyName the name of the property to look up
 * @param defaultValue the value returned when the property is absent
 * @return the property's boolean value: the value itself when stored as a {@link Boolean},
 *         otherwise the result of parsing its string form; {@code defaultValue} if missing
 */
private boolean propertyAsBoolean(Document document, String propertyName, boolean defaultValue) {
    Object value = document.get(propertyName);
    if (value == null) {
        return defaultValue;
    }
    if (value instanceof Boolean) {
        return (Boolean) value;
    }
    // Boolean.parseBoolean yields the same result as Boolean.valueOf(...) without
    // creating an intermediate boxed Boolean that is immediately unboxed
    return Boolean.parseBoolean(value.toString());
}
}
/**
 * Get how often optimization should run, expressed in hours.
 *
 * @return the optimization interval in hours; never null (a default applies when unset)
 */
public int getIntervalInHours() {
    int hours = optimization.getInteger(FieldName.INTERVAL_IN_HOURS, Default.OPTIMIZATION_INTERVAL_IN_HOURS);
    return hours;
}
/**
 * Determine whether clients are permitted to create new workspaces.
 *
 * @return {@code true} if workspace creation is allowed, {@code false} otherwise;
 *         the default applies when the workspaces document or field is absent
 */
public boolean isCreatingWorkspacesAllowed() {
    Document workspaces = doc.getDocument(FieldName.WORKSPACES);
    return workspaces == null ? Default.ALLOW_CREATION
                              : workspaces.getBoolean(FieldName.ALLOW_CREATION, Default.ALLOW_CREATION);
}
/**
 * Get the configured size of the workspace cache.
 *
 * @return the workspace cache size, or the default when the workspaces document
 *         or the cache-size field is not configured
 */
public int getWorkspaceCacheSize() {
    // renamed local from 'storage' to 'workspaces' for consistency with the sibling
    // accessors that read the same FieldName.WORKSPACES document
    Document workspaces = doc.getDocument(FieldName.WORKSPACES);
    if (workspaces != null) {
        return workspaces.getInteger(FieldName.WORKSPACE_CACHE_SIZE, Default.WORKSPACE_CACHE_SIZE);
    }
    return Default.WORKSPACE_CACHE_SIZE;
}
/**
 * Get the workspace name used for sessions that do not explicitly name a workspace.
 *
 * @return the default workspace name; never null (a built-in default applies when unset)
 */
public String getDefaultWorkspaceName() {
    Document workspaces = doc.getDocument(FieldName.WORKSPACES);
    return workspaces == null ? Default.DEFAULT
                              : workspaces.getString(FieldName.DEFAULT, Default.DEFAULT);
}
public ChildReferencesInfo getChildReferencesInfo( Document document ) { // Now look at the 'childrenInfo' document for info about the next block ... Document childrenInfo = document.getDocument(CHILDREN_INFO); if (childrenInfo != null) { long totalSize = childrenInfo.getLong(COUNT, 0L); long blockSize = childrenInfo.getLong(BLOCK_SIZE, 0L); String nextBlockKey = childrenInfo.getString(NEXT_BLOCK); String lastBlockKey = childrenInfo.getString(LAST_BLOCK, nextBlockKey); return new ChildReferencesInfo(totalSize, blockSize, nextBlockKey, lastBlockKey); } return null; }
/**
 * Assert that two documents are deeply equal: same number of fields, and every
 * field of the first matches the corresponding field of the second, recursing
 * into nested documents.
 *
 * @param doc1 the expected document
 * @param doc2 the actual document
 */
protected void assertMatch( Document doc1, Document doc2 ) {
    assertEquals(doc1.size(), doc2.size());
    for (Document.Field field : doc1.fields()) {
        String name = field.getName();
        if (field.getValue() instanceof Document) {
            // nested documents are compared structurally, not by reference
            assertMatch(field.getValueAsDocument(), doc2.getDocument(name));
        } else {
            assertEquals(field.getValue(), doc2.get(name));
        }
    }
}
/**
 * Build an ordered map of child key to child name from the federated document's
 * children array.
 *
 * @return a {@link LinkedHashMap} preserving the children's document order;
 *         empty (never null) when the document has no children field
 */
@Override
public LinkedHashMap<String, Name> getChildrenMap() {
    LinkedHashMap<String, Name> result = new LinkedHashMap<String, Name>();
    if (federatedDocument.containsField(DocumentTranslator.CHILDREN)) {
        for (Object entry : federatedDocument.getArray(DocumentTranslator.CHILDREN)) {
            assert entry instanceof Document;
            Document childDoc = (Document) entry;
            String key = translator.getKey(childDoc);
            Name name = translator.getNameFactory().create(childDoc.get(DocumentTranslator.NAME));
            result.put(key, name);
        }
    }
    return result;
}
@Override public Document execute( Repository repository, Git git, CallSpecification spec, DocumentWriter writer, Values values ) { return root.clone(); // return a copy } }
protected BucketedChildReferences( Document parent, DocumentTranslator translator ) { // the power of 16 which indicates how many buckets this.bucketIdLength = parent.getInteger(DocumentConstants.BUCKET_ID_LENGTH); Long size = parent.getLong(DocumentConstants.SIZE); this.size = size != null ? size : 0; this.parentKey = translator.getKey(parent); this.translator = translator; List<?> bucketsArray = parent.getArray(DocumentConstants.BUCKETS); if (bucketsArray == null) { this.bucketIds = Collections.emptySet(); } else { this.bucketIds = new HashSet<>(bucketsArray.size()); for (Object bucketId : bucketsArray) { this.bucketIds.add(new BucketId(bucketId.toString())); } } this.rangeBucketsById = new LinkedHashMap<>(bucketIds.size()); }
/**
 * Determine whether reindexing runs asynchronously.
 *
 * @return {@code true} when reindexing should be performed asynchronously (the default
 *         when no reindexing configuration is present), {@code false} otherwise
 */
public boolean isAsync() {
    if (reindexing == null) {
        // no reindexing configuration at all: async is the default
        return true;
    }
    return reindexing.getBoolean(FieldName.REINDEXING_ASYNC, true);
}
/** * Get the configuration information for the anonymous authentication provider. * * @return the anonymous provider configuration information; null if anonymous users are not allowed */ public AnonymousSecurity getAnonymous() { Document anonymous = security.getDocument(FieldName.ANONYMOUS); if (anonymous != null && anonymous.size() == 1) { // Check the 'roleNames' field ... List<?> roles = anonymous.getArray(FieldName.ANONYMOUS_ROLES); if (roles != null && roles.isEmpty()) { // Specified empty roles, so this is disabling anonymous logins ... return null; } } if (anonymous == null) anonymous = Schematic.newDocument(); return new AnonymousSecurity(anonymous); }
protected Properties datasourceConfig() { Properties datasourceCfg = new Properties(); EditableDocument localCopy = config.edit(true); // remove the generic configuration fields ALL_FIELDS.forEach(localCopy::remove); // convert each of the properties to their Hikari names datasourceCfg.setProperty("jdbcUrl", connectionUrl); datasourceCfg.setProperty("driverClassName", config.getString(DRIVER, DEFAULT_DRIVER)); datasourceCfg.setProperty("username", config.getString(USERNAME, DEFAULT_USERNAME)); datasourceCfg.setProperty("password", config.getString(PASSWORD, DEFAULT_PASSWORD)); datasourceCfg.setProperty("maximumPoolSize", propertyAsString(config, POOL_SIZE, DEFAULT_MAX_POOL_SIZE)); datasourceCfg.setProperty("minimumIdle", DEFAULT_MIN_IDLE); datasourceCfg.setProperty("idleTimeout", DEFAULT_IDLE_TIMEOUT); // pass all the other fields as they are (this will also overwrite any of the previous values if they are explicitly configured) localCopy.fields().forEach(field -> datasourceCfg.setProperty(field.getName(), field.getValue().toString())); return datasourceCfg; }
/**
 * Write the document to the writer in brace-delimited form, separating fields
 * with commas and padding with single spaces (e.g. "{ a : 1 , b : 2 }").
 *
 * @param bson the document to serialize; may not be null
 * @param writer the destination writer; may not be null
 * @throws IOException if the writer fails
 */
protected void write( Document bson, Writer writer ) throws IOException {
    writer.append('{').append(' ');
    boolean first = true;
    for (Field field : bson.fields()) {
        if (!first) {
            // separate consecutive fields with ", "
            writer.append(',').append(' ');
        }
        write(field, writer);
        writer.append(' ');
        first = false;
    }
    writer.append('}');
}
/**
 * Extract the CONTENT document from a schematic entry document.
 *
 * @param entryDocument a {@link Document} representing a schematic entry; may not be null
 * @return the nested content {@link Document}, or {@code null} when the entry has no
 *         {@link org.modeshape.schematic.SchematicEntry.FieldName#CONTENT} field
 */
static Document content(Document entryDocument) {
    Document contentDoc = entryDocument.getDocument(FieldName.CONTENT);
    return contentDoc;
}
/**
 * Verify putIfAbsent semantics: an existing key keeps its stored content (the
 * locally edited value is not written), while a new key is inserted and
 * putIfAbsent returns null for it.
 */
@Test
public void shouldPutIfAbsent() throws Exception {
    SchematicEntry existing = writeSingleEntry();
    EditableDocument edited = existing.content().edit(true);
    edited.setNumber(VALUE_FIELD, 100);
    // the key already exists, so the stored entry must be returned unchanged
    SchematicEntry stored = simulateTransaction(() -> db.putIfAbsent(existing.id(), existing.content()));
    assertNotNull(stored);
    assertEquals(1, (int) stored.content().getInteger(VALUE_FIELD));
    // a brand-new key must be inserted, with putIfAbsent returning null
    SchematicEntry fresh = SchematicEntry.create(UUID.randomUUID().toString(), DEFAULT_CONTENT);
    assertNull(simulateTransaction(() -> db.putIfAbsent(fresh.id(), fresh.content())));
    stored = db.getEntry(fresh.id());
    assertNotNull(stored);
}
/**
 * Get the CND files that should be loaded at startup.
 *
 * @return a {@code non-null} list with the string form of each configured node type;
 *         empty when no node types are configured
 */
public List<String> getNodeTypes() {
    List<String> nodeTypes = new ArrayList<String>();
    List<?> configured = doc.getArray(FieldName.NODE_TYPES);
    if (configured != null) {
        configured.forEach(nodeType -> nodeTypes.add(nodeType.toString()));
    }
    return nodeTypes;
}
/**
 * Collect the child documents of the federated document.
 *
 * @return a new list with each entry of the children array cast to {@link Document};
 *         empty (never null) when there is no children field
 */
@Override
public List<Document> getChildren() {
    List<Document> result = new ArrayList<Document>();
    if (federatedDocument.containsField(DocumentTranslator.CHILDREN)) {
        for (Object entry : federatedDocument.getArray(DocumentTranslator.CHILDREN)) {
            assert entry instanceof Document;
            result.add((Document) entry);
        }
    }
    return result;
}