/**
 * Converts an Avro-serialized scope into its domain {@link Scope} equivalent.
 *
 * @param scope the Avro scope, may be null
 * @return the matching Scope, or null when the input is null
 */
public Scope convert(AvroScope scope) {
    // Map by constant name rather than ordinal(): relying on declaration order
    // silently breaks as soon as either enum is reordered or extended.
    return scope == null ? null : Scope.valueOf(scope.name());
}
/**
 * Returns the subset of the given fields whose field type has the
 * VERSIONED_MUTABLE scope.
 *
 * @param fields field values keyed by qualified field name
 * @param fieldTypes used to look up the field type (and thus the scope) of each field
 * @return a new map containing only the versioned-mutable fields
 */
private Map<QName, Object> filterMutableFields(Map<QName, Object> fields, FieldTypes fieldTypes)
        throws RecordException, TypeException, InterruptedException {
    Map<QName, Object> result = new HashMap<QName, Object>();
    for (QName fieldName : fields.keySet()) {
        FieldType type = fieldTypes.getFieldType(fieldName);
        if (Scope.VERSIONED_MUTABLE.equals(type.getScope())) {
            result.put(fieldName, fields.get(fieldName));
        }
    }
    return result;
}
/**
 * Builds a FieldType from an HBase result row: name, value type and scope are
 * all read from the DATA column family. The scope is stored as the enum
 * constant name and decoded via {@link Scope#valueOf}.
 *
 * @param id the schema id of the field type (the row key)
 * @param result the HBase row holding the field type's columns
 */
private FieldType extractFieldType(SchemaId id, Result result) throws RepositoryException, InterruptedException {
    NavigableMap<byte[], byte[]> dataFamily = result.getFamilyMap(TypeCf.DATA.bytes);
    QName name = decodeName(dataFamily.get(TypeColumn.FIELDTYPE_NAME.bytes));
    ValueType valueType = decodeValueType(dataFamily.get(TypeColumn.FIELDTYPE_VALUETYPE.bytes));
    byte[] scopeBytes = dataFamily.get(TypeColumn.FIELDTYPE_SCOPE.bytes);
    Scope scope = Scope.valueOf(Bytes.toString(scopeBytes));
    return new FieldTypeImpl(id, valueType, name, scope);
}
/**
 * Converts a domain {@link Scope} into its Avro-serialized equivalent.
 *
 * @param scope the domain scope, may be null
 * @return the matching AvroScope, or null when the input is null
 */
public AvroScope convert(Scope scope) {
    // Map by constant name rather than ordinal(): relying on declaration order
    // silently breaks as soon as either enum is reordered or extended.
    return scope == null ? null : AvroScope.valueOf(scope.name());
}
/**
 * Serializes a field type to a JSON object containing its id, optionally its
 * qualified name, its scope (lower-cased) and its value type.
 *
 * @param fieldType the field type to serialize
 * @param namespaces namespace context used to shorten qualified names
 * @param includeName whether to include the "name" property
 * @return the JSON representation of the field type
 */
public static ObjectNode toJson(FieldType fieldType, Namespaces namespaces, boolean includeName) {
    ObjectNode fieldNode = JsonNodeFactory.instance.objectNode();
    fieldNode.put("id", fieldType.getId().toString());
    if (includeName) {
        fieldNode.put("name", QNameConverter.toJson(fieldType.getName(), namespaces));
    }
    // Locale.ROOT keeps the serialized form stable regardless of the JVM's
    // default locale (e.g. Turkish dotted/dotless 'i' lower-casing rules).
    fieldNode.put("scope", fieldType.getScope().toString().toLowerCase(java.util.Locale.ROOT));
    fieldNode.put("valueType", ValueTypeNSConverter.toJson(fieldType.getValueType().getName(), namespaces));
    return fieldNode;
}
}
// Persist the field type's attributes into the DATA column family: value type,
// scope (stored as the enum constant name, matching the Scope.valueOf decoding
// on read) and qualified name.
put.add(TypeCf.DATA.bytes, TypeColumn.FIELDTYPE_VALUETYPE.bytes, encodeValueType(fieldType.getValueType())); put.add(TypeCf.DATA.bytes, TypeColumn.FIELDTYPE_SCOPE.bytes, Bytes .toBytes(fieldType.getScope().name())); put.add(TypeCf.DATA.bytes, TypeColumn.FIELDTYPE_NAME.bytes, nameBytes);
public static Map<Scope, Set<FieldType>> getFieldTypeAndScope(Set<SchemaId> fieldIds, FieldFilter fieldFilter, TypeManager typeManager) throws RepositoryException, InterruptedException { // Could be written more elegantly using Multimaps.index, but we want to limit dependencies Map<Scope, Set<FieldType>> result = new EnumMap<Scope, Set<FieldType>>(Scope.class); for (Scope scope : Scope.values()) { result.put(scope, new HashSet<FieldType>()); } for (SchemaId fieldId : fieldIds) { FieldType fieldType; try { fieldType = typeManager.getFieldTypeById(fieldId); } catch (FieldTypeNotFoundException e) { // A field whose field type does not exist: skip it continue; } if (fieldFilter.accept(fieldType)) { result.get(fieldType.getScope()).add(fieldType); } } return result; }
// Fragment of an equals() implementation: objects with differing scope
// fields are not equal.
return false; } else if (!scope.equals(other.scope)) { return false;
// Parse the comma-separated scope attribute into an EnumSet. NOTE(review):
// Scope.valueOf throws IllegalArgumentException on an unknown scope name —
// presumably validated/reported by the surrounding code; confirm at the caller.
matchScopes = EnumSet.noneOf(Scope.class); for (String scope : COMMA_SPLITTER.split(matchScopeAttr)) { matchScopes.add(Scope.valueOf(scope));
private void setRecordTypesAfterUpdate(Record record, Record originalRecord, Set<Scope> changedScopes) { // The returned record object after an update should always contain complete record type information for // all the scopes for (Scope scope : Scope.values()) { // For any unchanged or non-existing scope, we reset the record type information to the one of the // original record, so that the returned record object corresponds to the repository state (= same // as when one would do a fresh read) // // Copy over the original record type of a scope if: // - the scope was unchanged. If it was changed, the record type will already have been filled in // by calculateRecordChanges. // - for the non-versioned scope, only copy it over if none of the scopes changed, because the // record type of the non-versioned scope is always brought up to date in case any scope is changed if (!changedScopes.contains(scope) && (scope != Scope.NON_VERSIONED || changedScopes.isEmpty())) { record.setRecordType(scope, originalRecord.getRecordTypeName(scope), originalRecord.getRecordTypeVersion(scope)); } } }
// Fragment: only non-versioned fields, or versioned-mutable fields during a
// mutable update, reach this branch. When an original value exists its blob
// references are collected; non-versioned values are written at version 1L.
if (Scope.NON_VERSIONED.equals(scope) || (mutableUpdate && Scope.VERSIONED_MUTABLE.equals(scope))) { if (originalValue != null) { Set<BlobReference> previouslyReferencedBlobs = getReferencedBlobs(fieldType, originalValue); if (Scope.NON_VERSIONED.equals(scope)) { fieldValueWriter.addFieldValue(fieldType, newValue, metadata, 1L); } else {
// Prepare one (QName-ordered) map per scope, plus a map for fields whose
// scope could not (yet) be determined.
Map<QName, Object> undeterminedFields = new TreeMap<QName, Object>(QNAME_COMP); for (Scope scope : Scope.values()) { fieldsByScope.put(scope, new TreeMap<QName, Object>(QNAME_COMP));
// Handle the non-versioned scope separately from the versioned scopes.
if (Scope.NON_VERSIONED.equals(scope)) {
/**
 * Serializes an IdRecord: first the plain record, then the field-id-to-name
 * mapping (size-prefixed), then one nullable record type schema id per scope,
 * written in Scope.values() order. The write order is the wire format and must
 * mirror readIdRecord exactly.
 *
 * @param record the id record to serialize
 * @param output destination of the serialized bytes
 * @param repository used by the nested record serialization
 */
public static final void writeIdRecord(IdRecord record, DataOutput output, LRepository repository) throws RepositoryException, InterruptedException { write(record, output, repository); output.writeVInt(record.getFieldIdToNameMapping().size()); for (Map.Entry<SchemaId, QName> entry : record.getFieldIdToNameMapping().entrySet()) { writeBytes(entry.getKey().getBytes(), output); writeQName(entry.getValue(), output); } for (Scope scope : Scope.values()) { SchemaId schemaId = record.getRecordTypeId(scope); writeNullOrBytes(schemaId != null ? schemaId.getBytes() : null, output); } }
/**
 * Verifies that the immutable attributes of a field type (value type and scope)
 * are not being changed by an update.
 *
 * @param userFieldType the field type as supplied by the user
 * @param latestFieldType the current state of the field type in the repository
 * @throws FieldTypeUpdateException when the value type or scope differs
 */
private void checkImmutableFieldsCorrespond(FieldType userFieldType, FieldType latestFieldType) throws FieldTypeUpdateException {
    ValueType newValueType = userFieldType.getValueType();
    ValueType oldValueType = latestFieldType.getValueType();
    if (!newValueType.equals(oldValueType)) {
        throw new FieldTypeUpdateException("Changing the valueType of a fieldType '" + latestFieldType.getId()
                + "' (current name: " + latestFieldType.getName() + ") is not allowed; old '"
                + oldValueType + "' new '" + newValueType + "'");
    }
    Scope newScope = userFieldType.getScope();
    Scope oldScope = latestFieldType.getScope();
    if (!newScope.equals(oldScope)) {
        throw new FieldTypeUpdateException("Changing the scope of a fieldType '" + latestFieldType.getId()
                + "' (current name: " + latestFieldType.getName() + ") is not allowed; old '"
                + oldScope + "' new '" + newScope + "'");
    }
}
/**
 * Reads a record and wraps it as an IdRecord, resolving each field name to its
 * schema id and each scope's record type name to its schema id.
 *
 * NOTE(review): the version parameter 'aLong' is never used — getRecord is
 * called without it, so this always reads the latest record state. Looks like a
 * bug (or an intentionally unsupported parameter); confirm against the
 * interface contract before relying on versioned reads here.
 * NOTE(review): getRecordTypeByName is given record.getVersion() as the record
 * type version — presumably the record's version doubles as the record type
 * version here; verify.
 */
@Override public IdRecord readWithIds(RecordId recordId, Long aLong, List<SchemaId> schemaIds) throws RepositoryException, InterruptedException { Record record = getRecord(recordId); TypeManager typeManager = this.getTypeManager(); Map<SchemaId, QName> map = Maps.newHashMap(); for (QName qname : record.getFields().keySet()) { map.put(typeManager.getFieldTypeByName(qname).getId(), qname); } Map<Scope,SchemaId> recordTypeIds = Maps.newHashMap(); for (Scope scope : Scope.values()) { RecordType recordType = typeManager.getRecordTypeByName(record.getRecordTypeName(scope), record.getVersion()); if (recordType != null) { recordTypeIds.put(scope, recordType.getId()); } } IdRecord idRecord = new IdRecordImpl(record, map, recordTypeIds); return idRecord; }
// Scope is immutable on import: report a conflict instead of applying the change.
if (!oldScope.equals(newScope)) { return ImportResult.conflict("scope", oldScope, newScope);
/**
 * Deserializes an IdRecord: first the plain record, then the size-prefixed
 * field-id-to-name mapping, then one nullable record type schema id per scope,
 * read in Scope.values() order. The read order is the wire format and must
 * mirror writeIdRecord exactly.
 *
 * @param input source of the serialized bytes
 * @param repository supplies the IdGenerator used to decode schema ids
 * @return the reconstructed IdRecord
 */
public static final IdRecord readIdRecord(DataInput input, LRepository repository) throws RepositoryException, InterruptedException {
    Record record = read(input, repository);
    IdGenerator idGenerator = repository.getIdGenerator();
    int size = input.readVInt();
    Map<SchemaId, QName> idToQNameMapping = new HashMap<SchemaId, QName>();
    for (int i = 0; i < size; i++) {
        byte[] schemaIdBytes = readBytes(input);
        QName name = readQName(input);
        SchemaId schemaId = idGenerator.getSchemaId(schemaIdBytes);
        idToQNameMapping.put(schemaId, name);
    }
    // Parameterized EnumMap instead of the raw type: avoids the unchecked
    // warning and keeps the map type-safe.
    Map<Scope, SchemaId> recordTypeIds = new EnumMap<Scope, SchemaId>(Scope.class);
    for (Scope scope : Scope.values()) {
        byte[] schemaIdBytes = readNullOrBytes(input);
        if (schemaIdBytes != null) {
            // A null entry on the wire means no record type for this scope.
            SchemaId schemaId = idGenerator.getSchemaId(schemaIdBytes);
            recordTypeIds.put(scope, schemaId);
        }
    }
    return new IdRecordImpl(record, idToQNameMapping, recordTypeIds);
}
// Prepare one (initially empty) list of field type entries per scope.
for (Scope scope : Scope.values()) { fieldTypeEntriesByScope.put(scope, new ArrayList<Pair<FieldTypeEntry, FieldType>>());
// Serialize the (nullable) record type name and version for every scope,
// in Scope.values() order — the reader must iterate in the same order.
for (Scope scope : Scope.values()) { writeNullOrQName(record.getRecordTypeName(scope), output); writeNullOrVLong(record.getRecordTypeVersion(scope), output);