/**
 * Decodes {@code data} by wrapping it in a {@link DataInputImpl} and delegating
 * to the {@code DataInput}-based overload of {@code read}.
 */
@Override
public <T> T read(byte[] data) throws RepositoryException, InterruptedException {
    final DataInputImpl input = new DataInputImpl(data);
    return read(input);
}
/** Parses a {@link RecordId} from its serialized byte form by delegating to the DataInput overload. */
@Override
public RecordId fromBytes(byte[] bytes) {
    final DataInputImpl input = new DataInputImpl(bytes);
    return fromBytes(input);
}
/** Deserializes a variant-properties pattern from raw bytes via the DataInput-based overload. */
DerefMapVariantPropertiesPattern deserializeVariantPropertiesPattern(byte[] serialized) {
    final DataInputImpl input = new DataInputImpl(serialized);
    return deserializeVariantPropertiesPattern(input);
}
/**
 * Returns the string form of this id, computing and caching it on first use.
 * The UUID itself is also decoded lazily from {@code bytes} if it was not
 * materialized yet.
 */
public String toString() {
    if (string != null) {
        return string;
    }
    if (uuid == null) {
        // Decode the two longs (most/least significant bits) from the raw bytes.
        final DataInput dataInput = new DataInputImpl(bytes);
        final long msb = dataInput.readLong();
        final long lsb = dataInput.readLong();
        this.uuid = new UUID(msb, lsb);
    }
    this.string = uuid.toString();
    return string;
}
/**
 * Deserializes a set of permission strings: a vInt count followed by that many
 * vUTF-encoded entries.
 */
public static Set<String> deserialize(byte[] stringsAsBytes) {
    final DataInput input = new DataInputImpl(stringsAsBytes);
    final int permCount = input.readVInt();
    final Set<String> permissions = new HashSet<String>();
    for (int i = 0; i < permCount; i++) {
        permissions.add(input.readVUTF());
    }
    return permissions;
}
}
/**
 * Lazily decodes the serialized record ({@code bytes}) into the {@code delegate} IdRecord.
 *
 * @param clearBytes should be false for read operations, true for write operations.
 *                   The idea is that as long as the record is not modified, the
 *                   existing bytes can be reused.
 */
private synchronized void decode(boolean clearBytes) {
    if (delegate == null) {
        try {
            delegate = (IdRecord)recordValueType.read(new DataInputImpl(bytes));
        } catch (RepositoryException e) {
            // Chain the cause: without it the original decode failure is undiagnosable.
            throw new RuntimeException("Failed to decode record", e);
        } catch (InterruptedException e) {
            // Restore the interrupt flag and fall through; decoding did not complete.
            Thread.currentThread().interrupt();
        }
    }
    // Only discard the serialized form once a decoded delegate actually exists;
    // clearing after an interrupted (failed) decode would lose the record data entirely.
    if (clearBytes && delegate != null) {
        bytes = null;
    }
}
/** Converts the Avro-transported record bytes back into a {@link Record}. */
public Record convertRecord(ByteBuffer recordData, LRepository repository)
        throws RepositoryException, InterruptedException {
    final byte[] raw = asArray(recordData);
    return RecordAsBytesConverter.read(new DataInputImpl(raw), repository);
}
@Override public DataInput[] splitInMasterAndVariant(DataInput dataInput) { // Search for separator byte int sepPos = dataInput.indexOf(VARIANT_SEPARATOR); if (sepPos == -1) { return new DataInput[]{dataInput, null}; } else { DataInput keyInput = new DataInputImpl(((DataInputImpl) dataInput), dataInput.getPosition(), sepPos - dataInput.getPosition()); DataInput variantInput = new DataInputImpl(((DataInputImpl) dataInput), sepPos + 1, dataInput.getSize() - (sepPos + 1)); return new DataInput[]{keyInput, variantInput}; } }
/** Converts the Avro-transported id-record bytes back into an {@link IdRecord}. */
public IdRecord convertIdRecord(ByteBuffer avroIdRecord, LRepository repository)
        throws RepositoryException, InterruptedException {
    final byte[] raw = asArray(avroIdRecord);
    return RecordAsBytesConverter.readIdRecord(new DataInputImpl(raw), repository);
}
/**
 * Deserializes an AuthorizationContext: vUTF name, vUTF tenant, then a vInt role
 * count followed by that many vUTF role names.
 * NOTE: the method-name typo ("deserialiaze") is kept intentionally — renaming
 * would break existing callers.
 */
public static AuthorizationContext deserialiaze(byte[] data) {
    final DataInput input = new DataInputImpl(data);
    final String name = input.readVUTF();
    final String tenant = input.readVUTF();
    final int roleCount = input.readVInt();
    final Set<String> roles = new HashSet<String>();
    for (int i = 0; i < roleCount; i++) {
        roles.add(input.readVUTF());
    }
    return new AuthorizationContext(name, tenant, roles);
}
}
/** Decodes a {@link QName} stored as two consecutive UTF strings: namespace, then name. */
public static QName decodeName(byte[] bytes) {
    final DataInput input = new DataInputImpl(bytes);
    final String namespace = input.readUTF();
    final String localName = input.readUTF();
    return new QName(namespace, localName);
}
public boolean filterRowKey(byte[] buffer, int offset, int length) { // note: return value true means it is NOT a result of the scanner, false otherwise if (buffer == null) { return true; } final RecordId recordId = idGenerator.fromBytes(new DataInputImpl(buffer, offset, length)); final SortedMap<String, String> recordVariantProperties = recordId.getVariantProperties(); // check if the record has all expected variant properties if (containsAllExpectedDimensions(recordVariantProperties) && hasSameValueForValuedDimensions(recordVariantProperties)) { // check if the record doesn't have other variant properties return variantProperties.size() != recordVariantProperties.size(); } else { return true; } }
// Splits a serialized record id into a 16-byte UUID master part and (when longer
// than UUID_LENGTH) a variant-properties part.
//
// NOTE(review): the DataInputImpl(source, x, y) calls here look inconsistent with
// the separator-based splitter elsewhere in this code base, which passes
// (source, offset, LENGTH):
//   - keyInput's third argument is getPosition() + 16 — an end position rather
//     than a plain length of 16;
//   - variantInput's length is getSize() - 17 even though its offset is
//     position + 16, which would leave the last byte unread when position == 0;
//   - the hardcoded 16 and 17 bypass the UUID_LENGTH constant used in the guard.
// If the third constructor argument is a length and getPosition() can be
// non-zero, the key over-reads and the variant is short — TODO confirm against
// DataInputImpl's (source, offset, size) constructor contract before changing.
@Override public DataInput[] splitInMasterAndVariant(DataInput dataInput) { if (dataInput.getSize() - dataInput.getPosition() > UUID_LENGTH) { DataInput keyInput = new DataInputImpl(((DataInputImpl) dataInput), dataInput.getPosition(), dataInput.getPosition() + 16); DataInput variantInput = new DataInputImpl(((DataInputImpl) dataInput), dataInput.getPosition() + 16, dataInput.getSize() - 17); return new DataInput[]{keyInput, variantInput}; } else { return new DataInput[]{dataInput, null}; } }
/**
 * Decodes a ValueType from its schema encoding: a one-byte encoding version
 * followed by the UTF-encoded value type name.
 *
 * @throws TypeException when the encoding version byte does not match the
 *         supported {@code valueTypeEncodingVersion}
 */
private ValueType decodeValueType(byte[] bytes) throws RepositoryException, InterruptedException {
    final DataInput dataInput = new DataInputImpl(bytes);
    final byte version = dataInput.readByte();
    if (version != valueTypeEncodingVersion) {
        throw new TypeException("Unknown value type encoding version encountered in schema");
    }
    return getValueType(dataInput.readUTF());
}
decodedValue = Fields.DELETED; } else { decodedValue = fieldType.getValueType().read(new DataInputImpl(EncodingUtil.stripPrefix(value)));
/**
 * Deserializes the forward-dependency set: an int entry count, then per entry a
 * length-prefixed table name, a length-prefixed serialized master record id, and
 * a variant-properties pattern.
 */
Set<DependencyEntry> deserializeDependenciesForward(byte[] serialized) throws IOException {
    final DataInputImpl input = new DataInputImpl(serialized);
    final int expectedCount = input.readInt();
    final Set<DependencyEntry> entries = new HashSet<DependencyEntry>(expectedCount);
    // Loop on set size (not a counter) so duplicate entries cannot terminate early —
    // same termination condition as before.
    while (entries.size() < expectedCount) {
        // Read order matters: table, master id bytes, then the properties pattern.
        final String table = Bytes.toString(input.readBytes(input.readInt()));
        final byte[] masterBytes = input.readBytes(input.readInt());
        final DerefMapVariantPropertiesPattern pattern = deserializeVariantPropertiesPattern(input);
        entries.add(new DependencyEntry(
                new AbsoluteRecordIdImpl(table,
                        idGenerator.newRecordId(idGenerator.fromBytes(masterBytes),
                                pattern.getConcreteProperties())),
                pattern.getPatternProperties()));
    }
    return entries;
}
// Decodes one raw key/value cell into an ExtractedField (field type + value + optional
// metadata), or returns null when the cell's flags mark the field as deleted.
//
// Value layout (derived from the offsets below): [flags byte][value bytes]
// [metadata bytes][4-byte metadata size], where the metadata block is present only
// when the flags carry metadata encoding version 1.
//
// The field type is looked up by the schema id embedded in the key (everything after
// the first key byte). When a ReadContext is supplied, the resolved field type is
// registered on it.
private ExtractedField extractField(byte[] key, byte[] prefixedValue, ReadContext context, FieldTypes fieldTypes)
        throws RepositoryException, InterruptedException {
    // First byte of the value holds the field flags.
    byte flags = prefixedValue[0];
    if (FieldFlags.isDeletedField(flags)) {
        return null;
    }
    // key = [1 prefix byte][schema id bytes]
    FieldType fieldType = fieldTypes.getFieldType(new SchemaIdImpl(Bytes.tail(key, key.length - 1)));
    if (context != null) {
        context.addFieldType(fieldType);
    }
    ValueType valueType = fieldType.getValueType();
    Metadata metadata = null;
    int metadataSpace = 0; // space taken up by metadata (= metadata itself + length suffix)
    int metadataEncodingVersion = FieldFlags.getFieldMetadataVersion(flags);
    if (metadataEncodingVersion == 0) {
        // there is no metadata
    } else if (metadataEncodingVersion == 1) {
        // The last 4 bytes of the value are the metadata size; the metadata itself
        // sits immediately before that suffix.
        int metadataSize = Bytes.toInt(prefixedValue, prefixedValue.length - Bytes.SIZEOF_INT, Bytes.SIZEOF_INT);
        metadataSpace = metadataSize + Bytes.SIZEOF_INT;
        metadata = MetadataSerDeser.read(
                new DataInputImpl(prefixedValue, prefixedValue.length - metadataSpace, metadataSize));
    } else {
        throw new RuntimeException("Unsupported field metadata encoding version: " + metadataEncodingVersion);
    }
    // The value proper spans from just after the flags byte up to the metadata block.
    Object value = valueType.read(new DataInputImpl(prefixedValue, FieldFlags.SIZE_OF_FIELD_FLAGS,
            prefixedValue.length - FieldFlags.SIZE_OF_FIELD_FLAGS - metadataSpace));
    return new ExtractedField(fieldType, value, metadata);
}
public static Link decodeLink(DataInput dataInput, IdGenerator idGenerator) { // Format: see toBytes. int recordIdLength = dataInput.readInt(); byte[] recordIdBytes = null; if (recordIdLength > 0) { recordIdBytes = dataInput.readBytes(recordIdLength); } String args = dataInput.readUTF(); if (recordIdLength == 0 && args == null) { return new Link(); } Link.LinkBuilder builder = Link.newBuilder(); if (recordIdLength > 0) { RecordId id = decode(new DataInputImpl(recordIdBytes), idGenerator); builder.recordId(id); } if (args != null && args.length() > 0) { argsFromString(args, builder, args /* does not matter, should never be invalid */); } return builder.create(); }
int masterRecordIdLength = dataInput.readInt(); DataInput masterRecordIdInput = new DataInputImpl((DataInputImpl)dataInput, position, masterRecordIdLength); RecordId masterRecordId = decode(masterRecordIdInput, idGenerator); dataInput.setPosition(masterRecordIdLength);