/**
 * Converts a {@link RecordId} into a {@link ByteBuffer} wrapping its serialized byte form.
 *
 * @param recordId the record id to convert; may be null
 * @return a ByteBuffer over {@code recordId.toBytes()}, or null when recordId is null
 */
public ByteBuffer convert(RecordId recordId) {
    return recordId == null ? null : ByteBuffer.wrap(recordId.toBytes());
}
/**
 * Serializes this AbsoluteRecordId as:
 * [int table-length][table bytes][int record-id-length][record id bytes].
 * <p>
 * NOTE(review): {@code table.getBytes()} uses the platform default charset — presumably the
 * deserializing side decodes with the same default; confirm before switching to an explicit charset,
 * since that would change the stored byte format for non-ASCII table names.
 *
 * @return the serialized byte array
 */
@Override
public byte[] toBytes() {
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    DataOutput out = new DataOutputStream(buffer);
    byte[] tableBytes = table.getBytes();
    byte[] recordIdBytes = recordId.toBytes();
    try {
        out.writeInt(tableBytes.length);
        out.write(tableBytes);
        out.writeInt(recordIdBytes.length);
        out.write(recordIdBytes);
    } catch (IOException ioe) {
        // Writing to an in-memory stream should not fail; surface it as unchecked if it does.
        throw new RuntimeException("Error serializing AbsoluteRecordId: " + toString(), ioe);
    }
    return buffer.toByteArray();
}
/**
 * Build a Put for inserting a new (blank) record into a Lily repository table.
 *
 * @param newRecord the record to insert; its id becomes the HBase row key
 * @param version version to record the changes under
 * @param fieldTypes field type lookup used while computing the field changes
 * @param recordEvent event that accumulates the changes made
 * @param referencedBlobs out-parameter collecting blobs newly referenced by this record
 * @param unReferencedBlobs out-parameter collecting blobs no longer referenced
 * @param occ initial optimistic-concurrency-control value for the row
 * @return a Put containing the full state of the new record
 */
public Put buildPut(Record newRecord, long version, FieldTypes fieldTypes, RecordEvent recordEvent,
        Set<BlobReference> referencedBlobs, Set<BlobReference> unReferencedBlobs, long occ)
        throws RecordException, InterruptedException, RepositoryException {
    // Diffing against a blank record makes every field of newRecord register as a change.
    Record blankOriginal = newRecord();
    Put put = new Put(newRecord.getId().toBytes());
    // Mark the row as not-deleted (timestamp 1, same as the original code used).
    put.add(RecordCf.DATA.bytes, RecordColumn.DELETED.bytes, 1L, Bytes.toBytes(false));
    calculateRecordChanges(newRecord, blankOriginal, version, put, recordEvent,
            referencedBlobs, unReferencedBlobs, false, fieldTypes);
    // Seed the OCC column so later conditional updates have a value to compare against.
    put.add(RecordCf.DATA.bytes, RecordColumn.OCC.bytes, 1L, Bytes.toBytes(occ));
    return put;
}
/**
 * Build a {@code Put} to update a record. No metadata updates are performed, and any existing metadata on the
 * fields will be overwritten.
 * <p>
 * The record to be updated must exist, otherwise a "partial" record will be created. No checking is done to ensure
 * that the record to be updated exists.
 * <p>
 * Additionally, records updated in this manner must be unversioned records.
 * <p>
 * In other words, use this method at your own risk. Unless you are very certain about the context you are working
 * in, updates should go via the Lily API.
 *
 * @param recordId identifier of the record to be updated
 * @param fieldValues map of field names and values to be updated on the record
 * @return Put containing all field updates
 */
public Put buildRecordUpdate(RecordId recordId, Map<QName, Object> fieldValues) {
    Put put = new Put(recordId.toBytes());
    FieldValueWriter valueWriter = hbaseRepo.newFieldValueWriter(put, null);
    for (Entry<QName, Object> field : fieldValues.entrySet()) {
        try {
            valueWriter.addFieldValue(fieldTypes.getFieldType(field.getKey()), field.getValue(), null);
        } catch (Exception e) {
            // Field type lookup / serialization failures are not recoverable here.
            throw new RuntimeException(e);
        }
    }
    return put;
}
/**
 * Translates a {@link RecordIdPrefixFilter} into an HBase {@link PrefixFilter} on the row key.
 *
 * @return the HBase filter, or null when the given filter is not a RecordIdPrefixFilter
 * @throws IllegalArgumentException when the filter carries no record id
 */
@Override
public Filter createHBaseFilter(RecordFilter uncastFilter, LRepository repository, HBaseRecordFilterFactory factory)
        throws RepositoryException, InterruptedException {
    if (!(uncastFilter instanceof RecordIdPrefixFilter)) {
        // Not ours to handle; another factory in the chain may translate it.
        return null;
    }
    RecordIdPrefixFilter prefixFilter = (RecordIdPrefixFilter) uncastFilter;
    RecordId recordId = prefixFilter.getRecordId();
    if (recordId == null) {
        throw new IllegalArgumentException("Record ID should be specified in RecordIdPrefixFilter");
    }
    // Record id bytes are the row key, so a byte prefix match is a record id prefix match.
    return new PrefixFilter(recordId.toBytes());
}
}
public void write(DataOutput dataOutput) { // The bytes format is as follows: // [byte representation of table, byte representation of master record id, if not null][args: bytes of the string representation] byte[] tableBytes = table == null ? new byte[0] : table.getBytes(); dataOutput.writeInt(tableBytes.length); if (tableBytes.length > 0) { dataOutput.writeBytes(tableBytes); } byte[] recordIdBytes = masterRecordId == null ? new byte[0] : masterRecordId.toBytes(); dataOutput.writeInt(recordIdBytes.length); if (recordIdBytes.length > 0) { dataOutput.writeBytes(recordIdBytes); } StringBuilder argsBuilder = new StringBuilder(); argstoString(argsBuilder); dataOutput.writeUTF(argsBuilder.toString()); }
/**
 * Writes one record, either directly through the ingester or — in bulk mode — by emitting a
 * row-key/Put pair to the MapReduce context for later bulk loading.
 *
 * @param record the record to write
 */
@Override
public void write(Record record) throws IOException, InterruptedException {
    if (!bulkIngester.isBulkMode()) {
        // Direct mode: let the ingester write the record itself.
        try {
            bulkIngester.write(record);
            recordsWritten++;
        } catch (RepositoryException e) {
            throw new RuntimeException(e);
        }
        return;
    }
    // Bulk mode: build the Put ourselves and hand it to the MR framework.
    Put put;
    try {
        put = bulkIngester.buildPut(record);
        recordsWritten++;
    } catch (RepositoryException e) {
        throw new RuntimeException(e);
    }
    rowKey.set(record.getId().toBytes());
    context.write(rowKey, put);
}
// Prepare one HBase Get per requested record id, keyed on the record id's byte representation.
// NOTE(review): fragment — the rest of the loop body (configuring and collecting each Get) is outside this view.
List<Get> gets = new ArrayList<Get>(); for (RecordId recordId : recordIds) { Get get = new Get(recordId.toBytes());
private boolean reserveBlob(BlobReference referencedBlob) throws BlobNotFoundException, BlobException, IOException { BlobStoreAccess blobStoreAccess = registry.getBlobStoreAccess(referencedBlob.getBlob()); // Inline blobs are not incubated and therefore reserving them always succeeds if (!blobStoreAccess.incubate()) { return true; } byte[] row = referencedBlob.getBlob().getValue(); byte[] family = BlobIncubatorCf.REF.bytes; byte[] recordQualifier = BlobIncubatorColumn.RECORD.bytes; byte[] fieldQualifier = BlobIncubatorColumn.FIELD.bytes; Put put = new Put(row); put.add(family, recordQualifier, referencedBlob.getRecordId().toBytes()); put.add(family, fieldQualifier, referencedBlob.getFieldType().getId().getBytes()); return blobIncubatorTable.checkAndPut(row, family, recordQualifier, INCUBATE, put); }
// NOTE(review): fragment — enclosing method not visible. Sets up a type manager, then fetches
// all stored versions of the record's row (record id bytes are the row key).
typeMgr = new HBaseTypeManager(idGenerator, conf, zk, new HBaseTableFactoryImpl(conf)); Get get = new Get(recordId.toBytes()); get.setMaxVersions(); Result row = table.get(get);
/**
 * Translates a {@link RecordVariantFilter} into an HBase filter: a row-key prefix match on the
 * master record id combined with an exact match on the variant properties.
 *
 * @return the combined HBase filter, or null when the given filter is not a RecordVariantFilter
 * @throws IllegalArgumentException when the master record id or variant properties are missing
 */
@Override
public Filter createHBaseFilter(RecordFilter uncastFilter, LRepository repository, HBaseRecordFilterFactory factory)
        throws RepositoryException, InterruptedException {
    if (!(uncastFilter instanceof RecordVariantFilter)) {
        // Not ours to handle; another factory in the chain may translate it.
        return null;
    }
    final RecordVariantFilter variantFilter = (RecordVariantFilter) uncastFilter;
    if (variantFilter.getMasterRecordId() == null) {
        throw new IllegalArgumentException("Record ID should be specified in RecordVariantFilter");
    }
    if (variantFilter.getVariantProperties() == null) {
        throw new IllegalArgumentException("VariantProperties should be specified in RecordVariantFilter");
    }
    // Narrow to rows sharing the master record id prefix, then match the variant properties exactly.
    return new FilterList(Arrays.<Filter>asList(
            new PrefixFilter(variantFilter.getMasterRecordId().getMaster().toBytes()),
            new LilyRecordVariantFilter(variantFilter.getVariantProperties())));
}
}
@Override public Set<RecordId> getVariants(RecordId recordId) throws RepositoryException { byte[] masterRecordIdBytes = recordId.getMaster().toBytes(); FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL); filterList.addFilter(new PrefixFilter(masterRecordIdBytes)); filterList.addFilter(REAL_RECORDS_FILTER); Scan scan = new Scan(masterRecordIdBytes, filterList); scan.addColumn(RecordCf.DATA.bytes, RecordColumn.DELETED.bytes); Set<RecordId> recordIds = new HashSet<RecordId>(); try { ResultScanner scanner = recordTable.getScanner(scan); Result result; while ((result = scanner.next()) != null) { RecordId id = idGenerator.fromBytes(result.getRow()); recordIds.add(id); } Closer.close( scanner); // Not closed in finally block: avoid HBase contact when there could be connection problems. } catch (IOException e) { throw new RepositoryException("Error getting list of variants of record " + recordId.getMaster(), e); } return recordIds; }
public Set<FieldedLink> getFieldedReferrers(RecordId record, SchemaId vtag) throws LinkIndexException, InterruptedException { long before = System.currentTimeMillis(); try { Query query = new Query(); query.addEqualsCondition("target", record.toBytes()); if (vtag != null) { query.addEqualsCondition("vtag", vtag.getBytes()); } Set<FieldedLink> result = new HashSet<FieldedLink>(); QueryResult qr = backwardIndex.performQuery(query); byte[] id; while ((id = qr.next()) != null) { SchemaId sourceField = getIdGenerator().getSchemaId(qr.getData(SOURCE_FIELD_KEY)); result.add(new FieldedLink(getIdGenerator().absoluteFromBytes(id), sourceField)); } Closer.close( qr); // Not closed in finally block: avoid HBase contact when there could be connection problems. return result; } catch (IOException e) { throw new LinkIndexException("Error getting referrers for record '" + record + "', vtag '" + vtag + "'", e); } finally { metrics.report(Action.GET_FIELDED_REFERRERS, System.currentTimeMillis() - before); } }
protected Result getRow(RecordId recordId, Long version, int numberOfVersions, List<FieldType> fields, boolean disableAuth) throws RecordException { Result result; Get get = new Get(recordId.toBytes()); get.setFilter(REAL_RECORDS_FILTER); try { // Add the columns for the fields to get addFieldsToGet(get, fields); if (version != null) { get.setTimeRange(0, version + 1); // Only retrieve data within this timerange } get.setMaxVersions(numberOfVersions); // Retrieve the data from the repository if (disableAuth) { result = nonAuthRecordTable.get(get); } else { result = recordTable.get(get); } if (result == null || result.isEmpty()) { throw new RecordNotFoundException(recordId, this, this); } } catch (IOException e) { throw new RecordException("Exception occurred while retrieving record '" + recordId + "' from HBase table", e); } return result; }
// NOTE(review): fragment — the loop body continues outside this view. For each blob reference,
// grabs its field type, the owning record's id bytes, and the field's value type.
for (BlobReference blobReference : blobs) { FieldTypeImpl fieldType = (FieldTypeImpl) blobReference.getFieldType(); byte[] recordIdBytes = recordId.toBytes(); ValueType valueType = fieldType.getValueType();
/**
 * Queries the backward deref index for the records depending on the given parent record,
 * optionally restricted to a vtag, with variant properties and fields matched by an index filter.
 *
 * @param parentRecordId the dependency whose dependants are looked up
 * @param fields fields of the dependency the dependants must use (semantics per DerefMapIndexFilter)
 * @param vtag optional vtag restriction; null for all vtags
 * @return an iterator over the dependant record ids
 */
@Override
public DependantRecordIdsIterator findDependantsOf(AbsoluteRecordId parentRecordId, Set<SchemaId> fields,
        SchemaId vtag) throws IOException {
    final Query query = new Query();
    // The backward index is keyed on the master record id of the dependency.
    query.addEqualsCondition("dependency_masterrecordid", parentRecordId.getRecordId().getMaster().toBytes());
    if (vtag != null) {
        query.addEqualsCondition("dependant_vtag", vtag.getBytes());
    }
    query.setIndexFilter(new DerefMapIndexFilter(parentRecordId.getRecordId().getVariantProperties(), fields));
    return new DependantRecordIdsIteratorImpl(backwardDerefIndex.performQuery(query), this.serializationUtil);
}
// NOTE(review): fragment — enclosing method not visible. Builds a Get on the record's row
// restricted to the DELETED column; the Get is presumably executed outside this view.
byte[] rowId = record.getId().toBytes(); Get get = new Get(rowId); get.addColumn(RecordCf.DATA.bytes, RecordColumn.DELETED.bytes);
/** * Serializes a list of {@link DependencyEntry}s into a byte array for * usage in the forward index table. It uses a variable length byte array encoding schema. * * @param dependencies list of dependencies to serialize * @return byte array with the serialized format */ byte[] serializeDependenciesForward(Collection<DependencyEntry> dependencies) throws IOException { final DataOutputImpl dataOutput = new DataOutputImpl(); // total number of dependencies dataOutput.writeInt(dependencies.size()); for (DependencyEntry dependencyEntry : dependencies) { // we store the master record id, because that is how they are stored in the backward table final byte[] masterTableBytes = Bytes.toBytes(dependencyEntry.getDependency().getTable()); final byte[] masterBytes = dependencyEntry.getDependency().getRecordId().getMaster().toBytes(); dataOutput.writeInt(masterTableBytes.length); dataOutput.writeBytes(masterTableBytes); dataOutput.writeInt(masterBytes.length); dataOutput.writeBytes(masterBytes); final byte[] variantPropertiesBytes = serializeVariantPropertiesPattern(createVariantPropertiesPattern( dependencyEntry.getDependency().getRecordId().getVariantProperties(), dependencyEntry.getMoreDimensionedVariants())); dataOutput.writeBytes(variantPropertiesBytes); } return dataOutput.toByteArray(); }
private IndexEntry createBackwardEntry(AbsoluteRecordId parentRecordId, AbsoluteRecordId dependantRecordId, SchemaId dependantVtagId, Set<SchemaId> fields, Set<String> moreDimensionedVariantProperties) throws IOException { final byte[] serializedVariantPropertiesPattern = this.serializationUtil.serializeVariantPropertiesPattern( this.serializationUtil.createVariantPropertiesPattern(parentRecordId.getRecordId().getVariantProperties(), moreDimensionedVariantProperties)); final IndexEntry bwdEntry = new IndexEntry(backwardDerefIndex.getDefinition()); bwdEntry.addField("dependency_masterrecordid", parentRecordId.getRecordId().getMaster().toBytes()); bwdEntry.addField("dependant_vtag", dependantVtagId.getBytes()); bwdEntry.addField("variant_properties_pattern", serializedVariantPropertiesPattern); // the identifier is the dependant which depends on the dependency bwdEntry.setIdentifier(dependantRecordId.toBytes()); // the fields which the dependant uses of the dependency (null if used for deleting the entry) if (fields != null) { bwdEntry.addData(FIELDS_KEY, this.serializationUtil.serializeFields(fields)); } return bwdEntry; }
// NOTE(review): fragment — the opening `if` headers are outside this view. This appears to be two
// independent if/else-if chains: raw start row preferred over the start record id's bytes, and raw
// stop row preferred over the stop record id's bytes — confirm against the full method.
hbaseScan.setStartRow(scan.getRawStartRecordId()); } else if (scan.getStartRecordId() != null) { hbaseScan.setStartRow(scan.getStartRecordId().toBytes()); hbaseScan.setStopRow(scan.getRawStopRecordId()); } else if (scan.getStopRecordId() != null) { hbaseScan.setStopRow(scan.getStopRecordId().toBytes());