/**
 * Get {@link ColumnFamilyDescriptorBuilder} with default properties set.
 *
 * <p>Defaults: a single version per cell, row-level bloom filter, and the compression
 * type taken from the Hadoop configuration (falling back to the system default).
 *
 * @param columnFamilyName name of the column family
 * @param hConf hadoop configurations
 * @return the builder with default properties set
 */
public static ColumnFamilyDescriptorBuilder getColumnFamilyDescriptorBuilder(String columnFamilyName,
                                                                             Configuration hConf) {
  ColumnFamilyDescriptorBuilder cfdBuilder = new ColumnFamilyDescriptorBuilder(columnFamilyName);
  String compression = hConf.get(HBaseTableUtil.CFG_HBASE_TABLE_COMPRESSION,
                                 HBaseTableUtil.DEFAULT_COMPRESSION_TYPE.name());
  cfdBuilder
    .setMaxVersions(1)
    .setBloomType(ColumnFamilyDescriptor.BloomType.ROW)
    // Use Locale.ROOT so the enum lookup is not broken by locale-sensitive casing
    // (e.g. under a Turkish default locale, "snappy".toUpperCase() != "SNAPPY").
    .setCompressionType(ColumnFamilyDescriptor.CompressionType.valueOf(
      compression.toUpperCase(java.util.Locale.ROOT)));
  return cfdBuilder;
}
/**
 * Creates the backing HBase table if it does not already exist, using the default
 * column-family settings and a single data column family.
 *
 * @throws IOException if the DDL executor fails to create the table
 */
@Override
public void create() throws IOException {
  // Build the single data column family with default properties.
  ColumnFamilyDescriptorBuilder family =
    HBaseTableUtil.getColumnFamilyDescriptorBuilder(Bytes.toString(DATA_COLUMN_FAMILY), hConf);
  // Attach it to a table descriptor derived from the table id and CDAP configuration.
  TableDescriptorBuilder table = HBaseTableUtil.getTableDescriptorBuilder(tableId, cConf);
  table.addColumnFamily(family.build());
  // The executor is closed automatically; creation is a no-op when the table exists.
  try (HBaseDDLExecutor ddl = ddlExecutorFactory.get()) {
    ddl.createTableIfNotExists(table.build(), null);
  }
}
cfdBuilder.setMaxVersions(Integer.MAX_VALUE); } else if (DatasetsUtil.isTransactional(spec.getProperties())) { cfdBuilder.setMaxVersions(Integer.MAX_VALUE); } else { cfdBuilder.setMaxVersions(1); cfdBuilder.setBloomType(ColumnFamilyDescriptor.BloomType.ROW); cfdBuilder.addProperty(TxConstants.PROPERTY_TTL, String.valueOf(ttl)); if (supportsReadlessIncrements) { cfdBuilder.addProperty("dataset.table.readless.increment.transactional", "false"); tdBuilder.addColumnFamily(cfdBuilder.build());
/**
 * Idempotently creates the underlying HBase table with one data column family.
 *
 * @throws IOException on failure to create the table
 */
@Override
public void create() throws IOException {
  // Compose the table descriptor in a single chain: default table properties plus
  // the data column family built with default family settings.
  TableDescriptorBuilder tableBuilder = HBaseTableUtil.getTableDescriptorBuilder(tableId, cConf)
    .addColumnFamily(
      HBaseTableUtil.getColumnFamilyDescriptorBuilder(Bytes.toString(DATA_COLUMN_FAMILY), hConf).build());
  // try-with-resources guarantees the DDL executor is released even on failure.
  try (HBaseDDLExecutor executor = ddlExecutorFactory.get()) {
    executor.createTableIfNotExists(tableBuilder.build(), null);
  }
}
cfdBuilder.setMaxVersions(Integer.MAX_VALUE); } else if (DatasetsUtil.isTransactional(spec.getProperties())) { cfdBuilder.setMaxVersions(Integer.MAX_VALUE); } else { cfdBuilder.setMaxVersions(1); cfdBuilder.setBloomType(ColumnFamilyDescriptor.BloomType.ROW); cfdBuilder.addProperty(TxConstants.PROPERTY_TTL, String.valueOf(ttl)); if (supportsReadlessIncrements) { cfdBuilder.addProperty("dataset.table.readless.increment.transactional", "false"); tdBuilder.addColumnFamily(cfdBuilder.build());
/**
 * Creates the configuration HBase table if it does not exist.
 *
 * <p>The table lives in the system namespace and has a single column family
 * with default properties.
 *
 * @throws IOException if table creation fails
 */
@VisibleForTesting
void createTableIfNecessary() throws IOException {
  try (HBaseDDLExecutor executor = new HBaseDDLExecutorFactory(cConf, hConf).get()) {
    HBaseTableUtil util = new HBaseTableUtilFactory(cConf).get();
    TableId configTableId = util.createHTableId(NamespaceId.SYSTEM, TABLE_NAME);
    // Default column family, then the table descriptor that carries it.
    ColumnFamilyDescriptorBuilder familyBuilder =
      HBaseTableUtil.getColumnFamilyDescriptorBuilder(Bytes.toString(FAMILY), hConf);
    TableDescriptorBuilder descriptorBuilder = HBaseTableUtil.getTableDescriptorBuilder(configTableId, cConf);
    descriptorBuilder.addColumnFamily(familyBuilder.build());
    // No-op when the table already exists; null split keys = single region.
    executor.createTableIfNotExists(descriptorBuilder.build(), null);
  }
}
}
/**
 * Get {@link ColumnFamilyDescriptorBuilder} with default properties set.
 *
 * <p>Defaults: a single version per cell, row-level bloom filter, and the compression
 * type taken from the Hadoop configuration (falling back to the system default).
 *
 * @param columnFamilyName name of the column family
 * @param hConf hadoop configurations
 * @return the builder with default properties set
 */
public static ColumnFamilyDescriptorBuilder getColumnFamilyDescriptorBuilder(String columnFamilyName,
                                                                             Configuration hConf) {
  ColumnFamilyDescriptorBuilder cfdBuilder = new ColumnFamilyDescriptorBuilder(columnFamilyName);
  String compression = hConf.get(HBaseTableUtil.CFG_HBASE_TABLE_COMPRESSION,
                                 HBaseTableUtil.DEFAULT_COMPRESSION_TYPE.name());
  cfdBuilder
    .setMaxVersions(1)
    .setBloomType(ColumnFamilyDescriptor.BloomType.ROW)
    // Use Locale.ROOT so the enum lookup is not broken by locale-sensitive casing
    // (e.g. under a Turkish default locale, "snappy".toUpperCase() != "SNAPPY").
    .setCompressionType(ColumnFamilyDescriptor.CompressionType.valueOf(
      compression.toUpperCase(java.util.Locale.ROOT)));
  return cfdBuilder;
}
/**
 * Creates the configuration HBase table if it does not exist.
 *
 * <p>The table is created in the system namespace under {@code TABLE_NAME} with a single
 * column family carrying the default properties.
 *
 * @throws IOException if the DDL executor cannot create the table
 */
@VisibleForTesting
void createTableIfNecessary() throws IOException {
  // The executor is created per call and closed by try-with-resources.
  try (HBaseDDLExecutor ddlExecutor = new HBaseDDLExecutorFactory(cConf, hConf).get()) {
    HBaseTableUtil tableUtil = new HBaseTableUtilFactory(cConf).get();
    TableId tableId = tableUtil.createHTableId(NamespaceId.SYSTEM, TABLE_NAME);
    ColumnFamilyDescriptorBuilder cfdBuilder =
      HBaseTableUtil.getColumnFamilyDescriptorBuilder(Bytes.toString(FAMILY), hConf);
    TableDescriptorBuilder tdBuilder =
      HBaseTableUtil.getTableDescriptorBuilder(tableId, cConf).addColumnFamily(cfdBuilder.build());
    // Idempotent: a no-op if the table already exists. Null split keys = one region.
    ddlExecutor.createTableIfNotExists(tdBuilder.build(), null);
  }
}
// Closing brace of the enclosing class (carried over from the original source).
}
/**
 * Returns a {@link MetadataTable} backed by an HBase table, creating the HBase table on
 * first use. A descriptor cache is used to skip the (expensive) existence check and DDL
 * once a table is known to exist.
 *
 * @return a new {@code HBaseMetadataTable} wrapping a fresh {@code HTable} instance
 * @throws IOException if table creation or HTable instantiation fails
 */
@Override
public MetadataTable createMetadataTable() throws IOException {
  TableId tableId = tableUtil.createHTableId(NamespaceId.SYSTEM, metadataTableName);
  HTable hTable = null;
  // If the table descriptor is in the cache, we assume the table exists.
  // NOTE(review): this is double-checked locking over tableDescriptors — safe only if
  // tableDescriptors is a thread-safe map (e.g. ConcurrentHashMap); confirm its declaration.
  if (!tableDescriptors.containsKey(tableId)) {
    synchronized (this) {
      // Re-check under the lock: another thread may have created the table meanwhile.
      if (!tableDescriptors.containsKey(tableId)) {
        try (HBaseDDLExecutor ddlExecutor = ddlExecutorFactory.get()) {
          ColumnFamilyDescriptorBuilder cfdBuilder =
            HBaseTableUtil.getColumnFamilyDescriptorBuilder(Bytes.toString(COLUMN_FAMILY), hConf);
          TableDescriptorBuilder tdBuilder =
            HBaseTableUtil.getTableDescriptorBuilder(tableId, cConf).addColumnFamily(cfdBuilder.build());
          // Idempotent create; null split keys = single region.
          ddlExecutor.createTableIfNotExists(tdBuilder.build(), null);
          hTable = tableUtil.createHTable(hConf, tableId);
          // Cache the descriptor so subsequent calls skip the DDL path entirely.
          tableDescriptors.put(tableId, hTable.getTableDescriptor());
        }
      }
    }
  }
  // Fast path (cache hit) or lost the race above: still need an HTable handle.
  if (hTable == null) {
    hTable = tableUtil.createHTable(hConf, tableId);
  }
  return new HBaseMetadataTable(tableUtil, hTable, COLUMN_FAMILY,
                                cConf.getInt(Constants.MessagingSystem.HBASE_SCAN_CACHE_ROWS),
                                createExceptionHandler(tableId));
}
/**
 * Returns a {@link MetadataTable} backed by the named HBase table, creating the table on
 * first use. The descriptor cache avoids repeating the existence check / DDL per call.
 *
 * @param tableName name of the metadata table within the system namespace
 * @return a new {@code HBaseMetadataTable} wrapping a fresh {@code HTable} instance
 * @throws IOException if table creation or HTable instantiation fails
 */
@Override
public MetadataTable createMetadataTable(String tableName) throws IOException {
  TableId tableId = tableUtil.createHTableId(NamespaceId.SYSTEM, tableName);
  HTable hTable = null;
  // If the table descriptor is in the cache, we assume the table exists.
  // NOTE(review): double-checked locking over tableDescriptors — safe only if the map is
  // thread-safe (e.g. ConcurrentHashMap); confirm its declaration.
  if (!tableDescriptors.containsKey(tableId)) {
    synchronized (this) {
      // Re-check under the lock in case another thread created the table first.
      if (!tableDescriptors.containsKey(tableId)) {
        try (HBaseDDLExecutor ddlExecutor = ddlExecutorFactory.get()) {
          ColumnFamilyDescriptorBuilder cfdBuilder =
            HBaseTableUtil.getColumnFamilyDescriptorBuilder(Bytes.toString(COLUMN_FAMILY), hConf);
          TableDescriptorBuilder tdBuilder =
            HBaseTableUtil.getTableDescriptorBuilder(tableId, cConf).addColumnFamily(cfdBuilder.build());
          // Idempotent create; null split keys = single region.
          ddlExecutor.createTableIfNotExists(tdBuilder.build(), null);
          hTable = tableUtil.createHTable(hConf, tableId);
          // Cache the descriptor so subsequent calls skip the DDL path entirely.
          tableDescriptors.put(tableId, hTable.getTableDescriptor());
        }
      }
    }
  }
  // Fast path (cache hit) or lost the race: still need an HTable handle.
  if (hTable == null) {
    hTable = tableUtil.createHTable(hConf, tableId);
  }
  return new HBaseMetadataTable(tableUtil, hTable, COLUMN_FAMILY,
                                cConf.getInt(Constants.MessagingSystem.HBASE_SCAN_CACHE_ROWS),
                                createExceptionHandler(tableId));
}
@Override public void create() throws IOException { // Create the queue table TableDescriptorBuilder tdBuilder = HBaseTableUtil.getTableDescriptorBuilder(tableId, cConf); for (String key : properties.stringPropertyNames()) { tdBuilder.addProperty(key, properties.getProperty(key)); } ColumnFamilyDescriptorBuilder cfdBuilder = HBaseTableUtil.getColumnFamilyDescriptorBuilder(Bytes.toString(QueueEntryRow.COLUMN_FAMILY), hConf); tdBuilder.addColumnFamily(cfdBuilder.build()); // Add coprocessors CoprocessorJar coprocessorJar = createCoprocessorJar(); for (Class<? extends Coprocessor> coprocessor : coprocessorJar.getCoprocessors()) { tdBuilder.addCoprocessor( coprocessorManager.getCoprocessorDescriptor(coprocessor, coprocessorJar.getPriority(coprocessor))); } // Create queue table with splits. The distributor bucket size is the same as splits. int splits = cConf.getInt(QueueConstants.ConfigKeys.QUEUE_TABLE_PRESPLITS); AbstractRowKeyDistributor distributor = new RowKeyDistributorByHashPrefix( new RowKeyDistributorByHashPrefix.OneByteSimpleHash(splits)); byte[][] splitKeys = HBaseTableUtil.getSplitKeys(splits, splits, distributor); tdBuilder.addProperty(QueueConstants.DISTRIBUTOR_BUCKETS, Integer.toString(splits)); createQueueTable(tdBuilder, splitKeys); }
/**
 * Creates a stream-file consumer, lazily creating its backing HBase consumer table.
 *
 * <p>The table is pre-split into the configured number of regions, with the row-key
 * distributor bucket count matching the split count so keys spread evenly.
 *
 * @param tableId logical id of the consumer table
 * @param streamConfig configuration of the stream being consumed
 * @param consumerConfig configuration of this consumer instance
 * @param stateStore store for persisting consumer state
 * @param beginConsumerState state to start consuming from
 * @param reader reader over the stream files
 * @param extraFilter optional additional read filter; may be null
 * @return a new {@code HBaseStreamFileConsumer}
 * @throws IOException if table creation or HTable setup fails
 */
@Override
protected StreamConsumer create(TableId tableId, StreamConfig streamConfig, ConsumerConfig consumerConfig,
                                StreamConsumerStateStore stateStore, StreamConsumerState beginConsumerState,
                                FileReader<StreamEventOffset, Iterable<StreamFileOffset>> reader,
                                @Nullable ReadFilter extraFilter) throws IOException {
  // Pre-split count doubles as the distributor bucket count.
  int splits = cConf.getInt(Constants.Stream.CONSUMER_TABLE_PRESPLITS);
  AbstractRowKeyDistributor distributor = new RowKeyDistributorByHashPrefix(
    new RowKeyDistributorByHashPrefix.OneByteSimpleHash(splits));
  byte[][] splitKeys = HBaseTableUtil.getSplitKeys(splits, splits, distributor);
  TableId hBaseTableId =
    tableUtil.createHTableId(new NamespaceId(tableId.getNamespace()), tableId.getTableName());
  TableDescriptorBuilder tdBuilder = HBaseTableUtil.getTableDescriptorBuilder(hBaseTableId, cConf);
  ColumnFamilyDescriptorBuilder cfdBuilder =
    HBaseTableUtil.getColumnFamilyDescriptorBuilder(Bytes.toString(QueueEntryRow.COLUMN_FAMILY), hConf);
  tdBuilder.addColumnFamily(cfdBuilder.build());
  tdBuilder.addProperty(QueueConstants.DISTRIBUTOR_BUCKETS, Integer.toString(splits));
  // Idempotent create, pre-split with the computed keys.
  try (HBaseDDLExecutor ddlExecutor = ddlExecutorFactory.get()) {
    ddlExecutor.createTableIfNotExists(tdBuilder.build(), splitKeys);
  }
  HTable hTable = tableUtil.createHTable(hConf, hBaseTableId);
  // Buffer writes client-side and flush manually for throughput.
  hTable.setWriteBufferSize(Constants.Stream.HBASE_WRITE_BUFFER_SIZE);
  hTable.setAutoFlushTo(false);
  return new HBaseStreamFileConsumer(cConf, streamConfig, consumerConfig, tableUtil, hTable, reader,
                                     stateStore, beginConsumerState, extraFilter,
                                     createKeyDistributor(hTable.getTableDescriptor()));
}
.addColumnFamily(cfdBuilder.build()) .addProperty(Constants.MessagingSystem.HBASE_MESSAGING_TABLE_PREFIX_NUM_BYTES, Integer.toString(1)) .addProperty(Constants.MessagingSystem.KEY_DISTRIBUTOR_BUCKETS_ATTR, Integer.toString(splits))
.addColumnFamily(cfdBuilder.build()) .addProperty(Constants.MessagingSystem.HBASE_MESSAGING_TABLE_PREFIX_NUM_BYTES, Integer.toString(1)) .addProperty(Constants.MessagingSystem.KEY_DISTRIBUTOR_BUCKETS_ATTR, Integer.toString(splits))
/**
 * Creates the HBase table for the given logical table id if it does not already exist,
 * with a single column family named "d".
 *
 * @param tableId logical id of the table to create
 * @throws IOException if table creation fails
 */
private void create(TableId tableId) throws IOException {
  HBaseTableUtil util = getTableUtil();
  // Map the logical id to the physical HBase table id within its namespace.
  TableId physicalTableId =
    util.createHTableId(new NamespaceId(tableId.getNamespace()), tableId.getTableName());
  TableDescriptorBuilder descriptor = HBaseTableUtil.getTableDescriptorBuilder(physicalTableId, cConf);
  descriptor.addColumnFamily(
    HBaseTableUtil.getColumnFamilyDescriptorBuilder("d", hAdmin.getConfiguration()).build());
  // Idempotent; null split keys means a single region.
  ddlExecutor.createTableIfNotExists(descriptor.build(), null);
}
/**
 * Creates a {@link StreamConsumerStateStore} for the given stream, lazily creating the
 * backing HBase state-store table for the stream's namespace on first use.
 *
 * <p>Synchronized so concurrent callers in this process do not race on the
 * existence-check / create sequence; {@code createTableIfNotExists} keeps the create
 * idempotent across processes.
 *
 * @param streamConfig configuration of the stream
 * @return a new {@code HBaseStreamConsumerStateStore}
 * @throws IOException if the existence check, table creation, or HTable setup fails
 */
@Override
public synchronized StreamConsumerStateStore create(StreamConfig streamConfig) throws IOException {
  NamespaceId namespace = streamConfig.getStreamId().getParent();
  // One state-store table per namespace.
  TableId streamStateStoreTableId = StreamUtils.getStateStoreTableId(namespace);
  TableId hbaseTableId = tableUtil.createHTableId(new NamespaceId(streamStateStoreTableId.getNamespace()),
                                                  streamStateStoreTableId.getTableName());
  boolean tableExist;
  // Admin handle is only needed for the existence check; close it promptly.
  try (HBaseAdmin admin = new HBaseAdmin(hConf)) {
    tableExist = tableUtil.tableExists(admin, hbaseTableId);
  }
  if (!tableExist) {
    try (HBaseDDLExecutor ddlExecutor = ddlExecutorFactory.get()) {
      TableDescriptorBuilder tdBuilder = HBaseTableUtil.getTableDescriptorBuilder(hbaseTableId, cConf);
      ColumnFamilyDescriptorBuilder cfdBuilder =
        HBaseTableUtil.getColumnFamilyDescriptorBuilder(Bytes.toString(QueueEntryRow.COLUMN_FAMILY), hConf);
      tdBuilder.addColumnFamily(cfdBuilder.build());
      // Benign race across processes: createTableIfNotExists is a no-op if another
      // process created the table between the check above and this call.
      ddlExecutor.createTableIfNotExists(tdBuilder.build(), null);
    }
  }
  HTable hTable = tableUtil.createHTable(hConf, hbaseTableId);
  // Buffer writes client-side and flush manually for throughput.
  hTable.setWriteBufferSize(Constants.Stream.HBASE_WRITE_BUFFER_SIZE);
  hTable.setAutoFlushTo(false);
  return new HBaseStreamConsumerStateStore(streamConfig, hTable);
}