@Override public void perform() throws IOException { LOG.debug("Performing action: Changing encodings on " + tableName); // possible DataBlockEncoding id's final int[] possibleIds = {0, 2, 3, 4, 6}; modifyAllTableColumns(tableName, (columnName, columnBuilder) -> { short id = (short) possibleIds[random.nextInt(possibleIds.length)]; DataBlockEncoding encoding = DataBlockEncoding.getEncodingById(id); columnBuilder.setDataBlockEncoding(encoding); LOG.debug("Set encoding of column family " + columnName + " to: " + encoding); }); } }
/**
 * Writes a 1000-row HFile with {@code bulkloadEncoding}, splits it at a
 * mid-range row against a column family configured with {@code cfEncoding},
 * and asserts that the two halves together still contain every row.
 */
private void testSplitStoreFileWithDifferentEncoding(DataBlockEncoding bulkloadEncoding,
    DataBlockEncoding cfEncoding) throws IOException {
  Path dir = util.getDataTestDirOnTestFS("testSplitHFileWithDifferentEncoding");
  FileSystem fs = util.getTestFileSystem();
  Path testIn = new Path(dir, "testhfile");

  ColumnFamilyDescriptor familyDesc =
      ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).setDataBlockEncoding(cfEncoding).build();
  HFileTestUtil.createHFileWithDataBlockEncoding(util.getConfiguration(), fs, testIn,
      bulkloadEncoding, FAMILY, QUALIFIER, Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000);

  Path bottomOut = new Path(dir, "bottom.out");
  Path topOut = new Path(dir, "top.out");
  // Split at "ggg", roughly the middle of the aaa..zzz key range.
  LoadIncrementalHFiles.splitStoreFile(util.getConfiguration(), testIn, familyDesc,
      Bytes.toBytes("ggg"), bottomOut, topOut);

  // No row may be lost or duplicated by the split.
  int totalRows = verifyHFile(bottomOut) + verifyHFile(topOut);
  assertEquals(1000, totalRows);
}
// Fragment (enclosing call starts before this view): builds a CF descriptor with
// compression, bloom filter, max versions and data block encoding, alongside a fresh
// block cache — presumably arguments to a test-region factory; TODO confirm in full file.
ColumnFamilyDescriptorBuilder.newBuilder(FAMILY_BYTES).setCompressionType(comprAlgo) .setBloomFilterType(bloomType).setMaxVersions(MAX_VERSIONS) .setDataBlockEncoding(dataBlockEncoding).build(), BlockCacheFactory.createBlockCache(TEST_UTIL.getConfiguration())); List<String> rows = sequentialStrings("row", NUM_ROWS);
// Fragment of a descriptor-builder chain: sets encoding, block size, and bloom type
// NONE; the chain begins and continues outside this view.
setDataBlockEncoding(encoding). setBlocksize(BLOCK_SIZE). setBloomFilterType(BloomType.NONE).
// Fragment: configures a CF whose encoding comes from NoOpDataBlockEncoder
// (presumably DataBlockEncoding.NONE — TODO confirm), then creates a test region
// with it; rowIdx looks like a row counter initialised for subsequent loops.
ColumnFamilyDescriptorBuilder.newBuilder(cfBytes).setCompressionType(compress) .setBloomFilterType(BLOOM_TYPE).setMaxVersions(maxVersions) .setDataBlockEncoding(NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding()).build(); HRegion region = TEST_UTIL.createTestRegion(table, cfd, blockCache); int rowIdx = 0;
/** * Verify that compression and data block encoding are respected by the * Store.createWriterInTmp() method, used on store flush. */ @Test public void testCreateWriter() throws Exception { Configuration conf = HBaseConfiguration.create(); FileSystem fs = FileSystem.get(conf); ColumnFamilyDescriptor hcd = ColumnFamilyDescriptorBuilder.newBuilder(family) .setCompressionType(Compression.Algorithm.GZ).setDataBlockEncoding(DataBlockEncoding.DIFF) .build(); init(name.getMethodName(), conf, hcd); // Test createWriterInTmp() StoreFileWriter writer = store.createWriterInTmp(4, hcd.getCompressionType(), false, true, false, false); Path path = writer.getPath(); writer.append(new KeyValue(row, family, qf1, Bytes.toBytes(1))); writer.append(new KeyValue(row, family, qf2, Bytes.toBytes(2))); writer.append(new KeyValue(row2, family, qf1, Bytes.toBytes(3))); writer.append(new KeyValue(row2, family, qf2, Bytes.toBytes(4))); writer.close(); // Verify that compression and encoding settings are respected HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), true, conf); assertEquals(hcd.getCompressionType(), reader.getCompressionAlgorithm()); assertEquals(hcd.getDataBlockEncoding(), reader.getDataBlockEncoding()); reader.close(); }
// Fragment: finishes a CF builder with an encoding looked up by numeric id, then
// begins a TableDescriptor from 'selected' — both ends are cut off in this view.
.setDataBlockEncoding(DataBlockEncoding.getEncodingById(id)) .build(); TableDescriptor td = TableDescriptorBuilder.newBuilder(selected)
// Converts the Thrift-level encoding value to the native DataBlockEncoding and
// applies it to the column family builder.
builder.setDataBlockEncoding(dataBlockEncodingFromThrift(in.getDataBlockEncoding()));
// Applies the previously resolved data block encoding algorithm to the column
// descriptor builder (resolution happens outside this view).
columnDescBuilder.setDataBlockEncoding(dataBlockEncodingAlgo);
/**
 * Builds a {@code ColumnFamilyDescriptor} for the given family-bytes/properties
 * pair. Non-VIEW tables get the default data block encoding, bloom filters
 * disabled, and every supplied property applied; VIEW tables get only the bare
 * descriptor (presumably they inherit physical-table settings — TODO confirm).
 *
 * @throws SQLException if a supplied property value is rejected
 */
private ColumnFamilyDescriptor generateColumnFamilyDescriptor(
    Pair<byte[], Map<String, Object>> family, PTableType tableType) throws SQLException {
  ColumnFamilyDescriptorBuilder builder =
      ColumnFamilyDescriptorBuilder.newBuilder(family.getFirst());
  if (tableType == PTableType.VIEW) {
    // Views take no per-family configuration here.
    return builder.build();
  }
  builder.setDataBlockEncoding(SchemaUtil.DEFAULT_DATA_BLOCK_ENCODING);
  builder.setBloomFilterType(BloomType.NONE);
  for (Entry<String, Object> property : family.getSecond().entrySet()) {
    setHColumnDescriptorValue(builder, property.getKey(), property.getValue());
  }
  return builder.build();
}
// Fragment: snapshots isInMemory from a freshly built descriptor, then mutates the
// builder with replication scope, FAST_DIFF encoding, ROW blooms and SNAPPY
// compression — surrounding context (and use of 'inmemory') is not visible here.
boolean inmemory = builder.build().isInMemory(); builder.setScope(v); builder.setDataBlockEncoding(DataBlockEncoding.FAST_DIFF); builder.setBloomFilterType(BloomType.ROW); builder.setCompressionType(Algorithm.SNAPPY);
// Builds column family "D" (name presumed from the variable — TODO confirm) with
// PREFIX data block encoding and adds it to the table via the Admin API.
familyDBuilder.setDataBlockEncoding(DataBlockEncoding.PREFIX); admin.addColumnFamily(tableDescriptor.getTableName(), familyDBuilder.build());
/**
 * Builds a ColumnFamilyDescriptor for the given family-bytes/properties pair.
 * Non-VIEW tables get the default data block encoding, bloom filters disabled,
 * and each supplied property applied via setHColumnDescriptorValue; VIEW tables
 * get only the bare descriptor (presumably inheriting settings — TODO confirm).
 */
private ColumnFamilyDescriptor generateColumnFamilyDescriptor(Pair<byte[],Map<String,Object>> family, PTableType tableType) throws SQLException { ColumnFamilyDescriptorBuilder columnDescBuilder = ColumnFamilyDescriptorBuilder.newBuilder(family.getFirst()); if (tableType != PTableType.VIEW) { columnDescBuilder.setDataBlockEncoding(SchemaUtil.DEFAULT_DATA_BLOCK_ENCODING); columnDescBuilder.setBloomFilterType(BloomType.NONE); for (Entry<String,Object> entry : family.getSecond().entrySet()) { String key = entry.getKey(); Object value = entry.getValue(); setHColumnDescriptorValue(columnDescBuilder, key, value); } } return columnDescBuilder.build(); }
/**
 * Builds a ColumnFamilyDescriptor for the given family-bytes/properties pair.
 * Non-VIEW tables get the default data block encoding, bloom filters disabled,
 * and each supplied property applied via setHColumnDescriptorValue; VIEW tables
 * get only the bare descriptor (presumably inheriting settings — TODO confirm).
 */
private ColumnFamilyDescriptor generateColumnFamilyDescriptor(Pair<byte[],Map<String,Object>> family, PTableType tableType) throws SQLException { ColumnFamilyDescriptorBuilder columnDescBuilder = ColumnFamilyDescriptorBuilder.newBuilder(family.getFirst()); if (tableType != PTableType.VIEW) { columnDescBuilder.setDataBlockEncoding(SchemaUtil.DEFAULT_DATA_BLOCK_ENCODING); columnDescBuilder.setBloomFilterType(BloomType.NONE); for (Entry<String,Object> entry : family.getSecond().entrySet()) { String key = entry.getKey(); Object value = entry.getValue(); setHColumnDescriptorValue(columnDescBuilder, key, value); } } return columnDescBuilder.build(); }
// Chaos action: picks a random valid DataBlockEncoding id ({0,2,3,4,6}; id 5 is
// skipped, presumably the removed PREFIX_TREE — TODO confirm) and applies it to
// every column family of tableName. The trailing brace closes the enclosing class.
@Override public void perform() throws IOException { LOG.debug("Performing action: Changing encodings on " + tableName); // possible DataBlockEncoding id's final int[] possibleIds = {0, 2, 3, 4, 6}; modifyAllTableColumns(tableName, (columnName, columnBuilder) -> { short id = (short) possibleIds[random.nextInt(possibleIds.length)]; DataBlockEncoding encoding = DataBlockEncoding.getEncodingById(id); columnBuilder.setDataBlockEncoding(encoding); LOG.debug("Set encoding of column family " + columnName + " to: " + encoding); }); } }
// Fragment: snapshots isInMemory from a freshly built descriptor, then mutates the
// builder with replication scope, FAST_DIFF encoding, ROW blooms and SNAPPY
// compression — surrounding context (and use of 'inmemory') is not visible here.
boolean inmemory = builder.build().isInMemory(); builder.setScope(v); builder.setDataBlockEncoding(DataBlockEncoding.FAST_DIFF); builder.setBloomFilterType(BloomType.ROW); builder.setCompressionType(Algorithm.SNAPPY);
// Writes a 1000-row HFile (keys "aaa".."zzz") with bulkloadEncoding, splits it at
// row "ggg" against a family configured with cfEncoding, and asserts the bottom and
// top halves still total exactly 1000 rows.
private void testSplitStoreFileWithDifferentEncoding(DataBlockEncoding bulkloadEncoding, DataBlockEncoding cfEncoding) throws IOException { Path dir = util.getDataTestDirOnTestFS("testSplitHFileWithDifferentEncoding"); FileSystem fs = util.getTestFileSystem(); Path testIn = new Path(dir, "testhfile"); ColumnFamilyDescriptor familyDesc = ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).setDataBlockEncoding(cfEncoding).build(); HFileTestUtil.createHFileWithDataBlockEncoding(util.getConfiguration(), fs, testIn, bulkloadEncoding, FAMILY, QUALIFIER, Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000); Path bottomOut = new Path(dir, "bottom.out"); Path topOut = new Path(dir, "top.out"); LoadIncrementalHFiles.splitStoreFile(util.getConfiguration(), testIn, familyDesc, Bytes.toBytes("ggg"), bottomOut, topOut); int rowCount = verifyHFile(bottomOut); rowCount += verifyHFile(topOut); assertEquals(1000, rowCount); }
// Flush-path test: writes four KeyValues through Store.createWriterInTmp() with GZ
// compression + DIFF encoding, then re-reads the produced file to assert both
// settings were honoured. NOTE(review): the reader is not closed if an assertEquals
// fails — consider try-with-resources.
/** * Verify that compression and data block encoding are respected by the * Store.createWriterInTmp() method, used on store flush. */ @Test public void testCreateWriter() throws Exception { Configuration conf = HBaseConfiguration.create(); FileSystem fs = FileSystem.get(conf); ColumnFamilyDescriptor hcd = ColumnFamilyDescriptorBuilder.newBuilder(family) .setCompressionType(Compression.Algorithm.GZ).setDataBlockEncoding(DataBlockEncoding.DIFF) .build(); init(name.getMethodName(), conf, hcd); // Test createWriterInTmp() StoreFileWriter writer = store.createWriterInTmp(4, hcd.getCompressionType(), false, true, false, false); Path path = writer.getPath(); writer.append(new KeyValue(row, family, qf1, Bytes.toBytes(1))); writer.append(new KeyValue(row, family, qf2, Bytes.toBytes(2))); writer.append(new KeyValue(row2, family, qf1, Bytes.toBytes(3))); writer.append(new KeyValue(row2, family, qf2, Bytes.toBytes(4))); writer.close(); // Verify that compression and encoding settings are respected HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), true, conf); assertEquals(hcd.getCompressionType(), reader.getCompressionAlgorithm()); assertEquals(hcd.getDataBlockEncoding(), reader.getDataBlockEncoding()); reader.close(); }
// Fragment: finishes a CF builder with an encoding looked up by numeric id, then
// begins a TableDescriptor from 'selected' — both ends are cut off in this view.
.setDataBlockEncoding(DataBlockEncoding.getEncodingById(id)) .build(); TableDescriptor td = TableDescriptorBuilder.newBuilder(selected)
// Applies the previously resolved data block encoding algorithm to the column
// descriptor builder (resolution happens outside this view).
columnDescBuilder.setDataBlockEncoding(dataBlockEncodingAlgo);