// NOTE(review): fragment is truncated here — the '{' opened below is closed
// outside this view. As written, the null guard is dead code: getBloomFilterType()
// returns an enum constant, and toString() on a non-null enum never yields null.
// If a default is really needed, the null check should be on getBloomFilterType()
// itself, before calling toString(). TODO confirm against the full file.
String bloomType = familyDescriptor.getBloomFilterType().toString(); if (bloomType == null) { bloomType = HColumnDescriptor.DEFAULT_BLOOMFILTER;
/**
 * Builds a new Thrift {@code ColumnDescriptor} struct mirroring the settings
 * of an HBase {@code HColumnDescriptor}.
 *
 * @param in HBase column family descriptor to convert
 * @return a Thrift ColumnDescriptor carrying the same name (with the trailing
 *         family delimiter appended), max versions, compression, caching,
 *         bloom filter and TTL settings
 */
public static ColumnDescriptor colDescFromHbase(HColumnDescriptor in) {
  ColumnDescriptor out = new ColumnDescriptor();
  // Thrift column names carry the trailing family delimiter (':').
  out.name = ByteBuffer.wrap(Bytes.add(in.getName(), KeyValue.COLUMN_FAMILY_DELIM_ARRAY));
  out.maxVersions = in.getMaxVersions();
  out.compression = in.getCompressionType().toString();
  out.inMemory = in.isInMemory();
  out.blockCacheEnabled = in.isBlockCacheEnabled();
  out.bloomFilterType = in.getBloomFilterType().toString();
  out.timeToLive = in.getTimeToLive();
  return out;
}
// Verify the on-disk HFile metadata matches the column family configuration.
// The bloom filter type was stored as a UTF-8 string in the file info block.
assertEquals("Incorrect bloom filter used for column family " + familyStr + "(reader: " + reader + ")", hcd.getBloomFilterType(), BloomType.valueOf(Bytes.toString(bloomFilter)));
// Compression algorithm is read back from the reader's file context.
assertEquals("Incorrect compression used for column family " + familyStr + "(reader: " + reader + ")", hcd.getCompressionType(), reader.getFileContext().getCompression());
// Round-trip check: the deserialized descriptor must retain the explicitly
// configured compression, data block encoding and bloom filter settings.
assertTrue(deserializedHcd.getCompressionType().equals(Compression.Algorithm.SNAPPY));
assertTrue(deserializedHcd.getDataBlockEncoding().equals(DataBlockEncoding.FAST_DIFF));
assertTrue(deserializedHcd.getBloomFilterType().equals(BloomType.ROW));
// MOB settings must survive serialization unchanged as well.
assertEquals(hcd.isMobEnabled(), deserializedHcd.isMobEnabled());
assertEquals(hcd.getMobThreshold(), deserializedHcd.getMobThreshold());
/**
 * Maps the server-side bloom filter type of the given column family to the
 * corresponding local {@code BloomType} constant.
 *
 * @param columnDescriptor column family whose bloom filter setting is read
 * @return the matching BloomType (ROW, ROWCOL or NONE)
 * @throws IllegalArgumentException for any other server-side bloom type
 */
@Override
public BloomType getBloomFilter(HColumnDescriptor columnDescriptor) {
  org.apache.hadoop.hbase.regionserver.BloomType serverType =
      columnDescriptor.getBloomFilterType();
  if (serverType == org.apache.hadoop.hbase.regionserver.BloomType.ROW) {
    return BloomType.ROW;
  }
  if (serverType == org.apache.hadoop.hbase.regionserver.BloomType.ROWCOL) {
    return BloomType.ROWCOL;
  }
  if (serverType == org.apache.hadoop.hbase.regionserver.BloomType.NONE) {
    return BloomType.NONE;
  }
  throw new IllegalArgumentException("Unsupported bloom filter type: " + serverType);
}
/**
 * Translates the column family's server-side bloom filter type into the local
 * {@code BloomType} enum.
 *
 * @param columnDescriptor column family whose bloom filter setting is read
 * @return the matching BloomType (ROW, ROWCOL or NONE)
 * @throws IllegalArgumentException for any other server-side bloom type
 */
@Override
public BloomType getBloomFilter(HColumnDescriptor columnDescriptor) {
  final org.apache.hadoop.hbase.regionserver.BloomType type =
      columnDescriptor.getBloomFilterType();
  switch (type) {
    case ROW:
      return BloomType.ROW;
    case ROWCOL:
      return BloomType.ROWCOL;
    case NONE:
      return BloomType.NONE;
    default:
      break;
  }
  throw new IllegalArgumentException("Unsupported bloom filter type: " + type);
}
/**
 * Maps the server-side bloom filter type of the given column family to the
 * corresponding local {@code BloomType} constant.
 *
 * @param columnDescriptor column family whose bloom filter setting is read
 * @return the matching BloomType (ROW, ROWCOL or NONE)
 * @throws IllegalArgumentException for any other server-side bloom type
 */
@Override
public BloomType getBloomFilter(HColumnDescriptor columnDescriptor) {
  org.apache.hadoop.hbase.regionserver.BloomType serverType =
      columnDescriptor.getBloomFilterType();
  if (serverType == org.apache.hadoop.hbase.regionserver.BloomType.ROW) {
    return BloomType.ROW;
  }
  if (serverType == org.apache.hadoop.hbase.regionserver.BloomType.ROWCOL) {
    return BloomType.ROWCOL;
  }
  if (serverType == org.apache.hadoop.hbase.regionserver.BloomType.NONE) {
    return BloomType.NONE;
  }
  throw new IllegalArgumentException("Unsupported bloom filter type: " + serverType);
}
/**
 * Translates the column family's server-side bloom filter type into the local
 * {@code BloomType} enum.
 *
 * @param columnDescriptor column family whose bloom filter setting is read
 * @return the matching BloomType (ROW, ROWCOL or NONE)
 * @throws IllegalArgumentException for any other server-side bloom type
 */
@Override
public BloomType getBloomFilter(HColumnDescriptor columnDescriptor) {
  final org.apache.hadoop.hbase.regionserver.BloomType type =
      columnDescriptor.getBloomFilterType();
  switch (type) {
    case ROW:
      return BloomType.ROW;
    case ROWCOL:
      return BloomType.ROWCOL;
    case NONE:
      return BloomType.NONE;
    default:
      break;
  }
  throw new IllegalArgumentException("Unsupported bloom filter type: " + type);
}
/**
 * Maps the server-side bloom filter type of the given column family to the
 * corresponding local {@code BloomType} constant.
 *
 * @param columnDescriptor column family whose bloom filter setting is read
 * @return the matching BloomType (ROW, ROWCOL or NONE)
 * @throws IllegalArgumentException for any other server-side bloom type
 */
@Override
public BloomType getBloomFilter(HColumnDescriptor columnDescriptor) {
  org.apache.hadoop.hbase.regionserver.BloomType serverType =
      columnDescriptor.getBloomFilterType();
  if (serverType == org.apache.hadoop.hbase.regionserver.BloomType.ROW) {
    return BloomType.ROW;
  }
  if (serverType == org.apache.hadoop.hbase.regionserver.BloomType.ROWCOL) {
    return BloomType.ROWCOL;
  }
  if (serverType == org.apache.hadoop.hbase.regionserver.BloomType.NONE) {
    return BloomType.NONE;
  }
  throw new IllegalArgumentException("Unsupported bloom filter type: " + serverType);
}
/**
 * Translates the column family's server-side bloom filter type into the local
 * {@code BloomType} enum.
 *
 * @param columnDescriptor column family whose bloom filter setting is read
 * @return the matching BloomType (ROW, ROWCOL or NONE)
 * @throws IllegalArgumentException for any other server-side bloom type
 */
@Override
public BloomType getBloomFilter(HColumnDescriptor columnDescriptor) {
  final org.apache.hadoop.hbase.regionserver.BloomType type =
      columnDescriptor.getBloomFilterType();
  switch (type) {
    case ROW:
      return BloomType.ROW;
    case ROWCOL:
      return BloomType.ROWCOL;
    case NONE:
      return BloomType.NONE;
    default:
      break;
  }
  throw new IllegalArgumentException("Unsupported bloom filter type: " + type);
}
/**
 * Maps the server-side bloom filter type of the given column family to the
 * corresponding local {@code BloomType} constant.
 *
 * @param columnDescriptor column family whose bloom filter setting is read
 * @return the matching BloomType (ROW, ROWCOL or NONE)
 * @throws IllegalArgumentException for any other server-side bloom type
 */
@Override
public BloomType getBloomFilter(HColumnDescriptor columnDescriptor) {
  org.apache.hadoop.hbase.regionserver.BloomType serverType =
      columnDescriptor.getBloomFilterType();
  if (serverType == org.apache.hadoop.hbase.regionserver.BloomType.ROW) {
    return BloomType.ROW;
  }
  if (serverType == org.apache.hadoop.hbase.regionserver.BloomType.ROWCOL) {
    return BloomType.ROWCOL;
  }
  if (serverType == org.apache.hadoop.hbase.regionserver.BloomType.NONE) {
    return BloomType.NONE;
  }
  throw new IllegalArgumentException("Unsupported bloom filter type: " + serverType);
}
/**
 * Translates the column family's server-side bloom filter type into the local
 * {@code BloomType} enum.
 *
 * @param columnDescriptor column family whose bloom filter setting is read
 * @return the matching BloomType (ROW, ROWCOL or NONE)
 * @throws IllegalArgumentException for any other server-side bloom type
 */
@Override
public BloomType getBloomFilter(HColumnDescriptor columnDescriptor) {
  final org.apache.hadoop.hbase.regionserver.BloomType type =
      columnDescriptor.getBloomFilterType();
  switch (type) {
    case ROW:
      return BloomType.ROW;
    case ROWCOL:
      return BloomType.ROWCOL;
    case NONE:
      return BloomType.NONE;
    default:
      break;
  }
  throw new IllegalArgumentException("Unsupported bloom filter type: " + type);
}
/**
 * Enables the given bloom filter type on this column family if none is
 * currently configured. The table is disabled, modified and re-enabled as
 * part of the change; if a bloom filter is already set, this is a no-op.
 *
 * @param bloomType bloom filter type to configure on the family
 * @return this FamilyAdmin (or the result of re-enabling the table)
 */
public FamilyAdmin enableBloomFilter(StoreFile.BloomType bloomType) {
  HColumnDescriptor family = desc.getFamily(familyName);
  // Guard: nothing to do when a bloom filter is already configured.
  if (family.getBloomFilterType() != StoreFile.BloomType.NONE) {
    LOG.info("Bloom Filter for family [" + Bytes.toString(familyName) + "] enabled.");
    return this;
  }
  LOG.info("Enable Bloom Filter for family [" + Bytes.toString(familyName) + "].");
  disableTable();
  family.setBloomFilterType(bloomType);
  desc.addFamily(family);
  return modifyAndEnable();
}
// Opens the store file at path p and eagerly creates its reader so that any
// file problem surfaces inside this (presumably parallel) open task rather
// than on first read. NOTE(review): trailing "} });" closes an enclosing
// anonymous class / call that starts outside this view.
public StoreFile call() throws IOException {
  StoreFile storeFile = new StoreFile(fs, p, conf, cacheConf,
      family.getBloomFilterType(), dataBlockEncoder);
  // Register this file with the schema metrics collector before opening it.
  passSchemaMetricsTo(storeFile);
  storeFile.createReader();
  return storeFile;
} });
/**
 * Translates an HBase {@code HColumnDescriptor} into the local
 * {@code ColumnFamilyDescriptor} model, copying name, max versions,
 * compression, bloom filter type and all raw key/value properties.
 *
 * @param descriptor HBase column family descriptor to translate
 * @return an equivalent ColumnFamilyDescriptor
 */
private static ColumnFamilyDescriptor getColumnFamilyDescriptor(HColumnDescriptor descriptor) {
  ColumnFamilyDescriptor.CompressionType compressionType =
      ColumnFamilyDescriptor.CompressionType.valueOf(
          descriptor.getCompressionType().getName().toUpperCase());
  ColumnFamilyDescriptor.BloomType bloomType =
      ColumnFamilyDescriptor.BloomType.valueOf(
          descriptor.getBloomFilterType().name().toUpperCase());
  // Copy every raw descriptor setting over as a String -> String property.
  Map<String, String> properties = new HashMap<>();
  for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry
      : descriptor.getValues().entrySet()) {
    properties.put(
        org.apache.hadoop.hbase.util.Bytes.toString(entry.getKey().get()),
        org.apache.hadoop.hbase.util.Bytes.toString(entry.getValue().get()));
  }
  return new ColumnFamilyDescriptor(descriptor.getNameAsString(),
      descriptor.getMaxVersions(), compressionType, bloomType, properties);
}
}
/**
 * Converts an HBase {@code HColumnDescriptor} to the local
 * {@code ColumnFamilyDescriptor} representation: name, max versions,
 * compression, bloom filter type and the raw key/value property map.
 *
 * @param descriptor HBase column family descriptor to convert
 * @return an equivalent ColumnFamilyDescriptor
 */
private static ColumnFamilyDescriptor getColumnFamilyDescriptor(HColumnDescriptor descriptor) {
  ColumnFamilyDescriptor.CompressionType compressionType =
      ColumnFamilyDescriptor.CompressionType.valueOf(
          descriptor.getCompressionType().getName().toUpperCase());
  ColumnFamilyDescriptor.BloomType bloomType =
      ColumnFamilyDescriptor.BloomType.valueOf(
          descriptor.getBloomFilterType().name().toUpperCase());
  // Raw descriptor settings are carried over as String -> String properties.
  Map<String, String> properties = new HashMap<>();
  for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry
      : descriptor.getValues().entrySet()) {
    properties.put(
        org.apache.hadoop.hbase.util.Bytes.toString(entry.getKey().get()),
        org.apache.hadoop.hbase.util.Bytes.toString(entry.getValue().get()));
  }
  return new ColumnFamilyDescriptor(descriptor.getNameAsString(),
      descriptor.getMaxVersions(), compressionType, bloomType, properties);
}
}
/**
 * Translates an HBase {@code HColumnDescriptor} into the local
 * {@code ColumnFamilyDescriptor} model, copying name, max versions,
 * compression, bloom filter type and all raw key/value properties.
 *
 * @param descriptor HBase column family descriptor to translate
 * @return an equivalent ColumnFamilyDescriptor
 */
private static ColumnFamilyDescriptor getColumnFamilyDescriptor(HColumnDescriptor descriptor) {
  ColumnFamilyDescriptor.CompressionType compressionType =
      ColumnFamilyDescriptor.CompressionType.valueOf(
          descriptor.getCompressionType().getName().toUpperCase());
  ColumnFamilyDescriptor.BloomType bloomType =
      ColumnFamilyDescriptor.BloomType.valueOf(
          descriptor.getBloomFilterType().name().toUpperCase());
  // Copy every raw descriptor setting over as a String -> String property.
  Map<String, String> properties = new HashMap<>();
  for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry
      : descriptor.getValues().entrySet()) {
    properties.put(
        org.apache.hadoop.hbase.util.Bytes.toString(entry.getKey().get()),
        org.apache.hadoop.hbase.util.Bytes.toString(entry.getValue().get()));
  }
  return new ColumnFamilyDescriptor(descriptor.getNameAsString(),
      descriptor.getMaxVersions(), compressionType, bloomType, properties);
}
}
/**
 * Opens the store file described by {@code info} and eagerly creates its
 * reader so that file problems surface here rather than on first read.
 *
 * @param info store file metadata to open
 * @return the opened StoreFile with an initialized reader
 * @throws IOException if the file cannot be opened or its reader created
 */
private StoreFile createStoreFileAndReader(final StoreFileInfo info) throws IOException {
  info.setRegionCoprocessorHost(this.region.getCoprocessorHost());
  final StoreFile sf = new StoreFile(this.getFileSystem(), info, this.conf,
      this.cacheConf, this.family.getBloomFilterType());
  final StoreFile.Reader reader = sf.createReader();
  // Tag the reader with whether this store belongs to the primary replica.
  reader.setReplicaStoreFile(isPrimaryReplicaStore());
  return sf;
}
// Round-trip check: the deserialized descriptor must retain the explicitly
// configured compression, data block encoding and bloom filter settings.
assertTrue(deserializedHcd.getCompressionType().equals(Compression.Algorithm.SNAPPY));
assertTrue(deserializedHcd.getDataBlockEncoding().equals(DataBlockEncoding.FAST_DIFF));
assertTrue(deserializedHcd.getBloomFilterType().equals(BloomType.ROW));
// MOB settings must survive serialization unchanged as well.
assertEquals(hcd.isMobEnabled(), deserializedHcd.isMobEnabled());
assertEquals(hcd.getMobThreshold(), deserializedHcd.getMobThreshold());
/**
 * Creates an HFile output target at {@code path}, propagating the column
 * family's compression, data block encoding and bloom filter settings into
 * the output configuration when a descriptor is supplied.
 *
 * @param path output path for the generated HFiles
 * @param hcol column family descriptor to mirror, or null to use defaults
 */
public HFileTarget(Path path, HColumnDescriptor hcol) {
  super(path, HFileOutputFormatForCrunch.class, SequentialFileNamingScheme.getInstance());
  // Without a descriptor, leave the output format at its defaults.
  if (hcol == null) {
    return;
  }
  outputConf(HFileOutputFormatForCrunch.HCOLUMN_DESCRIPTOR_COMPRESSION_TYPE_KEY,
      hcol.getCompressionType().getName());
  outputConf(HFileOutputFormatForCrunch.HCOLUMN_DESCRIPTOR_DATA_BLOCK_ENCODING_KEY,
      hcol.getDataBlockEncoding().name());
  outputConf(HFileOutputFormatForCrunch.HCOLUMN_DESCRIPTOR_BLOOM_FILTER_TYPE_KEY,
      hcol.getBloomFilterType().name());
}