/**
 * Returns the compression algorithm configured for this column family.
 * Delegates to {@link #getCompressionType()}.
 *
 * @return compression type being used for the column family
 * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
 * (<a href="https://issues.apache.org/jira/browse/HBASE-13655">HBASE-13655</a>).
 * Use {@link #getCompressionType()}.
 */
@Deprecated
public Compression.Algorithm getCompression() {
  return getCompressionType();
}
/**
 * Builds a new Thrift {@code ColumnDescriptor} "struct" mirroring the settings
 * of the supplied HBase {@code HColumnDescriptor}.
 *
 * @param in
 *          Hbase HColumnDescriptor object
 * @return Thrift ColumnDescriptor
 */
static public ColumnDescriptor colDescFromHbase(HColumnDescriptor in) {
  ColumnDescriptor out = new ColumnDescriptor();
  // Thrift column family names carry the trailing family delimiter byte.
  byte[] nameWithDelim = Bytes.add(in.getName(), KeyValue.COLUMN_FAMILY_DELIM_ARRAY);
  out.name = ByteBuffer.wrap(nameWithDelim);
  out.maxVersions = in.getMaxVersions();
  out.inMemory = in.isInMemory();
  out.blockCacheEnabled = in.isBlockCacheEnabled();
  out.timeToLive = in.getTimeToLive();
  // Enum-valued settings travel as their string form in the Thrift struct.
  out.compression = in.getCompressionType().toString();
  out.bloomFilterType = in.getBloomFilterType().toString();
  return out;
}
/**
 * Returns the compression algorithm configured for this column family.
 * Delegates to {@link #getCompressionType()}.
 *
 * @return compression type being used for the column family
 * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
 * (<a href="https://issues.apache.org/jira/browse/HBASE-13655">HBASE-13655</a>).
 * Use {@link #getCompressionType()}.
 */
@Deprecated
public Compression.Algorithm getCompression() {
  return getCompressionType();
}
hcd.getBloomFilterType(), BloomType.valueOf(Bytes.toString(bloomFilter))); assertEquals("Incorrect compression used for column family " + familyStr + "(reader: " + reader + ")", hcd.getCompressionType(), reader.getFileContext().getCompression());
// Verify the deserialized column descriptor round-tripped every family
// setting: in-memory flag, replication scope, compression, block encoding
// and bloom filter type.
assertEquals(inmemory, deserializedHcd.isInMemory());
assertEquals(hcd.getScope(), deserializedHcd.getScope());
assertTrue(deserializedHcd.getCompressionType().equals(Compression.Algorithm.SNAPPY));
assertTrue(deserializedHcd.getDataBlockEncoding().equals(DataBlockEncoding.FAST_DIFF));
assertTrue(deserializedHcd.getBloomFilterType().equals(BloomType.ROW));
// The descriptor's typed accessors should reflect the raw string values
// ("GZ", "6", "3") it was configured with.
Assert.assertEquals(Algorithm.valueOf("GZ"), columnDescriptor.getCompressionType());
Assert.assertEquals(Integer.parseInt("6"), columnDescriptor.getMaxVersions());
Assert.assertEquals(Integer.parseInt("3"), columnDescriptor.getMinVersions());
// The descriptor's typed accessors should reflect the raw string values
// ("GZ", "6", "3") it was configured with.
Assert.assertEquals(Algorithm.valueOf("GZ"), columnDescriptor.getCompressionType());
Assert.assertEquals(Integer.parseInt("6"), columnDescriptor.getMaxVersions());
Assert.assertEquals(Integer.parseInt("3"), columnDescriptor.getMinVersions());
/**
 * Returns the compression algorithm configured for this column family.
 * Delegates to {@link #getCompressionType()}.
 *
 * @return compression type being used for the column family
 * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
 * (<a href="https://issues.apache.org/jira/browse/HBASE-13655">HBASE-13655</a>).
 * Use {@link #getCompressionType()}.
 */
@Deprecated
public Compression.Algorithm getCompression() {
  return getCompressionType();
}
/**
 * Maps the HBase compression algorithm of the given column family onto this
 * API's {@code CompressionType}.
 *
 * @param columnDescriptor column family whose compression setting is read
 * @return the corresponding CompressionType
 * @throws IllegalArgumentException if the algorithm has no mapping here
 */
@Override
public CompressionType getCompression(HColumnDescriptor columnDescriptor) {
  final Compression.Algorithm algorithm = columnDescriptor.getCompressionType();
  if (algorithm == Compression.Algorithm.LZO) {
    return CompressionType.LZO;
  }
  if (algorithm == Compression.Algorithm.SNAPPY) {
    return CompressionType.SNAPPY;
  }
  if (algorithm == Compression.Algorithm.GZ) {
    return CompressionType.GZIP;
  }
  if (algorithm == Compression.Algorithm.NONE) {
    return CompressionType.NONE;
  }
  throw new IllegalArgumentException("Unsupported compression type: " + algorithm);
}
/**
 * Maps the HBase compression algorithm of the given column family onto this
 * API's {@code CompressionType}.
 *
 * @param columnDescriptor column family whose compression setting is read
 * @return the corresponding CompressionType
 * @throws IllegalArgumentException if the algorithm has no mapping here
 */
@Override
public CompressionType getCompression(HColumnDescriptor columnDescriptor) {
  final Compression.Algorithm algorithm = columnDescriptor.getCompressionType();
  if (algorithm == Compression.Algorithm.LZO) {
    return CompressionType.LZO;
  }
  if (algorithm == Compression.Algorithm.SNAPPY) {
    return CompressionType.SNAPPY;
  }
  if (algorithm == Compression.Algorithm.GZ) {
    return CompressionType.GZIP;
  }
  if (algorithm == Compression.Algorithm.NONE) {
    return CompressionType.NONE;
  }
  throw new IllegalArgumentException("Unsupported compression type: " + algorithm);
}
/**
 * Maps the HBase compression algorithm of the given column family onto this
 * API's {@code CompressionType}.
 *
 * @param columnDescriptor column family whose compression setting is read
 * @return the corresponding CompressionType
 * @throws IllegalArgumentException if the algorithm has no mapping here
 */
@Override
public CompressionType getCompression(HColumnDescriptor columnDescriptor) {
  final Compression.Algorithm algorithm = columnDescriptor.getCompressionType();
  if (algorithm == Compression.Algorithm.LZO) {
    return CompressionType.LZO;
  }
  if (algorithm == Compression.Algorithm.SNAPPY) {
    return CompressionType.SNAPPY;
  }
  if (algorithm == Compression.Algorithm.GZ) {
    return CompressionType.GZIP;
  }
  if (algorithm == Compression.Algorithm.NONE) {
    return CompressionType.NONE;
  }
  throw new IllegalArgumentException("Unsupported compression type: " + algorithm);
}
/**
 * Maps the HBase compression algorithm of the given column family onto this
 * API's {@code CompressionType}.
 *
 * @param columnDescriptor column family whose compression setting is read
 * @return the corresponding CompressionType
 * @throws IllegalArgumentException if the algorithm has no mapping here
 */
@Override
public CompressionType getCompression(HColumnDescriptor columnDescriptor) {
  final Compression.Algorithm algorithm = columnDescriptor.getCompressionType();
  if (algorithm == Compression.Algorithm.LZO) {
    return CompressionType.LZO;
  }
  if (algorithm == Compression.Algorithm.SNAPPY) {
    return CompressionType.SNAPPY;
  }
  if (algorithm == Compression.Algorithm.GZ) {
    return CompressionType.GZIP;
  }
  if (algorithm == Compression.Algorithm.NONE) {
    return CompressionType.NONE;
  }
  throw new IllegalArgumentException("Unsupported compression type: " + algorithm);
}
/**
 * Maps the HBase compression algorithm of the given column family onto this
 * API's {@code CompressionType}.
 *
 * @param columnDescriptor column family whose compression setting is read
 * @return the corresponding CompressionType
 * @throws IllegalArgumentException if the algorithm has no mapping here
 */
@Override
public CompressionType getCompression(HColumnDescriptor columnDescriptor) {
  final Compression.Algorithm algorithm = columnDescriptor.getCompressionType();
  if (algorithm == Compression.Algorithm.LZO) {
    return CompressionType.LZO;
  }
  if (algorithm == Compression.Algorithm.SNAPPY) {
    return CompressionType.SNAPPY;
  }
  if (algorithm == Compression.Algorithm.GZ) {
    return CompressionType.GZIP;
  }
  if (algorithm == Compression.Algorithm.NONE) {
    return CompressionType.NONE;
  }
  throw new IllegalArgumentException("Unsupported compression type: " + algorithm);
}
/**
 * Converts an HBase {@code HColumnDescriptor} into this client's
 * {@code ColumnFamilyDescriptor}, carrying over name, max versions,
 * compression, bloom filter type and all raw descriptor properties.
 *
 * @param descriptor the HBase column family descriptor to convert
 * @return the equivalent ColumnFamilyDescriptor
 */
private static ColumnFamilyDescriptor getColumnFamilyDescriptor(HColumnDescriptor descriptor) {
  String name = descriptor.getNameAsString();
  int maxVersions = descriptor.getMaxVersions();
  // Use Locale.ROOT so the enum lookup is stable regardless of the JVM's
  // default locale (e.g. Turkish locales upper-case 'i' to a dotted capital,
  // which would make Enum.valueOf fail for names containing 'i').
  ColumnFamilyDescriptor.CompressionType compressionType =
      ColumnFamilyDescriptor.CompressionType.valueOf(
          descriptor.getCompressionType().getName().toUpperCase(java.util.Locale.ROOT));
  ColumnFamilyDescriptor.BloomType bloomType =
      ColumnFamilyDescriptor.BloomType.valueOf(
          descriptor.getBloomFilterType().name().toUpperCase(java.util.Locale.ROOT));
  // Copy every raw key/value property as UTF-8 strings.
  Map<String, String> properties = new HashMap<>();
  for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> value
      : descriptor.getValues().entrySet()) {
    properties.put(org.apache.hadoop.hbase.util.Bytes.toString(value.getKey().get()),
        org.apache.hadoop.hbase.util.Bytes.toString(value.getValue().get()));
  }
  return new ColumnFamilyDescriptor(name, maxVersions, compressionType, bloomType, properties);
}
}
/**
 * Converts an HBase {@code HColumnDescriptor} into this client's
 * {@code ColumnFamilyDescriptor}, carrying over name, max versions,
 * compression, bloom filter type and all raw descriptor properties.
 *
 * @param descriptor the HBase column family descriptor to convert
 * @return the equivalent ColumnFamilyDescriptor
 */
private static ColumnFamilyDescriptor getColumnFamilyDescriptor(HColumnDescriptor descriptor) {
  String name = descriptor.getNameAsString();
  int maxVersions = descriptor.getMaxVersions();
  // Use Locale.ROOT so the enum lookup is stable regardless of the JVM's
  // default locale (e.g. Turkish locales upper-case 'i' to a dotted capital,
  // which would make Enum.valueOf fail for names containing 'i').
  ColumnFamilyDescriptor.CompressionType compressionType =
      ColumnFamilyDescriptor.CompressionType.valueOf(
          descriptor.getCompressionType().getName().toUpperCase(java.util.Locale.ROOT));
  ColumnFamilyDescriptor.BloomType bloomType =
      ColumnFamilyDescriptor.BloomType.valueOf(
          descriptor.getBloomFilterType().name().toUpperCase(java.util.Locale.ROOT));
  // Copy every raw key/value property as UTF-8 strings.
  Map<String, String> properties = new HashMap<>();
  for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> value
      : descriptor.getValues().entrySet()) {
    properties.put(org.apache.hadoop.hbase.util.Bytes.toString(value.getKey().get()),
        org.apache.hadoop.hbase.util.Bytes.toString(value.getValue().get()));
  }
  return new ColumnFamilyDescriptor(name, maxVersions, compressionType, bloomType, properties);
}
}
/**
 * Converts an HBase {@code HColumnDescriptor} into this client's
 * {@code ColumnFamilyDescriptor}, carrying over name, max versions,
 * compression, bloom filter type and all raw descriptor properties.
 *
 * @param descriptor the HBase column family descriptor to convert
 * @return the equivalent ColumnFamilyDescriptor
 */
private static ColumnFamilyDescriptor getColumnFamilyDescriptor(HColumnDescriptor descriptor) {
  String name = descriptor.getNameAsString();
  int maxVersions = descriptor.getMaxVersions();
  // Use Locale.ROOT so the enum lookup is stable regardless of the JVM's
  // default locale (e.g. Turkish locales upper-case 'i' to a dotted capital,
  // which would make Enum.valueOf fail for names containing 'i').
  ColumnFamilyDescriptor.CompressionType compressionType =
      ColumnFamilyDescriptor.CompressionType.valueOf(
          descriptor.getCompressionType().getName().toUpperCase(java.util.Locale.ROOT));
  ColumnFamilyDescriptor.BloomType bloomType =
      ColumnFamilyDescriptor.BloomType.valueOf(
          descriptor.getBloomFilterType().name().toUpperCase(java.util.Locale.ROOT));
  // Copy every raw key/value property as UTF-8 strings.
  Map<String, String> properties = new HashMap<>();
  for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> value
      : descriptor.getValues().entrySet()) {
    properties.put(org.apache.hadoop.hbase.util.Bytes.toString(value.getKey().get()),
        org.apache.hadoop.hbase.util.Bytes.toString(value.getValue().get()));
  }
  return new ColumnFamilyDescriptor(name, maxVersions, compressionType, bloomType, properties);
}
}
/**
 * Converts an HBase {@code HColumnDescriptor} into this client's
 * {@code ColumnFamilyDescriptor}, carrying over name, max versions,
 * compression, bloom filter type and all raw descriptor properties.
 *
 * @param descriptor the HBase column family descriptor to convert
 * @return the equivalent ColumnFamilyDescriptor
 */
private static ColumnFamilyDescriptor getColumnFamilyDescriptor(HColumnDescriptor descriptor) {
  String name = descriptor.getNameAsString();
  int maxVersions = descriptor.getMaxVersions();
  // Use Locale.ROOT so the enum lookup is stable regardless of the JVM's
  // default locale (e.g. Turkish locales upper-case 'i' to a dotted capital,
  // which would make Enum.valueOf fail for names containing 'i').
  ColumnFamilyDescriptor.CompressionType compressionType =
      ColumnFamilyDescriptor.CompressionType.valueOf(
          descriptor.getCompressionType().getName().toUpperCase(java.util.Locale.ROOT));
  ColumnFamilyDescriptor.BloomType bloomType =
      ColumnFamilyDescriptor.BloomType.valueOf(
          descriptor.getBloomFilterType().name().toUpperCase(java.util.Locale.ROOT));
  // Copy every raw key/value property as UTF-8 strings.
  Map<String, String> properties = new HashMap<>();
  for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> value
      : descriptor.getValues().entrySet()) {
    properties.put(org.apache.hadoop.hbase.util.Bytes.toString(value.getKey().get()),
        org.apache.hadoop.hbase.util.Bytes.toString(value.getValue().get()));
  }
  return new ColumnFamilyDescriptor(name, maxVersions, compressionType, bloomType, properties);
}
}
/**
 * Converts an HBase {@code HColumnDescriptor} into this client's
 * {@code ColumnFamilyDescriptor}, carrying over name, max versions,
 * compression, bloom filter type and all raw descriptor properties.
 *
 * @param descriptor the HBase column family descriptor to convert
 * @return the equivalent ColumnFamilyDescriptor
 */
private static ColumnFamilyDescriptor getColumnFamilyDescriptor(HColumnDescriptor descriptor) {
  String name = descriptor.getNameAsString();
  int maxVersions = descriptor.getMaxVersions();
  // Use Locale.ROOT so the enum lookup is stable regardless of the JVM's
  // default locale (e.g. Turkish locales upper-case 'i' to a dotted capital,
  // which would make Enum.valueOf fail for names containing 'i').
  ColumnFamilyDescriptor.CompressionType compressionType =
      ColumnFamilyDescriptor.CompressionType.valueOf(
          descriptor.getCompressionType().getName().toUpperCase(java.util.Locale.ROOT));
  ColumnFamilyDescriptor.BloomType bloomType =
      ColumnFamilyDescriptor.BloomType.valueOf(
          descriptor.getBloomFilterType().name().toUpperCase(java.util.Locale.ROOT));
  // Copy every raw key/value property as UTF-8 strings.
  Map<String, String> properties = new HashMap<>();
  for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> value
      : descriptor.getValues().entrySet()) {
    properties.put(org.apache.hadoop.hbase.util.Bytes.toString(value.getKey().get()),
        org.apache.hadoop.hbase.util.Bytes.toString(value.getValue().get()));
  }
  return new ColumnFamilyDescriptor(name, maxVersions, compressionType, bloomType, properties);
}
}
// Verify the deserialized column descriptor round-tripped every family
// setting: in-memory flag, replication scope, compression, block encoding
// and bloom filter type.
assertEquals(inmemory, deserializedHcd.isInMemory());
assertEquals(hcd.getScope(), deserializedHcd.getScope());
assertTrue(deserializedHcd.getCompressionType().equals(Compression.Algorithm.SNAPPY));
assertTrue(deserializedHcd.getDataBlockEncoding().equals(DataBlockEncoding.FAST_DIFF));
assertTrue(deserializedHcd.getBloomFilterType().equals(BloomType.ROW));
/**
 * Creates an HFile target writing to {@code path} with
 * {@code HFileOutputFormatForCrunch} and sequential file naming.
 *
 * @param path output path for the HFiles
 * @param hcol column family descriptor whose compression, data block encoding
 *             and bloom filter settings are propagated into the output
 *             configuration; may be null, in which case no settings are set
 */
public HFileTarget(Path path, HColumnDescriptor hcol) {
  super(path, HFileOutputFormatForCrunch.class, SequentialFileNamingScheme.getInstance());
  if (hcol != null) {
    // Propagate the family's storage settings so the output format writes
    // HFiles matching the target column family.
    outputConf(HFileOutputFormatForCrunch.HCOLUMN_DESCRIPTOR_COMPRESSION_TYPE_KEY,
        hcol.getCompressionType().getName());
    outputConf(HFileOutputFormatForCrunch.HCOLUMN_DESCRIPTOR_DATA_BLOCK_ENCODING_KEY,
        hcol.getDataBlockEncoding().name());
    outputConf(HFileOutputFormatForCrunch.HCOLUMN_DESCRIPTOR_BLOOM_FILTER_TYPE_KEY,
        hcol.getBloomFilterType().name());
  }
}