/**
 * Resolves a compression algorithm from its name.
 *
 * @param algoName algorithm name; {@code null} selects
 *        {@link HFile#DEFAULT_COMPRESSION_ALGORITHM}
 * @return the matching compression algorithm
 */
public static Compression.Algorithm compressionByName(String algoName) {
  if (algoName == null) {
    return HFile.DEFAULT_COMPRESSION_ALGORITHM;
  }
  return Compression.getCompressionAlgorithmByName(algoName);
}
/**
 * Returns the names of the compression algorithms accepted by {@code HFile.Writer}.
 *
 * <p>Currently supported names:
 * <ul>
 * <li>"none" - No compression.
 * <li>"gz" - GZIP compression.
 * </ul>
 *
 * @return supported compression algorithm names
 */
public static String[] getSupportedCompressionAlgorithms() {
  return Compression.getSupportedAlgorithms();
}
assert blockBufferWithoutHeader.hasArray(); if (compression != Compression.Algorithm.NONE) { Compression.decompress(blockBufferWithoutHeader.array(), blockBufferWithoutHeader.arrayOffset(), dataInputStream, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader, compression);
assert blockBufferWithoutHeader.hasArray(); if (compression != Compression.Algorithm.NONE) { Compression.decompress(blockBufferWithoutHeader.array(), blockBufferWithoutHeader.arrayOffset(), in, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader, compression);
/**
 * Checks whether the named compression codec can be instantiated in this JVM.
 *
 * @param codec case-insensitive codec name (e.g. "gz", "snappy")
 * @return {@code true} if the codec is known and usable, {@code false} otherwise
 */
public static boolean testCompression(String codec) {
  // Locale.ROOT keeps the lower-casing locale-independent.
  String name = codec.toLowerCase(Locale.ROOT);
  Compression.Algorithm algo;
  try {
    algo = Compression.getCompressionAlgorithmByName(name);
  } catch (IllegalArgumentException e) {
    LOG.warn("Codec type: " + name + " is not known");
    return false;
  }
  try {
    testCompression(algo);
    return true;
  } catch (IOException ignored) {
    // Codec class exists but its (often native) implementation cannot be loaded.
    LOG.warn("Can't instantiate codec: " + name, ignored);
    return false;
  }
}
assert blockBufferWithoutHeader.hasArray(); if (compression != Compression.Algorithm.NONE) { Compression.decompress(blockBufferWithoutHeader.array(), blockBufferWithoutHeader.arrayOffset(), dataInputStream, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader, compression);
/**
 * Lists the compression algorithm names that {@code HFile.Writer} accepts.
 *
 * <p>Currently the following algorithms are supported:
 * <ul>
 * <li>"none" - No compression.
 * <li>"gz" - GZIP compression.
 * </ul>
 *
 * @return array of supported compression algorithm names
 */
public static String[] getSupportedCompressionAlgorithms() {
  return Compression.getSupportedAlgorithms();
}
/** * Get supported compression algorithms. * @return supported compression algorithms. */ public static Compression.Algorithm[] getSupportedCompressionAlgorithms() { String[] allAlgos = HFile.getSupportedCompressionAlgorithms(); List<Compression.Algorithm> supportedAlgos = new ArrayList<>(); for (String algoName : allAlgos) { try { Compression.Algorithm algo = Compression.getCompressionAlgorithmByName(algoName); algo.getCompressor(); supportedAlgos.add(algo); } catch (Throwable t) { // this algo is not available } } return supportedAlgos.toArray(new Algorithm[supportedAlgos.size()]); }
assert blockBufferWithoutHeader.hasArray(); if (compression != Compression.Algorithm.NONE) { Compression.decompress(blockBufferWithoutHeader.array(), blockBufferWithoutHeader.arrayOffset(), dataInputStream, onDiskSizeWithoutHeader, uncompressedSizeWithoutHeader, compression);
/** * @param compressionAlgorithmName What kind of algorithm should be used * as baseline for comparison (e.g. lzo, gz). */ public DataBlockEncodingTool(String compressionAlgorithmName) { this.compressionAlgorithmName = compressionAlgorithmName; this.compressionAlgorithm = Compression.getCompressionAlgorithmByName( compressionAlgorithmName); this.compressor = this.compressionAlgorithm.getCompressor(); this.decompressor = this.compressionAlgorithm.getDecompressor(); }
/**
 * Builds an HBase {@code HColumnDescriptor} from a Thrift {@code ColumnDescriptor}
 * "struct".
 *
 * @param in Thrift ColumnDescriptor object
 * @return the equivalent HColumnDescriptor
 * @throws IllegalArgument if the column name is empty
 */
static public HColumnDescriptor colDescFromThrift(ColumnDescriptor in)
    throws IllegalArgument {
  Compression.Algorithm comp =
      Compression.getCompressionAlgorithmByName(in.compression.toLowerCase(Locale.ROOT));
  BloomType bt = BloomType.valueOf(in.bloomFilterType);
  if (in.name == null || !in.name.hasRemaining()) {
    throw new IllegalArgument("column name is empty");
  }
  // Thrift carries "family:qualifier"; only the family part names the column descriptor.
  byte[] familyName = CellUtil.parseColumn(Bytes.getBytes(in.name))[0];
  HColumnDescriptor col = new HColumnDescriptor(familyName);
  col.setMaxVersions(in.maxVersions);
  col.setCompressionType(comp);
  col.setInMemory(in.inMemory);
  col.setBlockCacheEnabled(in.blockCacheEnabled);
  // A non-positive TTL from Thrift means "never expire".
  col.setTimeToLive(in.timeToLive > 0 ? in.timeToLive : Integer.MAX_VALUE);
  col.setBloomFilterType(bt);
  return col;
}
/**
 * Maps an algorithm name to its {@code Compression.Algorithm}, falling back to the
 * HFile default when no name is given.
 *
 * @param algoName algorithm name, or {@code null} for the default
 * @return the resolved compression algorithm
 */
public static Compression.Algorithm compressionByName(String algoName) {
  return algoName == null
      ? HFile.DEFAULT_COMPRESSION_ALGORITHM
      : Compression.getCompressionAlgorithmByName(algoName);
}
/**
 * Checks whether the named compression codec can be instantiated in this JVM.
 *
 * @param codec case-insensitive codec name (e.g. "gz", "snappy")
 * @return {@code true} if the codec is known and usable, {@code false} otherwise
 */
public static boolean testCompression(String codec) {
  // Use Locale.ROOT so lower-casing is locale-independent: under e.g. a Turkish
  // default locale, "LZO".toLowerCase() would not produce "lzo" (dotless i).
  codec = codec.toLowerCase(Locale.ROOT);
  Compression.Algorithm a;
  try {
    a = Compression.getCompressionAlgorithmByName(codec);
  } catch (IllegalArgumentException e) {
    LOG.warn("Codec type: " + codec + " is not known");
    return false;
  }
  try {
    testCompression(a);
    return true;
  } catch (IOException ignored) {
    // Codec name is known but its (often native) implementation cannot be loaded.
    LOG.warn("Can't instantiate codec: " + codec, ignored);
    return false;
  }
}
/**
 * Opens the sample index, creating the backing HBase table if needed. The table is
 * pre-split into regions sized from configuration (expected file count divided by
 * files-per-region).
 *
 * @return always {@code true}
 * @throws UncheckedIOException if the table creation fails
 */
@Override
public boolean open() {
  super.open();
  try {
    int expectedFiles = hBaseManager.getConf()
        .getInt(EXPECTED_FILES_NUMBER, DEFAULT_EXPECTED_FILES_NUMBER);
    int filesPerRegion = hBaseManager.getConf()
        .getInt(SAMPLE_INDEX_TABLE_PRESPLIT_SIZE, DEFAULT_SAMPLE_INDEX_TABLE_PRESPLIT_SIZE);
    int numSplits = expectedFiles / filesPerRegion;
    ArrayList<byte[]> splitKeys = new ArrayList<>(numSplits);
    for (int split = 0; split < numSplits; split++) {
      splitKeys.add(SampleIndexConverter.toRowKey(split * filesPerRegion));
    }
    String compressionName = hBaseManager.getConf()
        .get(SAMPLE_INDEX_TABLE_COMPRESSION, Compression.Algorithm.SNAPPY.getName());
    hBaseManager.createTableIfNeeded(tableName, family, splitKeys,
        Compression.getCompressionAlgorithmByName(compressionName));
  } catch (IOException e) {
    throw new UncheckedIOException(e);
  }
  return true;
}
/** * Get supported compression algorithms. * @return supported compression algorithms. */ public static Compression.Algorithm[] getSupportedCompressionAlgorithms() { String[] allAlgos = HFile.getSupportedCompressionAlgorithms(); List<Compression.Algorithm> supportedAlgos = new ArrayList<>(); for (String algoName : allAlgos) { try { Compression.Algorithm algo = Compression.getCompressionAlgorithmByName(algoName); algo.getCompressor(); supportedAlgos.add(algo); } catch (Throwable t) { // this algo is not available } } return supportedAlgos.toArray(new Algorithm[supportedAlgos.size()]); }
/**
 * Creates the archive table (pre-split for the human genome) unless it already exists.
 *
 * @param genomeHelper helper providing configuration and column family
 * @param tableName    name of the archive table
 * @param con          open HBase connection
 * @return {@code true} if the table was created, {@code false} if it already existed
 * @throws IOException if the table creation fails
 */
public static boolean createArchiveTableIfNeeded(GenomeHelper genomeHelper, String tableName,
    Connection con) throws IOException {
  String compressionName = genomeHelper.getConf()
      .get(HadoopVariantStorageEngine.ARCHIVE_TABLE_COMPRESSION,
          Compression.Algorithm.SNAPPY.getName());
  Compression.Algorithm compression =
      Compression.getCompressionAlgorithmByName(compressionName);
  List<byte[]> preSplits = generateArchiveTableBootPreSplitHuman(genomeHelper.getConf());
  return HBaseManager.createTableIfNeeded(con, tableName, genomeHelper.getColumnFamily(),
      preSplits, compression);
}
/** * @param compressionAlgorithmName What kind of algorithm should be used * as baseline for comparison (e.g. lzo, gz). */ public DataBlockEncodingTool(String compressionAlgorithmName) { this.compressionAlgorithmName = compressionAlgorithmName; this.compressionAlgorithm = Compression.getCompressionAlgorithmByName( compressionAlgorithmName); this.compressor = this.compressionAlgorithm.getCompressor(); this.decompressor = this.compressionAlgorithm.getDecompressor(); }
VariantPhoenixKeyFactory::generateVariantRowKey); boolean newTable = HBaseManager.createTableIfNeeded(con, tableName, genomeHelper.getColumnFamily(), splitList, Compression.getCompressionAlgorithmByName( genomeHelper.getConf().get( HadoopVariantStorageEngine.VARIANT_TABLE_COMPRESSION,
VariantPhoenixKeyFactory::generateVariantRowKey); boolean newTable = HBaseManager.createTableIfNeeded(con, tableName, genomeHelper.getColumnFamily(), splitList, Compression.getCompressionAlgorithmByName( genomeHelper.getConf().get( HadoopVariantStorageEngine.VARIANT_TABLE_COMPRESSION,
/**
 * This utility method creates a new HBase HColumnDescriptor object based on a
 * Thrift ColumnDescriptor "struct".
 *
 * @param in Thrift ColumnDescriptor object
 * @return the equivalent HColumnDescriptor
 * @throws IllegalArgument if the column name is empty or missing
 */
static public HColumnDescriptor colDescFromThrift(ColumnDescriptor in)
    throws IllegalArgument {
  Compression.Algorithm comp =
      Compression.getCompressionAlgorithmByName(in.compression.toLowerCase(Locale.ROOT));
  BloomType bt = BloomType.valueOf(in.bloomFilterType);
  boolean nameMissing = in.name == null || !in.name.hasRemaining();
  if (nameMissing) {
    throw new IllegalArgument("column name is empty");
  }
  // Keep only the family portion of the Thrift "family:qualifier" column name.
  byte[] parsedName = CellUtil.parseColumn(Bytes.getBytes(in.name))[0];
  // TTL <= 0 from Thrift is treated as "keep forever".
  int ttl = in.timeToLive > 0 ? in.timeToLive : Integer.MAX_VALUE;
  return new HColumnDescriptor(parsedName)
      .setMaxVersions(in.maxVersions)
      .setCompressionType(comp)
      .setInMemory(in.inMemory)
      .setBlockCacheEnabled(in.blockCacheEnabled)
      .setTimeToLive(ttl)
      .setBloomFilterType(bt);
}