Codota Logo
org.apache.carbondata.core.datastore.compression
Code IndexAdd Codota to your IDE (free)

How to use org.apache.carbondata.core.datastore.compression

Best Java code snippets using org.apache.carbondata.core.datastore.compression (Showing top 20 results out of 315)

  • Add the Codota plugin to your IDE and get smart completions
private void myMethod () {
ArrayList a =
  • Codota Iconnew ArrayList<String>()
  • Codota Iconnew ArrayList()
  • Codota Iconnew ArrayList<Object>()
  • Smart code suggestions by Codota
}
origin: org.apache.carbondata/carbondata-streaming

/**
 * Creates a reader for streaming blocklets.
 *
 * @param syncMarker      marker bytes that delimit blocklets in the stream
 * @param in              underlying input stream to read from
 * @param limit           number of bytes this reader may scan
 * @param isHeaderPresent whether a file header precedes the data
 * @param compressorName  name of the compressor used for the stream data
 */
public StreamBlockletReader(byte[] syncMarker, InputStream in, long limit,
  boolean isHeaderPresent, String compressorName) {
 this.syncMarker = syncMarker;
 syncLen = syncMarker.length;
 // reusable buffer sized to hold one sync marker for comparison
 syncBuffer = new byte[syncLen];
 this.in = in;
 limitStart = limit;
 // NOTE(review): the scan window is extended by syncLen past the limit —
 // presumably so a sync marker straddling the boundary is still matched;
 // confirm against the caller's contract.
 limitEnd = limitStart + syncLen;
 this.isHeaderPresent = isHeaderPresent;
 // resolve the compressor instance once, up front
 this.compressor = CompressorFactory.getInstance().getCompressor(compressorName);
}
origin: org.apache.carbondata/carbondata-core

/**
 * Constructor to get minimum parameter to create instance of this class
 *
 * @param blockletInfo        metadata describing the blocklet to be read
 * @param eachColumnValueSize size of each column value
 * @param filePath            file from which data will be read
 */
public CompressedDimensionChunkFileBasedReaderV2(final BlockletInfo blockletInfo,
  final int[] eachColumnValueSize, final String filePath) {
 super(blockletInfo, eachColumnValueSize, filePath);
 // for v2 store, the compressor is snappy
 this.compressor = CompressorFactory.NativeSupportedCompressor.SNAPPY.getCompressor();
}
origin: org.apache.carbondata/carbondata-core

/**
 * Registers every natively supported compressor in the lookup map,
 * keyed by its name. Private: the factory is accessed via getInstance().
 */
private CompressorFactory() {
 for (NativeSupportedCompressor nativeSupportedCompressor : NativeSupportedCompressor.values()) {
  allSupportedCompressors.put(nativeSupportedCompressor.getName(),
    nativeSupportedCompressor.getCompressor());
 }
}
origin: org.apache.carbondata/carbondata-core

@Override
void encodeIndexStorage(ColumnPage inputPage) {
 // Build the index storage over the page's raw byte arrays; all three
 // indexing options are disabled for this encoder.
 BlockIndexerStorage<byte[][]> storage =
   new BlockIndexerStorageForShort(inputPage.getByteArrayPage(), false, false, false);
 // Flatten the 2-D data page and compress it with the compressor
 // configured for this column.
 String compressorName = inputPage.getColumnCompressorName();
 Compressor pageCompressor = CompressorFactory.getInstance().getCompressor(compressorName);
 byte[] flatBytes = ByteUtil.flatten(storage.getDataPage());
 super.indexStorage = storage;
 super.compressedDataPage = pageCompressor.compressByte(flatBytes);
}
origin: org.apache.carbondata/carbondata-core

/**
 * get the default compressor.
 * This method can only be called in data load procedure to compress column page.
 * In query procedure, we should read the compressor information from the metadata
 * in datafiles when we want to decompress the content.
 */
public Compressor getCompressor() {
 // Resolve the configured compressor name (falling back to the default)
 // and delegate to the name-based lookup.
 return getCompressor(CarbonProperties.getInstance()
   .getProperty(CarbonCommonConstants.COMPRESSOR, CarbonCommonConstants.DEFAULT_COMPRESSOR));
}
origin: org.apache.carbondata/carbondata-core

/**
 * get the compressor name from chunk meta
 * before 1.5.0, we only support snappy and do not have compressor_name field;
 * after 1.5.0, we directly get the compressor from the compressor_name field
 */
public static String getCompressorNameFromChunkMeta(ChunkCompressionMeta chunkCompressionMeta) {
 // Legacy stores (pre 1.5.0) never set compressor_name and were always
 // snappy-compressed, so fall back to snappy when the field is absent.
 return chunkCompressionMeta.isSetCompressor_name()
     ? chunkCompressionMeta.getCompressor_name()
     : CompressorFactory.NativeSupportedCompressor.SNAPPY.getName();
}
/**
origin: org.apache.carbondata/carbondata-core

/**
 * Looks up a compressor by name, registering it on the fly if it has not
 * been seen before (e.g. a user-provided column compressor).
 */
public Compressor getCompressor(String name) {
 String internalName = getInternalCompressorName(name);
 if (internalName != null) {
  return allSupportedCompressors.get(internalName);
 }
 // Unknown name so far — try to register it as a new compressor.
 return registerColumnCompressor(name);
}
origin: org.apache.carbondata/carbondata-core

@Override
public double[] unCompressDouble(byte[] compInput, int offset, int length) {
 // Decompress the raw bytes first, then reinterpret them as
 // little-endian doubles.
 byte[] raw = unCompressByte(compInput, offset, length);
 double[] result = new double[raw.length / ByteUtil.SIZEOF_DOUBLE];
 ByteBuffer.wrap(raw).order(ByteOrder.LITTLE_ENDIAN).asDoubleBuffer().get(result);
 return result;
}
origin: org.apache.carbondata/carbondata-core

@Override
public byte[] compressFloat(float[] unCompInput) {
 // Serialize the floats into a little-endian byte buffer, then compress
 // the backing array.
 ByteBuffer buffer = ByteBuffer
     .allocate(unCompInput.length * ByteUtil.SIZEOF_FLOAT)
     .order(ByteOrder.LITTLE_ENDIAN);
 buffer.asFloatBuffer().put(unCompInput);
 return compressByte(buffer.array());
}
origin: org.apache.carbondata/carbondata-core

/**
 * Decompresses a slice of the input, delegating to the whole-array overload.
 *
 * @param compInput compressed bytes
 * @param offset    start of the slice within {@code compInput}
 * @param length    number of bytes in the slice
 * @return the decompressed bytes
 */
@Override
public byte[] unCompressByte(byte[] compInput, int offset, int length) {
 // todo: how to avoid memory copy
 // Arrays.copyOfRange replaces the manual allocate-then-arraycopy pair;
 // the copy itself is still unavoidable without an offset-aware API.
 return unCompressByte(java.util.Arrays.copyOfRange(compInput, offset, offset + length));
}
origin: org.apache.carbondata/carbondata-core

/**
 * Off-heap (address-based) compression is not supported by this compressor.
 *
 * @throws UnsupportedOperationException always; this is the idiomatic type
 *         for an unimplemented optional operation, and as a subclass of
 *         RuntimeException it remains compatible with existing callers.
 */
@Override
public long rawCompress(long inputAddress, int inputSize, long outputAddress) throws IOException {
 throw new UnsupportedOperationException("Not implemented rawCompress for " + this.getName());
}
origin: org.apache.carbondata/carbondata-core

/**
 * Records which rows of the page are null, storing the null bitmap
 * (compressed with the column's configured compressor) into the chunk.
 */
private void fillNullBitSet(ColumnPage inputPage, DataChunk2 dataChunk) {
 Compressor pageCompressor =
   CompressorFactory.getInstance().getCompressor(inputPage.getColumnCompressorName());
 byte[] compressedBits = pageCompressor.compressByte(inputPage.getNullBits().toByteArray());
 PresenceMeta meta = new PresenceMeta();
 meta.setPresent_bit_streamIsSet(true);
 meta.setPresent_bit_stream(compressedBits);
 dataChunk.setPresence(meta);
}
origin: org.apache.carbondata/carbondata-streaming

/**
 * Creates a writer for streaming blocklets.
 *
 * @param maxSize                maximum blocklet size in bytes (also the buffer size)
 * @param maxRowNum              maximum number of rows per blocklet
 * @param rowSize                size of a single row
 * @param dimCountWithoutComplex number of dimension columns excluding complex types
 * @param measureCount           number of measure columns
 * @param measureDataTypes       data type of each measure column
 * @param compressorName         name of the compressor to use for output
 */
StreamBlockletWriter(int maxSize, int maxRowNum, int rowSize, int dimCountWithoutComplex,
  int measureCount, DataType[] measureDataTypes, String compressorName) {
 buffer = new byte[maxSize];
 this.maxSize = maxSize;
 this.maxRowNum = maxRowNum;
 this.rowSize = rowSize;
 this.dimCountWithoutComplex = dimCountWithoutComplex;
 this.measureCount = measureCount;
 this.measureDataTypes = measureDataTypes;
 // resolve the compressor instance once, up front
 this.compressor = CompressorFactory.getInstance().getCompressor(compressorName);
 initializeStatsCollector();
}
origin: org.apache.carbondata/carbondata-core

/**
 * Constructor to get minimum parameter to create instance of this class
 *
 * @param blockletInfo BlockletInfo
 * @param filePath     file from which data will be read
 */
public CompressedMeasureChunkFileBasedReaderV2(final BlockletInfo blockletInfo,
  final String filePath) {
 super(blockletInfo, filePath);
 // for v2 store, the compressor is always snappy (compressor_name field
 // did not exist yet in this format version)
 this.compressor = CompressorFactory.NativeSupportedCompressor.SNAPPY.getCompressor();
}
origin: org.apache.carbondata/carbondata-core

@Override
public int[] unCompressInt(byte[] compInput, int offset, int length) {
 // Decompress the raw bytes first, then reinterpret them as
 // little-endian ints.
 byte[] raw = unCompressByte(compInput, offset, length);
 int[] result = new int[raw.length / ByteUtil.SIZEOF_INT];
 ByteBuffer.wrap(raw).order(ByteOrder.LITTLE_ENDIAN).asIntBuffer().get(result);
 return result;
}
origin: org.apache.carbondata/carbondata-core

@Override
public byte[] compressShort(short[] unCompInput) {
 // Serialize the shorts into a little-endian byte buffer, then compress
 // the backing array.
 ByteBuffer buffer = ByteBuffer
     .allocate(unCompInput.length * ByteUtil.SIZEOF_SHORT)
     .order(ByteOrder.LITTLE_ENDIAN);
 buffer.asShortBuffer().put(unCompInput);
 return compressByte(buffer.array());
}
origin: org.apache.carbondata/carbondata-core

@Override
protected byte[] encodeData(ColumnPage input) throws MemoryException, IOException {
 // Compress the page using the compressor configured for this column.
 String compressorName = input.getColumnCompressorName();
 return input.compress(CompressorFactory.getInstance().getCompressor(compressorName));
}
origin: org.apache.carbondata/carbondata-core

/**
 * Constructor to get minimum parameter to create instance of this class
 *
 * @param blockletInfo        blocklet info
 * @param eachColumnValueSize size of the each column value
 * @param filePath            file from which data will be read
 */
public CompressedDimensionChunkFileBasedReaderV1(final BlockletInfo blockletInfo,
  final int[] eachColumnValueSize, final String filePath) {
 super(eachColumnValueSize, filePath, blockletInfo.getNumberOfRows());
 // cache the per-dimension chunk metadata for later reads
 this.dimensionColumnChunk = blockletInfo.getDimensionColumnChunk();
 // for v1 store, the compressor is snappy
 this.compressor = CompressorFactory.NativeSupportedCompressor.SNAPPY.getCompressor();
}
origin: org.apache.carbondata/carbondata-core

@Override
public float[] unCompressFloat(byte[] compInput, int offset, int length) {
 // Decompress the raw bytes first, then reinterpret them as
 // little-endian floats.
 byte[] raw = unCompressByte(compInput, offset, length);
 float[] result = new float[raw.length / ByteUtil.SIZEOF_FLOAT];
 ByteBuffer.wrap(raw).order(ByteOrder.LITTLE_ENDIAN).asFloatBuffer().get(result);
 return result;
}
origin: org.apache.carbondata/carbondata-core

@Override
public byte[] compressLong(long[] unCompInput) {
 // Serialize the longs into a little-endian byte buffer, then compress
 // the backing array.
 ByteBuffer buffer = ByteBuffer
     .allocate(unCompInput.length * ByteUtil.SIZEOF_LONG)
     .order(ByteOrder.LITTLE_ENDIAN);
 buffer.asLongBuffer().put(unCompInput);
 return compressByte(buffer.array());
}
org.apache.carbondata.core.datastore.compression

Most used classes

  • CompressorFactory
  • Compressor
  • CompressorFactory$NativeSupportedCompressor
  • AbstractCompressor
  • ZstdCompressor
Codota Logo
  • Products

    Search for Java codeSearch for JavaScript codeEnterprise
  • IDE Plugins

    IntelliJ IDEAWebStormAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimAtomGoLandRubyMineEmacsJupyter
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogCodota Academy Plugin user guide Terms of usePrivacy policyJava Code IndexJavascript Code Index
Get Codota for your IDE now