/**
 * Walks the compression chunk index and partitions the compressed file into
 * mmap-able segments, recording each segment's start offset and length in
 * {@code state}.
 *
 * Each chunk occupies {@code chunk.length + 4} bytes on disk (the extra 4
 * bytes are the per-chunk checksum). A segment is flushed before adding a
 * chunk that would push it past MAX_SEGMENT_SIZE, so no chunk ever straddles
 * a segment boundary.
 *
 * @param metadata compression metadata supplying chunk offsets and lengths
 */
private void updateState(CompressionMetadata metadata)
{
    long offset = 0;            // position in the *uncompressed* data stream
    long lastSegmentOffset = 0; // start of the segment currently being built
    long segmentSize = 0;       // compressed bytes accumulated in that segment

    while (offset < metadata.dataLength)
    {
        CompressionMetadata.Chunk chunk = metadata.chunkFor(offset);

        //Reached a new mmap boundary
        if (segmentSize + chunk.length + 4 > MAX_SEGMENT_SIZE)
        {
            if (segmentSize > 0)
            {
                state.add(lastSegmentOffset, segmentSize);
                lastSegmentOffset += segmentSize;
                segmentSize = 0;
            }
        }

        segmentSize += chunk.length + 4; //checksum
        // advance by one uncompressed chunk; the final chunk may be short,
        // but overshooting offset past dataLength still terminates the loop
        offset += metadata.chunkLength();
    }

    // flush the final (possibly partial) segment
    if (segmentSize > 0)
        state.add(lastSegmentOffset, segmentSize);
    state.length = lastSegmentOffset + segmentSize;
}
/**
 * Closes the associated compression metadata when this tidier is released.
 * NOTE(review): presumably close() releases the off-heap chunk-offset table
 * held by the metadata — confirm against CompressionMetadata.close().
 */
public void tidy() throws Exception
{
    metadata.close();
}
}
/**
 * Creates a reader over a compressed data file.
 *
 * The superclass is initialized with the uncompressed chunk length as the
 * buffer size and the compressed on-disk length as the file length. The
 * checksum implementation is chosen from the metadata: Adler32 for sstables
 * written with post-compression Adler checksums, CRC32 otherwise. The scratch
 * buffer for compressed chunks is pre-sized using the compressor's estimate
 * for a chunk of {@code metadata.chunkLength()} bytes.
 *
 * @param dataFilePath path to the compressed data file
 * @param metadata     compression metadata for that file
 * @param owner        pool this reader belongs to (may be recycled into it)
 * @throws FileNotFoundException if the data file does not exist
 */
protected CompressedRandomAccessReader(String dataFilePath, CompressionMetadata metadata, PoolingSegmentedFile owner) throws FileNotFoundException
{
    super(new File(dataFilePath), metadata.chunkLength(), metadata.compressedFileLength, owner);
    this.metadata = metadata;
    checksum = metadata.hasPostCompressionAdlerChecksums ? new Adler32() : new CRC32();
    compressed = ByteBuffer.wrap(new byte[metadata.compressor().initialCompressedBufferLength(metadata.chunkLength())]);
}
// Fragment of a buffer-fill path (enclosing method not visible here).
// position must lie within the uncompressed length of the file.
assert position <= fileLength;
// Locate the compressed chunk that contains this uncompressed position.
CompressionMetadata.Chunk chunk = metadata.chunkFor(position);
// Decompress the chunk into the working buffer.
// NOTE(review): compressedChunk is filled elsewhere (not visible in this
// fragment) — presumably read from disk at chunk.offset; confirm in caller.
metadata.compressor().uncompress(compressedChunk, uncompressed);
// Fragment of an sstable metadata printing routine (enclosing method not visible).
// Load compression metadata only if the sstable has a COMPRESSION_INFO component.
File compressionFile = new File(descriptor.filenameFor(Component.COMPRESSION_INFO));
if (compressionFile.exists())
    // NOTE(review): existence is checked on compressionFile but metadata is
    // created from fname — confirm both refer to the same sstable's data file.
    compression = CompressionMetadata.create(fname);
SerializationHeader.Component header = (SerializationHeader.Component) metadata.get(MetadataType.HEADER);
out.printf("SSTable min local deletion time: %s%n", stats.minLocalDeletionTime);
out.printf("SSTable max local deletion time: %s%n", stats.maxLocalDeletionTime);
// "-" marks an uncompressed sstable; the ratio is only meaningful when compressed.
out.printf("Compressor: %s%n", compression != null ? compression.compressor().getClass().getName() : "-");
if (compression != null)
    out.printf("Compression ratio: %s%n", stats.compressionRatio);
/**
 * Builds the {@link CompressionMetadata} for a compressed data file: its
 * uncompressed length, chunk size, and the offsets of every compressed chunk.
 *
 * This is an expensive operation! Don't create more than one per sstable.
 *
 * @param dataFilePath Path to the compressed file
 * @return metadata describing the given compressed file
 */
public static CompressionMetadata create(String dataFilePath)
{
    Descriptor descriptor = Descriptor.fromFilename(dataFilePath);
    String compressionInfoPath = descriptor.filenameFor(Component.COMPRESSION_INFO);
    long compressedLength = new File(dataFilePath).length();
    boolean adlerChecksums = descriptor.version.hasPostCompressionAdlerChecksums;
    return new CompressionMetadata(compressionInfoPath, compressedLength, adlerChecksums);
}
// Reports the configured uncompressed chunk length from the compression metadata.
@Override
public int chunkSize()
{
    return metadata.chunkLength();
}
// Delegates the buffer-type preference (on-heap vs direct) to the compressor in use.
@Override
public BufferType preferredBufferType()
{
    return metadata.compressor().preferredBufferType();
}
/**
 * Drops the OS page cache for this file up to (but not including) the chunk
 * containing the given uncompressed position.
 *
 * Fix: the original had no {@code else}, so when {@code before} was at or past
 * the uncompressed data length it first dropped the whole cache and then still
 * called {@code metadata.chunkFor(before)} with an out-of-range position,
 * which indexes past the chunk-offset table.
 *
 * @param before uncompressed position; 0 passed to the superclass means
 *               "drop the entire file's cache"
 */
public void dropPageCache(long before)
{
    if (before >= metadata.dataLength)
        super.dropPageCache(0);
    else
        // translate the uncompressed position to the on-disk offset of its chunk
        super.dropPageCache(metadata.chunkFor(before).offset);
}
/**
 * Returns the amount of memory in bytes used off heap by the compression meta-data.
 *
 * @return the off-heap size in bytes of the compression meta-data, or 0 when
 *         this table is not compressed
 */
public long getCompressionMetadataOffHeapSize()
{
    return compression ? getCompressionMetadata().offHeapSize() : 0;
}
/**
 * Resolves compression metadata for the given path: when no writer is active,
 * the metadata is loaded via {@code CompressionMetadata.create(path)};
 * otherwise it is obtained from the in-progress writer.
 */
protected CompressionMetadata metadata(String path, long overrideLength, boolean isFinal)
{
    return writer == null
         ? CompressionMetadata.create(path)
         : writer.open(overrideLength, isFinal);
}
// Fragment (enclosing method not visible): populate the chunk-offset table
// by reading it from the stream.
chunkOffsets = readChunkOffsets(stream);
/**
 * Registers this object's resources with the given identity collection,
 * including the compression metadata when one is present.
 */
@Override
public void addTo(Ref.IdentityCollection identities)
{
    super.addTo(identities);
    // the metadata is optional; only register it if it exists
    compressionMetadata.ifPresent(cm -> cm.addTo(identities));
}
/**
 * Writes a file streaming message header to the output in wire order:
 * cfId, sequence number, version string, estimated keys, the section list
 * (count followed by left/right pairs), the compression info, and finally
 * repairedAt. The field order is the wire contract — do not reorder.
 *
 * @return the CompressionInfo that was written, or null when the header has
 *         no compression metadata; returned so the caller can reuse it
 *         without rebuilding the chunk list
 */
public CompressionInfo serialize(FileMessageHeader header, DataOutputPlus out, int version) throws IOException
{
    UUIDSerializer.serializer.serialize(header.cfId, out, version);
    out.writeInt(header.sequenceNumber);
    out.writeUTF(header.version);
    out.writeLong(header.estimatedKeys);

    // sections: count, then each (left, right) boundary pair
    out.writeInt(header.sections.size());
    for (Pair<Long, Long> section : header.sections)
    {
        out.writeLong(section.left);
        out.writeLong(section.right);
    }
    // construct CompressionInfo here to avoid holding large number of Chunks on heap.
    CompressionInfo compressionInfo = null;
    if (header.compressionMetadata != null)
        compressionInfo = new CompressionInfo(header.compressionMetadata.getChunksForSections(header.sections), header.compressionMetadata.parameters);
    CompressionInfo.serializer.serialize(compressionInfo, out, version);
    out.writeLong(header.repairedAt);
    return compressionInfo;
}
// Fragment of a buffer-fill path (enclosing method not visible here).
// position must lie within the uncompressed length of the file.
assert position <= fileLength;
// Locate the compressed chunk that contains this uncompressed position.
CompressionMetadata.Chunk chunk = metadata.chunkFor(position);
// Reuse a thread/instance-held scratch buffer for the compressed bytes.
ByteBuffer compressed = compressedHolder.get();
// Decompress the chunk into the working buffer.
// NOTE(review): `compressed` is presumably filled from disk between these
// statements in the full method — confirm in the enclosing code.
metadata.compressor().uncompress(compressed, uncompressed);
// Fragment of an sstable metadata printing routine (enclosing method not visible).
// Load compression metadata only if the sstable has a COMPRESSION_INFO component.
File compressionFile = new File(descriptor.filenameFor(Component.COMPRESSION_INFO));
if (compressionFile.exists())
    // NOTE(review): existence is checked on compressionFile but metadata is
    // created from fname — confirm both refer to the same sstable's data file.
    compression = CompressionMetadata.create(fname);
SerializationHeader.Component header = (SerializationHeader.Component) metadata.get(MetadataType.HEADER);
out.printf("SSTable min local deletion time: %s%n", stats.minLocalDeletionTime);
out.printf("SSTable max local deletion time: %s%n", stats.maxLocalDeletionTime);
// "-" marks an uncompressed sstable; the ratio is only meaningful when compressed.
out.printf("Compressor: %s%n", compression != null ? compression.compressor().getClass().getName() : "-");
if (compression != null)
    out.printf("Compression ratio: %s%n", stats.compressionRatio);
/**
 * Builds the {@link CompressionMetadata} for a compressed data file: its
 * uncompressed length, chunk size, and the offsets of every compressed chunk.
 *
 * This is an expensive operation! Don't create more than one per sstable.
 *
 * @param dataFilePath Path to the compressed file
 * @return metadata describing the given compressed file
 */
public static CompressionMetadata create(String dataFilePath)
{
    Descriptor descriptor = Descriptor.fromFilename(dataFilePath);
    String compressionInfoPath = descriptor.filenameFor(Component.COMPRESSION_INFO);
    long compressedLength = new File(dataFilePath).length();
    return new CompressionMetadata(compressionInfoPath, compressedLength, descriptor.version.compressedChecksumType());
}
// Reports the configured uncompressed chunk length from the compression metadata.
@Override
public int chunkSize()
{
    return metadata.chunkLength();
}
// Delegates the buffer-type preference (on-heap vs direct) to the compressor in use.
@Override
public BufferType preferredBufferType()
{
    return metadata.compressor().preferredBufferType();
}
/**
 * Drops the OS page cache for this file up to (but not including) the chunk
 * containing the given uncompressed position.
 *
 * Fix: the original had no {@code else}, so when {@code before} was at or past
 * the uncompressed data length it first dropped the whole cache and then still
 * called {@code metadata.chunkFor(before)} with an out-of-range position,
 * which indexes past the chunk-offset table.
 *
 * @param before uncompressed position; 0 passed to the superclass means
 *               "drop the entire file's cache"
 */
public void dropPageCache(long before)
{
    if (before >= metadata.dataLength)
        super.dropPageCache(0);
    else
        // translate the uncompressed position to the on-disk offset of its chunk
        super.dropPageCache(metadata.chunkFor(before).offset);
}