/**
 * Gets the position of the actual index summary entry in our Memory attribute, 'entries'.
 * @param index The index of the entry or key to get the position for
 * @return an offset into our Memory attribute where the actual entry resides
 */
public int getPositionInSummary(int index)
{
    // The first section of bytes holds a four-byte position for each entry in the summary, so just multiply by 4.
    return offsets.getInt(index << 2);
}
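// For intuition, a minimal self-contained sketch of the same two-region layout, using heap ByteBuffers in
// place of the off-heap Memory class: a block of four-byte offsets followed by the entry bytes, where the
// position of entry i is the int stored at byte offset i << 2. The class name, the toy keys and the use of
// ByteBuffer are illustrative assumptions, not part of the real implementation.
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class SummaryLayoutSketch
{
    public static void main(String[] args)
    {
        byte[][] keys = { "apple".getBytes(StandardCharsets.UTF_8),
                          "banana".getBytes(StandardCharsets.UTF_8),
                          "cherry".getBytes(StandardCharsets.UTF_8) };

        // 'offsets' region: one four-byte position per entry, relative to the start of the 'entries' region
        ByteBuffer offsets = ByteBuffer.allocate(keys.length * 4);
        ByteBuffer entries = ByteBuffer.allocate(64);
        for (byte[] key : keys)
        {
            offsets.putInt(entries.position()); // the first entry lands at position 0
            entries.put(key);
        }

        // the equivalent of getPositionInSummary(index): read the int at byte offset index << 2
        int index = 2;
        System.out.println(offsets.getInt(index << 2)); // prints 11 (5 bytes of "apple" + 6 of "banana")
    }
}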
public IndexSummary(IPartitioner partitioner, Memory offsets, int offsetCount,
                    Memory entries, long entriesLength,
                    int sizeAtFullSampling, int minIndexInterval, int samplingLevel)
{
    super(new Memory[] { offsets, entries });
    // in-memory offsets are relative to the start of the entries region, so the first entry's offset must be zero
    assert offsets.getInt(0) == 0;
    this.partitioner = partitioner;
    this.minIndexInterval = minIndexInterval;
    this.offsetCount = offsetCount;
    this.entriesLength = entriesLength;
    this.sizeAtFullSampling = sizeAtFullSampling;
    this.offsets = offsets;
    this.entries = entries;
    this.samplingLevel = samplingLevel;
    assert samplingLevel > 0;
}
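// A rough sketch of how the fields wired together above (offsets, entries, entriesLength, offsetCount)
// relate to one another when reading an entry back out. readEntry is a hypothetical helper over the
// ByteBuffer stand-ins from the previous sketch, not Cassandra's actual accessor.
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class EntryReadSketch
{
    // Hypothetical helper: entry i occupies the bytes between its own offset and the next entry's offset,
    // or entriesLength for the final entry.
    static byte[] readEntry(ByteBuffer offsets, ByteBuffer entries, int entriesLength, int offsetCount, int i)
    {
        int start = offsets.getInt(i << 2);
        int end = (i + 1 < offsetCount) ? offsets.getInt((i + 1) << 2) : entriesLength;
        byte[] entry = new byte[end - start];
        for (int b = 0; b < entry.length; b++)
            entry[b] = entries.get(start + b);
        return entry;
    }

    public static void main(String[] args)
    {
        // same toy layout as above: three entries beginning at in-memory offsets 0, 5 and 11
        ByteBuffer offsets = ByteBuffer.allocate(12);
        offsets.putInt(0, 0).putInt(4, 5).putInt(8, 11);
        ByteBuffer entries = ByteBuffer.wrap("applebananacherry".getBytes(StandardCharsets.UTF_8));
        System.out.println(new String(readEntry(offsets, entries, 17, 3, 1), StandardCharsets.UTF_8)); // banana
    }
}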
// our on-disk offsets are based from the start of the combined (offsets + entries) structure, so when
// deserializing we rebase each four-byte offset slot to index from the start of 'entries' instead,
// by subtracting the size of the offsets region (see the comment in serialize() below)
for (int i = 0 ; i < offsets.size() ; i += 4)
    offsets.setInt(i, (int) (offsets.getInt(i) - offsets.size()));
return new IndexSummary(partitioner, offsets, offsetCount, entries, entries.size(),
                        fullSamplingSummarySize, minIndexInterval, samplingLevel);
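// A quick arithmetic sketch of the rebasing performed above, using the same toy layout (three entries, so
// the offsets region occupies X = 3 * 4 = 12 bytes): the on-disk offsets {12, 17, 23} are based from the
// start of the combined structure, and subtracting X yields the in-memory offsets {0, 5, 11}, which index
// from the start of the entries region. Plain int arrays stand in for the off-heap Memory regions.
public class RebaseSketch
{
    public static void main(String[] args)
    {
        int[] onDisk = { 12, 17, 23 };              // offsets exactly as they appear in the file
        int x = onDisk.length * 4;                  // X = 12: the bytes occupied by the offsets region

        int[] inMemory = new int[onDisk.length];
        for (int i = 0; i < onDisk.length; i++)
            inMemory[i] = onDisk[i] - x;            // deserializing: subtract X -> {0, 5, 11}

        for (int i = 0; i < inMemory.length; i++)
            assert inMemory[i] + x == onDisk[i];    // serializing: adding X back reproduces the file values

        System.out.println(java.util.Arrays.toString(inMemory)); // [0, 5, 11]
    }
}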
public void serialize(IndexSummary t, DataOutputPlus out, boolean withSamplingLevel) throws IOException
{
    out.writeInt(t.minIndexInterval);
    out.writeInt(t.offsetCount);
    out.writeLong(t.getOffHeapSize());
    if (withSamplingLevel)
    {
        out.writeInt(t.samplingLevel);
        out.writeInt(t.sizeAtFullSampling);
    }
    // our on-disk representation treats the offsets and the summary data as one contiguous structure,
    // in which the offsets are based from the start of the structure. i.e., if the offsets occupy
    // X bytes, the value of the first offset will be X. In memory we split the two regions up, so that
    // the summary values are indexed from zero, so we apply a correction to the offsets when de/serializing.
    // In this case adding X to each of the offsets.
    int baseOffset = t.offsetCount * 4;
    for (int i = 0 ; i < t.offsetCount ; i++)
    {
        int offset = t.offsets.getInt(i * 4) + baseOffset;
        // our serialization format for this file uses native byte order, so if this is different to the
        // default Java serialization order (BIG_ENDIAN) we have to reverse our bytes
        if (ByteOrder.nativeOrder() != ByteOrder.BIG_ENDIAN)
            offset = Integer.reverseBytes(offset);
        out.writeInt(offset);
    }
    out.write(t.entries, 0, t.entriesLength);
}
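// Finally, a hedged sketch of the byte-order handling at the end of serialize(): DataOutput.writeInt always
// writes big-endian, so on a little-endian platform the value is pre-reversed with Integer.reverseBytes and a
// reader that interprets the file in native order recovers the original value. The stream and buffer below
// are stand-ins for the real file I/O, not the actual serializer.
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class NativeOrderSketch
{
    public static void main(String[] args) throws IOException
    {
        int offset = 12;

        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);

        // writeInt is always big-endian, so pre-reverse the bytes when the platform is little-endian
        int toWrite = offset;
        if (ByteOrder.nativeOrder() != ByteOrder.BIG_ENDIAN)
            toWrite = Integer.reverseBytes(toWrite);
        out.writeInt(toWrite);
        out.flush();

        // a native-order reader recovers the original value
        int readBack = ByteBuffer.wrap(bytes.toByteArray()).order(ByteOrder.nativeOrder()).getInt();
        System.out.println(readBack == offset); // true on either endianness
    }
}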