public BasicRecordFile(File file, Serializer<E> serializer) throws IOException { this.file = file; this.serializer = serializer; buffer = new MMapBuffer(file, FileChannel.MapMode.READ_ONLY, ByteOrder.BIG_ENDIAN); memory = buffer.memory(); }
/**
 * Flushes everything written so far — bytes [0, currentAddress) — from the
 * mapped buffer to the backing file (msync).
 *
 * @throws IOException if the flush fails
 */
public void sync() throws IOException { buffer.sync(0, currentAddress); } }
/**
 * Materializes a per-document byte-valued field into {@code file} as a
 * little-endian array of {@code numDocs} bytes, one byte per doc id.
 * The term for each doc is the iterator's current term narrowed to a byte.
 * The mapping is synced to disk before returning.
 *
 * @return the still-open buffer backing the cache file; the caller owns it and
 *         must close it. On failure the buffer is closed quietly and the
 *         original exception is rethrown.
 */
public static MMapBuffer cacheByteFieldToFile(UnsortedIntTermDocIterator iterator, int numDocs, File file) throws IOException {
    final MMapBuffer mapped = new MMapBuffer(file, 0L, numDocs, FileChannel.MapMode.READ_WRITE, ByteOrder.LITTLE_ENDIAN);
    final ByteArray values = mapped.memory().byteArray(0, numDocs);
    final int[] docBatch = new int[BUFFER_SIZE];
    try {
        while (iterator.nextTerm()) {
            final byte value = (byte) iterator.term();
            int fetched;
            do {
                fetched = iterator.nextDocs(docBatch);
                for (int j = 0; j < fetched; ++j) {
                    values.set(docBatch[j], value);
                }
                // a short batch means this term's doc list is exhausted
            } while (fetched == docBatch.length);
        }
        mapped.sync(0, numDocs);
    } catch (RuntimeException | IOException e) {
        // don't leak the mapping if filling or syncing fails
        Closeables2.closeQuietly(mapped, LOG);
        throw e;
    }
    return mapped;
}
// NOTE(review): this snippet is truncated mid-expression — the
// BlockCompressedRecordFile constructor call (and the remainder of the
// try block) is cut off, so only comments are added here.
// Maps the file READ_ONLY in big-endian order; when mlockFiles is set, pins
// the entire mapping into physical memory before constructing the reader.
public static <E> BlockCompressedRecordFile<E> open(final File file, Serializer<E> serializer, CompressionCodec codec, BlockingQueue<Decompressor> decompressorPool, int blockSize, int recordIndexBits, int padBits, boolean mlockFiles, int maxChunkSize) throws IOException { final MMapBuffer buffer = new MMapBuffer(file, FileChannel.MapMode.READ_ONLY, ByteOrder.BIG_ENDIAN); try { if (mlockFiles) { buffer.mlock(0, buffer.memory().length()); final Memory memory = buffer.memory(); openFileCounter.incrementAndGet(); return new BlockCompressedRecordFile<E>(
/**
 * Int-value lookup backed by a memory-mapped array of 16-bit values.
 *
 * @param buffer open mapping that backs the lookup; retained so it can be
 *               closed with this lookup
 * @param length number of short entries readable from offset 0
 */
public MMapShortArrayIntValueLookup(MMapBuffer buffer, int length) {
    this.buffer = buffer;
    this.shortArray = buffer.memory().shortArray(0, length);
}
// NOTE(review): this snippet is truncated/garbled — the loop body's closing
// braces before the inner `finally` are missing and the method is cut off
// mid catch block — so only comments are added here.
// Writes the cached long values for a field into a uniquely named temp file
// ("temp-<field>-<uuid>.intarray.bin") as a native-order int array, closing
// the mapping in a finally block and logging (not propagating) close errors.
// NOTE(review): the mapping is sized 4 * cache.length in int arithmetic —
// presumably overflows for very large caches; verify against callers.
public static void addField(String dir, String fieldName, FlamdexReader r, final long[] cache) throws IOException { final File tempFile = new File(dir, "temp-" + fieldName + "-" + UUID.randomUUID() + ".intarray.bin"); try { final MMapBuffer buffer = new MMapBuffer(tempFile, 0, 4 * cache.length, FileChannel.MapMode.READ_WRITE, ByteOrder.nativeOrder()); try { final IntArray indices = buffer.memory().intArray(0, cache.length); for (int i = 0; i < cache.length; ++i) { indices.set(i, i); } finally { try { buffer.close(); } catch (IOException e) { log.error("error closing MMapBuffer", e);
private static <K, V> File writeToHashOffsets( final File outputDir, final TableMeta<K, V> meta, final Iterable<Pair<K, V>> entries, final long dataSize) throws IOException { // integer serialized size of each entry by hash final File tempSizes = File.createTempFile("tmpsizes", ".bin"); // integer hash (offset from start of shard) of each entry by output order final File tempHashes = File.createTempFile("tmphashes", ".bin"); try (final MMapBuffer sizes = new MMapBuffer(tempSizes, 0L, 4L * meta.numEntries(), FileChannel.MapMode.READ_WRITE, ByteOrder.nativeOrder()); final MMapBuffer hashes = new MMapBuffer(tempHashes, 0L, 4L * meta.numEntries(), FileChannel.MapMode.READ_WRITE, ByteOrder.nativeOrder())) { final List<File> shards = splitToShards(outputDir, meta, entries, dataSize, sizes, hashes); rewriteShardsInOrder(new File(outputDir, meta.DEFAULT_DATA_PATH), meta, shards, sizes, hashes); } finally { tempHashes.delete(); } return tempSizes; }
public NativeBuffer realloc(long newSize) { if (mmapped && newSize >= MMAP_THRESHOLD) { if (OS_TYPE_IS_MAC) { // MAC does not support mremap return createNewAndClose(newSize); } else { final long newAddress = MMapBuffer.mremap(address, memory.length(), newSize); if (newAddress == MMapBuffer.MAP_FAILED) { throw new RuntimeException("anonymous mremap failed with error " + MMapBuffer.errno()); } return new NativeBuffer(newAddress, new DirectMemory(newAddress, newSize, memory.getOrder()), true); } } else if (!mmapped && newSize < MMAP_THRESHOLD) { return realloc0(newSize); } else { return createNewAndClose(newSize); } }
// Test-only accessor exposing the native errno value reported by the last
// failed native call.
@VisibleForTesting int getErrno() { return errno(); }
/**
 * Materializes a per-document short-valued field into {@code file} as a
 * little-endian array of {@code numDocs} 16-bit values, one per doc id.
 * The term for each doc is the iterator's current term narrowed to a short.
 * The mapping is synced to disk before returning.
 *
 * @return the still-open buffer backing the cache file; the caller owns it and
 *         must close it. On failure the buffer is closed quietly and the
 *         original exception is rethrown.
 */
public static MMapBuffer cacheShortFieldToFile(UnsortedIntTermDocIterator iterator, int numDocs, File file) throws IOException {
    final int[] docIdBuf = new int[BUFFER_SIZE];
    // BUGFIX: compute the byte length in long arithmetic — `numDocs * 2` in
    // int overflows for numDocs > 2^30, producing a negative mapping size.
    final long length = 2L * numDocs;
    final MMapBuffer buffer = new MMapBuffer(file, 0L, length, FileChannel.MapMode.READ_WRITE, ByteOrder.LITTLE_ENDIAN);
    final ShortArray shortArray = buffer.memory().shortArray(0, numDocs);
    try {
        while (iterator.nextTerm()) {
            final short term = (short) iterator.term();
            while (true) {
                final int n = iterator.nextDocs(docIdBuf);
                for (int i = 0; i < n; ++i) {
                    shortArray.set(docIdBuf[i], term);
                }
                // a short batch means this term's doc list is exhausted
                if (n < docIdBuf.length) {
                    break;
                }
            }
        }
        buffer.sync(0, length);
    } catch (RuntimeException | IOException e) {
        // don't leak the mapping if filling or syncing fails
        Closeables2.closeQuietly(buffer, LOG);
        throw e;
    }
    return buffer;
}
// NOTE(review): this snippet is truncated mid-expression — the
// BlockCompressedRecordFile constructor call (and the remainder of the
// try block) is cut off, so only comments are added here.
// Maps the file READ_ONLY in big-endian order; when mlockFiles is set, pins
// the entire mapping into physical memory before constructing the reader.
public static <E> BlockCompressedRecordFile<E> open(final File file, Serializer<E> serializer, CompressionCodec codec, BlockingQueue<Decompressor> decompressorPool, int blockSize, int recordIndexBits, int padBits, boolean mlockFiles, int maxChunkSize) throws IOException { final MMapBuffer buffer = new MMapBuffer(file, FileChannel.MapMode.READ_ONLY, ByteOrder.BIG_ENDIAN); try { if (mlockFiles) { buffer.mlock(0, buffer.memory().length()); final Memory memory = buffer.memory(); openFileCounter.incrementAndGet(); return new BlockCompressedRecordFile<E>(
/**
 * Int-value lookup backed by a memory-mapped array of 32-bit values.
 *
 * @param buffer open mapping that backs the lookup; retained so it can be
 *               closed with this lookup
 * @param length number of int entries readable from offset 0
 */
public MMapIntArrayIntValueLookup(MMapBuffer buffer, int length) {
    this.buffer = buffer;
    this.intArray = buffer.memory().intArray(0, length);
}
// NOTE(review): this is a mid-method fragment (no signature visible, ends at
// an open `else`), so only comments are added here.
// Loads table metadata, maps the data file read-only in native order, and maps
// the offsets file only when the offset storage mode requires it (INDEXED, or
// SELECTED without precomputed select offsets). When no offsets mapping is
// needed and the data fits under the configured heap budget, the data is
// copied into a byte[] and the mapping is closed, returning a heap-backed
// reader instead of a mapped one.
final TableMeta<K, V> meta = TableMeta.load(metaPath, offsetsPath, dataPath); final MMapBuffer data = new MMapBuffer(meta.getDataPath(), FileChannel.MapMode.READ_ONLY, ByteOrder.nativeOrder()); final MMapBuffer offsets = TableConfig.OffsetStorage.INDEXED.equals(meta.getConfig().getOffsetStorage()) || (TableConfig.OffsetStorage.SELECTED.equals(meta.getConfig().getOffsetStorage()) && meta.getSelectOffsets() == null) ? new MMapBuffer(meta.getOffsetsPath(), FileChannel.MapMode.READ_ONLY, ByteOrder.nativeOrder()) : null; final long maxDataHeap = maxDataHeapUsage != null ? maxDataHeapUsage : meta.getConfig().getMaxDataHeapUsage(); if (offsets == null && data.memory().length() < maxDataHeap) { final byte[] rawData = new byte[(int) data.memory().length()]; data.memory().getBytes(0, rawData); data.close(); return new TableReader<>(meta, rawData); } else {
// Returns an int-value lookup for `field` backed by a memory-mapped cache
// file under `directory`. Tries to map an existing cache file first; if it is
// absent (FileNotFoundException), builds it atomically from the term-doc
// iterator via cacheIntFieldToFile and uses the freshly written mapping.
// NOTE(review): the trailing `@Override` belongs to the next method, which is
// outside this excerpt.
@Override public IntValueLookup newMMapFieldCache(UnsortedIntTermDocIterator iterator, int numDocs, String field, String directory) throws IOException { final File cacheFile = new File(directory, getMMapFileName(field)); MMapBuffer buffer; try { buffer = new MMapBuffer(cacheFile, FileChannel.MapMode.READ_ONLY, ByteOrder.LITTLE_ENDIAN); } catch (FileNotFoundException e) { buffer = cacheToFileAtomically(iterator, numDocs, field, directory, cacheFile, new CacheToFileOperation<MMapBuffer>() { @Override public MMapBuffer execute(UnsortedIntTermDocIterator iterator, int numDocs, File f) throws IOException { return FlamdexUtils.cacheIntFieldToFile(iterator, numDocs, f); } }); } return new MMapIntArrayIntValueLookup(buffer, numDocs); } @Override
// NOTE(review): this fragment appears to be two alternative excerpts fused
// together (braces are unbalanced — the first throw is not closed before the
// second mmap call), so only comments are added here.
// First path: private READ_WRITE mapping of an open file descriptor (obtained
// reflectively via FD_FIELD from the RandomAccessFile's FD), offset 0.
// Second path: anonymous private READ_WRITE mapping (MAP_ANONYMOUS, fd -1).
// Both raise RuntimeException with the native errno when mmap returns
// MAP_FAILED.
address = MMapBuffer.mmap(length, MMapBuffer.READ_WRITE, MMapBuffer.MAP_PRIVATE, FD_FIELD.getInt(raf.getFD()), 0); if (address == MMapBuffer.MAP_FAILED) { throw new RuntimeException("mmap /dev/zero failed with error "+MMapBuffer.errno()); address = MMapBuffer.mmap(length, MMapBuffer.READ_WRITE, MMapBuffer.MAP_PRIVATE | MMapBuffer.MAP_ANONYMOUS, -1, 0); if (address == MMapBuffer.MAP_FAILED) { throw new RuntimeException("anonymous mmap failed with error "+MMapBuffer.errno());
public DynamicMMapBufferDataOutputStream(final File file, final ByteOrder byteOrder) throws IOException { this.file = file; this.byteOrder = byteOrder; buffer = new MMapBuffer(file, 0, 4096, FileChannel.MapMode.READ_WRITE, byteOrder); memory = buffer.memory(); }
/**
 * Materializes a per-document long-valued field into {@code file} as a
 * little-endian array of {@code numDocs} 64-bit values, one per doc id.
 * The mapping is synced to disk before returning.
 *
 * @return the still-open buffer backing the cache file; the caller owns it and
 *         must close it. On failure the buffer is closed quietly and the
 *         original exception is rethrown.
 */
public static MMapBuffer cacheLongFieldToFile(UnsortedIntTermDocIterator iterator, int numDocs, File file) throws IOException {
    final int[] docIdBuf = new int[BUFFER_SIZE];
    // BUGFIX: compute the byte length in long arithmetic — `numDocs * 8` in
    // int overflows for numDocs > 2^28 (~268M docs), producing a wrong or
    // negative mapping size.
    final long length = 8L * numDocs;
    final MMapBuffer buffer = new MMapBuffer(file, 0L, length, FileChannel.MapMode.READ_WRITE, ByteOrder.LITTLE_ENDIAN);
    final LongArray longArray = buffer.memory().longArray(0, numDocs);
    try {
        while (iterator.nextTerm()) {
            final long term = iterator.term();
            while (true) {
                final int n = iterator.nextDocs(docIdBuf);
                for (int i = 0; i < n; ++i) {
                    longArray.set(docIdBuf[i], term);
                }
                // a short batch means this term's doc list is exhausted
                if (n < docIdBuf.length) {
                    break;
                }
            }
        }
        buffer.sync(0, length);
    } catch (RuntimeException | IOException e) {
        // don't leak the mapping if filling or syncing fails
        Closeables2.closeQuietly(buffer, LOG);
        throw e;
    }
    return buffer;
}