/** * Creates a new accumulator. * * @param basename the basename (usually a global filename followed by the field name, * separated by a dash). * @param payload the payload stored by this accumulator. * @param field the name of the accumulated field. * @param indexingType the type of indexing procedure. * @param documentsPerBatch the number of documents in each batch. * @param batchDir a directory for batch files; batch names will be relativised to this * directory if it is not <code>null</code>. */ public PayloadAccumulator( final String basename, final Payload payload, final String field, final IndexingType indexingType, final int documentsPerBatch, final File batchDir ) { this.basename = basename; this.payload = payload; this.field = field; this.indexingType = indexingType; if ( indexingType != IndexingType.STANDARD && indexingType != IndexingType.REMAPPED ) throw new UnsupportedOperationException( "Non-standard payload-based indices support only standard and remapped indexing" ); if ( indexingType == IndexingType.REMAPPED ) position = new long[ documentsPerBatch ]; this.batchDir = batchDir; this.cutPoints = new IntArrayList(); this.cutPoints.add( 0 ); flags = new EnumMap<Component, Coding>( CompressionFlags.DEFAULT_PAYLOAD_INDEX ); accumulatorStream = new FastByteArrayOutputStream(); accumulator = new OutputBitStream( accumulatorStream ); }
/**
 * Creates a new accumulator.
 *
 * @param basename the basename (usually a global filename followed by the field name,
 * separated by a dash).
 * @param payload the payload stored by this accumulator.
 * @param field the name of the accumulated field.
 * @param indexingType the type of indexing procedure; must be standard or remapped.
 * @param documentsPerBatch the number of documents in each batch.
 * @param batchDir a directory for batch files; batch names will be relativised to this
 * directory if it is not {@code null}.
 * @throws UnsupportedOperationException if {@code indexingType} is neither
 * {@code STANDARD} nor {@code REMAPPED}.
 */
public PayloadAccumulator( final String basename, final Payload payload, final String field, final IndexingType indexingType, final int documentsPerBatch, final File batchDir ) {
	this.basename = basename;
	this.payload = payload;
	this.field = field;
	this.indexingType = indexingType;

	if ( indexingType != IndexingType.STANDARD && indexingType != IndexingType.REMAPPED ) {
		// Payload-based indices do not support any other indexing scheme.
		throw new UnsupportedOperationException( "Non-standard payload-based indices support only standard and remapped indexing" );
	}

	// A remap buffer is needed only when documents get renumbered.
	if ( indexingType == IndexingType.REMAPPED ) {
		position = new long[ documentsPerBatch ];
	}

	this.batchDir = batchDir;

	// Cut points delimit batches; document 0 opens the first batch.
	this.cutPoints = new LongArrayList();
	this.cutPoints.add( 0 );

	flags = new EnumMap<Component, Coding>( CompressionFlags.DEFAULT_PAYLOAD_INDEX );
	accumulatorStream = new FastByteArrayOutputStream();
	accumulator = new OutputBitStream( accumulatorStream );
}
/** * Creates a new accumulator. * * @param ioFactory the factory that will be used to perform I/O. * @param basename the basename (usually a global filename followed by the field name, * separated by a dash). * @param payload the payload stored by this accumulator. * @param field the name of the accumulated field. * @param indexingType the type of indexing procedure. * @param documentsPerBatch the number of documents in each batch. * @param batchDir a directory for batch files; batch names will be relativised to this * directory if it is not {@code null}. */ public PayloadAccumulator( final IOFactory ioFactory, final String basename, final Payload payload, final String field, final IndexingType indexingType, final int documentsPerBatch, final File batchDir ) { this.basename = basename; this.ioFactory = ioFactory; this.payload = payload; this.field = field; this.indexingType = indexingType; if ( indexingType != IndexingType.STANDARD && indexingType != IndexingType.REMAPPED ) throw new UnsupportedOperationException( "Non-standard payload-based indices support only standard and remapped indexing" ); if ( indexingType == IndexingType.REMAPPED ) position = new long[ documentsPerBatch ]; this.batchDir = batchDir; this.cutPoints = new LongArrayList(); this.cutPoints.add( 0 ); flags = new EnumMap<Component, Coding>( CompressionFlags.DEFAULT_PAYLOAD_INDEX ); accumulatorStream = new FastByteArrayOutputStream(); accumulator = new OutputBitStream( accumulatorStream ); }
/**
 * Creates a new accumulator.
 *
 * @param ioFactory the factory that will be used to perform I/O.
 * @param basename the basename (usually a global filename followed by the field name,
 * separated by a dash).
 * @param payload the payload stored by this accumulator.
 * @param field the name of the accumulated field.
 * @param indexingType the type of indexing procedure; only standard and remapped
 * indexing are accepted.
 * @param documentsPerBatch the number of documents in each batch.
 * @param batchDir a directory for batch files; batch names will be relativised to this
 * directory if it is not {@code null}.
 * @throws UnsupportedOperationException if {@code indexingType} is neither
 * {@code STANDARD} nor {@code REMAPPED}.
 */
public PayloadAccumulator( final IOFactory ioFactory, final String basename, final Payload payload, final String field, final IndexingType indexingType, final int documentsPerBatch, final File batchDir ) {
	this.ioFactory = ioFactory;
	this.basename = basename;
	this.payload = payload;
	this.field = field;
	this.indexingType = indexingType;

	if ( indexingType != IndexingType.STANDARD && indexingType != IndexingType.REMAPPED ) {
		throw new UnsupportedOperationException( "Non-standard payload-based indices support only standard and remapped indexing" );
	}

	// One slot per batch document is needed to remap document numbers.
	if ( indexingType == IndexingType.REMAPPED ) {
		position = new long[ documentsPerBatch ];
	}

	this.batchDir = batchDir;

	// Document 0 is always the first cut point.
	this.cutPoints = new IntArrayList();
	this.cutPoints.add( 0 );

	flags = new EnumMap<Component, Coding>( CompressionFlags.DEFAULT_PAYLOAD_INDEX );
	accumulatorStream = new FastByteArrayOutputStream();
	accumulator = new OutputBitStream( accumulatorStream );
}
// NOTE(review): fragments from a larger initialization loop — `i`, `cachePointer`,
// `cachePointerByte`, `cacheSkip`, `cacheSkipByte` and `cacheSkipBitCount` are declared
// in the enclosing (not visible) method. Each statement wires an OutputBitStream over a
// fresh in-memory byte buffer; the trailing `0` is presumably the bitstream's internal
// buffer size (unbuffered) — TODO confirm against the OutputBitStream constructor.
// `cacheSkipBitCount` writes to a NullOutputStream, which by its name discards output and
// is presumably used only to count bits written — verify in the enclosing method.
cachePointer[ i ] = new OutputBitStream( cachePointerByte[ i ] = new FastByteArrayOutputStream(), 0 ); cacheSkip[ i ] = new OutputBitStream( cacheSkipByte[ i ] = new FastByteArrayOutputStream(), 0 ); cacheSkipBitCount[ i ] = new OutputBitStream( NullOutputStream.getInstance(), 0 );
cachePointer[ i ] = new OutputBitStream( cachePointerByte[ i ] = new FastByteArrayOutputStream(), 0 ); cacheSkip[ i ] = new OutputBitStream( cacheSkipByte[ i ] = new FastByteArrayOutputStream(), 0 ); cacheSkipBitCount[ i ] = new OutputBitStream( NullOutputStream.getInstance(), 0 );
cachePointer[ i ] = new OutputBitStream( cachePointerByte[ i ] = new FastByteArrayOutputStream(), 0 ); cacheSkip[ i ] = new OutputBitStream( cacheSkipByte[ i ] = new FastByteArrayOutputStream(), 0 ); cacheSkipBitCount[ i ] = new OutputBitStream( NullOutputStream.getInstance(), 0 );
cachePointer[ i ] = new OutputBitStream( cachePointerByte[ i ] = new FastByteArrayOutputStream(), 0 ); cacheSkip[ i ] = new OutputBitStream( cacheSkipByte[ i ] = new FastByteArrayOutputStream(), 0 ); cacheSkipBitCount[ i ] = new OutputBitStream( NullOutputStream.getInstance(), 0 );
cachePointer[ i ] = new OutputBitStream( cachePointerByte[ i ] = new FastByteArrayOutputStream(), 0 ); cacheSkip[ i ] = new OutputBitStream( cacheSkipByte[ i ] = new FastByteArrayOutputStream(), 0 ); cacheSkipBitCount[ i ] = new OutputBitStream( NullOutputStream.getInstance(), 0 );
cachePointer[ i ] = new OutputBitStream( cachePointerByte[ i ] = new FastByteArrayOutputStream(), 0 ); cacheSkip[ i ] = new OutputBitStream( cacheSkipByte[ i ] = new FastByteArrayOutputStream(), 0 ); cacheSkipBitCount[ i ] = new OutputBitStream( NullOutputStream.getInstance(), 0 );
cachePointer[ i ] = new OutputBitStream( cachePointerByte[ i ] = new FastByteArrayOutputStream(), 0 ); cacheSkip[ i ] = new OutputBitStream( cacheSkipByte[ i ] = new FastByteArrayOutputStream(), 0 ); cacheSkipBitCount[ i ] = new OutputBitStream( NullOutputStream.getInstance(), 0 );
cachePointer[ i ] = new OutputBitStream( cachePointerByte[ i ] = new FastByteArrayOutputStream(), 0 ); cacheSkip[ i ] = new OutputBitStream( cacheSkipByte[ i ] = new FastByteArrayOutputStream(), 0 ); cacheSkipBitCount[ i ] = new OutputBitStream( NullOutputStream.getInstance(), 0 );
// NOTE(review): fragments of a larger method (not visible here). Each allocates an
// in-memory growable byte buffer and wraps it in an OutputBitStream so subsequent
// bit-level writes accumulate in memory.
final FastByteArrayOutputStream baos = new FastByteArrayOutputStream(); final OutputBitStream obs = new OutputBitStream(baos);
final FastByteArrayOutputStream baos = new FastByteArrayOutputStream(); final OutputBitStream obs = new OutputBitStream(baos);
// NOTE(review): fragments of a recursive serialization routine — `n`, `pl` and
// `toStream` are defined in the enclosing (not visible) method/class. Each fragment
// serializes the two children of node `n` into separate in-memory bitstreams and
// records the number of leaves returned for each subtree. Observation: `left` is
// flushed here but `right` is not flushed within the fragment — presumably the flush
// happens later in the enclosing method; verify there is no missing `right.flush()`.
final FastByteArrayOutputStream leftStream = new FastByteArrayOutputStream(); final OutputBitStream left = new OutputBitStream(leftStream, 0); final long leavesLeft = toStream(n.left, left, pl); left.flush(); final FastByteArrayOutputStream rightStream = new FastByteArrayOutputStream(); final OutputBitStream right = new OutputBitStream(rightStream, 0); final long leavesRight = toStream(n.right, right, pl);
final FastByteArrayOutputStream leftStream = new FastByteArrayOutputStream(); final OutputBitStream left = new OutputBitStream(leftStream, 0); final long leavesLeft = toStream(n.left, left, pl); left.flush(); final FastByteArrayOutputStream rightStream = new FastByteArrayOutputStream(); final OutputBitStream right = new OutputBitStream(rightStream, 0); final long leavesRight = toStream(n.right, right, pl);
// NOTE(review): fragment of a larger method. Allocates two independent in-memory byte
// buffers, each wrapped in its own OutputBitStream; the `e`/`a` prefixes presumably
// distinguish two separately accumulated bit sequences — confirm in the enclosing method.
final FastByteArrayOutputStream ebaos = new FastByteArrayOutputStream(); final FastByteArrayOutputStream abaos = new FastByteArrayOutputStream(); final OutputBitStream eobs = new OutputBitStream(ebaos); final OutputBitStream aobs = new OutputBitStream(abaos);
// NOTE(review): fragment — `pl` (a progress logger), `elements`, `log2BucketSize` and
// `transformationStrategy` come from the enclosing (not visible) method. Builds a
// PartialTrie over the keys, then prepares an in-memory bitstream into which the trie
// will be serialized; progress logging is started for the conversion phase.
pl.itemsName = "keys"; final PartialTrie<T> immutableBinaryTrie = new PartialTrie<>(elements, log2BucketSize, transformationStrategy, pl); final FastByteArrayOutputStream fbStream = new FastByteArrayOutputStream(); final OutputBitStream trie = new OutputBitStream(fbStream, 0); pl.start("Converting to bitstream...");
// NOTE(review): fragment of a larger method. Two in-memory byte buffers are created,
// each wrapped in an OutputBitStream, so two distinct bit sequences can be accumulated
// side by side; the meaning of the `e`/`a` prefixes is not visible here — TODO confirm.
final FastByteArrayOutputStream ebaos = new FastByteArrayOutputStream(); final FastByteArrayOutputStream abaos = new FastByteArrayOutputStream(); final OutputBitStream eobs = new OutputBitStream(ebaos); final OutputBitStream aobs = new OutputBitStream(abaos);
// NOTE(review): fragment — a variant of the trie-construction snippet above the same
// codebase uses elsewhere, here passing explicit `size` and `bucketSize` arguments
// (declared in the enclosing, not visible, method). After building the PartialTrie, an
// in-memory bitstream is prepared for its serialization and the progress logger's
// expected update count is taken from the trie's size.
pl.itemsName = "keys"; final PartialTrie<T> immutableBinaryTrie = new PartialTrie<>(elements, size, bucketSize, transformationStrategy, pl); final FastByteArrayOutputStream fbStream = new FastByteArrayOutputStream(); final OutputBitStream trie = new OutputBitStream(fbStream, 0); pl.expectedUpdates = immutableBinaryTrie.size;