/**
 * Creates an iterator over the buffered updates.
 *
 * <p>The shared {@code values} bytes are cloned up front because iteration
 * mutates the clone's offset/length fields; the caller's instance must not
 * be affected.
 */
Iterator(
    int size,
    PagedGrowableWriter offsets,
    PagedGrowableWriter lengths,
    PagedMutable docs,
    BytesRef values,
    long delGen) {
  super(size, docs, delGen);
  // Assignment order of the remaining fields is not significant.
  this.lengths = lengths;
  this.offsets = offsets;
  this.value = values.clone();
}
// NOTE(review): truncated fragment — the enclosing method's signature, the
// declarations of aRef/aIter/lengthToCompare/f, and the closing braces are not
// visible here, so only the visible intent is documented. The code appears to
// compare two multi-page byte sources slice by slice: each BytesRef from the
// iterators is cloned because the comparison loop mutates the clone's
// offset/length fields (see the inline comments), and the single-slice case
// short-circuits to one direct comparison. Presumably `f` is a
// (BytesRef, BytesRef) -> int comparator — TODO confirm against the full method.
BytesRef bRef = bIter.next(); if (aRef != null && bRef != null) { // do we have any data? aRef = aRef.clone(); // we clone since we modify the offsets and length in the iteration below bRef = bRef.clone(); if (aRef.length == a.length() && bRef.length == b.length()) { // is it only one array slice we are comparing? return f.applyAsInt(aRef, bRef); for (int i = 0; i < lengthToCompare;) { if (aRef.length == 0) { aRef = aIter.next().clone(); // must be non null otherwise we have a bug bRef = bIter.next().clone(); // must be non null otherwise we have a bug
/**
 * Creates an iterator over the buffered updates.
 *
 * <p>{@code values} is cloned because iteration rewrites the clone's
 * offset/length fields and must not disturb the shared instance.
 */
Iterator(
    int size,
    PagedGrowableWriter offsets,
    PagedGrowableWriter lengths,
    PagedMutable docs,
    BytesRef values) {
  // Plain field assignments; order is not significant.
  this.docs = docs;
  this.size = size;
  this.lengths = lengths;
  this.offsets = offsets;
  this.value = values.clone();
}
/**
 * Creates an iterator over the buffered updates.
 *
 * <p>{@code values} is cloned so that offset/length mutation during
 * iteration cannot leak back to the shared instance.
 */
Iterator(
    int size,
    PagedGrowableWriter offsets,
    PagedGrowableWriter lengths,
    PagedMutable docs,
    BytesRef values) {
  // Simple field wiring; assignment order does not matter.
  this.size = size;
  this.docs = docs;
  this.offsets = offsets;
  this.lengths = lengths;
  this.value = values.clone();
}
/**
 * Creates an iterator over the buffered updates for the given delete
 * generation.
 *
 * <p>Clones {@code values} up front: iteration mutates the clone's
 * offset/length fields and must leave the caller's instance untouched.
 */
Iterator(
    int size,
    PagedGrowableWriter offsets,
    PagedGrowableWriter lengths,
    PagedMutable docs,
    BytesRef values,
    long delGen) {
  super(size, docs, delGen);
  this.offsets = offsets;
  this.lengths = lengths;
  this.value = values.clone();
}
@Override
public Token clone() {
  // super.clone() is shallow, so the copy initially shares this token's
  // payload reference; deep-copy it so neither token can mutate the other's
  // payload bytes.
  final Token copy = (Token) super.clone();
  if (payload != null) {
    copy.payload = payload.clone();
  }
  return copy;
}
@Override
public Token clone() {
  final Token cloned = (Token) super.clone();
  // The shallow super.clone() shares the payload reference — replace it
  // with a deep copy so the two tokens are fully independent.
  if (payload == null) {
    return cloned;
  }
  cloned.payload = payload.clone();
  return cloned;
}
@Override public Token clone() { Token t = (Token)super.clone(); // Do a deep clone if (payload != null) { t.payload = payload.clone(); } return t; }
@Override
public PayloadAttributeImpl clone() {
  final PayloadAttributeImpl copy = (PayloadAttributeImpl) super.clone();
  // super.clone() is shallow; deep-copy the payload so the clone owns its
  // own bytes and later mutations don't leak across instances.
  if (payload == null) {
    return copy;
  }
  copy.payload = payload.clone();
  return copy;
}
@Override
public void copyTo(AttributeImpl target) {
  final PayloadAttribute other = (PayloadAttribute) target;
  // Hand the target a deep copy (or null) so the two attributes never
  // share mutable payload bytes.
  if (payload == null) {
    other.setPayload(null);
  } else {
    other.setPayload(payload.clone());
  }
}
/**
 * Makes a clone, but replaces the term buffer &amp; start/end offset in the
 * process. This is more efficient than doing a full clone (and then calling
 * {@link #copyBuffer}) because it saves a wasted copy of the old termBuffer.
 */
public Token clone(char[] newTermBuffer, int newTermOffset, int newTermLength, int newStartOffset, int newEndOffset) {
  // Build the copy directly from the replacement buffer/offsets …
  final Token copy = new Token(newTermBuffer, newTermOffset, newTermLength, newStartOffset, newEndOffset);
  // … then carry over the remaining per-token state.
  copy.positionIncrement = positionIncrement;
  copy.flags = flags;
  copy.type = type;
  if (payload != null) {
    // Deep-copy the payload so clone and original stay independent.
    copy.payload = payload.clone();
  }
  return copy;
}
@Override
public void copyTo(AttributeImpl target) {
  super.copyTo(target);
  ((FlagsAttribute) target).setFlags(flags);
  // Give the target its own deep copy of the payload (or null) so the
  // two attributes never share mutable bytes.
  final BytesRef payloadCopy = payload == null ? null : payload.clone();
  ((PayloadAttribute) target).setPayload(payloadCopy);
}
@Override
public void copyTo(AttributeImpl target) {
  super.copyTo(target);
  final FlagsAttribute flagsTarget = (FlagsAttribute) target;
  flagsTarget.setFlags(flags);
  final PayloadAttribute payloadTarget = (PayloadAttribute) target;
  // Deep-copy the payload so target and source stay independent.
  if (payload == null) {
    payloadTarget.setPayload(null);
  } else {
    payloadTarget.setPayload(payload.clone());
  }
}
@Override public void copyTo(AttributeImpl target) { if (target instanceof Token) { final Token to = (Token) target; to.reinit(this); // reinit shares the payload, so clone it: if (payload !=null) { to.payload = payload.clone(); } } else { super.copyTo(target); ((OffsetAttribute) target).setOffset(startOffset, endOffset); ((PositionIncrementAttribute) target).setPositionIncrement(positionIncrement); ((PayloadAttribute) target).setPayload((payload == null) ? null : payload.clone()); ((FlagsAttribute) target).setFlags(flags); ((TypeAttribute) target).setType(type); } }
/**
 * Returns a cloned {@link BytesRef} covering the whole reference if it is
 * backed by a single page, an empty {@link BytesRef} if the reference is
 * empty, and {@code null} if it spans more than one page.
 */
public static BytesRef getSinglePageOrNull(BytesReference ref) throws IOException {
  if (ref.length() == 0) {
    return new BytesRef();
  }
  final BytesRefIterator iterator = ref.iterator();
  // Clone because the iterator may reuse/mutate its BytesRef on advance.
  final BytesRef first = iterator.next().clone();
  // Exactly one page iff the iterator is now exhausted.
  return iterator.next() == null ? first : null;
}
/** Returns an IntsRef either cached or reading postingsEnum. Not null. * @param postingsEnum*/ private IntsRef postingsEnumToIntsRef(PostingsEnum postingsEnum, Bits liveDocs) throws IOException { // (The cache can have empty IntsRefs) //lookup prefixBuf in a cache if (docIdsCache != null) { docIds = docIdsCache.get(prefixBuf); if (docIds != null) { return docIds; } } //read postingsEnum docIds = new IntsRef(termsEnum.docFreq()); int docId; while ((docId = postingsEnum.nextDoc()) != PostingsEnum.NO_MORE_DOCS) { if (liveDocs != null && !liveDocs.get(postingsEnum.docID())) { continue; } docIds.ints[docIds.length++] = docId; } if (docIds.length == 0) docIds = EMPTY_INTSREF; //cache if (docIdsCache != null) { ensureBufIsACopy(); //clone is shallow; that's okay as the prefix isn't overwritten; it's just appended to docIdsCache.put(prefixBuf.clone(), docIds); } return docIds; }
/** Returns an IntsRef either cached or reading postingsEnum. Not null. * @param postingsEnum*/ private IntsRef postingsEnumToIntsRef(PostingsEnum postingsEnum, Bits liveDocs) throws IOException { // (The cache can have empty IntsRefs) //lookup prefixBuf in a cache if (docIdsCache != null) { docIds = docIdsCache.get(prefixBuf); if (docIds != null) { return docIds; } } //read postingsEnum docIds = new IntsRef(termsEnum.docFreq()); int docId; while ((docId = postingsEnum.nextDoc()) != PostingsEnum.NO_MORE_DOCS) { if (liveDocs != null && !liveDocs.get(postingsEnum.docID())) { continue; } docIds.ints[docIds.length++] = docId; } if (docIds.length == 0) docIds = EMPTY_INTSREF; //cache if (docIdsCache != null) { ensureBufIsACopy(); //clone is shallow; that's okay as the prefix isn't overwritten; it's just appended to docIdsCache.put(prefixBuf.clone(), docIds); } return docIds; }
// NOTE(review): truncated fragment — the enclosing method's signature, the
// declarations of aRef/aIter/lengthToCompare/f, and the closing braces are not
// visible here, so only the visible intent is documented. The code appears to
// compare two multi-page byte sources slice by slice: each BytesRef from the
// iterators is cloned because the comparison loop mutates the clone's
// offset/length fields (see the inline comments), and the single-slice case
// short-circuits to one direct comparison. Presumably `f` is a
// (BytesRef, BytesRef) -> int comparator — TODO confirm against the full method.
BytesRef bRef = bIter.next(); if (aRef != null && bRef != null) { // do we have any data? aRef = aRef.clone(); // we clone since we modify the offsets and length in the iteration below bRef = bRef.clone(); if (aRef.length == a.length() && bRef.length == b.length()) { // is it only one array slice we are comparing? return f.applyAsInt(aRef, bRef); for (int i = 0; i < lengthToCompare;) { if (aRef.length == 0) { aRef = aIter.next().clone(); // must be non null otherwise we have a bug bRef = bIter.next().clone(); // must be non null otherwise we have a bug
@Override public final boolean incrementToken() throws IOException { if (!prefixExhausted) { Token nextToken = getNextPrefixInputToken(reusableToken); if (nextToken == null) { prefixExhausted = true; } else { previousPrefixToken.reinit(nextToken); // Make it a deep copy BytesRef p = previousPrefixToken.getPayload(); if (p != null) { previousPrefixToken.setPayload(p.clone()); } setCurrentToken(nextToken); return true; } } Token nextToken = getNextSuffixInputToken(reusableToken); if (nextToken == null) { return false; } nextToken = updateSuffixToken(nextToken, previousPrefixToken); setCurrentToken(nextToken); return true; }
@Override public final boolean incrementToken() throws IOException { if (!prefixExhausted) { Token nextToken = getNextPrefixInputToken(reusableToken); if (nextToken == null) { prefixExhausted = true; } else { previousPrefixToken.reinit(nextToken); // Make it a deep copy BytesRef p = previousPrefixToken.getPayload(); if (p != null) { previousPrefixToken.setPayload(p.clone()); } setCurrentToken(nextToken); return true; } } Token nextToken = getNextSuffixInputToken(reusableToken); if (nextToken == null) { return false; } nextToken = updateSuffixToken(nextToken, previousPrefixToken); setCurrentToken(nextToken); return true; }