void setInvertState() {
  invertState = new FieldInvertState(indexCreatedVersionMajor, fieldInfo.name);
  termsHashPerField = termsHash.addField(invertState, fieldInfo);
  if (fieldInfo.omitsNorms() == false) {
    assert norms == null;
    // Even if no documents actually succeed in setting a norm, we still write norms for this segment:
    norms = new NormValuesWriter(fieldInfo, docState.docWriter.bytesUsed);
  }
}

public void finish() throws IOException {
  if (fieldInfo.omitsNorms() == false) {
    long normValue;
    if (invertState.length == 0) {
      // the field exists in this document, but it did not have
      // any indexed tokens, so we assign a default value of zero
      // to the norm
      normValue = 0;
    } else {
      normValue = similarity.computeNorm(invertState);
    }
    norms.addValue(docState.docID, normValue);
  }
  termsHashPerField.finish();
}

private void writeNorms(SegmentWriteState state, Sorter.DocMap sortMap) throws IOException {
  boolean success = false;
  NormsConsumer normsConsumer = null;
  try {
    if (state.fieldInfos.hasNorms()) {
      NormsFormat normsFormat = state.segmentInfo.getCodec().normsFormat();
      assert normsFormat != null;
      normsConsumer = normsFormat.normsConsumer(state);

      for (FieldInfo fi : state.fieldInfos) {
        PerField perField = getPerField(fi.name);
        assert perField != null;

        // we must check the final value of omitNorms for the fieldinfo: it could have
        // changed for this field since the first time we added it.
        if (fi.omitsNorms() == false && fi.getIndexOptions() != IndexOptions.NONE) {
          assert perField.norms != null : "field=" + fi.name;
          perField.norms.finish(state.segmentInfo.maxDoc());
          perField.norms.flush(state, sortMap, normsConsumer);
        }
      }
    }
    success = true;
  } finally {
    if (success) {
      IOUtils.close(normsConsumer);
    } else {
      IOUtils.closeWhileHandlingException(normsConsumer);
    }
  }
}

public void addValue(int docID, long value) {
  if (docID <= lastDocID) {
    throw new IllegalArgumentException(
        "Norm for \"" + fieldInfo.name + "\" appears more than once in this document (only one value is allowed per field)");
  }

  pending.add(value);
  docsWithField.add(docID);

  updateBytesUsed();

  lastDocID = docID;
}

private void writeNorms(SegmentWriteState state) throws IOException {
  boolean success = false;
  NormsConsumer normsConsumer = null;
  try {
    if (state.fieldInfos.hasNorms()) {
      NormsFormat normsFormat = state.segmentInfo.getCodec().normsFormat();
      assert normsFormat != null;
      normsConsumer = normsFormat.normsConsumer(state);

      for (FieldInfo fi : state.fieldInfos) {
        PerField perField = getPerField(fi.name);
        assert perField != null;

        // we must check the final value of omitNorms for the fieldinfo: it could have
        // changed for this field since the first time we added it.
        if (fi.omitsNorms() == false && fi.getIndexOptions() != IndexOptions.NONE) {
          assert perField.norms != null : "field=" + fi.name;
          perField.norms.finish(state.segmentInfo.maxDoc());
          perField.norms.flush(state, normsConsumer);
        }
      }
    }
    success = true;
  } finally {
    if (success) {
      IOUtils.close(normsConsumer);
    } else {
      IOUtils.closeWhileHandlingException(normsConsumer);
    }
  }
}

public void addValue(int docID, long value) {
  // Fill in any holes:
  for (int i = (int) pending.size(); i < docID; ++i) {
    pending.add(MISSING);
  }

  pending.add(value);
  updateBytesUsed();
}

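// The two addValue variants above buffer per-document norm values in different ways: the older
// one keeps a dense buffer and pads every unseen document with a MISSING placeholder, while the
// newer one records values only for documents that actually have one (pending plus docsWithField)
// and rejects out-of-order or duplicate doc IDs. Below is a minimal, self-contained sketch of
// those two buffering strategies using plain java.util collections instead of Lucene's packed
// writers; the class names DenseNormBuffer and SparseNormBuffer are invented for illustration
// and are not Lucene APIs.
import java.util.ArrayList;
import java.util.List;

// Dense strategy: the buffer is indexed by docID, so gaps are padded with a sentinel value.
class DenseNormBuffer {
  static final long MISSING = 0L;
  private final List<Long> pending = new ArrayList<>();

  void addValue(int docID, long value) {
    // Fill in any holes so that an entry exists for every earlier document:
    for (int i = pending.size(); i < docID; ++i) {
      pending.add(MISSING);
    }
    pending.add(value);
  }
}

// Sparse strategy: values are stored only for documents that have one, in increasing docID order.
class SparseNormBuffer {
  private final List<Long> pending = new ArrayList<>();
  private final List<Integer> docsWithField = new ArrayList<>();
  private int lastDocID = -1;

  void addValue(int docID, long value) {
    if (docID <= lastDocID) {
      throw new IllegalArgumentException("only one value per document, and docIDs must increase: " + docID);
    }
    pending.add(value);
    docsWithField.add(docID);
    lastDocID = docID;
  }
}
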
public void finish() throws IOException {
  if (fieldInfo.omitsNorms() == false && invertState.length != 0) {
    norms.addValue(docState.docID, similarity.computeNorm(invertState));
  }
  termsHashPerField.finish();
}

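// Unlike the earlier finish() variant above, which writes a norm of 0 when the field exists in
// the document but produced no indexed tokens, this revision simply skips such documents: no
// norm value is recorded for them at all.
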
void setInvertState() {
  invertState = new FieldInvertState(fieldInfo.name);
  termsHashPerField = termsHash.addField(invertState, fieldInfo);
  if (fieldInfo.omitsNorms() == false) {
    assert norms == null;
    // Even if no documents actually succeed in setting a norm, we still write norms for this segment:
    norms = new NormValuesWriter(fieldInfo, docState.docWriter.bytesUsed);
  }
}