FieldsReader(FieldsReader other) throws IOException { Map<FieldsProducer,FieldsProducer> oldToNew = new IdentityHashMap<>(); // First clone all formats for(Map.Entry<String,FieldsProducer> ent : other.formats.entrySet()) { FieldsProducer values = ent.getValue().getMergeInstance(); formats.put(ent.getKey(), values); oldToNew.put(ent.getValue(), values); } // Then rebuild fields: for(Map.Entry<String,FieldsProducer> ent : other.fields.entrySet()) { FieldsProducer producer = oldToNew.get(ent.getValue()); assert producer != null; fields.put(ent.getKey(), producer); } segment = other.segment; }
/** * Test the term index. * @lucene.experimental */ public static Status.TermIndexStatus testPostings(CodecReader reader, PrintStream infoStream, boolean verbose, boolean failFast, Version version) throws IOException { // TODO: we should go and verify term vectors match, if // crossCheckTermVectors is on... Status.TermIndexStatus status; final int maxDoc = reader.maxDoc(); try { if (infoStream != null) { infoStream.print(" test: terms, freq, prox..."); } final Fields fields = reader.getPostingsReader().getMergeInstance(); final FieldInfos fieldInfos = reader.getFieldInfos(); status = checkFields(fields, reader.getLiveDocs(), maxDoc, fieldInfos, true, false, infoStream, verbose, version); } catch (Throwable e) { if (failFast) { throw IOUtils.rethrowAlways(e); } msg(infoStream, "ERROR: " + e); status = new Status.TermIndexStatus(); status.error = e; if (infoStream != null) { e.printStackTrace(infoStream); } } return status; }
fields = fields.getMergeInstance();
postingsFields = reader.getPostingsReader().getMergeInstance(); } else { postingsFields = null;
fieldsProducers[i] = reader.getPostingsReader().getMergeInstance(); pointsReaders[i] = reader.getPointsReader(); if (pointsReaders[i] != null) {
/** Returns a merge-optimized view by delegating to the wrapped producer. */
@Override
public FieldsProducer getMergeInstance() throws IOException {
  final FieldsProducer mergeInstance = delegateProducer.getMergeInstance();
  return mergeInstance;
}
}
/** Returns a merge-optimized view by delegating to the wrapped producer. */
@Override
public FieldsProducer getMergeInstance() throws IOException {
  final FieldsProducer mergeInstance = delegateProducer.getMergeInstance();
  return mergeInstance;
}
}
/** Returns a merge-optimized view by delegating to the wrapped fields producer. */
@Override
public FieldsProducer getMergeInstance() throws IOException {
  final FieldsProducer mergeInstance = delegateFieldsProducer.getMergeInstance();
  return mergeInstance;
}
}
/** Returns a merge-optimized view by delegating to the wrapped producer. */
@Override
public FieldsProducer getMergeInstance() throws IOException {
  final FieldsProducer mergeInstance = delegateProducer.getMergeInstance();
  return mergeInstance;
}
}
FieldsReader(FieldsReader other) throws IOException { Map<FieldsProducer,FieldsProducer> oldToNew = new IdentityHashMap<>(); // First clone all formats for(Map.Entry<String,FieldsProducer> ent : other.formats.entrySet()) { FieldsProducer values = ent.getValue().getMergeInstance(); formats.put(ent.getKey(), values); oldToNew.put(ent.getValue(), values); } // Then rebuild fields: for(Map.Entry<String,FieldsProducer> ent : other.fields.entrySet()) { FieldsProducer producer = oldToNew.get(ent.getValue()); assert producer != null; fields.put(ent.getKey(), producer); } segment = other.segment; }
FieldsReader(FieldsReader other) throws IOException { Map<FieldsProducer,FieldsProducer> oldToNew = new IdentityHashMap<>(); // First clone all formats for(Map.Entry<String,FieldsProducer> ent : other.formats.entrySet()) { FieldsProducer values = ent.getValue().getMergeInstance(); formats.put(ent.getKey(), values); oldToNew.put(ent.getValue(), values); } // Then rebuild fields: for(Map.Entry<String,FieldsProducer> ent : other.fields.entrySet()) { FieldsProducer producer = oldToNew.get(ent.getValue()); assert producer != null; fields.put(ent.getKey(), producer); } segment = other.segment; }
FieldsReader(FieldsReader other) throws IOException { Map<FieldsProducer,FieldsProducer> oldToNew = new IdentityHashMap<>(); // First clone all formats for(Map.Entry<String,FieldsProducer> ent : other.formats.entrySet()) { FieldsProducer values = ent.getValue().getMergeInstance(); formats.put(ent.getKey(), values); oldToNew.put(ent.getValue(), values); } // Then rebuild fields: for(Map.Entry<String,FieldsProducer> ent : other.fields.entrySet()) { FieldsProducer producer = oldToNew.get(ent.getValue()); assert producer != null; fields.put(ent.getKey(), producer); } segment = other.segment; }
/** * Test the term index. * @lucene.experimental */ public static Status.TermIndexStatus testPostings(CodecReader reader, PrintStream infoStream, boolean verbose, boolean failFast) throws IOException { // TODO: we should go and verify term vectors match, if // crossCheckTermVectors is on... Status.TermIndexStatus status; final int maxDoc = reader.maxDoc(); try { if (infoStream != null) { infoStream.print(" test: terms, freq, prox..."); } final Fields fields = reader.getPostingsReader().getMergeInstance(); final FieldInfos fieldInfos = reader.getFieldInfos(); status = checkFields(fields, reader.getLiveDocs(), maxDoc, fieldInfos, true, false, infoStream, verbose); } catch (Throwable e) { if (failFast) { IOUtils.reThrow(e); } msg(infoStream, "ERROR: " + e); status = new Status.TermIndexStatus(); status.error = e; if (infoStream != null) { e.printStackTrace(infoStream); } } return status; }
/** * Test the term index. * @lucene.experimental */ public static Status.TermIndexStatus testPostings(CodecReader reader, PrintStream infoStream, boolean verbose, boolean failFast, Version version) throws IOException { // TODO: we should go and verify term vectors match, if // crossCheckTermVectors is on... Status.TermIndexStatus status; final int maxDoc = reader.maxDoc(); try { if (infoStream != null) { infoStream.print(" test: terms, freq, prox..."); } final Fields fields = reader.getPostingsReader().getMergeInstance(); final FieldInfos fieldInfos = reader.getFieldInfos(); status = checkFields(fields, reader.getLiveDocs(), maxDoc, fieldInfos, true, false, infoStream, verbose, version); } catch (Throwable e) { if (failFast) { throw IOUtils.rethrowAlways(e); } msg(infoStream, "ERROR: " + e); status = new Status.TermIndexStatus(); status.error = e; if (infoStream != null) { e.printStackTrace(infoStream); } } return status; }
/** * Test the term index. * @lucene.experimental */ public static Status.TermIndexStatus testPostings(CodecReader reader, PrintStream infoStream, boolean verbose, boolean failFast) throws IOException { // TODO: we should go and verify term vectors match, if // crossCheckTermVectors is on... Status.TermIndexStatus status; final int maxDoc = reader.maxDoc(); try { if (infoStream != null) { infoStream.print(" test: terms, freq, prox..."); } final Fields fields = reader.getPostingsReader().getMergeInstance(); final FieldInfos fieldInfos = reader.getFieldInfos(); status = checkFields(fields, reader.getLiveDocs(), maxDoc, fieldInfos, true, false, infoStream, verbose); } catch (Throwable e) { if (failFast) { IOUtils.reThrow(e); } msg(infoStream, "ERROR: " + e); status = new Status.TermIndexStatus(); status.error = e; if (infoStream != null) { e.printStackTrace(infoStream); } } return status; }
fieldsProducers[i] = reader.getPostingsReader().getMergeInstance();
fieldsProducers[i] = reader.getPostingsReader().getMergeInstance();
fields = fields.getMergeInstance();
fields = fields.getMergeInstance();
fields = fields.getMergeInstance();