public synchronized SolrCore readSchema(String indexName) throws IOException, ParserConfigurationException, SAXException { SolrCore core = cache.get(indexName); if (core == null) { // get from cassandra if (logger.isDebugEnabled()) logger.debug("loading index schema for: " + indexName); ByteBuffer buf = readCoreResource(indexName, CassandraUtils.schemaKey); //Schema resource not found for the core if (buf == null) { throw new IOException(String.format("invalid core '%s'", indexName)); } InputStream stream = new ByteArrayInputStream(ByteBufferUtil.getArray(buf)); SolrResourceLoader resourceLoader = new SolandraResourceLoader(indexName, null); SolrConfig solrConfig = new SolrConfig(resourceLoader, solrConfigFile, null); IndexSchema schema = new IndexSchema(solrConfig, indexName, new InputSource(stream)); core = new SolrCore(indexName, "/tmp", solrConfig, schema, null); if (logger.isDebugEnabled()) logger.debug("Loaded core from cassandra: " + indexName); cache.put(indexName, core); } return core; }
// Make sure the schema's unique-key field is always retrieved, even when the
// client's requested field list leaves it out.
SchemaField keyField = rb.req.getSearcher().getSchema().getUniqueKeyField();
if (keyField != null && !returnFields.contains(keyField)) {
    fieldFilter.add(ByteBufferUtil.bytes(keyField.getName()));
}
// NOTE(review): uniqueField is not referenced in this visible span — confirm it is used by surrounding code.
SchemaField uniqueField = core.getSchema().getUniqueKeyField();
// Replace any existing document matching idTerm for this shard, then add the freshly built Lucene document.
// NOTE(review): both update and add are issued here — confirm the surrounding control flow selects one path.
writer.updateDocument(indexName, idTerm, cmd.getLuceneDocument(schema), schema.getAnalyzer(), shardedId, false); writer.addDocument(indexName, cmd.getLuceneDocument(schema), schema.getAnalyzer(), shardedId, false, rms);
@Override
public ValueSource getValueSource(SchemaField field, QParser parser) {
    // Expose this date field to function queries through a dedicated source.
    final String fieldName = field.getName();
    return new DateFieldSource(fieldName, field.getType());
}
@Override
public ValueSource getValueSource(SchemaField field) {
    // Function-query source producing per-document pseudo-random values,
    // seeded from the field name.
    final String sourceField = field.getName();
    return new RandomValueSource(sourceField);
}
@Override
public ValueSource getValueSource(SchemaField field) {
    // Trie-encoded dates are indexed as longs, so decode with the long parser.
    final String sourceField = field.getName();
    return new TrieDateFieldSource(sourceField, FieldCache.NUMERIC_UTILS_LONG_PARSER);
}
@Override
public String toString() {
    // Renders as: name{type=T,default=D,properties=P, required=true}
    // (default and required segments appear only when set).
    StringBuilder sb = new StringBuilder(name);
    sb.append("{type=").append(type.getTypeName());
    if (defaultValue != null) {
        sb.append(",default=").append(defaultValue);
    }
    sb.append(",properties=").append(propertiesToString(properties));
    if (required) {
        sb.append(", required=true");
    }
    return sb.append("}").toString();
}
protected Field.Index getFieldIndex(SchemaField field, String internalVal) {
    // Fields that are not indexed are never searchable.
    if (!field.indexed()) {
        return Field.Index.NO;
    }
    // Indexed: honor this type's tokenization setting.
    if (isTokenized()) {
        return Field.Index.TOKENIZED;
    }
    return Field.Index.UN_TOKENIZED;
}
@Override
public Query getRangeQuery(QParser parser, SchemaField field, String min, String max, boolean minInclusive, boolean maxInclusive) {
    // Resolve date-math expressions (e.g. "NOW/DAY-1MONTH") before delegating
    // to the typed range query; a null endpoint means the range is open-ended.
    final Object lower = (min == null) ? null : super.parseMath(null, min);
    final Object upper = (max == null) ? null : super.parseMath(null, max);
    return getRangeQuery(parser, field, lower, upper, minInclusive, maxInclusive);
}
// Build the lookup/delete term from the internal (indexed) form of the document id.
Term term = idTerm.createTerm(idFieldType.toInternal(cmd.id));
/**
 * Re-creates the index-time and query-time analyzers. If you make any
 * modifications to the field map ({@link IndexSchema#getFields()}), this
 * method must be called to sync the internally cached field analyzers.
 *
 * @since solr 1.3
 */
public void refreshAnalyzers() {
    analyzer = new SolrIndexAnalyzer();
    queryAnalyzer = new SolrQueryAnalyzer();
}
// Debug-friendly rendering: "<description>=<int value>" for a single doc.
@Override public String toString(int doc) {
    return description() + '=' + intVal(doc);
}
}; // closes the enclosing anonymous class (declared outside this view)
@Override
protected void init(IndexSchema schema, Map<String, String> args) {
    super.init(schema, args);
    // Tokenizing makes no sense for this field type, so forbid the
    // "tokenized" property from being set on it.
    restrictProps(TOKENIZED);
}
protected void init(IndexSchema schema, Map<String,String> args) {
    // This field type is always tokenized.
    properties |= TOKENIZED;
    // For schema versions newer than 1.1, clear the OMIT_TF_POSITIONS default
    // before super.init applies the user's per-field settings.
    if (schema.getVersion()> 1.1f) properties &= ~OMIT_TF_POSITIONS;
    super.init(schema, args);
}
// Debug-friendly rendering: "<description>=<string value>" for a single doc.
public String toString(int doc) {
    return description() + '=' + strVal(doc);
}
}; // closes the enclosing anonymous class (declared outside this view)
// Debug-friendly rendering: "<description>=<float value>" for a single doc.
public String toString(int doc) {
    return description() + '=' + floatVal(doc);
}
}; // closes the enclosing anonymous class (declared outside this view)
// Debug-friendly rendering: "<description>=<int value>" for a single doc.
public String toString(int doc) {
    return description() + '=' + intVal(doc);
}
}; // closes the enclosing anonymous class (declared outside this view)
// Debug-friendly rendering: "<description>=<long value>" for a single doc.
public String toString(int doc) {
    return description() + '=' + longVal(doc);
}
}; // closes the enclosing anonymous class (declared outside this view)
// Debug-friendly rendering: "<description>=<double value>" for a single doc.
public String toString(int doc) {
    return description() + '=' + doubleVal(doc);
}
}; // closes the enclosing anonymous class (declared outside this view)