/**
 * Wraps a primitive region value in a single Lucene document, storing it under the
 * {@link LuceneService#REGION_VALUE_FIELD} field name.
 */
@Override
public Collection<Document> toDocuments(LuceneIndex index, final Object value) {
  final Document document = new Document();
  SerializerUtil.addField(document, LuceneService.REGION_VALUE_FIELD, value);
  return Collections.singleton(document);
}
}
public ReflectionLuceneSerializer(Class<? extends Object> clazz, String[] indexedFields) { Set<String> fieldSet = new HashSet<String>(); fieldSet.addAll(Arrays.asList(indexedFields)); // Iterate through all declared fields and save them // in a list if they are an indexed field and have the correct // type. ArrayList<Field> foundFields = new ArrayList<Field>(); while (clazz != Object.class) { for (Field field : clazz.getDeclaredFields()) { Class<?> type = field.getType(); if (fieldSet.contains(field.getName()) && SerializerUtil.isSupported(type)) { field.setAccessible(true); foundFields.add(field); } } clazz = clazz.getSuperclass(); } this.fields = foundFields.toArray(new Field[foundFields.size()]); }
/**
 * Convert a Apache Geode key into a key search term that can be used to update or delete the
 * document associated with this key.
 */
public static Term toKeyTerm(Object key) {
  return key instanceof String
      ? new Term(KEY_FIELD, (String) key)
      : new Term(KEY_FIELD, keyToBytes(key));
}
/**
 * Replaces any documents previously indexed for this key with documents derived from
 * the new value. A serialization failure is counted and logged (with its stack trace)
 * rather than propagated, so one bad entry does not halt index maintenance.
 *
 * @param key the region key identifying the entry
 * @param value the new region value to index
 * @throws IOException if the underlying index writer fails
 */
@Override
public void update(Object key, Object value) throws IOException {
  long start = stats.startUpdate();
  try {
    Collection<Document> docs;
    try {
      docs = serializer.toDocuments(index, value);
    } catch (Exception e) {
      // Best effort: record the failure with its full stack trace (the original code
      // logged only e.getMessage(), discarding the cause) and skip this entry.
      stats.incFailedEntries();
      logger.info("Failed to update index for " + value, e);
      return;
    }
    docs.forEach(doc -> SerializerUtil.addKey(key, doc));
    Term keyTerm = SerializerUtil.toKeyTerm(key);
    writer.updateDocuments(keyTerm, docs);
  } finally {
    // Runs on every path, including the early return above.
    stats.endUpdate(start);
  }
}
/**
 * Registers the shared primitive serializer for every supported primitive type and
 * prepares the PDX serializer used for non-primitive values.
 */
public HeterogeneousLuceneSerializer() {
  final PrimitiveSerializer primitives = new PrimitiveSerializer();
  for (Class<?> type : SerializerUtil.supportedPrimitiveTypes()) {
    mappers.put(type, primitives);
  }
  pdxMapper = new PdxLuceneSerializer();
}
/**
 * Removes every document previously indexed under the given region key, timing the
 * operation with the repository's update statistics.
 */
@Override
public void delete(Object key) throws IOException {
  final long start = stats.startUpdate();
  try {
    writer.deleteDocuments(SerializerUtil.toKeyTerm(key));
  } finally {
    stats.endUpdate(start);
  }
}
/**
 * Indexes documents derived from a newly created region entry. A serialization failure
 * is counted and logged (with its stack trace) rather than propagated, so one bad entry
 * does not halt index maintenance.
 *
 * @param key the region key identifying the entry
 * @param value the region value to index
 * @throws IOException if the underlying index writer fails
 */
@Override
public void create(Object key, Object value) throws IOException {
  long start = stats.startUpdate();
  try {
    Collection<Document> docs;
    try {
      docs = serializer.toDocuments(index, value);
    } catch (Exception e) {
      // Best effort: record the failure with its full stack trace (the original code
      // logged only e.getMessage(), discarding the cause) and skip this entry.
      stats.incFailedEntries();
      logger.info("Failed to add index for " + value, e);
      return;
    }
    docs.forEach(doc -> SerializerUtil.addKey(key, doc));
    writer.addDocuments(docs);
  } finally {
    // Runs on every path, including the early return above.
    stats.endUpdate(start);
  }
}
/**
 * Runs the Lucene query against an acquired searcher, feeding each hit's Geode key and
 * score to the collector. The searcher is always released and the query statistics are
 * always recorded, even if searching or collecting throws.
 */
@Override
public void query(Query query, int limit, IndexResultCollector collector) throws IOException {
  final long start = stats.startRepositoryQuery();
  int totalHits = 0;
  final IndexSearcher searcher = searcherManager.acquire();
  try {
    final TopDocs hits = searcher.search(query, limit);
    totalHits = hits.totalHits;
    for (ScoreDoc hit : hits.scoreDocs) {
      final Document doc = searcher.doc(hit.doc);
      final Object key = SerializerUtil.getKey(doc);
      if (logger.isDebugEnabled()) {
        logger.debug("query found doc:" + doc + ":" + hit);
      }
      collector.collect(key, hit.score);
    }
  } finally {
    searcherManager.release(searcher);
    stats.endRepositoryQuery(start, totalHits);
  }
}
/**
 * Extract the Apache Geode key from a lucene document
 */
public static Object getKey(Document doc) {
  final IndexableField keyField = doc.getField(KEY_FIELD);
  final String asString = keyField.stringValue();
  // String keys are stored as text; all other key types were serialized to bytes.
  return asString != null ? asString : keyFromBytes(keyField.binaryValue());
}
/**
 * Resolves a field value from a region value, either through the PDX API or through
 * reflection. Returns {@code null} when the field does not exist or cannot be read.
 *
 * Reflection now searches the whole class hierarchy (excluding {@code Object}), since
 * the original {@code clazz.getDeclaredField} looked only at the concrete class and
 * therefore missed inherited fields — fields that ReflectionLuceneSerializer does index
 * by walking the superclass chain.
 */
private Object getFieldValue(Object value, String fieldName) {
  if (value instanceof PdxInstance) {
    PdxInstance pdx = (PdxInstance) value;
    return pdx.hasField(fieldName) ? pdx.getField(fieldName) : null;
  }

  Class<?> clazz = value.getClass();
  // A supported primitive value is itself the "region value" field.
  if (fieldName.equals(LuceneService.REGION_VALUE_FIELD)
      && SerializerUtil.supportedPrimitiveTypes().contains(clazz)) {
    return value;
  }
  for (Class<?> current = clazz; current != null && current != Object.class;
      current = current.getSuperclass()) {
    try {
      Field field = current.getDeclaredField(fieldName);
      field.setAccessible(true);
      return field.get(value);
    } catch (NoSuchFieldException e) {
      // Not declared on this class; keep walking up the hierarchy.
    } catch (Exception e) {
      // Best effort: an unreadable field is treated as absent.
      return null;
    }
  }
  return null;
}
}
/**
 * Adds a non-collection value to the document. When this is the last path segment, the
 * value is stored directly under the indexed field name; otherwise it delegates to
 * addFieldValue with the remaining path segments.
 */
private void addFieldValueForNonCollectionObject(Document doc, String indexedFieldName,
    Object fieldValue, List<String> tokenizedFields) {
  final boolean lastSegment = tokenizedFields.size() == 1;
  if (lastSegment) {
    SerializerUtil.addField(doc, indexedFieldName, fieldValue);
    return;
  }
  final List<String> remaining = tokenizedFields.subList(1, tokenizedFields.size());
  addFieldValue(doc, indexedFieldName, fieldValue, remaining);
}
/**
 * Add a Apache Geode key to a document
 */
public static void addKey(Object key, Document doc) {
  // String keys are stored as text; all other key types are serialized to bytes.
  final StringField keyField = key instanceof String
      ? new StringField(KEY_FIELD, (String) key, Store.YES)
      : new StringField(KEY_FIELD, keyToBytes(key), Store.YES);
  doc.add(keyField);
}
@Override public Collection<Document> toDocuments(LuceneIndex index, Object value) { Document doc = new Document(); for (Field field : fields) { try { Object fieldValue = field.get(value); if (fieldValue == null) { continue; } SerializerUtil.addField(doc, field.getName(), fieldValue); } catch (IllegalArgumentException | IllegalAccessException e) { // TODO - what to do if we can't read a field? } } if (logger.isDebugEnabled()) { logger.debug("ReflectionLuceneSerializer.toDocument:" + doc); } return Collections.singleton(doc); } }
/**
 * Builds a single Lucene document from the indexed PDX fields of the value. Fields the
 * PDX instance does not have, and fields whose value is null, are skipped.
 */
@Override
public Collection<Document> toDocuments(LuceneIndex index, Object value) {
  final Document doc = new Document();
  final PdxInstance pdx = (PdxInstance) value;
  for (String fieldName : index.getFieldNames()) {
    if (!pdx.hasField(fieldName)) {
      continue;
    }
    final Object fieldValue = pdx.getField(fieldName);
    if (fieldValue != null) {
      SerializerUtil.addField(doc, fieldName, fieldValue);
    }
  }
  if (logger.isDebugEnabled()) {
    logger.debug("PdxLuceneSerializer.toDocument:" + doc);
  }
  return Collections.singleton(doc);
}
}