Refine search
public void add(SolrInputField field) { if (field != null) { // Overwrite the 'id' field if (field.getName().equals(IdFieldMapper.INPUT_FIELD)) { doc.setField(field.getName(), field.getValue(), field.getBoost()); } // Append other fields else { doc.addField(field.getName(), field.getValue(), field.getBoost()); } } }
/**
 * Builds a Solr document for indexing from the given key-value pairs.
 *
 * @param hshMap field name to field value mapping
 * @return a {@link SolrInputDocument} containing one field per map entry
 * @throws Exception declared for caller compatibility; per-field errors are
 *         logged and the remaining fields are still indexed
 */
public SolrInputDocument buildSolrDocument(HashMap<String, Object> hshMap) throws Exception {
    SolrInputDocument doc = new SolrInputDocument();
    // Iterate entries directly instead of keySet()+get() (one lookup per entry).
    // The original also allocated a throw-away SolrInputField per key that was
    // never used; that dead allocation is removed.
    for (java.util.Map.Entry<String, Object> entry : hshMap.entrySet()) {
        try {
            doc.addField(entry.getKey(), entry.getValue(), 1.0f);
        } catch (Exception e) {
            // Best-effort: a single rejected field should not abort the whole document.
            e.printStackTrace();
        }
    }
    return doc;
}
/**
 * Add a SolrInputDocument to this writer.
 * <p>
 * Adding multiple documents without ids will result in an IllegalStateException being thrown.
 */
@Override
public void add(SolrInputDocument solrDocument) {
    SolrInputField uniqueKeySolrField = solrDocument.getField(uniqueKeyField);
    String docId;
    if (uniqueKeySolrField == null || uniqueKeySolrField.getValueCount() == 0) {
        // Document carries no id of its own: fall back to the record-level id,
        // which may only be consumed once per record.
        if (idUsed) {
            throw new IllegalStateException("Document id '" + documentId + "' has already been used by this record");
        }
        solrDocument.addField(uniqueKeyField, documentId);
        idUsed = true;
        docId = documentId;
    } else {
        docId = LEGACY_MODE
            ? uniqueKeySolrField.getValue().toString()
            : uniqueKeySolrField.getFirstValue().toString();
    }
    if (tableNameField != null) {
        solrDocument.addField(tableNameField, tableName);
    }
    updateCollector.add(docId, solrDocument);
}
/** Convenience factory: a {@link SolrInputField} named {@code name} holding all of {@code values}. */
final SolrInputField field(String name, Object... values) {
    SolrInputField result = new SolrInputField(name);
    for (int i = 0; i < values.length; i++) {
        result.addValue(values[i]);
    }
    return result;
}
/**
 * Set a field value, replacing the existing value if present.
 *
 * @param name  name of the field to set
 * @param value value of the field
 */
public void setField(String name, Object value) {
    SolrInputField replacement = new SolrInputField(name);
    replacement.setValue(value);
    // put() overwrites any previously stored field of the same name.
    _fields.put(name, replacement);
}
// NOTE(review): fragment — the stream pipeline below is truncated; the forEach
// body and the enclosing method are not visible in this excerpt.
log.debug("Atomic Update - Manually update Document [{}].", sdoc.getField(ID).getValue());
inputDoc.addChildDocuments(nestedDocs);
sdoc.getFieldNames().stream()
    // Skip the id/type/version bookkeeping fields; all other fields flow through.
    .filter(fn -> !fn.equals(ID) && !fn.equals(TYPE) && !fn.equals("_version_") )//TODO: Add all the special fields or do the opposite check, whether it fits a dynamic Vind field
    .forEach( fn -> {
private void addField(SolrInputDocument doc, String name, String value) { // find if such field already exists if (doc.get(name) == null) { doc.addField(name, value); } else { // for some fields we can't allow multiple values, like ID field phrase, so we have to perform this check SolrInputField f = doc.get(name); boolean valueExists = false; for (Object existingValue : f.getValues()) { if (existingValue == null && value == null) { valueExists = true; break; } if (existingValue != null && value != null && existingValue.equals(value)) { valueExists = true; break; } } if (!valueExists) { f.addValue(value); } } }
/**
 * Adds a non-blank name/value pair to the document. When {@code raw} is set the
 * value is stored under "&lt;fieldName&gt;_raw" and registered with the raw field
 * type; otherwise it is stored under {@code fieldName} with the analyzed type.
 *
 * @return the field that was added, or null when name or value was blank
 */
private SolrInputField addToDocument(SolrInputDocument doc, String fieldName, String value, boolean raw) {
    if (StringUtils.isBlank(fieldName) || StringUtils.isBlank(value)) {
        return null;
    }
    // The two original branches differed only in the field name and schema type;
    // fold them into data.
    String targetName = raw ? fieldName + "_raw" : fieldName;
    String typeName = raw ? RAW_FIELD_TYPE_NAME : ANALYZED_FIELD_TYPE_NAME;
    SolrInputField field = new SolrInputField(targetName);
    field.setValue(value);
    doc.addField(field.getName(), field.getValue());
    addFieldNameToSchema(field.getName(), typeName, false, true);
    return field;
}
// NOTE(review): fragment — the enclosing condition/method is truncated and the two
// addFieldNameToSchema calls below are cut off mid-argument-list.
        && !StringUtils
        .isBlank((String) facetValue)) {
    SolrInputField f = doc.getField(virtualPathField);
    // NOTE(review): f.toString() is the field's debug representation, not its value;
    // presumably f.getValue() was intended here — confirm against the full source.
    f.setValue(f.toString() + "/" + facetValue);
} else {
    doc.addField(virtualPathField, (String) facetValue);
    addFieldNameToSchema(virtualPathField,
    doc.addField(referenceFacet.getField(), (String) facetValue);
    addFieldNameToSchema(referenceFacet.getField(),
/** Convenience factory: a {@link SolrInputDocument} containing exactly the given fields. */
final SolrInputDocument doc(SolrInputField... fields) {
    SolrInputDocument document = new SolrInputDocument();
    for (int i = 0; i < fields.length; i++) {
        document.put(fields[i].getName(), fields[i]);
    }
    return document;
}
// NOTE(review): fragment — this method is heavily truncated: the loop body references
// 'oval' while the loop variable is 'ovalx', 'otype' is declared twice in the same
// scope, and several closing braces are missing. Do not rely on this excerpt for behavior.
private void mapField(SolrInputDocument input, FIELD col) throws SQLException {
    SolrInputField ifield = input.get(col.name());
    if (ifield != null) {
        Collection<Object> vals = ifield.getValues();
        ArrayList<String> newvals = new ArrayList<>();
        for (Object ovalx : vals) {
            int legacy = Integer.parseInt(oval);
            if (col == FIELD.id) {
                Object otype = input.getFieldValue("type");
                if (otype != null) {
                    int type = Integer.parseInt(otype.toString());
                    Object otype = input.getFieldValue("scopeType");
                    if (otype != null) {
                        int type = Integer.parseInt(otype.toString());
// NOTE(review): fragment — truncated mid-method; the loop over sigFields and several
// closing braces are missing between these statements.
SolrInputDocument doc = cmd.getSolrInputDocument();
if (sigFields == null || sigFields.size() == 0) {
    // No signature fields configured: sign over every field present on the document.
    Collection<String> docFields = doc.getFieldNames();
    sigFields = new ArrayList<String>(docFields.size());
    sigFields.addAll(docFields);
SolrInputField f = doc.getField(field);
if (f != null) {
    sig.add(field);
    Object o = f.getValue();
    if (o instanceof String) {
        sig.add((String)o);
doc.addField(signatureField, sigString);
/**
 * Add a SolrInputDocument to this writer.
 * <p>
 * Adding multiple documents without ids will result in an IllegalStateException being thrown.
 */
@Override
public void add(SolrInputDocument solrDocument) {
    String docId = documentId;
    SolrInputField uniqueKeySolrField = solrDocument.getField(uniqueKeyField);
    // Treat a present-but-empty unique-key field the same as a missing one;
    // previously getValue() on an empty field returned null and NPE'd on toString().
    if (uniqueKeySolrField == null || uniqueKeySolrField.getValueCount() == 0) {
        if (idUsed) {
            throw new IllegalStateException("Document id '" + documentId + "' has already been used by this record");
        }
        solrDocument.addField(uniqueKeyField, documentId);
        idUsed = true;
    } else {
        docId = uniqueKeySolrField.getValue().toString();
    }
    if (tableNameField != null) {
        solrDocument.addField(tableNameField, tableName);
    }
    updateCollector.add(docId, solrDocument);
}
/**
 * Merge a {@code SolrInputDocument} into the master document, adding a prefix to every field name as it is added.
 *
 * @param inputDocument document to be added
 * @param prefix        prefix to be added to field names
 */
public void add(SolrInputDocument inputDocument, String prefix) {
    inputDocument.entrySet().forEach(entry -> {
        SolrInputField source = entry.getValue();
        document.addField(prefix + entry.getKey(), source.getValues(), source.getBoost());
    });
}
public static void indexDescriptiveMetadataFields(ModelService model, String aipId, String representationId, List<DescriptiveMetadata> metadataList, SolrInputDocument doc) throws RequestNotValidException, GenericException, NotFoundException, AuthorizationDeniedException { // guarding against repeated fields Set<String> usedNonRepeatableFields = new HashSet<>(); for (DescriptiveMetadata metadata : metadataList) { StoragePath storagePath = ModelUtils.getDescriptiveMetadataStoragePath(aipId, representationId, metadata.getId()); Binary binary = model.getStorage().getBinary(storagePath); try { SolrInputDocument fields = getDescriptiveMetadataFields(binary, metadata.getType(), metadata.getVersion()); for (SolrInputField field : fields) { if (NON_REPEATABLE_FIELDS.contains(field.getName())) { boolean added = usedNonRepeatableFields.add(field.getName()); if (added) { doc.addField(field.getName(), field.getValue()); } } else { doc.addField(field.getName(), field.getValue()); } } } catch (GenericException e) { LOGGER.warn("Problem processing descriptive metadata: {}", e.getMessage(), e); } catch (Exception e) { LOGGER.error("Error processing descriptive metadata: {}", metadata, e); } } }
/**
 * Merge a {@code SolrInputDocument} into the master document, adding a prefix to every field name as it is added.
 *
 * @param inputDocument document to be added
 * @param prefix        prefix to be added to field names
 */
public void add(SolrInputDocument inputDocument, String prefix) {
    for (Entry<String, SolrInputField> entry : inputDocument.entrySet()) {
        String prefixedName = prefix + entry.getKey();
        document.addField(prefixedName, entry.getValue().getValues());
    }
}
// NOTE(review): fragment — crtTerm is created empty and immediately queried for
// ID_FIELD; as shown, get(ID_FIELD) returns null and getFirstValue() would NPE.
// Presumably the snippet omits code that populates crtTerm between these two
// statements — confirm against the full source.
SolrInputDocument crtTerm = new SolrInputDocument();
this.data.put(String.valueOf(crtTerm.get(ID_FIELD).getFirstValue()), crtTerm);
// NOTE(review): fragment — truncated: the try matching the catch below and several
// closing braces are not visible in this excerpt.
doc.addField(RodaConstants.FILE_PATH, path);
if (path != null && !path.isEmpty()) {
    List<String> ancestorsPath = SolrUtils.getFileAncestorsPath(file.getAipId(), file.getRepresentationId(), path);
    if (!ancestorsPath.isEmpty()) {
        // Direct parent is the last ancestor in the path.
        doc.addField(RodaConstants.FILE_PARENT_UUID, ancestorsPath.get(ancestorsPath.size() - 1));
        doc.addField(RodaConstants.FILE_ANCESTORS_PATH, ancestorsPath);
SolrInputDocument premisSolrDoc = PremisV3Utils.getSolrDocument(premisFile);
// Copy all PREMIS-derived fields into the main document.
doc.putAll(premisSolrDoc);
sizeInBytes = SolrUtils.objectToLong(premisSolrDoc.get(RodaConstants.FILE_SIZE).getValue(), 0L);
} catch (GenericException e) {
    LOGGER.warn("Could not index file PREMIS information", e);
// NOTE(review): fragment — the enclosing method and the loop's closing brace are
// truncated in this excerpt.
SolrInputField inputField = doc.getField(multiTextFieldName);
SolrInputField outputField = new SolrInputField(inputField.getName());
if (inputField.getValues() != null) {
    for (final Object inputValue : inputField.getValues()) {
        Object outputValue = inputValue;
        // Re-add each value with a neutral per-value boost, then carry over the
        // original field-level boost.
        outputField.addValue(outputValue, 1.0F);
outputField.setBoost(inputField.getBoost());
doc.removeField(multiTextFieldName);
// put() replaces the field wholesale with the rewritten copy.
doc.put(multiTextFieldName, outputField);
return doc;