/**
 * Make a DimensionRow by setting all of the field values to the given value.
 *
 * @param value  Value for dimension fields
 *
 * @return a DimensionRow
 */
private DimensionRow makeDimensionRow(String value) {
    // Every field of this dimension gets the same value, in field-declaration order
    LinkedHashMap<DimensionField, String> fieldValues = new LinkedHashMap<>();
    dimension.getDimensionFields().forEach(field -> fieldValues.put(field, value));
    return new DimensionRow(dimension.getKey(), fieldValues);
}
/**
 * Get DimensionRows as a map of Field Name and Value.
 *
 * @return map of fieldname,value
 */
public LinkedHashMap<String, String> getRowMap() {
    // Preserve this row's field iteration order in the resulting name->value map
    LinkedHashMap<String, String> rowMap = new LinkedHashMap<>(size());
    for (Map.Entry<DimensionField, String> fieldEntry : entrySet()) {
        rowMap.put(fieldEntry.getKey().getName(), fieldEntry.getValue());
    }
    return rowMap;
}
}
/**
 * Keep only the dimension rows whose filtered field value is NOT in the filter's value set.
 *
 * @param dimensionRows  The unfiltered set of dimension rows
 * @param filter  The api filter
 *
 * @return Tree set of DimensionRows that do not match any filter value
 */
@Override
public TreeSet<DimensionRow> notinFilterOperation(TreeSet<DimensionRow> dimensionRows, ApiFilter filter) {
    // Build the kept set directly rather than copying everything and removing matches
    TreeSet<DimensionRow> keptRows = new TreeSet<>();
    for (DimensionRow row : dimensionRows) {
        if (!filter.getValues().contains(row.get(filter.getDimensionField()))) {
            keptRows.add(row);
        }
    }
    return keptRows;
}
// NOTE(review): fragment of a row-ingestion loop (presumably addAllDimensionRows) — the
// enclosing method header and several statements are missing from this view; the stray ");"
// below indicates elided lines. Do not treat this snippet as compilable on its own.
if (dimensionRow.isEmpty()) {
    // An empty row carries no data; skip it rather than failing the whole batch
    LOG.warn("Ignoring attempt to add a dimension row with no data {}", dimensionRow);
    continue;
} else if (dimensionRow.get(getKey()) == null) {
    // A row without a key value cannot be stored or looked up — fail fast
    LOG.warn("Attempting to add a dimension row with a null key {}", dimensionRow);
    throw new IllegalArgumentException("Cannot add dimension with null key.");
    // Store key for this row, derived from the dimension key name and the row's key value
    String rowIdKey = DimensionStoreKeyUtils.getRowKey(getKey().getName(), dimensionRow.get(getKey()));
    );
    // Re-parse the previously stored field values; identical rows need no re-write
    dimensionRowOld = parseDimensionRow(fieldNameValueMap);
    if (dimensionRow.equals(dimensionRowOld)) {
        continue;
/** * Update the dimension row in the index. * * @param luceneDimensionRowDoc Document to use for doing the update * @param fieldMap Mapping of DimensionFields to the Document's fields * @param writer Lucene IndexWriter to update the indexes of * @param newRow Row to update * * @throws IOException if there is a problem updating the document */ private void updateDimensionRow( Document luceneDimensionRowDoc, Map<DimensionField, Field> fieldMap, IndexWriter writer, DimensionRow newRow ) throws IOException { // Update the document fields with each field from the new dimension row for (DimensionField field : dimension.getDimensionFields()) { // Get the field to update from the lookup map Field fieldToUpdate = fieldMap.get(field); // Set field value to updated value fieldToUpdate.setStringValue(newRow.getOrDefault(field, "")); } // Build the term to delete the old document by the key value (which should be unique) Term keyTerm = new Term(fieldMap.get(dimension.getKey()).name(), newRow.getOrDefault(dimension.getKey(), "")); // Update the document by the key term writer.updateDocument(keyTerm, luceneDimensionRowDoc); }
/**
 * Create an empty dimension row whose key field is set to the given value and every other
 * field is the empty string.
 *
 * @param keyFieldValue  Value for the key field of the new row
 *
 * @return the newly created DimensionRow
 *
 * @throws IllegalArgumentException if a row with this key value already exists
 */
@Override
public DimensionRow createEmptyDimensionRow(String keyFieldValue) {
    if (findDimensionRowByKeyValue(keyFieldValue) != null) {
        String error = String.format(
                "Dimension row with keyFieldValue '%s' already exists for dimension '%s'",
                keyFieldValue,
                this
        );
        throw new IllegalArgumentException(error);
    }
    // Use a LinkedHashMap (fully qualified; import block not visible here) so the row's field
    // order matches getDimensionFields(), consistent with how rows are built elsewhere —
    // a plain HashMap would scramble field iteration/serialization order.
    Map<DimensionField, String> dimensionFieldValueMap = new java.util.LinkedHashMap<>();
    for (DimensionField dimensionField : getDimensionFields()) {
        dimensionFieldValueMap.put(dimensionField, "");
    }
    // Overwrite the key field's empty placeholder with the actual key value
    dimensionFieldValueMap.put(getKey(), keyFieldValue);
    // Use getKey() consistently instead of mixing it with the backing `key` field
    return new DimensionRow(getKey(), dimensionFieldValueMap);
}
/**
 * Keep only the dimension rows whose filtered field value is in the filter's value set.
 *
 * @param dimensionRows  The unfiltered set of dimension rows
 * @param filter  The api filter
 *
 * @return Tree set of DimensionRows matching one of the filter values
 */
@Override
public TreeSet<DimensionRow> inFilterOperation(TreeSet<DimensionRow> dimensionRows, ApiFilter filter) {
    TreeSet<DimensionRow> matchedRows = new TreeSet<>();
    for (DimensionRow dimensionRow : dimensionRows) {
        if (filter.getValues().contains(dimensionRow.get(filter.getDimensionField()))) {
            matchedRows.add(dimensionRow);
        }
    }
    return matchedRows;
}
@Override public void clearDimension() { //Remove all dimension data from the store. findAllDimensionRows().stream() .flatMap(dimensionRow -> dimensionRow.entrySet().stream()) .map(entry -> DimensionStoreKeyUtils.getRowKey(entry.getKey().getName(), entry.getValue())) .forEach(keyValueStore::remove); //Since the indices are being dropped, the dimension field stored via the columnKey is becoming stale. keyValueStore.remove(DimensionStoreKeyUtils.getColumnKey(dimension.getKey().getName())); // The allValues key mapping needs to reflect the fact that we are dropping all dimension data. keyValueStore.put(DimensionStoreKeyUtils.getAllValuesKey(), "[]"); //We're resetting the keyValueStore, so we don't want any stale last updated date floating around. keyValueStore.remove(DimensionStoreKeyUtils.getLastUpdatedKey()); refreshCardinality(); }
@Override public DimensionRow parseDimensionRow(Map<String, String> fieldNameValueMap) { // TODO: This rewrite need to be removed once description is normalized in legacy implementations String desc = fieldNameValueMap.remove("description"); if (desc != null) { fieldNameValueMap.put("desc", desc); } LinkedHashMap<DimensionField, String> dimensionRowFieldValues = new LinkedHashMap<>(fieldNameValueMap.size()); // Load every field we expect and only fields we expect for (DimensionField field : dimensionFields) { String fieldName = field.getName(); String value = fieldNameValueMap.get(fieldName); if (value == null) { // A missing key value is unacceptable if (field == getKey()) { String error = String.format(MISSING_ROW_KEY_FORMAT, fieldNameValueMap.toString(), fieldName); LOG.info(error); throw new IllegalArgumentException(error); } // A missing value for another field is turned into the empty string value = ""; } dimensionRowFieldValues.put(field, value); } return new DimensionRow(getKey(), dimensionRowFieldValues); }
@Override public TreeSet<DimensionRow> startswithFilterOperation( TreeSet<DimensionRow> dimensionRows, ApiFilter filter ) { TreeSet<DimensionRow> filteredDimensionRows = new TreeSet<>(); // regex string containing all starts with filter values StringBuilder startsWithRegex = new StringBuilder("("); for (String filterValue : filter.getValues()) { startsWithRegex.append(filterValue).append("|"); } startsWithRegex.replace(startsWithRegex.length() - 1, startsWithRegex.length(), ").*"); String startsWithRegexString = startsWithRegex.toString(); for (DimensionRow dimensionRow : dimensionRows) { String value = dimensionRow.get(filter.getDimensionField()); if (value.matches(startsWithRegexString)) { filteredDimensionRows.add(dimensionRow); } } return filteredDimensionRows; }
/**
 * Make test DimensionRow using ID and DESC fields from provided Dimension.
 *
 * @param dimension provided dimension
 * @param values Values for dimension fields
 *
 * @return test DimensionRow
 */
public static DimensionRow makeDimensionRow(Dimension dimension, String...values) {
    Iterator<DimensionField> fieldIterator = dimension.getDimensionFields().iterator();
    LinkedHashMap<DimensionField, String> fieldValues = new LinkedHashMap<>(values.length);
    // Pair each value with the dimension's fields in declaration order
    for (int i = 0; i < values.length; i++) {
        fieldValues.put(fieldIterator.next(), values[i]);
    }
    return new DimensionRow(dimension.getKey(), fieldValues);
}
}
/**
 * Generates the dimension names and its unique id map from Result for custom serialization.
 *
 * @param result Result object for the custom serialization
 *
 * @return custom map of dimension names and their respective unique id
 */
private Map<String, String> getDimensionValues(Result result) {
    // Map each dimension column to its name, paired with the row's key-field value
    return result.getDimensionRows()
            .entrySet()
            .stream()
            .collect(Collectors.toMap(
                    entry -> entry.getKey().getName(),
                    entry -> entry.getValue().get(entry.getKey().getDimension().getKey())
            ));
}
/**
 * Make test DimensionRow using ID and DESC fields from provided Dimension.
 *
 * @param dimension provided dimension
 * @param values Values for dimension fields
 *
 * @return test DimensionRow
 */
public static DimensionRow makeDimensionRow(Dimension dimension, String...values) {
    LinkedHashMap<DimensionField, String> rowValues = new LinkedHashMap<>(values.length);
    Iterator<DimensionField> remainingFields = dimension.getDimensionFields().iterator();
    // Consume one dimension field per supplied value, preserving declaration order
    int index = 0;
    while (index < values.length) {
        rowValues.put(remainingFields.next(), values[index]);
        index++;
    }
    return new DimensionRow(dimension.getKey(), rowValues);
}
}
/** * Contains filter operation. * * @param dimensionRows The unfiltered set of dimension rows * @param filter The api filter * * @return Tree set of DimensionRows */ @Override public TreeSet<DimensionRow> containsFilterOperation(TreeSet<DimensionRow> dimensionRows, ApiFilter filter) { TreeSet<DimensionRow> filteredDimensionRows = new TreeSet<>(); // regex string containing all contains filter values StringBuilder containsRegex = new StringBuilder(".*("); for (String filterValue : filter.getValues()) { containsRegex.append(filterValue).append("|"); } containsRegex.replace(containsRegex.length() - 1, containsRegex.length(), ").*"); for (DimensionRow dimensionRow : dimensionRows) { String value = dimensionRow.get(filter.getDimensionField()); if (value.matches(containsRegex.toString())) { filteredDimensionRows.add(dimensionRow); } } return filteredDimensionRows; }
// NOTE(review): loop-body fragment — copies one dimension field's value from the dimension
// row (drow) into the flat result map, keyed by a per-dimension column name. The enclosing
// loop and method are outside this view; presumably drow is the row for `dimension` — confirm.
row.put(getDimensionColumnName(dimension, dimensionField), drow.get(dimensionField));
// NOTE(review): fragment — stores the row's key-field value under the dimension's API name.
// The enclosing method is outside this view; confirm `row` is the flattened output map.
row.put(dimension.getApiName(), dimensionRow.get(dimension.getKey()));