/**
 * Constructor.
 *
 * @param dimensionMappingValues  The map of dimension to sets of dimension values which map this table.
 */
public DimensionIdFilter(Map<Dimension, Set<String>> dimensionMappingValues) {
    // For each dimension, build an 'in' filter on that dimension's key field matched against the
    // configured value set. Collect straight into the target map — no intermediate
    // AbstractMap.SimpleEntry objects are needed.
    dimensionKeySelectFilters = dimensionMappingValues.entrySet().stream()
            .collect(Collectors.toMap(
                    Map.Entry::getKey,
                    entry -> new ApiFilter(
                            entry.getKey(),
                            entry.getKey().getKey(),
                            DefaultFilterOperation.in,
                            entry.getValue()
                    )
            ));
}
/**
 * Build a DimensionRow whose every field carries the same given value.
 *
 * @param value  Value assigned to each of the dimension's fields
 *
 * @return a DimensionRow keyed by this dimension's key field
 */
private DimensionRow makeDimensionRow(String value) {
    LinkedHashMap<DimensionField, String> fieldValues = new LinkedHashMap<>();
    dimension.getDimensionFields().forEach(field -> fieldValues.put(field, value));
    return new DimensionRow(dimension.getKey(), fieldValues);
}
/**
 * Build the dimension-name to unique-id map from a Result, for custom serialization.
 *
 * @param result  Result whose dimension rows are being serialized
 *
 * @return map from each dimension's name to the key-field value of its row
 */
private Map<String, String> getDimensionValues(Result result) {
    return result.getDimensionRows().entrySet().stream()
            .collect(Collectors.toMap(
                    entry -> entry.getKey().getName(),
                    entry -> entry.getValue().get(entry.getKey().getDimension().getKey())
            ));
}
/**
 * Validity rules for non-aggregatable dimensions that are only referenced in filters.
 * A query that references a non-aggregatable dimension in a filter without grouping by this dimension, is valid
 * only if the requested dimension field is a key for this dimension and only a single value is requested
 * with an inclusive operator ('in' or 'eq').
 *
 * @return A predicate that determines a given dimension is non aggregatable and also not constrained to one row
 * per result
 */
protected static Predicate<ApiFilter> isNonAggregatableInFilter() {
    return apiFilter -> {
        // The filter only pins the dimension to a single row when all three conditions hold.
        boolean filtersOnKeyField = apiFilter.getDimensionField().equals(apiFilter.getDimension().getKey());
        boolean hasSingleValue = apiFilter.getValues().size() == 1;
        boolean usesInclusiveOperation = apiFilter.getOperation().equals(DefaultFilterOperation.in)
                || apiFilter.getOperation().equals(DefaultFilterOperation.eq);
        return !(filtersOnKeyField && hasSingleValue && usesInclusiveOperation);
    };
}
/** * Update the dimension row in the index. * * @param luceneDimensionRowDoc Document to use for doing the update * @param fieldMap Mapping of DimensionFields to the Document's fields * @param writer Lucene IndexWriter to update the indexes of * @param newRow Row to update * * @throws IOException if there is a problem updating the document */ private void updateDimensionRow( Document luceneDimensionRowDoc, Map<DimensionField, Field> fieldMap, IndexWriter writer, DimensionRow newRow ) throws IOException { // Update the document fields with each field from the new dimension row for (DimensionField field : dimension.getDimensionFields()) { // Get the field to update from the lookup map Field fieldToUpdate = fieldMap.get(field); // Set field value to updated value fieldToUpdate.setStringValue(newRow.getOrDefault(field, "")); } // Build the term to delete the old document by the key value (which should be unique) Term keyTerm = new Term(fieldMap.get(dimension.getKey()).name(), newRow.getOrDefault(dimension.getKey(), "")); // Update the document by the key term writer.updateDocument(keyTerm, luceneDimensionRowDoc); }
DimensionStoreKeyUtils.getColumnKey(dimensionField.getName()), "", dimensionField.equals(dimension.getKey()) ? Field.Store.YES : Field.Store.NO );
@Override public void clearDimension() { //Remove all dimension data from the store. findAllDimensionRows().stream() .flatMap(dimensionRow -> dimensionRow.entrySet().stream()) .map(entry -> DimensionStoreKeyUtils.getRowKey(entry.getKey().getName(), entry.getValue())) .forEach(keyValueStore::remove); //Since the indices are being dropped, the dimension field stored via the columnKey is becoming stale. keyValueStore.remove(DimensionStoreKeyUtils.getColumnKey(dimension.getKey().getName())); // The allValues key mapping needs to reflect the fact that we are dropping all dimension data. keyValueStore.put(DimensionStoreKeyUtils.getAllValuesKey(), "[]"); //We're resetting the keyValueStore, so we don't want any stale last updated date floating around. keyValueStore.remove(DimensionStoreKeyUtils.getLastUpdatedKey()); refreshCardinality(); }
); DimensionField key = dimension.getKey(); Set<DimensionRow> dimensionRows = new LinkedHashSet<>(); for (Map<String, String> fieldnameValueMap: rawDimensionRows.get("dimensionRows")) {
/**
 * Make a test DimensionRow, assigning the given values to the dimension's fields in field order.
 *
 * @param dimension  provided dimension
 * @param values  Values for the dimension's fields, one per field, in the dimension's field order
 *
 * @return test DimensionRow
 *
 * @throws IllegalArgumentException if more values are supplied than the dimension has fields
 */
public static DimensionRow makeDimensionRow(Dimension dimension, String... values) {
    LinkedHashMap<DimensionField, String> map = new LinkedHashMap<>(values.length);
    Iterator<DimensionField> fields = dimension.getDimensionFields().iterator();
    for (String value : values) {
        // Fail with a clear message instead of a bare NoSuchElementException from the iterator.
        if (!fields.hasNext()) {
            throw new IllegalArgumentException(
                    "Too many values (" + values.length + ") for the fields of dimension " + dimension
            );
        }
        map.put(fields.next(), value);
    }
    return new DimensionRow(dimension.getKey(), map);
}
}
/**
 * Make test DimensionRow using ID and DESC fields from provided Dimension.
 *
 * @param dimension  provided dimension
 * @param values  Values for dimension fields
 *
 * @return test DimensionRow
 */
public static DimensionRow makeDimensionRow(Dimension dimension, String... values) {
    LinkedHashMap<DimensionField, String> fieldValues = new LinkedHashMap<>(values.length);
    Iterator<DimensionField> fieldIterator = dimension.getDimensionFields().iterator();
    int index = 0;
    // Pair each supplied value with the next dimension field, in field order.
    while (index < values.length) {
        fieldValues.put(fieldIterator.next(), values[index]);
        index++;
    }
    return new DimensionRow(dimension.getKey(), fieldValues);
}
}
if (field == dimension.getKey()) { continue;
String idKey = DimensionStoreKeyUtils.getColumnKey(dimension.getKey().getName()); filteredDimRows = Arrays.stream(hits) .map(