/**
 * Resolves a {@link SchemaField} by name, preferring an exact schema match and
 * falling back to a case-insensitive lookup.
 *
 * @param caseInsensitiveName the field name; case is ignored for the fallback lookup
 * @return the matching field, or {@code null} if neither lookup finds one
 */
public SchemaField getSchemaField(String caseInsensitiveName) {
    // First try the exact name against the schema (if one is configured).
    SchemaField exactMatch = (schema != null) ? schema.getFieldOrNull(caseInsensitiveName) : null;
    if (exactMatch != null) {
        return exactMatch;
    }
    // Fall back to the pre-built lowercase index. Locale.ROOT keeps the
    // normalization independent of the JVM's default locale.
    return lowerNameVsSchemaField.get(caseInsensitiveName.toLowerCase(Locale.ROOT));
}
/**
 * Builds the list of {@link PhraseBoosting.PhraseBoostFieldParams} used to boost
 * entire phrases or query-term n-grams. Fields whose schema definition is not
 * phrase-queryable are filtered out before conversion.
 *
 * @see #getPhraseBoostTiebreaker()
 *
 * @return The list of PhraseBoostFieldParams for boosting or an empty list if no (sub)phrase should be boosted.
 */
public List<PhraseBoostFieldParams> getPhraseBoostFieldParams() {
    final IndexSchema schema = request.getSchema();
    return allPhraseFields
            .stream()
            // Keep only fields that can actually take a phrase query.
            .filter(phraseField -> isFieldPhraseQueryable(schema.getFieldOrNull(phraseField.getField())))
            .map(DismaxSearchEngineRequestAdapter::fieldParams2phraseBoostFieldParams)
            .collect(Collectors.toList());
}
/**
 * Initializes this component's configuration from the given request/config params.
 *
 * <p>Reads {@code PREPEND_FIELDS} (comma-separated field names, each validated
 * against the index schema), {@code PREPEND_GRANULARITY} (one of
 * {@code document}, {@code field}, {@code fieldValue}) and
 * {@code HIDE_PREPENDED_LANGS} (boolean, defaults to {@code false}).
 * Unknown fields and unknown granularity values are logged and skipped.
 *
 * @param params the parameter source; individual parameters may be absent
 */
private void initParams(SolrParams params) {
    // params.get(...) returns null when the parameter is absent; the original
    // code would NPE on split() in that case.
    String prependFieldsParam = params.get(PREPEND_FIELDS);
    if (prependFieldsParam != null) {
        for (String field : prependFieldsParam.split(",")) {
            String trimmed = field.trim();
            if (this.indexSchema.getFieldOrNull(trimmed) != null) {
                this.prependFields.add(trimmed);
            } else {
                log.error("Unsupported format for " + PREPEND_FIELDS + ": " + trimmed
                        + ". Skipping prepending langs to this field.");
            }
        }
    }
    String prependGranularityParam = params.get(PREPEND_GRANULARITY);
    if (prependGranularityParam != null) {
        // Trim once instead of on every comparison.
        String granularity = prependGranularityParam.trim();
        if (!granularity.isEmpty()) {
            if (granularity.equals("document")) {
                this.prependGranularity = PrependGranularities.document;
            } else if (granularity.equals("field")) {
                this.prependGranularity = PrependGranularities.field;
            } else if (granularity.equals("fieldValue")) {
                this.prependGranularity = PrependGranularities.fieldValue;
            } else {
                // Keep the current (default) granularity on unknown values.
                log.error("Unsupported format for " + PREPEND_GRANULARITY + ": " + granularity
                        + ". Using " + this.prependGranularity + ".");
            }
        }
    }
    this.hidePrependedLangs = params.getBool(HIDE_PREPENDED_LANGS, false);
}
@Override protected boolean doProcess(Record record) { Collection<Map.Entry> entries = new ArrayList<Map.Entry>(record.getFields().asMap().entrySet()); for (Map.Entry<String, Collection<Object>> entry : entries) { String key = entry.getKey(); if (schema.getFieldOrNull(key) == null && !LoadSolrBuilder.LOAD_SOLR_DELETE_BY_ID.equals(key) && !LoadSolrBuilder.LOAD_SOLR_DELETE_BY_QUERY.equals(key) && !LoadSolrBuilder.LOAD_SOLR_CHILD_DOCUMENTS.equals(key)) { LOG.debug("Sanitizing unknown Solr field: {}", key); Collection values = entry.getValue(); if (renameToPrefix != null) { record.getFields().putAll(renameToPrefix + key, values); } values.clear(); // implicitly removes key from record } } // pass record to next command in chain: return super.doProcess(record); }
@Override protected boolean doProcess(Record record) { Collection<Map.Entry> entries = new ArrayList<Map.Entry>(record.getFields().asMap().entrySet()); for (Map.Entry<String, Collection<Object>> entry : entries) { String key = entry.getKey(); if (schema.getFieldOrNull(key) == null && !LoadSolrBuilder.LOAD_SOLR_DELETE_BY_ID.equals(key) && !LoadSolrBuilder.LOAD_SOLR_DELETE_BY_QUERY.equals(key) && !LoadSolrBuilder.LOAD_SOLR_CHILD_DOCUMENTS.equals(key)) { LOG.debug("Sanitizing unknown Solr field: {}", key); Collection values = entry.getValue(); if (renameToPrefix != null) { record.getFields().putAll(renameToPrefix + key, values); } values.clear(); // implicitly removes key from record } } // pass record to next command in chain: return super.doProcess(record); }
@Override protected boolean doProcess(Record record) { Collection<Map.Entry> entries = new ArrayList<Map.Entry>(record.getFields().asMap().entrySet()); for (Map.Entry<String, Collection<Object>> entry : entries) { String key = entry.getKey(); if (schema.getFieldOrNull(key) == null) { LOG.debug("Sanitizing unknown Solr field: {}", key); Collection values = entry.getValue(); if (renameToPrefix != null) { record.getFields().putAll(renameToPrefix + key, values); } values.clear(); // implicitly removes key from record } } return super.doProcess(record); }
@Override protected boolean doProcess(Record record) { Collection<Map.Entry> entries = new ArrayList<Map.Entry>(record.getFields().asMap().entrySet()); for (Map.Entry<String, Collection<Object>> entry : entries) { String key = entry.getKey(); if (schema.getFieldOrNull(key) == null) { LOG.debug("Sanitizing unknown Solr field: {}", key); Collection values = entry.getValue(); if (renameToPrefix != null) { record.getFields().putAll(renameToPrefix + key, values); } values.clear(); // implicitly removes key from record } } return super.doProcess(record); }
@Override protected Query getFieldQuery(String field, String queryText, boolean quoted) throws SyntaxError { SchemaField sf = this.schema.getFieldOrNull(field); //TODO cache this check if (sf != null) { final String fieldTypeName = sf.getType().getTypeName().toLowerCase(); if(fieldTypeName.contains("payload") || fieldTypeName.contains("vector")) { return new PayloadScoreQuery(new SpanTermQuery(new Term(field, queryText)), new AveragePayloadFunction(), false); } } return super.getFieldQuery(field, queryText, quoted); } }
/**
 * Verifies that the configured collapse field is usable for field collapsing,
 * throwing a runtime exception if it is not. The field is rejected when:
 * <ul>
 * <li> it does not exist in the schema
 * <li> it is multivalued in the schema
 * <li> it is tokenized in the schema
 * </ul>
 *
 * For example when a field is tokenized, only the last token of the field can be retrieved from the fieldcache. This
 * results in field collapsing only on the last token of a field value instead of the complete field value.
 *
 * When the field values from a multivalued field are returned from the <code>FieldCache</code> then an exception may
 * be thrown. This happens when there are more terms in a field than documents.
 *
 * @param schema The index schema
 */
protected void checkCollapseField(IndexSchema schema) {
    final SchemaField field = schema.getFieldOrNull(collapseField);
    if (field == null) {
        throw new RuntimeException("Could not collapse, because collapse field does not exist in the schema.");
    }
    if (field.multiValued()) {
        throw new RuntimeException("Could not collapse, because collapse field is multivalued");
    }
    if (field.getType().isTokenized()) {
        throw new RuntimeException("Could not collapse, because collapse field is tokenized");
    }
}
/**
 * Detects the document's languages from the configured input fields and prepends
 * language markers to every configured prepend field that is a {@link MultiTextField}.
 * Fields missing from the schema or of the wrong type are logged and skipped.
 *
 * @param doc the incoming document
 * @return the (possibly rewritten) output document
 */
@Override
protected SolrInputDocument process(SolrInputDocument doc) {
    SolrInputDocument outputDocument = super.process(doc);
    // NOTE: the original built a copy of outputDocument.getFieldNames() here
    // that was never read; that dead code has been removed.
    List<DetectedLanguage> documentLangs =
            this.detectLanguage(this.concatFields(doc, this.inputFields));
    for (String nextFieldName : this.prependFields) {
        // Single schema lookup instead of getFieldOrNull() followed by getField().
        SchemaField schemaField = indexSchema.getFieldOrNull(nextFieldName);
        if (schemaField == null) {
            log.error("Invalid field " + PREPEND_FIELDS + ":" + nextFieldName
                    + ". Field does not exist in indexSchema.");
        } else if (schemaField.getType() instanceof MultiTextField) {
            outputDocument = detectAndPrependLanguages(outputDocument, nextFieldName, documentLangs);
        } else {
            log.error("Invalid field " + PREPEND_FIELDS + ":" + nextFieldName
                    + ". Field is not a " + MultiTextField.class + ".");
        }
    }
    return outputDocument;
}
/**
 * The main entry point of this class. Generates a list of {@link SirenFacetEntry} for
 * the given {@link SolrInputDocument} by performing a DFS through the doc.
 *
 * <p>Fields that are not declared in the schema are skipped; the original code
 * dereferenced {@code getFieldOrNull(...)} without a null check and would throw
 * a {@link NullPointerException} on any undeclared field.
 *
 * @param doc The document for which to generate facet entries
 * @throws FacetException if a siren field cannot be parsed or read
 * @throws IllegalStateException if IndexSchema was not set (either in constructor or via {@link #setSchema(IndexSchema)}
 */
@Override
public List<SirenFacetEntry> extractFacets(SolrInputDocument doc) throws FacetException {
    if (schema == null) {
        throw new IllegalStateException(
                "Schema field is null - probably the default constructor was used without calling setSchema() later.");
    }
    List<SirenFacetEntry> facets = new ArrayList<SirenFacetEntry>();
    for (String fieldName : doc.getFieldNames()) {
        SchemaField schemaField = schema.getFieldOrNull(fieldName);
        if (schemaField == null) {
            continue; // field not declared in the schema; nothing to facet
        }
        FieldType ft = schemaField.getType();
        if (ft instanceof ExtendedJsonField) {
            String sirenField = (String) doc.getFieldValue(fieldName);
            try {
                JsonNode sirenNode = mapper.readTree(sirenField);
                generateFacetsForLeaves(sirenNode, fieldName, (ExtendedJsonField) ft, "", facets);
            } catch (JsonProcessingException e) {
                throw new FacetException("Could not parse siren field " + fieldName + ": " + e.getMessage(), e);
            } catch (IOException e) {
                throw new FacetException("I/O problem while parsing siren field " + fieldName + ": " + e.getMessage(), e);
            }
        }
    }
    return facets;
}
/** * This is a destructive call... the queue is empty at the end */ public NamedList<Integer> toNamedList( IndexSchema schema ) { // reverse the list.. List<TermInfo> aslist = new LinkedList<TermInfo>(); while( size() > 0 ) { aslist.add( 0, (TermInfo)pop() ); } NamedList<Integer> list = new NamedList<Integer>(); for (TermInfo i : aslist) { String txt = i.term.text(); SchemaField ft = schema.getFieldOrNull( i.term.field() ); if( ft != null ) { txt = ft.getType().indexedToReadable( txt ); } list.add( txt, i.docFreq ); } return list; } }
@Override public void transform(SolrDocument doc, int docid) throws IOException { Query query = context.query; SimpleJsonByQueryExtractor extractor = new SimpleJsonByQueryExtractor(); try { IndexSchema schema = context.req.getSchema(); for (String fieldName : doc.getFieldNames()) { FieldType ft = schema.getFieldOrNull(fieldName).getType(); if (ft instanceof ExtendedJsonField) { String sirenField = (String) doc.getFieldValue(fieldName); String json = extractor.extractAsString(sirenField, query); if (json == null) { // query doesn't contain variables, no transformation is necessary continue; } doc.setField(fieldName, json); } } } catch (ProjectionException e) { throw new IOException(String.format( "Problem while projecting (extracting variables from matched document id %s", doc.getFieldValue("id")), e); } }
/** * Add the Field and value to the document with the specified boost, invoking the copyField mechanism * @param name The name of the field. * @param val The value to add * @param boost The boost * * @see #addField(String, String) * @see #addField(org.apache.solr.schema.SchemaField, String, float) * @see #addSingleField(org.apache.solr.schema.SchemaField, String, float) * */ public void addField(String name, String val, float boost) { SchemaField sfield = schema.getFieldOrNull(name); if (sfield != null) { addField(sfield,val,boost); } // Check if we should copy this field to any other fields. // This could happen whether it is explicit or not. final List<CopyField> copyFields = schema.getCopyFieldsList(name); if (copyFields != null) { for(CopyField cf : copyFields) { addSingleField(cf.getDestination(), cf.getLimitedValue( val ), boost); } } // error if this field name doesn't match anything if (sfield==null && (copyFields==null || copyFields.size()==0)) { throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,"ERROR:unknown field '" + name + "'"); } }
} else { SchemaField sf = schema.getFieldOrNull(fname); if (version>=2100 && sf!=null && sf.multiValued()) { startTag("arr",fname,false);
private static SolrDocument toSolrDoc(Document doc, IndexSchema schema) { SolrDocument out = new SolrDocument(); for ( IndexableField f : doc.getFields() ) { // Make sure multivalued fields are represented as lists Object existing = out.get(f.name()); if (existing == null) { SchemaField sf = schema.getFieldOrNull(f.name()); // don't return copyField targets if (sf != null && schema.isCopyFieldTarget(sf)) continue; if (sf != null && sf.multiValued()) { List<Object> vals = new ArrayList<>(); vals.add( f ); out.setField( f.name(), vals ); } else{ out.setField( f.name(), f ); } } else { out.addField( f.name(), f ); } } return out; }
} else { SchemaField sf = schema.getFieldOrNull(fname); if (sf != null && sf.multiValued()) { writeArrayOpener(-1); // no trivial way to determine array size
SimpleOrderedMap<Object> f = new SimpleOrderedMap<Object>(); SchemaField sfield = schema.getFieldOrNull( fieldable.name() ); FieldType ftype = (sfield==null)?null:sfield.getType();
private void registerXmlTextFields() { String xmlFieldName = indexConfig.getFieldName(FieldRole.XML_TEXT); SchemaField schemaField = schema.getFieldOrNull(xmlFieldName); Analyzer xmlAnalyzer = null; Analyzer xmlQueryAnalyzer = null;
assertNotNull(d); schema = h.getCore().getLatestSchema(); assertNotNull(schema.getFieldOrNull("double.json.rating")); assertEquals("tdouble", schema.getFieldType("double.json.rating").getTypeName()); assertTrue((5.4 - (double)d.getFieldValue("double.json.rating")) < 0.01); assertNotNull(d); schema = h.getCore().getLatestSchema(); assertNotNull(schema.getFieldOrNull("double.json.rating")); assertEquals("tdouble", schema.getFieldType("double.json.rating").getTypeName()); assertTrue((-8.4 + (double)d.getFieldValue("double.json.rating")) < 0.01);