/**
 * Collects the stored fields of {@code document} whose names are not also
 * present as (non-stored) doc-values fields.
 *
 * @param document the Lucene document to inspect
 * @return the stored fields remaining after removing names that appear as
 *         doc-values-only fields
 */
private Collection<IndexableField> getNotIndexedStoredFields( Document document )
{
    Map<String,IndexableField> storedByName = new HashMap<>();
    List<String> docValuesOnlyNames = new ArrayList<>();
    for ( IndexableField field : document.getFields() )
    {
        if ( isStoredField( field ) )
        {
            storedByName.put( field.name(), field );
        }
        else if ( !DocValuesType.NONE.equals( field.fieldType().docValuesType() ) )
        {
            // Not stored, but carries doc values — its name disqualifies any
            // same-named stored field below.
            docValuesOnlyNames.add( field.name() );
        }
    }
    for ( String name : docValuesOnlyNames )
    {
        storedByName.remove( name );
    }
    return storedByName.values();
}
/**
 * Decides whether {@code field} counts as a regular stored field: it must
 * have a valid key, be marked as stored, and not be the internal
 * transaction-state marker field.
 */
protected boolean isStoredField( IndexableField field )
{
    String name = field.name();
    if ( !isValidKey( name ) )
    {
        return false;
    }
    if ( !field.fieldType().stored() )
    {
        return false;
    }
    return !FullTxData.TX_STATE_KEY.equals( name );
}
/**
 * Called from processDocument to index one field's point value.
 *
 * Reads the point dimension configuration from the field's type, records it
 * on the per-field {@code FieldInfo} (and the global field-number table on
 * first sight), then hands the packed binary value to the field's
 * {@code PointValuesWriter}, lazily creating that writer on first use.
 *
 * @param fp    per-field indexing state holding the FieldInfo and writer
 * @param field the field whose point value is being indexed
 * @throws IOException propagated from the points writer
 */
private void indexPoint(PerField fp, IndexableField field) throws IOException {
  int pointDataDimensionCount = field.fieldType().pointDataDimensionCount();
  int pointIndexDimensionCount = field.fieldType().pointIndexDimensionCount();
  int dimensionNumBytes = field.fieldType().pointNumBytes();

  // Record dimensions for this field; this setter will throw IllegalArgExc if
  // the dimensions were already set to something different:
  if (fp.fieldInfo.getPointDataDimensionCount() == 0) {
    // First time this field is seen with points: register the dimensions globally.
    fieldInfos.globalFieldNumbers.setDimensions(fp.fieldInfo.number, fp.fieldInfo.name, pointDataDimensionCount, pointIndexDimensionCount, dimensionNumBytes);
  }
  fp.fieldInfo.setPointDimensions(pointDataDimensionCount, pointIndexDimensionCount, dimensionNumBytes);

  if (fp.pointValuesWriter == null) {
    // Lazily create the per-field points writer on the first point value.
    fp.pointValuesWriter = new PointValuesWriter(docWriter, fp.fieldInfo);
  }
  fp.pointValuesWriter.addPackedValue(docState.docID, field.binaryValue());
}
// NOTE(review): this fragment appears to be a garbled extraction of a term-vectors
// start(...) method — the `if (...) {` guard that should precede the dangling
// `} else {`, the closing braces of each `throw` branch, and the tail of the
// method (including its boolean return) seem to have been lost. The code is
// reproduced as-is below; confirm against the original file before relying on it.
@Override
boolean start(IndexableField field, boolean first) {
  super.start(field, first);
  // Term vectors only make sense on indexed fields.
  assert field.fieldType().indexOptions() != IndexOptions.NONE;
  doVectors = field.fieldType().storeTermVectors();
  doVectorPositions = field.fieldType().storeTermVectorPositions();
  doVectorOffsets = field.fieldType().storeTermVectorOffsets();
  doVectorPayloads = field.fieldType().storeTermVectorPayloads();
} else {
  doVectorPayloads = false;
  // Reject term-vector sub-options when term vectors themselves are off,
  // and reject per-instance changes to the term-vector settings of a field.
  if (field.fieldType().storeTermVectorPayloads()) {
    if (field.fieldType().storeTermVectorOffsets()) {
      throw new IllegalArgumentException("cannot index term vector offsets when term vectors are not indexed (field=\"" + field.name() + "\")");
      if (field.fieldType().storeTermVectorPositions()) {
        throw new IllegalArgumentException("cannot index term vector positions when term vectors are not indexed (field=\"" + field.name() + "\")");
        if (field.fieldType().storeTermVectorPayloads()) {
          throw new IllegalArgumentException("cannot index term vector payloads when term vectors are not indexed (field=\"" + field.name() + "\")");
          if (doVectors != field.fieldType().storeTermVectors()) {
            throw new IllegalArgumentException("all instances of a given field name must have the same term vectors settings (storeTermVectors changed for field=\"" + field.name() + "\")");
            if (doVectorPositions != field.fieldType().storeTermVectorPositions()) {
              throw new IllegalArgumentException("all instances of a given field name must have the same term vectors settings (storeTermVectorPositions changed for field=\"" + field.name() + "\")");
// NOTE(review): incomplete fragment — processField's body continues beyond this
// excerpt (no closing brace is visible); only the opening statements are shown.
private int processField(IndexableField field, long fieldGen, int fieldCount) throws IOException {
  // Cache the field's name and type descriptor for the processing below.
  String fieldName = field.name();
  IndexableFieldType fieldType = field.fieldType();
// Snapshot of the field's type descriptor (indexing/storage options).
// NOTE(review): isolated statement fragment — its enclosing method is not visible here.
IndexableFieldType fieldType = field.fieldType();
@Override public Set<String> parseContext(Document document) { Set<String> values = null; if (fieldName != null) { IndexableField[] fields = document.getFields(fieldName); values = new HashSet<>(fields.length); for (IndexableField field : fields) { if (field instanceof SortedDocValuesField || field instanceof SortedSetDocValuesField || field instanceof StoredField) { // Ignore doc values and stored fields } else if (field.fieldType() instanceof KeywordFieldMapper.KeywordFieldType) { values.add(field.binaryValue().utf8ToString()); } else if (field.fieldType() instanceof StringFieldType) { values.add(field.stringValue()); } else { throw new IllegalArgumentException("Failed to parse context field [" + fieldName + "], only keyword and text fields are accepted"); } } } return (values == null) ? Collections.emptySet() : values; }
// NOTE(review): loop-body fragment — the enclosing loop over lonFields/latFields
// and the declarations of i, spare, geohashes and precision are outside this excerpt.
IndexableField lonField = lonFields[i];
IndexableField latField = latFields[i];
// Paired lat/lon fields are expected to agree on their doc-values type.
assert lonField.fieldType().docValuesType() == latField.fieldType().docValuesType();
// When the pair carries no doc values, read the raw numeric lat/lon values
// and collect the geohash encoding at the configured precision.
if (lonField.fieldType().docValuesType() == DocValuesType.NONE) {
    spare.reset(latField.numericValue().doubleValue(), lonField.numericValue().doubleValue());
    geohashes.add(stringEncode(spare.getLon(), spare.getLat(), precision));
// NOTE(review): loop-body fragment — the enclosing loop and `f` are not visible here.
// Skip fields that are not marked as stored.
if (!f.fieldType().stored()) {
    continue;
/**
 * Delegates to the wrapped real value's field type.
 *
 * @return the {@link IndexableFieldType} of the underlying field
 */
@Override
public IndexableFieldType fieldType()
{
    return getRealValue().fieldType();
}
/**
 * Exposes the tokenized flag of the underlying Lucene field's type.
 *
 * @return {@code true} when the wrapped field is tokenized
 */
@Override
public boolean isTokenized()
{
    return field.fieldType().tokenized();
}
/**
 * Forwards to the real value so callers observe that field's type.
 *
 * @return the field type of the resolved real value
 */
@Override
public IndexableFieldType fieldType()
{
    return getRealValue().fieldType();
}
/**
 * @return {@code true} when the field carries doc values of any type.
 */
private boolean isDocValueField(IndexableField field) {
    DocValuesType docValuesType = field.fieldType().docValuesType();
    return docValuesType != DocValuesType.NONE;
}
// Closes the enclosing class (its declaration is outside this excerpt).
}
/**
 * Tells whether the given field is backed by doc values.
 *
 * @return {@code true} for any doc-values type other than {@code NONE}
 */
private boolean isDocValueField(IndexableField field) {
    return !DocValuesType.NONE.equals(field.fieldType().docValuesType());
}
public void writeField( IndexableField field ) throws IOException { int flags = ( field.fieldType().indexed() ? F_INDEXED : 0 ) // + ( field.fieldType().tokenized() ? F_TOKENIZED : 0 ) // + ( field.fieldType().stored() ? F_STORED : 0 ); // // + ( false ? F_COMPRESSED : 0 ); // Compressed not supported anymore String name = field.name(); String value = field.stringValue(); dos.write( flags ); dos.writeUTF( name ); writeUTF( value, dos ); }
public void writeField( IndexableField field ) throws IOException { int flags = ( field.fieldType().indexOptions() != IndexOptions.NONE ? F_INDEXED : 0 ) // + ( field.fieldType().tokenized() ? F_TOKENIZED : 0 ) // + ( field.fieldType().stored() ? F_STORED : 0 ); // // + ( false ? F_COMPRESSED : 0 ); // Compressed not supported anymore String name = field.name(); String value = field.stringValue(); dos.write( flags ); dos.writeUTF( name ); writeUTF( value, dos ); }
public void writeField( IndexableField field ) throws IOException { int flags = ( field.fieldType().indexOptions() != IndexOptions.NONE ? F_INDEXED : 0 ) // + ( field.fieldType().tokenized() ? F_TOKENIZED : 0 ) // + ( field.fieldType().stored() ? F_STORED : 0 ); // // + ( false ? F_COMPRESSED : 0 ); // Compressed not supported anymore String name = field.name(); String value = field.stringValue(); dos.write( flags ); dos.writeUTF( name ); writeUTF( value, dos ); }
/**
 * Extracts all stored fields of the given document into a name-to-value map.
 * Later occurrences of a repeated field name overwrite earlier ones, as with
 * the original implementation.
 *
 * @param d the document to convert
 * @return a mutable map from stored-field name to its string value
 */
private Map<String, String> toMap( Document d )
{
    // Document.getFields() returns List<IndexableField> (Lucene 4+), so the
    // raw Object iteration + cast of the old code is unnecessary; the diamond
    // operator matches the style used elsewhere in this file.
    final Map<String, String> result = new HashMap<>();
    for ( IndexableField f : d.getFields() )
    {
        if ( f.fieldType().stored() )
        {
            result.put( f.name(), f.stringValue() );
        }
    }
    return result;
}
/**
 * Builds a map of the document's stored fields, keyed by field name with the
 * field's string value. A repeated name keeps the last value seen, matching
 * the original behavior.
 *
 * @param d the document to convert
 * @return a mutable name-to-value map of stored fields
 */
private Map<String, String> toMap( Document d )
{
    // Iterate IndexableField directly — getFields() is List<IndexableField>
    // in Lucene 4+, making the Object cast of the old code redundant.
    final Map<String, String> result = new HashMap<>();
    for ( IndexableField f : d.getFields() )
    {
        if ( f.fieldType().stored() )
        {
            result.put( f.name(), f.stringValue() );
        }
    }
    return result;
}
/**
 * Converts the stored fields of {@code d} into a name-to-string-value map.
 * If a field name occurs more than once, the last stored value wins — the
 * same behavior as the previous implementation.
 *
 * @param d the document to convert
 * @return a mutable map of stored-field name to value
 */
private Map<String, String> toMap( Document d )
{
    // Typed iteration replaces the raw Object loop + cast: getFields()
    // already returns List<IndexableField> in Lucene 4+. Diamond operator
    // matches the file's existing style.
    final Map<String, String> result = new HashMap<>();
    for ( IndexableField f : d.getFields() )
    {
        if ( f.fieldType().stored() )
        {
            result.put( f.name(), f.stringValue() );
        }
    }
    return result;
}