private void addStaleDocs(final List<ParseContext.Document> docs, final IndexWriter indexWriter) throws IOException { assert softDeleteEnabled : "Add history documents but soft-deletes is disabled"; for (ParseContext.Document doc : docs) { doc.add(softDeletesField); // soft-deleted every document before adding to Lucene } if (docs.size() > 1) { indexWriter.addDocuments(docs); } else { indexWriter.addDocument(docs.get(0)); } }
/**
 * Adds the given fields to the current document, after giving the
 * {@code _field_names} machinery a chance to record their names.
 */
private void indexFields(ParseContext context, Field[] fields) {
    // Program to the interface: nothing here needs the concrete ArrayList type.
    List<IndexableField> fieldList = new ArrayList<>(Arrays.asList(fields));
    createFieldNamesField(context, fieldList);
    for (IndexableField field : fieldList) {
        context.doc().add(field);
    }
}
}
/**
 * Copies every field of the nested document onto the root document, except the
 * {@code _uid} and {@code _type} metadata fields, which must remain per-document.
 */
private static void addFields(ParseContext.Document nestedDoc, ParseContext.Document rootDoc) {
    for (IndexableField field : nestedDoc.getFields()) {
        final String fieldName = field.name();
        if (fieldName.equals(UidFieldMapper.NAME) == false && fieldName.equals(TypeFieldMapper.NAME) == false) {
            rootDoc.add(field);
        }
    }
}
/**
 * Indexes the {@code _uid} value as a field on the current document.
 */
@Override
public void createField(ParseContext context, Object object) throws IOException {
    final Uid uid = (Uid) object;
    context.doc().add(new Field(NAME, uid.toString(), fieldType));
}
/**
 * Indexes the {@code _timestamp} value: an indexed/stored numeric field and/or
 * a doc-values field, depending on how the field type is configured.
 */
@Override
public void createField(ParseContext context, Object value) throws IOException {
    if (enabledState.enabled == false) {
        // Timestamp indexing is disabled for this mapping; nothing to add.
        return;
    }
    final Long timestamp = (Long) value;
    if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
        context.doc().add(new LegacyLongFieldMapper.CustomLongNumericField(timestamp, fieldType()));
    }
    if (fieldType().hasDocValues()) {
        context.doc().add(new NumericDocValuesField(fieldType().name(), timestamp));
    }
}
// NOTE(review): fragment of a larger method — the enclosing try block and the rest of
// the catch clause are cut off in this view. The code soft-deletes a noop tombstone
// document before writing it, so it is kept for history but never returned as a live doc.
assert doc.getField(SeqNoFieldMapper.TOMBSTONE_NAME) != null : "Noop tombstone document but _tombstone field is not set [" + doc + " ]"; doc.add(softDeletesField); indexWriter.addDocument(doc); } catch (Exception ex) {
// NOTE(review): fragment of a larger method — the surrounding if/else structure is cut off,
// which is why the dangling "} else {" branches below do not balance here. The code copies
// the root document's _id (string or binary form) and _uid onto a nested document, and marks
// it with the nested _type path so it can later be joined back to its parent document.
nestedDoc.add(new Field(IdFieldMapper.NAME, idField.stringValue(), IdFieldMapper.Defaults.NESTED_FIELD_TYPE)); } else { nestedDoc.add(new Field(IdFieldMapper.NAME, idField.binaryValue(), IdFieldMapper.Defaults.NESTED_FIELD_TYPE)); nestedDoc.add(new Field(UidFieldMapper.NAME, uidField.stringValue(), UidFieldMapper.Defaults.NESTED_FIELD_TYPE)); } else { throw new IllegalStateException("The root document of a nested document should have an uid field"); nestedDoc.add(new Field(TypeFieldMapper.NAME, mapper.nestedTypePathAsString(), TypeFieldMapper.Defaults.FIELD_TYPE)); return context;
// NOTE(review): fragment of a geo-point mapper method — the enclosing conditionals are cut
// off in this view, so the braces below do not balance here. The code adds the indexed
// LatLonPoint, a stored string form of the point, and its doc-values representation; the
// else-if branch records field names (presumably for the _field_names field — TODO confirm).
context.doc().add(new LatLonPoint(fieldType().name(), point.lat(), point.lon())); context.doc().add(new StoredField(fieldType().name(), point.toString())); context.doc().add(new LatLonDocValuesField(fieldType().name(), point.lat(), point.lon())); } else if (fieldType().stored() || fieldType().indexOptions() != IndexOptions.NONE) { List<IndexableField> fields = new ArrayList<>(1); createFieldNamesField(context, fields); for (IndexableField field : fields) { context.doc().add(field);
// NOTE(review): fragment of a larger method — the enclosing method and the rest of the if
// block are cut off in this view. The code soft-deletes a delete tombstone document and,
// when the operation is stale or the target is already deleted, adds it as a new document.
assert doc.getField(SeqNoFieldMapper.TOMBSTONE_NAME) != null : "Delete tombstone document but _tombstone field is not set [" + doc + " ]"; doc.add(softDeletesField); if (plan.addStaleOpToLucene || plan.currentlyDeleted) { indexWriter.addDocument(doc);
@Override protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException { if (fieldType().isEnabled() == false) { return; } for (ParseContext.Document document : context) { final List<String> paths = new ArrayList<>(document.getFields().size()); String previousPath = ""; // used as a sentinel - field names can't be empty for (IndexableField field : document.getFields()) { final String path = field.name(); if (path.equals(previousPath)) { // Sometimes mappers create multiple Lucene fields, eg. one for indexing, // one for doc values and one for storing. Deduplicating is not required // for correctness but this simple check helps save utf-8 conversions and // gives Lucene fewer values to deal with. continue; } paths.add(path); previousPath = path; } for (String path : paths) { for (String fieldName : extractFieldNames(path)) { if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { document.add(new Field(fieldType().name(), fieldName, fieldType())); } } } } }
@Override public void postParse(ParseContext context) throws IOException { // In the case of nested docs, let's fill nested docs with the original // so that Lucene doesn't write a Bitset for documents that // don't have the field. This is consistent with the default value // for efficiency. // we share the parent docs fields to ensure good compression SequenceIDFields seqID = context.seqID(); assert seqID != null; final Version versionCreated = context.mapperService().getIndexSettings().getIndexVersionCreated(); final boolean includePrimaryTerm = versionCreated.before(Version.V_6_1_0); for (Document doc : context.nonRootDocuments()) { doc.add(seqID.seqNo); doc.add(seqID.seqNoDocValue); if (includePrimaryTerm) { // primary terms are used to distinguish between parent and nested docs since 6.1.0 doc.add(seqID.primaryTerm); } } }
/**
 * Parses the current field value from the given {@link ParseContext}, adds the
 * resulting Lucene fields to the document, and then lets any multi-fields parse.
 *
 * @throws MapperParsingException if the concrete mapper fails to create fields
 */
public void parse(ParseContext context) throws IOException {
    final List<IndexableField> createdFields = new ArrayList<>(2);
    try {
        parseCreateField(context, createdFields);
        createdFields.forEach(field -> context.doc().add(field));
    } catch (Exception e) {
        // Wrap with field name and type so the caller sees which mapping failed.
        throw new MapperParsingException("failed to parse field [{}] of type [{}]", e,
            fieldType().name(), fieldType().typeName());
    }
    multiFields.parse(this, context);
}
@Override public void postParse(ParseContext context) throws IOException { // In the case of nested docs, let's fill nested docs with version=1 so that Lucene doesn't write a Bitset for documents // that don't have the field. This is consistent with the default value for efficiency. Field version = context.version(); assert version != null; for (Document doc : context.nonRootDocuments()) { doc.add(version); } }
/**
 * Marks this parsed document as a tombstone rather than a regular document.
 * Tombstone documents are stored in the Lucene index to represent delete
 * operations or noops.
 *
 * @return this document, for call chaining
 */
ParsedDocument toTombstone() {
    assert docs().size() == 1 : "Tombstone should have a single doc [" + docs() + "]";
    // Setting _tombstone=1 on the root doc is what flags it as a delete/noop marker.
    seqID.tombstoneField.setLongValue(1);
    rootDoc().add(seqID.tombstoneField);
    return this;
}
/**
 * Adds a field to the document and registers it under {@code key} so it can
 * later be retrieved with {@link #getByKey(Object)}.
 *
 * @throws IllegalStateException if a field was already stored under the same key
 */
public void addWithKey(Object key, IndexableField field) {
    if (keyedFields == null) {
        // Lazily allocated: most documents never store keyed fields.
        keyedFields = new ObjectObjectHashMap<>();
    } else if (keyedFields.containsKey(key)) {
        // Include the offending key so duplicate registrations are easy to track down.
        throw new IllegalStateException("Only one field can be stored per key, but key [" + key + "] was used twice");
    }
    keyedFields.put(key, field);
    add(field);
}
public ParsedDocument createNoopTombstoneDoc(String index, String reason) throws MapperParsingException { final String id = ""; // _id won't be used. final SourceToParse sourceToParse = SourceToParse.source(index, type, id, new BytesArray("{}"), XContentType.JSON); final ParsedDocument parsedDoc = documentParser.parseDocument(sourceToParse, noopTombstoneMetadataFieldMappers).toTombstone(); // Store the reason of a noop as a raw string in the _source field final BytesRef byteRef = new BytesRef(reason); parsedDoc.rootDoc().add(new StoredField(SourceFieldMapper.NAME, byteRef.bytes, byteRef.offset, byteRef.length)); return parsedDoc; }
/**
 * Indexes the given shape using the default prefix-tree strategy and lets the
 * {@code _field_names} machinery record the resulting field names.
 */
private void indexShape(ParseContext context, Shape shape) {
    final List<IndexableField> shapeFields =
        new ArrayList<>(Arrays.asList(fieldType().defaultPrefixTreeStrategy().createIndexableFields(shape)));
    createFieldNamesField(context, shapeFields);
    shapeFields.forEach(field -> context.doc().add(field));
}
/**
 * Indexes one {@code _ignored} entry per field name that was ignored while
 * parsing the current document.
 */
@Override
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
    for (String ignoredFieldName : context.getIgnoredFields()) {
        context.doc().add(new Field(NAME, ignoredFieldName, fieldType()));
    }
}
/**
 * Adds a context-enabled field covering all the defined context mappings to
 * {@code document}.
 * see {@link org.elasticsearch.search.suggest.completion.context.ContextMappings.TypedContextField}
 */
public void addField(ParseContext.Document document, String name, String input, int weight, Map<String, Set<String>> contexts) {
    final TypedContextField contextField = new TypedContextField(name, input, weight, contexts, document);
    document.add(contextField);
}