@Override
public void updateDocument( Term term, Document document ) throws IOException
{
    writer.updateDocument( term, document );
}
private void updateDocs(final Term uid, final List<ParseContext.Document> docs, final IndexWriter indexWriter) throws IOException {
    if (softDeleteEnabled) {
        // Soft deletes mark the superseded document via a doc-values field
        // instead of removing it, so it stays recoverable from the index.
        if (docs.size() > 1) {
            indexWriter.softUpdateDocuments(uid, docs, softDeletesField);
        } else {
            indexWriter.softUpdateDocument(uid, docs.get(0), softDeletesField);
        }
    } else {
        if (docs.size() > 1) {
            indexWriter.updateDocuments(uid, docs);
        } else {
            indexWriter.updateDocument(uid, docs.get(0));
        }
    }
    numDocUpdates.inc(docs.size());
}
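A minimal sketch of the writer configuration the soft-delete branch above relies on, assuming a field named "__soft_deletes" and an analyzer and directory already in scope (the real code wires these through its own constants and engine config):

// Soft updates only work when the writer was created with a soft-deletes field.
IndexWriterConfig config = new IndexWriterConfig(analyzer)
        .setSoftDeletesField("__soft_deletes");
IndexWriter indexWriter = new IndexWriter(directory, config);

// The softDeletesField passed to softUpdateDocument(s) is a doc-values field
// that marks the superseded document as soft-deleted rather than removing it.
Field softDeletesField = new NumericDocValuesField("__soft_deletes", 1);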
return updateDocument((DocumentsWriterDeleteQueue.Node<?>) null, doc);
private static void update(final Term uid, final List<ParseContext.Document> docs, final IndexWriter indexWriter) throws IOException {
    if (docs.size() > 1) {
        indexWriter.updateDocuments(uid, docs);
    } else {
        indexWriter.updateDocument(uid, docs.get(0));
    }
}
/**
 * Updates a document by first deleting the document(s)
 * containing <code>term</code> and then adding the new
 * document. The delete and then add are atomic as seen
 * by a reader on the same index (flush may happen only after
 * the add).
 *
 * @param term the term to identify the document(s) to be
 * deleted
 * @param doc the document to be added
 * @return The <a href="#sequence_number">sequence number</a>
 * for this operation
 * @throws CorruptIndexException if the index is corrupt
 * @throws IOException if there is a low-level IO error
 */
public long updateDocument(Term term, Iterable<? extends IndexableField> doc) throws IOException {
    return updateDocument(term == null ? null : DocumentsWriterDeleteQueue.newNode(term), doc);
}
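Given the atomic delete-then-add contract described in the Javadoc above, a minimal caller-side sketch may help. The index directory and the "id"/"content" field names are illustrative assumptions, not part of the snippet above.

import java.nio.file.Paths;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.FSDirectory;

public class UpdateByIdExample {
    // Replace (or insert, if no match exists) the document whose "id" field
    // is "42"; a reader on the same index never sees the delete without the add.
    public static long upsert() throws Exception {
        try (FSDirectory dir = FSDirectory.open(Paths.get("index"));
             IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
            Document doc = new Document();
            doc.add(new StringField("id", "42", Field.Store.YES));
            doc.add(new TextField("content", "updated body text", Field.Store.NO));
            return writer.updateDocument(new Term("id", "42"), doc); // sequence number
        }
    }
}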
    writer.addDocument(doc);
} else {
    writer.updateDocument(new Term("content", strs[i]), doc);
private void applyDocuments( IndexWriter writer, IndexType type, LongObjectMap<DocumentContext> documents ) throws IOException
{
    for ( DocumentContext context : documents )
    {
        if ( context.exists )
        {
            if ( LuceneDataSource.documentIsEmpty( context.document ) )
            {
                writer.deleteDocuments( type.idTerm( context.entityId ) );
            }
            else
            {
                writer.updateDocument( type.idTerm( context.entityId ), context.document );
            }
        }
        else
        {
            writer.addDocument( context.document );
        }
    }
}
throw new IllegalArgumentException("at least one soft delete must be present"); return updateDocument(DocumentsWriterDeleteQueue.newNode(buildDocValuesUpdate(term, softDeletes)), doc);
writer.updateDocument( index.type.idTerm( id ), document );
writer.updateDocument(term, doc);
@Override
public void updateDocument( Term term, Document doc ) throws IOException
{
    List<AbstractIndexPartition> partitions = index.getPartitions();
    if ( index.hasSinglePartition( partitions ) && writablePartition( index.getFirstPartition( partitions ), 1 ) )
    {
        index.getFirstPartition( partitions ).getIndexWriter().updateDocument( term, doc );
    }
    else
    {
        // With multiple partitions the update cannot be delegated to a single
        // writer, so fall back to a separate delete and add.
        deleteDocuments( term );
        addDocument( doc );
    }
}
@Override
void remove( TxDataHolder holder, EntityId entityId, String key, Object value )
{
    try
    {
        ensureLuceneDataInstantiated();
        long id = entityId.id();
        Document document = findDocument( id );
        if ( document != null )
        {
            index.type.removeFromDocument( document, key, value );
            if ( LuceneDataSource.documentIsEmpty( document ) )
            {
                writer.deleteDocuments( index.type.idTerm( id ) );
            }
            else
            {
                writer.updateDocument( index.type.idTerm( id ), document );
            }
        }
        invalidateSearcher();
    }
    catch ( IOException e )
    {
        throw new RuntimeException( e );
    }
}
@Override
public long updateDocument(Term term, Iterable<? extends IndexableField> doc) throws IOException {
    assert softDeleteEnabled == false : "#updateDocument must not be called when soft-deletes is enabled";
    return super.updateDocument(term, doc);
}

@Override
/** {@inheritDoc} */
public synchronized void add(final String docId, final Document itdoc) {
    if (null == docId) {
        logger.error("No documentId specified. Ignoring addition.");
        return;
    }
    org.apache.lucene.document.Document doc = asLuceneDocument(itdoc);
    org.apache.lucene.document.Field docidPayloadField =
            new org.apache.lucene.document.Field(LsiIndex.PAYLOAD_TERM_FIELD, docId,
                    Field.Store.NO, Field.Index.ANALYZED);
    doc.add(docidPayloadField);
    doc.add(new Field("documentId", docId, Field.Store.NO, Field.Index.NOT_ANALYZED));
    try {
        if (logger.isDebugEnabled()) {
            logger.debug("Adding document with docId=" + docId + ". Doc is " + itdoc.getFieldNames());
        }
        writer.updateDocument(docIdTerm(docId), doc);
    } catch (IOException e) {
        logger.error(e);
    }
}
w.updateDocument(term, document);
writer.updateDocument(new Term("uid", LucenePDFDocument.createUID(file)), doc);
public void update(Term term, Document document) {
    committerThread.throwExceptionIfAny();
    try {
        writer.updateDocument(term, document);
    } catch (IOException ex) {
        throw Throwables.propagate(ex);
    }
}
@Override
public void updateDocument(Term term, Iterable<? extends IndexableField> doc, Analyzer analyzer) throws IOException {
    try (Ticker ignored = updateDocumentMetric.start()) {
        super.updateDocument(term, doc, analyzer);
    }
}
@Override
public int doLogic() throws Exception {
    final String docID = doc.get(DocMaker.ID_FIELD);
    if (docID == null) {
        throw new IllegalStateException("document must define the docid field");
    }
    final IndexWriter iw = getRunData().getIndexWriter();
    iw.updateDocument(new Term(DocMaker.ID_FIELD, docID), doc);
    return 1;
}
@Override
public void updateDocument(String path, Iterable<? extends IndexableField> doc) throws IOException {
    if (reindex) {
        // A full reindex starts from an empty index, so a plain addDocument
        // skips the delete-by-term lookup that updateDocument would perform.
        getWriter().addDocument(doc);
    } else {
        getWriter().updateDocument(newPathTerm(path), doc);
    }
    indexUpdated = true;
}