@Override
public void deleteDocuments( Query query ) throws IOException
{
    // Forward the query-based delete straight to the wrapped writer.
    writer.deleteDocuments( query );
}
@Override
public void deleteDocuments( Term term ) throws IOException
{
    // Forward the term-based delete straight to the wrapped writer.
    writer.deleteDocuments( term );
}
@Override
public void update(List<CoreLabel> tokens, String sentid) {
  // Re-index a sentence: drop any document previously stored under this
  // sentence id, then add the new tokens in its place.
  try {
    setIndexWriter();
    Term idTerm = new Term("sentid", sentid);
    indexWriter.deleteDocuments(new TermQuery(idTerm));
    add(tokens, sentid, true);
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
@Override
public void deleteDocuments( Term term ) throws IOException
{
    // The index is split into partitions; the delete must be applied to
    // every partition's writer so no matching document survives.
    for ( AbstractIndexPartition partition : index.getPartitions() )
    {
        partition.getIndexWriter().deleteDocuments( term );
    }
}
@Override
public void deleteDocuments( Query query ) throws IOException
{
    // Apply the query-based delete to each partition's writer in turn,
    // mirroring the term-based overload.
    for ( AbstractIndexPartition partition : index.getPartitions() )
    {
        partition.getIndexWriter().deleteDocuments( query );
    }
}
@Override
public void updateOrAdd( long entityId, Map<String, Object> properties )
{
    try
    {
        // Evict any cached state for the entity, remove its current index
        // document (a no-op if absent), then index the fresh properties.
        removeFromCache( entityId );
        writer.deleteDocuments( type.idTermQuery( entityId ) );
        add( entityId, properties );
    }
    catch ( IOException e )
    {
        throw new RuntimeException( e );
    }
}
/**
 * Persists the serialized form of {@code settings} as a single index
 * document keyed by {@link QueryBuilder#OBJUID} =
 * {@link #INDEX_ANALYSIS_SETTINGS_OBJUID}. Any previously stored settings
 * document is deleted first, so at most one copy ever exists.
 *
 * @param writer a defined, target instance
 * @param settings a defined instance
 * @throws IOException if I/O error occurs while writing Lucene
 */
public void write(IndexWriter writer, IndexAnalysisSettings settings) throws IOException {
    byte[] serialized = settings.serialize();

    // Drop the old settings document, if any, before adding the new one.
    writer.deleteDocuments(new Term(QueryBuilder.OBJUID, INDEX_ANALYSIS_SETTINGS_OBJUID));

    Document document = new Document();
    document.add(new StringField(QueryBuilder.OBJUID, INDEX_ANALYSIS_SETTINGS_OBJUID, Field.Store.NO));
    document.add(new StoredField(QueryBuilder.OBJSER, serialized));
    document.add(new StoredField(QueryBuilder.OBJVER, INDEX_ANALYSIS_SETTINGS_OBJVER));
    writer.addDocument(document);
}
/**
 * Deletes the index entry corresponding to the given table row.
 *
 * @param row the row whose index entry should be removed
 * @param commitIndex whether to commit the changes to the Lucene index
 * @throws SQLException if the underlying Lucene operation fails
 */
protected void delete(Object[] row, boolean commitIndex) throws SQLException {
    String query = getQuery(row);
    try {
        // The row is located in the index by its query string field.
        indexAccess.writer.deleteDocuments(new Term(LUCENE_FIELD_QUERY, query));
        if (commitIndex) {
            commitIndex();
        }
    } catch (IOException e) {
        throw convertException(e);
    }
}
@Override
public void delete(Object key) throws IOException {
    // Time the operation; endUpdate runs even when the delete throws.
    long startTime = stats.startUpdate();
    try {
        writer.deleteDocuments(SerializerUtil.toKeyTerm(key));
    } finally {
        stats.endUpdate(startTime);
    }
}
private void applyDocuments( IndexWriter writer, IndexType type, LongObjectMap<DocumentContext> documents ) throws IOException
{
    for ( DocumentContext context : documents )
    {
        if ( !context.exists )
        {
            // Entity not yet in the index: plain add.
            writer.addDocument( context.document );
            continue;
        }
        // Existing entity: an emptied document means every value was
        // removed, so the document itself is deleted; otherwise replace it.
        if ( LuceneDataSource.documentIsEmpty( context.document ) )
        {
            writer.deleteDocuments( type.idTerm( context.entityId ) );
        }
        else
        {
            writer.updateDocument( type.idTerm( context.entityId ), context.document );
        }
    }
}
/**
 * Removes entry for given key from this index.
 *
 * @param key Key.
 * @throws IgniteCheckedException If failed.
 */
public void remove(CacheObject key) throws IgniteCheckedException {
    try {
        BytesRef keyRef = new BytesRef(key.valueBytes(objectContext()));
        writer.deleteDocuments(new Term(KEY_FIELD_NAME, keyRef));
    } catch (IOException e) {
        throw new IgniteCheckedException(e);
    } finally {
        // The update counter is bumped whether or not the delete succeeded.
        updateCntr.incrementAndGet();
    }
}
// NOTE(review): mid-method fragment — presumably deletes all documents matching
// 'query' via the wrapped IndexWriter; the enclosing method is not visible here.
indexWriter.getIndexWriter().deleteDocuments(query);
// NOTE(review): truncated fragment — deletes by term, commits, then reads the
// remaining document count; the rest of the try block and its catch are not visible.
try { final IndexWriter indexWriter = writer.getIndexWriter(); indexWriter.deleteDocuments(term); indexWriter.commit(); final int docsLeft = indexWriter.numDocs();
// NOTE(review): fragment — deletes every document matching 'term'; enclosing
// method not shown.
writer.deleteDocuments(term);
@Override
void remove( TxDataHolder holder, EntityId entityId, String key, Object value )
{
    try
    {
        ensureLuceneDataInstantiated();
        long id = entityId.id();
        Document doc = findDocument( id );
        if ( doc != null )
        {
            index.type.removeFromDocument( doc, key, value );
            // Removing the last key/value empties the document, in which
            // case it is deleted outright rather than rewritten.
            if ( LuceneDataSource.documentIsEmpty( doc ) )
            {
                writer.deleteDocuments( index.type.idTerm( id ) );
            }
            else
            {
                writer.updateDocument( index.type.idTerm( id ), doc );
            }
        }
        invalidateSearcher();
    }
    catch ( IOException e )
    {
        throw new RuntimeException( e );
    }
}
/**
 * Removes a stale file (uidIter.term().text()) from the index database and
 * the history cache, and queues the removal of its xref.
 *
 * @param removeHistory if false, do not remove history cache for this file
 * @throws java.io.IOException if an error occurs
 */
private void removeFile(boolean removeHistory) throws IOException {
    String path = Util.uid2url(uidIter.term().utf8ToString());

    // Notify listeners before the removal starts...
    for (IndexChangedListener listener : listeners) {
        listener.fileRemove(path);
    }

    writer.deleteDocuments(new Term(QueryBuilder.U, uidIter.term()));
    removeXrefFile(path);
    if (removeHistory) {
        removeHistoryFile(path);
    }
    setDirty();

    // ...and again once it has completed.
    for (IndexChangedListener listener : listeners) {
        listener.fileRemoved(path);
    }
}
// Hard-delete path: asserts that soft deletes are disabled before delegating,
// since callers of a soft-delete-enabled writer are expected to use the
// soft-delete update path instead. (Trailing @Override belongs to the next,
// unseen method.)
@Override public long deleteDocuments(Term... terms) throws IOException { assert softDeleteEnabled == false : "Call #deleteDocuments but soft-deletes is enabled"; return super.deleteDocuments(terms); } @Override
/**
 * {@inheritDoc}
 */
public synchronized void del(final String docId) {
    try {
        writer.deleteDocuments(docIdTerm(docId));
    } catch (IOException e) {
        // Best-effort delete: the failure is logged rather than propagated.
        logger.error(e);
    }
}
private void delete(Record record) { // removes previous copy of this record from the index, if it's there Property idprop = config.getIdentityProperties().iterator().next(); Query q = parseTokens(idprop.getName(), record.getValue(idprop.getName())); try { iwriter.deleteDocuments(q); } catch (IOException e) { throw new DukeException(e); } }
// NOTE(review): fragment — deletes by the operation's uid term; the enclosing
// method is not visible here.
indexWriter.deleteDocuments(delete.uid());