/**
 * Adds the given documents to the index as a single batch.
 *
 * <p>NOTE(review): {@code numDocs} is not used by this implementation —
 * presumably other implementations use it for writer selection; confirm
 * against the interface contract.
 *
 * @param numDocs the number of documents in the batch (unused here)
 * @param documents the documents to index
 * @throws IOException if the underlying writer fails
 */
@Override
public void addDocuments( int numDocs, Iterable<Document> documents ) throws IOException {
    writer.addDocuments( documents );
}
/**
 * Adds the given documents as one batch, using the writer chosen for a
 * batch of {@code numDocs} documents.
 *
 * @param numDocs the number of documents in the batch; used to select a writer
 * @param documents the documents to index
 * @throws IOException if the selected writer fails
 */
@Override
public void addDocuments( int numDocs, Iterable<Document> documents ) throws IOException {
    getIndexWriter( numDocs ).addDocuments( documents );
}
/**
 * Indexes the given documents and decides whether the caller should commit:
 * a commit is due once the documents indexed since the last commit reach
 * {@code commitThreshold}, or once the commit deadline has passed.
 *
 * <p>Commit bookkeeping is maintained with a CAS loop on {@code commitStats}
 * so concurrent indexers never lose counted documents.
 *
 * @param documents the documents to add; an empty list is a no-op
 * @param commitThreshold number of indexed documents that triggers a commit
 * @return {@code true} if this call won the right to commit, {@code false} otherwise
 * @throws IOException if the underlying writer fails
 */
@Override
public boolean index(List<Document> documents, final int commitThreshold) throws IOException {
    if (documents.isEmpty()) {
        return false;
    }
    final int numDocs = documents.size();
    indexWriter.addDocuments(documents);
    totalIndexed.addAndGet(numDocs);
    boolean updated = false;
    while (!updated) {
        final CommitStats stats = commitStats.get();
        CommitStats updatedStats =
            new CommitStats(stats.getIndexedSinceCommit() + numDocs, stats.getNextCommitTimestamp());
        // System.nanoTime() values may wrap; per its Javadoc, deadline checks
        // must compare via subtraction (t1 - t0 >= 0), never with a direct >=.
        final boolean deadlinePassed =
            System.nanoTime() - updatedStats.getNextCommitTimestamp() >= 0;
        if (updatedStats.getIndexedSinceCommit() >= commitThreshold || deadlinePassed) {
            // Reset the counter and push the deadline out by maxCommitNanos.
            updatedStats = new CommitStats(0, System.nanoTime() + maxCommitNanos);
            updated = commitStats.compareAndSet(stats, updatedStats);
            if (updated) {
                return true;
            }
        } else {
            updated = commitStats.compareAndSet(stats, updatedStats);
        }
    }
    return false;
}
/**
 * Serializes {@code value} into Lucene documents, tags each document with
 * {@code key}, and adds the batch to the index. Serialization failures are
 * counted and logged but do not propagate; update timing is always recorded.
 *
 * @param key the entry key attached to every produced document
 * @param value the entry value to serialize and index
 * @throws IOException if the writer fails while adding the documents
 */
@Override
public void create(Object key, Object value) throws IOException {
    long start = stats.startUpdate();
    Collection<Document> docs = Collections.emptyList();
    boolean exceptionHappened = false;
    try {
        try {
            docs = serializer.toDocuments(index, value);
        } catch (Exception e) {
            exceptionHappened = true;
            stats.incFailedEntries();
            // Pass the exception itself so the stack trace is preserved
            // instead of only its message.
            logger.info("Failed to add index for " + value + " due to " + e.getMessage(), e);
        }
        if (!exceptionHappened) {
            docs.forEach(doc -> SerializerUtil.addKey(key, doc));
            writer.addDocuments(docs);
        }
    } finally {
        // Record timing even when serialization or indexing failed.
        stats.endUpdate(start);
    }
}
private void addStaleDocs(final List<ParseContext.Document> docs, final IndexWriter indexWriter) throws IOException { assert softDeleteEnabled : "Add history documents but soft-deletes is disabled"; for (ParseContext.Document doc : docs) { doc.add(softDeletesField); // soft-deleted every document before adding to Lucene } if (docs.size() > 1) { indexWriter.addDocuments(docs); } else { indexWriter.addDocument(docs.get(0)); } }
/**
 * Appends the given documents to the writer — using the single-document
 * code path for a one-element batch — and bumps the append metric by the
 * batch size.
 *
 * @param docs the documents to append
 * @param indexWriter the writer receiving the batch
 * @throws IOException if the writer fails
 */
private void addDocs(final List<ParseContext.Document> docs, final IndexWriter indexWriter) throws IOException {
    final int batchSize = docs.size();
    if (batchSize > 1) {
        indexWriter.addDocuments(docs);
    } else {
        indexWriter.addDocument(docs.get(0));
    }
    numDocAppends.inc(batchSize);
}
docs.add(document); if (len > maxLen) { writer.addDocuments(docs); docs.clear(); len = 0; writer.addDocuments(docs);
/**
 * Adds all the given documents to the underlying writer as one batch.
 *
 * @param documents the documents to add
 * @throws IOException if the writer fails
 */
void addDocuments(final Collection<Document> documents) throws IOException {
    indexWriter.addDocuments(documents);
}
/** Calls {@link IndexWriter#addDocuments(Iterable)} and * returns the generation that reflects this change. */ public long addDocuments(Iterable<? extends Iterable<? extends IndexableField>> docs) throws IOException { writer.addDocuments(docs); // Return gen as of when indexing finished: return indexingGen.get(); }
/** Calls {@link IndexWriter#addDocuments(Iterable)} and * returns the generation that reflects this change. */ public long addDocuments(Iterable<? extends Iterable<? extends IndexableField>> docs) throws IOException { writer.addDocuments(docs); // Return gen as of when indexing finished: return indexingGen.get(); }
/**
 * Hands the parsed documents to the writer, choosing the single-document
 * API when the batch holds exactly one document.
 *
 * @param docs the documents to index
 * @param indexWriter the writer receiving the batch
 * @throws IOException if the writer fails
 */
private static void index(final List<ParseContext.Document> docs, final IndexWriter indexWriter) throws IOException {
    if (docs.size() > 1) {
        indexWriter.addDocuments(docs);
    } else {
        indexWriter.addDocument(docs.get(0));
    }
}
/**
 * Writes the parsed documents to Lucene: multi-document batches go through
 * the batched API, a single document through the single-document API.
 *
 * @param docs the documents to write
 * @param indexWriter the destination writer
 * @throws IOException if the writer fails
 */
private static void index(final List<ParseContext.Document> docs, final IndexWriter indexWriter) throws IOException {
    if (docs.size() > 1) {
        indexWriter.addDocuments(docs);
    } else {
        indexWriter.addDocument(docs.get(0));
    }
}
private void addStaleDocs(final List<ParseContext.Document> docs, final IndexWriter indexWriter) throws IOException { assert softDeleteEnabled : "Add history documents but soft-deletes is disabled"; for (ParseContext.Document doc : docs) { doc.add(softDeletesField); // soft-deleted every document before adding to Lucene } if (docs.size() > 1) { indexWriter.addDocuments(docs); } else { indexWriter.addDocument(docs.get(0)); } }
/**
 * Delegates the batched add to the superclass while timing it via the
 * add-documents metric.
 *
 * @param docs the documents to add as one batch
 * @throws IOException if the superclass add fails
 */
@Override
public void addDocuments(Iterable<? extends Iterable<? extends IndexableField>> docs) throws IOException {
    // The ticker is closed by try-with-resources, stopping the timer even
    // when the add throws.
    try (Ticker ignored = addDocumentsMetric.start()) {
        super.addDocuments(docs);
    }
}
/**
 * Delegates the analyzer-specific batched add to the superclass while
 * timing it via the add-documents metric.
 *
 * @param docs the documents to add as one batch
 * @param analyzer the analyzer to use for this batch
 * @throws IOException if the superclass add fails
 */
@Override
public void addDocuments(Iterable<? extends Iterable<? extends IndexableField>> docs, Analyzer analyzer) throws IOException {
    // try-with-resources stops the timer even when the add throws.
    try (Ticker ignored = addDocumentsMetric.start()) {
        super.addDocuments(docs, analyzer);
    }
}
/**
 * Writes the documents to Lucene and records the number of appended
 * documents in the append metric. One-element batches take the
 * single-document code path.
 *
 * @param docs the documents to write
 * @param indexWriter the destination writer
 * @throws IOException if the writer fails
 */
private void addDocs(final List<ParseContext.Document> docs, final IndexWriter indexWriter) throws IOException {
    if (docs.size() == 1) {
        indexWriter.addDocument(docs.get(0));
    } else {
        indexWriter.addDocuments(docs);
    }
    numDocAppends.inc(docs.size());
}
/**
 * Adds this entry's documents to the writer and wraps the value returned
 * by the writer in an already-completed future. I/O failures are rethrown
 * as the logged domain exception.
 *
 * @param indexWriter the writer the entry is added to
 * @return a completed future holding the writer's return value for the add
 */
private CompletableFuture<Long> addEntry(IndexWriter indexWriter) {
    try {
        final long result = indexWriter.addDocuments( indexEntry );
        return CompletableFuture.completedFuture( result );
    } catch (IOException e) {
        throw log.unableToIndexEntry( tenantId, id, getEventContext(), e );
    }
}
/**
 * Adds the documents to the Lucene index and shares it on HDFS.
 *
 * <p>I/O failures are logged and swallowed, preserving this method's
 * existing best-effort contract.
 *
 * @param documents the documents to add
 */
public void createDocument(List<DQDocument> documents) {
    // Presize to the known batch size and brace the loop body (the original
    // unbraced single-statement loop is error-prone).
    List<Document> luceneDocuments = new ArrayList<>(documents.size());
    for (DQDocument document : documents) {
        luceneDocuments.add(DictionaryUtils.dqDocumentToLuceneDocument(document));
    }
    LOGGER.debug("create " + documents.size() + " documents");
    try {
        getWriter().addDocuments(luceneDocuments);
    } catch (IOException e) {
        LOGGER.error(e.getMessage(), e);
    }
}
public void visit(String sha1, String group, String artifactId, String version, String packaging, String classifier) { if (!this.filter.accept(sha1, group, artifactId, version, packaging, classifier)) return; try { // Add to Lucene index indexWriter.addDocuments(artifactToDocs(sha1, group, artifactId, version, packaging, classifier)); } catch (IOException ex) { throw new RuntimeException("Failed writing to IndexWriter: " + ex.getMessage(), ex); } }
/**
 * Replaces the index entry for the given tenant/id: documents matching the
 * discriminator-based multi-tenancy delete query are removed first, then
 * the new entry's documents are added.
 *
 * @param indexWriter the writer to update
 * @param tenantId the tenant owning the entry
 * @param id the entry identifier
 * @param indexEntry the replacement documents
 * @return the value returned by the writer for the add operation
 * @throws IOException if the delete or the add fails
 */
@Override
protected long doUpdateEntry(IndexWriter indexWriter, String tenantId, String id, LuceneIndexEntry indexEntry) throws IOException {
    // Remove any previously indexed documents for this tenant/id pair.
    indexWriter.deleteDocuments( LuceneQueries.discriminatorMultiTenancyDeleteDocumentQuery( tenantId, id ) );
    return indexWriter.addDocuments( indexEntry );
}
}