/**
 * Get the index writer/searcher wrapper for the given connection.
 *
 * @param conn the connection
 * @return the index access wrapper
 */
protected static IndexAccess getIndexAccess(Connection conn) throws SQLException {
    String path = getIndexPath(conn);
    synchronized (INDEX_ACCESS) {
        IndexAccess access = INDEX_ACCESS.get(path);
        if (access == null) {
            try {
                Directory indexDir = path.startsWith(IN_MEMORY_PREFIX) ?
                        new RAMDirectory() : FSDirectory.open(new File(path));
                Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_30);
                IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_30, analyzer);
                conf.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
                IndexWriter writer = new IndexWriter(indexDir, conf);
                // see http://wiki.apache.org/lucene-java/NearRealtimeSearch
                access = new IndexAccess(writer);
            } catch (IOException e) {
                throw convertException(e);
            }
            INDEX_ACCESS.put(path, access);
        }
        return access;
    }
}
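A caller that wants the near-real-time behaviour hinted at by the wiki link above could open a reader directly from the cached writer. A minimal sketch, assuming IndexAccess exposes the IndexWriter it wraps as a writer field (that field name, plus the "DATA" field and term, are illustrative, not part of the original code):

IndexAccess access = getIndexAccess(conn);
// Open a near-real-time reader over the writer (Lucene 3.x API), so documents
// added but not yet committed are visible to the search.
IndexReader reader = IndexReader.open(access.writer, true);
IndexSearcher searcher = new IndexSearcher(reader);
try {
    TopDocs hits = searcher.search(new TermQuery(new Term("DATA", "example")), 10);
    System.out.println(hits.totalHits + " hits");
} finally {
    searcher.close();
    reader.close();
}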
@Override
public synchronized void close() {
    for ( RAMDirectory ramDirectory : directories.values() ) {
        ramDirectory.close();
    }
    directories.clear();
}
}
private void unCache(String fileName) throws IOException {
    // Only let one thread uncache at a time; this only
    // happens during commit() or close():
    synchronized (uncacheLock) {
        if (VERBOSE) {
            System.out.println("nrtdir.unCache name=" + fileName);
        }
        if (!cache.fileNameExists(fileName)) {
            // Another thread beat us...
            return;
        }
        assert slowFileExists(in, fileName) == false : "fileName=" + fileName + " exists both in cache and in delegate";
        final IOContext context = IOContext.DEFAULT;
        final IndexOutput out = in.createOutput(fileName, context);
        IndexInput in = null;
        try {
            in = cache.openInput(fileName, context);
            out.copyBytes(in, in.length());
        } finally {
            IOUtils.close(in, out);
        }
        // Lock order: uncacheLock -> this
        synchronized (this) {
            // Must sync here because other sync methods have
            // if (cache.fileNameExists(name)) { ... } else { ... }:
            cache.deleteFile(fileName);
        }
    }
}
@Override
public synchronized IndexInput openInput(String name, IOContext context) throws IOException {
    if (VERBOSE) {
        System.out.println("nrtdir.openInput name=" + name);
    }
    if (cache.fileNameExists(name)) {
        if (VERBOSE) {
            System.out.println(" from cache");
        }
        return cache.openInput(name, context);
    } else {
        return in.openInput(name, context);
    }
}
/**
 * Create a Lucene RAMDirectory from a list of BroadcastDocumentObject.
 */
static RAMDirectory createRamDirectoryFromDocuments(List<BroadcastDocumentObject> dictionaryObject) throws IOException {
    RAMDirectory ramDirectory = new RAMDirectory();
    IndexWriterConfig writerConfig = new IndexWriterConfig(Version.LATEST, new StandardAnalyzer(CharArraySet.EMPTY_SET));
    IndexWriter writer = new IndexWriter(ramDirectory, writerConfig);
    for (BroadcastDocumentObject objectDoc : dictionaryObject) {
        writer.addDocument(BroadcastUtils.createLuceneDocumentFromObject(objectDoc));
    }
    writer.commit();
    writer.close();
    return ramDirectory;
}
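A hedged usage sketch for the helper above: open a reader over the returned directory and run a simple term query. The field name "term" and the query text are assumptions for illustration; only the helper itself comes from the original code.

// Illustrative usage of createRamDirectoryFromDocuments(); reuses the caller's
// dictionaryObject list, and the field/term below are placeholders.
RAMDirectory dir = createRamDirectoryFromDocuments(dictionaryObject);
try (IndexReader reader = DirectoryReader.open(dir)) {
    IndexSearcher searcher = new IndexSearcher(reader);
    TopDocs hits = searcher.search(new TermQuery(new Term("term", "example")), 10);
    System.out.println(hits.totalHits + " matching documents");
} finally {
    dir.close();
}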
RAMDirectory idx = new RAMDirectory();
IndexWriter writer = new IndexWriter(
        idx,
        new IndexWriterConfig(Version.LUCENE_40, new ClassicAnalyzer(Version.LUCENE_40))
);

Document document = new Document();
document.add(new StringField("ticket_number", "t123", Field.Store.YES));
document.add(new IntField("ticket_id", 234, Field.Store.YES));
document.add(new StringField("id_s", "234", Field.Store.YES));
writer.addDocument(document);
writer.commit();

IndexReader reader = DirectoryReader.open(idx);
IndexSearcher searcher = new IndexSearcher(reader);

Query q1 = new TermQuery(new Term("id_s", "234"));
TopDocs td1 = searcher.search(q1, 1);
System.out.println(td1.totalHits); // prints "1"

Query q2 = NumericRangeQuery.newIntRange("ticket_id", 1, 234, 234, true, true);
TopDocs td2 = searcher.search(q2, 1);
System.out.println(td2.totalHits); // prints "1"
private int findInText(String whatToIndex, String whatToSearch) throws Exception {
    final Directory d = new RAMDirectory();
    // Note: this IndexWriterConfig constructor no longer takes a Version argument.
    IndexWriterConfig config = new IndexWriterConfig(analyzer);
    config.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
    IndexWriter writer = new IndexWriter(d, config);
    Document doc = new Document();
    doc.add(new TextField("content", whatToIndex, Store.YES));
    writer.addDocument(doc);
    writer.close();

    DirectoryReader reader = DirectoryReader.open(d);
    IndexSearcher searcher = new IndexSearcher(reader);
    QueryParser qp = new QueryParser("content", analyzer);
    Query query = qp.parse(whatToSearch);
    ScoreDoc[] hits = searcher.search(query, 1000).scoreDocs;
    reader.close();
    d.close();
    return hits.length;
}
}
public TopDocs cosine(String queryTerm, int n, String... terms) throws IOException, ParseException {
    Directory directory = new RAMDirectory();
    final Pattern pattern = Pattern.compile("."); // intended for the custom per-character analyzer below
    // NOTE: the original snippet declared an anonymous Analyzer here whose body was truncated;
    // a WhitespaceAnalyzer is substituted as a placeholder so the example compiles.
    Analyzer analyzer = new WhitespaceAnalyzer(Version.LUCENE_36);
    IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_36, analyzer);
    IndexWriter writer = new IndexWriter(directory, conf);
    for (String term : terms) {
        Document doc = new Document();
        doc.add(new Field("chars", term, Field.Store.YES, Field.Index.ANALYZED));
        writer.addDocument(doc);
    }
    writer.close();

    IndexReader reader = IndexReader.open(directory);
    IndexSearcher searcher = new IndexSearcher(reader);
    TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), terms.length);
    for (int i = 0; i < topDocs.scoreDocs.length; i++) {
        System.out.println("Id: " + topDocs.scoreDocs[i].doc
                + " Val: " + searcher.doc(topDocs.scoreDocs[i].doc).get("chars"));
    }
    // The cosine-similarity scoring of queryTerm against the indexed terms (top n results)
    // was not part of the visible snippet.
    return topDocs;
}
public static void main(String[] args) throws Exception {
    Directory directory = new RAMDirectory();
    Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_30);
    MaxFieldLength mlf = MaxFieldLength.UNLIMITED;
    // The writer creation, loop bounds and query were missing from the original snippet;
    // the values below are illustrative reconstructions.
    IndexWriter writer = new IndexWriter(directory, analyzer, true, mlf);
    long baseTime = System.currentTimeMillis();
    for (int i = 0; i < 10; i++) {
        Document doc = new Document();
        String id = String.valueOf(i);
        String date = buildDate(baseTime + i * 1000); // buildDate(...) is a helper defined elsewhere
        doc.add(new Field("id", id, Store.YES, Index.NOT_ANALYZED));
        doc.add(new Field("date", date, Store.YES, Index.NOT_ANALYZED));
        writer.addDocument(doc);
    }
    writer.close();

    IndexSearcher searcher = new IndexSearcher(directory);
    Query query = new TermQuery(new Term("id", "5")); // placeholder query, not shown in the original
    TopDocs topDocs = searcher.search(query, 10);
    for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
        Document doc = searcher.doc(scoreDoc.doc);
        System.out.println(doc);
    }
    searcher.close();
}
RAMDirectory idx = new RAMDirectory();
IndexWriterConfig iwc = new IndexWriterConfig(LUCENE_41, anal); // 'anal', 'doc' and 'query' are defined in the surrounding code
IndexWriter writer = new IndexWriter(idx, iwc);
writer.addDocument(doc);
writer.close();

// The original snippet used 'reader' without opening it; open it from the directory here.
IndexReader reader = DirectoryReader.open(idx);
TopDocs results = new IndexSearcher(reader).search(query, 1);
float score = 0;
for (ScoreDoc hit : results.scoreDocs) {
    score = hit.score; // the loop body was missing in the original; take the hit's score
}
reader.close();
idx.close();
return score;
@Test
public void queryFindsADocumentThatWasAdded() throws IOException {
    // Create an in memory lucene index to add a document to
    RAMDirectory directory = new RAMDirectory();
    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig());

    // Add a document to the lucene index
    Document document = new Document();
    document.add(new TextField("name", "name", Field.Store.YES));
    Field[] fields = SpatialHelper.getIndexableFields(-122.8515139, 45.5099231);
    for (Field field : fields) {
        document.add(field);
    }
    writer.addDocument(document);
    writer.commit();

    // Make sure a findWithin query locates the document
    Query query = SpatialHelper.findWithin(-122.8515239, 45.5099331, 1);
    SearcherManager searcherManager = new SearcherManager(writer, null);
    IndexSearcher searcher = searcherManager.acquire();
    TopDocs results = searcher.search(query, 100);
    assertEquals(1, results.totalHits);
}
}
if ((docCount < maxDocsForInMemoryMerge) && (mergeSize < maxRamInMbForInMemoryMerge)) {
    ramDirectory = new RAMDirectory();
    writer = new IndexWriter(ramDirectory, new AlfrescoStandardAnalyser(), true, MaxFieldLength.UNLIMITED);
} else {
    // The original snippet listed both writer assignments back to back; the on-disk
    // writer presumably belongs to the else branch of the in-memory check.
    writer = new IndexWriter(location, new AlfrescoStandardAnalyser(), true, MaxFieldLength.UNLIMITED);
}
writer.setUseCompoundFile(mergerUseCompoundFile);
writer.setMaxBufferedDocs(mergerMaxBufferedDocs);
writer.setRAMBufferSizeMB(mergerRamBufferSizeMb);

// ... (indexing steps elided in the original snippet) ...

// Copy the in-memory segment files to the on-disk output location.
String[] files = ramDirectory.list();
Directory directory = FSDirectory.getDirectory(outputLocation, true);
for (int i = 0; i < files.length; i++) {
    IndexOutput os = directory.createOutput(files[i]);
    IndexInput is = ramDirectory.openInput(files[i]);
    // the byte-for-byte copy between 'is' and 'os' was elided in the original snippet
    os.close();
    is.close();
}
ramDirectory.close();
directory.close();
@Test
public void pageContentIsFoundAfterCommit() throws CorruptIndexException, LockObtainFailedException, IOException {
    Directory index = new RAMDirectory();
    IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_35, new StandardAnalyzer(Version.LUCENE_35));
    IndexWriter writer = new IndexWriter(index, config);
    IndexerImpl indexerImpl = new IndexerImpl(writer);

    PageContent content = new PageContent("http://path", new ArrayList<String>(), "This is the title", "This is the content");
    indexerImpl.index(content);
    indexerImpl.commit();

    IndexReader reader = IndexReader.open(index);
    IndexSearcher searcher = new IndexSearcher(reader);
    TermQuery query = new TermQuery(new Term("content", "content"));
    TopDocs result = searcher.search(query, 10);
    assertEquals(1, result.totalHits);
}
Directory directory = new RAMDirectory();
// The writer setup was not shown in the original snippet; a plain Lucene 3.x writer is assumed here.
IndexWriter iwriter = new IndexWriter(directory, new StandardAnalyzer(Version.LUCENE_36), true, MaxFieldLength.UNLIMITED);
Document doc = new Document();
String text = word; // 'word' comes from the surrounding code
doc.add(new Field("name", text, Field.Store.YES, Field.Index.NOT_ANALYZED));
iwriter.addDocument(doc);
iwriter.close();

// Search the field with a case-insensitive regular expression (contrib RegexQuery).
// The reader/searcher setup and the pattern below are reconstructed; only the
// JavaUtilRegexCapabilities flag appeared in the original fragment.
IndexSearcher isearcher2 = new IndexSearcher(IndexReader.open(directory));
RegexQuery query2 = new RegexQuery(new Term("name", "hello.*"));
query2.setRegexImplementation(new JavaUtilRegexCapabilities(JavaUtilRegexCapabilities.FLAG_CASE_INSENSITIVE));
ScoreDoc[] hits2 = isearcher2.search(query2, null, 1000).scoreDocs;
for (int i = 0; i < hits2.length; i++) {
    Document hitDoc = isearcher2.doc(hits2[i].doc);
    logger.info("HelloLucene.main: starting with O = " + hitDoc.get("name"));
}
private void ensureLuceneDataInstantiated() {
    if ( this.directory == null ) {
        try {
            this.directory = new RAMDirectory();
            IndexWriterConfig writerConfig = new IndexWriterConfig( index.type.analyzer );
            this.writer = new IndexWriter( directory, writerConfig );
        } catch ( IOException e ) {
            throw new RuntimeException( e );
        }
    }
}
@Before
public void setupIndex() throws Exception {
    Directory dir = new RAMDirectory();
    IndexWriterConfig conf = new IndexWriterConfig(new VocabularyIndexAnalyzer());
    try (IndexWriter writer = new IndexWriter(dir, conf)) {
        addDoc(writer, "hippocampus");
        addDoc(writer, "hippocampal structures");
        addDoc(writer, "structure of the hippocampus");
        addDoc(writer, "formation");
        writer.commit();
    }
    IndexReader reader = DirectoryReader.open(dir);
    searcher = new IndexSearcher(reader);
    parser = new QueryParser(NodeProperties.LABEL, new VocabularyQueryAnalyzer());
}
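A hedged follow-up to the fixture above: a test could parse a query with the configured parser and run it through the searcher. The test name, query text, and assertion are illustrative; they are not part of the original setup.

@Test
public void findsHippocampusLabel() throws Exception {
    // Hypothetical test built on setupIndex(); query text and expectation are illustrative.
    Query query = parser.parse("hippocampus");
    TopDocs hits = searcher.search(query, 10);
    assertTrue(hits.scoreDocs.length > 0);
}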
// The two assignments below appeared back to back in the original snippet; they presumably
// belong to different branches (copy an existing directory into memory vs. start from an empty one).
index = new RAMDirectory(sourceDir, IOContext.DEFAULT);
index = new RAMDirectory();

IndexWriterConfig indexWriterConfig = new IndexWriterConfig(analyzer);
LogDocMergePolicy logDocMergePolicy = new LogDocMergePolicy();
logDocMergePolicy.setMergeFactor(1000);
indexWriterConfig.setMergePolicy(logDocMergePolicy);
w = new IndexWriter(index, indexWriterConfig);
w.getConfig().setRAMBufferSizeMB(32);
static IndexSearcher createMultiDocumentSearcher(Analyzer analyzer, Collection<ParsedDocument> docs) {
    RAMDirectory ramDirectory = new RAMDirectory();
    try (IndexWriter indexWriter = new IndexWriter(ramDirectory, new IndexWriterConfig(analyzer))) {
        // Indexing in order here, so that the user provided order matches with the docid sequencing:
        Iterable<ParseContext.Document> iterable = () -> docs.stream()
                .map(ParsedDocument::docs)
                .flatMap(Collection::stream)
                .iterator();
        indexWriter.addDocuments(iterable);

        DirectoryReader directoryReader = DirectoryReader.open(indexWriter);
        assert directoryReader.leaves().size() == 1 : "Expected single leaf, but got [" + directoryReader.leaves().size() + "]";
        final IndexSearcher slowSearcher = new IndexSearcher(directoryReader) {
            @Override
            public Weight createNormalizedWeight(Query query, boolean needsScores) throws IOException {
                BooleanQuery.Builder bq = new BooleanQuery.Builder();
                bq.add(query, BooleanClause.Occur.MUST);
                bq.add(Queries.newNestedFilter(), BooleanClause.Occur.MUST_NOT);
                return super.createNormalizedWeight(bq.build(), needsScores);
            }
        };
        slowSearcher.setQueryCache(null);
        return slowSearcher;
    } catch (IOException e) {
        throw new ElasticsearchException("Failed to create index for percolator with nested document ", e);
    }
}
private static synchronized void create() throws IOException {
    if (directory != null) {
        return;
    }
    final RAMDirectory ram = new RAMDirectory();
    IndexWriter w = new IndexWriter(ram, Factory.get().writerConfig());
    w.commit();
    w.close();
    directory = ram;
}
}
/**
 * Creates and loads data into an in memory index.
 *
 * @param cve the data source to retrieve the cpe data
 * @throws IndexException thrown if there is an error creating the index
 */
public synchronized void open(CveDB cve) throws IndexException {
    if (INSTANCE.usageCount.addAndGet(1) == 1) {
        index = new RAMDirectory();
        buildIndex(cve);
        try {
            indexReader = DirectoryReader.open(index);
        } catch (IOException ex) {
            throw new IndexException(ex);
        }
        indexSearcher = new IndexSearcher(indexReader);
        searchingAnalyzer = createSearchingAnalyzer();
        queryParser = new QueryParser(Fields.DOCUMENT_KEY, searchingAnalyzer);
    }
}
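A hedged sketch of how the searcher and parser initialized in open(CveDB) might be queried. The helper method name and the idea of returning the first hit are assumptions for illustration; only the fields it reads come from the original code.

// Hypothetical helper built on the fields initialized in open(CveDB).
public Document searchFirst(String queryText) throws ParseException, IOException {
    Query query = queryParser.parse(queryText);
    TopDocs docs = indexSearcher.search(query, 1);
    if (docs.scoreDocs.length == 0) {
        return null;
    }
    return indexSearcher.doc(docs.scoreDocs[0].doc);
}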