private void ensureLuceneDataInstantiated()
{
    if ( this.directory == null )
    {
        try
        {
            this.directory = new RAMDirectory();
            IndexWriterConfig writerConfig = new IndexWriterConfig( index.type.analyzer );
            this.writer = new IndexWriter( directory, writerConfig );
        }
        catch ( IOException e )
        {
            throw new RuntimeException( e );
        }
    }
}
/**
 * Get the index writer/searcher wrapper for the given connection.
 *
 * @param conn the connection
 * @return the index access wrapper
 */
protected static IndexAccess getIndexAccess(Connection conn) throws SQLException {
    String path = getIndexPath(conn);
    synchronized (INDEX_ACCESS) {
        IndexAccess access = INDEX_ACCESS.get(path);
        if (access == null) {
            try {
                Directory indexDir = path.startsWith(IN_MEMORY_PREFIX) ?
                        new RAMDirectory() : FSDirectory.open(new File(path));
                Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_30);
                IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_30, analyzer);
                conf.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
                IndexWriter writer = new IndexWriter(indexDir, conf);
                // see http://wiki.apache.org/lucene-java/NearRealtimeSearch
                access = new IndexAccess(writer);
            } catch (IOException e) {
                throw convertException(e);
            }
            INDEX_ACCESS.put(path, access);
        }
        return access;
    }
}
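The comment in the snippet above points at Lucene's near-real-time search wiki page. As a point of reference only, and assuming a current Lucene release rather than the 3.0 API used above (ByteBuffersDirectory standing in for the removed RAMDirectory, and all names below are illustrative), a minimal sketch of that pattern is to open a reader directly from the live writer:

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;

public class NearRealTimeSketch {
    public static void main(String[] args) throws Exception {
        // In-memory directory, playing the role of RAMDirectory in the snippet above.
        Directory dir = new ByteBuffersDirectory();
        IndexWriterConfig conf = new IndexWriterConfig(new StandardAnalyzer());
        conf.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);

        try (IndexWriter writer = new IndexWriter(dir, conf)) {
            Document doc = new Document();
            doc.add(new TextField("body", "hello lucene", Field.Store.YES));
            writer.addDocument(doc);

            // Near-real-time reader: opened from the writer, so it sees the
            // document above even though nothing has been committed yet.
            try (DirectoryReader reader = DirectoryReader.open(writer)) {
                IndexSearcher searcher = new IndexSearcher(reader);
                TopDocs hits = searcher.search(new TermQuery(new Term("body", "lucene")), 10);
                System.out.println(hits.totalHits);
            }
        }
    }
}

The reader obtained this way reflects changes still buffered in the writer, without an intervening commit, which is the point of caching a single long-lived writer per index path.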
private IndexWriter instantiateWriter( File folder )
{
    Directory dir = null;
    try
    {
        dir = LuceneDataSource.getDirectory( folder, identifier );
        IndexWriterConfig writerConfig = new IndexWriterConfig( type.analyzer );
        writerConfig.setRAMBufferSizeMB( determineGoodBufferSize( writerConfig.getRAMBufferSizeMB() ) );
        return new IndexWriter( dir, writerConfig );
    }
    catch ( IOException e )
    {
        IOUtils.closeAllSilently( dir );
        throw new RuntimeException( e );
    }
}
public void open() throws Exception {
    if ( !directory.exists() && !directory.mkdirs() ) {
        throw new IOException("Could not make: " + directory);
    }
    IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_35, new KeywordAnalyzer())
            .setOpenMode(IndexWriterConfig.OpenMode.CREATE);
    niofsDirectory = new NIOFSDirectory(directory, new SingleInstanceLockFactory());
    writer = new IndexWriter(niofsDirectory, conf);
}
IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_47, analyzer).setOpenMode(OpenMode.CREATE);
final IndexWriter writer = new IndexWriter(directory, config);
for (Stop stop : graphIndex.stopForId.values()) {
    addStop(writer, stop);
LOGGER.log(Level.INFO, "Optimizing the index{0}", projectDetail);
Analyzer analyzer = new StandardAnalyzer();
IndexWriterConfig conf = new IndexWriterConfig(analyzer);
conf.setOpenMode(OpenMode.CREATE_OR_APPEND);
wrt = new IndexWriter(indexDirectory, conf);
wrt.forceMerge(1); // this is deprecated and not needed anymore
elapsed.report(LOGGER, String.format("Done optimizing index%s", projectDetail));
IndexWriterConfig iwc = new IndexWriterConfig(Version.LUCENE_47, analyzer);
iwc.setOpenMode(OpenMode.CREATE_OR_APPEND);
IndexWriter writer = new IndexWriter(dir, iwc);
writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()));
private IndexWriterCount createWriter(final File indexDirectory) throws IOException {
    final List<Closeable> closeables = new ArrayList<>();
    final Directory directory = FSDirectory.open(indexDirectory);
    closeables.add(directory);

    try {
        final Analyzer analyzer = new StandardAnalyzer();
        closeables.add(analyzer);

        final IndexWriterConfig config = new IndexWriterConfig(LuceneUtil.LUCENE_VERSION, analyzer);

        final ConcurrentMergeScheduler mergeScheduler = new ConcurrentMergeScheduler();
        final int mergeThreads = repoConfig.getConcurrentMergeThreads();
        mergeScheduler.setMaxMergesAndThreads(mergeThreads, mergeThreads);
        config.setMergeScheduler(mergeScheduler);

        final IndexWriter indexWriter = new IndexWriter(directory, config);
        final EventIndexWriter eventIndexWriter = new LuceneEventIndexWriter(indexWriter, indexDirectory);

        final IndexWriterCount writerCount = new IndexWriterCount(eventIndexWriter, analyzer, directory, 1, false);
        logger.debug("Providing new index writer for {}", indexDirectory);
        return writerCount;
    } catch (final IOException ioe) {
        for (final Closeable closeable : closeables) {
            try {
                closeable.close();
            } catch (final IOException ioe2) {
                ioe.addSuppressed(ioe2);
            }
        }
        throw ioe;
    }
}
private IndexRepositoryImpl createIndexRepo() throws IOException {
    ConcurrentHashMap fileAndChunkRegion = new ConcurrentHashMap();
    RegionDirectory dir = new RegionDirectory(fileAndChunkRegion, fileSystemStats);
    IndexWriterConfig config = new IndexWriterConfig(analyzer);
    IndexWriter writer = new IndexWriter(dir, config);

    LuceneIndex index = Mockito.mock(LuceneIndex.class);
    Mockito.when(index.getFieldNames()).thenReturn(new String[] {"txt"});

    return new IndexRepositoryImpl(region, writer, mapper, indexStats, null, null, "", index);
}
closeables.add(analyzer);

final IndexWriterConfig config = new IndexWriterConfig(LuceneUtil.LUCENE_VERSION, analyzer);
config.setWriteLockTimeout(300000L);

final IndexWriter indexWriter = new IndexWriter(directory, config);
final EventIndexWriter eventIndexWriter = new LuceneEventIndexWriter(indexWriter, indexDirectory);
writerCount = new IndexWriterCount(eventIndexWriter, analyzer, directory, 1);
private IndexWriter newIndexWriter( IndexIdentifier identifier ) throws ExplicitIndexNotFoundKernelException
{
    try
    {
        Directory indexDirectory = getIndexDirectory( identifier );
        IndexType type = getType( identifier );
        IndexWriterConfig writerConfig = new IndexWriterConfig( type.analyzer );
        writerConfig.setIndexDeletionPolicy( new SnapshotDeletionPolicy( new KeepOnlyLastCommitDeletionPolicy() ) );
        Similarity similarity = type.getSimilarity();
        if ( similarity != null )
        {
            writerConfig.setSimilarity( similarity );
        }
        return new IndexWriter( indexDirectory, writerConfig );
    }
    catch ( IOException e )
    {
        throw new RuntimeException( e );
    }
}
try (IndexWriter indexWriter = new IndexWriter(index, new IndexWriterConfig(analyzer))) {
@Override
public IndexRepository computeIndexRepository(final Integer bucketId, LuceneSerializer serializer,
    InternalLuceneIndex index, PartitionedRegion userRegion, IndexRepository oldRepository,
    PartitionedRepositoryManager partitionedRepositoryManager) throws IOException {
  final IndexRepository repo;
  if (oldRepository != null) {
    oldRepository.cleanup();
  }
  LuceneRawIndex indexForRaw = (LuceneRawIndex) index;
  BucketRegion dataBucket = getMatchingBucket(userRegion, bucketId);

  Directory dir = null;
  if (indexForRaw.withPersistence()) {
    String bucketLocation = LuceneServiceImpl.getUniqueIndexName(index.getName(),
        index.getRegionPath() + "_" + bucketId);
    File location = new File(index.getName(), bucketLocation);
    if (!location.exists()) {
      location.mkdirs();
    }
    dir = new NIOFSDirectory(location.toPath());
  } else {
    dir = new RAMDirectory();
  }
  IndexWriterConfig config = new IndexWriterConfig(indexForRaw.getAnalyzer());
  IndexWriter writer = new IndexWriter(dir, config);
  return new IndexRepositoryImpl(null, writer, serializer, indexForRaw.getIndexStats(), dataBucket,
      null, "", indexForRaw);
}
try {
    Analyzer analyzer = AnalyzerGuru.getAnalyzer();
    IndexWriterConfig iwc = new IndexWriterConfig(analyzer);
    iwc.setOpenMode(OpenMode.CREATE_OR_APPEND);
    iwc.setRAMBufferSizeMB(env.getRamBufferSize());
    writer = new IndexWriter(indexDirectory, iwc);
RegionDirectory dir = new RegionDirectory(bucketTargetingMap, indexForPR.getFileSystemStats());
IndexWriterConfig config = new IndexWriterConfig(indexForPR.getAnalyzer());
IndexWriter writer = new IndexWriter(dir, config);
repo = new IndexRepositoryImpl(fileAndChunkBucket, writer, serializer, indexForPR.getIndexStats(),
    dataBucket, lockService, lockName, indexForPR);
public static void main(String[] args) throws Exception {
    IndexWriterConfig indexWriterConfig = new IndexWriterConfig(new StandardAnalyzer());
    try (IndexWriter writer = new IndexWriter(FSDirectory.open(Paths.get(args[0])), indexWriterConfig)) {
        LuceneIndexer indexer = new LuceneIndexer(new Tika(), writer);
        for (int i = 1; i < args.length; i++) {
            indexer.indexDocument(new File(args[i]));
        }
    }
}
private IndexWriter newWriter(final Directory dir, final Analyzer analyzer) throws IOException {
    final IndexWriterConfig config = new IndexWriterConfig(analyzer);
    config.setUseCompoundFile(ini.getBoolean("lucene.useCompoundFile", false));
    config.setRAMBufferSizeMB(ini.getDouble("lucene.ramBufferSizeMB", IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB));
    return new IndexWriter(dir, config);
}
IndexWriterConfig iwc = new IndexWriterConfig(analyzer);
if (create)
    try (final IndexWriter writer = new IndexWriter(dir, iwc))
/**
 * This method removes all Lucene files from the given directory. It will first try to delete all commit points /
 * segments files to ensure broken commits or corrupted indices will not be opened in the future. If any of the
 * segment files can't be deleted, this operation fails.
 */
public static void cleanLuceneIndex(Directory directory) throws IOException {
    try (Lock writeLock = directory.obtainLock(IndexWriter.WRITE_LOCK_NAME)) {
        for (final String file : directory.listAll()) {
            if (file.startsWith(IndexFileNames.SEGMENTS) || file.equals(IndexFileNames.OLD_SEGMENTS_GEN)) {
                // remove all segments_N files
                directory.deleteFile(file);
            }
        }
    }
    try (IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)
            .setSoftDeletesField(Lucene.SOFT_DELETES_FIELD)
            .setMergePolicy(NoMergePolicy.INSTANCE) // no merges
            .setCommitOnClose(false) // no commits
            .setOpenMode(IndexWriterConfig.OpenMode.CREATE))) // force creation - don't append...
    {
        // do nothing and close; this will kick off IndexFileDeleter, which will remove all pending files
    }
}
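The snippet above depends on project-specific constants (Lucene.STANDARD_ANALYZER, Lucene.SOFT_DELETES_FIELD). As a rough, self-contained sketch of the same two-step idea using only stock Lucene classes, one might do the following; the class name, command-line argument, and analyzer choice are illustrative, and the soft-deletes field from the original is omitted:

import java.nio.file.Paths;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.Lock;

public class WipeIndexSketch {
    public static void main(String[] args) throws Exception {
        // args[0] is the path of the index directory to wipe (illustrative).
        try (Directory directory = FSDirectory.open(Paths.get(args[0]))) {
            // Step 1: delete the commit points (segments_N files) while holding the write lock,
            // so a broken or corrupted commit can never be opened again.
            try (Lock writeLock = directory.obtainLock(IndexWriter.WRITE_LOCK_NAME)) {
                for (String file : directory.listAll()) {
                    if (file.startsWith(IndexFileNames.SEGMENTS)) {
                        directory.deleteFile(file);
                    }
                }
            }
            // Step 2: opening a writer with OpenMode.CREATE and closing it lets Lucene's
            // internal file deleter remove the now-orphaned per-segment files.
            IndexWriterConfig config = new IndexWriterConfig(new StandardAnalyzer())
                    .setMergePolicy(NoMergePolicy.INSTANCE)
                    .setCommitOnClose(false)
                    .setOpenMode(IndexWriterConfig.OpenMode.CREATE);
            try (IndexWriter writer = new IndexWriter(directory, config)) {
                // nothing to do; closing the writer triggers the cleanup
            }
        }
    }
}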