multiReader = new MultiReader(subreaders, true); } catch (IOException ex) { LOGGER.log(Level.SEVERE,
/**
 * Executes {@code query} against the store index, optionally combined with the
 * transaction-state index, and returns the matching documents.
 *
 * @param searcherRef reference holding the searcher for the committed index
 * @param fulltextTransactionStateSearcher searcher over uncommitted transaction
 *        state, or {@code null} when the transaction has no index changes
 * @param query the Lucene query to run
 * @param additionalParametersOrNull optional paging/sorting/scoring parameters
 * @param removed ids of entities removed in this transaction
 * @return an iterator over the matching documents
 * @throws IOException on index access failure
 */
private IndexHits<Document> search( IndexReference searcherRef, IndexSearcher fulltextTransactionStateSearcher, Query query, QueryContext additionalParametersOrNull, Collection<EntityId> removed ) throws IOException
{
    // NOTE(review): presumably this re-admits entities that were removed and
    // then re-added within the same transaction — confirm with letThroughAdditions.
    if ( fulltextTransactionStateSearcher != null && !removed.isEmpty() )
    {
        letThroughAdditions( fulltextTransactionStateSearcher, query, removed );
    }

    // With transaction state present, search a merged view of the committed
    // reader and the transaction-state reader; otherwise just the committed one.
    IndexSearcher searcher = fulltextTransactionStateSearcher == null ? searcherRef.getSearcher() : new IndexSearcher( new MultiReader( searcherRef.getSearcher().getIndexReader(), fulltextTransactionStateSearcher.getIndexReader() ) );
    IndexHits<Document> result;
    if ( additionalParametersOrNull != null && additionalParametersOrNull.getTop() > 0 )
    {
        // Caller asked for only the top-N hits.
        result = new TopDocsIterator( query, additionalParametersOrNull, searcher );
    }
    else
    {
        Sort sorting = additionalParametersOrNull != null ? additionalParametersOrNull.getSorting() : null;
        // Scores are computed unless the caller explicitly traded correctness for speed.
        boolean forceScore = additionalParametersOrNull == null || !additionalParametersOrNull.getTradeCorrectnessForSpeed();
        DocValuesCollector collector = new DocValuesCollector( forceScore );
        searcher.search( query, collector );
        return collector.getIndexHits( sorting );
    }
    return result;
}
/**
 * Aggregates the given Zoie index readers into one logical {@link MultiReader}.
 *
 * @param readerList the sub-readers to combine
 * @return a reader spanning all entries of {@code readerList}
 */
public MultiReader mergeIndexReaders(List<ZoieIndexReader<T>> readerList) {
    // closeSubReaders = false: the caller keeps ownership of each sub-reader.
    IndexReader[] subReaders = readerList.toArray(new IndexReader[readerList.size()]);
    return new MultiReader(subReaders, false);
}
}
private void reloadSearch() { //ok load all index to searcher if (readerList.size() > 0) { try { DirectoryReader[] dictList = readerList.values().toArray(new DirectoryReader[readerList.size()]); this.allInOneReader = new MultiReader(dictList); searcher = new IndexSearcher(this.allInOneReader); } catch (Exception e) { e.printStackTrace(); } } }
/**
 * Creates a searcher spanning the indexes of multiple repositories.
 *
 * Fix: if opening one of the index directories fails, the readers opened
 * before the failure are now closed instead of being leaked.
 *
 * @param objClasses the searchable object types; each type has its own index
 *        directory named after the class's simple name
 * @return a searcher over the combined indexes of all given types
 * @throws IOException if an index directory cannot be opened
 */
private IndexSearcher getSearchers(List<Class<? extends Searchable>> objClasses) throws IOException {
    IndexReader[] readers = new IndexReader[objClasses.size()];
    int idx = 0;
    try {
        for (Class<? extends Searchable> objClass : objClasses) {
            FSDirectory dir = FSDirectory.open(new File(indexPath + objClass.getSimpleName()));
            readers[idx++] = DirectoryReader.open(dir);
        }
    } catch (IOException e) {
        // Don't leak the readers that were already opened before the failure.
        for (int i = 0; i < idx; i++) {
            try {
                readers[i].close();
            } catch (IOException ignored) {
                // best effort cleanup; the original failure is rethrown below
            }
        }
        throw e;
    }
    // closeSubReaders = true: closing the MultiReader closes every sub-reader.
    return new IndexSearcher(new MultiReader(readers, true));
}
/**
 * Builds a federated searcher over the index of every configured CMS search
 * index entry.
 *
 * @return a searcher backed by a MultiReader spanning all indexes
 * @throws IOException if any index directory cannot be opened
 */
public IndexSearcher msearch() throws IOException {
    int size=indexes.size();
    IndexReader reader[] = new IndexReader[size];
    for(int j=0;j<size;j++) {
        String indexPath = ((CMSSearchIndex)indexes.get(j)).getAbsoluteIndexPath(); // absolute path of this index's files
        reader[j] = DirectoryReader.open(FSDirectory.open(new File(indexPath)));
    }
    IndexSearcher multiSearcher = new IndexSearcher( new MultiReader( reader) ); // combined search tool over all indexes
    return multiSearcher;
}

public HitResult search(){
/**
 * Creates a searcher over all currently available index readers.
 *
 * Fix: the manual element-by-element copy loop is replaced with the
 * idiomatic {@link java.util.List#toArray(Object[])}.
 *
 * @return a searcher backed by a MultiReader over every reader
 * @throws IOException on index access failure
 */
IndexSearcher makeIndexSearcher() throws IOException {
    ArrayList<IndexReader> readers = getReaders();
    // toArray replaces the original hand-written copy loop.
    IndexReader[] ireaders = readers.toArray(new IndexReader[readers.size()]);
    IndexReader ireader = new MultiReader(ireaders);
    return new IndexSearcher(ireader);
}
/**
 * Picks a random pre-built query, runs it against a combined view of all
 * pooled readers, and returns the top ten hits.
 *
 * @return the top ten documents for the randomly chosen query
 * @throws Exception on search failure
 */
@Override
public TopDocs handleQuery() throws Exception {
    List<IndexReader> borrowed = null;
    try {
        Query query = _queries[_rand.nextInt(_queries.length)];
        borrowed = _readerFactory.getIndexReaders();
        // closeSubReaders = false: the factory owns the readers' lifecycle.
        IndexReader[] subReaders = borrowed.toArray(new IndexReader[0]);
        IndexSearcher searcher = new IndexSearcher(new MultiReader(subReaders, false));
        return searcher.search(query, 10);
    } finally {
        // Always hand the readers back, even when the search throws.
        if (borrowed != null) {
            _readerFactory.returnIndexReaders(borrowed);
        }
    }
}
/**
 * Acquires one searcher per indexing context and returns a single merged
 * reader over all of them; {@code release()} must be called before the next
 * {@code acquire()}.
 *
 * @return a MultiReader spanning every context's current reader
 * @throws IOException on index access failure
 * @throws IllegalStateException if called again before release()
 */
public synchronized IndexReader acquire() throws IOException {
    if ( searchers != null ) {
        // Caller bug: free the stale searchers before failing loudly.
        release();
        throw new IllegalStateException( "acquire() called 2nd time without release() in between!" );
    }
    this.searchers = new ArrayList<IndexSearcher>();
    final ArrayList<IndexReader> acquiredReaders = new ArrayList<IndexReader>( contexts.size() );
    for ( IndexingContext context : contexts ) {
        final IndexSearcher contextSearcher = context.acquireIndexSearcher();
        searchers.add( contextSearcher );
        acquiredReaders.add( contextSearcher.getIndexReader() );
    }
    return new MultiReader( acquiredReaders.toArray( new IndexReader[acquiredReaders.size()] ) );
}
/**
 * Opens a combined reader over the given shards of a table's index.
 *
 * Fix: {@code generateTableId(tenantId, tableName)} does not depend on the
 * shard, so it is now computed once instead of once per shard.
 *
 * @param shardIds the shards whose indexes should be combined
 * @param tenantId tenant owning the table
 * @param tableName name of the table
 * @return a MultiReader over every shard that already has committed segments
 * @throws IOException on index access failure
 * @throws AnalyticsIndexException on index lookup failure
 */
private MultiReader getCombinedIndexReader(Set<Integer> shardIds, int tenantId, String tableName) throws IOException, AnalyticsIndexException {
    List<IndexReader> indexReaders = new ArrayList<>();
    // Loop-invariant: the table id is the same for every shard.
    String tableId = this.generateTableId(tenantId, tableName);
    for (int shardId : shardIds) {
        try {
            IndexReader reader = DirectoryReader.open(this.lookupIndexWriter(shardId, tableId), true);
            indexReaders.add(reader);
        } catch (IndexNotFoundException ignore) {
            /* this can happen if a user just started to index records in a table,
             * but it didn't yet do the first commit, so it does not have segment* files.
             * The execution comes to this place, because the shards are identified, since
             * there is some other intermediate files written to the index directory.
             * So in this situation, if we are in the middle of the initial commit, we ignore
             * this partially indexed data for now */
        }
    }
    return new MultiReader(indexReaders.toArray(new IndexReader[indexReaders.size()]));
}
/**
 * Opens a searcher over the managed reader of every directory provider,
 * falling back to an empty placeholder index when no provider is available.
 *
 * @return a searcher over all managed readers
 * @throws IndexNotAvailableException if any reader cannot be obtained
 */
public LucisSearcher get() {
    try {
        List<IndexReader> managed = Lists.newArrayListWithCapacity(providers.size());
        for (DirectoryProvider provider : providers) {
            managed.add(provider.getManagedReader());
        }
        if (managed.isEmpty()) {
            // No providers: search an empty in-memory index instead of failing.
            managed.add(IndexReader.open(EmptyDirectory.get()));
        }
        IndexReader[] subReaders = managed.toArray(new IndexReader[managed.size()]);
        return new DefaultLucisSearcher(new MultiReader(subReaders));
    } catch (Exception e) {
        throw new IndexNotAvailableException(e);
    }
}
/**
 * Acquires one searcher per indexing context and returns a single merged
 * reader over all of them; {@code release()} must be called before the next
 * {@code acquire()}.
 *
 * @return a MultiReader spanning every context's current reader
 * @throws IOException on index access failure
 * @throws IllegalStateException if called again before release()
 */
public synchronized IndexReader acquire() throws IOException {
    if ( searchers != null ) {
        // Caller bug: free the stale searchers before failing loudly.
        release();
        throw new IllegalStateException( "acquire() called 2nd time without release() in between!" );
    }
    this.searchers = new ArrayList<IndexSearcher>();
    final ArrayList<IndexReader> acquiredReaders = new ArrayList<IndexReader>( contexts.size() );
    for ( IndexingContext context : contexts ) {
        final IndexSearcher contextSearcher = context.acquireIndexSearcher();
        searchers.add( contextSearcher );
        acquiredReaders.add( contextSearcher.getIndexReader() );
    }
    return new MultiReader( acquiredReaders.toArray( new IndexReader[acquiredReaders.size()] ) );
}
/**
 * Acquires one searcher per indexing context and returns a single merged
 * reader over all of them; {@code release()} must be called before the next
 * {@code acquire()}.
 *
 * @return a MultiReader spanning every context's current reader
 * @throws IOException on index access failure
 * @throws IllegalStateException if called again before release()
 */
public synchronized IndexReader acquire() throws IOException {
    if ( searchers != null ) {
        // Caller bug: free the stale searchers before failing loudly.
        release();
        throw new IllegalStateException( "acquire() called 2nd time without release() in between!" );
    }
    this.searchers = new ArrayList<IndexSearcher>();
    final ArrayList<IndexReader> acquiredReaders = new ArrayList<IndexReader>( contexts.size() );
    for ( IndexingContext context : contexts ) {
        final IndexSearcher contextSearcher = context.acquireIndexSearcher();
        searchers.add( contextSearcher );
        acquiredReaders.add( contextSearcher.getIndexReader() );
    }
    return new MultiReader( acquiredReaders.toArray( new IndexReader[acquiredReaders.size()] ) );
}
/**
 * Returns a searcher over the indexes of the requested cluster segments, or
 * over every known cluster when no segments are given.
 *
 * @param clusterSegments the segments to search; null/empty means all clusters
 * @return a searcher over the NRT readers of the selected clusters
 * @throws RuntimeException wrapping any IOException from searcher creation
 */
public IndexSearcher getIndexSearcher( final ClusterSegment... clusterSegments ) {
    final Set<KCluster> clusters = new HashSet<KCluster>();
    if ( clusterSegments == null || clusterSegments.length == 0 ) {
        // No filter supplied: search across every known cluster.
        clusters.addAll( indexes.keySet() );
    } else {
        for ( final ClusterSegment clusterSegment : clusterSegments ) {
            clusters.add( new KClusterImpl( clusterSegment.getClusterId() ) );
        }
    }
    final Collection<IndexReader> readers = new ArrayList<IndexReader>( clusters.size() );
    for ( final KCluster cluster : clusters ) {
        readers.add( indexes.get( cluster ).nrtReader() );
    }
    try {
        final IndexReader[] subReaders = readers.toArray( new IndexReader[ readers.size() ] );
        return new SearcherFactory().newSearcher( new MultiReader( subReaders ) );
    } catch ( IOException e ) {
        throw new RuntimeException( e );
    }
}
private IndexReader createReader(List<LuceneIndexReader> nrtReaders) { //Increment count by 1. MultiReader does it for all readers //So no need for an explicit increment for MultiReader if (readers.size() == 1 && nrtReaders.isEmpty()){ IndexReader reader = readers.get(0).getReader(); reader.incRef(); return reader; } if (nrtReaders.size() == 1 && readers.isEmpty()){ IndexReader reader = nrtReaders.get(0).getReader(); reader.incRef(); return reader; } IndexReader[] readerArr = new IndexReader[readers.size() + nrtReaders.size()]; int i = 0; for (LuceneIndexReader r : Iterables.concat(readers, nrtReaders)){ readerArr[i++] = r.getReader(); } return new MultiReader(readerArr, false); }
/**
 * Opens a reader for every provider directory that currently holds an index
 * and returns a searcher over the combined set, falling back to an empty
 * placeholder index when none exist.
 *
 * Fix: readers opened before a later failure are now closed before the
 * wrapping exception is rethrown, so file handles are not leaked.
 *
 * @return a searcher over all available provider indexes
 * @throws IndexNotAvailableException if any reader cannot be opened
 */
public LucisSearcher get() {
    List<IndexReader> readers = Lists.newArrayListWithCapacity(providers.size());
    try {
        for (DirectoryProvider p : providers) {
            Directory d = p.getDirectory();
            // Skip providers whose directory is absent or not yet indexed.
            if (d != null && IndexReader.indexExists(d)) {
                readers.add(IndexReader.open(d));
            }
        }
        if (readers.isEmpty()) {
            final IndexReader reader = IndexReader.open(EmptyDirectory.get());
            readers.add(reader);
        }
        return new DefaultLucisSearcher(new MultiReader(readers.toArray(new IndexReader[readers.size()])));
    } catch (Exception e) {
        // Close whatever was opened before the failure.
        for (IndexReader opened : readers) {
            try {
                opened.close();
            } catch (Exception ignored) {
                // best effort cleanup; the original failure is reported below
            }
        }
        throw new IndexNotAvailableException(e);
    }
}
private IndexReader createReader(List<LuceneIndexReader> nrtReaders) { //Increment count by 1. MultiReader does it for all readers //So no need for an explicit increment for MultiReader if (readers.size() == 1 && nrtReaders.isEmpty()){ IndexReader reader = readers.get(0).getReader(); reader.incRef(); return reader; } if (nrtReaders.size() == 1 && readers.isEmpty()){ IndexReader reader = nrtReaders.get(0).getReader(); reader.incRef(); return reader; } IndexReader[] readerArr = new IndexReader[readers.size() + nrtReaders.size()]; int i = 0; for (LuceneIndexReader r : Iterables.concat(readers, nrtReaders)){ readerArr[i++] = r.getReader(); } return new MultiReader(readerArr, false); }
/**
 * Demo driver for a (non-Lucene) line-oriented MultiReader: registers two
 * listeners and reads /etc/hosts, so each line is delivered to both.
 */
public class MultiBufferedReader {
    public static void main(String[] args) throws IOException {
        MultiReader mr = new MultiReader();
        // First subscriber: echoes each line with a "1:" prefix.
        mr.addListener(new Listener() {
            @Override
            public void read(String line) {
                System.out.println("1: Got " + line);
            }
        });
        // Second subscriber: echoes each line with a "2:" prefix.
        mr.addListener(new Listener() {
            @Override
            public void read(String line) {
                System.out.println("2: Got " + line);
            }
        });
        // NOTE(review): reads a fixed system file; assumes a Unix-like host.
        mr.read(new File("/etc/hosts"));
    }
/**
 * Merges the committed index reader with the reader over the given
 * transaction changes into one combined reader.
 *
 * @param changes the pending transaction changes to include in the view
 * @return a MultiReader over the committed and transaction-state readers
 */
private MultiReader multiReader(OLuceneTxChanges changes) {
    try {
        final IndexReader committedReader = searcher.getIndexReader();
        final IndexReader txReader = changes.searcher().getIndexReader();
        return new MultiReader(committedReader, txReader);
    } catch (IOException e) {
        throw OException.wrapException(new OLuceneIndexException("unable to create reader on changes"), e);
    }
}
// Opens a reader for the index described by the on-disk segments file:
// a single SegmentReader when there is exactly one segment (the index is
// optimized), otherwise a MultiReader over one SegmentReader per segment.
public Object doBody() throws IOException {
    SegmentInfos infos = new SegmentInfos();
    infos.read(directory);
    if (infos.size() == 1) { // index is optimized
        // Single segment: no MultiReader wrapper needed; this reader also
        // takes over responsibility for closing the directory if requested.
        return new SegmentReader(infos, infos.info(0), closeDirectory);
    } else {
        IndexReader[] readers = new IndexReader[infos.size()];
        for (int i = 0; i < infos.size(); i++)
            readers[i] = new SegmentReader(infos.info(i));
        return new MultiReader(directory, infos, closeDirectory, readers);
    }
}
}.run();