CustomPhraseWeight(IndexSearcher searcher, CustomPhraseQuery query) throws IOException {
  super(query);
  this.query = query;
  IndexReaderContext context = searcher.getTopReaderContext();
  this.states = new TermContext[query.terms.length];
  for (int i = 0; i < query.terms.length; ++i) {
    Term term = query.terms[i];
    this.states[i] = TermContext.build(context, term);
  }
}
@Override
public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
  final TermContext context;
  final IndexReaderContext topContext = searcher.getTopReaderContext();
  if (termContext == null || termContext.wasBuiltFor(topContext) == false) {
    context = TermContext.build(topContext, term);
  } else {
    context = termContext;
  }
  return new SpanTermWeight(context, searcher, needsScores ? Collections.singletonMap(term, context) : null, boost);
}
@Override
protected Similarity.SimWeight getStats(IndexSearcher searcher) throws IOException {
  final IndexReaderContext context = searcher.getTopReaderContext();
  // compute idf
  ArrayList<TermStatistics> allTermStats = new ArrayList<>();
  for (final Term[] terms : termArrays) {
    for (Term term : terms) {
      TermContext ts = termStates.get(term);
      if (ts == null) {
        ts = TermContext.build(context, term);
        termStates.put(term, ts);
      }
      if (needsScores) {
        TermStatistics termStatistics = searcher.termStatistics(term, ts);
        if (termStatistics != null) {
          allTermStats.add(termStatistics);
        }
      }
    }
  }
  if (allTermStats.isEmpty()) {
    return null; // none of the terms were found, we won't use sim at all
  } else {
    return similarity.computeWeight(
        boost,
        searcher.collectionStatistics(field),
        allTermStats.toArray(new TermStatistics[allTermStats.size()]));
  }
}
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
  final IndexReaderContext context = searcher.getTopReaderContext();
  final TermContext termState;
  if (perReaderTermState == null || perReaderTermState.wasBuiltFor(context) == false) {
    if (needsScores) {
      // make TermQuery single-pass if we don't have a per-reader term state
      // or if the context differs!
      termState = TermContext.build(context, term);
    } else {
      // do not compute the term state, this will help save seeks in the terms
      // dict on segments that have a cache entry for this query
      termState = null;
    }
  } else {
    // perReaderTermState was pre-built for this IndexSearcher
    termState = this.perReaderTermState;
  }
  return new TermWeight(searcher, needsScores, boost, termState);
}
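For comparison, here is a minimal caller-side sketch (not taken from the source above) of the case where the final branch fires: the TermContext is pre-built against the same searcher and handed to TermQuery's expert constructor, so createWeight() reuses it instead of rebuilding it. The class name, field name, and term value are invented for illustration, and a Lucene version that still has TermContext (pre-8.0, before the rename to TermStates) is assumed.

import java.io.IOException;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermContext;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;

// Hypothetical example class; field "body" and value "lucene" are made up for illustration.
public final class PrebuiltTermStateExample {

  public static TopDocs searchWithPrebuiltState(Directory dir) throws IOException {
    try (IndexReader reader = DirectoryReader.open(dir)) {
      IndexSearcher searcher = new IndexSearcher(reader);
      Term term = new Term("body", "lucene");
      // Build the per-reader term state once against this searcher's top reader context...
      TermContext termState = TermContext.build(searcher.getTopReaderContext(), term);
      // ...and pass it through the expert constructor so createWeight() can reuse it
      // instead of calling TermContext.build() again.
      TermQuery query = new TermQuery(term, termState);
      return searcher.search(query, 10);
    }
  }
}

Pre-building the state this way only pays off when the same query runs repeatedly against the same, unchanged IndexSearcher; if the reader changes, wasBuiltFor() returns false and the state is rebuilt as in the first branch above.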
ConcurrentHashMap<String, DocValuesType> fieldTypes = new ConcurrentHashMap<>();
IndexSearcher searcher = indexReference.getSearcher();
List<LeafReaderContext> leaves = searcher.getTopReaderContext().leaves();
for (LeafReaderContext leafReaderContext : leaves)
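The fragment above stops at the loop header, so the per-leaf body is not shown. A minimal sketch of what such a body might do, assuming the goal is to record the doc-values type declared for each field across all segments (the class and method names below are invented for illustration, and Lucene 5+ is assumed so that getDocValuesType() returns DocValuesType.NONE rather than null):

import java.util.List;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.IndexSearcher;

// Hypothetical helper; only the loop body is new relative to the fragment above.
public final class FieldTypeScanner {

  /** Collects the DocValuesType declared for each field in any segment of the index. */
  public static ConcurrentHashMap<String, DocValuesType> collectFieldTypes(IndexSearcher searcher) {
    ConcurrentHashMap<String, DocValuesType> fieldTypes = new ConcurrentHashMap<>();
    List<LeafReaderContext> leaves = searcher.getTopReaderContext().leaves();
    for (LeafReaderContext leafReaderContext : leaves) {
      // Each leaf (segment) exposes its own FieldInfos; merge them into one map.
      for (FieldInfo fieldInfo : leafReaderContext.reader().getFieldInfos()) {
        if (fieldInfo.getDocValuesType() != DocValuesType.NONE) {
          fieldTypes.putIfAbsent(fieldInfo.name, fieldInfo.getDocValuesType());
        }
      }
    }
    return fieldTypes;
  }
}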
@Override
protected Similarity.SimWeight getStats(IndexSearcher searcher) throws IOException {
  final int[] positions = PhraseQuery.this.getPositions();
  if (positions.length < 2) {
    throw new IllegalStateException("PhraseWeight does not support less than 2 terms, call rewrite first");
  } else if (positions[0] != 0) {
    throw new IllegalStateException("PhraseWeight requires that the first position is 0, call rewrite first");
  }
  final IndexReaderContext context = searcher.getTopReaderContext();
  states = new TermContext[terms.length];
  TermStatistics[] termStats = new TermStatistics[terms.length];
  int termUpTo = 0;
  for (int i = 0; i < terms.length; i++) {
    final Term term = terms[i];
    states[i] = TermContext.build(context, term);
    if (needsScores) {
      TermStatistics termStatistics = searcher.termStatistics(term, states[i]);
      if (termStatistics != null) {
        termStats[termUpTo++] = termStatistics;
      }
    }
  }
  if (termUpTo > 0) {
    return similarity.computeWeight(boost, searcher.collectionStatistics(field),
        ArrayUtil.copyOfSubArray(termStats, 0, termUpTo));
  } else {
    return null; // no terms at all, we won't use similarity
  }
}
SynonymWeight(Query query, IndexSearcher searcher, float boost) throws IOException {
  super(query);
  CollectionStatistics collectionStats = searcher.collectionStatistics(terms[0].field());
  long docFreq = 0;
  long totalTermFreq = 0;
  termContexts = new TermContext[terms.length];
  for (int i = 0; i < termContexts.length; i++) {
    termContexts[i] = TermContext.build(searcher.getTopReaderContext(), terms[i]);
    TermStatistics termStats = searcher.termStatistics(terms[i], termContexts[i]);
    docFreq = Math.max(termStats.docFreq(), docFreq);
    if (termStats.totalTermFreq() == -1) {
      totalTermFreq = -1;
    } else if (totalTermFreq != -1) {
      totalTermFreq += termStats.totalTermFreq();
    }
  }
  TermStatistics pseudoStats = new TermStatistics(null, docFreq, totalTermFreq);
  this.similarity = searcher.getSimilarity(true);
  this.simWeight = similarity.computeWeight(boost, collectionStats, pseudoStats);
}
BooleanQuery.Builder bq = new BooleanQuery.Builder();
for (TermAndState t : matchingTerms) {
  final TermContext termContext = new TermContext(searcher.getTopReaderContext());
  termContext.register(t.state, context.ord, t.docFreq, t.totalTermFreq);
  bq.add(new TermQuery(new Term(t.field, t.term), termContext), Occur.SHOULD);
}
final TermContext termContext = new TermContext(searcher.getTopReaderContext());
termContext.register(t.state, context.ord, t.docFreq, t.totalTermFreq);
bq.add(new TermQuery(new Term(query.field, t.term), termContext), Occur.SHOULD);
return new TermQuery(term).createWeight(searcher, needsScores, boost);

final TermContext termStates = TermContext.build(searcher.getTopReaderContext(), term);
final CollectionStatistics collectionStats = searcher.collectionStatistics(term.field());
final TermStatistics termStats = searcher.termStatistics(term, termStates);
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
  IndexReaderContext context = searcher.getTopReaderContext();
  Map<Integer, TermContext> termStates = new HashMap<>();
  for (Map.Entry<BytesRef, Integer> ent : termToID.entrySet()) {
    if (ent.getKey() != null) {
      termStates.put(ent.getValue(), TermContext.build(context, new Term(field, ent.getKey())));
    }
  }
  return new TermAutomatonWeight(det, searcher, termStates, boost);
}
@Override
public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
  TermContext context = TermContext.build(searcher.getTopReaderContext(), term);
  return new PayloadTermWeight(context, searcher, needsScores ? Collections.singletonMap(term, context) : null);
}
@Override
public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
  final TermContext context;
  final IndexReaderContext topContext = searcher.getTopReaderContext();
  if (termContext == null || termContext.topReaderContext != topContext) {
    context = TermContext.build(topContext, term);
  } else {
    context = termContext;
  }
  return new SpanTermWeight(context, searcher, needsScores ? Collections.singletonMap(term, context) : null);
}
@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
  final IndexReaderContext context = searcher.getTopReaderContext();
  final TermContext termState = TermContext.build(context, term);
  return new ImageHashWeight(searcher, termState);
}
@Override
public Weight createWeight(final IndexSearcher searcher) throws IOException {
  final IndexReaderContext context = searcher.getTopReaderContext();
  final TermContext termState = TermContext.build(context, term);
  return new TermSpanWeight(searcher, termState);
}
@Override
public SpanWeight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
  final TermContext context;
  final IndexReaderContext topContext = searcher.getTopReaderContext();
  if (termContext == null || termContext.wasBuiltFor(topContext) == false) {
    context = TermContext.build(topContext, term);
  } else {
    context = termContext;
  }
  return new SpanTermWeight(context, searcher, needsScores ? Collections.singletonMap(term, context) : null);
}
@Override
public Weight createWeight(final IndexSearcher searcher, final boolean needsScores, final float boost) throws IOException {
  final DocumentFrequencyCorrection.DocumentFrequencyAndTermContext dftc =
      dftcp.getDocumentFrequencyAndTermContext(tqIndex, searcher.getTopReaderContext());
  if (dftc.df < 1) {
    return new NeverMatchWeight();
  }
  return new TermWeight(searcher, needsScores, boost, dftc.termContext);
}
protected Scorer getScorer(final Query query) throws IOException {
  final Weight weight = searcher.createNormalizedWeight(query);
  assertTrue(searcher.getTopReaderContext() instanceof AtomicReaderContext);
  final AtomicReaderContext context = (AtomicReaderContext) searcher.getTopReaderContext();
  Scorer scorer = weight.scorer(context, context.reader().getLiveDocs());
  return scorer;
}
protected Scorer getScorer(final Query query) throws IOException {
  final Weight weight = searcher.createNormalizedWeight(query);
  assertTrue(searcher.getTopReaderContext() instanceof AtomicReaderContext);
  final AtomicReaderContext context = (AtomicReaderContext) searcher.getTopReaderContext();
  return weight.scorer(context, true, true, context.reader().getLiveDocs());
}