/** Total number of documents that matched the query, as reported by the underlying DocList. */
public long getNumFound() {
  return ids.matches();
}
/**
 * Highest score among the matching documents. May be {@code null} when the
 * DocList carries no scores — presumably when scoring was not requested; confirm
 * against the DocList contract.
 */
public Float getMaxScore() {
  return ids.maxScore();
}
/**
 * Serializes a DocList to the codec as a SOLRDOCLST tag: a three-element
 * header (numFound, offset, maxScore) followed by an array of SolrDocuments.
 *
 * Fixes over the previous version: the header list is no longer a raw type,
 * and the "include scores" condition is computed once instead of twice.
 *
 * @param ids   the documents to serialize
 * @param codec the binary codec written to
 * @throws IOException if the codec fails to write
 */
public void writeDocList(DocList ids, JavaBinCodec codec) throws IOException {
  codec.writeTag(JavaBinCodec.SOLRDOCLST);
  // Emit scores only when asked for AND the DocList actually carries them.
  boolean wantScores = includeScore && ids.hasScores();
  // Header: [matches, offset, maxScore]; maxScore is null when scores are absent.
  List<Object> header = new ArrayList<Object>(3);
  header.add((long) ids.matches());
  header.add((long) ids.offset());
  header.add(wantScores ? ids.maxScore() : null);
  codec.writeArray(header);

  int sz = ids.size();
  codec.writeTag(JavaBinCodec.ARR, sz);
  // Lazily resolve searcher/schema from the request the first time they are needed.
  if (searcher == null) searcher = solrQueryRequest.getSearcher();
  if (schema == null) schema = solrQueryRequest.getSchema();

  DocIterator iterator = ids.iterator();
  for (int i = 0; i < sz; i++) {
    int id = iterator.nextDoc();
    Document doc = searcher.doc(id, returnFields);
    SolrDocument sdoc = getDoc(doc);
    if (wantScores) {
      // The score must be read before the iterator advances to the next doc.
      sdoc.addField("score", iterator.score());
    }
    codec.writeSolrDocument(sdoc);
  }
}
// Nothing to re-rank when the result list is empty.
int len = docList.size();
if (len < 1) // do nothing
  return docList;
ParserChunker2MatcherProcessor pos = ParserChunker2MatcherProcessor
    .getInstance();
DocIterator iter = docList.iterator();
float[] syntMatchScoreArr = new float[len];
String requestExpression = req.getParamString();
// (docId, score) pairs accumulated for re-sorting; the loop body continues
// beyond this view, so the pairs' production is not visible here.
List<Pair<Integer, Float>> docIdsScores = new ArrayList<Pair<Integer, Float>> ();
try {
  // NOTE(review): the loop bound re-reads docList.size() each iteration even
  // though `len` above holds the same value — TODO confirm and reuse `len`.
  for (int i=0; i<docList.size(); ++i) {
    int docId = iter.nextDoc();
    docIDsHits[i] = docId;
    float maxScore = docList.maxScore(); // do not change
    int limit = docIdsScores.size();
    int start = 0;
/**
 * Lazily obtains the iterator over the wrapped page, caching it so repeated
 * calls return the same (single-pass) iterator instance.
 */
DocIterator iterator() {
  if (iterator == null) {
    iterator = page.iterator();
  }
  return iterator;
} };
// Invariants relating the trimmed DocList window to the full hit set.
test(results.size() <= limit);
test(results.size() <= results.matches());
test((start==0 && limit>=results.matches()) ? results.size()==results.matches() : true );
test(hits.length() == results.matches());
// subset() over a window the list already covers should return the same object.
DocList rrr2 = results.subset(start,limit);
test(rrr2 == results);
DocIterator iter=results.iterator();
for (int i=0; i<results.size(); i++) {
  // Iteration order must agree with the raw hits, shifted by the offset.
  test( iter.nextDoc() == hits.id(i+results.offset()) );
  // NOTE(review): the statements below appear inside this loop in this view,
  // but likely sit outside it in the full file — confirm brace placement.
  test(results2.size()==results.size() && results2.matches()==results.matches());
  DocList results3 = req.getSearcher().getDocList(query,query,null,start,limit);
  test(results3.size()==results.size() && results3.matches()==results.matches());
  test( both.docList.matches() == both.docSet.size() );
  test( (start==0 && both.docSet.size() <= limit) ? both.docSet.equals(both.docList) : true);
  test( both2.docList.size() == 0 );
  test( both2.docList.matches() == 0 );
  test( both2.docSet.size() == 0 );
  test( res.size() >= results.size() );
  test( res.intersection(filter).equals(both.docSet));
private MLTResult expandQueryAndReExecute(SolrQueryResponse rsp, SolrParams params, int maxDocumentsToMatch, int flags, String q, Query seedQuery, SortSpec sortSpec, List<Query> targetFqFilters, List<Query> mltFqFilters, SolrIndexSearcher searcher, UnsupervisedFeedbackHelper uff, int start, int rows) throws IOException, SyntaxError { boolean includeMatch = params.getBool(UnsupervisedFeedbackParams.MATCH_INCLUDE, true); int matchOffset = params.getInt(UnsupervisedFeedbackParams.MATCH_OFFSET, 0); // Find the base match DocList match = searcher.getDocList(seedQuery, targetFqFilters, null, matchOffset, maxDocumentsToMatch, flags); // only get the first one... if(match.matches() == 0){ throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, String.format("Unsupervised feedback handler was unable to find any documents matching the seed query: '%s'.", q)); } if (includeMatch) { rsp.add("match", match); } // This is an iterator, but we only handle the first match DocIterator iterator = match.iterator(); MLTResult mltResult = null; if (iterator.hasNext()) { // do a MoreLikeThis query for each document in results mltResult = uff.expandQueryAndReExecute(iterator, seedQuery, start, rows, mltFqFilters, flags, sortSpec.getSort()); } return mltResult; }
/** Number of documents contained in this page of results. */
public int getCount() {
  return ids.size();
}
/**
 * Streams every document in {@code ids}, loading only the requested stored
 * fields and optionally attaching each document's score.
 *
 * @param includeScore whether the caller wants scores emitted
 * @param fields       the stored fields to load for each document
 * @throws IOException if reading a document from the index fails
 */
public void writeDocs(boolean includeScore, Set<String> fields) throws IOException {
  SolrIndexSearcher searcher = request.getSearcher();
  DocIterator iterator = ids.iterator();
  int sz = ids.size();
  // Only emit scores when requested AND the DocList actually carries them.
  includeScore = includeScore && ids.hasScores();
  for (int i=0; i<sz; i++) {
    int id = iterator.nextDoc();
    Document doc = searcher.doc(id, fields);
    // 0.0f is a placeholder score; writeDoc ignores it when includeScore is false.
    writeDoc(null, doc, fields, (includeScore ? iterator.score() : 0.0f), includeScore);
  }
} }, fields );
/**
 * Returns a random set of documents from the index. Mainly for testing purposes.
 *
 * Fixes over the previous version: the result list is no longer a raw
 * LinkedList (unchecked warnings), and the empty-index case exits early.
 *
 * @param req the request; supplies filter queries and the "rows" parameter
 * @param rsp the response the random documents are added to
 * @throws IOException if reading from the index fails
 */
private void handleRandomSearch(SolrQueryRequest req, SolrQueryResponse rsp) throws IOException {
  SolrIndexSearcher searcher = req.getSearcher();
  Query query = new MatchAllDocsQuery();
  DocList docList = searcher.getDocList(query, getFilterQueries(req), Sort.RELEVANCE, 0, numberOfCandidateResults, 0);
  if (docList.size() < 1) {
    rsp.add("Error", "No documents in index");
    return;
  }
  // Never ask for more documents than the candidate window contains.
  int paramRows = Math.min(req.getParams().getInt("rows", defaultNumberOfResults), docList.size());
  LinkedList<Document> list = new LinkedList<Document>();
  while (list.size() < paramRows) {
    // Draw a random one-document subset from the candidate window.
    DocList auxList = docList.subset((int) (Math.random() * docList.size()), 1);
    Document doc = null;
    for (DocIterator it = auxList.iterator(); it.hasNext(); ) {
      doc = searcher.doc(it.nextDoc());
    }
    // NOTE(review): Lucene's Document does not override equals(), so contains()
    // is an identity check; duplicate index documents remain possible — confirm intent.
    if (!list.contains(doc)) {
      list.add(doc);
    }
  }
  rsp.addResponse(list);
}
/**
 * Debugs the given query command.
 *
 * @param cmd the query command.
 * @param result the executed query's result (supplies size and match counts).
 */
public void debugQuery(final QueryCommand cmd, final SolrIndexSearcher.QueryResult result) {
  if (!logger.isDebugEnabled()) {
    return;
  }
  // Render the command as "*:* & filter1 & filter2 ..." for the log message.
  final StringBuilder query = new StringBuilder("*:*");
  for (final Query filter : cmd.getFilterList()) {
    query.append(" & ").append(filter);
  }
  logger.debug(createMessage(
      MessageCatalog._00109_SOLR_QUERY,
      query.toString(),
      result.getDocList().size(),
      result.getDocList().matches()));
} }
// Trim the cached/collected superset down to the window the caller asked for.
if ((cmd.getFlags() & GET_SCORES)==0 || superset.hasScores()) {
  out.docList = superset.subset(cmd.getOffset(),cmd.getLen());
  // NOTE(review): the subset call above is duplicated verbatim — the second
  // assignment looks redundant; verify against upstream history before removing.
  out.docList = superset.subset(cmd.getOffset(),cmd.getLen());
} else {
  out.docList = superset.subset(cmd.getOffset(),cmd.getLen());
  // Cache the full superset (not the trimmed subset) so later requests with
  // different offsets/lengths can be served from it — assumes queryResultCache
  // semantics match SolrIndexSearcher's; confirm in the full file.
  if (key != null && superset.size() <= queryResultMaxDocsCached && !qr.isPartialResults()) {
    queryResultCache.put(key, superset);
/**
 * Warms the new searcher's query-result cache by re-executing the query behind
 * an old cache entry against the new searcher.
 *
 * @param newSearcher the searcher being warmed
 * @param newCache    the cache being populated (unused here; filled via getDocListC)
 * @param oldCache    the cache the old entry came from (unused)
 * @param oldKey      the QueryResultKey of the old entry
 * @param oldVal      the old DocList value (consulted only when the window size is <= 1)
 * @return always true, signalling regeneration should continue
 * @throws IOException if the query execution fails
 */
public boolean regenerateItem(SolrIndexSearcher newSearcher, SolrCache newCache, SolrCache oldCache, Object oldKey, Object oldVal) throws IOException {
  QueryResultKey key = (QueryResultKey)oldKey;
  int nDocs=1;
  // request 1 doc and let caching round up to the next window size...
  // unless the window size is <=1, in which case we will pick
  // the minimum of the number of documents requested last time and
  // a reasonable number such as 40.
  // TODO: make more configurable later...
  if (queryResultWindowSize<=1) {
    DocList oldList = (DocList)oldVal;
    int oldnDocs = oldList.offset() + oldList.size();
    // 40 has factors of 2,4,5,10,20
    nDocs = Math.min(oldnDocs,40);
  }
  // Skip the query-result cache lookup: we are regenerating it.
  int flags=NO_CHECK_QCACHE | key.nc_flags;
  QueryCommand qc = new QueryCommand();
  qc.setQuery(key.query)
    .setFilterList(key.filters)
    .setSort(key.sort)
    .setLen(nDocs)
    .setSupersetMaxDoc(nDocs)
    .setFlags(flags);
  QueryResult qr = new QueryResult();
  // Executing the command populates the new searcher's caches as a side effect.
  newSearcher.getDocListC(qr,qc);
  return true;
} }
/**
 * Derives pagination state from a request/response pair: total results found,
 * start offset, results per page, page count, and the current page number.
 *
 * @param request  supplies the "rows" parameter (results per page)
 * @param response its payload must be a DocSlice, ResultContext or SolrDocumentList
 */
public PageTool(SolrQueryRequest request, SolrQueryResponse response) {
  String rowsParam = request.getParams().get("rows");
  if (rowsParam != null) {
    results_per_page = Integer.parseInt(rowsParam);
  }
  //TODO: Handle group by results
  Object docs = response.getResponse();
  if (docs instanceof DocSlice) {
    DocSlice slice = (DocSlice) docs;
    results_found = slice.matches();
    start = slice.offset();
  } else if (docs instanceof ResultContext) {
    DocList docList = ((ResultContext) docs).getDocList();
    results_found = docList.matches();
    start = docList.offset();
  } else if (docs instanceof SolrDocumentList) {
    SolrDocumentList docList = (SolrDocumentList) docs;
    results_found = docList.getNumFound();
    start = docList.getStart();
  } else if (docs != null) {
    // instanceof is false for null, so a null payload falls through silently,
    // exactly as in the original nested-null-check structure.
    throw new SolrException(SolrException.ErrorCode.UNKNOWN,
        "Unknown response type "+docs+". Expected one of DocSlice, ResultContext or SolrDocumentList");
  }
  page_count = (int) Math.ceil(results_found / (double) results_per_page);
  current_page_number = (int) Math.ceil(start / (double) results_per_page) + (page_count > 0 ? 1 : 0);
}
/** Offset of the first document in this page, as reported by the DocList. */
public long getStart() {
  return ids.offset();
}
// Nothing to re-rank when the result list is empty.
int len = docList.size();
if (len < 1) // do nothing
  return docList;
ParserChunker2MatcherProcessor pos = ParserChunker2MatcherProcessor
    .getInstance();
DocIterator iter = docList.iterator();
float[] syntMatchScoreArr = new float[len];
String requestExpression = req.getParamString();
// (docId, score) pairs accumulated for re-sorting; the loop body continues
// beyond this view, so the pairs' production is not visible here.
List<Pair<Integer, Float>> docIdsScores = new ArrayList<Pair<Integer, Float>> ();
try {
  // NOTE(review): the loop bound re-reads docList.size() each iteration even
  // though `len` above holds the same value — TODO confirm and reuse `len`.
  for (int i=0; i<docList.size(); ++i) {
    int docId = iter.nextDoc();
    docIDsHits[i] = docId;
    float maxScore = docList.maxScore(); // do not change
    int limit = docIdsScores.size();
    int start = 0;
/**
 * Takes a list of docs (the doc ids actually) and a set of fields to load,
 * and reads them into an array of Documents.
 *
 * @param docs   destination array; one Document is stored per slot
 * @param ids    supplies the doc ids, consumed in iteration order
 * @param fields the stored fields to load for each document
 * @throws IOException if a document cannot be read from the index
 */
public void readDocs(Document[] docs, DocList ids, Set<String> fields) throws IOException {
  final DocIterator docIds = ids.iterator();
  int slot = 0;
  while (slot < docs.length) {
    docs[slot] = doc(docIds.nextDoc(), fields);
    slot++;
  }
}