// NOTE(review): this line was garbled in the source — it contained an
// unreachable `throw new IOException("Missing core name")` immediately after
// `return false`, plus a dangling `.getWrappedReader();` statement.
// Reconstructed to the apparent intent: bail out unless the searcher's wrapped
// reader is a lucandra.IndexReader, then unwrap it for use below.
SolrIndexReader solrReader = (SolrIndexReader) rb.req.getSearcher().getIndexReader();
if (!(solrReader.getWrappedReader() instanceof lucandra.IndexReader)) return false;
IndexReader reader = (IndexReader) solrReader.getWrappedReader();
// Load only the first matched document, restricting stored fields to those
// accepted by 'selector'.
rb.req.getSearcher().getReader().document(docIds.get(0), selector);
/**
 * Returns a snapshot of this searcher's statistics (identity, caching flag,
 * document counts, reader/version details and timing info) for the info
 * registry / stats page. Entries are emitted in a fixed order.
 */
public NamedList getStatistics() {
  NamedList stats = new SimpleOrderedMap();
  stats.add("searcherName", name);
  stats.add("caching", cachingEnabled);
  stats.add("numDocs", reader.numDocs());
  stats.add("maxDoc", reader.maxDoc());
  stats.add("reader", reader.toString());
  stats.add("readerDir", reader.directory());
  stats.add("indexVersion", reader.getVersion());
  stats.add("openedAt", new Date(openTime));
  if (registerTime != 0) {
    stats.add("registeredAt", new Date(registerTime));
  }
  stats.add("warmupTime", warmupTime);
  return stats;
}
/**
 * Recursively records, for every reader in the given SolrIndexReader tree,
 * a mapping from the wrapped (inner) IndexReader to its SolrReaderInfo.
 * A null root is a no-op.
 */
private static void buildInfoMap(SolrIndexReader other, HashMap<IndexReader, SolrReaderInfo> map) {
  if (other == null) {
    return;
  }
  map.put(other.getWrappedReader(), other.info);
  SolrIndexReader[] subs = other.getSequentialSubReaders();
  if (subs != null) {
    for (SolrIndexReader sub : subs) {
      buildInfoMap(sub, map);
    }
  }
}
/** * Given a field name and an IndexReader, get a random hash seed. * Using dynamic fields, you can force the random order to change */ private static int getSeed(String fieldName, IndexReader r) { SolrIndexReader top = (SolrIndexReader)r; int base=0; while (top.getParent() != null) { base += top.getBase(); top = top.getParent(); } // calling getVersion() on a segment will currently give you a null pointer exception, so // we use the top-level reader. return fieldName.hashCode() + base + (int)top.getVersion(); }
/**
 * Recursively attaches a SolrReaderInfo to every reader in the target tree,
 * reusing an entry from the map when the wrapped reader already has one and
 * creating a fresh info object otherwise.
 */
private static void setInfo(SolrIndexReader target, HashMap<IndexReader, SolrReaderInfo> map) {
  SolrReaderInfo existing = map.get(target.getWrappedReader());
  target.info = (existing != null) ? existing : new SolrReaderInfo(target.getWrappedReader());
  SolrIndexReader[] subs = target.getSequentialSubReaders();
  if (subs == null) {
    return;
  }
  for (SolrIndexReader sub : subs) {
    setInfo(sub, map);
  }
}
/** Recursively wrap an IndexReader in SolrIndexReader instances.
 * @param in the reader to wrap
 * @param parent the parent, if any (null if none)
 * @param base the docid offset in the parent (0 if top level)
 */
public SolrIndexReader(IndexReader in, SolrIndexReader parent, int base) {
  super(in);
  assert(!(in instanceof SolrIndexReader));
  this.parent = parent;
  this.base = base;
  IndexReader subs[] = in.getSequentialSubReaders();
  if (subs != null) {
    // Composite reader: wrap each sub-reader, recording its docid offset
    // within this reader, and count the total number of leaf readers.
    subReaders = new SolrIndexReader[subs.length];
    int numLeaves = subs.length;
    // NOTE(review): leafOffsets is sized to the number of DIRECT sub-readers;
    // numLeaves grows below but leafOffsets is not resized — presumably only
    // direct-child offsets are needed here. TODO confirm against getLeaves().
    leafOffsets = new int[numLeaves];
    int b=0;
    for (int i=0; i<subReaders.length; i++) {
      SolrIndexReader sir = subReaders[i] = new SolrIndexReader(subs[i], this, b);
      leafOffsets[i] = b;
      b += sir.maxDoc();
      IndexReader subLeaves[] = sir.leafReaders;
      // Replace this child's single count with its own leaf count.
      numLeaves += subLeaves.length - 1; // subtract 1 for the parent
    }
    leafReaders = getLeaves(numLeaves);
  } else {
    // Atomic reader: this reader is its own single leaf at offset 0.
    subReaders = null;
    leafReaders = new SolrIndexReader[]{this};
    leafOffsets = zeroIntArray;
  }
}
private static SolrIndexReader wrap(IndexReader r) { SolrIndexReader sir; // wrap the reader if (!(r instanceof SolrIndexReader)) { sir = new SolrIndexReader(r, null, 0); sir.associateInfo(null); } else { sir = (SolrIndexReader)r; } return sir; }
/** Copies SolrReaderInfo instances from the source to this SolrIndexReader */ public void associateInfo(SolrIndexReader source) { // seemed safer to not mess with reopen() but simply set // one set of caches from another reader tree. HashMap<IndexReader, SolrReaderInfo> map = new HashMap<IndexReader, SolrReaderInfo>(); buildInfoMap(source, map); setInfo(this, map); }
// Delegate document-frequency lookup straight to the wrapped reader.
@Override public int docFreq(Term t) throws IOException { ensureOpen(); return in.docFreq(t); }
/**
 * Calculates a tag for the ETag header.
 *
 * @param solrReq the current Solr request
 * @return a tag derived from the core's configured etag seed and the current
 *         index version
 */
public static String calcEtag(final SolrQueryRequest solrReq) {
  final SolrCore core = solrReq.getCore();
  final long indexVersion = solrReq.getSearcher().getReader().getVersion();
  EtagCacheVal cached = etagCoreCache.get(core);
  if (cached == null) {
    // First request for this core: build its ETag cache from the configured seed.
    cached = new EtagCacheVal(core.getSolrConfig().getHttpCachingConfig().getEtagSeed());
    etagCoreCache.put(core, cached);
  }
  return cached.calcEtag(indexVersion);
}
/** * Calculate the appropriate last-modified time for Solr relative the current request. * * @param solrReq * @return the timestamp to use as a last modified time. */ public static long calcLastModified(final SolrQueryRequest solrReq) { final SolrCore core = solrReq.getCore(); final SolrIndexSearcher searcher = solrReq.getSearcher(); final LastModFrom lastModFrom = core.getSolrConfig().getHttpCachingConfig().getLastModFrom(); long lastMod; try { // assume default, change if needed (getOpenTime() should be fast) lastMod = LastModFrom.DIRLASTMOD == lastModFrom ? IndexReader.lastModified(searcher.getReader().directory()) : searcher.getOpenTime(); } catch (IOException e) { // we're pretty freaking screwed if this happens throw new SolrException(ErrorCode.SERVER_ERROR, e); } // Get the time where the searcher has been opened // We get rid of the milliseconds because the HTTP header has only // second granularity return lastMod - (lastMod % 1000L); }
// Deletes each collected hit by its internal docid; 'deleted' counts successes.
public void collect(int doc, float score) {
  try {
    searcher.getReader().deleteDocument(doc);
    deleted++;
  } catch (IOException e) {
    // don't try to close the searcher on failure for now...
    // try { closeSearcher(); } catch (Exception ee) { SolrException.log(log,ee); }
    throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,"Error deleting doc# "+doc,e,false);
  }
}
}
/** * Free's resources associated with this searcher. * * In particular, the underlying reader and any cache's in use are closed. */ public void close() throws IOException { if (cachingEnabled) { StringBuilder sb = new StringBuilder(); sb.append("Closing ").append(name); for (SolrCache cache : cacheList) { sb.append("\n\t"); sb.append(cache); } log.info(sb.toString()); } else { log.debug("Closing " + name); } core.getInfoRegistry().remove(name); // super.close(); // can't use super.close() since it just calls reader.close() and that may only be called once // per reader (even if incRef() was previously called). if (closeReader) reader.decRef(); for (SolrCache cache : cacheList) { cache.close(); } }
/**
 * Reopens the wrapped reader. When nothing changed, this same wrapper is
 * returned; otherwise the new reader is wrapped and inherits this tree's
 * SolrReaderInfo/caches.
 */
@Override
public SolrIndexReader reopen(boolean openReadOnly) throws IOException {
  IndexReader reopened = in.reopen(openReadOnly);
  if (reopened == in) {
    // Underlying index unchanged — keep using this wrapper.
    return this;
  }
  SolrIndexReader wrapped = new SolrIndexReader(reopened, null, 0);
  wrapped.associateInfo(this);
  return wrapped;
}
// Delegate term-positions enumeration straight to the wrapped reader.
@Override public TermPositions termPositions() throws IOException { ensureOpen(); return in.termPositions(); }
// Delegate unpositioned term-docs enumeration straight to the wrapped reader.
@Override public TermDocs termDocs() throws IOException { ensureOpen(); return in.termDocs(); }
// Delegate term-docs enumeration for a specific term straight to the wrapped reader.
@Override public TermDocs termDocs(Term term) throws IOException { ensureOpen(); return in.termDocs(term); }