/**
 * Create a new FilterDirectoryReader that filters a passed in DirectoryReader,
 * using the supplied SubReaderWrapper to wrap its subreaders.
 *
 * @param in the DirectoryReader to filter
 * @param wrapper the SubReaderWrapper to use to wrap subreaders
 * @throws IOException if an I/O error occurs while wrapping the subreaders
 */
public FilterDirectoryReader(DirectoryReader in, SubReaderWrapper wrapper) throws IOException {
    // Delegate to the base reader with the wrapped per-segment subreaders,
    // keeping a reference to the original reader for later delegation.
    super(in.directory(), wrapper.wrap(in.getSequentialSubReaders()));
    this.in = in;
}
// Collect basic index statistics into a JSON object.
final JSONObject result = new JSONObject();
result.put("current", reader.isCurrent());                        // whether the reader reflects the latest commit
result.put("disk_size", Utils.directorySize(reader.directory())); // total on-disk size of the index directory
result.put("doc_count", reader.numDocs());                        // live (non-deleted) document count
result.put("doc_del_count", reader.numDeletedDocs());             // deleted document count
/**
 * Returns the underlying Directory of the given reader, or {@code null} when
 * the reader is not a DirectoryReader (only DirectoryReader exposes one).
 */
private static Directory getDirectory(IndexReader reader) {
    if (!(reader instanceof DirectoryReader)) {
        return null;
    }
    return ((DirectoryReader) reader).directory();
}
/**
 * Returns the underlying Directory of the given reader, or {@code null} when
 * the reader is not a DirectoryReader (only DirectoryReader exposes one).
 */
private static Directory getDirectory(IndexReader reader) {
    if (!(reader instanceof DirectoryReader)) {
        return null;
    }
    return ((DirectoryReader) reader).directory();
}
/**
 * Closes the provided reader, logging (but not propagating) any I/O failure.
 *
 * <p>Fix: the original only logged close failures for DirectoryReader
 * instances, silently swallowing the exception for every other reader type.
 * All failures are now logged.
 *
 * @param reader the reader to close; {@code null} is a no-op
 */
private void closeReader(IndexReader reader) {
    if (reader == null) {
        return;
    }
    if (log.isDebugEnabled() && reader instanceof DirectoryReader) {
        log.debug("Closing index reader: " + ((DirectoryReader) reader).directory());
    }
    try {
        reader.close();
    } catch (IOException e) {
        if (reader instanceof DirectoryReader) {
            log.error("Error closing reader: " + ((DirectoryReader) reader).directory(), e);
        } else {
            // Previously this failure was dropped entirely; log it with the reader itself.
            log.error("Error closing reader: " + reader, e);
        }
    }
}
/**
 * Create a new FilterDirectoryReader that filters a passed in DirectoryReader,
 * using the supplied SubReaderWrapper to wrap its subreaders.
 *
 * @param in the DirectoryReader to filter
 * @param wrapper the SubReaderWrapper to use to wrap subreaders
 * @throws IOException if an I/O error occurs while wrapping the subreaders
 */
public FilterDirectoryReader(DirectoryReader in, SubReaderWrapper wrapper) throws IOException {
    // Delegate to the base reader with the wrapped per-segment subreaders,
    // keeping a reference to the original reader for later delegation.
    super(in.directory(), wrapper.wrap(in.getSequentialSubReaders()));
    this.in = in;
}
/**
 * Create a new FilterDirectoryReader that filters a passed in DirectoryReader,
 * using the supplied SubReaderWrapper to wrap its subreaders.
 *
 * @param in the DirectoryReader to filter
 * @param wrapper the SubReaderWrapper to use to wrap subreaders
 * @throws IOException if an I/O error occurs while wrapping the subreaders
 */
public FilterDirectoryReader(DirectoryReader in, SubReaderWrapper wrapper) throws IOException {
    // Delegate to the base reader with the wrapped per-segment subreaders,
    // keeping a reference to the original reader for later delegation.
    super(in.directory(), wrapper.wrap(in.getSequentialSubReaders()));
    this.in = in;
}
/**
 * Builds a Skywalker over the given reader. Directory-backed details
 * (directory implementation class, index version, format) are only available
 * when the reader is a DirectoryReader; otherwise placeholder values remain.
 */
public Skywalker(IndexReader reader) {
    this.reader = reader;
    // Defaults for readers that do not expose an underlying Directory.
    this.dirImpl = "N/A";
    this.version = "-1";
    this.formatDetails = new FormatDetails("N/A", "N/A", "N/A");
    if (reader instanceof DirectoryReader) {
        final DirectoryReader directoryReader = (DirectoryReader) reader;
        final Directory dir = directoryReader.directory();
        this.dirImpl = dir.getClass().getName();
        this.version = Long.toString(directoryReader.getVersion());
        this.formatDetails = getIndexFormat(dir);
    }
}
/**
 * Create a new FilterDirectoryReader that filters a passed in DirectoryReader,
 * using the supplied SubReaderWrapper to wrap its subreaders.
 *
 * @param in the DirectoryReader to filter
 * @param wrapper the SubReaderWrapper to use to wrap subreaders
 * @throws IOException if an I/O error occurs while wrapping the subreaders
 */
public FilterDirectoryReader(DirectoryReader in, SubReaderWrapper wrapper) throws IOException {
    // Delegate to the base reader with the wrapped per-segment subreaders,
    // keeping a reference to the original reader for later delegation.
    super(in.directory(), wrapper.wrap(in.getSequentialSubReaders()));
    this.in = in;
}
public void doOptimize() { IndexWriter writer = null; boolean createIndex = false; try { writer = new IndexWriter(reader.directory(), config); LogMergePolicy lmp =new LogDocMergePolicy(); lmp.setMergeFactor(this.mergeFactor); config.setMergePolicy(lmp); long timeBefore = System.currentTimeMillis(); //TODO http://blog.trifork.com/2011/11/21/simon-says-optimize-is-bad-for-you/ //writer.optimize(); long timeAfter = System.currentTimeMillis(); double numSeconds = ((timeAfter - timeBefore) * 1.0) / DOUBLE; LOG.log(Level.INFO, "LuceneCatalog: [" + this.catalogPath + "] optimized: took: [" + numSeconds + "] seconds"); } catch (IOException e) { LOG.log(Level.WARNING, "Unable to optimize lucene index: [" + catalogPath + "]: Message: " + e.getMessage()); } finally { try { writer.close(); } catch (Exception ignore) { } } }
public void doOptimize() { IndexWriter writer = null; boolean createIndex = false; try { writer = new IndexWriter(reader.directory(), config); LogMergePolicy lmp =new LogDocMergePolicy(); lmp.setMergeFactor(this.mergeFactor); config.setMergePolicy(lmp); long timeBefore = System.currentTimeMillis(); //TODO http://blog.trifork.com/2011/11/21/simon-says-optimize-is-bad-for-you/ //writer.optimize(); long timeAfter = System.currentTimeMillis(); double numSeconds = ((timeAfter - timeBefore) * 1.0) / DOUBLE; LOG.log(Level.INFO, "LuceneCatalog: [" + this.catalogPath + "] optimized: took: [" + numSeconds + "] seconds"); } catch (IOException e) { LOG.log(Level.WARNING, "Unable to optimize lucene index: [" + catalogPath + "]: Message: " + e.getMessage()); } finally { try { writer.close(); } catch (Exception ignore) { } } }
// Load the segment metadata of the most recent commit from the reader's directory.
SegmentInfos sis = SegmentInfos.readLatestCommit(dr.directory()); // read infos from dir
for (SegmentCommitInfo commitInfo : sis) {
    // NOTE(review): unclear whether SegmentInfos can yield null entries here — the
    // null check may be defensive; loop body continues beyond this fragment.
    if (commitInfo != null) {
/**
 * Prints the name and size of every file in the searcher's index directory.
 *
 * <p>Fix: {@code releaseIndexNode()} was previously skipped whenever
 * {@code listAll()} or {@code fileLength()} threw; it now runs in a
 * {@code finally} block so the node is always released.
 *
 * @throws IOException if listing the directory or reading a file length fails
 */
private void dumpIndexDir() throws IOException {
    try {
        Directory dir = ((DirectoryReader) getSearcher().getIndexReader()).directory();
        System.out.println("================");
        String[] fileNames = dir.listAll();
        Arrays.sort(fileNames);
        for (String file : fileNames) {
            System.out.printf("%s - %d %n", file, dir.fileLength(file));
        }
    } finally {
        // Always release the node acquired via getSearcher(), even on failure.
        releaseIndexNode();
    }
}
time = System.currentTimeMillis();
// Per-cluster counters for the histogram pass that follows.
int[] tmpHist = new int[numClusters];
// NOTE(review): assumes reader is a DirectoryReader — confirm at the call site.
// The 'true' flag and 256d presumably mean "create/overwrite index" and a RAM
// buffer size — verify against LuceneUtils.createIndexWriter.
IndexWriter iw = LuceneUtils.createIndexWriter(((DirectoryReader) reader).directory(), true, LuceneUtils.AnalyzerType.WhitespaceAnalyzer, 256d);
if (pm != null) {
    // set to 50 of 100 after clustering.
    pm.setProgress(50);
time = System.currentTimeMillis();
// Per-cluster counters for the histogram pass that follows.
int[] tmpHist = new int[numClusters];
// NOTE(review): assumes reader is a DirectoryReader — confirm at the call site.
// The 'true' flag and 256d presumably mean "create/overwrite index" and a RAM
// buffer size — verify against LuceneUtils.createIndexWriter.
IndexWriter iw = LuceneUtils.createIndexWriter(((DirectoryReader) reader).directory(), true, LuceneUtils.AnalyzerType.WhitespaceAnalyzer, 256d);
if (pm != null) {
    // set to 50 of 100 after clustering.
    pm.setProgress(50);
// NOTE(review): assumes reader is a DirectoryReader — confirm at the call site.
// The 'false' flag presumably opens the existing index without recreating it —
// verify against LuceneUtils.createIndexWriter.
IndexWriter iw = LuceneUtils.createIndexWriter(((DirectoryReader) reader).directory(), false, LuceneUtils.AnalyzerType.WhitespaceAnalyzer);
// Iterate over every document slot up to maxDoc (may include deleted docs).
for (int i = 0; i < reader.maxDoc(); i++) {
// NOTE(review): assumes reader is a DirectoryReader — confirm at the call site.
// The 'true' flag presumably creates/overwrites the index — verify against
// LuceneUtils.createIndexWriter.
IndexWriter iw = LuceneUtils.createIndexWriter(((DirectoryReader) reader).directory(), true, LuceneUtils.AnalyzerType.WhitespaceAnalyzer);
// Iterate over every document slot up to maxDoc (may include deleted docs).
for (int i = 0; i < reader.maxDoc(); i++) {