// NOTE(review): truncated fragment — when the reader has deletions it gathers the
// per-segment leaf contexts; the else-branch and the loop over `leaves` are not visible here.
if (reader.hasDeletions()) { final List<LeafReaderContext> leaves = reader.leaves(); final int size = leaves.size();
// NOTE(review): truncated fragment — fast path for a TermQuery when the reader has no
// deletions (term stats are then exact); the counting loop that uses `count` is not visible.
} else if (query instanceof TermQuery && reader.hasDeletions() == false) { Term term = ((TermQuery) query).getTerm(); int count = 0;
// Rejects readers that contain deleted documents: the cache presumably maps doc IDs
// densely, so the index must be force-merged (IndexWriter.forceMerge(0)) first.
// NOTE(review): truncated fragment — the rest of init() is not visible here.
protected void init(IndexReader reader) { this.reader = reader; if (reader.hasDeletions()) { throw new UnsupportedOperationException("The index has to be optimized first to be cached! Use IndexWriter.forceMerge(0) to do this.");
// Rejects readers that contain deleted documents: the cache presumably maps doc IDs
// densely, so the index must be force-merged (IndexWriter.forceMerge(0)) first.
// NOTE(review): truncated fragment — the rest of init() is not visible here.
protected void init(IndexReader reader) { this.reader = reader; if (reader.hasDeletions()) { throw new UnsupportedOperationException("The index has to be optimized first to be cached! Use IndexWriter.forceMerge(0) to do this.");
/** * Returns true if a {@link SortedDocsProducer} should be used to optimize the execution. */ protected boolean checkIfSortedDocsIsApplicable(IndexReader reader, MappedFieldType fieldType) { if (fieldType == null || missing != null || (missingBucket && afterValue == null) || fieldType.indexOptions() == IndexOptions.NONE || // inverse of the natural order reverseMul == -1) { return false; } if (reader.hasDeletions() && (reader.numDocs() == 0 || (double) reader.numDocs() / (double) reader.maxDoc() < 0.5)) { // do not use the index if it has more than 50% of deleted docs return false; } return true; } }
// NOTE(review): truncated fragment — exact-class check (no TermQuery subclasses) plus the
// no-deletions guard selects a statistics-based fast path; the counting code is not visible.
} else if (query.getClass() == TermQuery.class && reader.hasDeletions() == false) { final Term term = ((TermQuery) query).getTerm(); int count = 0;
// Skip deleted documents. The hasDeletions() guard matters because the live-docs
// bitset is null when the reader has no deletions.
if (reader.hasDeletions() && !liveDocs.get(i)) continue; // if it is deleted, just ignore it.
// NOTE(review): four identical truncated fragments — each counts duplicates over doc IDs
// [0, docs), skipping deleted documents. If `docs` was taken from reader.numDocs() rather
// than reader.maxDoc(), IDs above numDocs are silently missed when deletions exist — verify.
int numDuplicates = 0; for (int i = 0; i < docs; i++) { if (reader.hasDeletions() && !liveDocs.get(i)) continue; // if it is deleted, just ignore it.
int numDuplicates = 0; for (int i = 0; i < docs; i++) { if (reader.hasDeletions() && !liveDocs.get(i)) continue; // if it is deleted, just ignore it.
int numDuplicates = 0; for (int i = 0; i < docs; i++) { if (reader.hasDeletions() && !liveDocs.get(i)) continue; // if it is deleted, just ignore it.
int numDuplicates = 0; for (int i = 0; i < docs; i++) { if (reader.hasDeletions() && !liveDocs.get(i)) continue; // if it is deleted, just ignore it.
protected void init() { // put all respective features into an in-memory cache ... if (isCaching && reader != null) { Bits liveDocs = MultiFields.getLiveDocs(reader); int docs = reader.numDocs(); featureCache = new LinkedHashMap<Integer, byte[]>(docs); try { Document d; for (int i = 0; i < docs; i++) { if (!(reader.hasDeletions() && !liveDocs.get(i))) { d = reader.document(i); if (d.getField(fieldName) != null) { cachedInstance.setByteArrayRepresentation(d.getField(fieldName).binaryValue().bytes, d.getField(fieldName).binaryValue().offset, d.getField(fieldName).binaryValue().length); // featureCache.put(i, new SearchItem(cachedInstance.getByteArrayRepresentation(), new SimpleResult(-1d, i, d.getValues(DocumentBuilder.FIELD_NAME_IDENTIFIER)[0]))); // featureCache.put(i, new SearchItem(i, cachedInstance.getByteArrayRepresentation(), d.getValues(DocumentBuilder.FIELD_NAME_IDENTIFIER)[0])); featureCache.put(i, cachedInstance.getByteArrayRepresentation()); } } } } catch (IOException e) { e.printStackTrace(); } } }
protected void init() { // put all respective features into an in-memory cache ... if (isCaching && reader != null) { Bits liveDocs = MultiFields.getLiveDocs(reader); int docs = reader.numDocs(); featureCache = new LinkedHashMap<Integer, byte[]>(docs); try { Document d; for (int i = 0; i < docs; i++) { if (!(reader.hasDeletions() && !liveDocs.get(i))) { d = reader.document(i); if (d.getField(fieldName) != null) { cachedInstance.setByteArrayRepresentation(d.getField(fieldName).binaryValue().bytes, d.getField(fieldName).binaryValue().offset, d.getField(fieldName).binaryValue().length); // featureCache.put(i, new SearchItem(cachedInstance.getByteArrayRepresentation(), new SimpleResult(-1d, i, d.getValues(DocumentBuilder.FIELD_NAME_IDENTIFIER)[0]))); // featureCache.put(i, new SearchItem(i, cachedInstance.getByteArrayRepresentation(), d.getValues(DocumentBuilder.FIELD_NAME_IDENTIFIER)[0])); featureCache.put(i, cachedInstance.getByteArrayRepresentation()); } } } } catch (IOException e) { e.printStackTrace(); } } }
protected void init() { // put all respective features into an in-memory cache ... if (isCaching && reader != null) { Bits liveDocs = MultiFields.getLiveDocs(reader); int docs = reader.numDocs(); featureCache = new LinkedHashMap<Integer, SearchItemForEvaluation>(docs); try { Document d; for (int i = 0; i < docs; i++) { if (!(reader.hasDeletions() && !liveDocs.get(i))) { d = reader.document(i); cachedInstance.setByteArrayRepresentation(d.getField(fieldName).binaryValue().bytes, d.getField(fieldName).binaryValue().offset, d.getField(fieldName).binaryValue().length); featureCache.put(i, new SearchItemForEvaluation(cachedInstance.getByteArrayRepresentation(), new SimpleResultForEvaluation(-1d, i, d.getValues(DocumentBuilder.FIELD_NAME_IDENTIFIER)[0]))); } } } catch (IOException e) { e.printStackTrace(); } } }
protected void init() { // put all respective features into an in-memory cache ... if (isCaching && reader != null) { Bits liveDocs = MultiFields.getLiveDocs(reader); int docs = reader.numDocs(); featureCache = new LinkedHashMap<Integer, SearchItemForEvaluation>(docs); try { Document d; for (int i = 0; i < docs; i++) { if (!(reader.hasDeletions() && !liveDocs.get(i))) { d = reader.document(i); cachedInstance.setByteArrayRepresentation(d.getField(fieldName).binaryValue().bytes, d.getField(fieldName).binaryValue().offset, d.getField(fieldName).binaryValue().length); featureCache.put(i, new SearchItemForEvaluation(cachedInstance.getByteArrayRepresentation(), new SimpleResultForEvaluation(-1d, i, d.getValues(DocumentBuilder.FIELD_NAME_IDENTIFIER)[0]))); } } } catch (IOException e) { e.printStackTrace(); } } }
private void testSearchSpeed(Class<? extends GlobalFeature> featureClass) throws IOException { ParallelIndexer parallelIndexer = new ParallelIndexer(DocumentBuilder.NUM_OF_THREADS, indexPath, testExtensive, true); parallelIndexer.addExtractor(featureClass); parallelIndexer.run(); IndexReader reader = DirectoryReader.open(new RAMDirectory(FSDirectory.open(Paths.get(indexPath)), IOContext.READONCE)); Bits liveDocs = MultiFields.getLiveDocs(reader); double queryCount = 0d; ImageSearcher searcher = new GenericFastImageSearcher(100, featureClass); long ms = System.currentTimeMillis(); String fileName; Document queryDoc; ImageSearchHits hits; for (int i = 0; i < reader.maxDoc(); i++) { if (reader.hasDeletions() && !liveDocs.get(i)) continue; // if it is deleted, just ignore it. fileName = getIDfromFileName(reader.document(i).getValues(DocumentBuilder.FIELD_NAME_IDENTIFIER)[0]); if (queries.keySet().contains(fileName)) { queryCount += 1d; // ok, we've got a query here for a document ... queryDoc = reader.document(i); hits = searcher.search(queryDoc, reader); } } ms = System.currentTimeMillis() - ms; System.out.printf("%s \t %3.1f \n", featureClass.getName().substring(featureClass.getName().lastIndexOf('.') + 1), (double) ms / queryCount); }
// NOTE(review): two identical truncated fragments (`i`, `docs`, `d` are defined outside
// this view). The while loop advances `counter` past leading deleted docs, but it calls
// liveDocs.get(counter) BEFORE checking counter < docs — the operands should be swapped
// so the bounds check runs first, otherwise get() can read past the end of the bitset.
try { int counter = 0; while ((reader.hasDeletions() && !liveDocs.get(counter))&&(counter<docs)){ counter++; if (!(reader.hasDeletions() && !liveDocs.get(i))) { d = reader.document(i); if (d.getField(fieldName) !=null) {
try { int counter = 0; while ((reader.hasDeletions() && !liveDocs.get(counter))&&(counter<docs)){ counter++; if (!(reader.hasDeletions() && !liveDocs.get(i))) { d = reader.document(i); if (d.getField(fieldName) !=null) {
// NOTE(review): two identical truncated fragments (`i`, `docs`, `d` are defined outside
// this view). Same ordering issue as the guarded variant: liveDocs.get(counter) is
// evaluated BEFORE counter < docs, risking an out-of-range read. Additionally, unlike
// the sibling fragments, d.getField(fieldName) is dereferenced without a null check and
// will NPE for documents missing the feature field — verify against callers.
try { int counter = 0; while ((reader.hasDeletions() && !liveDocs.get(counter))&&(counter<docs)){ counter++; if (!(reader.hasDeletions() && !liveDocs.get(i))) { d = reader.document(i); cachedInstance.setByteArrayRepresentation(d.getField(fieldName).binaryValue().bytes, d.getField(fieldName).binaryValue().offset, d.getField(fieldName).binaryValue().length);
try { int counter = 0; while ((reader.hasDeletions() && !liveDocs.get(counter))&&(counter<docs)){ counter++; if (!(reader.hasDeletions() && !liveDocs.get(i))) { d = reader.document(i); cachedInstance.setByteArrayRepresentation(d.getField(fieldName).binaryValue().bytes, d.getField(fieldName).binaryValue().offset, d.getField(fieldName).binaryValue().length);