/**
 * Asserts that the doc in the index operation really doesn't exist
 */
private boolean assertDocDoesNotExist(final Index index, final boolean allowDeleted) throws IOException {
    // NOTE this uses direct access to the version map since we are in the assertion code where we maintain a secondary
    // map in the version map such that we don't need to refresh if we are unsafe;
    final VersionValue versionValue = versionMap.getVersionForAssert(index.uid().bytes());
    if (versionValue != null) {
        if (versionValue.isDelete() == false || allowDeleted == false) {
            throw new AssertionError("doc [" + index.type() + "][" + index.id() + "] exists in version map (version " + versionValue + ")");
        }
    } else {
        try (Searcher searcher = acquireSearcher("assert doc doesn't exist", SearcherScope.INTERNAL)) {
            final long docsWithId = searcher.searcher().count(new TermQuery(index.uid()));
            if (docsWithId > 0) {
                throw new AssertionError("doc [" + index.type() + "][" + index.id() + "] exists [" + docsWithId + "] times in index");
            }
        }
    }
    return true;
}
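The method always returns true so it can be wired into a Java assert statement and skipped entirely when assertions are disabled. A minimal sketch of a hypothetical call site (the arguments shown are placeholders, not taken from the snippet above):

    // Hypothetical call site: only evaluated when the JVM runs with -ea (assertions enabled).
    assert assertDocDoesNotExist(index, /* allowDeleted */ true);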
public SignificantTextAggregatorFactory(String name, IncludeExclude includeExclude,
        QueryBuilder filterBuilder, TermsAggregator.BucketCountThresholds bucketCountThresholds,
        SignificanceHeuristic significanceHeuristic, SearchContext context, AggregatorFactory<?> parent,
        AggregatorFactories.Builder subFactoriesBuilder, String fieldName, String[] sourceFieldNames,
        boolean filterDuplicateText, Map<String, Object> metaData) throws IOException {
    super(name, context, parent, subFactoriesBuilder, metaData);

    // Note that if the field is unmapped (its field type is null), we don't fail,
    // and just use the given field name as a placeholder.
    this.fieldType = context.getQueryShardContext().fieldMapper(fieldName);
    this.indexedFieldName = fieldType != null ? fieldType.name() : fieldName;
    this.sourceFieldNames = sourceFieldNames == null ? new String[] { indexedFieldName } : sourceFieldNames;

    this.includeExclude = includeExclude;
    this.filter = filterBuilder == null ? null : filterBuilder.toQuery(context.getQueryShardContext());
    this.filterDuplicateText = filterDuplicateText;
    IndexSearcher searcher = context.searcher();
    // Important - need to use the doc count that includes deleted docs
    // or we have this issue: https://github.com/elastic/elasticsearch/issues/7951
    this.supersetNumDocs = filter == null
            ? searcher.getIndexReader().maxDoc()
            : searcher.count(filter);
    this.bucketCountThresholds = bucketCountThresholds;
    this.significanceHeuristic = significanceHeuristic;
}
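The superset size deliberately uses maxDoc() rather than numDocs(), because term statistics still reflect deleted documents until segments are merged (the issue linked in the comment). A minimal sketch of the difference, assuming reader and searcher are an open IndexReader and IndexSearcher (the variable names are placeholders):

    // maxDoc() counts all documents, including deletions not yet merged away;
    // numDocs() and count(query) exclude deleted documents.
    int withDeleted = reader.maxDoc();
    int withoutDeleted = reader.numDocs();
    // When a background filter is supplied, the factories above fall back to a live count:
    int filtered = searcher.count(filter);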
public int count(Query q) {
    try {
        return searcher.count(q);
    } catch (IOException ex) {
        // Rethrow the checked IOException as an unchecked exception.
        throw Throwables.propagate(ex);
    }
}
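Throwables.propagate is deprecated in recent Guava releases; a dependency-free variant of the same wrapper, shown only as a sketch rather than a drop-in replacement, could rethrow via java.io.UncheckedIOException:

    public int count(Query q) {
        try {
            return searcher.count(q);
        } catch (IOException ex) {
            // Wrap the checked exception without pulling in Guava.
            throw new UncheckedIOException(ex);
        }
    }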
public SignificantTermsAggregatorFactory(String name, ValuesSourceConfig<ValuesSource> config,
        IncludeExclude includeExclude, String executionHint, QueryBuilder filterBuilder,
        TermsAggregator.BucketCountThresholds bucketCountThresholds, SignificanceHeuristic significanceHeuristic,
        SearchContext context, AggregatorFactory<?> parent, AggregatorFactories.Builder subFactoriesBuilder,
        Map<String, Object> metaData) throws IOException {
    super(name, config, context, parent, subFactoriesBuilder, metaData);
    if (!config.unmapped()) {
        this.fieldType = config.fieldContext().fieldType();
        this.indexedFieldName = fieldType.name();
    }
    this.includeExclude = includeExclude;
    this.executionHint = executionHint;
    this.filter = filterBuilder == null
            ? null
            : filterBuilder.toFilter(context.getQueryShardContext());
    IndexSearcher searcher = context.searcher();
    this.supersetNumDocs = filter == null
            // Important - need to use the doc count that includes deleted docs
            // or we have this issue: https://github.com/elastic/elasticsearch/issues/7951
            ? searcher.getIndexReader().maxDoc()
            : searcher.count(filter);
    this.bucketCountThresholds = bucketCountThresholds;
    this.significanceHeuristic = significanceHeuristic;
}
@Override
public boolean hasNext() {
    // Lazily count the matching documents on the first call; if the count fails,
    // nrOfAvailableTimeSeries stays at -1 and hasNext() reports no further elements.
    if (nrOfAvailableTimeSeries == -1) {
        try {
            nrOfAvailableTimeSeries = searcher.count(query);
        } catch (IOException e) {
            LOGGER.error("Could not count the found documents", e);
        }
    }
    return currentDocumentCount < nrOfAvailableTimeSeries;
}
/**
 * Counts the documents whose "body" field contains the given term exactly as indexed.
 * @param term the term to look up
 * @return the number of matching documents
 */
public int count(String term) throws IOException {
    Query query = new TermQuery(new Term("body", term));
    return searcher.count(query);
}
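A TermQuery matches a single indexed term, so the method above counts documents containing one exact term rather than a phrase. If a true phrase count is wanted, a PhraseQuery could be used instead; a minimal sketch (the field name and terms are placeholders, and the terms must match the analyzed tokens in the index):

    PhraseQuery.Builder builder = new PhraseQuery.Builder();
    builder.add(new Term("body", "quick"));
    builder.add(new Term("body", "fox"));
    int phraseCount = searcher.count(builder.build());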
protected int countCollatedMatches(final SuggestWord[] suggestion, final IndexSearcher searcher) {
    // Require every suggested word to be present; FILTER clauses match without contributing to scoring.
    org.apache.lucene.search.BooleanQuery.Builder builder = new org.apache.lucene.search.BooleanQuery.Builder();
    for (final SuggestWord word : suggestion) {
        builder.add(new org.apache.lucene.search.BooleanClause(
                new TermQuery(new org.apache.lucene.index.Term(dictionaryField, word.string)),
                org.apache.lucene.search.BooleanClause.Occur.FILTER));
    }
    try {
        return searcher.count(builder.build());
    } catch (final IOException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Counts the total number of documents for a given query.
 * @param appid appid
 * @param query a query
 * @return total docs found in index
 */
public static int count(String appid, Query query) {
    if (StringUtils.isBlank(appid) || query == null) {
        return 0;
    }
    DirectoryReader ireader = null;
    try {
        ireader = getIndexReader(appid);
        if (ireader != null) {
            IndexSearcher isearcher = new IndexSearcher(ireader);
            return isearcher.count(query);
        }
    } catch (Exception e) {
        logger.error(null, e);
    } finally {
        closeIndexReader(ireader);
    }
    return 0;
}
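Opening a DirectoryReader and a fresh IndexSearcher on every call is simple but pays the reader-open cost each time. A possible alternative, shown only as a sketch under the assumption that a Directory instance is available and kept open, is to share a SearcherManager and acquire/release a searcher around each count:

    // Created once and reused across calls; null selects the default SearcherFactory.
    SearcherManager manager = new SearcherManager(directory, null);

    int count(Query query) throws IOException {
        IndexSearcher searcher = manager.acquire();
        try {
            return searcher.count(query);
        } finally {
            // Always release so the underlying reader can be refreshed or closed.
            manager.release(searcher);
        }
    }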
private void applyDocumentVersionRetentionPolicy(IndexWriter w) throws Throwable {
    IndexWriter wr = this.writer;
    if (wr == null) {
        return;
    }

    applyMemoryLimit();

    Operation dummyDelete = Operation.createDelete(null);
    int count = 0;
    Iterator<Entry<String, Long>> it = this.selfLinksRequiringRetentionLimit.entrySet().iterator();
    while (it.hasNext()) {
        Entry<String, Long> e = it.next();
        Query linkQuery = new TermQuery(new Term(ServiceDocument.FIELD_NAME_SELF_LINK, e.getKey()));
        int documentCount = this.searcher.count(linkQuery);
        int pastRetentionLimitVersions = (int) (documentCount - e.getValue());
        if (pastRetentionLimitVersions <= 0) {
            continue;
        }
        it.remove();
        // trim durable index for this link
        deleteDocumentsFromIndex(dummyDelete, e.getKey(), this.selfLinks.get(e.getKey()), e.getValue());
        count++;
    }

    if (!this.selfLinksRequiringRetentionLimit.isEmpty()) {
        logInfo("Applied retention policy to %d links", count);
    }
}
protected HighlightsMatch doMatch(String queryId, Query query) throws IOException {
    IndexSearcher searcher = docs.getSearcher();
    // Cheap pre-check: skip the expensive rewrite/highlight phase when nothing matches.
    if (searcher.count(query) == 0) {
        return null;
    }
    try {
        Query rewritten = rewriter.rewrite(query, searcher);
        return findHighlights(queryId, rewritten);
    } catch (RewriteException e) {
        return fallback(queryId, query, e);
    }
}
/**
 * Asserts that the doc in the index operation really doesn't exist
 */
private boolean assertDocDoesNotExist(final Index index, final boolean allowDeleted) throws IOException {
    //final VersionValue versionValue = versionMap.getUnderLock(index.uid());
    final VersionValue versionValue = null;
    if (versionValue != null) {
        if (versionValue.isDelete() == false || allowDeleted == false) {
            throw new AssertionError("doc [" + index.type() + "][" + index.id() + "] exists in version map (version " + versionValue + ")");
        }
    } else {
        try (Searcher searcher = acquireSearcher("assert doc doesn't exist")) {
            final long docsWithId = searcher.searcher().count(new TermQuery(index.uid()));
            if (docsWithId > 0) {
                throw new AssertionError("doc [" + index.type() + "][" + index.id() + "] exists [" + docsWithId + "] times in index");
            }
        }
    }
    return true;
}
/**
 * Asserts that the doc in the index operation really doesn't exist
 */
private boolean assertDocDoesNotExist(final Index index, final boolean allowDeleted) throws IOException {
    final VersionValue versionValue = versionMap.getUnderLock(index.uid());
    if (versionValue != null) {
        if (versionValue.isDelete() == false || allowDeleted == false) {
            throw new AssertionError("doc [" + index.type() + "][" + index.id() + "] exists in version map (version " + versionValue + ")");
        }
    } else {
        try (Searcher searcher = acquireSearcher("assert doc doesn't exist")) {
            final long docsWithId = searcher.searcher().count(new TermQuery(index.uid()));
            if (docsWithId > 0) {
                throw new AssertionError("doc [" + index.type() + "][" + index.id() + "] exists [" + docsWithId + "] times in index");
            }
        }
    }
    return true;
}
response.documentCount = (long) searcher.count(termQuery);
long queryTimeMicros = Utils.getNowMicrosUtc() - queryStartTimeMicros;
response.queryTimeMicros = queryTimeMicros;
private void buildFacetQueries(final LinkedHashMap<String, AbstractQuery> queries, final FacetBuilder facetBuilder)
        throws Exception {
    final BiConsumerEx<String, AbstractQuery, Exception> consumer = (name, facetQuery) -> {
        // Combine the main search query with each facet query; FILTER clauses match without scoring.
        final BooleanQuery.Builder builder = new BooleanQuery.Builder();
        builder.add(searchQuery, BooleanClause.Occur.FILTER);
        builder.add(facetQuery.getQuery(queryContext), BooleanClause.Occur.FILTER);
        facetBuilder.put(new LabelAndValue(name, queryContext.indexSearcher.count(builder.build())));
    };
    ConcurrentUtils.forEachEx(queries, consumer);
}
public SignificantTermsAggregatorFactory(String name, ValuesSourceConfig<ValuesSource> config,
        IncludeExclude includeExclude, String executionHint, QueryBuilder filterBuilder,
        TermsAggregator.BucketCountThresholds bucketCountThresholds, SignificanceHeuristic significanceHeuristic,
        SearchContext context, AggregatorFactory<?> parent, AggregatorFactories.Builder subFactoriesBuilder,
        Map<String, Object> metaData) throws IOException {
    super(name, config, context, parent, subFactoriesBuilder, metaData);
    this.includeExclude = includeExclude;
    this.executionHint = executionHint;
    this.filter = filterBuilder == null
            ? null
            : filterBuilder.toFilter(context.getQueryShardContext());
    IndexSearcher searcher = context.searcher();
    this.supersetNumDocs = filter == null
            // Important - need to use the doc count that includes deleted docs
            // or we have this issue: https://github.com/elastic/elasticsearch/issues/7951
            ? searcher.getIndexReader().maxDoc()
            : searcher.count(filter);
    this.bucketCountThresholds = bucketCountThresholds;
    this.significanceHeuristic = significanceHeuristic;
    setFieldInfo(context);
}