/**
 * Creates an id iterator from the first scroll response: remembers the scroll id
 * for subsequent fetches and buffers the first page of hits.
 */
private IdScrollIterator(EsClient esClient, SearchResponse scrollResponse, Function<String, I> idConverter) {
  this.esClient = esClient;
  this.scrollId = scrollResponse.getScrollId();
  this.idConverter = idConverter;
  // Buffer the first page; later pages are fetched lazily via the scroll id.
  for (SearchHit hit : scrollResponse.getHits().getHits()) {
    hits.add(hit);
  }
}
/**
 * Maps a search response to a page containing only the document ids.
 * This mapper is only ever used with {@code T = String}.
 */
@Override
public <T> AggregatedPage<T> mapResults(SearchResponse response, Class<T> clazz, Pageable pageable) {
  List<String> ids = new ArrayList<String>();
  for (SearchHit searchHit : response.getHits().getHits()) {
    ids.add(searchHit.getId());
  }
  if (ids.isEmpty()) {
    // Collections.<T>emptyList() is type-safe, unlike the raw Collections.EMPTY_LIST.
    return new AggregatedPageImpl<T>(Collections.<T>emptyList(), response.getScrollId());
  }
  @SuppressWarnings("unchecked") // safe: the ids are Strings and this mapper is used with T = String
  List<T> result = (List<T>) ids;
  return new AggregatedPageImpl<T>(result, response.getScrollId());
}
};
/**
 * Executes the query and drains every scroll page into one immutable list.
 *
 * @throws PrestoException if the total hit count exceeds the configured max hits
 */
private List<SearchHit> sendElasticsearchQuery(ElasticsearchQueryBuilder queryBuilder) {
  SearchResponse response = getSearchResponse(queryBuilder);
  long totalHits = response.getHits().getTotalHits();
  if (totalHits > maxHits) {
    throw new PrestoException(
        ELASTICSEARCH_MAX_HITS_EXCEEDED,
        format("The number of hits for the query (%d) exceeds the configured max hits (%d)", totalHits, maxHits));
  }
  ImmutableList.Builder<SearchHit> accumulated = ImmutableList.builder();
  while (true) {
    for (SearchHit hit : response.getHits().getHits()) {
      accumulated.add(hit);
    }
    // Fetch the next page; an empty page signals the scroll is exhausted.
    response = getScrollResponse(queryBuilder, response.getScrollId());
    if (response.getHits().getHits().length == 0) {
      break;
    }
  }
  return accumulated.build();
}
private List<SearchHit> scrollTillLimit(TableInJoinRequestBuilder tableInJoinRequest, Integer hintLimit) { SearchResponse scrollResp = scrollOneTimeWithMax(client,tableInJoinRequest); updateMetaSearchResults(scrollResp); List<SearchHit> hitsWithScan = new ArrayList<>(); int curentNumOfResults = 0; SearchHit[] hits = scrollResp.getHits().getHits(); if (hintLimit == null) hintLimit = MAX_RESULTS_FOR_FIRST_TABLE; while (hits.length != 0 && curentNumOfResults < hintLimit) { curentNumOfResults += hits.length; Collections.addAll(hitsWithScan, hits); if (curentNumOfResults >= MAX_RESULTS_FOR_FIRST_TABLE) { //todo: log or exception? System.out.println("too many results for first table, stoping at:" + curentNumOfResults); break; } scrollResp = client.prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000)).execute().actionGet(); hits = scrollResp.getHits().getHits(); } return hitsWithScan; }
/**
 * Maps every hit in the response to an entity of the requested type, carrying over
 * paging metadata (total hits, max score, aggregations, scroll id) into the page.
 */
@Override
public <T> AggregatedPage<T> mapResults(SearchResponse response, Class<T> clazz, Pageable pageable) {
  long total = response.getHits().getTotalHits();
  float topScore = response.getHits().getMaxScore();
  List<T> entities = new ArrayList<>();
  for (SearchHit hit : response.getHits()) {
    if (hit == null) {
      continue;
    }
    // Prefer the _source document; fall back to stored fields when _source is empty.
    String source = hit.getSourceAsString();
    T entity = StringUtils.isEmpty(source)
        ? mapEntity(hit.getFields().values(), clazz)
        : mapEntity(source, clazz);
    setPersistentEntityId(entity, hit.getId(), clazz);
    setPersistentEntityVersion(entity, hit.getVersion(), clazz);
    setPersistentEntityScore(entity, hit.getScore(), clazz);
    populateScriptFields(entity, hit);
    entities.add(entity);
  }
  return new AggregatedPageImpl<T>(entities, pageable, total, response.getAggregations(), response.getScrollId(), topScore);
}
break; scrollResp = client.prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000)).execute().actionGet(); hits = scrollResp.getHits().getHits(); break; scrollResp = client.prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000)).execute().actionGet(); hits = scrollResp.getHits().getHits();
/** * Delete documents using a query. Check what would be deleted first with a normal search query! * Elasticsearch once provided a native prepareDeleteByQuery method, but this was removed * in later versions. Instead, there is a plugin which iterates over search results, * see https://www.elastic.co/guide/en/elasticsearch/plugins/current/plugins-delete-by-query.html * We simulate the same behaviour here without the need of that plugin. * * @param indexName name of the index whose matching documents are deleted * @param q query selecting the documents to delete * @return delete document count */ public int deleteByQuery(String indexName, final QueryBuilder q) { Map<String, String> ids = new TreeMap<>(); // FIXME: deprecated, "will be removed in 3.0, you should do a regular scroll instead, ordered by `_doc`" @SuppressWarnings("deprecation") SearchResponse response = elasticsearchClient.prepareSearch(indexName).setSearchType(SearchType.SCAN) .setScroll(new TimeValue(60000)).setQuery(q).setSize(100).execute().actionGet(); while (true) { // accumulate the ids here, don't delete them right now to prevent an interference of the delete with the // scroll for (SearchHit hit : response.getHits().getHits()) { ids.put(hit.getId(), hit.getType()); } response = elasticsearchClient.prepareSearchScroll(response.getScrollId()).setScroll(new TimeValue(600000)) .execute().actionGet(); // termination: an empty scroll page means the cursor is exhausted if (response.getHits().getHits().length == 0) break; } /* NOTE(review): the scroll context is never explicitly cleared; it only expires via the keep-alive — consider a clear-scroll call here */ return deleteBulk(indexName, ids); }
public List<String> findAllViewUuids() { SearchRequestBuilder esSearch = esClient.prepareSearch(ViewIndexDefinition.INDEX_TYPE_VIEW) .addSort("_doc", SortOrder.ASC) .setScroll(TimeValue.timeValueMinutes(SCROLL_TIME_IN_MINUTES)) .setFetchSource(false) .setSize(100) .setQuery(matchAllQuery()); SearchResponse response = esSearch.get(); List<String> result = newArrayList(); while (true) { List<SearchHit> hits = newArrayList(response.getHits()); for (SearchHit hit : hits) { result.add(hit.getId()); } String scrollId = response.getScrollId(); response = esClient.prepareSearchScroll(scrollId) .setScroll(TimeValue.timeValueMinutes(SCROLL_TIME_IN_MINUTES)) .get(); // Break condition: No hits are returned if (response.getHits().getHits().length == 0) { esClient.nativeClient().prepareClearScroll().addScrollId(scrollId).get(); break; } } return result; } }
break; responseForSecondTable = client.prepareSearchScroll(responseForSecondTable.getScrollId()).setScroll(new TimeValue(600000)).execute().actionGet(); secondQueryHits = responseForSecondTable.getHits().getHits(); scrollResp = client.prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000)).execute().actionGet(); hits = scrollResp.getHits().getHits();
@Test
public void trace_logs() {
  logTester.setLevel(LoggerLevel.TRACE);
  SearchResponse searchResponse = es.client().prepareSearch(FakeIndexDefinition.INDEX)
    .setScroll(TimeValue.timeValueMinutes(1))
    .get();
  logTester.clear();

  es.client().prepareSearchScroll(searchResponse.getScrollId()).get();

  // At TRACE level exactly one line is logged for the scroll request.
  assertThat(logTester.logs()).hasSize(1);
}
@Test
public void no_trace_logs() {
  logTester.setLevel(LoggerLevel.DEBUG);
  SearchResponse searchResponse = es.client().prepareSearch(FakeIndexDefinition.INDEX)
    .setScroll(TimeValue.timeValueMinutes(1))
    .get();
  logTester.clear();

  es.client().prepareSearchScroll(searchResponse.getScrollId()).get();

  // Below TRACE level the scroll request must not be logged at all.
  assertThat(logTester.logs()).isEmpty();
}
firstTableResponse = client.prepareSearchScroll(firstTableResponse.getScrollId()).setScroll(new TimeValue(600000)).get(); else finishedWithFirstTable = true;
searchResponse = client.prepareSearchScroll(searchResponse.getScrollId()).setScroll(new TimeValue(600000)).execute().actionGet(); } else break; } else {
/** * Get all the indexed documents (no paginated results). Results are not sorted. */ public List<SearchHit> getDocuments(IndexType indexType) { SearchRequestBuilder req = SHARED_NODE.client().prepareSearch(indexType.getIndex()).setTypes(indexType.getType()).setQuery(matchAllQuery()); EsUtils.optimizeScrollRequest(req); req.setScroll(new TimeValue(60000)) .setSize(100); SearchResponse response = req.get(); List<SearchHit> result = newArrayList(); while (true) { Iterables.addAll(result, response.getHits()); response = SHARED_NODE.client().prepareSearchScroll(response.getScrollId()).setScroll(new TimeValue(600000)).execute().actionGet(); // Break condition: No hits are returned if (response.getHits().getHits().length == 0) { break; } } return result; }
scrollState.updateScrollId(searchResponse.getScrollId()); if (isEmpty(searchResponse.getHits())) {
String scrollId = searchResponse.getScrollId(); if (scrollId == null) { break;
tc.threadPool().getThreadContext().putHeader("sg_impersonate_as", "nagilum"); SearchResponse searchRes = tc.prepareSearch("starfleet").setTypes("ships").setScroll(TimeValue.timeValueMinutes(5)).get(); scrollId = searchRes.getScrollId(); } finally { ctx.close(); Assert.assertNotNull(searchRes.getScrollId()); tc.prepareSearchScroll(searchRes.getScrollId()).get(); Assert.fail(); } catch (Exception e) { tc.threadPool().getThreadContext().putHeader("sg_impersonate_as", "nagilum"); searchRes = tc.prepareSearch("starfleet").setTypes("ships").setScroll(TimeValue.timeValueMinutes(5)).get(); SearchResponse scrollRes = tc.prepareSearchScroll(searchRes.getScrollId()).get(); Assert.assertEquals(0, scrollRes.getFailedShards()); } finally {
tc.threadPool().getThreadContext().putHeader("sg_impersonate_as", "nagilum"); SearchResponse searchRes = tc.prepareSearch("starfleet").setTypes("ships").setScroll(TimeValue.timeValueMinutes(5)).get(); scrollId = searchRes.getScrollId(); } finally { ctx.close(); Assert.assertNotNull(searchRes.getScrollId()); tc.prepareSearchScroll(searchRes.getScrollId()).get(); Assert.fail(); } catch (Exception e) { tc.threadPool().getThreadContext().putHeader("sg_impersonate_as", "nagilum"); searchRes = tc.prepareSearch("starfleet").setTypes("ships").setScroll(TimeValue.timeValueMinutes(5)).get(); SearchResponse scrollRes = tc.prepareSearchScroll(searchRes.getScrollId()).get(); Assert.assertEquals(0, scrollRes.getFailedShards()); } finally {
/**
 * Converts a raw {@link SearchResponse} into this module's {@code Response},
 * copying shard failures and wrapping each hit in a {@code ClientHit}.
 */
private Response wrap(SearchResponse response) {
  ShardSearchFailure[] shardFailures = response.getShardFailures();
  List<SearchFailure> failures;
  if (shardFailures == null) {
    failures = emptyList();
  } else {
    failures = new ArrayList<>(shardFailures.length);
    for (ShardSearchFailure failure : shardFailures) {
      // shard() is null when the failure cannot be attributed to a particular node
      String nodeId = failure.shard() == null ? null : failure.shard().getNodeId();
      failures.add(new SearchFailure(failure.getCause(), failure.index(), failure.shardId(), nodeId));
    }
  }
  SearchHit[] rawHits = response.getHits().getHits();
  List<Hit> hits;
  if (rawHits == null || rawHits.length == 0) {
    hits = emptyList();
  } else {
    hits = new ArrayList<>(rawHits.length);
    for (SearchHit rawHit : rawHits) {
      hits.add(new ClientHit(rawHit));
    }
    hits = unmodifiableList(hits);
  }
  return new Response(response.isTimedOut(), failures, response.getHits().getTotalHits(), hits, response.getScrollId());
}
ids.put(hit.getId(), hit.getType()); response = elasticsearchClient.prepareSearchScroll(response.getScrollId()).setScroll(new TimeValue(600000)) .execute().actionGet();