/**
 * Builds the {@link RestHighLevelClient} from the comma-separated {@code hosts}
 * configuration. Each host entry must be a full URL (protocol://host:port).
 *
 * @throws Exception if no host is configured (assertion) or a host URL is malformed
 */
protected void buildClient() throws Exception {
    Assert.hasText(hosts, "[Assertion Failed] At least one host must be set.");
    ArrayList<HttpHost> httpHosts = new ArrayList<>();
    for (String host : hosts.split(COMMA)) {
        // Parse protocol/host/port out of the configured URL.
        URL hostUrl = new URL(host);
        httpHosts.add(new HttpHost(hostUrl.getHost(), hostUrl.getPort(), hostUrl.getProtocol()));
    }
    // toArray(new HttpHost[0]) is the idiomatic form; the JVM sizes the array itself
    // and it is at least as fast as pre-sizing with size().
    client = new RestHighLevelClient(RestClient.builder(httpHosts.toArray(new HttpHost[0])));
}
/**
 * Applies the query to the request via {@code prepareSearch} and executes it,
 * wrapping any I/O failure in an unchecked {@link ElasticsearchException}.
 *
 * @param searchRequest the request to populate and execute
 * @param searchQuery   the query used to prepare the request
 * @return the raw search response
 */
private SearchResponse doSearch(SearchRequest searchRequest, SearchQuery searchQuery) {
    prepareSearch(searchRequest, searchQuery);
    try {
        return client.search(searchRequest);
    } catch (IOException e) {
        throw new ElasticsearchException("Error for search request with scroll: " + searchRequest.toString(), e);
    }
}
/**
 * Deletes the given document ids from {@code index}/{@code type} in a single bulk request.
 * An empty id list results in an empty bulk request being sent.
 *
 * @param index the target index
 * @param type  the document type
 * @param ids   document ids to delete; must not be null
 * @return a response carrying the bulk operation's elapsed time in milliseconds
 * @throws IOException if the bulk call fails at the transport level
 */
@Override
public DeleteOperationResponse deleteById(String index, String type, List<String> ids) throws IOException {
    BulkRequest bulk = new BulkRequest();
    // Enhanced-for replaces the original indexed loop — no index bookkeeping needed.
    for (String id : ids) {
        bulk.add(new DeleteRequest(index, type, id));
    }
    BulkResponse response = highLevelClient.bulk(bulk);
    return new DeleteOperationResponse(response.getTookInMillis());
}
/**
 * Creates a {@link RestHighLevelClient} for the configured hosts, lets the
 * {@code restClientFactory} customize the builder, and verifies connectivity
 * with a ping before returning.
 *
 * @param clientConfig client configuration (currently unused here; customization
 *                     happens through {@code restClientFactory})
 * @return a ping-verified high-level client
 * @throws IOException      if the ping call fails at the transport level
 * @throws RuntimeException if the ping returns false (no reachable nodes)
 */
@Override
public RestHighLevelClient createClient(Map<String, String> clientConfig) throws IOException {
    HttpHost[] hostArray = httpHosts.toArray(new HttpHost[httpHosts.size()]);
    RestClientBuilder builder = RestClient.builder(hostArray);
    restClientFactory.configureRestClientBuilder(builder);
    RestHighLevelClient rhlClient = new RestHighLevelClient(builder);
    if (LOG.isInfoEnabled()) {
        LOG.info("Pinging Elasticsearch cluster via hosts {} ...", httpHosts);
    }
    // Fail fast if the cluster is unreachable rather than at first use.
    if (!rhlClient.ping()) {
        throw new RuntimeException("There are no reachable Elasticsearch nodes!");
    }
    if (LOG.isInfoEnabled()) {
        LOG.info("Created Elasticsearch RestHighLevelClient connected to {}", httpHosts.toString());
    }
    return rhlClient;
}
// NOTE(review): fragment of a larger operation-dispatch method — the braces in this
// excerpt do not balance (an `else if` follows a dangling try/catch, and the
// DeleteIndex branch appears truncated), so it cannot be restructured safely here.
// Visible intent: for each ElasticsearchOperation branch, convert the message body to
// the matching request type, invoke the corresponding RestHighLevelClient call with
// RequestOptions.DEFAULT, and store the result back into the message body. The
// ElasticsearchStatusException catch presumably distinguishes "index exists" from
// "index missing" — TODO confirm against the full method.
message.setBody(restHighLevelClient.index(indexRequest, RequestOptions.DEFAULT).getId()); } else if (operation == ElasticsearchOperation.Update) { UpdateRequest updateRequest = message.getBody(UpdateRequest.class); message.setBody(restHighLevelClient.update(updateRequest, RequestOptions.DEFAULT).getId()); } else if (operation == ElasticsearchOperation.GetById) { GetRequest getRequest = message.getBody(GetRequest.class); message.setBody(restHighLevelClient.get(getRequest, RequestOptions.DEFAULT)); } else if (operation == ElasticsearchOperation.Bulk) { BulkRequest bulkRequest = message.getBody(BulkRequest.class); message.setBody(restHighLevelClient.bulk(bulkRequest, RequestOptions.DEFAULT).getItems()); } else if (operation == ElasticsearchOperation.BulkIndex) { BulkRequest bulkRequest = message.getBody(BulkRequest.class); message.setBody(restHighLevelClient.bulk(bulkRequest, RequestOptions.DEFAULT).getItems()); } else if (operation == ElasticsearchOperation.Delete) { DeleteRequest deleteRequest = message.getBody(DeleteRequest.class); message.setBody(restHighLevelClient.delete(deleteRequest, RequestOptions.DEFAULT).getResult()); } else if (operation == ElasticsearchOperation.DeleteIndex) { DeleteRequest deleteRequest = message.getBody(DeleteRequest.class); searchRequest.source(sourceBuilder); try { restHighLevelClient.search(searchRequest, RequestOptions.DEFAULT); message.setBody(true); } catch (ElasticsearchStatusException e) { message.setBody(restHighLevelClient.search(searchRequest, RequestOptions.DEFAULT).getHits()); } else if (operation == ElasticsearchOperation.MultiSearch) { MultiSearchRequest searchRequest = message.getBody(MultiSearchRequest.class); message.setBody(restHighLevelClient.msearch(searchRequest, RequestOptions.DEFAULT).getResponses());
// NOTE(review): incomplete excerpt — the `try` is never closed and the trailing
// lambda `(request, bulkListener) -> client.bulkAsync(...)` is detached from the
// expression (likely a BulkProcessor bulk-consumer) it belongs to. Visible intent:
// wrap the low-level client in a RestHighLevelClient, verify the server version via
// checkVersion(), and log the connected versions. Do not edit without the full method.
try { this.client = new RestHighLevelClient(lowLevelClient); checkVersion(); logger.info("Elasticsearch Client for version {}.x connected to a node running version {}", compatibleVersion(), getVersion()); (request, bulkListener) -> client.bulkAsync(request, bulkListener);
/**
 * Fetches a single document, delegating directly to the high-level client.
 *
 * @param getRequest the fully-populated get request
 * @return the raw get response from Elasticsearch
 * @throws IOException if the call fails at the transport level
 */
@Override
public GetResponse get(GetRequest getRequest) throws IOException {
    return highLevelClient.get(getRequest);
}
/**
 * Closes the Elasticsearch client on bean destruction. Any failure during close
 * is logged rather than propagated so container shutdown can proceed.
 */
@Override
public void destroy() throws Exception {
    try {
        log.info("Closing elasticSearch client");
        if (client == null) {
            return;
        }
        client.close();
    } catch (final Exception e) {
        log.error("Error closing ElasticSearch client: ", e);
    }
}
// NOTE(review): fragment of a scroll-search-and-delete loop — `deleteResponse` is
// declared twice and the braces do not balance, so the loop structure around this
// excerpt is missing. Visible intent: run an initial search to obtain a scroll id,
// delete matched documents, page through subsequent batches with searchScroll using
// the refreshed scroll id, and finally release server-side scroll state via
// clearScroll. Do not restructure without the enclosing method.
searchResponse = client.search(searchRequest); String scrollId = searchResponse.getScrollId(); SearchHit[] searchHits = searchResponse.getHits().getHits(); DeleteResponse deleteResponse = client.delete(deleteRequest); SearchScrollRequest scrollRequest = new SearchScrollRequest(scrollId); scrollRequest.scroll(scroll); searchResponse = client.searchScroll(scrollRequest); scrollId = searchResponse.getScrollId(); searchHits = searchResponse.getHits().getHits(); DeleteResponse deleteResponse = client.delete(deleteRequest); ClearScrollResponse clearScrollResponse = client.clearScroll(clearScrollRequest); boolean succeeded = clearScrollResponse.isSucceeded(); } catch (IOException e) {
/**
 * Performs an index operation, retrying up to {@code RETRY_COUNT} times on failure.
 * Exhausted retries are swallowed: the failure is recorded via {@link Monitors} and
 * logged, but not rethrown.
 *
 * @param request              the index request to perform
 * @param operationDescription human-readable description used by the retry utility
 */
private void indexWithRetry(final IndexRequest request, final String operationDescription) {
    try {
        new RetryUtil<IndexResponse>().retryOnException(
                () -> {
                    try {
                        return elasticSearchClient.index(request);
                    } catch (IOException e) {
                        // Checked IOException cannot cross the lambda boundary; wrap it.
                        throw new RuntimeException(e);
                    }
                },
                null,
                null,
                RETRY_COUNT,
                operationDescription,
                "indexWithRetry");
    } catch (Exception e) {
        Monitors.error(className, "index");
        logger.error("Failed to index {} for request type: {}", request.id(), request.type(), e);
    }
}
/**
 * Removes the workflow document for the given id from the index. Best-effort:
 * a missing document and transport failures are logged (the latter also recorded
 * via {@link Monitors}) rather than propagated.
 *
 * @param workflowId id of the workflow document to remove
 */
@Override
public void removeWorkflow(String workflowId) {
    final DeleteRequest deleteRequest = new DeleteRequest(indexName, WORKFLOW_DOC_TYPE, workflowId);
    try {
        DeleteResponse response = elasticSearchClient.delete(deleteRequest);
        if (DocWriteResponse.Result.NOT_FOUND == response.getResult()) {
            logger.error("Index removal failed - document not found by id: {}", workflowId);
        }
    } catch (IOException e) {
        logger.error("Failed to remove workflow {} from index", workflowId, e);
        Monitors.error(className, "remove");
    }
}
@Override public void start() throws IOException { if (client != null) { // The client has already been initialized. Let's skip this again return; } try { // Create an elasticsearch client client = new RestHighLevelClient(buildRestClient(settings.getElasticsearch())); checkVersion(); logger.info("Elasticsearch Client for version {}.x connected to a node running version {}", compatibleVersion(), getVersion()); } catch (Exception e) { logger.warn("failed to create elasticsearch client, disabling crawler..."); throw e; } if (settings.getElasticsearch().getPipeline() != null) { // Check that the pipeline exists if (!isExistingPipeline(settings.getElasticsearch().getPipeline())) { throw new RuntimeException("You defined pipeline:" + settings.getElasticsearch().getPipeline() + ", but it does not exist."); } } BiConsumer<BulkRequest, ActionListener<BulkResponse>> bulkConsumer = (request, bulkListener) -> client.bulkAsync(request, RequestOptions.DEFAULT, bulkListener); bulkProcessor = BulkProcessor.builder(bulkConsumer, new DebugListener(logger)) .setBulkActions(settings.getElasticsearch().getBulkSize()) .setFlushInterval(TimeValue.timeValueMillis(settings.getElasticsearch().getFlushInterval().millis())) .setBulkSize(new ByteSizeValue(settings.getElasticsearch().getByteSize().getBytes())) .build(); }
/**
 * Looks up a document and returns its source as a map.
 * NOTE(review): {@code getSource()} is null when the document does not exist —
 * confirm callers handle a null return.
 *
 * @param index the index to query
 * @param type  the document type
 * @param id    the document id
 * @return the document's source map, or null if not found
 * @throws IOException if the call fails at the transport level
 */
@Override
public Map<String, Object> get(String index, String type, String id) throws IOException {
    final GetRequest request = new GetRequest(index, type, id);
    return highLevelClient.get(request, new Header[]{}).getSource();
}
/**
 * Closes the underlying REST client obtained via {@code rest()}.
 *
 * @throws IOException if closing the client fails
 */
@Override default void close() throws IOException { rest().close(); } }
@Override public String index(IndexQuery query) { String documentId; IndexRequest request = prepareIndex(query); try { documentId = client.index(request).getId(); } catch (IOException e) { throw new ElasticsearchException("Error while index for request: " + request.toString(), e); } // We should call this because we are not going through a mapper. if (query.getObject() != null) { setPersistentEntityId(query.getObject(), documentId); } return documentId; }
/**
 * Deletes a single document and returns its id as reported by the response.
 *
 * @param indexName the index containing the document
 * @param type      the document type
 * @param id        the document id
 * @return the id of the deleted document
 * @throws ElasticsearchException wrapping any transport-level {@link IOException}
 */
@Override
public String delete(String indexName, String type, String id) {
    final DeleteRequest deleteRequest = new DeleteRequest(indexName, type, id);
    try {
        return client.delete(deleteRequest).getId();
    } catch (IOException e) {
        throw new ElasticsearchException("Error while deleting item request: " + deleteRequest.toString(), e);
    }
}
/**
 * Wires the DAO: wraps the injected low-level REST client in a high-level client,
 * captures index naming/health configuration, and creates the worker pool used
 * for async operations.
 *
 * @param lowLevelRestClient shared low-level client (also kept as the admin client)
 * @param config             Elasticsearch configuration source
 * @param objectMapper       JSON mapper for (de)serializing documents
 */
@Inject
public ElasticSearchRestDAOV5(RestClient lowLevelRestClient, ElasticSearchConfiguration config, ObjectMapper objectMapper) {
    this.objectMapper = objectMapper;
    this.elasticSearchAdminClient = lowLevelRestClient;
    this.elasticSearchClient = new RestHighLevelClient(lowLevelRestClient);
    this.indexName = config.getIndexName();
    this.logIndexPrefix = config.getTasklogIndexName();
    this.clusterHealthColor = config.getClusterHealthColor();
    // Worker pool for async operations: 6 core / 12 max threads,
    // 1-minute keep-alive, unbounded queue.
    this.executorService = new ThreadPoolExecutor(6, 12, 1L, TimeUnit.MINUTES, new LinkedBlockingQueue<>());
}
/**
 * Executes the request with the optional query applied and returns the total
 * number of matching hits.
 *
 * @param countRequest       the search request used purely for counting
 * @param elasticsearchQuery optional query; when null, all documents are counted
 * @return the total hit count
 * @throws ElasticsearchException wrapping any transport-level {@link IOException}
 */
private long doCount(SearchRequest countRequest, QueryBuilder elasticsearchQuery) {
    final SearchSourceBuilder source = new SearchSourceBuilder();
    if (elasticsearchQuery != null) {
        source.query(elasticsearchQuery);
    }
    countRequest.source(source);
    try {
        return client.search(countRequest).getHits().getTotalHits();
    } catch (IOException e) {
        throw new ElasticsearchException("Error while searching for request: " + countRequest.toString(), e);
    }
}
@Override public void start() throws IOException { if (client != null) { // The client has already been initialized. Let's skip this again return; } try { // Create an elasticsearch client client = new RestHighLevelClient(buildRestClient(settings.getElasticsearch())); checkVersion(); logger.info("Elasticsearch Client for version {}.x connected to a node running version {}", compatibleVersion(), getVersion()); } catch (Exception e) { logger.warn("failed to create elasticsearch client, disabling crawler..."); throw e; } if (settings.getElasticsearch().getPipeline() != null) { // Check that the pipeline exists if (!isExistingPipeline(settings.getElasticsearch().getPipeline())) { throw new RuntimeException("You defined pipeline:" + settings.getElasticsearch().getPipeline() + ", but it does not exist."); } } BiConsumer<BulkRequest, ActionListener<BulkResponse>> bulkConsumer = (request, bulkListener) -> client.bulkAsync(request, RequestOptions.DEFAULT, bulkListener); bulkProcessor = BulkProcessor.builder(bulkConsumer, new DebugListener(logger)) .setBulkActions(settings.getElasticsearch().getBulkSize()) .setFlushInterval(TimeValue.timeValueMillis(settings.getElasticsearch().getFlushInterval().millis())) .setBulkSize(new ByteSizeValue(settings.getElasticsearch().getByteSize().getBytes())) .build(); }
// NOTE(review): truncated fragment — the retryOnException call and both braces are
// unclosed, so the remaining retry parameters (predicates, retry count, description)
// are not visible here. Visible intent: retry the bulk request on failure, wrapping
// the checked IOException as a RuntimeException so it can cross the lambda boundary.
new RetryUtil<BulkResponse>().retryOnException(() -> { try { return elasticSearchClient.bulk(bulkRequest); } catch (IOException e) { throw new RuntimeException(e);