/**
 * Throws an {@link ElasticsearchException} when any item of the bulk response failed.
 *
 * <p>The exception message embeds a map of failed document id -> failure message;
 * the same map is also attached to the exception for programmatic access.
 *
 * @param bulkResponse response of an executed bulk request
 */
private void checkForBulkUpdateFailure(BulkResponse bulkResponse) {
    if (!bulkResponse.hasFailures()) {
        return;
    }
    Map<String, String> failedDocuments = new HashMap<>();
    for (BulkItemResponse bulkItem : bulkResponse.getItems()) {
        if (bulkItem.isFailed()) {
            failedDocuments.put(bulkItem.getId(), bulkItem.getFailureMessage());
        }
    }
    throw new ElasticsearchException(
            "Bulk indexing has failures. Use ElasticsearchException.getFailedDocuments() for detailed messages ["
                    + failedDocuments + "]",
            failedDocuments);
}
// NOTE(review): fragment cut from a larger mutation-applying method — the enclosing
// method header and closing braces are outside this view, so the code is left
// byte-identical. Visible concerns that cannot be fixed safely from this fragment:
//  - on the non-delete path BOTH an inline-script update and a full IndexRequest are
//    added for the same doc id; presumably the script removes stale fields before the
//    reindex — TODO confirm intent against the full method.
//  - non-NOT_FOUND failures abort via a raw `new Exception(...)`, which both loses the
//    per-item cause and violates "never throw raw Exception"; consider a narrower
//    exception type carrying the failure cause.
if (mutation.isDeleted()) { log.trace("Deleting entire document {}", docid); brb.add(new DeleteRequest(indexName, storename, docid)); } else { String script = getDeletionScript(informations, storename, mutation); brb.add(client.prepareUpdate(indexName, storename, docid).setScript(script, ScriptService.ScriptType.INLINE)); log.trace("Adding script {}", script); brb.add(new IndexRequest(indexName, storename, docid) .source(getNewDocument(mutation.getAdditions(), informations.get(storename), ttl))); BulkResponse bulkItemResponses = brb.execute().actionGet(); if (bulkItemResponses.hasFailures()) { boolean actualFailure = false; for(BulkItemResponse response : bulkItemResponses.getItems()) { if(response.isFailed() && response.getFailure().getStatus() != RestStatus.NOT_FOUND) { log.error("Failed to execute ES query {}", response.getFailureMessage()); actualFailure = true; throw new Exception(bulkItemResponses.buildFailureMessage());
/**
 * Bulk-processor callback fired after a bulk request completed.
 *
 * <p>Stops the per-request profiler, logs every failed item, counts successes on
 * {@code result}, and reports the ids of successfully indexed documents to the
 * {@code indexingListener}.
 */
@Override
public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
    stopProfiler(request);
    List<DocId> succeededIds = new ArrayList<>();
    for (BulkItemResponse item : response.getItems()) {
        if (item.isFailed()) {
            LOGGER.error("index [{}], type [{}], id [{}], message [{}]",
                    item.getIndex(), item.getType(), item.getId(), item.getFailureMessage());
            continue;
        }
        result.incrementSuccess();
        succeededIds.add(new DocId(item.getIndex(), item.getType(), item.getId()));
    }
    indexingListener.onSuccess(succeededIds);
}
/**
 * Extracts the failure cause from a single bulk item response.
 *
 * @param bulkItemResponse the item to inspect
 * @return {@code null} when the item succeeded; otherwise a {@link RuntimeException}
 *         carrying both the failure message and the original cause
 */
@Override
public Throwable extractFailureCauseFromBulkItemResponse(BulkItemResponse bulkItemResponse) {
    if (!bulkItemResponse.isFailed()) {
        return null;
    }
    // Preserve the underlying failure as the cause instead of flattening it to a
    // message string — otherwise the original stack trace is lost.
    return new RuntimeException(
            bulkItemResponse.getFailureMessage(), bulkItemResponse.getFailure().getCause());
}
/**
 * Reacts to failed items in a bulk response: NOT_FOUND failures are only logged
 * as warnings, while any other failure logs an error and aborts with a
 * {@link RuntimeException}.
 *
 * @param bulkResponse the executed bulk response to inspect
 * @param hasParent    unused here; kept for interface compatibility
 */
private void processFailBulkResponse(BulkResponse bulkResponse, boolean hasParent) {
    for (BulkItemResponse item : bulkResponse.getItems()) {
        if (item.isFailed()) {
            if (item.getFailure().getStatus() == RestStatus.NOT_FOUND) {
                logger.warn(item.getFailureMessage());
            } else {
                logger.error("全量导入数据有误 {}", item.getFailureMessage());
                throw new RuntimeException("全量数据 etl 异常: " + item.getFailureMessage());
            }
        }
    }
}
/**
 * Extracts the failure cause from a single bulk item response.
 *
 * @param bulkItemResponse the item to inspect
 * @return the underlying failure cause, or {@code null} when the item succeeded
 */
@Override
public Throwable extractFailureCauseFromBulkItemResponse(BulkItemResponse bulkItemResponse) {
    // A successful item carries no failure cause.
    return bulkItemResponse.isFailed() ? bulkItemResponse.getFailure().getCause() : null;
}
/**
 * Permanently delete index documents.
 *
 * <p>Builds one bulk request of delete actions and executes it inside the
 * {@code RETRY_ES_PUBLISH} retry wrapper; each failed item is reported via
 * {@code handleException}, and any exception escaping the retry loop is reported
 * under the bulk-delete metric.
 *
 * @param type index type
 * @param ids entity ids
 */
private void hardDeleteDoc(final String type, final List<String> ids) {
    try {
        RETRY_ES_PUBLISH.call(() -> {
            final BulkRequestBuilder bulk = client.prepareBulk();
            for (final String id : ids) {
                bulk.add(client.prepareDelete(esIndex, type, id));
            }
            final BulkResponse bulkResponse = bulk.execute().actionGet(esBulkCallTimeout);
            log.info("Deleting metadata of type {} with count {}", type, ids.size());
            if (bulkResponse.hasFailures()) {
                for (final BulkItemResponse item : bulkResponse.getItems()) {
                    if (item.isFailed()) {
                        handleException("ElasticSearchUtil.bulkDelete.item", type, item.getId(),
                                item.getFailure().getCause(),
                                Metrics.CounterElasticSearchDelete.getMetricName());
                    }
                }
            }
            return null;
        });
    } catch (Exception e) {
        handleException("ElasticSearchUtil.bulkDelete", type, ids, e,
                Metrics.CounterElasticSearchBulkDelete.getMetricName());
    }
}
/**
 * Bulk-processor callback fired after a bulk request completed.
 *
 * <p>Each failed item is logged at debug level and a summary warning with the
 * total failure count is emitted.
 */
@Override
public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
    logger.trace("Executed bulk request with [{}] requests", request.numberOfActions());
    if (!response.hasFailures()) {
        return;
    }
    // Plain counter instead of the single-element-array closure trick.
    int failureCount = 0;
    for (BulkItemResponse item : response) {
        if (item.isFailed()) {
            failureCount++;
            logger.debug("Error caught for [{}]/[{}]/[{}]: {}",
                    item.getIndex(), item.getType(), item.getId(), item.getFailureMessage());
        }
    }
    logger.warn("Got [{}] failures of [{}] requests", failureCount, request.numberOfActions());
}
/**
 * Commits the pending bulk request, if any.
 *
 * <p>NOT_FOUND item failures are logged as warnings, all other item failures as
 * errors; nothing is rethrown.
 *
 * @param bulkRequestBuilder builder holding the queued actions
 * @return {@code true} when there was nothing to commit or every item succeeded
 */
private static boolean commitBulkRequest(BulkRequestBuilder bulkRequestBuilder) {
    if (bulkRequestBuilder.numberOfActions() <= 0) {
        return true;
    }
    BulkResponse response = bulkRequestBuilder.execute().actionGet();
    boolean failed = response.hasFailures();
    if (failed) {
        for (BulkItemResponse item : response.getItems()) {
            if (item.isFailed()) {
                if (item.getFailure().getStatus() == RestStatus.NOT_FOUND) {
                    logger.warn(item.getFailureMessage());
                } else {
                    logger.error("ES sync commit error: {}", item.getFailureMessage());
                }
            }
        }
    }
    return !failed;
}
// NOTE(review): this line appears to be corrupted diff/merge residue — it contains a
// stray leading '+' from a patch hunk, and both for-loops end after `r.getResponse();`
// while the following log statements reference `r` outside the loop scope, so the code
// is not syntactically valid Java. Left byte-identical pending recovery of the original
// hunk. Also suspicious once recovered: the "ES SUCCESS MESSAGE" trace prints
// getFailureMessage() — TODO confirm which field was intended.
_document_name); a.setSource(setElement.toString()); bulkRequest.add(a); + bulkRequest.numberOfActions()); BulkResponse resp = bulkRequest.execute().actionGet(); for(BulkItemResponse r: resp.getItems()) r.getResponse(); _LOG.trace("[OpenSOC] ES SUCCESS MESSAGE: " + r.getFailureMessage()); if (resp.hasFailures()) { _LOG.error("[OpenSOC] Received bulk response error: " + resp.buildFailureMessage()); for(BulkItemResponse r: resp.getItems()) r.getResponse(); _LOG.error("[OpenSOC] ES FAILURE MESSAGE: " + r.getFailureMessage());
// NOTE(review): fragment — the enclosing method begins and ends outside this view
// (the throw has no closing braces here), so the code is left byte-identical.
// Review notes for the full method:
//  - setRefresh(true) forces a refresh on every bulk request, which is expensive under
//    load — confirm it is intentional and not only needed for tests.
//  - the loop throws SearchIndexException on the FIRST failed item, so subsequent
//    failures are never logged — confirm that is acceptable.
String type = doc.getType(); String uid = doc.getUID(); bulkRequest.add(nodeClient.prepareIndex(index, type, uid).setSource(doc)); bulkRequest.setRefresh(true); BulkResponse bulkResponse = bulkRequest.execute().actionGet(); if (bulkResponse.hasFailures()) { for (BulkItemResponse item : bulkResponse.getItems()) { if (item.isFailed()) { logger.warn("Error updating {}: {}", item, item.getFailureMessage()); throw new SearchIndexException(item.getFailureMessage());
/**
 * Indexes all given queries in one bulk request.
 *
 * @param queries index queries to execute
 * @throws ElasticsearchException when any bulk item failed; the exception carries a
 *         map of failed document id -> failure message
 */
@Override
public void bulkIndex(List<IndexQuery> queries) {
    BulkRequestBuilder bulk = client.prepareBulk();
    for (IndexQuery query : queries) {
        bulk.add(prepareIndex(query));
    }
    BulkResponse bulkResponse = bulk.execute().actionGet();
    if (!bulkResponse.hasFailures()) {
        return;
    }
    Map<String, String> failedDocuments = new HashMap<String, String>();
    for (BulkItemResponse item : bulkResponse.items()) {
        if (item.failed()) {
            failedDocuments.put(item.getId(), item.failureMessage());
        }
    }
    throw new ElasticsearchException("Bulk indexing has failures. Use ElasticsearchException.getFailedDocuments() for detailed messages [" + failedDocuments + "]", failedDocuments);
}
// NOTE(review): fragment of a reindex/bulk loop — the try/catch and if blocks open or
// close outside this view, so the code is left byte-identical. Failed item positions
// (getItemId()) are collected into `list`; its consumer is not visible here — presumably
// the caller retries or reports those positions, TODO confirm.
indexReq.parent(hit.parent()); brb.add(indexReq); } catch (Exception ex) { logger.warn("Cannot add object:" + hit + " to bulkIndexing action." + ex.getMessage()); if (brb.numberOfActions() > 0) { BulkResponse rsp = brb.execute().actionGet(); if (rsp.hasFailures()) { List<Integer> list = new ArrayList<Integer>(rsp.getItems().length); for (BulkItemResponse br : rsp.getItems()) { if (br.isFailed()) list.add(br.getItemId());
/**
 * Adapts a listener for a single write response into a listener for a one-item
 * bulk response: unwraps the sole item, forwarding its document response on
 * success and its failure cause on failure.
 *
 * @param listener the single-response listener to adapt
 * @return a bulk-response listener delegating to {@code listener}
 */
public static <Response extends ReplicationResponse & WriteResponse> ActionListener<BulkResponse> wrapBulkResponse(ActionListener<Response> listener) {
    return ActionListener.wrap(bulkResponse -> {
        assert bulkResponse.getItems().length == 1 : "expected only one item in bulk request";
        BulkItemResponse item = bulkResponse.getItems()[0];
        if (item.isFailed()) {
            listener.onFailure(item.getFailure().getCause());
        } else {
            final DocWriteResponse docResponse = item.getResponse();
            listener.onResponse((Response) docResponse);
        }
    }, listener::onFailure);
}
/**
 * Executes the prepared bulk request and writes the resulting ids/versions back
 * into the given entities.
 *
 * <p>Failures are reported through {@code Exceptions.handle}; successful items get
 * their entity's {@code afterSave()} invoked. Timing is recorded in
 * {@code queryDuration} and as a micro-timing sample.
 *
 * @param entities entities in the same order as the queued bulk actions
 * @param brb      the bulk request to execute
 * @return the (mutated) input list
 */
private <E extends Entity> List<E> executeBulkUpdate(List<E> entities, BulkRequestBuilder brb) {
    Watch watch = Watch.start();
    BulkResponse response = brb.execute().actionGet();
    boolean failed = response.hasFailures();
    if (!failed && LOG.isFINE()) {
        LOG.FINE("BULK-SAVE SUCCEEDED");
    } else if (failed) {
        Exceptions.handle().withSystemErrorMessage(response.buildFailureMessage()).handle();
    }
    // NOTE(review): assumes response items are positionally aligned with 'entities'
    // (same order and count) — TODO confirm against the code that queued the actions.
    BulkItemResponse[] items = response.getItems();
    for (int i = 0; i < items.length; i++) {
        E entity = entities.get(i);
        entity.id = items[i].getId();
        entity.version = items[i].getVersion();
        if (!items[i].isFailed()) {
            entity.afterSave();
        }
        traceChange(entity);
    }
    queryDuration.addValue(watch.elapsedMillis());
    watch.submitMicroTiming("ES", "BULK-UPDATE");
    return entities;
}
/**
 * Deletes all documents with the given ids in one bulk request.
 *
 * @param ids document ids to delete; may be empty
 * @return the ids of every item in the bulk response (as reported by the server),
 *         or an empty list when {@code ids} is empty
 */
@Override
public List<String> deleteAllIds(final Collection<String> ids) {
    if (ids.isEmpty()) {
        return ImmutableList.of();
    }
    final BulkRequestBuilder bulkRequest = client.prepareBulk().setRefreshPolicy(policy.get());
    for (final String id : ids) {
        bulkRequest.add(client.prepareDelete(index, type, id));
    }
    final BulkResponse response = bulkRequest.execute().actionGet();
    final ImmutableList.Builder<String> deletedIds = ImmutableList.builder();
    for (final BulkItemResponse item : response.getItems()) {
        deletedIds.add(item.getId());
    }
    return deletedIds.build();
}
// NOTE(review): fragment — braces are unbalanced within this view. As shown, a failed
// item's flowFile is transferred to REL_FAILURE and then the next statements (the
// provenance send plus the REL_SUCCESS transfer) presumably sit in an else branch whose
// braces close outside this view — TODO confirm against the full method; transferring
// the same flowFile to both relationships would be a bug. Items are matched back to
// flowFiles positionally via getItemId(), iterated in reverse.
if (bulk.numberOfActions() > 0) { final BulkResponse response = bulk.execute().actionGet(); if (response.hasFailures()) { BulkItemResponse[] responses = response.getItems(); if (responses != null && responses.length > 0) { for (int i = responses.length - 1; i >= 0; i--) { final BulkItemResponse item = responses[i]; final FlowFile flowFile = flowFilesToTransfer.get(item.getItemId()); if (item.isFailed()) { logger.warn("Failed to insert {} into Elasticsearch due to {}, transferring to failure", new Object[]{flowFile, item.getFailure().getMessage()}); session.transfer(flowFile, REL_FAILURE); session.getProvenanceReporter().send(flowFile, response.remoteAddress().getAddress()); session.transfer(flowFile, REL_SUCCESS);
/**
 * Executes the given bulk request, logging a warning for each failed item.
 *
 * <p>No-op when the request holds no actions; failures are logged, never rethrown.
 *
 * @param bulkRequest the bulk request to execute
 */
protected void executeBulkRequest(BulkRequestBuilder bulkRequest) {
    if (bulkRequest.numberOfActions() == 0) {
        return;
    }
    BulkResponse bulkResponse = bulkRequest.execute().actionGet();
    if (!bulkResponse.hasFailures()) {
        return;
    }
    for (BulkItemResponse item : bulkResponse) {
        if (item.isFailed()) {
            LOG.warning(String.format("Unable to save Entity %s in %s/%s, cause: %s",
                    item.getId(), item.getIndex(), item.getType(), item.getFailureMessage()));
        }
    }
}
// NOTE(review): fragment — the enclosing method's boundaries are not visible (the for
// loop over jsonMapList and the result-building block are cut at both ends), so the
// code is left byte-identical. Versioning note: entries WITHOUT a version are indexed
// with VersionType.FORCE at version 1 (overwrites regardless of stored version), while
// entries WITH a version use VersionType.EXTERNAL — confirm FORCE is intended for the
// unversioned path. Entries whose id is null are silently skipped.
for (BulkWriteEntry be: jsonMapList) { if (be.getId() == null) continue; bulkRequest.add( elasticsearchClient.prepareIndex(indexName, be.getType(), be.getId()).setSource(be.getJsonMap()) .setVersion(be.getVersion() == null ? 1 : be.getVersion().longValue()) .setVersionType(be.getVersion() == null ? VersionType.FORCE : VersionType.EXTERNAL)); BulkResponse bulkResponse = bulkRequest.get(); BulkWriteResult result = new BulkWriteResult(); for (BulkItemResponse r: bulkResponse.getItems()) { String id = r.getId(); ActionWriteResponse response = r.getResponse(); if (response instanceof IndexResponse) { if (((IndexResponse) response).isCreated()) result.getCreated().add(id); String err = r.getFailureMessage(); if (err != null) { result.getErrors().put(id, err);
// NOTE(review): fragment with unbalanced braces — a bare `return;` immediately precedes
// the execute call and the `catch` has no visible `try`, so the real structure lies
// outside this view; left byte-identical. Review notes for the full method: if the
// catch block only logs and does not return/rethrow, `responses` may be iterated while
// null/unassigned after a failed execute — TODO confirm. TOO_MANY_REQUESTS failures set
// a flag (hasTooManyRequests), presumably for back-pressure/retry handling upstream.
if ( bulkRequest.numberOfActions() == 0 ) { return; responses = bulkRequest.execute().actionGet( ); } catch ( Throwable t ) { logger.error( "Unable to communicate with elasticsearch", t ); for ( BulkItemResponse response : responses ) { if ( response.isFailed() ) { logger.error( "Unable to index id={}, type={}, index={}, failureMessage={} ", response.getId(), response.getType(), response.getIndex(), response.getFailureMessage() ); if(response.getFailure()!=null && response.getFailure().getStatus() == RestStatus.TOO_MANY_REQUESTS){ hasTooManyRequests =true; errorString.append( response.getFailureMessage() ).append( "\n" );