@Override
public void addEvent(Event event, IndexNameBuilder indexNameBuilder, String indexType, long ttlMs) throws Exception {
    // Lazily create the bulk builder on first use; execute() resets it.
    if (bulkRequestBuilder == null) {
        bulkRequestBuilder = client.prepareBulk();
    }
    final IndexRequestBuilder request;
    if (indexRequestBuilderFactory != null) {
        // A factory was configured: delegate request construction to it.
        request = indexRequestBuilderFactory.createIndexRequest(
                client, indexNameBuilder.getIndexPrefix(event), indexType, event);
    } else {
        // Default path: serialize the event body ourselves.
        request = client
                .prepareIndex(indexNameBuilder.getIndexName(event), indexType)
                .setSource(serializer.getContentBuilder(event).bytes());
    }
    // Only set a TTL when one was actually requested.
    if (ttlMs > 0) {
        request.setTTL(ttlMs);
    }
    bulkRequestBuilder.add(request);
}
@Override
public void execute() throws Exception {
    try {
        final BulkResponse result = bulkRequestBuilder.execute().actionGet();
        // Any item-level failure aborts delivery of the whole batch.
        if (result.hasFailures()) {
            throw new EventDeliveryException(result.buildFailureMessage());
        }
    } finally {
        // Always start a fresh bulk request, whether or not this one succeeded.
        bulkRequestBuilder = client.prepareBulk();
    }
}
// Fragment: error-handling tail of a bulk-execute method (enclosing try block and
// surrounding braces are not visible here, so the snippet is not self-contained).
// Skips execution when the bulk holds no actions, logs each failed item, and
// appears to begin back-pressure handling when ES reports TOO_MANY_REQUESTS (429).
// NOTE(review): the `return;` immediately before execute() looks garbled — the
// original likely had a closing brace between them; confirm against the full file.
if ( bulkRequest.numberOfActions() == 0 ) { return; responses = bulkRequest.execute().actionGet( ); } catch ( Throwable t ) { logger.error( "Unable to communicate with elasticsearch", t ); for ( BulkItemResponse response : responses ) { if ( response.isFailed() ) { logger.error( "Unable to index id={}, type={}, index={}, failureMessage={} ", response.getId(), response.getType(), response.getIndex(), response.getFailureMessage() ); if(response.getFailure()!=null && response.getFailure().getStatus() == RestStatus.TOO_MANY_REQUESTS){
/**
 * Commits all buffered actions in the given bulk request to Elasticsearch.
 *
 * @param bulkRequestBuilder the bulk request holding the pending actions
 * @return {@code true} when the bulk was empty or every item succeeded,
 *         {@code false} when at least one item failed
 */
private static boolean commitBulkRequest(BulkRequestBuilder bulkRequestBuilder) {
    if (bulkRequestBuilder.numberOfActions() == 0) {
        // Nothing buffered — treat as success.
        return true;
    }
    BulkResponse response = bulkRequestBuilder.execute().actionGet();
    // Evaluate once instead of calling hasFailures() twice as the original did.
    boolean hasFailures = response.hasFailures();
    if (hasFailures) {
        for (BulkItemResponse itemResponse : response.getItems()) {
            if (!itemResponse.isFailed()) {
                continue;
            }
            // A missing document (e.g. delete of an already-removed doc) is
            // expected churn; anything else is a genuine sync error.
            if (itemResponse.getFailure().getStatus() == RestStatus.NOT_FOUND) {
                logger.warn(itemResponse.getFailureMessage());
            } else {
                logger.error("ES sync commit error: {}", itemResponse.getFailureMessage());
            }
        }
    }
    return !hasFailures;
}
public void restore(Map<String,Map<String, List<IndexEntry>>> documents, KeyInformation.IndexRetriever informations, BaseTransaction tx) throws BackendException { BulkRequestBuilder bulk = client.prepareBulk(); int requests = 0; try { for (Map.Entry<String, Map<String, List<IndexEntry>>> stores : documents.entrySet()) { String store = stores.getKey(); for (Map.Entry<String, List<IndexEntry>> entry : stores.getValue().entrySet()) { String docID = entry.getKey(); List<IndexEntry> content = entry.getValue(); if (content == null || content.size() == 0) { // delete if (log.isTraceEnabled()) log.trace("Deleting entire document {}", docID); bulk.add(new DeleteRequest(indexName, store, docID)); requests++; } else { // Add if (log.isTraceEnabled()) log.trace("Adding entire document {}", docID); bulk.add(new IndexRequest(indexName, store, docID).source(getNewDocument(content, informations.get(store), IndexMutation.determineTTL(content)))); requests++; } } } if (requests > 0) bulk.execute().actionGet(); } catch (Exception e) { throw convert(e); } }
/**
 * Permanently delete index documents.
 *
 * @param type index type
 * @param ids  entity ids
 */
private void hardDeleteDoc(final String type, final List<String> ids) {
    try {
        // The whole bulk delete is retried as a unit by the retry policy.
        RETRY_ES_PUBLISH.call(() -> {
            final BulkRequestBuilder bulkRequest = client.prepareBulk();
            for (final String id : ids) {
                bulkRequest.add(client.prepareDelete(esIndex, type, id));
            }
            final BulkResponse bulkResponse = bulkRequest.execute().actionGet(esBulkCallTimeout);
            log.info("Deleting metadata of type {} with count {}", type, ids.size());
            if (bulkResponse.hasFailures()) {
                // Report each failed item individually; the batch itself is not retried
                // here — item failures are routed through the exception handler/metrics.
                for (final BulkItemResponse item : bulkResponse.getItems()) {
                    if (item.isFailed()) {
                        handleException("ElasticSearchUtil.bulkDelete.item", type, item.getId(),
                                item.getFailure().getCause(),
                                Metrics.CounterElasticSearchDelete.getMetricName());
                    }
                }
            }
            return null;
        });
    } catch (Exception e) {
        handleException("ElasticSearchUtil.bulkDelete", type, ids, e,
                Metrics.CounterElasticSearchBulkDelete.getMetricName());
    }
}
// Fragment: the interior of mutate() is heavily truncated — `mutation`, `docid`,
// `storename`, `ttl`, `update`, and `e` are declared in code not visible here, and
// the braces do not balance. Left byte-identical; do not edit without the full method.
// Intended flow (as far as visible): per-document, either queue a DeleteRequest, or
// queue a deletion script followed by a re-index of the additions, then execute the
// bulk and convert failures into backend exceptions.
// NOTE(review): the duplicated "Adding script" trace and the bare `brb.add(update)`
// suggest lines from two revisions were merged during extraction — confirm upstream.
@Override public void mutate(Map<String, Map<String, IndexMutation>> mutations, KeyInformation.IndexRetriever informations, BaseTransaction tx) throws BackendException { BulkRequestBuilder brb = client.prepareBulk(); if (mutation.isDeleted()) { log.trace("Deleting entire document {}", docid); brb.add(new DeleteRequest(indexName, storename, docid)); } else { String script = getDeletionScript(informations, storename, mutation); brb.add(client.prepareUpdate(indexName, storename, docid).setScript(script, ScriptService.ScriptType.INLINE)); log.trace("Adding script {}", script); brb.add(new IndexRequest(indexName, storename, docid) .source(getNewDocument(mutation.getAdditions(), informations.get(storename), ttl))); brb.add(update); log.trace("Adding script {}", script); BulkResponse bulkItemResponses = brb.execute().actionGet(); if (bulkItemResponses.hasFailures()) { boolean actualFailure = false; for(BulkItemResponse response : bulkItemResponses.getItems()) { throw new Exception(bulkItemResponses.buildFailureMessage()); log.error("Failed to execute ES query {}", brb.request().timeout(), e); throw convert(e);
// Fragment: bulk-indexes a collection of ElasticsearchDocument objects and throws
// SearchIndexException on the first failed item. Braces are unbalanced (the loop and
// failure-handling blocks are never closed), so this is left byte-identical.
// NOTE(review): `bulkRequest.setRefresh(true)` appears to sit INSIDE the per-document
// loop — it is idempotent but belongs before execute(); confirm against the full file.
BulkRequestBuilder bulkRequest = nodeClient.prepareBulk(); for (ElasticsearchDocument doc : documents) { String type = doc.getType(); String uid = doc.getUID(); bulkRequest.add(nodeClient.prepareIndex(index, type, uid).setSource(doc)); bulkRequest.setRefresh(true); BulkResponse bulkResponse = bulkRequest.execute().actionGet(); if (bulkResponse.hasFailures()) { for (BulkItemResponse item : bulkResponse.getItems()) { if (item.isFailed()) { logger.warn("Error updating {}: {}", item, item.getFailureMessage()); throw new SearchIndexException(item.getFailureMessage());
// Fragment: versioned bulk write. Entries without an id are skipped; entries without
// a caller-supplied version are force-written at version 1 (VersionType.FORCE), while
// versioned entries use optimistic concurrency (VersionType.EXTERNAL). Results are
// collected into a BulkWriteResult: created ids and a per-id error map.
// Braces are unbalanced (loops/blocks never closed) — left byte-identical.
BulkRequestBuilder bulkRequest = elasticsearchClient.prepareBulk(); for (BulkWriteEntry be: jsonMapList) { if (be.getId() == null) continue; bulkRequest.add( elasticsearchClient.prepareIndex(indexName, be.getType(), be.getId()).setSource(be.getJsonMap()) .setVersion(be.getVersion() == null ? 1 : be.getVersion().longValue()) .setVersionType(be.getVersion() == null ? VersionType.FORCE : VersionType.EXTERNAL)); BulkResponse bulkResponse = bulkRequest.get(); BulkWriteResult result = new BulkWriteResult(); for (BulkItemResponse r: bulkResponse.getItems()) { String id = r.getId(); ActionWriteResponse response = r.getResponse(); if (response instanceof IndexResponse) { if (((IndexResponse) response).isCreated()) result.getCreated().add(id); String err = r.getFailureMessage(); if (err != null) { result.getErrors().put(id, err);
public void doOperation(final Client client, final BulkRequestBuilder bulkRequest) {
    // Queue an index request for this entity document on the write alias;
    // execution is deferred to whoever owns the bulk request.
    bulkRequest.add(
            client.prepareIndex(writeAlias, IndexingUtils.ES_ENTITY_TYPE, documentId)
                    .setSource(data));
}
/**
 * Bulk-saves all entities, assigning each the id Elasticsearch reports back
 * (auto-generated when the entity had no id of its own).
 *
 * @param entities entities to index; returned list is positionally aligned
 * @return entities with their persisted ids
 * @throws IllegalStateException if any bulk item failed — previously failures
 *         were silently ignored, yielding entities with {@code null} ids
 */
@Override
public List<T> saveAll(final List<T> entities) {
    if (entities.isEmpty()) {
        return entities;
    }
    final BulkRequestBuilder bulk = client
            .prepareBulk()
            .setRefreshPolicy(policy.get());
    for (final T entity : entities) {
        final String source = serializer.apply(entity);
        final IndexRequestBuilder request = client
                .prepareIndex(index, type)
                .setSource(source, JSON);
        // Keep an explicit id when present; otherwise let ES auto-generate one.
        ofNullable(emptyToNull(entity.getId())).ifPresent(request::setId);
        bulk.add(request);
    }
    final BulkResponse response = bulk.execute().actionGet();
    // Fail loudly: a failed item has no usable id, and continuing would
    // return entities carrying null ids.
    if (response.hasFailures()) {
        throw new IllegalStateException(response.buildFailureMessage());
    }
    final BulkItemResponse[] items = response.getItems();
    final ImmutableList.Builder<T> saved = ImmutableList.builder();
    // Bulk responses are positionally aligned with the requests that produced them.
    for (int i = 0; i < items.length; i++) {
        final BulkItemResponse item = items[i];
        @SuppressWarnings("unchecked")
        final T entity = (T) entities.get(i).withId(item.getId());
        saved.add(entity);
    }
    return saved.build();
}
/**
 * Converts the entity maps to JSON and bulk-indexes them into Elasticsearch.
 * When a primary key is configured, it is used as the document id.
 *
 * @param entityList rows to index, one map per document
 * @throws FailedCommunicationException if any bulk item fails
 */
@Override
public void writeToNoSQL(List<Map<String, Object>> entityList) {
    JSONArray array = JSONArray.fromObject(entityList);
    for (int i = 0; i < array.size(); i++) {
        IndexRequestBuilder builder = client.prepareIndex(index_name, index_type);
        if (getPrimaryKey() != null) {
            builder.setId(((JSONObject) array.get(i)).getString(getPrimaryKey()));
        }
        builder.setSource(array.get(i).toString());
        bulkRequest.add(builder);
    }
    if (bulkRequest.numberOfActions() > 0) {
        long start = System.currentTimeMillis();
        BulkResponse response = bulkRequest.execute().actionGet();
        // Measure AFTER actionGet(): execute() only dispatches the request, so the
        // original t2-t1 excluded the blocking wait and under-reported the time.
        long elapsed = System.currentTimeMillis() - start;
        for (BulkItemResponse item : response) {
            if (item.isFailed()) {
                throw new FailedCommunicationException("Insertion to ES failed.");
            }
        }
        log.info("Time taken to Write " + bulkRequest.numberOfActions() + " documents to ES :" + elapsed + " ms");
    }
}
// Fragment: re-indexes search hits into a new type, returning the item indices of
// failed bulk entries. `indexReq` is built in code not visible here and the braces
// do not balance — left byte-identical.
// Visible flow: per-hit, carry the parent routing onto the index request, queue it,
// log-and-continue on per-hit build errors; then execute the bulk (if non-empty)
// and collect br.getItemId() for every failed item.
Collection<Integer> bulkUpdate(MySearchHits objects, String indexName, String newType, boolean withVersion, Client client) { BulkRequestBuilder brb = client.prepareBulk(); for (MySearchHit hit : objects.getHits()) { if (hit.id() == null || hit.id().isEmpty()) { indexReq.parent(hit.parent()); brb.add(indexReq); } catch (Exception ex) { logger.warn("Cannot add object:" + hit + " to bulkIndexing action." + ex.getMessage()); if (brb.numberOfActions() > 0) { BulkResponse rsp = brb.execute().actionGet(); if (rsp.hasFailures()) { List<Integer> list = new ArrayList<Integer>(rsp.getItems().length); for (BulkItemResponse br : rsp.getItems()) { if (br.isFailed()) list.add(br.getItemId());
// Fragment (NiFi processor): routes each FlowFile to REL_FAILURE or REL_SUCCESS based
// on its bulk item result, matching FlowFiles to items via item.getItemId(). Braces
// are unbalanced and the success-path else is missing from view — left byte-identical.
// NOTE(review): the bulk is checked/executed immediately after prepareBulk() with no
// visible add() calls; the population loop presumably precedes this fragment.
final BulkRequestBuilder bulk = esClient.get().prepareBulk(); if (bulk.numberOfActions() > 0) { final BulkResponse response = bulk.execute().actionGet(); if (response.hasFailures()) { BulkItemResponse[] responses = response.getItems(); if (responses != null && responses.length > 0) { for (int i = responses.length - 1; i >= 0; i--) { final BulkItemResponse item = responses[i]; final FlowFile flowFile = flowFilesToTransfer.get(item.getItemId()); if (item.isFailed()) { logger.warn("Failed to insert {} into Elasticsearch due to {}, transferring to failure", new Object[]{flowFile, item.getFailure().getMessage()}); session.transfer(flowFile, REL_FAILURE); session.getProvenanceReporter().send(flowFile, response.remoteAddress().getAddress()); session.transfer(flowFile, REL_SUCCESS);
// Fragment (NiFi processor, authenticated variant): attaches an Authorization header
// when a token is present, then routes FlowFiles by bulk item result. Braces are
// unbalanced — left byte-identical.
// NOTE(review): unlike the sibling fragment above in this file, this one maps
// FlowFiles by loop index `flowFilesToTransfer.get(i)` rather than item.getItemId();
// confirm the two orderings are equivalent in the full source.
final BulkRequestBuilder bulk = esClient.get().prepareBulk(); if (authToken != null) { bulk.putHeader("Authorization", authToken); final BulkResponse response = bulk.execute().actionGet(); if (response.hasFailures()) { BulkItemResponse[] responses = response.getItems(); if (responses != null && responses.length > 0) { for (int i = responses.length - 1; i >= 0; i--) { final FlowFile flowFile = flowFilesToTransfer.get(i); if (responses[i].isFailed()) { logger.error("Failed to insert {} into Elasticsearch due to {}, transferring to failure", new Object[]{flowFile, responses[i].getFailure().getMessage()}); session.transfer(flowFile, REL_FAILURE); session.getProvenanceReporter().send(flowFile, context.getProperty(HOSTS).evaluateAttributeExpressions().getValue() + "/" + responses[i].getIndex()); session.transfer(flowFile, REL_SUCCESS);
/**
 * Bulk-deletes the demo products with ids "1", "2" and "3" from
 * product_index/product in a single round trip, logging each item's version.
 *
 * @param transportClient connected Elasticsearch transport client
 */
private static void batchDelete(TransportClient transportClient) throws IOException {
    BulkRequestBuilder bulk = transportClient.prepareBulk();
    // Same three fixed ids as before, queued via a loop instead of repetition.
    for (String id : new String[] {"1", "2", "3"}) {
        bulk.add(transportClient.prepareDelete("product_index", "product", id));
    }
    BulkResponse bulkResponse = bulk.get();
    for (BulkItemResponse item : bulkResponse.getItems()) {
        logger.info("--------------------------------version= " + item.getVersion());
    }
}
// Fragment (test/benchmark helper): indexes batchSize generated documents, with either
// auto-generated or explicit ids, asserting that each returned id is unique. The
// ElasticsearchException message and several blocks are truncated mid-expression and
// braces do not balance — left byte-identical.
// The trailing single-document prepareIndex().get() belongs to a separate code path
// (likely a non-bulk branch) whose guard is not visible here.
BulkRequestBuilder bulkRequest = client.prepareBulk(); for (int i = 0; i < batchSize; i++) { id = idGenerator.incrementAndGet(); if (useAutoGeneratedIDs) { bulkRequest.add(client.prepareIndex(index, type).setSource(generateSource(id, threadRandom))); } else { bulkRequest.add(client.prepareIndex(index, type, Long.toString(id)).setSource(generateSource(id, threadRandom))); BulkResponse bulkResponse = bulkRequest.get(); for (BulkItemResponse bulkItemResponse : bulkResponse) { if (!bulkItemResponse.isFailed()) { boolean add = ids.add(bulkItemResponse.getId()); assert add : "ID: " + bulkItemResponse.getId() + " already used"; } else { throw new ElasticsearchException("bulk request failure, id: [" IndexResponse indexResponse = client.prepareIndex(index, type).setSource(generateSource(id, threadRandom)).get(); boolean add = ids.add(indexResponse.getId()); assert add : "ID: " + indexResponse.getId() + " already used";
/**
 * Bulk-indexes the given documents with an immediate refresh so they are
 * searchable as soon as the call returns (test-fixture helper).
 *
 * @param indexType target index/type pair
 * @param docs      document sources to index
 * @throws IllegalStateException if any bulk item failed
 * @throws RuntimeException      wrapping any checked exception from the client
 */
public void putDocuments(IndexType indexType, Map<String, Object>... docs) {
    try {
        BulkRequestBuilder bulk = SHARED_NODE.client().prepareBulk()
                .setRefreshPolicy(REFRESH_IMMEDIATE);
        for (Map<String, Object> doc : docs) {
            bulk.add(new IndexRequest(indexType.getIndex(), indexType.getType())
                    .source(doc));
        }
        BulkResponse bulkResponse = bulk.get();
        if (bulkResponse.hasFailures()) {
            throw new IllegalStateException(bulkResponse.buildFailureMessage());
        }
    } catch (RuntimeException e) {
        // Guava's Throwables.propagate is deprecated; this preserves its
        // semantics — rethrow unchecked as-is, wrap checked in RuntimeException.
        throw e;
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
public void addElement(Element element, String index, String routing, boolean create) {
    // Build the index request for this graph element keyed by its id.
    final IndexRequestBuilder request = client
            .prepareIndex(index, element.label(), element.id().toString())
            .setSource(propertiesMap(element))
            .setRouting(routing)
            .setCreate(create);
    if (bulkRequest == null) {
        // No bulk in progress: execute the single request immediately.
        request.execute().actionGet();
    } else {
        bulkRequest.add(request);
    }
    revision++;
}
/**
 * Bulk-deletes the given ids, returning the id of every bulk item processed.
 */
@Override
public List<String> deleteAllIds(final Collection<String> ids) {
    // Short-circuit to avoid issuing an empty bulk request.
    if (ids.isEmpty()) {
        return ImmutableList.of();
    }
    final BulkRequestBuilder bulk = client
            .prepareBulk()
            .setRefreshPolicy(policy.get());
    ids.forEach(id -> bulk.add(client.prepareDelete(index, type, id)));
    final BulkResponse response = bulk.execute().actionGet();
    final ImmutableList.Builder<String> deleted = ImmutableList.builder();
    for (final BulkItemResponse item : response.getItems()) {
        deleted.add(item.getId());
    }
    return deleted.build();
}