@Override
public DeleteOperationResponse deleteById(String index, String type, List<String> ids) throws IOException {
    BulkRequest bulk = new BulkRequest();
    for (String id : ids) {
        bulk.add(new DeleteRequest(index, type, id));
    }
    BulkResponse response = highLevelClient.bulk(bulk);
    return new DeleteOperationResponse(response.getTookInMillis());
}
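deleteById above only returns the took time; item-level failures (for example, version conflicts) are reported per item on the BulkResponse rather than as an IOException. A minimal sketch of the extra check, reusing the response variable from the snippet and a hypothetical log field:

// hedged sketch: per-item failure inspection the snippet above skips
if (response.hasFailures()) {
    for (BulkItemResponse item : response.getItems()) {
        if (item.isFailed()) {
            // each failed delete carries its own failure reason
            log.warn("Delete of id {} failed: {}", item.getId(), item.getFailureMessage());
        }
    }
}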
BulkRequest bulkRequest = new BulkRequest();
@Override
public void bulkUpdate(List<UpdateQuery> queries) {
    BulkRequest bulkRequest = new BulkRequest();
    for (UpdateQuery query : queries) {
        bulkRequest.add(prepareUpdate(query));
    }
    try {
        checkForBulkUpdateFailure(client.bulk(bulkRequest));
    } catch (IOException e) {
        throw new ElasticsearchException("Error while executing bulk request: " + bulkRequest.toString(), e);
    }
}
@Override
public void bulkIndex(List<IndexQuery> queries) {
    BulkRequest bulkRequest = new BulkRequest();
    for (IndexQuery query : queries) {
        bulkRequest.add(prepareIndex(query));
    }
    try {
        checkForBulkUpdateFailure(client.bulk(bulkRequest));
    } catch (IOException e) {
        throw new ElasticsearchException("Error while executing bulk request: " + bulkRequest.toString(), e);
    }
}
protected Pair<BulkRequest, FutureCallbackHolder> prepareBatch(Batch<Object> batch, WriteCallback callback) {
    BulkRequest bulkRequest = new BulkRequest();
    final StringBuilder stringBuilder = new StringBuilder();
    for (Object record : batch.getRecords()) {
        try {
            byte[] serializedBytes = this.serializer.serializeToJson(record);
            log.debug("serialized record: {}", serializedBytes);
            IndexRequest indexRequest = new IndexRequest(this.indexName, this.indexType)
                .source(serializedBytes, 0, serializedBytes.length, XContentType.JSON);
            if (this.idMappingEnabled) {
                String id = this.typeMapper.getValue(this.idFieldName, record);
                indexRequest.id(id);
                stringBuilder.append(";").append(id);
            }
            bulkRequest.add(indexRequest);
        } catch (Exception e) {
            // pass the throwable itself so the stack trace is logged
            log.error("Encountered exception while serializing record", e);
        }
    }
    FutureCallbackHolder futureCallbackHolder = new FutureCallbackHolder(callback,
        exception -> log.error("Batch: {} failed on ids: {} with exception {}",
            batch.getId(), stringBuilder.toString(), exception),
        this.malformedDocPolicy);
    return new Pair<>(bulkRequest, futureCallbackHolder);
}
nextBulkRequest = new BulkRequest();
@Override
public IndexOperationResponse add(List<IndexOperationRequest> operations) throws IOException {
    BulkRequest bulkRequest = new BulkRequest();
    for (IndexOperationRequest or : operations) {
        IndexRequest indexRequest = new IndexRequest(or.getIndex(), or.getType(), or.getId())
            .source(or.getFields());
        bulkRequest.add(indexRequest);
    }
    bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
    BulkResponse response = highLevelClient.bulk(bulkRequest);
    return new IndexOperationResponse(response.getTookInMillis(), response.getIngestTookInMillis());
}
/**
 * Creates a new bulk request.
 */
public static BulkRequest bulkRequest() {
    return new BulkRequest();
}
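A usage sketch for the static factory above, assuming it is exposed on a Requests-style helper class (as in org.elasticsearch.client.Requests; the call site here is hypothetical):

// Requests is assumed to be the class hosting bulkRequest() and indexRequest()
BulkRequest bulk = Requests.bulkRequest()
        .add(Requests.indexRequest("posts").type("doc").id("1")
                .source(XContentType.JSON, "field", "foo"));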
public BulkRequestBuilder(ElasticsearchClient client, BulkAction action) {
    super(client, action, new BulkRequest());
}
@Override
public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
    while (nextBulkRequest.numberOfActions() > 0) {
        // wait until we are allowed to continue with the flushing
        flushLatch.await();

        // create a copy of the accumulated mock requests, so that
        // re-added requests from the failure handler are included in the next bulk
        BulkRequest currentBulkRequest = nextBulkRequest;
        nextBulkRequest = new BulkRequest();

        listener.beforeBulk(123L, currentBulkRequest);

        if (nextBulkFailure == null) {
            BulkItemResponse[] mockResponses = new BulkItemResponse[currentBulkRequest.requests().size()];
            for (int i = 0; i < currentBulkRequest.requests().size(); i++) {
                Throwable mockItemFailure = mockItemFailuresList.get(i);
                if (mockItemFailure == null) {
                    // the mock response for the item is success
                    mockResponses[i] = new BulkItemResponse(i, "opType", mock(ActionResponse.class));
                } else {
                    // the mock response for the item is failure
                    mockResponses[i] = new BulkItemResponse(i, "opType",
                        new BulkItemResponse.Failure("index", "type", "id", mockItemFailure));
                }
            }
            listener.afterBulk(123L, currentBulkRequest, new BulkResponse(mockResponses, 1000L));
        } else {
            listener.afterBulk(123L, currentBulkRequest, nextBulkFailure);
        }
    }
    return null;
}
}).when(mockBulkProcessor).flush();
BulkRequest request = new BulkRequest();
List<String> ids = new ArrayList<>();
private Supplier<BulkRequest> createBulkRequestWithGlobalDefaults() {
    return () -> new BulkRequest(globalIndex, globalType)
        .pipeline(globalPipeline)
        .routing(globalRouting);
}
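The global index, type, pipeline, and routing set here act as defaults: child requests that leave those fields unset inherit them, while explicit per-request values still win. An illustrative use, assuming the same fields as the snippet:

// requests added without an explicit index/type fall back to the globals
BulkRequest bulk = createBulkRequestWithGlobalDefaults().get();
bulk.add(new IndexRequest().id("1").source(XContentType.JSON, "field", "foo"));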
public BulkRequestBuilder(ElasticsearchClient client, BulkAction action,
        @Nullable String globalIndex, @Nullable String globalType) {
    super(client, action, new BulkRequest(globalIndex, globalType));
}
private BulkRequest createBulkRequestForRetry(BulkResponse bulkItemResponses) {
    BulkRequest requestToReissue = new BulkRequest();
    int index = 0;
    for (BulkItemResponse bulkItemResponse : bulkItemResponses.getItems()) {
        if (bulkItemResponse.isFailed()) {
            requestToReissue.add(currentBulkRequest.requests().get(index));
        }
        index++;
    }
    return requestToReissue;
}
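One way to drive a retry helper like this is a bounded loop over the failed items; a minimal sketch, assuming the client.bulk(...) call style used elsewhere in this section and an illustrative maxRetries:

BulkResponse response = client.bulk(currentBulkRequest);
int attempts = 0;
while (response.hasFailures() && attempts++ < maxRetries) {
    // reissue only the items that failed on the previous attempt
    currentBulkRequest = createBulkRequestForRetry(response);
    response = client.bulk(currentBulkRequest);
}

A production version would usually retry only retryable failures (such as rejections) and back off between attempts.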
public void bulkDocument(String type, Map<String, Map<String, Object>> sources) {
    try {
        if (this.instance() == null) {
            return;
        }
        BulkRequest requests = new BulkRequest();
        int count = 0;
        for (Map.Entry<String, Map<String, Object>> entry : sources.entrySet()) {
            count++;
            IndexRequest request = new IndexRequest(name, type, entry.getKey());
            request.source(entry.getValue());
            requests.add(request);
            // flush every 1000 documents; start a fresh request rather than
            // clearing the internal list, which leaves stale size accounting
            if (count % 1000 == 0) {
                client.bulk(requests, RequestOptions.DEFAULT);
                requests = new BulkRequest();
                count = 0;
            }
        }
        if (requests.numberOfActions() > 0) {
            client.bulk(requests, RequestOptions.DEFAULT);
        }
    } catch (IOException e) {
        log.error(e.getMessage(), e);
    }
}
public void bulkDeleteDocument(String type, List<Integer> ids) {
    try {
        if (this.instance() == null) {
            return;
        }
        BulkRequest requests = new BulkRequest();
        int count = 0;
        for (Integer id : ids) {
            count++;
            requests.add(new DeleteRequest(name, type, String.valueOf(id)));
            // flush every 1000 deletes and start a fresh request
            if (count % 1000 == 0) {
                client.bulk(requests, RequestOptions.DEFAULT);
                requests = new BulkRequest();
                count = 0;
            }
        }
        if (requests.numberOfActions() > 0) {
            client.bulk(requests, RequestOptions.DEFAULT);
        }
    } catch (IOException e) {
        log.error(e.getMessage(), e);
    }
}
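Both chunking loops above re-implement what BulkProcessor provides out of the box. A sketch of the equivalent setup with the high-level REST client (the empty listener bodies are illustrative):

BulkProcessor processor = BulkProcessor.builder(
        (request, bulkListener) -> client.bulkAsync(request, RequestOptions.DEFAULT, bulkListener),
        new BulkProcessor.Listener() {
            @Override public void beforeBulk(long executionId, BulkRequest request) { }
            @Override public void afterBulk(long executionId, BulkRequest request, BulkResponse response) { }
            @Override public void afterBulk(long executionId, BulkRequest request, Throwable failure) { }
        })
    .setBulkActions(1000) // flush every 1000 actions, as the manual loops do
    .build();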
BulkRequest getBulkRequest() {
    if (itemResponses.isEmpty()) {
        return bulkRequest;
    } else {
        BulkRequest modifiedBulkRequest = new BulkRequest();
        modifiedBulkRequest.setRefreshPolicy(bulkRequest.getRefreshPolicy());
        modifiedBulkRequest.waitForActiveShards(bulkRequest.waitForActiveShards());
        modifiedBulkRequest.timeout(bulkRequest.timeout());

        int slot = 0;
        List<DocWriteRequest<?>> requests = bulkRequest.requests();
        originalSlots = new int[requests.size()]; // oversize, but that's ok
        for (int i = 0; i < requests.size(); i++) {
            DocWriteRequest<?> request = requests.get(i);
            if (failedSlots.get(i) == false) {
                modifiedBulkRequest.add(request);
                originalSlots[slot++] = i;
            }
        }
        return modifiedBulkRequest;
    }
}
@Override
public BulkDocumentWriterResults<D> write() {
    BulkDocumentWriterResults<D> results = new BulkDocumentWriterResults<>();
    // capture the batch size now; the finally block clears the document list
    // before the debug log below runs
    int batchSize = documents.size();
    try {
        // create an index request for each document
        BulkRequest bulkRequest = new BulkRequest();
        bulkRequest.setRefreshPolicy(refreshPolicy);
        for (Indexable doc : documents) {
            DocWriteRequest<?> request = createRequest(doc.document, doc.index);
            bulkRequest.add(request);
        }

        // submit the request and handle the response
        BulkResponse bulkResponse = client.getHighLevelClient().bulk(bulkRequest);
        handleBulkResponse(bulkResponse, documents, results);
    } catch (IOException e) {
        // assume all documents have failed
        for (Indexable indexable : documents) {
            D failed = indexable.document;
            results.addFailure(failed, e, ExceptionUtils.getRootCauseMessage(e));
        }
        LOG.error("Failed to submit bulk request; all documents failed", e);
    } finally {
        // flush all documents no matter which ones succeeded or failed
        documents.clear();
    }
    LOG.debug("Wrote document(s) to Elasticsearch; batchSize={}, success={}, failed={}",
        batchSize, results.getSuccesses().size(), results.getFailures().size());
    return results;
}
public static BulkRequest toSingleItemBulkRequest(ReplicatedWriteRequest request) {
    BulkRequest bulkRequest = new BulkRequest();
    bulkRequest.add((DocWriteRequest) request);
    bulkRequest.setRefreshPolicy(request.getRefreshPolicy());
    bulkRequest.timeout(request.timeout());
    bulkRequest.waitForActiveShards(request.waitForActiveShards());
    // the wrapping bulk request now owns the refresh, so disable it on the original
    request.setRefreshPolicy(WriteRequest.RefreshPolicy.NONE);
    return bulkRequest;
}
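An illustrative call site, relying on IndexRequest being a ReplicatedWriteRequest (the index, type, and id values here are placeholders):

IndexRequest single = new IndexRequest("posts", "doc", "1")
        .source(XContentType.JSON, "field", "foo");
BulkRequest wrapped = toSingleItemBulkRequest(single); // refresh now handled by the bulk wrapper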
public void bulk() throws IOException {
    BulkRequest request = new BulkRequest();
    request.add(new IndexRequest("posts", "doc", "1")
        .source(XContentType.JSON, "field", "foo"));
    request.add(new IndexRequest("posts", "doc", "2")
        .source(XContentType.JSON, "field", "bar"));
    request.add(new IndexRequest("posts", "doc", "3")
        .source(XContentType.JSON, "field", "baz"));
    BulkResponse bulkResponse = client.bulk(request);
}
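A bulk request is not limited to one operation type; index, update, and delete actions can be mixed in a single call. A short sketch in the same style as the snippet above (same hypothetical client, placeholder ids):

BulkRequest mixed = new BulkRequest();
mixed.add(new DeleteRequest("posts", "doc", "3"));
mixed.add(new UpdateRequest("posts", "doc", "2")
    .doc(XContentType.JSON, "other", "test"));
mixed.add(new IndexRequest("posts", "doc", "4")
    .source(XContentType.JSON, "field", "baz"));
BulkResponse mixedResponse = client.bulk(mixed);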