@Override
public void execute() throws Exception {
    try {
        // Flush the accumulated bulk synchronously and fail delivery on any item error.
        final BulkResponse result = bulkRequestBuilder.execute().actionGet();
        if (result.hasFailures()) {
            throw new EventDeliveryException(result.buildFailureMessage());
        }
    } finally {
        // Always start a fresh builder, even when delivery failed.
        bulkRequestBuilder = client.prepareBulk();
    }
}
// BulkProcessor.Listener callback invoked after each bulk round-trip completes.
@Override
public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
    if (response.hasFailures()) {
        // Any item failure aborts the whole river: record IMPORT_FAILED status,
        // drop the pending requests, then shut down the processor and the river.
        logger.error("Bulk processor failed. {}", response.buildFailureMessage());
        MongoDBRiverHelper.setRiverStatus(client, definition.getRiverName(), Status.IMPORT_FAILED);
        request.requests().clear();
        bulkProcessor.close();
        river.close();
    } else {
        // Success: account for the indexed items and reset the per-bulk counters.
        documentCount.addAndGet(response.getItems().length);
        logStatistics(response.getTookInMillis());
        deletedDocuments.set(0);
        updatedDocuments.set(0);
        insertedDocuments.set(0);
        if (logger.isTraceEnabled()) {
            logger.trace("afterBulk - bulk [{}] success [{} items] [{} ms] total [{}]",
                    executionId, response.getItems().length, response.getTookInMillis(),
                    documentCount.get());
        }
    }
}
}; // closes the enclosing anonymous listener (its opening brace is outside this view)
throw new Exception(bulkItemResponses.buildFailureMessage());
onFailure(new RuntimeException("Partial failures in the batch: " + bulkItemResponses.buildFailureMessage())); } else if (logicalErrors) {
/**
 * Indexes the given document sources into {@code indexType} with one bulk request,
 * refreshing immediately so the documents are visible to subsequent searches.
 *
 * @param indexType target index/type pair
 * @param docs      document sources to index (ids are auto-generated)
 * @throws IllegalStateException if any item of the bulk request failed
 */
public void putDocuments(IndexType indexType, Map<String, Object>... docs) {
    try {
        BulkRequestBuilder bulk = SHARED_NODE.client().prepareBulk()
            .setRefreshPolicy(REFRESH_IMMEDIATE);
        for (Map<String, Object> doc : docs) {
            bulk.add(new IndexRequest(indexType.getIndex(), indexType.getType())
                .source(doc));
        }
        BulkResponse bulkResponse = bulk.get();
        if (bulkResponse.hasFailures()) {
            throw new IllegalStateException(bulkResponse.buildFailureMessage());
        }
    } catch (Exception e) {
        // Throwables.propagate is deprecated: rethrow unchecked exceptions as-is
        // (so the IllegalStateException above passes through unchanged) and wrap checked ones.
        Throwables.throwIfUnchecked(e);
        throw new RuntimeException(e);
    }
}
/**
 * Indexes the given {@code BaseDoc}s into {@code indexType} with one bulk request,
 * preserving each document's id, parent and routing, and refreshing immediately so
 * the documents are visible to subsequent searches.
 *
 * @param indexType target index/type pair
 * @param docs      documents to index
 * @throws IllegalStateException if any item of the bulk request failed
 */
public void putDocuments(IndexType indexType, BaseDoc... docs) {
    try {
        BulkRequestBuilder bulk = SHARED_NODE.client().prepareBulk()
            .setRefreshPolicy(REFRESH_IMMEDIATE);
        for (BaseDoc doc : docs) {
            bulk.add(new IndexRequest(indexType.getIndex(), indexType.getType(), doc.getId())
                .parent(doc.getParent())
                .routing(doc.getRouting())
                .source(doc.getFields()));
        }
        BulkResponse bulkResponse = bulk.get();
        if (bulkResponse.hasFailures()) {
            throw new IllegalStateException(bulkResponse.buildFailureMessage());
        }
    } catch (Exception e) {
        // Throwables.propagate is deprecated: rethrow unchecked exceptions as-is
        // (so the IllegalStateException above passes through unchanged) and wrap checked ones.
        Throwables.throwIfUnchecked(e);
        throw new RuntimeException(e);
    }
}
/**
 * Flushes the buffered bulk request if it contains at least one document, then
 * resets the buffer. Item failures are logged but do not abort processing
 * (best-effort, matching {@code updateDocuments()}).
 */
private void saveDocuments() {
    // Nothing buffered — skip the round-trip entirely.
    if (this.documentCount < 1) return;
    BulkResponse bulkResponse = bulkRequest.execute().actionGet();
    if (bulkResponse.hasFailures()) {
        // Fixed missing space after the colon for consistency with updateDocuments().
        log.error("error while bulk import: " + bulkResponse.buildFailureMessage());
    }
    // Start a fresh bulk request for the next batch.
    this.bulkRequest = this.esClient.prepareBulk();
}
logError( response.buildFailureMessage() );
/** Executes the buffered bulk update, logging (not throwing) any item failures, then resets the buffer. */
private void updateDocuments() {
    final BulkResponse result = bulkRequest.execute().actionGet();
    if (result.hasFailures()) {
        // Best-effort: failures are logged but processing continues.
        log.error("error while bulk update: " + result.buildFailureMessage());
    }
    // Fresh builder for the next batch of updates.
    this.bulkRequest = this.esClient.prepareBulk();
}
} // closes the enclosing class (declaration outside this view)
private void flushIndex(boolean force) { if ((force && this.bulkRequestCounter > 0) || this.bulkRequestCounter >= this.bulkSize) { BulkResponse bulkResponse = this.bulkRequest.execute().actionGet(); if (bulkResponse.hasFailures()) { throw new DukeException(bulkResponse.buildFailureMessage()); } // reset bulk this.bulkRequestCounter = 0; this.bulkRequest = this.client.prepareBulk(); } }
// Delegates to the wrapped BulkResponse's aggregated per-item failure message.
@Override
public String buildFailureMessage() {
    return this.bulkResponse.buildFailureMessage();
}
} // closes the enclosing class (declaration outside this view)
@Override public void afterBulk(final long executionId, // final BulkRequest request, // final BulkResponse response) { if (response.hasFailures()) { log.error("failed index data; {}", response.buildFailureMessage()); } }
/** Asserts the bulk response has no item failures, then checks version serializability. */
public static void assertNoFailures(BulkResponse response) {
    String description = "Unexpected ShardFailures: " + response.buildFailureMessage();
    assertThat(description, response.hasFailures(), is(false));
    assertVersionSerializable(response);
}
/** Flushes any remaining buffered actions; an empty bulk request is never executed. */
@Override
public void end() throws IOException {
    if (bulkRequest.numberOfActions() == 0) {
        return; // nothing left to flush
    }
    final BulkResponse result = bulkRequest.execute().actionGet();
    if (result.hasFailures()) {
        throw new IOException(result.buildFailureMessage());
    }
}
} // closes the enclosing class (declaration outside this view)
/** Async bulk callback: logs an error on item failures, otherwise logs the write summary. */
@Override
public void onResponse(final BulkResponse response) {
    if (response.hasFailures()) {
        logger.error("Failed to write a result on {}/{}: {}", index, type, response.buildFailureMessage());
        return;
    }
    logger.info("Wrote {} results in {}/{}.", currentQueue.size(), index, type);
}
/** Runs the given bulk request synchronously; throws if any item failed. */
@Override
public void executeESBulkRequest(BulkRequestBuilder esBulk) throws Exception {
    final BulkResponse bulkResponse = esBulk.execute().actionGet();
    if (!bulkResponse.hasFailures()) {
        return;
    }
    throw new ElasticsearchException(
            "Failed to execute ES index bulk update: " + bulkResponse.buildFailureMessage());
}
@Override
public void execute() throws Exception {
    try {
        final BulkResponse response = bulkRequestBuilder.execute().actionGet();
        if (response.hasFailures()) {
            // Surface the aggregated per-item failure text to the caller.
            throw new EventDeliveryException(response.buildFailureMessage());
        }
    } finally {
        // Reset the builder regardless of outcome so the next batch starts clean.
        bulkRequestBuilder = client.prepareBulk();
    }
}
@Override
public void execute() throws Exception {
    try {
        // Submit the pending bulk and block for the outcome.
        BulkResponse outcome = bulkRequestBuilder.execute().actionGet();
        if (outcome.hasFailures()) {
            String failureText = outcome.buildFailureMessage();
            throw new EventDeliveryException(failureText);
        }
    } finally {
        // A new builder is prepared whether or not delivery succeeded.
        bulkRequestBuilder = client.prepareBulk();
    }
}
/** Post-bulk hook: prints a summary line and, on failure, the aggregated failure message. */
@Override
public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
    int itemCount = response.getItems().length;
    console.println("Executed bulk of " + itemCount + " items");
    if (response.hasFailures()) {
        console.println(response.buildFailureMessage());
    }
}
/** Post-bulk hook: traces execution stats, warns with the failure message if any item failed. */
@Override
public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
    if (logger.isTraceEnabled()) {
        logger.trace("[{}] executed [{}]/[{}], took [{}]", executionId, request.numberOfActions(),
                new ByteSizeValue(request.estimatedSizeInBytes()), response.getTook());
    }
    if (!response.hasFailures()) {
        return;
    }
    logger.warn("[{}] failed to execute bulk request: {}", executionId, response.buildFailureMessage());
}