/**
 * Builds an {@link UpdateRequest} whose doc and upsert payloads are both the given
 * document, so the call updates the document if present and inserts it otherwise.
 */
@Override
public UpdateRequest createUpdateRequest(
        String index, String docType, String key, XContentType contentType, byte[] document) {
    UpdateRequest updateRequest = new UpdateRequest(index, docType, key);
    updateRequest.doc(document, contentType);
    updateRequest.upsert(document, contentType);
    return updateRequest;
}
/**
 * Execute an {@link UpdateRequest} against the {@literal update} API to alter a document.
 * The consumer receives a fresh request to populate before it is sent.
 *
 * @param consumer never {@literal null}.
 * @see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-update.html">Update API on
 *      elastic.co</a>
 * @return the {@link Mono} emitting the {@link UpdateResponse}.
 */
default Mono<UpdateResponse> update(Consumer<UpdateRequest> consumer) {
    UpdateRequest updateRequest = new UpdateRequest();
    consumer.accept(updateRequest);
    return update(updateRequest);
}
/**
 * Indexes a workflow summary as an upsert; failures are logged and swallowed so
 * indexing problems never break the calling workflow execution path.
 */
@Override
public void indexWorkflow(Workflow workflow) {
    String workflowId = workflow.getWorkflowId();
    try {
        byte[] payload = objectMapper.writeValueAsBytes(new WorkflowSummary(workflow));
        UpdateRequest request = new UpdateRequest(indexName, WORKFLOW_DOC_TYPE, workflowId);
        request.doc(payload, XContentType.JSON);
        request.upsert(payload, XContentType.JSON);
        request.retryOnConflict(5);
        updateWithRetry(request, "Index workflow into doc_type workflow");
    } catch (Exception e) {
        // Best-effort: indexing must not fail the workflow itself.
        logger.error("Failed to index workflow: {}", workflowId, e);
    }
}
// Converts an UpdateRequest into a low-level REST Request for the _update endpoint,
// copying routing/timeout/refresh/retry/version parameters onto the request and then
// validating that doc and upsert payloads share one content type.
// NOTE(review): this chunk is truncated mid-statement, and `parameters` / `xContentType`
// are declared outside the visible span — presumably a params wrapper around `request`
// and the resolved body content type; confirm against the full file before editing.
public static Request update(UpdateRequest updateRequest) { String endpoint = endpoint(updateRequest.index(), updateRequest.type(), updateRequest.id(), "_update"); Request request = new Request(HttpMethod.POST.name(), endpoint); parameters.withRouting(updateRequest.routing()); parameters.withTimeout(updateRequest.timeout()); parameters.withRefreshPolicy(updateRequest.getRefreshPolicy()); parameters.withWaitForActiveShards(updateRequest.waitForActiveShards()); parameters.withDocAsUpsert(updateRequest.docAsUpsert()); parameters.withFetchSourceContext(updateRequest.fetchSource()); parameters.withRetryOnConflict(updateRequest.retryOnConflict()); parameters.withVersion(updateRequest.version()); parameters.withVersionType(updateRequest.versionType()); if (updateRequest.doc() != null) { xContentType = updateRequest.doc().getContentType(); if (updateRequest.upsertRequest() != null) { XContentType upsertContentType = updateRequest.upsertRequest().getContentType(); if ((xContentType != null) && (xContentType != upsertContentType)) { throw new IllegalStateException("Update request cannot have different content types for doc [" + xContentType
private UpdateRequest prepareUpdate(UpdateQuery query) { String indexName = hasText(query.getIndexName()) ? query.getIndexName() : getPersistentEntityFor(query.getClazz()).getIndexName(); String type = hasText(query.getType()) ? query.getType() : getPersistentEntityFor(query.getClazz()).getIndexType(); Assert.notNull(indexName, "No index defined for Query"); Assert.notNull(type, "No type define for Query"); Assert.notNull(query.getId(), "No Id define for Query"); Assert.notNull(query.getUpdateRequest(), "No IndexRequest define for Query"); UpdateRequest updateRequest = new UpdateRequest(indexName, type, query.getId()); updateRequest.routing(query.getUpdateRequest().routing()); if (query.getUpdateRequest().script() == null) { // doc if (query.DoUpsert()) { updateRequest.docAsUpsert(true).doc(query.getUpdateRequest().doc()); } else { updateRequest.doc(query.getUpdateRequest().doc()); } } else { // or script updateRequest.script(query.getUpdateRequest().script()); } return updateRequest; }
/**
 * Assembles the {@link UpdateQuery} from the builder state. When an IndexRequest was
 * supplied, it becomes the doc of the (possibly freshly created) UpdateRequest.
 */
public UpdateQuery build() {
    UpdateQuery query = new UpdateQuery();
    query.setId(id);
    query.setIndexName(indexName);
    query.setType(type);
    query.setClazz(clazz);
    if (indexRequest != null) {
        if (updateRequest == null) {
            updateRequest = new UpdateRequest();
        }
        updateRequest.doc(indexRequest);
    }
    query.setUpdateRequest(updateRequest);
    query.setDoUpsert(doUpsert);
    return query;
}
}
// Fragment of bulk-request serialization for an update action: enforces that doc and
// upsert share one content type, then writes optional retry_on_conflict and _source
// metadata fields.
// NOTE(review): the if-blocks here are cut off mid-structure (no closing braces in view)
// and `bulkContentType` / `metadata` are declared outside this span — do not edit in
// isolation; confirm against the enclosing serialization loop.
if (updateRequest.doc() != null) { bulkContentType = enforceSameContentType(updateRequest.doc(), bulkContentType); if (updateRequest.upsertRequest() != null) { bulkContentType = enforceSameContentType(updateRequest.upsertRequest(), bulkContentType); if (updateRequest.retryOnConflict() > 0) { metadata.field("retry_on_conflict", updateRequest.retryOnConflict()); if (updateRequest.fetchSource() != null) { metadata.field("_source", updateRequest.fetchSource());
// Reads one FlowFile's content, flattens all newlines to spaces so the JSON stays a
// single bulk-API line, then queues it on the bulk request as an index, upsert
// (update with docAsUpsert), or plain update operation depending on indexOp.
// Unknown operations are rejected with an IOException.
// NOTE(review): charset, indexOp, index, docType, id, bulk and esClient are fields of
// the enclosing (not visible) processor/callback; the trailing "});" closes that
// anonymous class and method call — keep it intact.
@Override public void process(final InputStream in) throws IOException { String json = IOUtils.toString(in, charset) .replace("\r\n", " ").replace('\n', ' ').replace('\r', ' '); if (indexOp.equalsIgnoreCase("index")) { bulk.add(esClient.get().prepareIndex(index, docType, id) .setSource(json.getBytes(charset))); } else if (indexOp.equalsIgnoreCase("upsert")) { bulk.add(esClient.get().prepareUpdate(index, docType, id) .setDoc(json.getBytes(charset)) .setDocAsUpsert(true)); } else if (indexOp.equalsIgnoreCase("update")) { bulk.add(esClient.get().prepareUpdate(index, docType, id) .setDoc(json.getBytes(charset))); } else { throw new IOException("Index operation: " + indexOp + " not supported."); } } });
/**
 * Executes the update with RETRY_COUNT retries. When every attempt fails, the error
 * is recorded in monitoring and logged — never rethrown to the caller.
 */
private void updateWithRetry(UpdateRequest request, String operationDescription) {
    RetryUtil<UpdateResponse> retryUtil = new RetryUtil<>();
    try {
        retryUtil.retryOnException(
                () -> elasticSearchClient.update(request).actionGet(),
                null,
                null,
                RETRY_COUNT,
                operationDescription,
                "updateWithRetry");
    } catch (Exception e) {
        Monitors.error(className, "index");
        logger.error("Failed to index {} for request type: {}", request.index(), request.type(), e);
    }
}
/**
 * Queues partial-document updates on the bulk request. When the mapping carries an
 * explicit _id the primary-key value is used as the document id directly; otherwise
 * the documents are located by a term query on the mapped primary key and every hit
 * is updated.
 */
public void append4Update(BulkRequestBuilder bulkRequestBuilder, ESMapping mapping, Object pkVal,
                          Map<String, Object> esFieldData) {
    String index = mapping.get_index();
    String type = mapping.get_type();
    if (mapping.get_id() == null) {
        // No explicit _id mapping: resolve target documents by primary-key lookup.
        SearchResponse response = transportClient.prepareSearch(index)
                .setTypes(type)
                .setQuery(QueryBuilders.termQuery(mapping.getPk(), pkVal))
                .setSize(MAX_BATCH_SIZE)
                .get();
        for (SearchHit hit : response.getHits()) {
            bulkRequestBuilder.add(
                    transportClient.prepareUpdate(index, type, hit.getId()).setDoc(esFieldData));
        }
    } else {
        bulkRequestBuilder.add(
                transportClient.prepareUpdate(index, type, pkVal.toString()).setDoc(esFieldData));
    }
}
// Fragment of a mutation-application branch: builds an inline deletion script for the
// store's document and adds it to the bulk request; when an upsert is needed, a new
// document built from the mutation's additions is attached as the upsert payload.
// NOTE(review): this span opens with "} else {" and never closes — the enclosing
// if/else and the fate of the `update` builder (it appears to be prepared twice, once
// added to `brb` and once kept locally) are outside this view; verify in the full file.
} else { String script = getDeletionScript(informations, storename, mutation); brb.add(client.prepareUpdate(indexName, storename, docid).setScript(script, ScriptService.ScriptType.INLINE)); log.trace("Adding script {}", script); UpdateRequestBuilder update = client.prepareUpdate(indexName, storename, docid).setScript(script, ScriptService.ScriptType.INLINE); if (needUpsert) { XContentBuilder doc = getNewDocument(mutation.getAdditions(), informations.get(storename), ttl); update.setUpsert(doc);
/**
 * Runs the given {@link UpdateQuery} against the cluster, wrapping any transport
 * failure in an {@link ElasticsearchException} that carries the failed request.
 */
@Override
public UpdateResponse update(UpdateQuery query) {
    UpdateRequest updateRequest = prepareUpdate(query);
    try {
        return client.update(updateRequest);
    } catch (IOException e) {
        throw new ElasticsearchException("Error while update for request: " + updateRequest.toString(), e);
    }
}
/**
 * Wraps the tuple's value in a {"data": ...} document keyed by the tuple's first field
 * and returns an upsert-style UpdateRequest for the configured index/type.
 */
private static UpdateRequest createUpdateRequest(Tuple2<String, String> element, ParameterTool parameterTool) {
    Map<String, Object> payload = new HashMap<>();
    payload.put("data", element.f1);
    UpdateRequest request = new UpdateRequest(
            parameterTool.getRequired("index"),
            parameterTool.getRequired("type"),
            element.f0);
    return request.doc(payload).upsert(payload);
}
}
/**
 * Upserts an event-execution record, id'd by name.event.messageId.executionId so
 * re-indexing the same execution is idempotent. Failures are logged, not rethrown.
 */
@Override
public void addEventExecution(EventExecution eventExecution) {
    try {
        String id = String.join(".",
                eventExecution.getName(),
                eventExecution.getEvent(),
                eventExecution.getMessageId(),
                eventExecution.getId());
        byte[] payload = objectMapper.writeValueAsBytes(eventExecution);
        UpdateRequest request = new UpdateRequest(logIndexName, EVENT_DOC_TYPE, id);
        request.doc(payload, XContentType.JSON);
        request.upsert(payload, XContentType.JSON);
        request.retryOnConflict(5);
        updateWithRetry(request, "Update Event execution for doc_type event");
    } catch (Exception e) {
        // Best-effort: event indexing must not break event processing.
        logger.error("Failed to index event execution: {}", eventExecution.getId(), e);
    }
}
/**
 * Applies a partial update of key/value pairs to the indexed workflow document,
 * retrying transient failures up to RETRY_COUNT times.
 *
 * @throws ApplicationException when keys and values differ in length
 */
@Override
public void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values) {
    if (keys.length != values.length) {
        throw new ApplicationException(Code.INVALID_INPUT, "Number of keys and values do not match");
    }
    UpdateRequest request = new UpdateRequest(indexName, WORKFLOW_DOC_TYPE, workflowInstanceId);
    // Build the doc by hand instead of Collectors.toMap: toMap throws a
    // NullPointerException when any value is null and IllegalStateException on
    // duplicate keys — a null update value would previously crash this method.
    // (Fully-qualified to avoid touching the file's import block.)
    Map<String, Object> source = new java.util.HashMap<>();
    for (int i = 0; i < keys.length; i++) {
        source.put(keys[i], values[i]);
    }
    request.doc(source);
    logger.debug("Updating workflow {} with {}", workflowInstanceId, source);
    new RetryUtil<>().retryOnException(
            () -> elasticSearchClient.update(request),
            null,
            null,
            RETRY_COUNT,
            "Updating index for doc_type workflow",
            "updateWorkflow"
    );
}
/**
 * Builds an upsert UpdateRequest for the tuple: document id is element.f0 and the
 * body is a single-field {"data": element.f1} map.
 */
private static UpdateRequest createUpdateRequest(Tuple2<String, String> element, ParameterTool parameterTool) {
    Map<String, Object> document = new HashMap<>();
    document.put("data", element.f1);
    String index = parameterTool.getRequired("index");
    String type = parameterTool.getRequired("type");
    return new UpdateRequest(index, type, element.f0).doc(document).upsert(document);
}
}
/**
 * Applies a partial update of key/value pairs to the indexed workflow document via the
 * REST client, retrying up to RETRY_COUNT times; checked IOExceptions from the client
 * are wrapped as RuntimeException so the retry lambda can rethrow them.
 *
 * @throws ApplicationException when keys and values differ in length
 */
@Override
public void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values) {
    if (keys.length != values.length) {
        throw new ApplicationException(ApplicationException.Code.INVALID_INPUT,
                "Number of keys and values do not match");
    }
    UpdateRequest request = new UpdateRequest(indexName, WORKFLOW_DOC_TYPE, workflowInstanceId);
    // Build the doc by hand instead of Collectors.toMap: toMap throws a
    // NullPointerException when any value is null and IllegalStateException on
    // duplicate keys — a null update value would previously crash this method.
    // (Fully-qualified to avoid touching the file's import block.)
    Map<String, Object> source = new java.util.HashMap<>();
    for (int i = 0; i < keys.length; i++) {
        source.put(keys[i], values[i]);
    }
    request.doc(source);
    logger.debug("Updating workflow {} with {}", workflowInstanceId, source);
    new RetryUtil<UpdateResponse>().retryOnException(() -> {
        try {
            return elasticSearchClient.update(request);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }, null, null, RETRY_COUNT, "Updating index for doc_type workflow", "updateWorkflow");
}
/**
 * Converts a (id, value) tuple into an upsert UpdateRequest whose body is the
 * one-entry map {"data": value}.
 */
private static UpdateRequest createUpdateRequest(Tuple2<String, String> element, ParameterTool parameterTool) {
    Map<String, Object> body = new HashMap<>();
    body.put("data", element.f1);
    return new UpdateRequest(
            parameterTool.getRequired("index"),
            parameterTool.getRequired("type"),
            element.f0)
            .doc(body)
            .upsert(body);
}
}
/**
 * Maps a tuple to an upsert UpdateRequest: element.f0 is the document id and
 * element.f1 is stored under the "data" field.
 */
private static UpdateRequest createUpdateRequest(Tuple2<String, String> element, ParameterTool parameterTool) {
    Map<String, Object> fields = new HashMap<>();
    fields.put("data", element.f1);
    UpdateRequest updateRequest = new UpdateRequest(
            parameterTool.getRequired("index"),
            parameterTool.getRequired("type"),
            element.f0);
    updateRequest.doc(fields);
    updateRequest.upsert(fields);
    return updateRequest;
}
}
/**
 * Indexes a task summary as an upsert. Failures are logged and swallowed so indexing
 * problems never break task execution.
 */
@Override
public void indexTask(Task task) {
    try {
        String id = task.getTaskId();
        TaskSummary summary = new TaskSummary(task);
        byte[] doc = objectMapper.writeValueAsBytes(summary);
        UpdateRequest req = new UpdateRequest(indexName, TASK_DOC_TYPE, id);
        req.doc(doc, XContentType.JSON);
        req.upsert(doc, XContentType.JSON);
        // Consistent with the workflow/event indexing paths, which retry version
        // conflicts instead of failing the update (this sibling was missing it).
        req.retryOnConflict(5);
        // Fixed copy-pasted description: this indexes a task, not a workflow.
        updateWithRetry(req, "Index task into doc_type task");
    } catch (Exception e) {
        logger.error("Failed to index task: {}", task.getTaskId(), e);
    }
}