@Override public Iterable<RawQuery.Result<String>> query(RawQuery query, KeyInformation.IndexRetriever informations, BaseTransaction tx) throws BackendException { SearchRequestBuilder srb = client.prepareSearch(indexName); srb.setTypes(query.getStore()); srb.setQuery(QueryBuilders.queryStringQuery(query.getQuery())); srb.setFrom(query.getOffset()); if (query.hasLimit()) srb.setSize(query.getLimit()); else srb.setSize(maxResultsSize); srb.setNoFields(); //srb.setExplain(true); SearchResponse response = srb.execute().actionGet(); log.debug("Executed query [{}] in {} ms", query.getQuery(), response.getTookInMillis()); SearchHits hits = response.getHits(); if (!query.hasLimit() && hits.totalHits() >= maxResultsSize) log.warn("Query result set truncated to first [{}] elements for query: {}", maxResultsSize, query); List<RawQuery.Result<String>> result = new ArrayList<RawQuery.Result<String>>(hits.hits().length); for (SearchHit hit : hits) { result.add(new RawQuery.Result<String>(hit.id(),hit.getScore())); } return result; }
// NOTE(review): fragment — the enclosing method and the loop's closing brace lie outside this view.
// Collects the document id of every hit on the current page into a list presized to the page length.
List<String> result = new ArrayList<String>(hits.hits().length); for (SearchHit hit : hits) { result.add(hit.id());
// NOTE(review): fragment of a reader-registry builder; the surrounding map-building
// expression starts and ends outside this view.
// The completed reader extracts the hit's document id; the next entry (opened here)
// registers a Long-valued reader under the timestamp meta-field's name.
@Override public String read(SearchHit hit) { return hit.id(); } }).put(TimestampFieldMapper.NAME, new HitReader<Long>() {
// Plain delegation: exposes the wrapped SearchHit's document id.
@Override public String id() { return sh.id(); }
// Plain delegation: exposes the wrapped SearchHit's document id.
@Override public String id() { return this.searchHit.id(); }
// Match every document visible to the client and read each hit's document id.
SearchResponse matchAllResponse = client.prepareSearch().setQuery(matchAllQuery()).get();
for (SearchHit searchHit : matchAllResponse.getHits()) {
    String yourId = searchHit.id();
}
/**
 * Materializes a DocVertex per search hit, copying the hit's source fields in as
 * local properties and linking every vertex created here as siblings of the others.
 *
 * @param hits the raw hits to convert
 * @return an iterator over the newly built vertices
 */
private Iterator<? extends Vertex> createVertex(Iterator<SearchHit> hits) {
    ArrayList<BaseVertex> siblings = new ArrayList<>();
    while (hits.hasNext()) {
        SearchHit hit = hits.next();
        BaseVertex docVertex = new DocVertex(hit.id(), hit.getType(), null, graph, null, elasticMutations, indexName);
        docVertex.setSiblings(siblings);
        // Copy every source field of the hit onto the vertex as a local property.
        hit.getSource().forEach(docVertex::addPropertyLocal);
        siblings.add(docVertex);
    }
    return siblings.iterator();
}
}
/**
 * Advances to the next record: reads from the in-progress page of search hits,
 * fetching a fresh page only when none is in progress, and stops once the
 * per-split record quota is reached.
 *
 * NOTE(review): as in the original, an exhausted in-progress iterator returns
 * false WITHOUT fetching another page — a later call then nulls nothing and
 * stays stuck. Presumably fetchNextHits returns the whole quota in one page;
 * TODO confirm, otherwise trailing pages are silently dropped.
 *
 * @return true if a record was read into currentKey/currentValue
 */
@Override public boolean nextKeyValue() throws IOException {
    if (recordsRead >= recsToRead) {
        // Quota reached for this split: drop any in-progress page and stop.
        hitsItr = null;
        return false;
    }
    if (hitsItr == null) {
        hitsItr = fetchNextHits();
    }
    if (hitsItr.hasNext()) {
        readHit(hitsItr.next());
        return true;
    }
    return false;
}

/** Copies one hit into the current key (doc id) / value (source JSON) pair and counts it. */
private void readHit(SearchHit hit) {
    currentKey = new Text(hit.id());
    currentValue = new Text(hit.sourceAsString());
    recordsRead += 1;
}
/**
 * Builds a StarVertex per search hit; each vertex carries the hit's source map
 * as its fields and is registered as a sibling of every other vertex built here.
 *
 * @param hits the raw hits to convert
 * @return an iterator over the newly built vertices
 */
private Iterator<? extends Vertex> createVertex(Iterator<SearchHit> hits) {
    ArrayList<BaseVertex> siblings = new ArrayList<>();
    while (hits.hasNext()) {
        SearchHit hit = hits.next();
        StarVertex starVertex = new StarVertex(hit.id(), hit.getType(), null, graph, null, elasticMutations, hit.getIndex(), edgeMappings);
        starVertex.setFields(hit.getSource());
        starVertex.setSiblings(siblings);
        siblings.add(starVertex);
    }
    return siblings.iterator();
}
//Scroll until no hits are returned while (true) { scrollResp = client.prepareSearchScroll(scrollResp.getScrollId()).setScroll(new TimeValue(600000)).execute().actionGet(); //Break condition: No hits are returned if (scrollResp.getHits().getHits().length == 0) { logger.info("Closing the bulk processor"); bulkProcessor.close(); break; } // Get results from a scan search and add it to bulk ingest for (SearchHit hit: scrollResp.getHits()) { IndexRequest request = new IndexRequest("new_index", hit.type(), hit.id()); Map source = ((Map) ((Map) hit.getSource())); request.source(source); bulkProcessor.add(request); } }
/**
 * Gets the active percolate tags for an index: every document registered under
 * the {@code .percolator} type counts as one tag, keyed by its document id
 * (capped at 1000 entries per the fixed page size).
 *
 * @param index the index whose percolator registrations are scanned
 * @return the set of percolator document ids
 */
public Set<String> getActivePercolateTags(String index) {
    SearchResponse response = manager.client().prepareSearch("*")
            .setIndices(index)
            .setTypes(".percolator")
            .setSize(1000)
            .setQuery(QueryBuilders.matchAllQuery())
            .execute().actionGet();
    Set<String> tags = new HashSet<>();
    for (SearchHit hit : response.getHits().getHits()) {
        tags.add(hit.id());
    }
    return tags;
}
/**
 * Deletes every document of {@code fromType} in {@code fromIndex} by scrolling
 * through the type and issuing one blocking delete per hit.
 *
 * NOTE(review): one synchronous request per document is slow and the scroll
 * context is never cleared — a bulk-delete plus clearScroll would be kinder to
 * the cluster; left as-is to keep behavior identical.
 */
private void deleteIndexType(final String fromIndex, final String fromType) {
    SearchResponse searchResponse = client.prepareSearch(fromIndex).setTypes(fromType).setScroll("1m").get();
    SearchHit[] hits = searchResponse.getHits().getHits();
    deleteHits(hits);
    while (hits.length != 0) {
        searchResponse = client.prepareSearchScroll(searchResponse.getScrollId()).setScroll("1m").get();
        hits = searchResponse.getHits().getHits();
        deleteHits(hits);
    }
}

/** Issues one blocking delete request per hit, addressed by the hit's own index/type/id. */
private void deleteHits(SearchHit[] hits) {
    for (SearchHit hit : hits) {
        client.prepareDelete(hit.index(), hit.type(), hit.id()).get();
    }
}
// NOTE(review): fragment — the trailing "});" closes an anonymous class opened outside this view.
// Adapts a single-hit SearchResponse into a GetResponse:
//  - zero hits -> a "not found" GetResponse echoing the request's index/type/id/version
//  - one hit   -> a found GetResponse built from the hit's coordinates, version and source
//  - more      -> impossible by construction, hence the hard failure
@Override public GetResponse getDelegatedFromInstigator(final SearchResponse searchResponse) { if (searchResponse.getHits().getTotalHits() <= 0) { return new GetResponse(new GetResult(request.index(), request.type(), request.id(), request.version(), false, null, null)); } else if (searchResponse.getHits().getTotalHits() > 1) { throw new RuntimeException("cannot happen"); } else { final SearchHit sh = searchResponse.getHits().getHits()[0]; return new GetResponse(new GetResult(sh.index(), sh.type(), sh.id(), sh.version(), true, sh.getSourceRef(), null)); } } });
// Executes a raw query-string query against this index and returns the matching
// document ids paired with their relevance scores. When the query carries no
// limit the page size falls back to maxResultsSize and a truncation warning is
// logged if the total hit count reaches that cap.
// NOTE(review): paging is hard-coded to start at 0 (setFrom(0)) — presumably
// RawQuery has no offset in this API revision; confirm against its definition.
@Override public Iterable<RawQuery.Result<String>> query(RawQuery query, KeyInformation.IndexRetriever informations, TransactionHandle tx) throws StorageException { SearchRequestBuilder srb = client.prepareSearch(indexName); srb.setTypes(query.getStore()); srb.setQuery(QueryBuilders.queryString(query.getQuery())); srb.setFrom(0); if (query.hasLimit()) srb.setSize(query.getLimit()); else srb.setSize(maxResultsSize); srb.setNoFields(); //srb.setExplain(true); SearchResponse response = srb.execute().actionGet(); log.debug("Executed query [{}] in {} ms", query.getQuery(), response.getTookInMillis()); SearchHits hits = response.getHits(); if (!query.hasLimit() && hits.totalHits() >= maxResultsSize) log.warn("Query result set truncated to first [{}] elements for query: {}", maxResultsSize, query); List<RawQuery.Result<String>> result = new ArrayList<RawQuery.Result<String>>(hits.hits().length); for (SearchHit hit : hits) { result.add(new RawQuery.Result<String>(hit.id(),hit.getScore())); } return result; }
/**
 * Streams every document matched by {@code builder} out of the cluster via a
 * scan/scroll search and appends each document's dump representation (one per
 * line) to the file at {@code builder.path()}.
 *
 * The save count is printed even when the copy aborts mid-stream, matching the
 * original finally-block behavior; the writer is now closed by
 * try-with-resources instead of a manual close.
 *
 * @throws IOException if the output file cannot be written
 */
void dumpSave(Client client, Builder builder) throws IOException {
    TimeValue scrollDuration = TimeValue.timeValueSeconds(30);
    // SCAN-type searches return no hits in the initial response; documents
    // arrive only through the subsequent scroll requests below.
    SearchResponse searchResponse = client.prepareSearch(builder.indices())
            .setTypes(builder.types()).setSearchType(SearchType.SCAN)
            .setQuery(builder.query()).setSize(100).setScroll(scrollDuration).execute().actionGet();
    int saved = 0;
    try (OutputStreamWriter writer = new OutputStreamWriter(new FileOutputStream(builder.path(), true), builder.charset())) {
        try {
            while (true) {
                searchResponse = client.prepareSearchScroll(searchResponse.getScrollId())
                        .setScroll(scrollDuration).execute().actionGet();
                // An empty scroll page means the scroll is exhausted.
                if (searchResponse.getHits().hits().length == 0) {
                    break;
                }
                for (SearchHit hit : searchResponse.getHits()) {
                    Document document = Document.fromSource(hit.sourceAsString(), hit.index(), hit.type(), hit.id());
                    writer.write(document.getDump());
                    saved++;
                    writer.write('\n');
                }
            }
        } finally {
            console.println("Saved " + saved + " documents to " + builder.path());
        }
    }
}
@Override public Iterable<RawQuery.Result<String>> query(RawQuery query, KeyInformation.IndexRetriever informations, BaseTransaction tx) throws BackendException { SearchRequestBuilder srb = client.prepareSearch(indexName); srb.setTypes(query.getStore()); srb.setQuery(QueryBuilders.queryString(query.getQuery())); srb.setFrom(query.getOffset()); if (query.hasLimit()) srb.setSize(query.getLimit()); else srb.setSize(maxResultsSize); srb.setNoFields(); //srb.setExplain(true); SearchResponse response = srb.execute().actionGet(); log.debug("Executed query [{}] in {} ms", query.getQuery(), response.getTookInMillis()); SearchHits hits = response.getHits(); if (!query.hasLimit() && hits.totalHits() >= maxResultsSize) log.warn("Query result set truncated to first [{}] elements for query: {}", maxResultsSize, query); List<RawQuery.Result<String>> result = new ArrayList<RawQuery.Result<String>>(hits.hits().length); for (SearchHit hit : hits) { result.add(new RawQuery.Result<String>(hit.id(),hit.getScore())); } return result; }
// NOTE(review): fragment — the trailing "});" closes an anonymous class opened outside this view.
// Adapts a MultiSearchResponse holding at most one item into a GetResponse:
//  - no items  -> a "not found" GetResponse echoing the first multi-get item's coordinates
//  - one item  -> a found GetResponse built from the first hit's index/type/id/version/source
//  - more      -> impossible by construction, hence the hard failure
// NOTE(review): the single-item branch reads getHits().getHits()[0] unguarded —
// presumably the sub-search always matches; an empty sub-response would throw AIOOBE.
@Override public GetResponse getDelegatedFromInstigator(final MultiSearchResponse searchResponse) { if (searchResponse.getResponses() == null || searchResponse.getResponses().length <= 0) { final Item item = request.getItems().get(0); return new GetResponse(new GetResult(item.index(), item.type(), item.id(), item.version(), false, null, null)); } else if (searchResponse.getResponses().length > 1) { throw new RuntimeException("cannot happen"); } else { final org.elasticsearch.action.search.MultiSearchResponse.Item item = searchResponse.getResponses()[0]; final SearchHit sh = item.getResponse().getHits().getHits()[0]; return new GetResponse(new GetResult(sh.index(), sh.type(), sh.id(), sh.version(), true, sh.getSourceRef(), null)); } } });
/**
 * Copies one search hit into an index request that re-creates the document in
 * place, pinning the hit's version with INTERNAL version type so a concurrent
 * update between search and index is detected as a conflict.
 */
@Override
protected IndexRequest buildIndexRequest(SearchHit doc) {
    IndexRequest request = new IndexRequest(mainRequest);
    request.index(doc.index());
    request.type(doc.type());
    request.id(doc.id());
    request.source(doc.sourceRef());
    request.versionType(VersionType.INTERNAL);
    request.version(doc.version());
    return request;
}
// Copies one search hit into an index request addressed at the hit's own
// index/type/id, carrying the hit's source bytes. INTERNAL version type plus the
// hit's version makes the write conflict if the document changed since the search.
@Override protected IndexRequest buildIndexRequest(SearchHit doc) { IndexRequest index = new IndexRequest(mainRequest); index.index(doc.index()); index.type(doc.type()); index.id(doc.id()); index.source(doc.sourceRef()); index.versionType(VersionType.INTERNAL); index.version(doc.version()); return index; }
@Override public void hitExecute(SearchContext context, HitContext hitContext) { if (context.explain() == false) { return; } try { final int topLevelDocId = hitContext.hit().docId(); Explanation explanation = context.searcher().explain(context.query(), topLevelDocId); for (RescoreSearchContext rescore : context.rescore()) { explanation = rescore.rescorer().explain(topLevelDocId, context, rescore, explanation); } // we use the top level doc id, since we work with the top level searcher hitContext.hit().explanation(explanation); } catch (IOException e) { throw new FetchPhaseExecutionException(context, "Failed to explain doc [" + hitContext.hit().type() + "#" + hitContext.hit().id() + "]", e); } finally { context.clearReleasables(SearchContext.Lifetime.COLLECTION); } } }