/**
 * Compile a search string without a key-exists constraint
 * @param search the search string to compile
 * @param path the path where to perform the search (may be null if the
 * whole data store should be searched)
 * @return the compiled query
 */
public JsonObject compileQuery(String search, String path) {
  // delegate to the three-argument variant; null means "no key-exists filter"
  return this.compileQuery(search, path, null);
}
/**
 * Compile a search string and optimize the resulting Elasticsearch query
 * @param search the search string to compile
 * @return the compiled and optimized query
 */
@Override
public JsonObject compileQuery(String search) {
  // compile first, then let the optimizer simplify the generated query
  JsonObject unoptimized = compileQueryNoOptimize(search);
  return ElasticsearchQueryOptimizer.optimize(unoptimized);
}
/**
 * Called when the parser leaves a key-value pair. Builds a query from the
 * key, value and comparison operator collected while walking the subtree.
 * @param ctx the parser rule context
 */
@Override
public void exitKeyvalue(KeyvalueContext ctx) {
  String key = currentKeyvalue.getString("key");
  String value = currentKeyvalue.getString("value");
  ComparisonOperator op =
      ComparisonOperator.valueOf(currentKeyvalue.getString("comp"));

  JsonObject query = makeQuery(new KeyValueQueryPart(key, value, op));
  // push the query onto the result stack unless it could be merged into
  // an enclosing logical expression
  if (!combine(query)) {
    result.push(query);
  }

  // reset the collector for the next key-value pair
  currentKeyvalue = null;
}
// NOTE(review): fragment of a larger (presumably test) method — the
// enclosing definition and the trailing part of the `if` are not visible here
DefaultQueryCompiler compiler = new DefaultQueryCompiler();
compiler.setQueryCompilers(queryCompilers);
JsonObject compiledQuery = compiler.compileQuery(query);
// print the actual result when it differs from the expectation (debug aid)
if (!expected.equals(compiledQuery)) {
  System.out.println(Json.encodePrettily(compiledQuery));
@Override public void start(Future<Void> startFuture) { // load and copy all indexer factories now and not lazily to avoid // concurrent modifications to the service loader's internal cache indexerFactories = ImmutableList.copyOf(FilteredServiceLoader.load(IndexerFactory.class)); queryCompiler = createQueryCompiler(); queryCompiler.setQueryCompilers(indexerFactories); new ElasticsearchClientFactory(vertx).createElasticsearchClient(INDEX_NAME) .doOnSuccess(es -> { client = es; }) .flatMapCompletable(v -> client.ensureIndex()) .andThen(Completable.defer(() -> ensureMapping())) .subscribe(() -> { registerMessageConsumers(); startFuture.complete(); }, startFuture::fail); }
// NOTE(review): stray single statement — wires the discovered indexer
// factories into the query compiler; enclosing context not visible here
queryCompiler.setQueryCompilers(indexerFactories);
/**
 * Update the meta data of existing chunks in the index. The chunks are
 * specified by a search query.
 * @param body the message containing the search and path
 * @param scriptName the name of the Painless script resource to execute
 * (resolved relative to this class via {@code getClass().getResource})
 * @param params the parameters passed to the Painless script
 * @return a Completable that will complete when the chunks have been updated
 * successfully
 */
private Completable updateMetadata(JsonObject body, String scriptName,
    JsonObject params) {
  String search = body.getString("search", "");
  String path = body.getString("path", "");
  JsonObject postFilter = queryCompiler.compileQuery(search, path);

  // Elasticsearch update script written in the Painless language
  JsonObject updateScript = new JsonObject()
    .put("lang", "painless");

  try {
    updateScript.put("params", params);
    URL url = getClass().getResource(scriptName);
    if (url == null) {
      throw new FileNotFoundException("Script " + scriptName +
        " does not exist");
    }
    // load the script source from the classpath and inline it into the request
    String script = Resources.toString(url, StandardCharsets.UTF_8);
    updateScript.put("inline", script);
    return updateDocuments(postFilter, updateScript);
  } catch (IOException e) {
    // resource-loading failures are reported through the Completable
    return Completable.error(e);
  }
}
/**
 * Called when the parser enters a string literal. Inside a key-value pair
 * the first string becomes the key and the second one the value; outside a
 * key-value pair the string is compiled into a query of its own.
 * @param ctx the parser rule context
 */
@Override
public void enterString(StringContext ctx) {
  String text = ctx.getText();
  if (currentKeyvalue == null) {
    // plain string term: make it a standalone query
    JsonObject query = makeQuery(new StringQueryPart(text));
    if (!combine(query)) {
      result.push(query);
    }
  } else if (currentKeyvalue.containsKey("key")) {
    currentKeyvalue.put("value", text);
  } else {
    currentKeyvalue.put("key", text);
  }
}
// NOTE(review): fragment — start of a method whose remainder is not visible.
// Compile the search string without optimization; the filter list is
// presumably filled with additional constraints further below — TODO confirm
JsonObject qb = compileQueryNoOptimize(search);
List<JsonObject> filter = new ArrayList<>();
private Single<JsonObject> executeQuery(JsonObject body, String keyExists) { String search = body.getString("search"); String path = body.getString("path"); String scrollId = body.getString("scrollId"); JsonObject parameters = new JsonObject() .put("size", body.getInteger("pageSize", 100)); String timeout = "1m"; // one minute if (scrollId == null) { try { // Execute a new search. Use a post_filter because we only want to get // a yes/no answer and no scoring (i.e. we only want to get matching // documents and not those that likely match). For the difference between // query and post_filter see the Elasticsearch documentation. JsonObject postFilter = queryCompiler.compileQuery(search, path, keyExists); return client.beginScroll(TYPE_NAME, null, postFilter, parameters, timeout); } catch (Throwable t) { return Single.error(t); } } else { // continue searching return client.continueScroll(scrollId, timeout); } }
// NOTE(review): fragment — the opening `try` of this block is not visible.
// Compile the search into a post_filter; compiler errors are converted
// into an error Single instead of being thrown to the caller
postFilter = queryCompiler.compileQuery(search, path); } catch (Throwable t) { return Single.error(t);