/** StormDataContext constructor. */
public StormDataContext() {
  // Store the time at which the query started executing. The SQL
  // standard says that functions such as CURRENT_TIMESTAMP return the
  // same value throughout the query.
  final Holder<Long> timeHolder = Holder.of(System.currentTimeMillis());

  // Give a hook a chance to alter the clock.
  Hook.CURRENT_TIME.run(timeHolder);
  final long time = timeHolder.get();
  final TimeZone timeZone = Calendar.getInstance().getTimeZone();
  final long localOffset = timeZone.getOffset(time);
  final long currentOffset = localOffset;

  ImmutableMap.Builder<Object, Object> builder = ImmutableMap.builder();
  builder.put(Variable.UTC_TIMESTAMP.camelName, time)
      .put(Variable.CURRENT_TIMESTAMP.camelName, time + currentOffset)
      .put(Variable.LOCAL_TIMESTAMP.camelName, time + localOffset)
      .put(Variable.TIME_ZONE.camelName, timeZone);
  map = builder.build();
}
@SuppressWarnings("unchecked")
private <T> Sequence<T> runQuery(Query<T> query)
{
  Hook.QUERY_PLAN.run(query);

  // Assign a fresh native query id and propagate the SQL query id
  // before handing the query to the lifecycle.
  final String queryId = UUID.randomUUID().toString();
  plannerContext.addNativeQueryId(queryId);
  query = query.withId(queryId)
               .withSqlQueryId(plannerContext.getSqlQueryId());

  final AuthenticationResult authenticationResult =
      plannerContext.getAuthenticationResult();
  return queryLifecycleFactory.factorize()
      .runSimple(query, authenticationResult, null);
}
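Here Hook.QUERY_PLAN carries the native query rather than a Calcite plan, so a test can observe exactly what runQuery() is about to execute by attaching a thread-local listener. A minimal sketch, assuming Calcite's Hook.addThread / Hook.Closeable API; the recordedQueries list is illustrative, not part of either project:

// A minimal sketch of observing the value passed to Hook.QUERY_PLAN.
// Assumes Calcite's Hook.addThread / Hook.Closeable API; recordedQueries
// is an illustrative local, not part of either project.
final List<Query<?>> recordedQueries = new ArrayList<>();
try (Hook.Closeable ignored =
         Hook.QUERY_PLAN.addThread((Query<?> q) -> recordedQueries.add(q))) {
  // Run the SQL under test here; runQuery() above fires the hook on this
  // thread, so each native query lands in recordedQueries.
}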
public StreamlineDataContext() {
  // Store the time at which the query started executing. The SQL
  // standard says that functions such as CURRENT_TIMESTAMP return the
  // same value throughout the query.
  final Holder<Long> timeHolder = Holder.of(System.currentTimeMillis());

  // Give a hook a chance to alter the clock.
  Hook.CURRENT_TIME.run(timeHolder);
  final long time = timeHolder.get();
  final TimeZone timeZone = Calendar.getInstance().getTimeZone();
  final long localOffset = timeZone.getOffset(time);
  final long currentOffset = localOffset;

  ImmutableMap.Builder<Object, Object> builder = ImmutableMap.builder();
  builder.put(Variable.UTC_TIMESTAMP.camelName, time)
      .put(Variable.CURRENT_TIMESTAMP.camelName, time + currentOffset)
      .put(Variable.LOCAL_TIMESTAMP.camelName, time + localOffset)
      .put(Variable.TIME_ZONE.camelName, timeZone);
  map = builder.build();
}
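The StormDataContext and StreamlineDataContext constructors above both consult Hook.CURRENT_TIME before trusting the system clock, which lets a test pin the query timestamp. A minimal sketch, assuming Calcite's Hook.addThread / Hook.Closeable API; fixedTime is an illustrative constant:

// A minimal sketch of freezing the clock seen by the constructors above.
// Assumes Calcite's Hook.addThread / Hook.Closeable API; fixedTime is an
// illustrative constant, not part of either project.
final long fixedTime = 1262304000000L;  // 2010-01-01T00:00:00Z
try (Hook.Closeable ignored =
         Hook.CURRENT_TIME.addThread((Holder<Long> h) -> h.set(fixedTime))) {
  // Any data context created on this thread now reports fixedTime for
  // UTC_TIMESTAMP, CURRENT_TIMESTAMP and LOCAL_TIMESTAMP.
  DataContext context = new StormDataContext();
}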
/** Returns the value of a property hook.
 * (Property hooks take a {@link Holder} as an argument.) */
public <V> V get(V defaultValue) {
  final Holder<V> holder = Holder.of(defaultValue);
  run(holder);
  return holder.get();
}
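From the calling side, a property hook is read by passing a default that any registered handler may replace. A hypothetical sketch; MY_PROPERTY stands in for a real Hook constant and is not part of Calcite:

// Hypothetical usage of a property hook; MY_PROPERTY is illustrative only.
boolean flag = Hook.MY_PROPERTY.get(true);  // default wins if no handler set
// A test can override the value for the current thread:
try (Hook.Closeable ignored =
         Hook.MY_PROPERTY.addThread((Holder<Boolean> h) -> h.set(false))) {
  boolean overridden = Hook.MY_PROPERTY.get(true);  // now false
}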
DruidQueryNode(Compiler interpreter, DruidQuery query) {
  this.query = query;
  this.sink = interpreter.sink(query);
  this.querySpec = query.getQuerySpec();
  Hook.QUERY_PLAN.run(querySpec);
}
private ElasticsearchJson.Result httpRequest(ObjectNode query)
    throws IOException {
  Objects.requireNonNull(query, "query");
  String uri = String.format(Locale.ROOT, "/%s/%s/_search",
      indexName, typeName);

  Hook.QUERY_PLAN.run(query);
  final String json = mapper.writeValueAsString(query);
  LOGGER.debug("Elasticsearch Query: {}", json);

  HttpEntity entity = new StringEntity(json, ContentType.APPLICATION_JSON);
  Response response =
      restClient.performRequest("POST", uri, Collections.emptyMap(), entity);
  if (response.getStatusLine().getStatusCode() != HttpStatus.SC_OK) {
    final String error = EntityUtils.toString(response.getEntity());
    final String message = String.format(Locale.ROOT,
        "Error while querying Elastic (on %s/%s) status: %s\nQuery:\n%s\nError:\n%s\n",
        response.getHost(), response.getRequestLine(),
        response.getStatusLine(), query, error);
    throw new RuntimeException(message);
  }

  try (InputStream is = response.getEntity().getContent()) {
    return mapper.readValue(is, ElasticsearchJson.Result.class);
  }
}
public void reduce(RexBuilder rexBuilder, List<RexNode> constExps,
    List<RexNode> reducedValues) {
  Object[] values;
  try {
    values = compiledFunction.apply(dataContext);
    assert values.length == constExps.size();
    final List<Object> valueList = Arrays.asList(values);
    for (Pair<RexNode, Object> value : Pair.zip(constExps, valueList)) {
      reducedValues.add(
          rexBuilder.makeLiteral(value.right, value.left.getType(), true));
    }
  } catch (RuntimeException e) {
    // One or more of the expressions failed.
    // Don't reduce any of the expressions.
    reducedValues.addAll(constExps);
    values = new Object[constExps.size()];
  }
  Hook.EXPRESSION_REDUCER.run(Pair.of(code, values));
}
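Because the hook fires with a Pair of the generated code and the evaluated values, a listener can inspect exactly what the constant reducer produced. A minimal sketch, assuming Calcite's Hook.addThread / Hook.Closeable API:

// A minimal sketch of inspecting the constant reducer's output.
// Assumes Calcite's Hook.addThread / Hook.Closeable API.
try (Hook.Closeable ignored =
         Hook.EXPRESSION_REDUCER.addThread((Pair<String, Object[]> p) ->
             System.out.println("code: " + p.left
                 + ", values: " + Arrays.toString(p.right)))) {
  // Run a query with foldable constant expressions here; each call to
  // reduce() above publishes the generated code and its results.
}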
@SuppressWarnings("unchecked")
private <T> Sequence<T> runQuery(final Query<T> query)
{
  Hook.QUERY_PLAN.run(query);
  final AuthenticationResult authenticationResult =
      plannerContext.getAuthenticationResult();
  return queryLifecycleFactory.factorize()
      .runSimple(query, authenticationResult, null);
}
protected Program getProgram() {
  // Allow a test to override the default program.
  final Holder<Program> holder = Holder.of(null);
  Hook.PROGRAM.run(holder);
  if (holder.get() != null) {
    return holder.get();
  }
  return Programs.standard();
}
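The corresponding override looks like this from the test side. A minimal sketch, assuming Calcite's Hook.addThread / Hook.Closeable API; myProgram is any org.apache.calcite.tools.Program:

// A minimal sketch of overriding the default program from a test.
// Assumes Calcite's Hook.addThread / Hook.Closeable API.
final Program myProgram = Programs.ofRules(/* rules under test */);
try (Hook.Closeable ignored =
         Hook.PROGRAM.addThread((Holder<Program> h) -> h.set(myProgram))) {
  // getProgram() above now returns myProgram instead of Programs.standard().
}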
/** Wraps the SQL string in a
 * {@link org.apache.calcite.jdbc.CalcitePrepare.Query} object, giving the
 * {@link Hook#STRING_TO_QUERY} hook a chance to override it. */
private CalcitePrepare.Query<Object> toQuery(Context context, String sql) {
  final Holder<CalcitePrepare.Query<Object>> queryHolder =
      Holder.of(CalcitePrepare.Query.of(sql));
  final FrameworkConfig config = Frameworks.newConfigBuilder()
      .parserConfig(SqlParser.Config.DEFAULT)
      .defaultSchema(context.getRootSchema().plus())
      .build();
  Hook.STRING_TO_QUERY.run(Pair.of(config, queryHolder));
  return queryHolder.get();
}
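A handler receives the parser config alongside the query holder, so it can swap the SQL string for a pre-built plan. A minimal sketch, assuming Calcite's Hook.addThread / Hook.Closeable API and the CalcitePrepare.Query.of(RelNode) factory; buildRel() is a hypothetical helper that constructs a RelNode from the FrameworkConfig:

// A minimal sketch of overriding the query via the STRING_TO_QUERY hook.
// Assumes Calcite's Hook.addThread / Hook.Closeable API and
// CalcitePrepare.Query.of(RelNode); buildRel() is hypothetical.
try (Hook.Closeable ignored =
         Hook.STRING_TO_QUERY.addThread(
             (Pair<FrameworkConfig, Holder<CalcitePrepare.Query<Object>>> pair) ->
                 pair.right.set(CalcitePrepare.Query.of(buildRel(pair.left))))) {
  // Statements prepared on this thread now execute the RelNode instead of
  // parsing the original SQL string.
}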
public static Bindable toBindable(Map<String, Object> parameters,
    CalcitePrepare.SparkHandler spark, EnumerableRel rel,
    EnumerableRel.Prefer prefer) {
  EnumerableRelImplementor relImplementor =
      new EnumerableRelImplementor(rel.getCluster().getRexBuilder(),
          parameters);

  final ClassDeclaration expr = relImplementor.implementRoot(rel, prefer);
  String s = Expressions.toString(expr.memberDeclarations, "\n", false);

  if (CalcitePrepareImpl.DEBUG) {
    Util.debugCode(System.out, s);
  }

  Hook.JAVA_PLAN.run(s);

  try {
    if (spark != null && spark.enabled()) {
      return spark.compile(expr, s);
    } else {
      return getBindable(expr, s, rel.getRowType().getFieldCount());
    }
  } catch (Exception e) {
    throw Helper.INSTANCE.wrap(
        "Error while compiling generated Java code:\n" + s, e);
  }
}
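Since the hook fires with the full generated source, a listener can capture it for inspection without enabling the DEBUG flag. A minimal sketch, assuming Calcite's Hook.addThread / Hook.Closeable API:

// A minimal sketch of capturing the generated Java source via the hook,
// without turning on CalcitePrepareImpl.DEBUG. Assumes Calcite's
// Hook.addThread / Hook.Closeable API.
final StringBuilder plan = new StringBuilder();
try (Hook.Closeable ignored =
         Hook.JAVA_PLAN.addThread((String code) -> plan.append(code))) {
  // Prepare and run the statement under test here; toBindable() above
  // publishes the generated source before compiling it.
}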