@Override
public void streamsPlan(StreamsPlanCreator planCreator) throws Exception {
    // Leaf node of the plan: creates a brand-new Stream from the table's
    // registered data source and pushes it onto the plan creator's stack.
    String sourceName = Joiner.on('.').join(getTable().getQualifiedName());

    // Single lookup instead of containsKey() followed by get().
    ISqlStreamsDataSource source = planCreator.getSources().get(sourceName);
    if (source == null) {
        throw new RuntimeException("Cannot find table " + sourceName);
    }

    // Field names of this rel's row type drive how raw tuples are mapped to Values.
    List<String> fieldNames = getRowType().getFieldNames();
    final Stream<Values> finalStream = planCreator.getStreamBuilder()
            .newStream(source.getProducer(),
                    new StreamsScanTupleValueMapper(fieldNames),
                    parallelismHint);

    planCreator.addStream(finalStream);
}
}
@Override public void streamsPlan(StreamsPlanCreator planCreator) throws Exception { // SingleRel RelNode input = getInput(); StormRelUtils.getStormRelInput(input).streamsPlan(planCreator); Stream<Values> inputStream = planCreator.pop(); Preconditions.checkArgument(isInsert(), "Only INSERT statement is supported."); // Calcite ensures that the value is structurized to the table definition // hence we can use PK index directly // To elaborate, if table BAR is defined as ID INTEGER PK, NAME VARCHAR, DEPTID INTEGER // and query like INSERT INTO BAR SELECT NAME, ID FROM FOO is executed, // Calcite makes the projection ($1 <- ID, $0 <- NAME, null) to the value before INSERT. String tableName = Joiner.on('.').join(getTable().getQualifiedName()); IRichBolt consumer = planCreator.getSources().get(tableName).getConsumer(); // To make logic simple, it assumes that all the tables have one PK (which it should be extended to support composed key), // and provides PairStream(KeyedStream) to consumer bolt. inputStream.mapToPair(new StreamInsertMapToPairFunction(primaryKeyIndex)).to(consumer); planCreator.addStream(inputStream); }
@Override public void streamsPlan(StreamsPlanCreator planCreator) throws Exception { // SingleRel RelNode input = getInput(); StormRelUtils.getStormRelInput(input).streamsPlan(planCreator); Stream<Values> inputStream = planCreator.pop(); List<RexNode> childExps = getChildExps(); RelDataType inputRowType = getInput(0).getRowType(); String filterClassName = StormRelUtils.getClassName(this); ExecutableExpression filterInstance = planCreator.createScalarInstance(childExps, inputRowType, filterClassName); EvaluationFilter evalFilter = new EvaluationFilter(filterInstance, planCreator.getDataContext()); final Stream<Values> finalStream = inputStream.filter(evalFilter); planCreator.addStream(finalStream); } }
@Override public void streamsPlan(StreamsPlanCreator planCreator) throws Exception { // SingleRel RelNode input = getInput(); StormRelUtils.getStormRelInput(input).streamsPlan(planCreator); Stream<Values> inputStream = planCreator.pop(); String projectionClassName = StormRelUtils.getClassName(this); List<String> outputFieldNames = getRowType().getFieldNames(); int outputCount = outputFieldNames.size(); List<RexNode> childExps = getChildExps(); RelDataType inputRowType = getInput(0).getRowType(); ExecutableExpression projectionInstance = planCreator.createScalarInstance(childExps, inputRowType, projectionClassName); EvaluationFunction evalFunc = new EvaluationFunction(projectionInstance, outputCount, planCreator.getDataContext()); final Stream<Values> finalStream = inputStream.map(evalFunc); planCreator.addStream(finalStream); } }
EvaluationCalc evalCalc = new EvaluationCalc(filterInstance, projectionInstance, outputCount, planCreator.getDataContext()); final Stream finalStream = inputStream.flatMap(evalCalc);