public DataFlowOpForgeInitializeResult initializeForge(DataFlowOpForgeInitializeContext context) throws ExprValidationException {
    // This operator is a pure sink: declaring any output stream is a misconfiguration.
    if (!context.getOutputPorts().isEmpty()) {
        throw new IllegalArgumentException("EventBusSink operator does not provide an output stream");
    }
    // Capture the event type of each input port, indexed by port number.
    int numInputPorts = context.getInputPorts().size();
    eventTypes = new EventType[numInputPorts];
    for (int port = 0; port < numInputPorts; port++) {
        eventTypes[port] = context.getInputPorts().get(port).getTypeDesc().getEventType();
    }
    // No output ports, therefore no output type descriptors to report.
    return null;
}
/**
 * Validates the provided optional expression against the given event type.
 * <p>
 * A null expression means "not configured" and is passed through unchanged.
 *
 * @param name               parameter name, used for error reporting
 * @param expr               expression to validate, may be null
 * @param eventType          event type the expression may reference
 * @param expectedReturnType expected result type of the expression
 * @param context            forge initialization context supplying statement info and services
 * @return the validated expression node, or null when no expression was provided
 * @throws ExprValidationException when validation failed
 */
public static ExprNode validate(String name, ExprNode expr, EventType eventType, Class expectedReturnType, DataFlowOpForgeInitializeContext context) throws ExprValidationException {
    if (expr == null) {
        // Nothing to validate; optional parameter was not supplied.
        return null;
    }
    ExprNode validatedNode = EPLValidationUtil.validateSimpleGetSubtree(
        ExprNodeOrigin.DATAFLOWFILTER,
        expr,
        eventType,
        false,
        context.getStatementRawInfo(),
        context.getServices());
    // Ensure the validated expression yields the type the operator expects.
    validateReturnType(name, validatedNode, expectedReturnType);
    return validatedNode;
}
public DataFlowOpForgeInitializeResult initializeForge(DataFlowOpForgeInitializeContext context) throws ExprValidationException { if (context.getInputPorts().isEmpty()) { throw new IllegalArgumentException("Select operator requires at least one input stream"); if (context.getOutputPorts().size() != 1) { throw new IllegalArgumentException("Select operator requires one output stream but produces " + context.getOutputPorts().size() + " streams"); DataFlowOpOutputPort portZero = context.getOutputPorts().get(0); if (portZero.getOptionalDeclaredType() != null && !portZero.getOptionalDeclaredType().isUnderlying()) { submitEventBean = true; int numStreams = context.getInputPorts().size(); eventTypes = new EventType[numStreams]; for (int i = 0; i < numStreams; i++) { eventTypes[i] = context.getInputPorts().get(i).getTypeDesc().getEventType(); for (int streamNum = 0; streamNum < select.getStreamSpecs().size(); streamNum++) { FilterStreamSpecRaw filter = streams.get(streamNum); Map.Entry<Integer, DataFlowOpInputPort> inputPort = findInputPort(filter.getRawFilterSpec().getEventTypeName(), context.getInputPorts()); if (inputPort == null) { throw new ExprValidationException("Failed to find stream '" + filter.getRawFilterSpec().getEventTypeName() + "' among input ports, input ports are " + Arrays.toString(getInputPortNames(context.getInputPorts()))); Annotation[] mergedAnnotations = AnnotationUtil.mergeAnnotations(context.getStatementRawInfo().getAnnotations(), context.getOperatorAnnotations()); mergedAnnotations = addObjectArrayRepresentation(mergedAnnotations); StreamSpecCompiled[] streamSpecArray = streamSpecCompileds.toArray(new StreamSpecCompiled[streamSpecCompileds.size()]); String dataflowClassPostfix = context.getCodegenEnv().getClassPostfix() + "__dfo" + context.getOperatorNumber(); StatementSpecCompiled containerStatement = context.getBase().getStatementSpec(); context.getBase().setStatementSpec(compiled);
assertEquals("setPropOne=abc", events.get(1)); DataFlowOpForgeInitializeContext forgeCtx = (DataFlowOpForgeInitializeContext) events.get(2); assertEquals(0, forgeCtx.getInputPorts().size()); assertEquals(1, forgeCtx.getOutputPorts().size()); assertEquals("outstream", forgeCtx.getOutputPorts().get(0).getStreamName()); assertEquals("SupportBean", forgeCtx.getOutputPorts().get(0).getOptionalDeclaredType().getEventType().getName()); assertEquals(2, forgeCtx.getOperatorAnnotations().length); assertEquals("Goodie", ((Name) forgeCtx.getOperatorAnnotations()[0]).value()); assertNotNull((Audit) forgeCtx.getOperatorAnnotations()[1]); assertEquals("MyDataFlow", forgeCtx.getDataflowName()); assertEquals(0, forgeCtx.getOperatorNumber());
public DataFlowOpForgeInitializeResult initializeForge(DataFlowOpForgeInitializeContext context) throws ExprValidationException {
    // Remember the first output port for later use by this forge.
    // NOTE(review): assumes port zero exists — presumably guaranteed by the graph compiler; confirm.
    this.port = context.getOutputPorts().get(0);
    // No derived output types to report.
    return null;
}
private DataFlowOpForgeInitializeResult initializeTypeUndeclared(DataFlowOpForgeInitializeContext context) throws ExprValidationException { // No type has been declared, we can create one Map<String, Object> types = new LinkedHashMap<String, Object>(); Set<String> props = allProperties.keySet(); props.removeAll(PARAMETER_PROPERTIES); int count = 0; evaluatorForges = new ExprForge[props.size()]; for (String propertyName : props) { ExprNode exprNode = allProperties.get(propertyName); ExprNode validated = EPLValidationUtil.validateSimpleGetSubtree(ExprNodeOrigin.DATAFLOWBEACON, exprNode, null, false, context.getStatementRawInfo(), context.getServices()); types.put(propertyName, validated.getForge().getEvaluationType()); evaluatorForges[count] = validated.getForge(); count++; } String eventTypeName = context.getServices().getEventTypeNameGeneratorStatement().getDataflowOperatorTypeName(context.getOperatorNumber()); EventTypeMetadata metadata = new EventTypeMetadata(eventTypeName, context.getBase().getModuleName(), EventTypeTypeClass.DBDERIVED, EventTypeApplicationType.OBJECTARR, NameAccessModifier.TRANSIENT, EventTypeBusModifier.NONBUS, false, EventTypeIdPair.unassigned()); outputEventType = BaseNestableEventUtil.makeOATypeCompileTime(metadata, types, null, null, null, null, context.getServices().getBeanEventTypeFactoryPrivate(), context.getServices().getEventTypeCompileTimeResolver()); context.getServices().getEventTypeCompileTimeRegistry().newType(outputEventType); return new DataFlowOpForgeInitializeResult(new GraphTypeDesc[]{new GraphTypeDesc(false, true, outputEventType)}); }
public DataFlowOpForgeInitializeResult initializeForge(DataFlowOpForgeInitializeContext context) throws ExprValidationException {
    if (context.getOutputPorts().size() != 1) {
        throw new IllegalArgumentException("EventBusSource operator requires one output stream but produces " + context.getOutputPorts().size() + " streams");
    }
    DataFlowOpOutputPort portZero = context.getOutputPorts().get(0);
    if (portZero.getOptionalDeclaredType() == null || portZero.getOptionalDeclaredType().getEventType() == null) {
        // Fixed typo in the message: "declated" -> "declared".
        throw new IllegalArgumentException("EventBusSource operator requires an event type declared for the output stream");
    }
    EventType eventType = portZero.getOptionalDeclaredType().getEventType();
    // When the declared type is not the underlying representation, submit EventBean instances.
    if (!portZero.getOptionalDeclaredType().isUnderlying()) {
        submitEventBean = true;
    }
    // NOTE(review): the validated expression returned here is discarded, unlike sibling
    // operators that assign it back to the field — confirm this is intentional.
    DataFlowParameterValidation.validate("filter", filter, eventType, boolean.class, context);
    try {
        List<ExprNode> filters = Collections.emptyList();
        if (filter != null) {
            filters = Collections.singletonList(filter);
        }
        StreamTypeServiceImpl streamTypeService = new StreamTypeServiceImpl(eventType, eventType.getName(), true);
        filterSpecCompiled = FilterSpecCompiler.makeFilterSpec(eventType, eventType.getName(), filters, null, null, null, streamTypeService, null, context.getStatementRawInfo(), context.getServices());
    } catch (ExprValidationException ex) {
        // Re-wrap with context about which stage failed, preserving the cause.
        throw new ExprValidationException("Failed to obtain filter parameters: " + ex.getMessage(), ex);
    }
    return null;
}
public DataFlowOpForgeInitializeResult initializeForge(DataFlowOpForgeInitializeContext context) throws ExprValidationException {
    // Exactly one input port is expected to feed this operator.
    final int numInputPorts = context.getInputPorts().size();
    if (numInputPorts != 1) {
        throw new EPException(getClass().getSimpleName() + " expected a single input port");
    }
    eventType = context.getInputPorts().get(0).getTypeDesc().getEventType();
    if (eventType == null) {
        throw new EPException("No event type defined for input port");
    }
    // Each validation returns the validated expression, assigned back to the parameter field.
    file = DataFlowParameterValidation.validate("file", file, String.class, context);
    classpathFile = DataFlowParameterValidation.validate("classpathFile", classpathFile, boolean.class, context);
    append = DataFlowParameterValidation.validate("append", append, boolean.class, context);
    return null;
}
WriteablePropertyDescriptor[] writables = setupProperties(props.toArray(new String[props.size()]), outputEventType); try { eventBeanManufacturer = EventTypeUtility.getManufacturer(outputEventType, writables, context.getServices().getClasspathImportServiceCompileTime(), false, context.getServices().getEventTypeAvroHandler()); } catch (EventBeanManufactureException e) { throw new ExprValidationException("Cannot manufacture event for the provided type '" + outputEventType.getName() + "': " + e.getMessage(), e); TypeWidenerCustomizer typeWidenerCustomizer = context.getServices().getEventTypeAvroHandler().getTypeWidenerCustomizer(outputEventType); for (WriteablePropertyDescriptor writable : writables) { ExprNode validated = EPLValidationUtil.validateSimpleGetSubtree(ExprNodeOrigin.DATAFLOWBEACON, exprNode, null, false, context.getBase().getStatementRawInfo(), context.getServices()); TypeWidenerSPI widener; try { widener = TypeWidenerFactory.getCheckPropertyAssignType(ExprNodeUtilityPrint.toExpressionStringMinPrecedenceSafe(validated), validated.getForge().getEvaluationType(), writable.getType(), writable.getPropertyName(), false, typeWidenerCustomizer, context.getBase().getStatementName()); } catch (TypeWidenerException e) { throw new ExprValidationException("Failed for property '" + writable.getPropertyName() + "'");
DataFlowOpForgeInitializeContext context = new DataFlowOpForgeInitializeContext(desc.getGraphName(), operatorNumber, operatorAnnotations, operatorSpec, inputPorts, outputPorts, codegenEnv, base, services);
public DataFlowOpForgeInitializeResult initializeForge(DataFlowOpForgeInitializeContext context) throws ExprValidationException {
    if (context.getOutputPorts().size() != 1) {
        throw new IllegalArgumentException("EPStatementSource operator requires one output stream but produces " + context.getOutputPorts().size() + " streams");
    }
    // 'statementName' and 'statementFilter' are mutually exclusive.
    if (statementName != null && statementFilter != null) {
        throw new ExprValidationException("Both 'statementName' or 'statementFilter' parameters were provided, only either one is expected");
    }
    // 'statementDeploymentId' and 'statementName' must be provided together (exactly-one-of check).
    // Use short-circuit || instead of the bitwise | the original had — same result for
    // side-effect-free boolean operands, but || is the conventional conditional operator.
    if ((statementDeploymentId == null && statementName != null) || (statementDeploymentId != null && statementName == null)) {
        throw new ExprValidationException("Both 'statementDeploymentId' and 'statementName' are required when either of these are specified");
    }
    DataFlowOpOutputPort portZero = context.getOutputPorts().get(0);
    // Submit EventBean instances when the declared output type is a wildcard.
    if (portZero != null && portZero.getOptionalDeclaredType() != null && portZero.getOptionalDeclaredType().isWildcard()) {
        submitEventBean = true;
    }
    return null;
}
public DataFlowOpForgeInitializeResult initializeForge(DataFlowOpForgeInitializeContext context) throws ExprValidationException {
    // Require exactly one input stream.
    if (context.getInputPorts().size() != 1) {
        throw new EPException(this.getClass().getSimpleName() + " expected a single input port");
    }
    // Resolve and verify the input event type.
    eventType = context.getInputPorts().get(0).getTypeDesc().getEventType();
    if (eventType == null) {
        throw new EPException("No event type defined for input port");
    }
    // Validate operator parameters; each call returns the validated expression node.
    this.file = DataFlowParameterValidation.validate("file", file, String.class, context);
    this.classpathFile = DataFlowParameterValidation.validate("classpathFile", classpathFile, boolean.class, context);
    this.append = DataFlowParameterValidation.validate("append", append, boolean.class, context);
    return null;
}
public DataFlowOpForgeInitializeResult initializeForge(DataFlowOpForgeInitializeContext context) throws ExprValidationException {
    if (context.getInputPorts().size() != 1) {
        throw new ExprValidationException("Filter requires single input port");
    }
    if (filter == null) {
        throw new ExprValidationException("Required parameter 'filter' providing the filter expression is not provided");
    }
    // Filter fans out to either one stream (matching events) or two (matching + non-matching).
    int numOutputPorts = context.getOutputPorts().size();
    if (numOutputPorts == 0 || numOutputPorts > 2) {
        throw new IllegalArgumentException("Filter operator requires one or two output stream(s) but produces " + numOutputPorts + " streams");
    }
    eventType = context.getInputPorts().get(0).getTypeDesc().getEventType();
    singleOutputPort = numOutputPorts == 1;
    // Validate the filter expression against the input type; it must yield a Boolean.
    filter = DataFlowParameterValidation.validate("filter", filter, eventType, Boolean.class, context);
    // Every output port carries the unchanged input event type.
    GraphTypeDesc[] typesPerPort = new GraphTypeDesc[numOutputPorts];
    for (int port = 0; port < typesPerPort.length; port++) {
        typesPerPort[port] = new GraphTypeDesc(false, true, eventType);
    }
    return new DataFlowOpForgeInitializeResult(typesPerPort);
}
public DataFlowOpForgeInitializeResult initializeForge(DataFlowOpForgeInitializeContext context) throws ExprValidationException { iterations = DataFlowParameterValidation.validate("iterations", iterations, Number.class, context); initialDelay = DataFlowParameterValidation.validate("initialDelay", initialDelay, Number.class, context); interval = DataFlowParameterValidation.validate("interval", interval, Number.class, context); if (context.getOutputPorts().size() != 1) { throw new IllegalArgumentException("BeaconSource operator requires one output stream but produces " + context.getOutputPorts().size() + " streams"); } DataFlowOpOutputPort port = context.getOutputPorts().get(0); // Check if a type is declared if (port.getOptionalDeclaredType() == null || port.getOptionalDeclaredType().getEventType() == null) { return initializeTypeUndeclared(context); } return initializeTypeDeclared(port, context); }
public DataFlowOpForgeInitializeResult initializeForge(DataFlowOpForgeInitializeContext context) throws ExprValidationException {
    // LogSink is a pure sink: declaring any output stream is a misconfiguration.
    if (!context.getOutputPorts().isEmpty()) {
        throw new IllegalArgumentException("LogSink operator does not provide an output stream");
    }
    // Capture the event type of every input port, indexed by port number.
    Map<Integer, DataFlowOpInputPort> inputPorts = context.getInputPorts();
    eventTypes = new EventType[inputPorts.size()];
    for (Map.Entry<Integer, DataFlowOpInputPort> input : inputPorts.entrySet()) {
        eventTypes[input.getKey()] = input.getValue().getTypeDesc().getEventType();
    }
    // Validate rendering parameters; each call returns the validated expression node.
    title = DataFlowParameterValidation.validate("title", title, String.class, context);
    layout = DataFlowParameterValidation.validate("layout", layout, String.class, context);
    format = DataFlowParameterValidation.validate("format", format, String.class, context);
    log = DataFlowParameterValidation.validate("log", log, boolean.class, context);
    linefeed = DataFlowParameterValidation.validate("linefeed", linefeed, boolean.class, context);
    return null;
}
public DataFlowOpForgeInitializeResult initializeForge(DataFlowOpForgeInitializeContext context) throws ExprValidationException {
    // Resolve the declared event type of output port zero. Output ports are keyed by port
    // number, so get(0) may return null; guard before dereferencing — the original would
    // fail with an NPE instead of the intended validation message.
    DataFlowOpOutputPort portZero = context.getOutputPorts().get(0);
    outputEventType = portZero != null && portZero.getOptionalDeclaredType() != null ? portZero.getOptionalDeclaredType().getEventType() : null;
    if (outputEventType == null) {
        throw new ExprValidationException("No event type provided for output, please provide an event type name");
    }
    // Every output port must declare an event type; report a clear error rather than NPE.
    outputPortTypes = new EventType[context.getOutputPorts().size()];
    for (Map.Entry<Integer, DataFlowOpOutputPort> entry : context.getOutputPorts().entrySet()) {
        if (entry.getValue().getOptionalDeclaredType() == null) {
            throw new ExprValidationException("No event type provided for output port " + entry.getKey() + ", please provide an event type name");
        }
        outputPortTypes[entry.getKey()] = entry.getValue().getOptionalDeclaredType().getEventType();
    }
    // Validate operator parameters; each call returns the validated expression node.
    file = DataFlowParameterValidation.validate("file", file, String.class, context);
    classpathFile = DataFlowParameterValidation.validate("classpathFile", classpathFile, boolean.class, context);
    hasHeaderLine = DataFlowParameterValidation.validate("hasHeaderLine", hasHeaderLine, boolean.class, context);
    hasTitleLine = DataFlowParameterValidation.validate("hasTitleLine", hasTitleLine, boolean.class, context);
    numLoops = DataFlowParameterValidation.validate("numLoops", numLoops, Integer.class, context);
    format = DataFlowParameterValidation.validate("format", format, String.class, context);
    propertyNameLine = DataFlowParameterValidation.validate("propertyNameLine", propertyNameLine, String.class, context);
    propertyNameFile = DataFlowParameterValidation.validate("propertyNameFile", propertyNameFile, String.class, context);
    dateFormat = DataFlowParameterValidation.validate("dateFormat", dateFormat, String.class, context);
    return null;
}
public DataFlowOpForgeInitializeResult initializeForge(DataFlowOpForgeInitializeContext context) throws ExprValidationException {
    // The output event type is taken from the declared type of output port zero, if any.
    DataFlowOpOutputPort firstPort = context.getOutputPorts().get(0);
    if (firstPort.getOptionalDeclaredType() != null) {
        outputEventType = firstPort.getOptionalDeclaredType().getEventType();
    } else {
        outputEventType = null;
    }
    if (outputEventType == null) {
        throw new ExprValidationException("No event type provided for output, please provide an event type name");
    }
    // Collect the declared event type of each output port, indexed by port number.
    outputPortTypes = new EventType[context.getOutputPorts().size()];
    for (Map.Entry<Integer, DataFlowOpOutputPort> outputPort : context.getOutputPorts().entrySet()) {
        outputPortTypes[outputPort.getKey()] = outputPort.getValue().getOptionalDeclaredType().getEventType();
    }
    // Validate operator parameters; each call returns the validated expression node.
    file = DataFlowParameterValidation.validate("file", file, String.class, context);
    classpathFile = DataFlowParameterValidation.validate("classpathFile", classpathFile, boolean.class, context);
    hasHeaderLine = DataFlowParameterValidation.validate("hasHeaderLine", hasHeaderLine, boolean.class, context);
    hasTitleLine = DataFlowParameterValidation.validate("hasTitleLine", hasTitleLine, boolean.class, context);
    numLoops = DataFlowParameterValidation.validate("numLoops", numLoops, Integer.class, context);
    format = DataFlowParameterValidation.validate("format", format, String.class, context);
    propertyNameLine = DataFlowParameterValidation.validate("propertyNameLine", propertyNameLine, String.class, context);
    propertyNameFile = DataFlowParameterValidation.validate("propertyNameFile", propertyNameFile, String.class, context);
    dateFormat = DataFlowParameterValidation.validate("dateFormat", dateFormat, String.class, context);
    return null;
}