@SystemResourceConsideration(resource = SystemResource.MEMORY) @ReadsAttributes({ @ReadsAttribute(attribute = "amqp$appId", description = "The App ID field to set on the AMQP Message"), @ReadsAttribute(attribute = "amqp$contentEncoding", description = "The Content Encoding to set on the AMQP Message"), @ReadsAttribute(attribute = "amqp$contentType", description = "The Content Type to set on the AMQP Message"), @ReadsAttribute(attribute = "amqp$headers", description = "The headers to set on the AMQP Message"), @ReadsAttribute(attribute = "amqp$deliveryMode", description = "The numeric indicator for the Message's Delivery Mode"), @ReadsAttribute(attribute = "amqp$priority", description = "The Message priority"), @ReadsAttribute(attribute = "amqp$correlationId", description = "The Message's Correlation ID"), @ReadsAttribute(attribute = "amqp$replyTo", description = "The value of the Message's Reply-To field"), @ReadsAttribute(attribute = "amqp$expiration", description = "The Message Expiration"), @ReadsAttribute(attribute = "amqp$messageId", description = "The unique ID of the Message"), @ReadsAttribute(attribute = "amqp$timestamp", description = "The timestamp of the Message, as the number of milliseconds since epoch"), @ReadsAttribute(attribute = "amqp$type", description = "The type of message"), @ReadsAttribute(attribute = "amqp$userId", description = "The ID of the user"), @ReadsAttribute(attribute = "amqp$clusterId", description = "The ID of the AMQP Cluster"), }) public class PublishAMQP extends AbstractAMQPProcessor<AMQPPublisher> {
/**
 * Writes a single {@code <attribute>} element for one {@link ReadsAttribute} annotation,
 * containing a {@code <name>} and a {@code <description>} child element.
 *
 * @param attribute the annotation whose attribute name and description are serialized
 * @throws IOException if writing to the underlying output fails
 */
private void writeReadsAttribute(final ReadsAttribute attribute) throws IOException {
    writeStartElement("attribute");
    writeTextElement("name", attribute.attribute());
    writeTextElement("description", attribute.description());
    writeEndElement();
}
+ "FlowFile attributes will be added as JMS headers and/or properties to the outgoing JMS message.") @ReadsAttributes({ @ReadsAttribute(attribute = JmsHeaders.DELIVERY_MODE, description = "This attribute becomes the JMSDeliveryMode message header. Must be an integer."), @ReadsAttribute(attribute = JmsHeaders.EXPIRATION, description = "This attribute becomes the JMSExpiration message header. Must be an integer."), @ReadsAttribute(attribute = JmsHeaders.PRIORITY, description = "This attribute becomes the JMSPriority message header. Must be an integer."), @ReadsAttribute(attribute = JmsHeaders.REDELIVERED, description = "This attribute becomes the JMSRedelivered message header."), @ReadsAttribute(attribute = JmsHeaders.TIMESTAMP, description = "This attribute becomes the JMSTimestamp message header. Must be a long."), @ReadsAttribute(attribute = JmsHeaders.CORRELATION_ID, description = "This attribute becomes the JMSCorrelationID message header."), @ReadsAttribute(attribute = JmsHeaders.TYPE, description = "This attribute becomes the JMSType message header. Must be an integer."), @ReadsAttribute(attribute = JmsHeaders.REPLY_TO, description = "This attribute becomes the JMSReplyTo message header. Must be an integer."), @ReadsAttribute(attribute = JmsHeaders.DESTINATION, description = "This attribute becomes the JMSDestination message header. Must be an integer."), @ReadsAttribute(attribute = "other attributes", description = "All other attributes that do not start with " + JmsHeaders.PREFIX + " are added as message properties."), @ReadsAttribute(attribute = "other attributes .type", description = "When an attribute will be added as a message property, a second attribute of the same name but with an extra" + " `.type` at the end will cause the message property to be sent using that strong type. 
For example, attribute `delay` with value `12000` and another attribute" + " `delay.type` with value `integer` will cause a JMS message property `delay` to be sent as an Integer rather than a String. Supported types are boolean, byte,"
// Render one ReadsAttribute as a documentation table row: one cell for the attribute
// name and one for its description, falling back to "Not Specified" when blank.
xmlStreamWriter.writeStartElement("tr");
writeSimpleElement(xmlStreamWriter, "td", defaultIfBlank(attribute.attribute(), "Not Specified"));
// Fix: the description cell was missing its writeSimpleElement(xmlStreamWriter, "td", ...) wrapper,
// leaving a bare defaultIfBlank(...) expression with an unbalanced closing parenthesis.
writeSimpleElement(xmlStreamWriter, "td", defaultIfBlank(attribute.description(), "Not Specified"));
xmlStreamWriter.writeEndElement();
expressionLanguageScope = ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) @ReadsAttributes({ @ReadsAttribute(attribute = HTTPUtils.HTTP_CONTEXT_ID, description = "The value of this attribute is used to lookup the HTTP Response so that the " + "proper message can be sent back to the requestor. If this attribute is missing, the FlowFile will be routed to 'failure.'"), @ReadsAttribute(attribute = HTTPUtils.HTTP_REQUEST_URI, description = "Value of the URI requested by the client. Used for provenance event."), @ReadsAttribute(attribute = HTTPUtils.HTTP_REMOTE_HOST, description = "IP address of the client. Used for provenance event."), @ReadsAttribute(attribute = HTTPUtils.HTTP_LOCAL_NAME, description = "IP address/hostname of the server. Used for provenance event."), @ReadsAttribute(attribute = HTTPUtils.HTTP_PORT, description = "Listening port of the server. Used for provenance event."), @ReadsAttribute(attribute = HTTPUtils.HTTP_SSL_CERT, description = "SSL distinguished name (if any). Used for provenance event.")}) @SeeAlso(value = {HandleHttpRequest.class}, classNames = {"org.apache.nifi.http.StandardHttpContextMap", "org.apache.nifi.ssl.StandardSSLContextService"}) public class HandleHttpResponse extends AbstractProcessor {
+ "a number indicating the JDBC Type. The content of the FlowFile is expected to be in UTF-8 format.") @ReadsAttributes({ @ReadsAttribute(attribute = "fragment.identifier", description = "If the <Support Fragment Transactions> property is true, this attribute is used to determine whether or " + "not two FlowFiles belong to the same transaction."), @ReadsAttribute(attribute = "fragment.count", description = "If the <Support Fragment Transactions> property is true, this attribute is used to determine how many FlowFiles " + "are needed to complete the transaction."), @ReadsAttribute(attribute = "fragment.index", description = "If the <Support Fragment Transactions> property is true, this attribute is used to determine the order that the FlowFiles " + "in a transaction should be evaluated."), @ReadsAttribute(attribute = "sql.args.N.type", description = "Incoming FlowFiles are expected to be parametrized SQL statements. The type of each Parameter is specified as an integer " + "that represents the JDBC Type of the parameter."), @ReadsAttribute(attribute = "sql.args.N.value", description = "Incoming FlowFiles are expected to be parametrized SQL statements. The value of the Parameters are specified as " + "sql.args.1.value, sql.args.2.value, sql.args.3.value, and so on. The type of the sql.args.1.value Parameter is specified by the sql.args.1.type attribute."), @ReadsAttribute(attribute = "sql.args.N.format", description = "This attribute is always optional, but default options may not always work for your data. " + "Incoming FlowFiles are expected to be parametrized SQL statements. In some cases " + "a format option needs to be specified, currently this is only applicable for binary data types, dates, times and timestamps. Binary Data Types (defaults to 'ascii') - "
@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED) @Tags({"counter","debug", "instrumentation"}) @CapabilityDescription("This processor allows users to set specific counters and key points in their flow. It is useful for debugging and basic counting functions.") @ReadsAttribute(attribute = "counterName", description = "The name of the counter to update/get.") public class UpdateCounter extends AbstractProcessor {
@CapabilityDescription("Parses the contents of a Windows Event Log file (evtx) and writes the resulting XML to the FlowFile") @ReadsAttributes({ @ReadsAttribute(attribute = "filename", description = "The filename of the evtx file") }) @WritesAttributes({
+ "is expected to be a lowercase string indicating the Cassandra type.") @ReadsAttributes({ @ReadsAttribute(attribute = "cql.args.N.type", description = "Incoming FlowFiles are expected to be parameterized CQL statements. The type of each " + "parameter is specified as a lowercase string corresponding to the Cassandra data type (text, " + "collection should be comma-delimited, follow the collection type, and be enclosed in angle brackets " + "(< and >), for example set<text> or map<timestamp, int>."), @ReadsAttribute(attribute = "cql.args.N.value", description = "Incoming FlowFiles are expected to be parameterized CQL statements. The value of the " + "parameters are specified as cql.args.1.value, cql.args.2.value, cql.args.3.value, and so on. The "
"and the user can select whether it will delete one or many documents that match it." @ReadsAttribute( attribute = "mongodb.delete.mode", description = "Configurable parameter for controlling delete mode on a per-flowfile basis. The process must be " +
+ "created from FlowFiles in different connections. This processor updates the mime.type attribute as appropriate.") @ReadsAttributes({ @ReadsAttribute(attribute = "fragment.identifier", description = "Applicable only if the <Merge Strategy> property is set to Defragment. " + "All FlowFiles with the same value for this attribute will be bundled together."), @ReadsAttribute(attribute = "fragment.index", description = "Applicable only if the <Merge Strategy> property is set to Defragment. " + "This attribute indicates the order in which the fragments should be assembled. This " + "attribute must be present on all FlowFiles when using the Defragment Merge Strategy and must be a unique (i.e., unique across all " + "\"fragment.identifier\" attribute and the same value for the \"fragment.index\" attribute, the first FlowFile processed will be " + "accepted and subsequent FlowFiles will not be accepted into the Bin."), @ReadsAttribute(attribute = "fragment.count", description = "Applicable only if the <Merge Strategy> property is set to Defragment. This " + "attribute must be present on all FlowFiles with the same value for the fragment.identifier attribute. All FlowFiles in the same " + "bundle must have the same value for this attribute. The value of this attribute indicates how many FlowFiles should be expected " + "in the given bundle."), @ReadsAttribute(attribute = "segment.original.filename", description = "Applicable only if the <Merge Strategy> property is set to Defragment. " + "This attribute must be present on all FlowFiles with the same value for the fragment.identifier attribute. All FlowFiles in the same " + "bundle must have the same value for this attribute. The value of this attribute will be used for the filename of the completed merged " + "FlowFile."), @ReadsAttribute(attribute = "tar.permissions", description = "Applicable only if the <Merge Format> property is set to TAR. 
The value of this " + "attribute must be 3 characters; each character must be in the range 0 to 7 (inclusive) and indicates the file permissions that should " + "be used for the FlowFile's TAR entry. If this attribute is missing or has an invalid value, the default value of 644 will be used") })
+ "a number indicating the JDBC Type. The content of the FlowFile is expected to be in UTF-8 format.") @ReadsAttributes({ @ReadsAttribute(attribute = "hiveql.args.N.type", description = "Incoming FlowFiles are expected to be parametrized HiveQL statements. The type of each Parameter is specified as an integer " + "that represents the JDBC Type of the parameter."), @ReadsAttribute(attribute = "hiveql.args.N.value", description = "Incoming FlowFiles are expected to be parametrized HiveQL statements. The value of the Parameters are specified as " + "hiveql.args.1.value, hiveql.args.2.value, hiveql.args.3.value, and so on. The type of the hiveql.args.1.value Parameter is specified by the hiveql.args.1.type attribute.") })
+ "a number indicating the JDBC Type. The content of the FlowFile is expected to be in UTF-8 format.") @ReadsAttributes({ @ReadsAttribute(attribute = "hiveql.args.N.type", description = "Incoming FlowFiles are expected to be parametrized HiveQL statements. The type of each Parameter is specified as an integer " + "that represents the JDBC Type of the parameter."), @ReadsAttribute(attribute = "hiveql.args.N.value", description = "Incoming FlowFiles are expected to be parametrized HiveQL statements. The value of the Parameters are specified as " + "hiveql.args.1.value, hiveql.args.2.value, hiveql.args.3.value, and so on. The type of the hiveql.args.1.value Parameter is specified by the hiveql.args.1.type attribute.") })
+ "FlowFile attribute 'executesql.row.count' indicates how many rows were selected.") @ReadsAttributes({ @ReadsAttribute(attribute = "sql.args.N.type", description = "Incoming FlowFiles are expected to be parametrized SQL statements. The type of each Parameter is specified as an integer " + "that represents the JDBC Type of the parameter."), @ReadsAttribute(attribute = "sql.args.N.value", description = "Incoming FlowFiles are expected to be parametrized SQL statements. The value of the Parameters are specified as " + "sql.args.1.value, sql.args.2.value, sql.args.3.value, and so on. The type of the sql.args.1.value Parameter is specified by the sql.args.1.type attribute."), @ReadsAttribute(attribute = "sql.args.N.format", description = "This attribute is always optional, but default options may not always work for your data. " + "Incoming FlowFiles are expected to be parametrized SQL statements. In some cases " + "a format option needs to be specified, currently this is only applicable for binary data types, dates, times and timestamps. Binary Data Types (defaults to 'ascii') - "
+ "FlowFile attribute 'executesql.row.count' indicates how many rows were selected.") @ReadsAttributes({ @ReadsAttribute(attribute = "sql.args.N.type", description = "Incoming FlowFiles are expected to be parametrized SQL statements. The type of each Parameter is specified as an integer " + "that represents the JDBC Type of the parameter."), @ReadsAttribute(attribute = "sql.args.N.value", description = "Incoming FlowFiles are expected to be parametrized SQL statements. The value of the Parameters are specified as " + "sql.args.1.value, sql.args.2.value, sql.args.3.value, and so on. The type of the sql.args.1.value Parameter is specified by the sql.args.1.type attribute."), @ReadsAttribute(attribute = "sql.args.N.format", description = "This attribute is always optional, but default options may not always work for your data. " + "Incoming FlowFiles are expected to be parametrized SQL statements. In some cases " + "a format option needs to be specified, currently this is only applicable for binary data types, dates, times and timestamps. Binary Data Types (defaults to 'ascii') - "
+ "(if no existing records with the new primary key values are found), or could inadvertently corrupt the existing data (by changing records for which the new values of the primary keys " + "exist).") @ReadsAttribute(attribute = PutDatabaseRecord.STATEMENT_TYPE_ATTRIBUTE, description = "If 'Use statement.type Attribute' is selected for the Statement Type property, the value of this attribute " + "will be used to determine the type of statement (INSERT, UPDATE, DELETE, SQL, etc.) to generate and execute.") @WritesAttribute(attribute = PutDatabaseRecord.PUT_DATABASE_RECORD_ERROR, description = "If an error occurs during processing, the flow file will be routed to failure or retry, and this attribute "
" value of the mime.type attribute on the incoming FlowFile to determine the type of content present.") @ReadsAttributes({ @ReadsAttribute(attribute = "mime.type", description = "If configured by property \"Input Content Type\" will" + " use this value to determine what sort of content should be inferred from the incoming FlowFile content."), })
@Tags({"hadoop", "hbase", "put", "record"}) @CapabilityDescription("Adds rows to HBase based on the contents of a flowfile using a configured record reader.") @ReadsAttribute(attribute = "restart.index", description = "Reads restart.index when it needs to replay part of a record set that did not get into HBase.") @WritesAttribute(attribute = "restart.index", description = "Writes restart.index when a batch fails to be insert into HBase") public class PutHBaseRecord extends AbstractPutHBase {
" If any error occurs while reading records from the input, or writing records to the output, " + "the entire dot file will be removed and the flow file will be routed to failure or retry, depending on the error.") @ReadsAttribute(attribute = "filename", description = "The name of the file to write comes from the value of this attribute.") @WritesAttributes({ @WritesAttribute(attribute = "filename", description = "The name of the file is stored in this attribute."),
+ "is set, the same value for the specified attribute. See Processor Usage and Additional Details for more information.") @ReadsAttributes({ @ReadsAttribute(attribute = "fragment.identifier", description = "Applicable only if the <Merge Strategy> property is set to Defragment. " + "All FlowFiles with the same value for this attribute will be bundled together."), @ReadsAttribute(attribute = "fragment.count", description = "Applicable only if the <Merge Strategy> property is set to Defragment. This " + "attribute must be present on all FlowFiles with the same value for the fragment.identifier attribute. All FlowFiles in the same " + "bundle must have the same value for this attribute. The value of this attribute indicates how many FlowFiles should be expected "