@SeeAlso({ ListAzureBlobStorage.class, PutAzureBlobStorage.class, DeleteAzureBlobStorage.class })
@InputRequirement(Requirement.INPUT_REQUIRED)
@WritesAttributes({
    @WritesAttribute(attribute = "azure.length", description = "The length of the blob fetched")
})
private List<WritesAttribute> getWritesAttributes(Processor processor) {
    List<WritesAttribute> attributes = new ArrayList<>();

    WritesAttributes writesAttributes = processor.getClass().getAnnotation(WritesAttributes.class);
    if (writesAttributes != null) {
        Collections.addAll(attributes, writesAttributes.value());
    }

    WritesAttribute writeAttribute = processor.getClass().getAnnotation(WritesAttribute.class);
    if (writeAttribute != null) {
        attributes.add(writeAttribute);
    }

    return attributes;
}
"or from the Query parameter.") @Tags({ "elastic", "elasticsearch", "delete", "query"}) @WritesAttributes({ @WritesAttribute(attribute = "elasticsearch.delete.took", description = "The amount of time that it took to complete the delete operation in ms."), @WritesAttribute(attribute = "elasticsearch.delete.error", description = "The error message provided by ElasticSearch if there is an error running the delete.")
/**
 * Collects the attributes that a processor writes.
 *
 * @param processor the processor to describe
 * @return the list of attributes the processor writes
 */
private List<WritesAttribute> getWritesAttributes(Processor processor) {
    List<WritesAttribute> attributes = new ArrayList<>();

    WritesAttributes writesAttributes = processor.getClass().getAnnotation(WritesAttributes.class);
    if (writesAttributes != null) {
        attributes.addAll(Arrays.asList(writesAttributes.value()));
    }

    WritesAttribute writeAttribute = processor.getClass().getAnnotation(WritesAttribute.class);
    if (writeAttribute != null) {
        attributes.add(writeAttribute);
    }

    return attributes;
}
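// Minimal usage sketch (an assumption, not part of the original source): a
// documentation generator could call the helper above and print each declared
// attribute. FetchParquet is just one of the processors from this section.
Processor processor = new FetchParquet();
for (WritesAttribute attr : getWritesAttributes(processor)) {
    System.out.println(attr.attribute() + " - " + attr.description());
}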
+ "generated copies are sent to the 'success' relationship. In addition, each FlowFile gets an attribute 'copy.index' set to the copy number, where the original FlowFile gets " + "a value of zero, and all copies receive incremented integer values.") @WritesAttributes({ @WritesAttribute(attribute = "copy.index", description = "A zero-based incrementing integer value based on which copy the FlowFile is.") })
@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
@WritesAttributes({
    @WritesAttribute(attribute = "error.line", description = "The line number of the error."),
    @WritesAttribute(attribute = "error.msg", description = "The message explaining the error.")
})
+ "the library's website." + "NOTE: The library being used loads the images into memory so extremely large images may cause problems.") @WritesAttributes({@WritesAttribute(attribute = "<directory name>.<tag name>", description = "The extracted image metadata " + "will be inserted with the attribute name \"<directory name>.<tag name>\". ")}) @SupportsBatching
+ "not contain any records, an empty parquet file is the output. NOTE: Many Avro datatypes (collections, primitives, and unions of primitives, e.g.) can " + "be converted to parquet, but unions of collections and other complex datatypes may not be able to be converted to Parquet.") @WritesAttributes({ @WritesAttribute(attribute = "filename", description = "Sets the filename to the existing filename with the extension replaced by / added to by .parquet"), @WritesAttribute(attribute = "record.count", description = "Sets the number of records in the parquet file.")
@SideEffectFree
@Tags({"convert", "record", "generic", "schema", "json", "csv", "avro", "log", "logs", "freeform", "text"})
@WritesAttributes({
    @WritesAttribute(attribute = "mime.type",
        description = "Sets the mime.type attribute to the MIME Type specified by the Record Writer"),
    @WritesAttribute(attribute = "record.count", description = "The number of records in the FlowFile")
})
@CapabilityDescription("A record-based version of GetMongo that uses the Record writers to write the MongoDB result set.") @Tags({"mongo", "mongodb", "get", "fetch", "record", "json"}) @InputRequirement(InputRequirement.Requirement.INPUT_ALLOWED) @WritesAttributes({ @WritesAttribute(attribute = GetMongo.DB_NAME, description = "The database where the results came from."), @WritesAttribute(attribute = GetMongo.COL_NAME, description = "The collection where the results came from.")
@InputRequirement(Requirement.INPUT_FORBIDDEN)
@CapabilityDescription("Retrieves information from an SNMP agent and outputs a FlowFile with the information in attributes and without any content")
@WritesAttributes({
    @WritesAttribute(attribute = SNMPUtils.SNMP_PROP_PREFIX + "*",
        description = "Attributes retrieved from the SNMP response. It may include:"
            + " snmp$errorIndex, snmp$errorStatus, snmp$errorStatusText, snmp$nonRepeaters, snmp$requestID, snmp$type, snmp$variableBindings"),
})
+ "the external Tika tools which in turn depend on other projects for metadata extraction. For the more " + "details and the list of supported file types, visit the library's website at http://tika.apache.org/.") @WritesAttributes({@WritesAttribute(attribute = "<Metadata Key Prefix><attribute>", description = "The extracted content metadata " + "will be inserted with the attribute name \"<Metadata Key Prefix><attribute>\", or \"<attribute>\" if " + "\"Metadata Key Prefix\" is not provided.")})
@Tags({ "mongodb", "read", "get" }) @InputRequirement(Requirement.INPUT_ALLOWED) @CapabilityDescription("Creates FlowFiles from documents in MongoDB loaded by a user-specified query.") @WritesAttributes({ @WritesAttribute(attribute = GetMongo.DB_NAME, description = "The database where the results came from."), @WritesAttribute(attribute = GetMongo.COL_NAME, description = "The collection where the results came from.")
"set as large as the largest messages expected to be received, meaning if every 100kb there is a line separator, then " + "the Receive Buffer Size must be greater than 100kb.") @WritesAttributes({ @WritesAttribute(attribute="tcp.sender", description="The sending host of the messages."), @WritesAttribute(attribute="tcp.port", description="The sending port the messages were received.")
@SupportsBatching
@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
@Tags({"parquet", "hadoop", "HDFS", "get", "ingest", "fetch", "source", "record"})
@CapabilityDescription("Reads from a given Parquet file and writes records to the content of the flow file using "
    + "the selected record writer. The original Parquet file will remain unchanged, and the content of the flow file "
    + "will be replaced with records of the selected type. This processor can be used with ListHDFS or ListFile to obtain "
    + "a listing of files to fetch.")
@WritesAttributes({
    @WritesAttribute(attribute = "fetch.failure.reason", description = "When a FlowFile is routed to 'failure', this attribute is added "
        + "indicating why the file could not be fetched from the given filesystem."),
    @WritesAttribute(attribute = "record.count", description = "The number of records in the resulting flow file")
})
@SeeAlso({PutParquet.class})
@Restricted(restrictions = {
    @Restriction(
        requiredPermission = RequiredPermission.READ_FILESYSTEM,
        explanation = "Provides operator the ability to retrieve any file that NiFi has access to in HDFS or the local filesystem.")
})
public class FetchParquet extends AbstractFetchHDFSRecord {

    @Override
    public HDFSRecordReader createHDFSRecordReader(final ProcessContext context, final FlowFile flowFile,
            final Configuration conf, final Path path) throws IOException {
        final ParquetReader.Builder<GenericRecord> readerBuilder = AvroParquetReader.<GenericRecord>builder(path).withConf(conf);
        return new AvroParquetHDFSRecordReader(readerBuilder.build());
    }
}
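// For context, a minimal sketch of how a ParquetReader<GenericRecord> like the
// one built above is typically consumed (standard parquet-avro API; path and
// conf as in the method above). read() returns null at end of file.
try (ParquetReader<GenericRecord> reader =
        AvroParquetReader.<GenericRecord>builder(path).withConf(conf).build()) {
    GenericRecord record;
    while ((record = reader.read()) != null) {
        // hand each record to the configured record writer, count it, etc.
    }
}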
"specific remote host and port by specifying the Sending Host and Sending Host Port properties, otherwise it will listen " + "for datagrams from all hosts and ports.") @WritesAttributes({ @WritesAttribute(attribute="udp.sender", description="The sending host of the messages."), @WritesAttribute(attribute="udp.port", description="The sending port the messages were received.")
@Tags({"rethinkdb", "get", "read", "fetch"}) @CapabilityDescription("Processor to get a JSON document from RethinkDB (https://www.rethinkdb.com/) using the document id. The FlowFile will contain the retrieved document") @WritesAttributes({ @WritesAttribute(attribute = GetRethinkDB.RETHINKDB_ERROR_MESSAGE, description = "RethinkDB error message"), })
" \"http://jsoup.org/apidocs/org/jsoup/select/Selector.html\"") @SeeAlso({GetHTMLElement.class, PutHTMLElement.class}) @WritesAttributes({@WritesAttribute(attribute="NumElementsModified", description="Total number of HTML " + "element modifications made")}) public class ModifyHTMLElement extends AbstractHTMLProcessor {
@SupportsBatching
@WritesAttributes({
    @WritesAttribute(attribute = "s3.tag.___",
        description = "The tags associated with the S3 object will be written as part of the FlowFile attributes")
})
"Note - The Ignite Kernel periodically outputs node performance statistics to the logs. This message " + " can be turned off by setting the log level for logger 'org.apache.ignite' to WARN in the logback.xml configuration file.") @WritesAttributes({ @WritesAttribute(attribute = GetIgniteCache.IGNITE_GET_FAILED_REASON_ATTRIBUTE_KEY, description = "The reason for getting entry from cache"), })