@EventDriven @SupportsBatching @InputRequirement(Requirement.INPUT_REQUIRED) @SideEffectFree @Tags({"convert", "record", "generic", "schema", "json", "csv", "avro", "log", "logs", "freeform", "text"}) @WritesAttributes({ @WritesAttribute(attribute = "mime.type", description = "Sets the mime.type attribute to the MIME Type specified by the Record Writer"), @WritesAttribute(attribute = "record.count", description = "The number of records in the FlowFile") }) @CapabilityDescription("Converts records from one data format to another using configured Record Reader and Record Write Controller Services. "
/**
 * Gets the description of the state this component persists, as declared by the
 * component's {@code @Stateful} annotation.
 *
 * @param componentClass the component class to inspect
 * @return the declared state description, or {@code null} if the class is not
 *         annotated with {@code @Stateful}
 */
private String getStateDescription(final Class<?> componentClass) {
    // Fixed copy-paste artifact: the local was previously named "capabilityDesc"
    // even though it holds the @Stateful annotation, not a capability description.
    final Stateful stateful = componentClass.getAnnotation(Stateful.class);
    return stateful == null ? null : stateful.description();
}
@SupportsBatching @InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED) @Tags({"parquet", "hadoop", "HDFS", "get", "ingest", "fetch", "source", "record"}) @CapabilityDescription("Reads from a given Parquet file and writes records to the content of the flow file using " + "the selected record writer. The original Parquet file will remain unchanged, and the content of the flow file " + "will be replaced with records of the selected type. This processor can be used with ListHDFS or ListFile to obtain " + "a listing of files to fetch.") @WritesAttributes({ @WritesAttribute(attribute="fetch.failure.reason", description="When a FlowFile is routed to 'failure', this attribute is added " + "indicating why the file could not be fetched from the given filesystem."), @WritesAttribute(attribute = "record.count", description = "The number of records in the resulting flow file") }) @SeeAlso({PutParquet.class}) @Restricted(restrictions = { @Restriction( requiredPermission = RequiredPermission.READ_FILESYSTEM, explanation = "Provides operator the ability to retrieve any file that NiFi has access to in HDFS or the local filesystem.") }) public class FetchParquet extends AbstractFetchHDFSRecord { @Override public HDFSRecordReader createHDFSRecordReader(final ProcessContext context, final FlowFile flowFile, final Configuration conf, final Path path) throws IOException { final ParquetReader.Builder<GenericRecord> readerBuilder = AvroParquetReader.<GenericRecord>builder(path).withConf(conf); return new AvroParquetHDFSRecordReader(readerBuilder.build()); } }
@InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED) @Tags({"put", "parquet", "hadoop", "HDFS", "filesystem", "record"}) @CapabilityDescription("Reads records from an incoming FlowFile using the provided Record Reader, and writes those records " + " If any error occurs while reading records from the input, or writing records to the output, " + "the entire dot file will be removed and the flow file will be routed to failure or retry, depending on the error.") @ReadsAttribute(attribute = "filename", description = "The name of the file to write comes from the value of this attribute.") @WritesAttributes({ @WritesAttribute(attribute = "filename", description = "The name of the file is stored in this attribute."), @WritesAttribute(attribute = "absolute.hdfs.path", description = "The absolute path to the file is stored in this attribute."), @WritesAttribute(attribute = "record.count", description = "The number of records written to the Parquet file") }) @Restricted(restrictions = { @Restriction( requiredPermission = RequiredPermission.WRITE_FILESYSTEM, explanation = "Provides operator the ability to write any file that NiFi has access to in HDFS or the local filesystem.")
@EventDriven @SupportsBatching @InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED) @Tags({"hadoop", "hbase"}) @CapabilityDescription("Adds the Contents of a FlowFile to HBase as the value of a single cell") @SystemResourceConsideration(resource = SystemResource.MEMORY) public class PutHBaseCell extends AbstractPutHBase {
// Test double that widens access to the protected column-type cache of
// QueryDatabaseTableRecord so tests can seed it without performing a database scan.
@Stateful(scopes = Scope.CLUSTER, description = "Mock for QueryDatabaseTableRecord processor")
private static class MockQueryDatabaseTableRecord extends QueryDatabaseTableRecord {
    // Inserts a column name -> JDBC type mapping into the inherited columnTypeMap.
    void putColumnType(String colName, Integer colType) {
        columnTypeMap.put(colName, colType);
    }
}
} // closes the enclosing test class, whose declaration begins before this excerpt
/**
 * Writes one {@code dynamicProperty} element describing the given annotation,
 * including its name, value, description, and expression-language settings.
 */
private void writeDynamicProperty(final DynamicProperty property) throws IOException {
    writeStartElement("dynamicProperty");

    writeTextElement("name", property.name());
    writeTextElement("value", property.value());
    writeTextElement("description", property.description());
    writeBooleanElement("expressionLanguageSupported", property.supportsExpressionLanguage());

    // Scope may be absent; emit a null text element in that case.
    if (property.expressionLanguageScope() == null) {
        writeTextElement("expressionLanguageScope", null);
    } else {
        writeTextElement("expressionLanguageScope", property.expressionLanguageScope().name());
    }

    writeEndElement();
}
/**
 * Collects the authorizables required to use the given restricted component class.
 * Returns an empty set when the class carries no {@code @Restricted} annotation;
 * falls back to the general restricted-components authorizable when the annotation
 * lists no specific restrictions.
 */
public static Set<Authorizable> getRestrictedComponentsAuthorizable(final Class<?> configurableComponentClass) {
    final Set<Authorizable> authorizables = new HashSet<>();

    final Restricted restricted = configurableComponentClass.getAnnotation(Restricted.class);
    if (restricted != null) {
        final Restriction[] restrictions = restricted.restrictions();
        if (restrictions == null || restrictions.length == 0) {
            // No fine-grained restrictions declared: require the blanket permission.
            authorizables.add(getRestrictedComponentsAuthorizable());
        } else {
            for (final Restriction restriction : restrictions) {
                authorizables.add(getRestrictedComponentsAuthorizable(restriction.requiredPermission()));
            }
        }
    }

    return authorizables;
}
/**
 * Writes the {@code restricted} element; empty when the component is not restricted,
 * otherwise containing the general explanation and each specific restriction.
 */
@Override
protected void writeRestrictedInfo(final Restricted restricted) throws IOException {
    writeStartElement("restricted");

    if (restricted != null) {
        writeTextElement("generalRestrictionExplanation", restricted.value());

        if (restricted.restrictions() != null) {
            writeArray("restrictions", Arrays.asList(restricted.restrictions()), this::writeRestriction);
        }
    }

    writeEndElement();
}
/**
 * Writes one {@code restriction} element with its required-permission label
 * (null when no permission is set) and explanation.
 */
private void writeRestriction(final Restriction restriction) throws IOException {
    writeStartElement("restriction");

    final RequiredPermission permission = restriction.requiredPermission();
    if (permission == null) {
        writeTextElement("requiredPermission", null);
    } else {
        writeTextElement("requiredPermission", permission.getPermissionLabel());
    }
    writeTextElement("explanation", restriction.explanation());

    writeEndElement();
}
/**
 * Writes one {@code consideration} element naming the system resource involved
 * (null when unset) and describing its impact.
 */
private void writeSystemResourceConsideration(final SystemResourceConsideration consideration) throws IOException {
    writeStartElement("consideration");

    if (consideration.resource() == null) {
        writeTextElement("resource", null);
    } else {
        writeTextElement("resource", consideration.resource().name());
    }
    writeTextElement("description", consideration.description());

    writeEndElement();
}
/**
 * Writes the {@code stateful} element; empty when the component declares no state,
 * otherwise containing the state description and each scope in which state is kept.
 */
@Override
protected void writeStatefulInfo(final Stateful stateful) throws IOException {
    writeStartElement("stateful");

    if (stateful != null) {
        writeTextElement("description", stateful.description());
        writeArray("scopes", Arrays.asList(stateful.scopes()),
                currentScope -> writeTextElement("scope", currentScope.name()));
    }

    writeEndElement();
}
/**
 * Resolves the component's declared input requirement from its
 * {@code @InputRequirement} annotation, or {@code null} when absent.
 */
private InputRequirement.Requirement getInputRequirement(final ConfigurableComponent component) {
    final InputRequirement inputRequirement = component.getClass().getAnnotation(InputRequirement.class);
    if (inputRequirement == null) {
        return null;
    }
    return inputRequirement.value();
}
/**
 * Writes the {@code dynamicRelationship} element; empty when the component does
 * not declare one, otherwise containing its name and description.
 */
@Override
protected void writeDynamicRelationship(final DynamicRelationship dynamicRelationship) throws IOException {
    writeStartElement("dynamicRelationship");

    // Guard clause: still emit the (empty) element when no annotation is present.
    if (dynamicRelationship == null) {
        writeEndElement();
        return;
    }

    writeTextElement("name", dynamicRelationship.name());
    writeTextElement("description", dynamicRelationship.description());
    writeEndElement();
}
/**
 * Writes one {@code attribute} element for a FlowFile attribute this component writes.
 */
private void writeWritesAttribute(final WritesAttribute attribute) throws IOException {
    final String attributeName = attribute.attribute();
    final String attributeDescription = attribute.description();

    writeStartElement("attribute");
    writeTextElement("name", attributeName);
    writeTextElement("description", attributeDescription);
    writeEndElement();
}
/**
 * Writes one {@code attribute} element for a FlowFile attribute this component reads.
 */
private void writeReadsAttribute(final ReadsAttribute attribute) throws IOException {
    final String attributeName = attribute.attribute();
    final String attributeDescription = attribute.description();

    writeStartElement("attribute");
    writeTextElement("name", attributeName);
    writeTextElement("description", attributeDescription);
    writeEndElement();
}
/**
 * Writes the {@code inputRequirement} text element, or a null element when the
 * component declares no input requirement.
 */
@Override
protected void writeInputRequirementInfo(final InputRequirement.Requirement requirement) throws IOException {
    if (requirement == null) {
        writeTextElement("inputRequirement", null);
    } else {
        writeTextElement("inputRequirement", requirement.name());
    }
}
@EventDriven @SupportsBatching @Tags({"test", "load", "duplicate"}) @InputRequirement(Requirement.INPUT_REQUIRED) @CapabilityDescription("Intended for load testing, this processor will create the configured number of copies of each incoming FlowFile. The original FlowFile as well as all " + "generated copies are sent to the 'success' relationship. In addition, each FlowFile gets an attribute 'copy.index' set to the copy number, where the original FlowFile gets " + "a value of zero, and all copies receive incremented integer values.") @WritesAttributes({ @WritesAttribute(attribute = "copy.index", description = "A zero-based incrementing integer value based on which copy the FlowFile is.") }) public class DuplicateFlowFile extends AbstractProcessor {
// Test double that widens access to the protected column-type cache of
// QueryDatabaseTable so tests can seed it without performing a database scan.
@Stateful(scopes = Scope.CLUSTER, description = "Mock for QueryDatabaseTable processor")
private static class MockQueryDatabaseTable extends QueryDatabaseTable {
    // Inserts a column name -> JDBC type mapping into the inherited columnTypeMap.
    void putColumnType(String colName, Integer colType) {
        columnTypeMap.put(colName, colType);
    }
}
} // closes the enclosing test class, whose declaration begins before this excerpt