/**
 * Resolves the base URL from the URL property, evaluating any expression
 * language against the incoming FlowFile's attributes.
 */
@Override
protected String getBaseUrl(FlowFile inputFlowFile, ProcessContext context) {
    return context.getProperty(URL)
            .evaluateAttributeExpressions(inputFlowFile)
            .getValue();
}
}
/**
 * Adjusts the named counter by the configured delta for the incoming FlowFile,
 * then routes the FlowFile to SUCCESS. Both the counter name and the delta are
 * evaluated as expression language against the FlowFile.
 */
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    final FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final String counterName = context.getProperty(COUNTER_NAME).evaluateAttributeExpressions(flowFile).getValue();
    final long delta = Long.parseLong(context.getProperty(DELTA).evaluateAttributeExpressions(flowFile).getValue());
    // Third argument is 'false' — see ProcessSession#adjustCounter for the
    // exact immediate-vs-deferred semantics of that flag.
    session.adjustCounter(counterName, delta, false);
    session.transfer(flowFile, SUCCESS);
}
}
/**
 * Caches the connection settings from the processor properties and eagerly
 * opens the RethinkDB connection, so configuration problems surface at
 * schedule time instead of per FlowFile.
 *
 * @throws RuntimeException if the connection cannot be established (stops
 *         the processor from being scheduled)
 */
@OnScheduled
public void onScheduled(final ProcessContext context) {
    hostname = context.getProperty(DB_HOST).getValue();
    port = context.getProperty(DB_PORT).asInteger();
    username = context.getProperty(USERNAME).getValue();
    password = context.getProperty(PASSWORD).getValue();
    databaseName = context.getProperty(DB_NAME).getValue();
    tableName = context.getProperty(TABLE_NAME).getValue();
    try {
        rethinkDbConnection = makeConnection();
    } catch (Exception e) {
        getLogger().error("Error while getting connection " + e.getLocalizedMessage(), e);
        // Fixed: the thrown message previously lacked the space before the
        // cause text that the logged message has ("connection" + msg).
        throw new RuntimeException("Error while getting connection " + e.getLocalizedMessage(), e);
    }
    getLogger().info("RethinkDB connection created for host {} port {} and db {}",
            new Object[] {hostname, port, databaseName});
}
@OnScheduled public void setup(ProcessContext context) { // If the query is not set, then an incoming flow file is needed. Otherwise fail the initialization if (!context.getProperty(SQL_SELECT_QUERY).isSet() && !context.hasIncomingConnection()) { final String errorString = "Either the Select Query must be specified or there must be an incoming connection " + "providing flowfile(s) containing a SQL select query"; getLogger().error(errorString); throw new ProcessException(errorString); } dbcpService = context.getProperty(DBCP_SERVICE).asControllerService(DBCPService.class); }
/**
 * Processes up to five FlowFiles per trigger. FlowFiles larger than the
 * configured maximum buffer size are routed to failure untouched; all others
 * have their content rewritten via ReplaceTextCallback and go to success,
 * with a content-modification provenance event recording the elapsed time.
 */
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    updateMapping(context);
    final List<FlowFile> batch = session.get(5);
    if (batch.isEmpty()) {
        return;
    }
    final ComponentLog logger = getLogger();
    final int maxBufferSize = context.getProperty(MAX_BUFFER_SIZE).asDataSize(DataUnit.B).intValue();
    for (FlowFile flowFile : batch) {
        if (flowFile.getSize() > maxBufferSize) {
            // Too large to buffer for replacement; fail it untouched.
            session.transfer(flowFile, REL_FAILURE);
            continue;
        }
        final StopWatch stopWatch = new StopWatch(true);
        flowFile = session.write(flowFile, new ReplaceTextCallback(context, flowFile, maxBufferSize));
        logger.info("Transferred {} to 'success'", new Object[]{flowFile});
        session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
        session.transfer(flowFile, REL_SUCCESS);
    }
}
/**
 * Returns the shared InfluxDB client, creating and caching it on first use.
 * Synchronized so only one thread performs the initial connection.
 *
 * @return the cached InfluxDB instance
 * @throws RuntimeException if the connection cannot be created
 */
protected synchronized InfluxDB getInfluxDB(ProcessContext context) {
    if (influxDB.get() != null) {
        return influxDB.get();
    }
    final String username = context.getProperty(USERNAME).evaluateAttributeExpressions().getValue();
    final String password = context.getProperty(PASSWORD).evaluateAttributeExpressions().getValue();
    final long connectionTimeout = context.getProperty(INFLUX_DB_CONNECTION_TIMEOUT).asTimePeriod(TimeUnit.SECONDS);
    final String influxDbUrl = context.getProperty(INFLUX_DB_URL).evaluateAttributeExpressions().getValue();
    try {
        influxDB.set(makeConnection(username, password, influxDbUrl, connectionTimeout));
    } catch (Exception e) {
        getLogger().error("Error while getting connection {}", new Object[] { e.getLocalizedMessage() }, e);
        throw new RuntimeException("Error while getting connection " + e.getLocalizedMessage(), e);
    }
    getLogger().info("InfluxDB connection created for host {}", new Object[] {influxDbUrl});
    return influxDB.get();
}
/**
 * Captures the processor's configuration properties: conflict resolution,
 * operation, output directory (expression language evaluated), optional file
 * filter regex, and the dotted-files flag.
 */
ProcessorConfiguration(final ProcessContext context) {
    conflictResolution = context.getProperty(CONFLICT_RESOLUTION).getValue();
    operation = context.getProperty(OPERATION).getValue();
    outputRootDirPath = new Path(context.getProperty(OUTPUT_DIRECTORY).evaluateAttributeExpressions().getValue());
    final String regex = context.getProperty(FILE_FILTER_REGEX).getValue();
    // A null regex means "no filter"; only compile when one was supplied.
    fileFilterPattern = (regex == null) ? null : Pattern.compile(regex);
    ignoreDottedFiles = context.getProperty(IGNORE_DOTTED_FILES).asBoolean();
}
/**
 * Initializes the JNDI DNS resolver context from the processor's properties:
 * query timeout, retry count, and an optional comma-separated list of DNS
 * servers. Initialization failures are logged (the 'initialized' flag stays
 * false) rather than thrown.
 */
protected void initializeResolver(final ProcessContext context) {
    final String dnsTimeout = context.getProperty(DNS_TIMEOUT).asTimePeriod(TimeUnit.MILLISECONDS).toString();
    final String dnsServer = context.getProperty(DNS_SERVER).getValue();
    final String dnsRetries = context.getProperty(DNS_RETRIES).getValue();

    final Hashtable<String, String> env = new Hashtable<String, String>();
    env.put("java.naming.factory.initial", contextFactory);
    env.put("com.sun.jndi.dns.timeout.initial", dnsTimeout);
    env.put("com.sun.jndi.dns.timeout.retries", dnsRetries);
    if (StringUtils.isNotEmpty(dnsServer)) {
        // Build a list of "dns://<server>/. " provider URLs (each entry keeps
        // its trailing space, matching the original concatenation). Use
        // StringBuilder instead of repeated String concatenation in the loop.
        final StringBuilder providerUrls = new StringBuilder();
        for (String server : dnsServer.split(",")) {
            providerUrls.append("dns://").append(server).append("/. ");
        }
        env.put(Context.PROVIDER_URL, providerUrls.toString());
    }
    try {
        initializeContext(env);
        initialized.set(true);
    } catch (NamingException e) {
        getLogger().error("Could not initialize JNDI context", e);
    }
}
@OnScheduled public void setup(ProcessContext context) { // If the query is not set, then an incoming flow file is needed. Otherwise fail the initialization if (!context.getProperty(HIVEQL_SELECT_QUERY).isSet() && !context.hasIncomingConnection()) { final String errorString = "Either the Select Query must be specified or there must be an incoming connection " + "providing flowfile(s) containing a SQL select query"; getLogger().error(errorString); throw new ProcessException(errorString); } }
// NOTE(review): this block appears truncated/garbled — braces do not balance:
// the 'if (flowFile == null)' guard never closes, a 'catch' appears without a
// preceding 'try', the REL_NOT_FOUND/else branch is cut off mid-structure, and
// the trailing switch is unterminated. Recover the full method from version
// control before editing; code left byte-identical here.
@Override public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException { final FlowFile flowFile = session.get(); if ( flowFile == null ) { return; eles = doc.select(context.getProperty(CSS_SELECTOR).evaluateAttributeExpressions(flowFile).getValue()); } catch (final Exception ex) { getLogger().error("Failed to extract HTML from {} due to {}; routing to {}", new Object[] {flowFile, ex, REL_INVALID_HTML}, ex); session.transfer(flowFile, REL_INVALID_HTML); return; final String prependValue = context.getProperty(PREPEND_ELEMENT_VALUE).evaluateAttributeExpressions(flowFile).getValue(); final String appendValue = context.getProperty(APPEND_ELEMENT_VALUE).evaluateAttributeExpressions(flowFile).getValue(); final String outputType = context.getProperty(OUTPUT_TYPE).getValue(); final String attributeKey = context.getProperty(ATTRIBUTE_KEY).evaluateAttributeExpressions(flowFile).getValue(); session.transfer(flowFile, REL_NOT_FOUND); } else { FlowFile updatedFF = ff; switch (context.getProperty(DESTINATION).getValue()) { case DESTINATION_ATTRIBUTE: updatedFF = session.putAttribute(ff, HTML_ELEMENT_ATTRIBUTE_NAME, extractedElementValue); session.transfer(updatedFF, REL_SUCCESS);
/**
 * Determines the partition for the given FlowFile from the PARTITION
 * property, with expression language evaluated against the FlowFile.
 *
 * @return the parsed partition number, or {@code null} when the property
 *         evaluates to null
 */
private Integer determinePartition(ProcessContext context, FlowFile flowFile) {
    final String partitionValue = context.getProperty(PARTITION).evaluateAttributeExpressions(flowFile).getValue();
    if (partitionValue != null) {
        // Parse the value we already evaluated instead of evaluating the
        // property expression a second time, as the original did.
        return Integer.parseInt(partitionValue);
    }
    return null;
}
/**
 * Creates, configures, and starts the Flume sink backed by a NiFi session
 * channel. Any failure is logged and propagated so the processor is not
 * scheduled with a half-initialized sink.
 */
@OnScheduled
public void onScheduled(final ProcessContext context) {
    try {
        channel = new NifiSinkSessionChannel(SUCCESS, FAILURE);
        channel.start();
        final String sinkName = context.getProperty(SOURCE_NAME).getValue();
        sink = SINK_FACTORY.create(sinkName, context.getProperty(SINK_TYPE).getValue());
        sink.setChannel(channel);
        final String flumeConfig = context.getProperty(FLUME_CONFIG).getValue();
        final String agentName = context.getProperty(AGENT_NAME).getValue();
        Configurables.configure(sink, getFlumeSinkContext(flumeConfig, agentName, sinkName));
        sink.start();
    } catch (Throwable th) {
        getLogger().error("Error creating sink", th);
        throw Throwables.propagate(th);
    }
}
@OnScheduled public void setup(ProcessContext context) { // If the query is not set, then an incoming flow file is needed. Otherwise fail the initialization if (!context.getProperty(HIVEQL_SELECT_QUERY).isSet() && !context.hasIncomingConnection()) { final String errorString = "Either the Select Query must be specified or there must be an incoming connection " + "providing flowfile(s) containing a SQL select query"; getLogger().error(errorString); throw new ProcessException(errorString); } }
// NOTE(review): this block appears truncated/garbled — braces do not balance:
// the null guard and the routing-key IllegalArgumentException branch never
// close, and a 'catch' appears without a visible 'try'. The method header is
// also missing from this view. Recover the full method from version control
// before editing; code left byte-identical here.
FlowFile flowFile = session.get(); if (flowFile == null) { return; final String routingKey = context.getProperty(ROUTING_KEY).evaluateAttributeExpressions(flowFile).getValue(); if (routingKey == null) { throw new IllegalArgumentException("Failed to determine 'routing key' with provided value '" + context.getProperty(ROUTING_KEY) + "' after evaluating it as expression against incoming FlowFile."); final String exchange = context.getProperty(EXCHANGE).evaluateAttributeExpressions(flowFile).getValue(); final byte[] messageContent = extractMessage(flowFile, session); session.transfer(flowFile, REL_SUCCESS); session.getProvenanceReporter().send(flowFile, connection.toString() + "/E:" + exchange + "/RK:" + routingKey); } catch (Exception e) { session.transfer(session.penalize(flowFile), REL_FAILURE); getLogger().error("Failed while sending message to AMQP via " + publisher, e);
/**
 * Reads the scripting configuration into fields: engine name, script file
 * path, inline script body, and any additional module paths (comma-separated;
 * empty/unset yields an empty array).
 */
public void setupVariables(ProcessContext context) {
    scriptEngineName = context.getProperty(SCRIPT_ENGINE).getValue();
    scriptPath = context.getProperty(ScriptingComponentUtils.SCRIPT_FILE).evaluateAttributeExpressions().getValue();
    scriptBody = context.getProperty(ScriptingComponentUtils.SCRIPT_BODY).getValue();
    final String modulePath = context.getProperty(ScriptingComponentUtils.MODULES).evaluateAttributeExpressions().getValue();
    modules = StringUtils.isEmpty(modulePath) ? new String[0] : modulePath.split(",");
}
/**
 * Creates and configures the Flume source from the processor properties.
 * Pollable sources additionally get a channel processor wired to the internal
 * channel and are started here. Failures are logged and rethrown so
 * scheduling aborts.
 */
@OnScheduled
public void onScheduled(final ProcessContext context) {
    try {
        final String sourceName = context.getProperty(SOURCE_NAME).getValue();
        source = SOURCE_FACTORY.create(sourceName, context.getProperty(SOURCE_TYPE).getValue());
        final String flumeConfig = context.getProperty(FLUME_CONFIG).getValue();
        final String agentName = context.getProperty(AGENT_NAME).getValue();
        Configurables.configure(source, getFlumeSourceContext(flumeConfig, agentName, sourceName));
        if (source instanceof PollableSource) {
            source.setChannelProcessor(new ChannelProcessor(new NifiChannelSelector(pollableSourceChannel)));
            source.start();
        }
    } catch (Throwable th) {
        getLogger().error("Error creating source", th);
        throw Throwables.propagate(th);
    }
}
@OnScheduled public void onScheduled(final ProcessContext context) { super.onScheduled(context); // Either input connection or scheduled query is required if ( ! context.getProperty(INFLUX_DB_QUERY).isSet() && ! context.hasIncomingConnection() ) { String error = "The InfluxDB Query processor requires input connection or scheduled InfluxDB query"; getLogger().error(error); throw new ProcessException(error); } }
// NOTE(review): this block appears truncated/garbled — it references
// 'flowFile' which is never declared (only the 'flowFiles' list is fetched),
// the transfer/provenance lines for REL_SUCCESS are duplicated, the trailing
// error-handling lines reference an undeclared 'ex' outside any catch, and
// braces do not balance. Recover the full method from version control before
// editing; code left byte-identical here.
@Override protected void doDelete(ProcessContext context, ProcessSession session) throws Exception { final int batchSize = context.getProperty(BATCH_SIZE).asInteger(); final String location = context.getProperty(ROW_ID_LOCATION).getValue(); final int flowFileCount = context.getProperty(FLOWFILE_FETCH_COUNT).asInteger(); final String charset = context.getProperty(CHARSET).getValue(); List<FlowFile> flowFiles = session.get(flowFileCount); final String visibility = context.getProperty(VISIBLITY_LABEL).isSet() ? context.getProperty(VISIBLITY_LABEL).evaluateAttributeExpressions(flowFile).getValue() : null; final String tableName = context.getProperty(TABLE_NAME).evaluateAttributeExpressions(flowFile).getValue(); try { if (location.equals(ROW_ID_CONTENT.getValue())) { flowFile = doDeleteFromContent(flowFile, context, session, tableName, batchSize, charset, visibility); if (flowFile.getAttribute(RESTART_INDEX) != null) { session.transfer(flowFile, REL_FAILURE); } else { final String transitUrl = clientService.toTransitUri(tableName, flowFile.getAttribute(ROWKEY_END)); session.transfer(flowFile, REL_SUCCESS); session.getProvenanceReporter().invokeRemoteProcess(flowFile, transitUrl); session.transfer(flowFile, REL_SUCCESS); session.getProvenanceReporter().invokeRemoteProcess(flowFile, transitUrl); getLogger().error(ex.getMessage(), ex); session.transfer(flowFile, REL_FAILURE);
/**
 * Resolves the request content type from the Content-Type property (expression
 * language evaluated against the request FlowFile), falling back to the
 * processor default when the property is blank.
 */
@Override
public MediaType contentType() {
    final String evaluated = context.getProperty(PROP_CONTENT_TYPE).evaluateAttributeExpressions(requestFlowFile).getValue();
    return MediaType.parse(StringUtils.isBlank(evaluated) ? DEFAULT_CONTENT_TYPE : evaluated);
}
/**
 * Builds the transform cache (bounded by the Transform Cache Size property)
 * and resolves the class loader for custom transform modules, falling back to
 * this class's loader when the Modules property is unset. Class-loader setup
 * errors are logged but not rethrown.
 */
@OnScheduled
public void setup(final ProcessContext context) {
    final int maxTransformsToCache = context.getProperty(TRANSFORM_CACHE_SIZE).asInteger();
    transformCache = Caffeine.newBuilder()
            .maximumSize(maxTransformsToCache)
            .build(specString -> createTransform(context, specString.orElse(null)));
    try {
        if (!context.getProperty(MODULES).isSet()) {
            customClassLoader = this.getClass().getClassLoader();
        } else {
            customClassLoader = ClassLoaderUtils.getCustomClassLoader(
                    context.getProperty(MODULES).getValue(),
                    this.getClass().getClassLoader(),
                    getJarFilenameFilter());
        }
    } catch (final Exception ex) {
        getLogger().error("Unable to setup processor", ex);
    }
}