// NOTE(review): truncated fragment — the debug message's string concatenation and the
// enclosing braces are cut off at this point in the source. What is visible: the method
// guards on a clustered instance whose node identifier is not yet assigned, compiles
// component type/name filter regexes from (EL-evaluated) properties, reads the root
// ProcessGroupStatus and instance URL, then pages results by BATCH_SIZE
// (fromIndex/toIndex over a jsonArray built elsewhere — presumably for site-to-site
// transmission; confirm against the full file).
@Override public void onTrigger(final ReportingContext context) { final boolean isClustered = context.isClustered(); final String nodeId = context.getClusterNodeIdentifier(); if (nodeId == null && isClustered) { getLogger().debug("This instance of NiFi is configured for clustering, but the Cluster Node Identifier is not yet available. " componentTypeFilter = Pattern.compile(context.getProperty(COMPONENT_TYPE_FILTER_REGEX).evaluateAttributeExpressions().getValue()); componentNameFilter = Pattern.compile(context.getProperty(COMPONENT_NAME_FILTER_REGEX).evaluateAttributeExpressions().getValue()); final ProcessGroupStatus procGroupStatus = context.getEventAccess().getControllerStatus(); final String rootGroupName = procGroupStatus == null ? null : procGroupStatus.getName(); final String nifiUrl = context.getProperty(INSTANCE_URL).evaluateAttributeExpressions().getValue(); URL url; try { final String platform = context.getProperty(PLATFORM).evaluateAttributeExpressions().getValue(); final int batchSize = context.getProperty(BATCH_SIZE).asInteger(); int fromIndex = 0; int toIndex = Math.min(batchSize, jsonArray.size());
/**
 * Renders the current processor and connection statuses as two fixed-width
 * text tables and logs each to its dedicated logger.
 */
@Override
public void onTrigger(final ReportingContext context) {
    final ProcessGroupStatus controllerStatus = context.getEventAccess().getControllerStatus();
    final boolean showDeltas = context.getProperty(SHOW_DELTAS).asBoolean();

    // One builder is reused for both tables; it is cleared in between.
    final StringBuilder report = new StringBuilder();

    report.append("Processor Statuses:\n")
          .append(processorBorderLine)
          .append("\n")
          .append(processorHeader)
          .append(processorBorderLine)
          .append("\n");
    printProcessorStatus(controllerStatus, report, showDeltas);
    report.append(processorBorderLine);
    processorLogger.info(report.toString());

    report.setLength(0);
    report.append("Connection Statuses:\n")
          .append(connectionBorderLine)
          .append("\n")
          .append(connectionHeader)
          .append(connectionBorderLine)
          .append("\n");
    printConnectionStatus(controllerStatus, report, showDeltas);
    report.append(connectionBorderLine);
    connectionLogger.info(report.toString());
}
// NOTE(review): truncated fragment — the loop body and closing braces are cut off.
// Visible intent: expose every dynamic (user-added) property to a bindings map keyed
// by property name; presumably these bindings feed a script engine — confirm in the
// full file.
for (Map.Entry<PropertyDescriptor, String> property : context.getProperties().entrySet()) { if (property.getKey().isDynamic()) { bindings.put(property.getKey().getName(), context.getProperty(property.getKey()));
/**
 * Checks the configured directory's disk usage against the configured
 * percentage threshold, logging via {@code checkThreshold} when exceeded.
 *
 * <p>Bug fix: the original called {@code thresholdMatcher.find()} and ignored its
 * result, so a malformed threshold value caused an {@link IllegalStateException}
 * from {@code group(1)}. A malformed value is now reported cleanly instead.
 */
@Override
public void onTrigger(final ReportingContext context) {
    final String thresholdValue = context.getProperty(DIR_THRESHOLD).getValue();
    final Matcher thresholdMatcher = PERCENT_PATTERN.matcher(thresholdValue.trim());
    if (!thresholdMatcher.find()) {
        getLogger().error("Invalid threshold value [{}]; expected a percentage such as 80%", new Object[]{thresholdValue});
        return;
    }

    // group(1) is the numeric part of the percentage, e.g. "80" from "80%".
    final String thresholdPercentageVal = thresholdMatcher.group(1);
    final int contentRepoThreshold = Integer.parseInt(thresholdPercentageVal);

    final File dir = new File(context.getProperty(DIR_LOCATION).getValue());
    final String dirName = context.getProperty(DIR_DISPLAY_NAME).getValue();
    checkThreshold(dirName, dir.toPath(), contentRepoThreshold, getLogger());
}
// NOTE(review): truncated fragment — multiple try blocks are cut off before their
// bodies/closing braces. Visible intent: read the provenance repository's max event id,
// restore the last-consumed event id from LOCAL state, pull up to BATCH_SIZE provenance
// events starting at firstEventId, build a JSON payload (MINIFI_URL as instance URL),
// then persist the last event id back to LOCAL state. Error paths log and, presumably,
// fall through to retry on the next trigger — confirm in the full file.
@Override public void onTrigger(final ReportingContext context) { final ProcessGroupStatus procGroupStatus = context.getEventAccess().getControllerStatus(); final String rootGroupName = procGroupStatus == null ? null : procGroupStatus.getName(); Long currMaxId = context.getEventAccess().getProvenanceRepository().getMaxEventId(); Map<String, String> state; try { state = context.getStateManager().getState(Scope.LOCAL).toMap(); } catch (IOException e) { getLogger().error("Failed to get state at start up due to {}:"+e.getMessage(), e); events = context.getEventAccess().getProvenanceEvents(firstEventId, context.getProperty(BATCH_SIZE).asInteger()); } catch (final IOException ioe) { getLogger().error("Failed to retrieve Provenance Events from repository due to {}:"+ioe.getMessage(), ioe); final JsonObjectBuilder builder = factory.createObjectBuilder(); final String nifiUrl = context.getProperty(MINIFI_URL).evaluateAttributeExpressions().getValue(); URL url; try { final String lastEventId = String.valueOf(lastEvent.getEventId()); try { StateManager stateManager = context.getStateManager(); StateMap stateMap = stateManager.getState(Scope.LOCAL); Map<String, String> newMapOfState = new HashMap<>();
// NOTE(review): truncated fragment — the debug message's continuation and the method's
// closing braces are missing. Visible intent: skip reporting while clustered but the
// node id is unassigned, then query the bulletin repository and read the PLATFORM
// property; presumably the bulletins are serialized and sent onward — confirm in the
// full file.
@Override public void onTrigger(final ReportingContext context) { final boolean isClustered = context.isClustered(); final String nodeId = context.getClusterNodeIdentifier(); if (nodeId == null && isClustered) { getLogger().debug("This instance of NiFi is configured for clustering, but the Cluster Node Identifier is not yet available. " final List<Bulletin> bulletins = context.getBulletinRepository().findBulletins(bulletinQuery); final String platform = context.getProperty(PLATFORM).evaluateAttributeExpressions().getValue();
/**
 * Publishes the latest controller status snapshot to Ganglia: stores the status
 * where the reporter reads it, then triggers the reporter synchronously.
 */
@Override
public void onTrigger(final ReportingContext context) {
    final ProcessGroupStatus rootGroupStatus = context.getEventAccess().getControllerStatus();
    // The Ganglia reporter pulls from this reference when run() executes below.
    latestStatus.set(rootGroupStatus);

    gangliaReporter.run();

    getLogger().info("{} Sent metrics to Ganglia", new Object[] {this});
}
// NOTE(review): truncated fragment — begins with a bare `return;` from an enclosing
// guard whose condition is not visible, and ends mid-method. Visible intent: fetch the
// controller status, build a component-id map from it via ComponentMapHolder, and grab
// the state manager — presumably to track provenance consumption progress; confirm in
// the full file.
return; final EventAccess eventAccess = context.getEventAccess(); final ProcessGroupStatus procGroupStatus = eventAccess.getControllerStatus(); final ComponentMapHolder componentMapHolder = ComponentMapHolder.createComponentMap(procGroupStatus); final StateManager stateManager = context.getStateManager();
// NOTE(review): truncated fragment — the guard's body and the rest of the method are
// cut off. Visible intent: when running clustered without an assigned node id yet,
// presumably skip this trigger cycle (the standard NiFi reporting-task pattern) —
// confirm in the full file.
@Override public void onTrigger(ReportingContext context) { final String clusterNodeId = context.getClusterNodeIdentifier(); final boolean isClustered = context.isClustered(); if (isClustered && isEmpty(clusterNodeId)) {
/**
 * Report the registered metrics.
 *
 * <p>If a process group id is configured, that group's status is reported;
 * otherwise the root (controller) status is used.
 *
 * @param context used for getting the most recent {@link ProcessGroupStatus}.
 */
@Override
public void onTrigger(ReportingContext context) {
    final String groupId = context.getProperty(PROCESS_GROUP_ID).evaluateAttributeExpressions().getValue();

    final ProcessGroupStatus statusToReport;
    if (groupId == null) {
        // No explicit group configured: fall back to the controller-wide status.
        statusToReport = context.getEventAccess().getControllerStatus();
    } else {
        statusToReport = context.getEventAccess().getGroupStatus(groupId);
    }

    if (statusToReport == null) {
        getLogger().error("Process group with provided group id could not be found.");
        return;
    }

    currentStatusReference.set(statusToReport);
    reporter.report();
}
/**
 * Sends the JSON payload over the given site-to-site transaction. When a record
 * writer is configured, the payload is first re-shaped through {@code getData};
 * otherwise the raw UTF-8 JSON bytes are sent as-is.
 *
 * @throws IOException if writing to the transaction fails
 */
protected void sendData(final ReportingContext context, final Transaction transaction,
        Map<String, String> attributes, final JsonArray jsonArray) throws IOException {
    // Serialize once; both branches start from the same UTF-8 bytes.
    final byte[] payload = jsonArray.toString().getBytes(StandardCharsets.UTF_8);

    if (context.getProperty(RECORD_WRITER).isSet()) {
        final InputStream jsonStream = new ByteArrayInputStream(payload);
        transaction.send(getData(context, jsonStream, attributes), attributes);
    } else {
        transaction.send(payload, attributes);
    }
}
// NOTE(review): truncated fragment (duplicate of an earlier bulletin-reporting
// variant) — the debug message's continuation and closing braces are missing. Visible
// intent: skip while clustered without a node id, query bulletins, and read the
// PLATFORM property — confirm the remainder in the full file.
@Override public void onTrigger(final ReportingContext context) { final boolean isClustered = context.isClustered(); final String nodeId = context.getClusterNodeIdentifier(); if (nodeId == null && isClustered) { getLogger().debug("This instance of NiFi is configured for clustering, but the Cluster Node Identifier is not yet available. " final List<Bulletin> bulletins = context.getBulletinRepository().findBulletins(bulletinQuery); final String platform = context.getProperty(PLATFORM).evaluateAttributeExpressions().getValue();
private void consumeNiFiProvenanceEvents(ReportingContext context, NiFiFlow nifiFlow) { final EventAccess eventAccess = context.getEventAccess(); final AnalysisContext analysisContext = new StandardAnalysisContext(nifiFlow, clusterResolvers, // FIXME: This class cast shouldn't be necessary to query lineage. Possible refactor target in next major update. (ProvenanceRepository)eventAccess.getProvenanceRepository()); consumer.consumeEvents(context, (componentMapHolder, events) -> { for (ProvenanceEventRecord event : events) { try { lineageStrategy.processEvent(analysisContext, nifiFlow, event); } catch (Exception e) { // If something went wrong, log it and continue with other records. getLogger().error("Skipping failed analyzing event {} due to {}.", new Object[]{event, e, e}); } } nifiAtlasHook.commitMessages(); }); }
// NOTE(review): truncated fragment (duplicate of an earlier snippet) — loop body and
// closing braces are cut off. Visible intent: publish each dynamic property into a
// bindings map by name; presumably for script-engine access — confirm in the full file.
for (Map.Entry<PropertyDescriptor, String> property : context.getProperties().entrySet()) { if (property.getKey().isDynamic()) { bindings.put(property.getKey().getName(), context.getProperty(property.getKey()));
// NOTE(review): truncated fragment (duplicate of an earlier snippet) — the guard body
// and remainder of the method are cut off. Visible intent: when clustered but the node
// id is not yet assigned, presumably skip this cycle — confirm in the full file.
@Override public void onTrigger(ReportingContext context) { final String clusterNodeId = context.getClusterNodeIdentifier(); final boolean isClustered = context.isClustered(); if (isClustered && isEmpty(clusterNodeId)) {
// NOTE(review): truncated fragment — the debug message's continuation and the closing
// braces are missing. Visible intent: skip while clustered without an assigned node id,
// read the root group's status/name plus INSTANCE_URL and PLATFORM properties;
// presumably builds a status payload for transmission — confirm in the full file.
@Override public void onTrigger(final ReportingContext context) { final boolean isClustered = context.isClustered(); final String nodeId = context.getClusterNodeIdentifier(); if (nodeId == null && isClustered) { getLogger().debug("This instance of NiFi is configured for clustering, but the Cluster Node Identifier is not yet available. " final ProcessGroupStatus procGroupStatus = context.getEventAccess().getControllerStatus(); final String rootGroupName = procGroupStatus == null ? null : procGroupStatus.getName(); final String nifiUrl = context.getProperty(INSTANCE_URL).evaluateAttributeExpressions().getValue(); URL url; try { final String platform = context.getProperty(PLATFORM).evaluateAttributeExpressions().getValue();
/**
 * Pushes the current controller status metrics to DataDog: refreshes the
 * configured prefix/environment/tags, (re)builds the transport, updates all
 * metric groups, and triggers the DataDog reporter.
 *
 * <p>Bug fix: a transport failure was reported via {@code e.printStackTrace()},
 * which bypasses the NiFi component logger entirely; it is now logged through
 * {@code getLogger()} with the stack trace preserved.
 */
@Override
public void onTrigger(ReportingContext context) {
    final ProcessGroupStatus status = context.getEventAccess().getControllerStatus();
    metricsPrefix = context.getProperty(METRICS_PREFIX).evaluateAttributeExpressions().getValue();
    environment = context.getProperty(ENVIRONMENT).evaluateAttributeExpressions().getValue();
    statusId = status.getId();
    defaultTags = ImmutableMap.of("env", environment, "dataflow_id", statusId);
    try {
        updateDataDogTransport(context);
    } catch (IOException e) {
        getLogger().error("Failed to update DataDog transport", e);
    }
    updateAllMetricGroups(status);
    ddMetricRegistryBuilder.getDatadogReporter().report();
}
/**
 * Configures how metrics reach DataDog based on the transport property:
 * through the local agent, or directly over HTTP when an API key is set.
 *
 * @throws IOException if building the metric registry transport fails
 */
private void updateDataDogTransport(ReportingContext context) throws IOException {
    final String transport = context.getProperty(DATADOG_TRANSPORT).getValue();

    if (transport.equalsIgnoreCase(DATADOG_AGENT.getValue())) {
        ddMetricRegistryBuilder.build("agent");
        return;
    }

    // HTTP transport is only usable when an API key has been configured.
    final boolean httpSelected = transport.equalsIgnoreCase(DATADOG_HTTP.getValue());
    if (httpSelected && context.getProperty(API_KEY).isSet()) {
        ddMetricRegistryBuilder.build(context.getProperty(API_KEY).getValue());
    }
}
private void consumeNiFiProvenanceEvents(ReportingContext context, NiFiFlow nifiFlow) { final EventAccess eventAccess = context.getEventAccess(); final AnalysisContext analysisContext = new StandardAnalysisContext(nifiFlow, clusterResolvers, // FIXME: This class cast shouldn't be necessary to query lineage. Possible refactor target in next major update. (ProvenanceRepository)eventAccess.getProvenanceRepository()); consumer.consumeEvents(context, (componentMapHolder, events) -> { for (ProvenanceEventRecord event : events) { try { lineageStrategy.processEvent(analysisContext, nifiFlow, event); } catch (Exception e) { // If something went wrong, log it and continue with other records. getLogger().error("Skipping failed analyzing event {} due to {}.", new Object[]{event, e, e}); } } nifiAtlasHook.commitMessages(); }); }
// NOTE(review): truncated fragment — the debug message's continuation and the closing
// braces are missing. Visible intent: skip while clustered without an assigned node id,
// then build a JSON metrics payload (application id, hostname, controller status) and,
// when the FORMAT property selects the Ambari format, assemble an Ambari-style metrics
// object — the rest of the assembly/transmission is cut off; confirm in the full file.
@Override public void onTrigger(final ReportingContext context) { final boolean isClustered = context.isClustered(); final String nodeId = context.getClusterNodeIdentifier(); if (nodeId == null && isClustered) { getLogger().debug("This instance of NiFi is configured for clustering, but the Cluster Node Identifier is not yet available. " final JsonBuilderFactory factory = Json.createBuilderFactory(config); final String applicationId = context.getProperty(APPLICATION_ID).evaluateAttributeExpressions().getValue(); final String hostname = context.getProperty(HOSTNAME).evaluateAttributeExpressions().getValue(); final ProcessGroupStatus status = context.getEventAccess().getControllerStatus(); final Map<String, String> attributes = new HashMap<>(); if(context.getProperty(FORMAT).getValue().equals(AMBARI_FORMAT.getValue())) { final JsonObject metricsObject = metricsBuilder .applicationId(applicationId)