/**
 * Creates the Elasticsearch-backed flow repository.
 *
 * <p>All collaborators are required. Metrics (meters/timers/histogram) are registered
 * eagerly against the supplied {@link MetricRegistry}, and the per-node marker cache is
 * pre-populated from the database inside a transaction.
 *
 * @param metricRegistry      registry used to create the persistence/conversion metrics
 * @param jestClient          Elasticsearch client used for all index operations
 * @param indexStrategy       strategy for deriving index names
 * @param documentEnricher    enriches flow documents before persisting
 * @param classificationEngine classifies flows
 * @param transactionOperations transaction wrapper for the DAO reads below
 * @param nodeDao             used to find nodes that already have flows
 * @param snmpInterfaceDao    used to find interfaces that already have flows
 * @param bulkRetryCount      number of retries for bulk index operations
 * @param maxFlowDurationMs   maximum flow duration, used by the index selector
 * @throws NullPointerException if any object argument is {@code null}
 */
public ElasticFlowRepository(MetricRegistry metricRegistry, JestClient jestClient, IndexStrategy indexStrategy,
                             DocumentEnricher documentEnricher, ClassificationEngine classificationEngine,
                             TransactionOperations transactionOperations, NodeDao nodeDao,
                             SnmpInterfaceDao snmpInterfaceDao, int bulkRetryCount, long maxFlowDurationMs) {
    // metricRegistry was previously dereferenced without a null-check, unlike every other dependency
    Objects.requireNonNull(metricRegistry);
    this.client = Objects.requireNonNull(jestClient);
    this.indexStrategy = Objects.requireNonNull(indexStrategy);
    this.documentEnricher = Objects.requireNonNull(documentEnricher);
    this.classificationEngine = Objects.requireNonNull(classificationEngine);
    this.transactionOperations = Objects.requireNonNull(transactionOperations);
    this.nodeDao = Objects.requireNonNull(nodeDao);
    this.snmpInterfaceDao = Objects.requireNonNull(snmpInterfaceDao);
    this.bulkRetryCount = bulkRetryCount;
    this.indexSelector = new IndexSelector(TYPE, indexStrategy, maxFlowDurationMs);

    flowsPersistedMeter = metricRegistry.meter("flowsPersisted");
    logConversionTimer = metricRegistry.timer("logConversion");
    logEnrichementTimer = metricRegistry.timer("logEnrichment");
    logPersistingTimer = metricRegistry.timer("logPersisting");
    logMarkingTimer = metricRegistry.timer("logMarking");
    flowsPerLog = metricRegistry.histogram("flowsPerLog");

    // Pre-populate marker cache with values from DB; done inside a transaction so the
    // DAO reads (and any lazy entity attributes) happen while a session is open.
    this.transactionOperations.execute(cb -> {
        for (final OnmsNode node : this.nodeDao.findAllHavingFlows()) {
            this.markerCache.put(node.getId(),
                    this.snmpInterfaceDao.findAllHavingFlows(node.getId()).stream()
                            .map(OnmsSnmpInterface::getIfIndex)
                            .collect(Collectors.toCollection(Sets::newConcurrentHashSet)));
        }
        return null;
    });
}
public ElasticFlowRepository(MetricRegistry metricRegistry, JestClient jestClient, IndexStrategy indexStrategy, DocumentEnricher documentEnricher, ClassificationEngine classificationEngine, TransactionOperations transactionOperations, NodeDao nodeDao, SnmpInterfaceDao snmpInterfaceDao, int bulkRetryCount, long maxFlowDurationMs) { this.client = Objects.requireNonNull(jestClient); this.indexStrategy = Objects.requireNonNull(indexStrategy); this.documentEnricher = Objects.requireNonNull(documentEnricher); this.classificationEngine = Objects.requireNonNull(classificationEngine); this.transactionOperations = Objects.requireNonNull(transactionOperations); this.nodeDao = Objects.requireNonNull(nodeDao); this.snmpInterfaceDao = Objects.requireNonNull(snmpInterfaceDao); this.bulkRetryCount = bulkRetryCount; this.indexSelector = new IndexSelector(TYPE, indexStrategy, maxFlowDurationMs); flowsPersistedMeter = metricRegistry.meter("flowsPersisted"); logConversionTimer = metricRegistry.timer("logConversion"); logEnrichementTimer = metricRegistry.timer("logEnrichment"); logPersistingTimer = metricRegistry.timer("logPersisting"); logMarkingTimer = metricRegistry.timer("logMarking"); flowsPerLog = metricRegistry.histogram("flowsPerLog"); // Pre-populate marker cache with values from DB this.transactionOperations.execute(cb -> { for (final OnmsNode node : this.nodeDao.findAllHavingFlows()) { this.markerCache.put(node.getId(), this.snmpInterfaceDao.findAllHavingFlows(node.getId()).stream() .map(OnmsSnmpInterface::getIfIndex) .collect(Collectors.toCollection(Sets::newConcurrentHashSet))); } return null; }); }
/**
 * Returns the flow-exporter details for the given node: the node id plus all of its
 * SNMP interfaces that have flows associated.
 *
 * @param nodeId the node to look up
 * @return details containing the node id and its flow-bearing interfaces
 */
@Override
public FlowNodeDetails getFlowExporter(Integer nodeId) {
    // Map entities to DTOs *inside* the transaction callback: the original mapped them
    // after execute() returned, so attribute access happened outside the session and
    // could trip lazy-initialization on the entity's attributes.
    final List<FlowSnmpInterface> ifaces = transactionOperations.execute(status ->
            this.snmpInterfaceDao.findAllHavingFlows(nodeId).stream()
                    .map(iface -> new FlowSnmpInterface(iface.getIfIndex(),
                                                        iface.getIfName(),
                                                        iface.getIfAlias(),
                                                        iface.getIfDescr()))
                    .collect(Collectors.toList()));

    return new FlowNodeDetails(nodeId, ifaces);
}
/**
 * Looks up the flow-exporter details (node id and flow-bearing SNMP interfaces)
 * for the given node.
 *
 * @param nodeId the node to look up
 * @return the node's flow exporter details
 */
@Override
public FlowNodeDetails getFlowExporter(Integer nodeId) {
    // Perform the entity-to-DTO mapping within the transaction so any lazily loaded
    // entity attributes are resolved while the persistence session is still open
    // (previously the mapping ran after the transaction had completed).
    final List<FlowSnmpInterface> ifaces = transactionOperations.execute(status -> this.snmpInterfaceDao
            .findAllHavingFlows(nodeId).stream()
            .map(iface -> new FlowSnmpInterface(iface.getIfIndex(),
                                                iface.getIfName(),
                                                iface.getIfAlias(),
                                                iface.getIfDescr()))
            .collect(Collectors.toList()));

    return new FlowNodeDetails(nodeId, ifaces);
}