/** Total elapsed time covered by this accumulation: known plus unknown portions. */
private double getElapsed() {
    double known = getKnown();
    double unknown = getUnknown();
    return known + unknown;
}
/**
 * An accumulation is considered valid when strictly less than half of the
 * elapsed time is attributable to unknown (missing) data.
 */
private boolean isValid() {
    double threshold = getElapsed() / 2;
    return getUnknown() < threshold;
}
/** Creates a new accumulation with its state zeroed via {@link #reset()}. */
private Accumulation() { reset(); }
public Results<Measurement> process(Iterator<Row<Sample>> samples) { checkNotNull(samples, "samples argument"); // Build chain of iterators to process results as a stream Rate rate = new Rate(samples, m_resultDescriptor.getSourceNames()); PrimaryData primaryData = new PrimaryData(m_resource, m_start.minus(m_resolution), m_end, m_resultDescriptor, rate); Aggregation aggregation = new Aggregation(m_resource, m_start, m_end, m_resultDescriptor, m_resolution, primaryData); Compute compute = new Compute(m_resultDescriptor, aggregation); Export exports = new Export(m_resultDescriptor.getExports(), compute); Results<Measurement> measurements = new Results<>(); for (Row<Measurement> row : exports) { measurements.addRow(row); } return measurements; }
/**
 * Constructs a generator that emits one sample per interval between
 * {@code start} and {@code end}.
 *
 * @param resource name of the resource the samples belong to
 * @param metric   name of the metric to generate
 * @param start    first timestamp (inclusive)
 * @param end      last timestamp
 * @param interval spacing between consecutive samples
 */
public SampleGenerator(String resource, String metric, Timestamp start, Timestamp end, Duration interval) {
    // Validate in the same order as the parameter list so the first null wins.
    String checkedResource = checkNotNull(resource, "resource argument");
    m_resource = new Resource(checkedResource);
    m_metric = checkNotNull(metric, "metric argument");
    checkNotNull(start, "start argument");
    checkNotNull(end, "end argument");
    checkNotNull(interval, "interval argument");
    m_intervals = new IntervalGenerator(start, end, interval);
}
/**
 * Builds an aggregation stage that rolls measurement rows up from the
 * descriptor's base interval to the coarser {@code resolution}.
 *
 * @param resource         resource the rows belong to
 * @param start            start of the requested range
 * @param end              end of the requested range
 * @param resultDescriptor describes interval, sources, and functions
 * @param resolution       output step; must be a whole multiple of the descriptor's interval
 * @param input            upstream measurement rows at interval granularity
 */
Aggregation(Resource resource, Timestamp start, Timestamp end, ResultDescriptor resultDescriptor, Duration resolution, Iterator<Row<Measurement>> input) {
    m_resultDescriptor = checkNotNull(resultDescriptor, "result descriptor argument");
    m_resource = checkNotNull(resource, "resource argument");
    checkNotNull(start, "start argument");
    checkNotNull(end, "end argument");
    m_resolution = checkNotNull(resolution, "resolution argument");
    m_input = checkNotNull(input, "input argument");
    Duration interval = resultDescriptor.getInterval();
    checkArgument(resolution.isMultiple(interval), "resolution must be a multiple of interval");
    // Output timestamps are aligned to resolution boundaries that cover [start, end].
    m_timestamps = new IntervalGenerator(start.stepFloor(m_resolution), end.stepCeiling(m_resolution), m_resolution);
    // Expected number of input rows contributing to each output row.
    m_intervalsPer = (double) resolution.divideBy(interval);
    // Prime the first input row and the first (empty) output row.
    m_working = m_input.hasNext() ? m_input.next() : null;
    m_nextOut = m_timestamps.hasNext() ? new Row<Measurement>(m_timestamps.next(), m_resource) : null;
    // If the input stream contains any Samples earlier than what's relevant, iterate past them.
    if (m_nextOut != null) {
        while (m_working != null && m_working.getTimestamp().lte(m_nextOut.getTimestamp().minus(m_resolution))) {
            m_working = nextWorking();
        }
    }
}
// in MainApp.java Compute compute = new Compute(); compute.setMainApp(this);
/** Returns {@code true} while the underlying interval generator has more timestamps. */
@Override public boolean hasNext() { return m_intervals.hasNext(); }
/**
 * Produces the next output row containing only the exported measurements,
 * substituting a NaN placeholder for any export missing from the current row.
 *
 * @throws NoSuchElementException when the input is exhausted
 */
@Override
public Row<Measurement> next() {
    if (!hasNext()) {
        throw new NoSuchElementException();
    }

    Row<Measurement> exported = new Row<>(m_current.getTimestamp(), m_current.getResource());
    for (String export : m_exports) {
        exported.addElement(getMeasurement(export));
    }

    // Advance to the next input row before handing back the result.
    m_current = m_input.hasNext() ? m_input.next() : null;
    return exported;
}
@Override public Row<Sample> next() { if (!hasNext()) throw new NoSuchElementException(); Row<Sample> working = m_input.next(); Row<Sample> result = new Row<>(working.getTimestamp(), working.getResource()); for (String metricName : m_metrics) { Sample sample = working.getElement(metricName); if (sample == null) { continue; } // Use rate as result if one of counter types, else pass through as-is. result.addElement(COUNTERS.contains(sample.getType()) ? getRate(sample) : sample); m_prevSamples.put(sample.getName(), sample); } return result; }
/**
 * Returns the next row augmented with one derived measurement per configured
 * calculation, evaluated against the row's existing elements.
 *
 * @throws NoSuchElementException when the input is exhausted
 */
@Override
public Row<Measurement> next() {
    if (!hasNext()) {
        throw new NoSuchElementException();
    }

    Row<Measurement> row = m_input.next();
    for (Calculation calc : m_resultDescriptor.getCalculations().values()) {
        double[] args = getValues(row, calc.getArgs());
        double value = calc.getCalculationFunction().apply(args);
        row.addElement(new Measurement(row.getTimestamp(), row.getResource(), calc.getLabel(), value));
    }
    return row;
}
/**
 * Looks up the accumulation for {@code name}, lazily creating and registering
 * a fresh one on first use.
 */
private Accumulation getOrCreateAccumulation(String name) {
    Accumulation existing = m_accumulation.get(name);
    if (existing != null) {
        return existing;
    }
    Accumulation created = new Accumulation();
    m_accumulation.put(name, created);
    return created;
}
/**
 * Fetches the named measurement from the current row, falling back to a NaN
 * placeholder when the row has no element under that name.
 */
private Measurement getMeasurement(String name) {
    Measurement measurement = m_current.getElement(name);
    if (measurement == null) {
        return getNan(name);
    }
    return measurement;
}
/**
 * Returns the accumulated value averaged over the known duration, or
 * {@link Double#NaN} when too much of the window is unknown to be valid.
 */
private Double getAverage() {
    if (!isValid()) {
        return Double.NaN;
    }
    return m_value.divideBy(m_known).doubleValue();
}
/**
 * Resolves each name in {@code names} to a double: numeric literals parse
 * directly, anything else is looked up as a measurement on {@code row}.
 *
 * @throws NullPointerException if a non-literal name has no measurement in the row
 */
private double[] getValues(Row<Measurement> row, String[] names) {
    double[] values = new double[names.length];
    int index = 0;
    for (String name : names) {
        Optional<Double> literal = parseDouble(name);
        if (literal.isPresent()) {
            values[index] = literal.get();
        } else {
            // A missing element here indicates a defect upstream, not bad user input.
            values[index] = checkNotNull(row.getElement(name), "Missing measurement; Upstream iterator is bugged").getValue();
        }
        index++;
    }
    return values;
}
/**
 * Emits the next generated gauge sample, timestamped at the next interval.
 * The value comes from the subclass-supplied {@code value()} hook.
 */
@Override
public Optional<Sample> next() {
    Timestamp timestamp = m_intervals.next();
    Sample sample = new Sample(timestamp, m_resource, m_metric, MetricType.GAUGE, value());
    return Optional.of(sample);
}
/**
 * Builds the primary-data stage, which aligns raw samples onto the
 * descriptor's interval grid over [start, end].
 *
 * @param resource         resource the samples belong to
 * @param start            start of the requested range
 * @param end              end of the requested range
 * @param resultDescriptor supplies the base interval
 * @param input            upstream sample rows; fully buffered here
 */
PrimaryData(Resource resource, Timestamp start, Timestamp end, ResultDescriptor resultDescriptor, Iterator<Row<Sample>> input) {
    m_resultDescriptor = checkNotNull(resultDescriptor, "result descriptor argument");
    m_resource = checkNotNull(resource, "resource argument");
    checkNotNull(start, "start argument");
    checkNotNull(end, "end argument");
    m_interval = resultDescriptor.getInterval();
    // Output timestamps are aligned to interval boundaries covering [start, end].
    m_timestamps = new IntervalGenerator(start.stepFloor(m_interval), end.stepCeiling(m_interval), m_interval);
    // Gather the whole collection of rows.
    // We need these since the next sample for a given metric may only appear a few rows ahead
    Iterators.addAll(m_samples, checkNotNull(input, "input argument"));
}
/**
 * Returns the current timestamp and steps the cursor one interval forward
 * (or backward when iterating in reverse).
 *
 * @throws NoSuchElementException when the sequence is exhausted
 */
@Override
public Timestamp next() {
    if (!hasNext()) {
        throw new NoSuchElementException();
    }
    Timestamp result = m_current;
    // Advance the cursor for the following call; direction depends on m_reversed.
    m_current = m_reversed ? m_current.minus(m_interval) : m_current.plus(m_interval);
    return result;
}
/**
 * Drives the query workload: for every select-length window between the
 * configured start and end (iterated in reverse), enqueues one query per
 * configured resource, then shuts the workers down.
 *
 * @throws InterruptedException if interrupted while blocking on a full queue
 */
@Override
void go() throws InterruptedException {
    createThreads();

    IntervalGenerator windows = new IntervalGenerator(m_config.getStart(), m_config.getEnd(), m_config.getSelectLength(), true);
    for (Timestamp windowEnd : windows) {
        // The window start is invariant per timestamp, so compute it once.
        Timestamp windowStart = windowEnd.minus(m_config.getSelectLength());
        for (String resource : m_config.getResources()) {
            m_queryQueue.put(new Query(resource, windowStart, windowEnd, m_config.getResolution()));
        }
    }

    shutdown();
}
/**
 * Issues one async SELECT per resource-shard partition covering [start, end]
 * and returns an iterator over the concatenated result rows.
 *
 * @param context  context whose shard size and read consistency apply
 * @param resource resource whose rows are selected
 * @param start    range start (inclusive, per the bound "start" parameter)
 * @param end      range end (per the bound "end" parameter)
 * @return a wrapper iterating the futures' rows as they complete
 */
private Iterator<com.datastax.driver.core.Row> cassandraSelect(Context context, Resource resource, Timestamp start, Timestamp end) {
    List<Future<ResultSet>> futures = Lists.newArrayList();

    Duration resourceShard = m_contextConfigurations.getResourceShard(context);
    // Partition keys are shard-aligned; stepFloor(end) is the last partition
    // that can contain rows <= end (the per-row "start"/"end" bounds below do
    // the fine-grained filtering). NOTE(review): assumes IntervalGenerator's
    // upper bound is inclusive — confirm against its implementation.
    Timestamp lower = start.stepFloor(resourceShard);
    Timestamp upper = end.stepFloor(resourceShard);

    for (Timestamp partition : new IntervalGenerator(lower, upper, resourceShard)) {
        BoundStatement bindStatement = m_selectStatement.bind();
        bindStatement.setString(SchemaConstants.F_CONTEXT, context.getId());
        bindStatement.setInt(SchemaConstants.F_PARTITION, (int) partition.asSeconds());
        bindStatement.setString(SchemaConstants.F_RESOURCE, resource.getId());
        bindStatement.setTimestamp("start", start.asDate());
        bindStatement.setTimestamp("end", end.asDate());
        // Use the context specific consistency level
        bindStatement.setConsistencyLevel(m_contextConfigurations.getReadConsistency(context));
        futures.add(m_session.executeAsync(bindStatement));
    }

    return new ConcurrentResultWrapper(futures);
}