/**
 * Returns a new time series equal to this one scaled by {@code factor}.
 * The snapshot copy is taken under the instance lock; scaling is then applied
 * to the private copy only, so this series is never modified.
 */
@Override
public FloatTimeSeries times(float factor) {
	final FloatTimeSeries scaled;
	synchronized (this) {
		scaled = new FloatTreeTimeSeries(this);
	}
	scaled.multiplyBy(factor);
	return scaled;
}
/**
 * Returns a new time series equal to the pointwise product of this series and
 * {@code other}. A snapshot copy of this series is taken under the instance
 * lock; the multiplication mutates only that copy.
 */
@Override
public FloatTimeSeries times(ReadOnlyTimeSeries other) {
	final FloatTimeSeries product;
	synchronized (this) {
		product = new FloatTreeTimeSeries(this);
	}
	product.multiplyBy(other);
	return product;
}
/**
 * Returns a new time series equal to this one shifted by {@code addend}.
 * The copy is made while holding the instance lock; the addition is applied
 * to the copy afterwards, leaving this series untouched.
 */
@Override
public FloatTimeSeries plus(float addend) {
	final FloatTimeSeries shifted;
	synchronized (this) {
		shifted = new FloatTreeTimeSeries(this);
	}
	shifted.add(addend);
	return shifted;
}
/**
 * Returns a new time series equal to the pointwise sum of this series and
 * {@code other}. A locked snapshot of this series is taken first; the sum is
 * accumulated into that snapshot only.
 */
@Override
public FloatTimeSeries plus(FloatTimeSeries other) {
	final FloatTimeSeries sum;
	synchronized (this) {
		sum = new FloatTreeTimeSeries(this);
	}
	sum.add(other);
	return sum;
}
/**
 * Reconstructs an in-memory time series from a serialized DTO. All samples are
 * created with {@link Quality#GOOD}.
 *
 * @param dto transport object carrying parallel timestamp and value lists
 * @return a new time series containing one sample per timestamp/value pair
 * @throws NullPointerException if either list is missing
 * @throws IllegalStateException if the two lists differ in length
 */
private static ReadOnlyTimeSeries deserialize(final TimeseriesDTO dto) {
	if (dto.values == null || dto.timestamps == null)
		throw new NullPointerException("Timestamps or values are null");
	if (dto.values.size() != dto.timestamps.size())
		throw new IllegalStateException("Nr of timestamps does not equal nr of values");
	final List<SampledValue> samples = new ArrayList<>(dto.timestamps.size());
	final Iterator<Float> valueIt = dto.values.iterator();
	// the lists have equal size, so a single hasNext() check suffices
	for (final Iterator<Long> timeIt = dto.timestamps.iterator(); timeIt.hasNext();) {
		samples.add(new SampledValue(new FloatValue(valueIt.next()), timeIt.next(), Quality.GOOD));
	}
	final TreeTimeSeries ts = new FloatTreeTimeSeries();
	ts.addValues(samples);
	return ts;
}
/**
 * Creates an aggregation evaluation over the input data of the first
 * {@link EvaluationInput}. Per input index, one STEPS-interpolated result
 * series is prepared for every requested aggregation type, plus empty
 * per-index buffers for integration, interval and NaN bookkeeping.
 */
public Aggregation(List<EvaluationInput> input, List<ResultType> requestedResults,
		Collection<ConfigurationInstance> configurations, EvaluationListener listener, long time) {
	super(input, requestedResults, configurations, listener, time);
	this.id = "AggregationEvaluation_" + counter.incrementAndGet();
	// only the first evaluation input is aggregated; snapshot its data immutably
	this.input = Collections.unmodifiableList(new ArrayList<>(input.iterator().next().getInputData()));
	this.inputSize = this.input.size();
	values = new HashMap<>(inputSize);
	integrationBuffer = new HashMap<>(inputSize);
	intervalBuffer = new HashMap<>(inputSize);
	nanBuffer = new HashMap<>(inputSize);
	lastValues = new HashMap<>(inputSize + requestedResults.size());
	for (int idx = 0; idx < inputSize; idx++) {
		final Map<AggregationType, FloatTimeSeries> perType = new HashMap<>(requestedResults.size());
		for (ResultType type : requestedResults) {
			final FloatTimeSeries ts = new FloatTreeTimeSeries();
			ts.setInterpolationMode(InterpolationMode.STEPS);
			perType.put((AggregationType) type, ts);
		}
		values.put(idx, perType);
		integrationBuffer.put(idx, new HashMap<>(requestedResults.size()));
		intervalBuffer.put(idx, new HashMap<>(requestedResults.size()));
		nanBuffer.put(idx, new HashMap<>(requestedResults.size()));
	}
}
/**
 * Get a reduced set of points for the specified interval [t0,t1], obtained by
 * downsampling the original set to a minimum time interval between adjacent
 * points.
 *
 * @param schedule source time series
 * @param t0 interval start
 * @param t1 interval end
 * @param minimumInterval minimum spacing between adjacent result points
 * @return the downsampled point list
 */
public static List<SampledValue> downsample(ReadOnlyTimeSeries schedule, long t0, long t1, long minimumInterval) {
	final FloatTimeSeries copy = new FloatTreeTimeSeries();
	// copy the interval including explicit boundary points, then reduce
	copy.readWithBoundaries(schedule, t0, t1);
	return copy.downsample(t0, t1, minimumInterval);
}
/**
 * Returns a new time series equal to this one shifted by {@code addend}.
 * Delegates directly when the wrapped input already is a {@link FloatTimeSeries}.
 *
 * Bug fix: the previous implementation called {@code result.plus(addend)},
 * which creates and returns a NEW series rather than mutating {@code result};
 * that return value was discarded, so the addend was silently dropped and the
 * unmodified copy returned. {@code add} mutates the copy in place (consistent
 * with the non-wrapper implementation of {@code plus(float)}).
 */
@Override
public FloatTimeSeries plus(float addend) {
	if (isFloatTimeSeries)
		return ((FloatTimeSeries) input).plus(addend);
	final FloatTimeSeries result = new FloatTreeTimeSeries();
	result.read(input);
	result.add(addend);
	return result;
}
/**
 * Computes the time-weighted mean of {@code values} over [t0, t1], using
 * linear interpolation between samples (integral divided by interval length).
 * Note: if t1 == t0 the division is 0-width and yields NaN/Infinity.
 */
public float getAverage(List<SampledValue> values, long t0, long t1) {
	final FloatTimeSeries series = new FloatTreeTimeSeries();
	series.addValues(values);
	series.setInterpolationMode(InterpolationMode.LINEAR);
	final float integral = series.integrate(t0, t1);
	return integral / (float) (t1 - t0);
}
/**
 * Downsamples the wrapped input on [t0, t1] to a minimum spacing of
 * {@code minimumInterval}. Delegates directly when the input already is a
 * {@link FloatTimeSeries}; otherwise the interval is copied first.
 */
@Override
public List<SampledValue> downsample(long t0, long t1, long minimumInterval) {
	if (isFloatTimeSeries)
		return ((FloatTimeSeries) input).downsample(t0, t1, minimumInterval);
	final FloatTimeSeries copy = new FloatTreeTimeSeries();
	copy.read(input, t0, t1);
	return copy.downsample(t0, t1, minimumInterval);
}
/**
 * Returns a new time series equal to the pointwise product of the wrapped
 * input and {@code other}. Delegates directly when the input already is a
 * {@link FloatTimeSeries}.
 *
 * Bug fix: the previous implementation called {@code result.times(other)},
 * which creates and returns a NEW series rather than mutating {@code result};
 * that return value was discarded, so the multiplication was silently lost.
 * {@code multiplyBy} mutates the copy in place (consistent with the
 * non-wrapper implementation of {@code times(ReadOnlyTimeSeries)}).
 */
@Override
public FloatTimeSeries times(ReadOnlyTimeSeries other) {
	if (isFloatTimeSeries)
		return ((FloatTimeSeries) input).times(other);
	final FloatTimeSeries result = new FloatTreeTimeSeries();
	result.read(input);
	result.multiplyBy(other);
	return result;
}
/**
 * Returns the sub-intervals of {@code searchInterval} on which the wrapped
 * input is positive. Delegates directly when the input already is a
 * {@link FloatTimeSeries}; otherwise it is copied first.
 */
@Override
public List<TimeInterval> getPositiveDomain(TimeInterval searchInterval) {
	if (isFloatTimeSeries)
		return ((FloatTimeSeries) input).getPositiveDomain(searchInterval);
	final FloatTimeSeries copy = new FloatTreeTimeSeries();
	copy.read(input);
	return copy.getPositiveDomain(searchInterval);
}
/**
 * Returns a new time series equal to the wrapped input scaled by
 * {@code factor}. Delegates directly when the input already is a
 * {@link FloatTimeSeries}; otherwise a scaled copy is built.
 */
@Override
public FloatTimeSeries times(float factor) {
	if (isFloatTimeSeries)
		return ((FloatTimeSeries) input).times(factor);
	final FloatTimeSeries scaled = new FloatTreeTimeSeries();
	scaled.read(input);
	scaled.multiplyBy(factor);
	return scaled;
}
/**
 * Returns a new time series equal to the pointwise sum of the wrapped input
 * and {@code other}. Delegates directly when the input already is a
 * {@link FloatTimeSeries}.
 *
 * Bug fix: the previous implementation called {@code result.plus(other)},
 * which creates and returns a NEW series rather than mutating {@code result};
 * that return value was discarded, so the addition was silently lost.
 * {@code add} mutates the copy in place (consistent with the non-wrapper
 * implementation of {@code plus(FloatTimeSeries)}).
 */
@Override
public FloatTimeSeries plus(FloatTimeSeries other) {
	if (isFloatTimeSeries)
		return ((FloatTimeSeries) input).plus(other);
	final FloatTimeSeries result = new FloatTreeTimeSeries();
	result.read(input);
	result.add(other);
	return result;
}
/**
 * Like {@link #integrate(ReadOnlyTimeSeries, long, long)}, but with an
 * explicitly set interpolation mode.
 *
 * @param schedule source time series
 * @param startTime integration interval start
 * @param endTime integration interval end
 * @param mode interpolation mode for integration; if null, the default
 *        schedule interpolation mode is used
 * @return the integral over [startTime, endTime]
 */
public static float integrate(ReadOnlyTimeSeries schedule, long startTime, long endTime, InterpolationMode mode) {
	final FloatTimeSeries copy = new FloatTreeTimeSeries();
	copy.readWithBoundaries(schedule, startTime, endTime);
	if (mode != null)
		copy.setInterpolationMode(mode);
	return copy.integrate(startTime, endTime);
}
// NOTE(review): left byte-identical — the interval arithmetic below (nr sizing, the
// second-to-last-point choice for t1, and the shorter final integration window) is
// order-sensitive; see the Javadoc in the line for the intended semantics. Each emitted
// point carries the interval-average value on [t0+i*minInterval, t0+(i+1)*minInterval],
// taken from the first sample's quality; the original last point is appended verbatim.
/** * Downsampling based on the integration function in memory-timeseries... taking into account the interpolation mode * We can assume that there are at least two data points in oldSubset * * We always take the last data point equal to the last old one... such that in case of NEAREST or STEPS * interpolation mode, the error introduced by downsampling is minimized. * @param oldSubset * @param newValues */ private static void downsample(List<SampledValue> oldSubset, List<SampledValue> newValues, long minInterval) { long t0 = oldSubset.get(0).getTimestamp(); long t1 = oldSubset.get(oldSubset.size()-2).getTimestamp(); Quality quality = oldSubset.get(0).getQuality(); // const long delta = t1-t0; int nr = (int) (delta/minInterval); if (nr == 0) nr = 1; // in this case, the distance between the first and second (=last) point may be smaller than minInterval FloatTimeSeries fts = new FloatTreeTimeSeries(); fts.addValues(oldSubset); for (int i=0;i<nr-1;i++) { float value = fts.integrate(t0 + i*minInterval, t0 + (i+1)*minInterval) / minInterval; if (Float.isNaN(value) || Float.isInfinite(value)) LoggerFactory.getLogger(FloatTreeTimeSeries.class).warn("Downsampling led to a non-finite value: " + value + "; this may cause problems"); newValues.add(new SampledValue(new FloatValue(value), t0 + i*minInterval, quality)); } float value = fts.integrate(t0 + (nr-1)*minInterval, t1) / (t1 - t0 - (nr-1)*minInterval); newValues.add(new SampledValue(new FloatValue(value), t0 + (nr-1)*minInterval, quality)); newValues.add(new SampledValue(oldSubset.get(oldSubset.size()-1))); }
/**
 * Replaces every schedule value older than {@code ageThreshold} (relative to
 * the current framework time) with a version downsampled to
 * {@code minInterval} spacing. Aborts without modifying the schedule if the
 * boundary computation overflows.
 */
@Override
public void apply(TimeSeries schedule, long ageThreshold) {
	final long now = am.getFrameworkTime();
	final long boundary;
	try {
		boundary = subtract(now, ageThreshold);
	} catch (ArithmeticException e) {
		// overflow in boundary computation: leave the schedule untouched
		logger.error("Arithmetic exception",e);
		return;
	}
	final FloatTimeSeries aged = new FloatTreeTimeSeries();
	aged.read(schedule, Long.MIN_VALUE, boundary);
	schedule.replaceValues(Long.MIN_VALUE, boundary, aged.downsample(Long.MIN_VALUE, boundary, minInterval));
}
/**
 * Returns the minimum sample on [t0, t1], computed via the identity
 * min(x) = -max(-x): the series is mirrored (multiplied by -1), its maximum
 * located, and the value negated back.
 *
 * Bug fix: the previous implementation returned the mirrored maximum's value
 * WITHOUT negating it back, i.e. it returned -min instead of min (timestamp
 * and quality were already correct).
 */
@Override
public synchronized SampledValue getMin(long t0, long t1) {
	final FloatTreeTimeSeries mirrored = new FloatTreeTimeSeries(this);
	mirrored.multiplyBy(-1.f);
	final SampledValue negMax = mirrored.getMax(t0, t1);
	// negate back: the max of the mirrored series is the negated min of this one
	return new SampledValue(new FloatValue(-negMax.getValue().getFloatValue()), negMax.getTimestamp(),
			negMax.getQuality());
}
/**
 * Returns a new time series holding the absolute value of the wrapped input.
 * Delegates directly when the input already is a {@link FloatTimeSeries}.
 * LINEAR-interpolated inputs are copied and handed to the full implementation
 * (which can handle zero crossings between samples); otherwise each sample is
 * flipped individually when negative.
 */
@Override
public FloatTimeSeries getAbsolute() {
	if (isFloatTimeSeries)
		return ((FloatTimeSeries) input).getAbsolute();
	final FloatTimeSeries result = new FloatTreeTimeSeries();
	if (input.getInterpolationMode() == InterpolationMode.LINEAR) {
		result.read(input);
		return result.getAbsolute();
	}
	result.setInterpolationMode(getInterpolationMode());
	if (input.isEmpty())
		return result;
	for (final Iterator<SampledValue> it = input.iterator(); it.hasNext();) {
		final SampledValue sv = it.next();
		final float v = sv.getValue().getFloatValue();
		result.addValue(v >= 0 ? sv
				: new SampledValue(new FloatValue(-v), sv.getTimestamp(), sv.getQuality()));
	}
	return result;
}
/**
 * Returns a new time series holding the absolute value of this series,
 * keeping the interpolation mode. LINEAR mode is delegated to
 * {@code getAbsoluteLinear} (which can handle zero crossings between
 * samples); otherwise each negative sample is flipped individually.
 */
@Override
public FloatTimeSeries getAbsolute() {
	final FloatTimeSeries result = new FloatTreeTimeSeries();
	result.setInterpolationMode(getInterpolationMode());
	if (getValues().isEmpty())
		return result;
	if (getInterpolationMode() == InterpolationMode.LINEAR)
		return getAbsoluteLinear(result);
	for (final SampledValue sv : getValues()) {
		final float v = sv.getValue().getFloatValue();
		result.addValue(v >= 0 ? sv
				: new SampledValue(new FloatValue(-v), sv.getTimestamp(), sv.getQuality()));
	}
	return result;
}