timeSeries.sort(); int size = timeSeries.size(); Arrays.fill(usePoint, (byte) 1); long[] rawTimeStamps = timeSeries.getTimestampsAsArray(); double[] rawValues = timeSeries.getValuesAsArray(); timeSeries.clear(); timeSeries.add(rawTimeStamps[i], rawValues[i]);
/**
 * Copies a given metric time series into a fresh builder.
 *
 * @param ts the time series to copy
 * @return a builder preconfigured with name, type, range, points and attributes of {@code ts}
 */
public MetricTimeSeries.Builder copy(MetricTimeSeries ts) {
    // Carry over identity, query range, data points and user-defined attributes.
    MetricTimeSeries.Builder builder = new MetricTimeSeries.Builder(ts.getName(), ts.getType());
    builder.start(ts.getStart());
    builder.end(ts.getEnd());
    builder.points(ts.getTimestamps(), ts.getValues());
    builder.attributes(ts.attributes());
    return builder;
}
}
/**
 * Hash code derived solely from the metric name, mirroring {@code equals}.
 */
@Override
public int hashCode() {
    HashCodeBuilder hashBuilder = new HashCodeBuilder();
    hashBuilder.append(getMetric());
    return hashBuilder.toHashCode();
}
@Override public void execute(List<ChronixTimeSeries<MetricTimeSeries>> timeSeriesList, FunctionCtx functionCtx) { for (ChronixTimeSeries<MetricTimeSeries> chronixTimeSeries : timeSeriesList) { MetricTimeSeries timeSeries = chronixTimeSeries.getRawTimeSeries(); //get a copy of the values double[] values = timeSeries.getValuesAsArray(); //get a copy of timestamps long[] times = timeSeries.getTimestampsAsArray(); for (int i = 0; i < timeSeries.size(); i++) { //scale the original value values[i] = values[i] * value; } //clear and delete the time series timeSeries.clear(); timeSeries.addAll(times, values); functionCtx.add(this, chronixTimeSeries.getJoinKey()); } }
MultivariateTimeSeries multivariateTimeSeries = new MultivariateTimeSeries(1); if (timeSeries.size() > 0) { timeSeries.sort(); long formerTimestamp = timeSeries.getTime(0); double formerValue = timeSeries.getValue(0); int timesSameTimestamp = 0; for (int i = 1; i < timeSeries.size(); i++) { if (formerTimestamp == timeSeries.getTime(i)) { formerValue += timeSeries.getValue(i); timesSameTimestamp++; } else { formerTimestamp = timeSeries.getTime(i); formerValue = timeSeries.getValue(i);
@Override public BinaryTimeSeries to(MetricTimeSeries timeSeries) { LOGGER.debug("Converting {} to BinaryTimeSeries", timeSeries); BinaryTimeSeries.Builder builder = new BinaryTimeSeries.Builder(); //serialize byte[] serializedPoints = ProtoBufMetricTimeSeriesSerializer.to(timeSeries.points().iterator()); byte[] compressedPoints = Compression.compress(serializedPoints); //Add the minimum required fields builder.start(timeSeries.getStart()) .end(timeSeries.getEnd()) .data(compressedPoints); //Currently we only have a metric builder.field(MetricTSSchema.METRIC, timeSeries.getMetric()); //Add a list of user defined attributes timeSeries.attributes().forEach(builder::field); return builder.build(); } }
MetricTimeSeries timeSeries = chronixTimeSeries.getRawTimeSeries(); if (timeSeries.isEmpty()) { continue; timeSeries.sort(); LongList timeList = new LongList(timeSeries.size()); DoubleList valueList = new DoubleList(timeSeries.size()); for (int i = 0; i < timeSeries.size(); i++) { double value = timeSeries.getValue(i); timeList.add(timeSeries.getTime(i)); valueList.add(value); distinct.add(value); timeSeries.clear(); timeSeries.addAll(timeList, valueList);
/**
 * Serializes the data points of a metric time series to json.
 *
 * <p>The output is an array of two parallel lists: index 0 holds the
 * timestamps, index 1 holds the values.
 *
 * @param timeSeries the time series whose points should be serialized
 * @return the json-serialized points, or {@code EMPTY_JSON} if the series is
 *         empty or serialization fails
 */
public byte[] toJson(MetricTimeSeries timeSeries) {
    if (timeSeries.isEmpty()) {
        return EMPTY_JSON;
    }
    // try-with-resources guarantees the writer is closed even when
    // gson.toJson throws (the original leaked it on that path).
    try (ByteArrayOutputStream baos = new ByteArrayOutputStream();
         JsonWriter writer = new JsonWriter(new OutputStreamWriter(baos, UTF_8))) {
        // Parallel lists: data[0] = timestamps, data[1] = values.
        List[] data = new List[]{new ArrayList<>(timeSeries.size()), new ArrayList<>(timeSeries.size())};
        for (int i = 0; i < timeSeries.size(); i++) {
            data[0].add(timeSeries.getTime(i));
            data[1].add(timeSeries.getValue(i));
        }
        gson.toJson(data, List[].class, writer);
        // Flush buffered json into baos before snapshotting the bytes.
        writer.flush();
        return baos.toByteArray();
    } catch (IOException e) {
        LOGGER.error("Could not serialize data to json", e);
    }
    return EMPTY_JSON;
}
/** * Calculate the difference between the first and the last value of a given time series * * @param timeSeriesList list with time series * @return the average or 0 if the list is empty */ @Override public void execute(List<ChronixTimeSeries<MetricTimeSeries>> timeSeriesList, FunctionCtx functionCtx) { for (ChronixTimeSeries<MetricTimeSeries> chronixTimeSeries : timeSeriesList) { MetricTimeSeries timeSeries = chronixTimeSeries.getRawTimeSeries(); //If it is empty, we return NaN if (timeSeries.size() <= 0) { functionCtx.add(this, Double.NaN, chronixTimeSeries.getJoinKey()); continue; } //we need to sort the time series timeSeries.sort(); //get the first and the last value double firstValue = timeSeries.getValue(0); double lastValue = timeSeries.getValue(timeSeries.size() - 1); functionCtx.add(this, Math.abs(firstValue - lastValue), chronixTimeSeries.getJoinKey()); } }
@Override public void execute(List<ChronixTimeSeries<MetricTimeSeries>> timeSeriesList, FunctionCtx functionCtx) { for (ChronixTimeSeries<MetricTimeSeries> chronixTimeSeries : timeSeriesList) { MetricTimeSeries timeSeries = chronixTimeSeries.getRawTimeSeries(); if (timeSeries.isEmpty()) { functionCtx.add(this, Double.NaN, chronixTimeSeries.getJoinKey()); continue; } //Else calculate the analysis value int size = timeSeries.size(); double current = 0; for (int i = 0; i < size; i++) { current += timeSeries.getValue(i); } functionCtx.add(this, current / timeSeries.size(), chronixTimeSeries.getJoinKey()); } }
@Test public void test() throws Exception { try (InputStream stream = KairosDbFormatParserTest.class.getResourceAsStream("/kairosdb.json")) { assertNotNull(stream); List<MetricTimeSeries> series = Lists.newArrayList(sut.parse(stream)); // We should have two metrics assertThat(series.size(), is(2)); MetricTimeSeries first = series.get(0); assertThat(first.getName(), is("archive.file.tracked")); assertThat(first.getTimestamps().size(), is(1)); assertThat(first.getValues().get(0), is(123.0)); assertThat(first.getTimestamps().get(0), is(1349109376L)); assertThat(first.getAttributesReference().get("host"), is("test")); MetricTimeSeries second = series.get(1); assertThat(second.getName(), is("archive.file.search")); assertThat(second.getTimestamps().size(), is(1)); assertThat(second.getValues().get(0), is(32.1)); assertThat(second.getTimestamps().get(0), is(1349109665L)); assertThat(second.getAttributesReference().get("host"), is("test")); } } }
int size = ts.size(); if (size < 1000) { timestamps.addAll(ts.getTimestampsAsArray()); values.addAll(ts.getValuesAsArray()); name = ts.getName(); type = ts.getType(); merge(attributes, ts.getAttributesReference());
/** * Calculates the maximum value of the first time series. * * @param timeSeriesList list with time series * @return the maximum or 0 if the list is empty */ @Override public void execute(List<ChronixTimeSeries<MetricTimeSeries>> timeSeriesList, FunctionCtx functionCtx) { for (ChronixTimeSeries<MetricTimeSeries> chronixTimeSeries : timeSeriesList) { MetricTimeSeries timeSeries = chronixTimeSeries.getRawTimeSeries(); //If it is empty, we return NaN if (timeSeries.size() <= 0) { functionCtx.add(this, Double.NaN, chronixTimeSeries.getJoinKey()); continue; } //Else calculate the analysis value int size = timeSeries.size(); double max = timeSeries.getValue(0); for (int i = 1; i < size; i++) { double next = timeSeries.getValue(i); if (next > max) { max = next; } } functionCtx.add(this, max, chronixTimeSeries.getJoinKey()); } }
/**
 * Parses the bundled prometheus-text.txt fixture and verifies the three
 * resulting series: a metric without timestamp/labels (timestamp defaults to
 * NOW) and two http_requests_total series distinguished by their "code" label.
 */
@Test public void testParse() throws Exception { try (InputStream stream = GraphiteFormatParserTest.class.getResourceAsStream("/prometheus-text.txt")) { assertNotNull(stream); List<MetricTimeSeries> series = Lists.newArrayList(sut.parse(stream)); assertThat(series.size(), is(3)); MetricTimeSeries metricWithoutTimestampAndLabel = series.stream().filter(s -> s.getName().equals("metric_without_timestamp_and_labels")).findFirst().get(); assertThat(metricWithoutTimestampAndLabel.getTime(0), is(NOW.toEpochMilli())); assertThat(metricWithoutTimestampAndLabel.getValue(0), is(12.47)); MetricTimeSeries httpRequestsTotal200 = series.stream().filter(s -> s.getName().equals("http_requests_total") && s.getAttributesReference().get("code").equals("200")).findFirst().get(); assertThat(httpRequestsTotal200.getTime(0), is(1395066363000L)); assertThat(httpRequestsTotal200.getValue(0), is(1027.0)); MetricTimeSeries httpRequestsTotal400 = series.stream().filter(s -> s.getName().equals("http_requests_total") && s.getAttributesReference().get("code").equals("400")).findFirst().get(); assertThat(httpRequestsTotal400.getTime(0), is(1395066363000L)); assertThat(httpRequestsTotal400.getValue(0), is(3.0)); } } }
/** * Calculates the standard deviation of the values of each time series. * * @param timeSeriesList list with time series * @param functionCtx context that collects the standard deviation per join key; NaN is added for an empty series */ @Override public void execute(List<ChronixTimeSeries<MetricTimeSeries>> timeSeriesList, FunctionCtx functionCtx) { for (ChronixTimeSeries<MetricTimeSeries> chronixTimeSeries : timeSeriesList) { MetricTimeSeries timeSeries = chronixTimeSeries.getRawTimeSeries(); //If it is empty, we return NaN if (timeSeries.size() <= 0) { functionCtx.add(this, Double.NaN, chronixTimeSeries.getJoinKey()); continue; } //Else calculate the analysis value functionCtx.add(this, de.qaware.chronix.solr.type.metric.functions.math.StdDev.dev(timeSeries.getValues()), chronixTimeSeries.getJoinKey()); } }
/** * Gets the first value in the time series. * It first orders the time series. * * @param timeSeriesList list with time series * @return the average or 0 if the list is empty */ @Override public void execute(List<ChronixTimeSeries<MetricTimeSeries>> timeSeriesList, FunctionCtx functionCtx) { for (ChronixTimeSeries<MetricTimeSeries> chronixTimeSeries : timeSeriesList) { MetricTimeSeries timeSeries = chronixTimeSeries.getRawTimeSeries(); //If it is empty, we return NaN if (timeSeries.isEmpty()) { functionCtx.add(this, Double.NaN, chronixTimeSeries.getJoinKey()); continue; } //we need to sort the time series timeSeries.sort(); functionCtx.add(this, timeSeries.getValue(0), chronixTimeSeries.getJoinKey()); } }
assertThat(cpu0series.getName(), is("sys.cpu.user")); assertThat(cpu0series.getTimestamps().size(), is(2)); assertThat(cpu0series.getTimestamps().get(0), is(1356998400000L)); assertThat(cpu0series.getTimestamps().get(1), is(1356998401000L)); assertThat(cpu0series.getAttributesReference().get("cpu"), is("0")); assertThat(cpu0series.getAttributesReference().get("host"), is("webserver01")); assertThat(cpu1series.getName(), is("sys.cpu.user")); assertThat(cpu1series.getTimestamps().size(), is(1)); assertThat(cpu1series.getTimestamps().get(0), is(1356998400000L)); assertThat(cpu1series.getAttributesReference().get("cpu"), is("1")); assertThat(cpu1series.getAttributesReference().get("host"), is("webserver01")); assertThat(cpu2series.getName(), is("sys.cpu.user")); assertThat(cpu2series.getTimestamps().size(), is(1)); assertThat(cpu2series.getTimestamps().get(0), is(1356998400000L)); assertThat(cpu2series.getAttributesReference().get("cpu"), is("2")); assertThat(cpu2series.getAttributesReference().get("host"), is("webserver01"));
@Override public TimeSeries<Long, Double> from(BinaryTimeSeries binaryTimeSeries, long queryStart, long queryEnd) { //This is a hack MetricTimeSeries metricTimeSeries = new MetricTimeSeriesConverter().from(binaryTimeSeries, queryStart, queryEnd); TimeSeries<Long, Double> timeSeries = new TimeSeries<>(map(metricTimeSeries.points())); metricTimeSeries.getAttributesReference().forEach(timeSeries::addAttribute); return timeSeries; }
LongList timestamps = timeSeries.getTimestamps(); for (int i = 1; i < timeSeries.size(); i++) { long current = timestamps.get(i);
@Override public byte[] dataAsBlob() { byte[] data = ProtoBufMetricTimeSeriesSerializer.to(timeSeries.points().iterator()); //compress data return Compression.compress(data); }