@Override
public long getTimestampFromEpoch()
{
  return row.getTimestampFromEpoch();
}

@Override
public boolean apply(InputRow input)
{
  return interval.contains(input.getTimestampFromEpoch());
}

@Override
public long getTimestampFromEpoch()
{
  final RowFunction transform = transforms.get(ColumnHolder.TIME_COLUMN_NAME);
  if (transform != null) {
    return Rows.objectToNumber(ColumnHolder.TIME_COLUMN_NAME, transform.eval(row)).longValue();
  } else {
    return row.getTimestampFromEpoch();
  }
}

/**
 * Return a shardSpec for the given interval and input row.
 *
 * @param interval interval for the shardSpec
 * @param row      input row
 *
 * @return a shardSpec
 */
ShardSpec getShardSpec(Interval interval, InputRow row)
{
  final List<ShardSpec> shardSpecs = map.get(interval);
  if (shardSpecs == null || shardSpecs.isEmpty()) {
    throw new ISE("Failed to get shardSpec for interval[%s]", interval);
  }
  return shardSpecs.get(0).getLookup(shardSpecs).getShardSpec(row.getTimestampFromEpoch(), row);
}

@Override
protected void innerMap(InputRow inputRow, Context context) throws IOException, InterruptedException
{
  final Map<String, Iterable<String>> dims = new HashMap<>();
  for (final String dim : inputRow.getDimensions()) {
    dims.put(dim, inputRow.getDimension(dim));
  }
  helper.emitDimValueCounts(context, DateTimes.utc(inputRow.getTimestampFromEpoch()), dims);
}

@Override
public IncrementalIndexAddResult add(InputRow row, Supplier<Committer> committerSupplier) throws IndexSizeExceededException
{
  Sink sink = getSink(row.getTimestampFromEpoch());
  if (sink == null) {
    return Plumber.THROWAWAY;
  }
  final IncrementalIndexAddResult addResult = sink.add(row, false);
  if (!sink.canAppendRow()) {
    persist(committerSupplier.get());
  }
  return addResult;
}

interval = config.getGranularitySpec()
                 .getSegmentGranularity()
                 .bucket(DateTimes.utc(inputRow.getTimestampFromEpoch()));

final Optional<Interval> maybeInterval = config.getGranularitySpec()
                                               .bucketInterval(DateTimes.utc(inputRow.getTimestampFromEpoch()));
if (!maybeInterval.isPresent()) {
  throw new ISE("WTF?! No bucket found for timestamp: %s", inputRow.getTimestampFromEpoch());
}

/**
 * Get the proper bucket for some input row.
 *
 * @param inputRow an InputRow
 *
 * @return the Bucket that this row belongs to
 */
public Optional<Bucket> getBucket(InputRow inputRow)
{
  final Optional<Interval> timeBucket = schema.getDataSchema().getGranularitySpec().bucketInterval(
      DateTimes.utc(inputRow.getTimestampFromEpoch())
  );
  if (!timeBucket.isPresent()) {
    return Optional.absent();
  }
  final DateTime bucketStart = timeBucket.get().getStart();
  final ShardSpec actualSpec = shardSpecLookups.get(bucketStart.getMillis())
                                               .getShardSpec(
                                                   rollupGran.bucketStart(inputRow.getTimestamp()).getMillis(),
                                                   inputRow
                                               );
  final HadoopyShardSpec hadoopyShardSpec = hadoopShardSpecLookup.get(bucketStart.getMillis()).get(actualSpec);
  return Optional.of(
      new Bucket(
          hadoopyShardSpec.getShardNum(),
          bucketStart,
          actualSpec.getPartitionNum()
      )
  );
}

public boolean assertExistsInOneSpec(List<ShardSpec> specs, InputRow row)
{
  for (ShardSpec spec : specs) {
    if (spec.isInChunk(row.getTimestampFromEpoch(), row)) {
      return true;
    }
  }
  throw new ISE("None of the partitions matched the row");
}

@Override
public IncrementalIndexAddResult add(InputRow row, Supplier<Committer> committerSupplier) throws IndexSizeExceededException
{
  if (row == null) {
    return Plumber.THROWAWAY;
  }
  Sink sink = getSink(row.getTimestampFromEpoch());
  if (sink == null) {
    return Plumber.THROWAWAY;
  }
  return sink.add(row, false);
}

@Override
public IncrementalIndexAddResult add(InputRow row, Supplier<Committer> committerSupplier) throws IndexSizeExceededException
{
  long messageTimestamp = row.getTimestampFromEpoch();
  final Sink sink = getSink(messageTimestamp);
  metrics.reportMessageMaxTimestamp(messageTimestamp);
  if (sink == null) {
    return Plumber.THROWAWAY;
  }
  final IncrementalIndexAddResult addResult = sink.add(row, false);
  if (config.isReportParseExceptions() && addResult.getParseException() != null) {
    throw addResult.getParseException();
  }
  if (!sink.canAppendRow() || System.currentTimeMillis() > nextFlush) {
    persist(committerSupplier.get());
  }
  return addResult;
}

@Override
public IncrementalIndexAddResult add(InputRow row, Supplier<Committer> committerSupplier) throws IndexSizeExceededException
{
  final SegmentIdWithShardSpec identifier = getSegmentIdentifier(row.getTimestampFromEpoch());
  if (identifier == null) {
    return Plumber.THROWAWAY;
  }
  try {
    final Appenderator.AppenderatorAddResult addResult = appenderator.add(identifier, row, committerSupplier);
    lastCommitterSupplier = committerSupplier;
    return new IncrementalIndexAddResult(addResult.getNumRowsInSegment(), 0, addResult.getParseException());
  }
  catch (SegmentNotWritableException e) {
    // Segment already started handoff
    return Plumber.NOT_WRITABLE;
  }
}

@Test
public void testStringInputRowParserSerdeMultiCharset() throws Exception
{
  Charset[] testCharsets = {
      StandardCharsets.US_ASCII,
      StandardCharsets.ISO_8859_1,
      StandardCharsets.UTF_8,
      StandardCharsets.UTF_16BE,
      StandardCharsets.UTF_16LE,
      StandardCharsets.UTF_16
  };
  for (Charset testCharset : testCharsets) {
    InputRow parsed = testCharsetParseHelper(testCharset);
    Assert.assertEquals(ImmutableList.of("foo", "bar"), parsed.getDimensions());
    Assert.assertEquals(ImmutableList.of("x"), parsed.getDimension("foo"));
    Assert.assertEquals(ImmutableList.of("y"), parsed.getDimension("bar"));
    Assert.assertEquals(DateTimes.of("3000").getMillis(), parsed.getTimestampFromEpoch());
  }
}

@Test
public void testTransformTimeFromOtherFields()
{
  final TransformSpec transformSpec = new TransformSpec(
      null,
      ImmutableList.of(
          new ExpressionTransform("__time", "(a + b) * 3600000", TestExprMacroTable.INSTANCE)
      )
  );
  final InputRowParser<Map<String, Object>> parser = transformSpec.decorate(PARSER);
  final InputRow row = parser.parseBatch(ROW1).get(0);
  Assert.assertNotNull(row);
  Assert.assertEquals(DateTimes.of("1970-01-01T05:00:00Z"), row.getTimestamp());
  Assert.assertEquals(DateTimes.of("1970-01-01T05:00:00Z").getMillis(), row.getTimestampFromEpoch());
}

@Test
public void testTransformTimeFromTime()
{
  final TransformSpec transformSpec = new TransformSpec(
      null,
      ImmutableList.of(
          new ExpressionTransform("__time", "__time + 3600000", TestExprMacroTable.INSTANCE)
      )
  );
  final InputRowParser<Map<String, Object>> parser = transformSpec.decorate(PARSER);
  final InputRow row = parser.parseBatch(ROW1).get(0);
  Assert.assertNotNull(row);
  Assert.assertEquals(DateTimes.of("2000-01-01T01:00:00Z"), row.getTimestamp());
  Assert.assertEquals(DateTimes.of("2000-01-01T01:00:00Z").getMillis(), row.getTimestampFromEpoch());
}

@Test
public void testStringInputRowParserSerde() throws Exception
{
  final StringInputRowParser parser = new StringInputRowParser(
      new JSONParseSpec(
          new TimestampSpec("timestamp", "iso", null),
          new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("foo", "bar")), null, null),
          null,
          null
      ),
      null
  );
  final ByteBufferInputRowParser parser2 = jsonMapper.readValue(
      jsonMapper.writeValueAsBytes(parser),
      ByteBufferInputRowParser.class
  );
  final InputRow parsed = parser2.parseBatch(
      ByteBuffer.wrap(StringUtils.toUtf8("{\"foo\":\"x\",\"bar\":\"y\",\"qux\":\"z\",\"timestamp\":\"2000\"}"))
  ).get(0);
  Assert.assertEquals(ImmutableList.of("foo", "bar"), parsed.getDimensions());
  Assert.assertEquals(ImmutableList.of("x"), parsed.getDimension("foo"));
  Assert.assertEquals(ImmutableList.of("y"), parsed.getDimension("bar"));
  Assert.assertEquals(DateTimes.of("2000").getMillis(), parsed.getTimestampFromEpoch());
}

Assert.assertEquals(ImmutableList.of("x"), parsed.getDimension("foo"));
Assert.assertEquals(ImmutableList.of("y"), parsed.getDimension("bar"));
Assert.assertEquals(1000, parsed.getTimestampFromEpoch());

@Test
public void testTransformOverwriteField()
{
  // Transforms are allowed to overwrite fields, and to refer to the fields they overwrite; double-check this.
  final TransformSpec transformSpec = new TransformSpec(
      null,
      ImmutableList.of(
          new ExpressionTransform("x", "concat(x,y)", TestExprMacroTable.INSTANCE)
      )
  );
  final InputRowParser<Map<String, Object>> parser = transformSpec.decorate(PARSER);
  final InputRow row = parser.parseBatch(ROW1).get(0);
  Assert.assertNotNull(row);
  Assert.assertEquals(DateTimes.of("2000-01-01").getMillis(), row.getTimestampFromEpoch());
  Assert.assertEquals(DateTimes.of("2000-01-01"), row.getTimestamp());
  Assert.assertEquals(ImmutableList.of("f", "x", "y"), row.getDimensions());
  Assert.assertEquals(ImmutableList.of("foobar"), row.getDimension("x"));
  Assert.assertEquals(3.0, row.getMetric("b").doubleValue(), 0);
  Assert.assertNull(row.getRaw("f"));
}

@Test
public void testTransforms()
{
  final TransformSpec transformSpec = new TransformSpec(
      null,
      ImmutableList.of(
          new ExpressionTransform("f", "concat(x,y)", TestExprMacroTable.INSTANCE),
          new ExpressionTransform("g", "a + b", TestExprMacroTable.INSTANCE),
          new ExpressionTransform("h", "concat(f,g)", TestExprMacroTable.INSTANCE)
      )
  );
  final InputRowParser<Map<String, Object>> parser = transformSpec.decorate(PARSER);
  final InputRow row = parser.parseBatch(ROW1).get(0);
  Assert.assertNotNull(row);
  Assert.assertEquals(DateTimes.of("2000-01-01").getMillis(), row.getTimestampFromEpoch());
  Assert.assertEquals(DateTimes.of("2000-01-01"), row.getTimestamp());
  Assert.assertEquals(ImmutableList.of("f", "x", "y"), row.getDimensions());
  Assert.assertEquals(ImmutableList.of("foo"), row.getDimension("x"));
  Assert.assertEquals(3.0, row.getMetric("b").doubleValue(), 0);
  Assert.assertEquals("foobar", row.getRaw("f"));
  Assert.assertEquals(ImmutableList.of("foobar"), row.getDimension("f"));
  Assert.assertEquals(ImmutableList.of("5.0"), row.getDimension("g"));
  Assert.assertEquals(ImmutableList.of(), row.getDimension("h"));
  Assert.assertEquals(5L, row.getMetric("g").longValue());
}

@Test
public void testCombiningFirehose() throws IOException
{
  List<InputRow> list1 = Arrays.asList(makeRow(1, 1), makeRow(2, 2));
  List<InputRow> list2 = Arrays.asList(makeRow(3, 3), makeRow(4, 4), makeRow(5, 5));
  FirehoseFactory combiningFactory = new CombiningFirehoseFactory(
      Arrays.asList(
          new ListFirehoseFactory(list1),
          new ListFirehoseFactory(list2)
      )
  );
  final Firehose firehose = combiningFactory.connect(null, null);
  for (int i = 1; i < 6; i++) {
    Assert.assertTrue(firehose.hasMore());
    final InputRow inputRow = firehose.nextRow();
    Assert.assertEquals(i, inputRow.getTimestampFromEpoch());
    Assert.assertEquals(i, inputRow.getMetric("test").floatValue(), 0);
  }
  Assert.assertFalse(firehose.hasMore());
}