/**
 * Returns the name of the time column, or {@code null} when no time field spec is configured.
 */
@JsonIgnore
public String getTimeColumnName() {
  if (_timeFieldSpec == null) {
    return null;
  }
  return _timeFieldSpec.getName();
}
/**
 * Returns the outgoing time column name, which is identical to this field's name.
 */
@JsonIgnore
@Nonnull
public String getOutgoingTimeColumnName() {
  String outgoingColumnName = getName();
  return outgoingColumnName;
}
/**
 * Returns the outgoing time column name.
 *
 * @deprecated Misspelled method name ("OutGoing"); use {@link #getOutgoingTimeColumnName()}
 *     instead. Kept only for backward compatibility with existing callers.
 */
@Deprecated @JsonIgnore @Nonnull public String getOutGoingTimeColumnName() { return getName(); }
/**
 * Generates {@code NUM_ROWS} rows of random test data: random alphabetic dimensions D1/D2,
 * non-negative numeric metrics M1/M2, and a time column derived from the current wall-clock
 * time via the spec's incoming granularity.
 *
 * @param timeFieldSpec time field spec whose incoming granularity converts millis to the stored value
 * @return the generated rows
 */
private List<GenericRow> createTestDataWithTimespec(TimeFieldSpec timeFieldSpec) {
  List<GenericRow> rows = new ArrayList<>(NUM_ROWS);
  Random random = new Random();
  for (int i = 0; i < NUM_ROWS; i++) {
    Map<String, Object> fields = new HashMap<>();
    fields.put(D1, RandomStringUtils.randomAlphabetic(2));
    fields.put(D2, RandomStringUtils.randomAlphabetic(5));
    // Math.abs(random.nextInt()) is still negative for Integer.MIN_VALUE; masking the sign bit
    // always yields a non-negative int.
    fields.put(M1, random.nextInt() & Integer.MAX_VALUE);
    // nextFloat() is already in [0, 1), so no abs() is needed.
    fields.put(M2, random.nextFloat());
    long timestamp = System.currentTimeMillis();
    Object timeColumnValue = timeFieldSpec.getIncomingGranularitySpec().fromMillis(timestamp);
    fields.put(timeFieldSpec.getName(), timeColumnValue);
    GenericRow row = new GenericRow();
    row.init(fields);
    rows.add(row);
  }
  return rows;
}
// NOTE(review): fragment of a larger loop — the enclosing for/while and the surrounding
// branches are outside this view; documenting only what is visible here.
continue;
// Resolve the time column name and its outgoing time unit for the check below.
String timeColumnName = timeFieldSpec.getName();
TimeUnit timeUnit = timeFieldSpec.getOutgoingGranularitySpec().getTimeType();
// Presumably non-DAYS granularities take a different path — body of this branch not visible; verify.
if (timeUnit != TimeUnit.DAYS) {
/** * Returns a new schema based on the original one. The new schema removes columns as needed (for ex, virtual cols) * and adds the new timespec to the schema. */ @VisibleForTesting public Schema getUpdatedSchema(Schema original) { TimeFieldSpec tfs = original.getTimeFieldSpec(); // Use outgoing granularity for creating segment TimeGranularitySpec outgoing = tfs.getOutgoingGranularitySpec(); TimeFieldSpec newTimeSpec = new TimeFieldSpec(outgoing); Schema newSchema = new Schema(); newSchema.addField(newTimeSpec); for (String col : original.getPhysicalColumnNames()) { if (!col.equals(tfs.getName())) { newSchema.addField(original.getFieldSpecFor(col)); } } return newSchema; } }
/**
 * Reads the next row from the baseRecordReader, and adds a dateTimeFieldSpec column to it
 * {@inheritDoc}
 * @see org.apache.pinot.core.data.readers.RecordReader#next(org.apache.pinot.core.data.GenericRow)
 */
@Override
public GenericRow next(GenericRow reuse)
    throws IOException {
  reuse = _baseRecordReader.next(reuse);
  // Assumes the base reader stores the time column as a Long — TODO confirm upstream readers
  // guarantee this; a different boxed type here would throw ClassCastException.
  Long timeColumnValue = (Long) reuse.getValue(_timeFieldSpec.getName());
  // Derive the new date-time column value from the existing time column and attach it to the row.
  Object dateTimeColumnValue = convertTimeFieldToDateTimeFieldSpec(timeColumnValue);
  reuse.putField(_dateTimeFieldSpec.getName(), dateTimeColumnValue);
  return reuse;
}
/**
 * Generates {@code NUM_ROWS} rows of random test data containing both a time column (via the
 * time spec's incoming granularity) and a date-time column (via the date-time spec's format),
 * both derived from the same wall-clock timestamp.
 *
 * @param timeFieldSpec time field spec whose incoming granularity converts millis to the stored value
 * @param dateTimeFieldSpec date-time field spec whose format converts millis to the stored value
 * @return the generated rows
 */
private List<GenericRow> createTestDataWithTimespec(TimeFieldSpec timeFieldSpec,
    DateTimeFieldSpec dateTimeFieldSpec) {
  List<GenericRow> rows = new ArrayList<>(NUM_ROWS);
  Random random = new Random();
  // The format spec is loop-invariant; construct it once instead of per row.
  DateTimeFormatSpec toFormat = new DateTimeFormatSpec(dateTimeFieldSpec.getFormat());
  for (int i = 0; i < NUM_ROWS; i++) {
    Map<String, Object> fields = new HashMap<>();
    fields.put(D1, RandomStringUtils.randomAlphabetic(2));
    fields.put(D2, RandomStringUtils.randomAlphabetic(5));
    // Math.abs(random.nextInt()) is still negative for Integer.MIN_VALUE; masking the sign bit
    // always yields a non-negative int.
    fields.put(M1, random.nextInt() & Integer.MAX_VALUE);
    // nextFloat() is already in [0, 1), so no abs() is needed.
    fields.put(M2, random.nextFloat());
    long timestamp = System.currentTimeMillis();
    Object timeColumnValue = timeFieldSpec.getIncomingGranularitySpec().fromMillis(timestamp);
    fields.put(timeFieldSpec.getName(), timeColumnValue);
    Object dateTimeColumnValue = toFormat.fromMillisToFormat(timestamp, Object.class);
    fields.put(dateTimeFieldSpec.getName(), dateTimeColumnValue);
    GenericRow row = new GenericRow();
    row.init(fields);
    rows.add(row);
  }
  return rows;
}
/**
 * Verifies that wrapping a base record reader with the backfill reader (1) extends the schema
 * with the date-time field while keeping the original time field, and (2) populates every row's
 * new date-time column with the value obtained by converting the time column through the
 * date-time spec's format.
 */
@Test(dataProvider = "backfillRecordReaderDataProvider")
public void testBackfillDateTimeRecordReader(RecordReader baseRecordReader,
    TimeFieldSpec timeFieldSpec, DateTimeFieldSpec dateTimeFieldSpec, Schema schemaExpected)
    throws Exception {
  // NOTE(review): "original"/"backup" dirs appear unused by the schema/row checks below — the
  // reader wrapper presumably only needs them for segment backfill; confirm no files are written.
  BackfillDateTimeColumn backfillDateTimeColumn =
      new BackfillDateTimeColumn(new File("original"), new File("backup"), timeFieldSpec,
          dateTimeFieldSpec);
  // try-with-resources ensures the wrapper (and hence the base reader) is closed.
  try (BackfillDateTimeRecordReader wrapperReader = backfillDateTimeColumn
      .getBackfillDateTimeRecordReader(baseRecordReader)) {
    // check that schema has new column
    Schema schemaActual = wrapperReader.getSchema();
    Assert.assertEquals(schemaActual, schemaExpected);
    DateTimeFieldSpec dateTimeFieldSpecActual =
        schemaActual.getDateTimeSpec(dateTimeFieldSpec.getName());
    TimeFieldSpec timeFieldSpecActual = schemaActual.getTimeFieldSpec();
    Assert.assertEquals(dateTimeFieldSpecActual, dateTimeFieldSpec);
    Assert.assertEquals(timeFieldSpecActual, timeFieldSpec);
    while (wrapperReader.hasNext()) {
      GenericRow next = wrapperReader.next();
      // check that new datetime column is generated
      Object dateTimeColumnValueActual = next.getValue(dateTimeFieldSpec.getName());
      Assert.assertNotNull(dateTimeColumnValueActual);
      Object timeColumnValueActual = next.getValue(timeFieldSpec.getName());
      Assert.assertNotNull(timeColumnValueActual);
      // check that datetime column has correct value as per its format
      Long timeColumnValueMS = timeFieldSpec.getIncomingGranularitySpec().toMillis(timeColumnValueActual);
      DateTimeFormatSpec toFormat = new DateTimeFormatSpec(dateTimeFieldSpec.getFormat());
      Object dateTimeColumnValueExpected = toFormat.fromMillisToFormat(timeColumnValueMS, Object.class);
      Assert.assertEquals(dateTimeColumnValueActual, dateTimeColumnValueExpected);
    }
  }
}
// Verify the time field spec: a single-valued LONG TIME column named "time".
// (TestNG assertEquals order is (actual, expected).)
Assert.assertNotNull(timeFieldSpec);
Assert.assertEquals(timeFieldSpec.getFieldType(), FieldSpec.FieldType.TIME);
Assert.assertEquals(timeFieldSpec.getName(), "time");
Assert.assertEquals(timeFieldSpec.getDataType(), FieldSpec.DataType.LONG);
// assertTrue is the idiomatic form of assertEquals(x, true).
Assert.assertTrue(timeFieldSpec.isSingleValueField());