/**
 * Jackson-deserializable constructor.
 *
 * <p>Delegates the common fields to the parent task and falls back to a default
 * {@link IndexSpec} when none was supplied in the JSON.
 */
@JsonCreator
public AppendTask(
    @JsonProperty("id") String id,
    @JsonProperty("dataSource") String dataSource,
    @JsonProperty("segments") List<DataSegment> segments,
    @JsonProperty("aggregations") List<AggregatorFactory> aggregators,
    @JsonProperty("indexSpec") IndexSpec indexSpec,
    // This parameter is left for compatibility when reading existing JSONs, to be removed in Druid 0.12.
    @JsonProperty("buildV9Directly") Boolean buildV9Directly,
    @JsonProperty("segmentWriteOutMediumFactory") @Nullable SegmentWriteOutMediumFactory segmentWriteOutMediumFactory,
    @JsonProperty("context") Map<String, Object> context
)
{
  super(id, dataSource, segments, segmentWriteOutMediumFactory, context);
  this.aggregators = aggregators;
  if (indexSpec == null) {
    this.indexSpec = new IndexSpec();
  } else {
    this.indexSpec = indexSpec;
  }
}
/**
 * Deserializes an IndexSpec with every field set explicitly, checks each getter,
 * then verifies a full serialize/deserialize round trip yields an equal instance.
 */
@Test
public void testSerde() throws Exception
{
  final ObjectMapper mapper = new DefaultObjectMapper();
  final String json =
      "{ \"bitmap\" : { \"type\" : \"roaring\" }, \"dimensionCompression\" : \"lz4\", \"metricCompression\" : \"lzf\""
      + ", \"longEncoding\" : \"auto\" }";

  final IndexSpec deserialized = mapper.readValue(json, IndexSpec.class);
  Assert.assertEquals(new RoaringBitmapSerdeFactory(null), deserialized.getBitmapSerdeFactory());
  Assert.assertEquals(CompressionStrategy.LZ4, deserialized.getDimensionCompression());
  Assert.assertEquals(CompressionStrategy.LZF, deserialized.getMetricCompression());
  Assert.assertEquals(CompressionFactory.LongEncodingStrategy.AUTO, deserialized.getLongEncoding());

  // Round trip: writing the spec back out and re-reading it must produce an equal object.
  Assert.assertEquals(deserialized, mapper.readValue(mapper.writeValueAsBytes(deserialized), IndexSpec.class));
}
static GenericColumnSerializer createLongColumnSerializer( SegmentWriteOutMedium segmentWriteOutMedium, String columnName, IndexSpec indexSpec ) { // If using default values for null use LongColumnSerializer to allow rollback to previous versions. if (NullHandling.replaceWithDefault()) { return LongColumnSerializer.create( segmentWriteOutMedium, columnName, indexSpec.getMetricCompression(), indexSpec.getLongEncoding() ); } else { return LongColumnSerializerV2.create( segmentWriteOutMedium, columnName, indexSpec.getMetricCompression(), indexSpec.getLongEncoding(), indexSpec.getBitmapSerdeFactory() ); } }
static GenericColumnSerializer createFloatColumnSerializer( SegmentWriteOutMedium segmentWriteOutMedium, String columnName, IndexSpec indexSpec ) { // If using default values for null use FloatColumnSerializer to allow rollback to previous versions. if (NullHandling.replaceWithDefault()) { return FloatColumnSerializer.create( segmentWriteOutMedium, columnName, indexSpec.getMetricCompression() ); } else { return FloatColumnSerializerV2.create( segmentWriteOutMedium, columnName, indexSpec.getMetricCompression(), indexSpec.getBitmapSerdeFactory() ); } }
/**
 * A default-constructed IndexSpec must use LZ4 compression for both dimensions and
 * metrics, and the LONGS strategy for long encoding.
 */
@Test
public void testDefaults()
{
  final IndexSpec defaults = new IndexSpec();
  Assert.assertEquals(CompressionStrategy.LZ4, defaults.getDimensionCompression());
  Assert.assertEquals(CompressionStrategy.LZ4, defaults.getMetricCompression());
  Assert.assertEquals(CompressionFactory.LongEncodingStrategy.LONGS, defaults.getLongEncoding());
}
}
// NOTE(review): fragment excerpted from a larger merge test — syntactically incomplete here
// (the `new IndexSpec(...)` argument list is cut off before its closing paren and the trailing
// assertDimCompression calls belong to later statements). The intent visible in this excerpt:
// build `newSpec` by flipping each of indexSpec's choices (LZ4 <-> LZF for dimension and metric
// compression, LONGS <-> AUTO for long encoding), then assert the original index kept
// indexSpec's dimension compression while the merged index uses newSpec's. Left byte-identical;
// restyling a truncated expression would not be safe.
toPersist1.getInterval(), toPersist1, indexSpec.getBitmapSerdeFactory() .getBitmapFactory() ); Assert.assertEquals(3, index1.getColumnNames().size()); IndexSpec newSpec = new IndexSpec( indexSpec.getBitmapSerdeFactory(), CompressionStrategy.LZ4.equals(indexSpec.getDimensionCompression()) ? CompressionStrategy.LZF : CompressionStrategy.LZ4, CompressionStrategy.LZ4.equals(indexSpec.getDimensionCompression()) ? CompressionStrategy.LZF : CompressionStrategy.LZ4, CompressionFactory.LongEncodingStrategy.LONGS.equals(indexSpec.getLongEncoding()) ? CompressionFactory.LongEncodingStrategy.AUTO : CompressionFactory.LongEncodingStrategy.LONGS assertDimCompression(index1, indexSpec.getDimensionCompression()); assertDimCompression(merged, newSpec.getDimensionCompression());
@Override public ColumnDescriptor makeColumnDescriptor() { // Now write everything boolean hasMultiValue = capabilities.hasMultipleValues(); final CompressionStrategy compressionStrategy = indexSpec.getDimensionCompression(); final BitmapSerdeFactory bitmapSerdeFactory = indexSpec.getBitmapSerdeFactory(); final ColumnDescriptor.Builder builder = ColumnDescriptor.builder(); builder.setValueType(ValueType.STRING); builder.setHasMultipleValues(hasMultiValue); final DictionaryEncodedColumnPartSerde.SerializerBuilder partBuilder = DictionaryEncodedColumnPartSerde .serializerBuilder() .withDictionary(dictionaryWriter) .withValue( encodedValueSerializer, hasMultiValue, compressionStrategy != CompressionStrategy.UNCOMPRESSED ) .withBitmapSerdeFactory(bitmapSerdeFactory) .withBitmapIndex(bitmapWriter) .withSpatialIndex(spatialWriter) .withByteOrder(IndexIO.BYTE_ORDER); final ColumnDescriptor serdeficator = builder .addSerde(partBuilder.build()) .build(); //log.info("Completed dimension column[%s] in %,d millis.", dimensionName, System.currentTimeMillis() - dimStartTime); return serdeficator; }
/**
 * Merger for a single STRING dimension during V9 segment creation.
 *
 * <p>Stores the merge-time collaborators and initializes an empty mutable bitmap that
 * will collect the ids of rows whose value for this dimension is null.
 */
public StringDimensionMergerV9(
    String dimensionName,
    IndexSpec indexSpec,
    SegmentWriteOutMedium segmentWriteOutMedium,
    ColumnCapabilities capabilities,
    ProgressIndicator progress,
    Closer closer
)
{
  this.dimensionName = dimensionName;
  this.indexSpec = indexSpec;
  this.segmentWriteOutMedium = segmentWriteOutMedium;
  this.capabilities = capabilities;
  this.progress = progress;
  this.closer = closer;
  nullRowsBitmap = indexSpec.getBitmapSerdeFactory().getBitmapFactory().makeEmptyMutableBitmap();
}
/**
 * Deserializing a spec with only dimensionCompression set to "uncompressed" must be
 * honored, and the spec must survive a serialize/deserialize round trip.
 */
@Test
public void testSerdeUncompressed() throws Exception
{
  final ObjectMapper mapper = new DefaultObjectMapper();
  final String json = "{ \"dimensionCompression\" : \"uncompressed\" }";

  final IndexSpec deserialized = mapper.readValue(json, IndexSpec.class);
  Assert.assertEquals(CompressionStrategy.UNCOMPRESSED, deserialized.getDimensionCompression());
  Assert.assertEquals(deserialized, mapper.readValue(mapper.writeValueAsBytes(deserialized), IndexSpec.class));
}
/**
 * Value equality over all tuning fields. Two configs are equal iff every field matches;
 * the comparisons short-circuit in the same order as the original field-by-field checks.
 */
@Override
public boolean equals(Object o)
{
  if (this == o) {
    return true;
  }
  if (o == null || getClass() != o.getClass()) {
    return false;
  }
  final ClientCompactQueryTuningConfig that = (ClientCompactQueryTuningConfig) o;
  return maxRowsInMemory == that.maxRowsInMemory
         && maxTotalRows == that.maxTotalRows
         && indexSpec.equals(that.indexSpec)
         && maxPendingPersists == that.maxPendingPersists
         && publishTimeout == that.publishTimeout;
}
// NOTE(review): fragment excerpted from a larger conversion test — syntactically incomplete
// here (the `new IndexSpec(...)` argument list is cut off before its closing paren). Visible
// intent: build `newSpec` by flipping each of indexSpec's choices (LZ4 <-> LZF compression,
// LONGS <-> AUTO long encoding), then assert the original index kept indexSpec's dimension
// compression while the converted index uses newSpec's. Left byte-identical; restyling a
// truncated expression would not be safe.
toPersist1.getInterval(), toPersist1, indexSpec.getBitmapSerdeFactory() .getBitmapFactory() ); IndexSpec newSpec = new IndexSpec( indexSpec.getBitmapSerdeFactory(), CompressionStrategy.LZ4.equals(indexSpec.getDimensionCompression()) ? CompressionStrategy.LZF : CompressionStrategy.LZ4, CompressionStrategy.LZ4.equals(indexSpec.getDimensionCompression()) ? CompressionStrategy.LZF : CompressionStrategy.LZ4, CompressionFactory.LongEncodingStrategy.LONGS.equals(indexSpec.getLongEncoding()) ? CompressionFactory.LongEncodingStrategy.AUTO : CompressionFactory.LongEncodingStrategy.LONGS assertDimCompression(index1, indexSpec.getDimensionCompression()); assertDimCompression(converted, newSpec.getDimensionCompression());
// NOTE(review): fragment excerpted from a larger merge test — starts mid-argument-list, so it
// is not compilable on its own. Visible intent: after merging, both the original and the merged
// index should carry indexSpec's dimension compression. Left byte-identical.
toPersist1.getInterval(), toPersist1, indexSpec.getBitmapSerdeFactory() .getBitmapFactory() ); assertDimCompression(index1, indexSpec.getDimensionCompression()); assertDimCompression(merged, indexSpec.getDimensionCompression());
static GenericColumnSerializer createLongColumnSerializer( SegmentWriteOutMedium segmentWriteOutMedium, String columnName, IndexSpec indexSpec ) { // If using default values for null use LongColumnSerializer to allow rollback to previous versions. if (NullHandling.replaceWithDefault()) { return LongColumnSerializer.create( segmentWriteOutMedium, columnName, indexSpec.getMetricCompression(), indexSpec.getLongEncoding() ); } else { return LongColumnSerializerV2.create( segmentWriteOutMedium, columnName, indexSpec.getMetricCompression(), indexSpec.getLongEncoding(), indexSpec.getBitmapSerdeFactory() ); } }
// NOTE(review): fragment excerpted from index persistence code — the final statement is cut
// off (no terminating semicolon; the size sum continues beyond this excerpt). Visible intent:
// build the dimension-name GenericIndexed, serialize the bitmap serde factory's type for the
// segment metadata, and begin totalling the serialized byte size. Left byte-identical.
GenericIndexed<String> dims = GenericIndexed.fromIterable(finalDimensions, GenericIndexed.STRING_STRATEGY); final String bitmapSerdeFactoryType = mapper.writeValueAsString(indexSpec.getBitmapSerdeFactory()); final long numBytes = cols.getSerializedSize() + dims.getSerializedSize()
static GenericColumnSerializer createDoubleColumnSerializer( SegmentWriteOutMedium segmentWriteOutMedium, String columnName, IndexSpec indexSpec ) { // If using default values for null use DoubleColumnSerializer to allow rollback to previous versions. if (NullHandling.replaceWithDefault()) { return DoubleColumnSerializer.create( segmentWriteOutMedium, columnName, indexSpec.getMetricCompression() ); } else { return DoubleColumnSerializerV2.create( segmentWriteOutMedium, columnName, indexSpec.getMetricCompression(), indexSpec.getBitmapSerdeFactory() ); } }
/**
 * Opens the serializer for this dimension's dictionary-encoded values, picking the
 * single- vs multi-value and compressed vs VSize variant from the column capabilities
 * and the IndexSpec's dimension compression.
 *
 * @throws IOException if the chosen serializer fails to open
 */
protected void setupEncodedValueWriter() throws IOException
{
  final CompressionStrategy compression = indexSpec.getDimensionCompression();
  final String filenameBase = StringUtils.format("%s.forward_dim", dimensionName);
  final boolean uncompressed = compression == CompressionStrategy.UNCOMPRESSED;

  if (capabilities.hasMultipleValues()) {
    encodedValueSerializer = uncompressed
        ? new VSizeColumnarMultiIntsSerializer(segmentWriteOutMedium, cardinality)
        : V3CompressedVSizeColumnarMultiIntsSerializer.create(
            segmentWriteOutMedium,
            filenameBase,
            cardinality,
            compression
        );
  } else {
    encodedValueSerializer = uncompressed
        ? new VSizeColumnarIntsSerializer(segmentWriteOutMedium, cardinality)
        : CompressedVSizeColumnarIntsSerializer.create(
            segmentWriteOutMedium,
            filenameBase,
            cardinality,
            compression
        );
  }
  encodedValueSerializer.open();
}
/**
 * Jackson-deserializable constructor.
 *
 * <p>Aggregations are mandatory; rollup defaults to {@code true} and indexSpec to a
 * default {@link IndexSpec} when absent from the JSON.
 */
@JsonCreator
public MergeTask(
    @JsonProperty("id") String id,
    @JsonProperty("dataSource") String dataSource,
    @JsonProperty("segments") List<DataSegment> segments,
    @JsonProperty("aggregations") List<AggregatorFactory> aggregators,
    @JsonProperty("rollup") Boolean rollup,
    @JsonProperty("indexSpec") IndexSpec indexSpec,
    // This parameter is left for compatibility when reading existing JSONs, to be removed in Druid 0.12.
    @JsonProperty("buildV9Directly") Boolean buildV9Directly,
    @JsonProperty("segmentWriteOutMediumFactory") @Nullable SegmentWriteOutMediumFactory segmentWriteOutMediumFactory,
    @JsonProperty("context") Map<String, Object> context
)
{
  super(id, dataSource, segments, segmentWriteOutMediumFactory, context);
  this.aggregators = Preconditions.checkNotNull(aggregators, "null aggregations");
  if (rollup == null) {
    this.rollup = Boolean.TRUE;
  } else {
    this.rollup = rollup;
  }
  this.indexSpec = indexSpec != null ? indexSpec : new IndexSpec();
}
// NOTE(review): fragment excerpted from a larger merge test — starts mid-argument-list, so it
// is not compilable on its own. Visible intent: after merging, both the original and the merged
// index should carry indexSpec's dimension compression. Left byte-identical.
toPersist1.getInterval(), toPersist1, indexSpec.getBitmapSerdeFactory() .getBitmapFactory() ); assertDimCompression(index1, indexSpec.getDimensionCompression()); assertDimCompression(merged, indexSpec.getDimensionCompression());
/**
 * Loads both incremental indexes from their event lists, assigning monotonically
 * increasing timestamps starting at 0 within each index, then wraps each index in an
 * IncrementalIndexAdapter backed by INDEX_SPEC's bitmap factory.
 *
 * @throws IndexSizeExceededException if either incremental index refuses a row
 */
@Before
public void setUp() throws IndexSizeExceededException
{
  long ts = 0L;
  for (Map<String, Object> row : events1) {
    incrementalIndex1.add(new MapBasedInputRow(ts++, Lists.newArrayList(row.keySet()), row));
  }
  ts = 0L;
  for (Map<String, Object> row : events2) {
    incrementalIndex2.add(new MapBasedInputRow(ts++, Lists.newArrayList(row.keySet()), row));
  }
  adapter2 = new IncrementalIndexAdapter(
      DEFAULT_INTERVAL,
      incrementalIndex2,
      INDEX_SPEC.getBitmapSerdeFactory().getBitmapFactory()
  );
  adapter1 = new IncrementalIndexAdapter(
      DEFAULT_INTERVAL,
      incrementalIndex1,
      INDEX_SPEC.getBitmapSerdeFactory().getBitmapFactory()
  );
}
static GenericColumnSerializer createDoubleColumnSerializer( SegmentWriteOutMedium segmentWriteOutMedium, String columnName, IndexSpec indexSpec ) { // If using default values for null use DoubleColumnSerializer to allow rollback to previous versions. if (NullHandling.replaceWithDefault()) { return DoubleColumnSerializer.create( segmentWriteOutMedium, columnName, indexSpec.getMetricCompression() ); } else { return DoubleColumnSerializerV2.create( segmentWriteOutMedium, columnName, indexSpec.getMetricCompression(), indexSpec.getBitmapSerdeFactory() ); } }