@Override
public File persist(
    final IncrementalIndex index,
    final Interval dataInterval,
    File outDir,
    IndexSpec indexSpec,
    @Nullable SegmentWriteOutMediumFactory segmentWriteOutMediumFactory
) throws IOException
{
  // Convenience overload: delegates to the progress-aware persist() with a
  // default BaseProgressIndicator, leaving all other arguments untouched.
  return persist(index, dataInterval, outDir, indexSpec, new BaseProgressIndicator(), segmentWriteOutMediumFactory);
}
// NOTE(review): not valid Java as-is — this line interleaves the tail of two
// overload signatures with the start of their delegating bodies
// (mergeQueryableIndex(...) and merge(IndexMerger.toIndexableAdapters(indexes), rollup, ...)).
// Looks like an extraction artifact; the full overloads must be recovered from
// the original file before this can compile. Left byte-identical here.
) throws IOException return mergeQueryableIndex( indexes, rollup, ) throws IOException return merge( IndexMerger.toIndexableAdapters(indexes), rollup,
/**
 * Builds an {@code IndexMergerV9} wired with the shared test JSON mapper and a
 * freshly obtained test {@code IndexIO}, using the supplied write-out medium factory.
 */
public static IndexMergerV9 getTestIndexMergerV9(SegmentWriteOutMediumFactory segmentWriteOutMediumFactory)
{
  final IndexIO testIndexIO = getTestIndexIO();
  return new IndexMergerV9(JSON_MAPPER, testIndexIO, segmentWriteOutMediumFactory);
}
@Override
public File convert(
    final File inDir,
    final File outDir,
    final IndexSpec indexSpec,
    final ProgressIndicator progress,
    final @Nullable SegmentWriteOutMediumFactory segmentWriteOutMediumFactory
) throws IOException
{
  // Load the existing segment, wrap it as an IndexableAdapter, and rewrite it
  // into outDir under the requested IndexSpec. Dimensions and metrics are
  // carried over unchanged; no aggregators are applied (pure conversion).
  try (QueryableIndex index = indexIO.loadIndex(inDir)) {
    final IndexableAdapter adapter = new QueryableIndexIndexableAdapter(index);
    return makeIndexFiles(
        ImmutableList.of(adapter),
        null,                       // no metric aggregators: this is not a merge
        outDir,
        progress,
        Lists.newArrayList(adapter.getDimensionNames()),
        Lists.newArrayList(adapter.getMetricNames()),
        Iterables::getOnlyElement,  // exactly one adapter, so "merging" is identity
        false,
        indexSpec,
        segmentWriteOutMediumFactory
    );
  }
}
// NOTE(review): truncated — start of a delegation to makeIndexFiles(indexes,
// sortedMetricAggs, ...); the remaining arguments and enclosing method are not
// visible in this extract. Left byte-identical.
return makeIndexFiles( indexes, sortedMetricAggs,
// NOTE(review): truncated test fragment — persists three incremental indexes
// to per-index directories, then begins a mergeQueryableIndex over the loaded
// segments; the call is cut off after the first loadIndex. Also note
// mkdirs()'s boolean result is ignored. Left byte-identical.
mergedFile.mkdirs(); indexMergerV9.persist(first, DATA_INTERVAL, firstFile, indexSpec, null); indexMergerV9.persist(second, DATA_INTERVAL, secondFile, indexSpec, null); indexMergerV9.persist(third, DATA_INTERVAL, thirdFile, indexSpec, null); indexMergerV9.mergeQueryableIndex( Arrays.asList( indexIO.loadIndex(firstFile),
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void mergeV9(Blackhole blackhole) throws Exception
{
  // Reserve a unique temp path, then turn it into a directory for the merged segment.
  // Check the results: createTempFile guarantees the file exists, so a failed
  // delete()/mkdirs() would otherwise surface later as a confusing merge error.
  File tmpFile = File.createTempFile("IndexMergeBenchmark-MERGEDFILE-V9-" + System.currentTimeMillis(), ".TEMPFILE");
  if (!tmpFile.delete() || !tmpFile.mkdirs()) {
    throw new IOException("Could not create temp merge directory: " + tmpFile.getAbsolutePath());
  }
  try {
    log.info(tmpFile.getAbsolutePath() + " isFile: " + tmpFile.isFile() + " isDir:" + tmpFile.isDirectory());

    File mergedFile = INDEX_MERGER_V9.mergeQueryableIndex(
        indexesToMerge,
        rollup,
        schemaInfo.getAggsArray(),
        tmpFile,
        new IndexSpec(),
        null
    );

    // Hand the result to the blackhole so the JIT cannot elide the merge.
    blackhole.consume(mergedFile);
  } finally {
    // BUG FIX: the old finally used tmpFile.delete(), which cannot remove a
    // non-empty directory — the merged segment files leaked on every benchmark
    // invocation. Delete the directory tree recursively instead.
    FileUtils.deleteDirectory(tmpFile);
  }
}
}
// NOTE(review): truncated test fragment — begins a try block that merges a
// list of QueryableIndexes (wrapped as adapters, rollup=false) via the test
// IndexMergerV9 and reloads the result; the argument list and the rest of the
// try are cut off. Left byte-identical.
try { final QueryableIndex merged = TestHelper.getTestIndexIO().loadIndex( TestHelper.getTestIndexMergerV9(OffHeapMemorySegmentWriteOutMediumFactory.instance()).merge( indexes.stream().map(QueryableIndexIndexableAdapter::new).collect(Collectors.toList()), false,
// NOTE(review): fragment from inside a switch over metric value types in a
// metric-serialization loop; the enclosing switch header and method are not
// visible here. The COMPLEX case throwing ISE("Unknown type[%s]") looks like
// an extraction artifact — upstream this throw belongs to the default branch
// and COMPLEX is handled via its registered serde; confirm against the full
// file before changing. Left byte-identical.
case LONG: builder.setValueType(ValueType.LONG); builder.addSerde(createLongColumnPartSerde(writer, indexSpec)); break; case FLOAT: builder.setValueType(ValueType.FLOAT); builder.addSerde(createFloatColumnPartSerde(writer, indexSpec)); break; case DOUBLE: builder.setValueType(ValueType.DOUBLE); builder.addSerde(createDoubleColumnPartSerde(writer, indexSpec)); break; case COMPLEX: throw new ISE("Unknown type[%s]", type); makeColumn(v9Smoosher, metric, builder.build()); log.info("Completed metric column[%s] in %,d millis.", metric, System.currentTimeMillis() - metricStartTime);
@Override
public ColumnDescriptor makeColumnDescriptor()
{
  // Describe this column as a DOUBLE backed by the standard double-column serde.
  return ColumnDescriptor.builder()
                         .setValueType(ValueType.DOUBLE)
                         .addSerde(IndexMergerV9.createDoubleColumnPartSerde(serializer, indexSpec))
                         .build();
}
}
// NOTE(review): truncated call — start of a mergeQueryableIndex invocation
// whose rollup flag comes from the ingestion schema's granularity spec; the
// remaining arguments are not visible. Left byte-identical.
indexMergerV9.mergeQueryableIndex( indexes, schema.getGranularitySpec().isRollup(),
// NOTE(review): truncated — start of a delegation that wraps a single
// IncrementalIndex in an IncrementalIndexAdapter and merges the singleton list
// (the persist path); constructor arguments and the rest of the call are cut
// off. Left byte-identical.
return merge( Collections.singletonList( new IncrementalIndexAdapter(
// NOTE(review): duplicate of the metric-type switch fragment above in this
// extract — mid-switch, enclosing method not visible. As there, the COMPLEX
// case throwing ISE("Unknown type[%s]") is presumably the default branch
// misattached by extraction; verify against the original file. Left
// byte-identical.
case LONG: builder.setValueType(ValueType.LONG); builder.addSerde(createLongColumnPartSerde(writer, indexSpec)); break; case FLOAT: builder.setValueType(ValueType.FLOAT); builder.addSerde(createFloatColumnPartSerde(writer, indexSpec)); break; case DOUBLE: builder.setValueType(ValueType.DOUBLE); builder.addSerde(createDoubleColumnPartSerde(writer, indexSpec)); break; case COMPLEX: throw new ISE("Unknown type[%s]", type); makeColumn(v9Smoosher, metric, builder.build()); log.info("Completed metric column[%s] in %,d millis.", metric, System.currentTimeMillis() - metricStartTime);
// NOTE(review): truncated — start of a delegation to makeIndexFiles(indexes,
// sortedMetricAggs, ...); remaining arguments and enclosing method not visible
// in this extract. Left byte-identical.
return makeIndexFiles( indexes, sortedMetricAggs,
@Override
public File convert(
    final File inDir,
    final File outDir,
    final IndexSpec indexSpec,
    final ProgressIndicator progress,
    final @Nullable SegmentWriteOutMediumFactory segmentWriteOutMediumFactory
) throws IOException
{
  // Open the on-disk segment, adapt it, and write it back out to outDir with
  // the given IndexSpec. A single adapter means no actual merging happens:
  // Iterables::getOnlyElement simply unwraps it, and aggregators are null.
  try (QueryableIndex index = indexIO.loadIndex(inDir)) {
    final IndexableAdapter adapter = new QueryableIndexIndexableAdapter(index);
    return makeIndexFiles(
        ImmutableList.of(adapter),
        null,
        outDir,
        progress,
        Lists.newArrayList(adapter.getDimensionNames()),
        Lists.newArrayList(adapter.getMetricNames()),
        Iterables::getOnlyElement,
        false,
        indexSpec,
        segmentWriteOutMediumFactory
    );
  }
}
@Override
public ColumnDescriptor makeColumnDescriptor()
{
  // A DOUBLE-typed column whose single part serde is the shared
  // double-column serde built from this serializer and index spec.
  final ColumnDescriptor.Builder descriptor = ColumnDescriptor.builder();
  descriptor.setValueType(ValueType.DOUBLE);
  descriptor.addSerde(IndexMergerV9.createDoubleColumnPartSerde(serializer, indexSpec));
  return descriptor.build();
}
}
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
public void persistV9(Blackhole blackhole) throws Exception
{
  final File tmpDir = Files.createTempDir();
  log.info("Using temp dir: " + tmpDir.getAbsolutePath());
  try {
    // Persist the in-memory incremental index to disk, then feed the resulting
    // file to the blackhole so the JIT cannot elide the work.
    final File indexFile = INDEX_MERGER_V9.persist(incIndex, tmpDir, new IndexSpec(), null);
    blackhole.consume(indexFile);
  } finally {
    FileUtils.deleteDirectory(tmpDir);
  }
}
}
// NOTE(review): duplicate of the interleaved-overload fragment earlier in this
// extract — two delegating overload bodies mashed into one line; not valid
// Java as-is. Recover the full overloads from the original file. Left
// byte-identical.
) throws IOException return mergeQueryableIndex( indexes, rollup, ) throws IOException return merge( IndexMerger.toIndexableAdapters(indexes), rollup,
// NOTE(review): argument-list fragment from some larger constructor/factory
// call — builds an IndexMergerV9 with an off-heap write-out medium factory,
// surrounded by nulls for unidentified parameters; the enclosing call is not
// visible. Left byte-identical.
null, null, new IndexMergerV9(objectMapper, indexIO, OffHeapMemorySegmentWriteOutMediumFactory.instance()), null, null,
// NOTE(review): duplicate truncated mergeQueryableIndex call — rollup flag
// taken from the schema's granularity spec; remaining arguments not visible in
// this extract. Left byte-identical.
indexMergerV9.mergeQueryableIndex( indexes, schema.getGranularitySpec().isRollup(),