/**
 * Maps a segment directory to a mmapped {@link QueryableIndex} via the toolbox's IndexIO.
 *
 * FIX: replaced deprecated {@code Throwables.propagate(e)} with its exact semantics —
 * rethrow unchecked exceptions as-is, wrap checked ones in RuntimeException.
 */
@Override
public QueryableIndex apply(@Nullable File input)
{
  try {
    return toolbox.getIndexIO().loadIndex(input);
  }
  catch (Exception e) {
    if (e instanceof RuntimeException) {
      throw (RuntimeException) e;
    }
    throw new RuntimeException(e);
  }
}
}
/**
 * Loads both segment directories and validates them against each other.
 * A single try-with-resources closes each index even when the other fails
 * to load or validation throws (resources are closed in reverse order,
 * exactly like the nested form).
 */
public void validateTwoSegments(File dir1, File dir2) throws IOException
{
  try (
      QueryableIndex index1 = loadIndex(dir1);
      QueryableIndex index2 = loadIndex(dir2)
  ) {
    validateTwoSegments(
        new QueryableIndexIndexableAdapter(index1),
        new QueryableIndexIndexableAdapter(index2)
    );
  }
}
/**
 * Maps a segment directory to a mmapped {@link QueryableIndex}.
 *
 * FIX: replaced deprecated {@code Throwables.propagate(e)}. For a checked
 * IOException, propagate always wrapped in RuntimeException, so
 * {@code new RuntimeException(e)} is behaviorally identical.
 */
@Override
public QueryableIndex apply(@Nullable File input)
{
  try {
    return indexIO.loadIndex(input);
  }
  catch (IOException e) {
    throw new RuntimeException(e);
  }
}
}
/**
 * Builds a queryable segment by mmapping the index stored under {@code parentDir}.
 * Load failures are surfaced as {@link SegmentLoadingException} carrying the cause.
 */
@Override
public Segment factorize(DataSegment dataSegment, File parentDir) throws SegmentLoadingException
{
  final QueryableIndex index;
  try {
    index = indexIO.loadIndex(parentDir);
  }
  catch (IOException e) {
    throw new SegmentLoadingException(e, "%s", e.getMessage());
  }
  return new QueryableIndexSegment(index, dataSegment.getId());
}
}
/**
 * Mmaps every segment referenced by the given timeline holders and pairs each
 * loaded index with its {@link DataSegment}.
 *
 * @param timelineObjectHolders timeline entries whose partition chunks name the segments to load
 * @param segmentFileMap        segment → already-fetched local directory; an entry must exist
 *                              for every segment in the timeline
 * @param indexIO               used to mmap each segment directory
 * @throws IOException if any segment fails to load
 */
private static List<Pair<QueryableIndex, DataSegment>> loadSegments(
    List<TimelineObjectHolder<String, DataSegment>> timelineObjectHolders,
    Map<DataSegment, File> segmentFileMap,
    IndexIO indexIO
) throws IOException
{
  final List<Pair<QueryableIndex, DataSegment>> result = new ArrayList<>();
  for (TimelineObjectHolder<String, DataSegment> holder : timelineObjectHolders) {
    for (PartitionChunk<DataSegment> chunk : holder.getObject()) {
      final DataSegment segment = chunk.getObject();
      final File segmentFile = Preconditions.checkNotNull(
          segmentFileMap.get(segment),
          "File for segment %s",
          segment.getId()
      );
      result.add(Pair.of(indexIO.loadIndex(segmentFile), segment));
    }
  }
  return result;
}
/**
 * Persists an in-memory index to a throwaway temp directory and mmaps the result.
 *
 * FIX: replaced deprecated {@code Throwables.propagate(e)}; for a checked
 * IOException it always wrapped in RuntimeException, which is what we do here.
 */
public static QueryableIndex persistRealtimeAndLoadMMapped(IncrementalIndex index)
{
  try {
    // createTempFile + delete + mkdirs turns the unique temp name into a directory.
    // Return values are intentionally ignored (test helper); persist() will fail
    // loudly anyway if the directory could not be created.
    File someTmpFile = File.createTempFile("billy", "yay");
    someTmpFile.delete();
    someTmpFile.mkdirs();
    someTmpFile.deleteOnExit();

    INDEX_MERGER.persist(index, someTmpFile, indexSpec, null);
    return INDEX_IO.loadIndex(someTmpFile);
  }
  catch (IOException e) {
    throw new RuntimeException(e);
  }
}
}
/**
 * Persists a freshly built incremental index under a unique temp directory and
 * mmaps it back as a {@link QueryableIndex}. The directory is registered for
 * deletion on JVM exit.
 */
private static QueryableIndex makeQueryableIndex(IndexSpec indexSpec, IndexMerger indexMerger, IndexIO indexIO)
    throws IOException
{
  final IncrementalIndex theIndex = makeIncrementalIndex();

  // Convert the unique temp-file name into a directory for persist().
  final File persistDir = File.createTempFile("billy", "yay");
  persistDir.delete();
  persistDir.mkdirs();
  persistDir.deleteOnExit();

  indexMerger.persist(theIndex, persistDir, indexSpec, null);
  return indexIO.loadIndex(persistDir);
}
/**
 * Persists a freshly built incremental index with the static INDEX_MERGER and
 * mmaps it back via INDEX_IO. The temp directory is cleaned up on JVM exit.
 */
private static QueryableIndex makeQueryableIndex(IndexSpec indexSpec) throws IOException
{
  final IncrementalIndex incremental = makeIncrementalIndex();

  // Reuse the unique temp-file name as a directory for persist().
  final File outputDir = File.createTempFile("billy", "yay");
  outputDir.delete();
  outputDir.mkdirs();
  outputDir.deleteOnExit();

  INDEX_MERGER.persist(incremental, outputDir, indexSpec, null);
  return INDEX_IO.loadIndex(outputDir);
}
/**
 * Re-persists the segment at {@code inDir} into {@code outDir} using the given
 * {@code indexSpec}, preserving its dimensions and metrics.
 *
 * The loaded index is wrapped in a {@link QueryableIndexIndexableAdapter} and
 * fed as the single input to {@code makeIndexFiles}; since there is exactly one
 * adapter, row merging uses {@code Iterables::getOnlyElement}.
 *
 * @param inDir    directory of the existing segment to convert
 * @param outDir   destination directory for the converted segment files
 * @param indexSpec target serialization/compression settings
 * @param progress progress reporting callback
 * @param segmentWriteOutMediumFactory optional write-out medium override; may be null
 * @return the directory containing the converted segment (as returned by makeIndexFiles)
 * @throws IOException if loading or writing the segment fails
 */
@Override
public File convert(
    final File inDir,
    final File outDir,
    final IndexSpec indexSpec,
    final ProgressIndicator progress,
    final @Nullable SegmentWriteOutMediumFactory segmentWriteOutMediumFactory
) throws IOException
{
  // try-with-resources ensures the mmapped input index is closed after conversion.
  try (QueryableIndex index = indexIO.loadIndex(inDir)) {
    final IndexableAdapter adapter = new QueryableIndexIndexableAdapter(index);
    return makeIndexFiles(
        ImmutableList.of(adapter),
        null, // no metric aggregators: this is a straight re-encode, not a merge
        outDir,
        progress,
        Lists.newArrayList(adapter.getDimensionNames()),
        Lists.newArrayList(adapter.getMetricNames()),
        Iterables::getOnlyElement,
        false,
        indexSpec,
        segmentWriteOutMediumFactory
    );
  }
}
/**
 * Persists a freshly built incremental index with the supplied V9 merger and
 * mmaps it back. Unlike the deleteOnExit variants, the temp directory is
 * removed eagerly in a finally block once the index has been loaded.
 */
private static QueryableIndex makeQueryableIndex(IndexSpec indexSpec, IndexMergerV9 indexMergerV9, IndexIO indexIO)
    throws IOException
{
  final IncrementalIndex incremental = makeIncrementalIndex();

  // Turn the unique temp-file name into a directory for persist().
  final File persistDir = File.createTempFile("billy", "yay");
  persistDir.delete();
  persistDir.mkdirs();

  try {
    indexMergerV9.persist(incremental, persistDir, indexSpec, null);
    return indexIO.loadIndex(persistDir);
  }
  finally {
    FileUtils.deleteDirectory(persistDir);
  }
}
/**
 * Wraps the segment stored in {@code segmentDir} as a {@link QueryableIndexSegment}
 * with a dummy id.
 *
 * FIX: replaced deprecated {@code Throwables.propagate(ex)}; for a checked
 * IOException it always wrapped in RuntimeException, matched exactly here.
 */
@Override
public Segment apply(File segmentDir)
{
  try {
    return new QueryableIndexSegment(indexIO.loadIndex(segmentDir), SegmentId.dummy(""));
  }
  catch (IOException ex) {
    throw new RuntimeException(ex);
  }
}
}
/**
 * Merges the selected row-persisted indexes on disk and mmaps the merged result.
 *
 * NOTE(review): the method synchronizes on {@code log}, which also guards lazy
 * initialization of {@code rowPersistedIndexes} — an odd lock object, but left
 * unchanged because other code may use the same monitor.
 *
 * FIX: replaced deprecated {@code Throwables.propagate(e)}; for a checked
 * IOException it always wrapped in RuntimeException, matched exactly here.
 *
 * @param indexes positions into rowPersistedIndexes to merge
 * @return the mmapped merged index
 */
public QueryableIndex getMergedIncrementalIndex(int[] indexes)
{
  synchronized (log) {
    if (rowPersistedIndexes.isEmpty()) {
      makeRowPersistedIndexes();
    }
    try {
      // Derive a unique "merged" output directory from a temp-file name.
      File tmpFile = File.createTempFile("yay", "who");
      tmpFile.delete();
      File mergedFile = new File(tmpFile, "merged");
      mergedFile.mkdirs();
      mergedFile.deleteOnExit();

      List<QueryableIndex> indexesToMerge = new ArrayList<>();
      for (int index : indexes) {
        indexesToMerge.add(rowPersistedIndexes.get(index));
      }

      return indexIO.loadIndex(
          indexMerger.mergeQueryableIndex(indexesToMerge, true, METRIC_AGGS, mergedFile, indexSpec, null)
      );
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }
}
/**
 * Builds the incremental index, persists it into a randomly named subdirectory
 * of {@code tmpDir}, and mmaps the persisted result. The incremental index is
 * closed by try-with-resources once persisted.
 *
 * FIX: replaced deprecated {@code Throwables.propagate(e)}; for a checked
 * IOException it always wrapped in RuntimeException, matched exactly here.
 */
public QueryableIndex buildMMappedIndex()
{
  Preconditions.checkNotNull(indexMerger, "indexMerger");
  Preconditions.checkNotNull(tmpDir, "tmpDir");
  try (final IncrementalIndex incrementalIndex = buildIncrementalIndex()) {
    return TestHelper.getTestIndexIO().loadIndex(
        indexMerger.persist(
            incrementalIndex,
            new File(tmpDir, StringUtils.format("testIndex-%s", ThreadLocalRandom.current().nextInt(Integer.MAX_VALUE))),
            indexSpec,
            null
        )
    );
  }
  catch (IOException e) {
    throw new RuntimeException(e);
  }
}
/**
 * Fetches the given segment's files from deep storage into a local temp
 * directory, mmaps the index, tracks it for cleanup/row accounting, and wraps
 * it as a {@link WindowedStorageAdapter} restricted to the segment's interval.
 *
 * Side effects: adds the temp dir to {@code tmpSegmentDirs}, the index to
 * {@code indexes}, and increments {@code numRows}.
 *
 * FIX: replaced deprecated {@code Throwables.propagate(ex)}; for a checked
 * IOException it always wrapped in RuntimeException, matched exactly here.
 */
@Override
public WindowedStorageAdapter apply(WindowedDataSegment segment)
{
  try {
    logger.info("Getting storage path for segment [%s]", segment.getSegment().getId());
    Path path = new Path(JobHelper.getURIFromSegment(segment.getSegment()));

    logger.info("Fetch segment files from [%s]", path);
    File dir = Files.createTempDir();
    tmpSegmentDirs.add(dir); // remembered so the caller can delete it later
    logger.info("Locally storing fetched segment at [%s]", dir);

    JobHelper.unzipNoGuava(path, context.getConfiguration(), dir, context, null);
    logger.info("finished fetching segment files");

    QueryableIndex index = HadoopDruidIndexerConfig.INDEX_IO.loadIndex(dir);
    indexes.add(index);
    numRows += index.getNumRows();

    return new WindowedStorageAdapter(
        new QueryableIndexStorageAdapter(index),
        segment.getInterval()
    );
  }
  catch (IOException ex) {
    throw new RuntimeException(ex);
  }
}
}
/**
 * Sanity check: the persisted segment round-trips with the expected row count,
 * then a single reprocess pass is validated.
 */
@Test
public void testSimpleReprocess() throws IOException
{
  // closer owns the mmapped index so it is released after the test.
  final QueryableIndex loaded = closer.closeLater(indexIO.loadIndex(persistTmpDir));
  final IndexableAdapter adapter = new QueryableIndexIndexableAdapter(loaded);
  Assert.assertEquals(events.size(), adapter.getNumRows());

  reprocessAndValidate(persistTmpDir, new File(tmpDir, "reprocessed"));
}
/**
 * Persists an index carrying segment metadata and verifies the metadata
 * round-trips through load.
 *
 * FIX: {@code Files.createTempDir()} was called twice — once at declaration
 * and again inside the try — leaking the first temp directory (it was never
 * deleted). Create it exactly once. The {@code outDir != null} guard is gone
 * because outDir can no longer be null in the finally block.
 */
@Test
public void testPersistWithSegmentMetadata() throws IOException
{
  final File outDir = Files.createTempDir();
  QueryableIndex index = null;
  try {
    index = indexIO.loadIndex(indexMerger.persist(toPersist, outDir, INDEX_SPEC, null));
    Assert.assertEquals("value", index.getMetadata().get("key"));
  }
  finally {
    if (index != null) {
      index.close();
    }
    FileUtils.deleteDirectory(outDir);
  }
}
/**
 * Verifies that reprocessing a segment is idempotent: each successive
 * reprocess pass (persistTmpDir → reprocessed1 → reprocessed2 → reprocessed3)
 * preserves the row count, and each intermediate output validates against its
 * input via reprocessAndValidate.
 */
@Test
public void testIdempotentReprocess() throws IOException
{
  // Baseline: the originally persisted segment has all events.
  final IndexableAdapter adapter = new QueryableIndexIndexableAdapter(
      closer.closeLater(
          indexIO.loadIndex(
              persistTmpDir
          )
      )
  );
  Assert.assertEquals(events.size(), adapter.getNumRows());
  final File tmpDir1 = new File(tmpDir, "reprocessed1");
  reprocessAndValidate(persistTmpDir, tmpDir1);

  // First reprocess output still has all events; reprocess it again.
  final File tmpDir2 = new File(tmpDir, "reprocessed2");
  final IndexableAdapter adapter2 = new QueryableIndexIndexableAdapter(closer.closeLater(indexIO.loadIndex(tmpDir1)));
  Assert.assertEquals(events.size(), adapter2.getNumRows());
  reprocessAndValidate(tmpDir1, tmpDir2);

  // Second reprocess output unchanged as well; one more round for good measure.
  final File tmpDir3 = new File(tmpDir, "reprocessed3");
  final IndexableAdapter adapter3 = new QueryableIndexIndexableAdapter(closer.closeLater(indexIO.loadIndex(tmpDir2)));
  Assert.assertEquals(events.size(), adapter3.getNumRows());
  reprocessAndValidate(tmpDir2, tmpDir3);
}
}
/**
 * Builds an in-memory index with the given dimension schema, adds the rows,
 * persists it into a fresh temp folder, and mmaps the result. The returned
 * index is registered with {@code closer} for cleanup.
 */
private QueryableIndex persistAndLoad(List<DimensionSchema> schema, InputRow... rows) throws IOException
{
  final IncrementalIndex toPersist = IncrementalIndexTest.createIndex(null, new DimensionsSpec(schema, null, null));
  for (InputRow row : rows) {
    toPersist.add(row);
  }

  final File tempDir = temporaryFolder.newFolder();
  final File persistedDir = indexMerger.persist(toPersist, tempDir, indexSpec, null);
  return closer.closeLater(indexIO.loadIndex(persistedDir));
}
/**
 * Persists an index with caller-supplied metadata and verifies the persisted
 * segment's structure (row count, dimensions, column count, compression) and
 * that the full Metadata object — including the custom key/value element,
 * combining aggregators, NONE granularity and rollup=true — round-trips.
 */
@Test
public void testPersistWithSegmentMetadata() throws Exception
{
  final long timestamp = System.currentTimeMillis();
  IncrementalIndex toPersist = IncrementalIndexTest.createIndex(null);
  IncrementalIndexTest.populateIndex(timestamp, toPersist);

  // Attach arbitrary metadata before persisting; it must survive the round trip.
  Map<String, Object> metadataElems = ImmutableMap.of("key", "value");
  toPersist.getMetadata().putAll(metadataElems);

  final File tempDir = temporaryFolder.newFolder();
  QueryableIndex index = closer.closeLater(
      indexIO.loadIndex(indexMerger.persist(toPersist, tempDir, indexSpec, null))
  );

  Assert.assertEquals(2, index.getColumnHolder(ColumnHolder.TIME_COLUMN_NAME).getLength());
  Assert.assertEquals(Arrays.asList("dim1", "dim2"), Lists.newArrayList(index.getAvailableDimensions()));
  Assert.assertEquals(3, index.getColumnNames().size());
  assertDimCompression(index, indexSpec.getDimensionCompression());
  Assert.assertEquals(
      new Metadata(
          metadataElems,
          IncrementalIndexTest.getDefaultCombiningAggregatorFactories(),
          null,
          Granularities.NONE,
          Boolean.TRUE
      ),
      index.getMetadata()
  );
}
/**
 * Persists a populated incremental index and verifies the mmapped result:
 * time column length, available dimensions, total column count, dimension
 * compression, combining aggregators, and NONE query granularity.
 */
@Test
public void testPersist() throws Exception
{
  final long timestamp = System.currentTimeMillis();
  IncrementalIndex toPersist = IncrementalIndexTest.createIndex(null);
  IncrementalIndexTest.populateIndex(timestamp, toPersist);

  final File tempDir = temporaryFolder.newFolder();
  // closer owns the mmapped index so it is released after the test.
  QueryableIndex index = closer.closeLater(
      indexIO.loadIndex(indexMerger.persist(toPersist, tempDir, indexSpec, null))
  );

  Assert.assertEquals(2, index.getColumnHolder(ColumnHolder.TIME_COLUMN_NAME).getLength());
  Assert.assertEquals(Arrays.asList("dim1", "dim2"), Lists.newArrayList(index.getAvailableDimensions()));
  Assert.assertEquals(3, index.getColumnNames().size());
  assertDimCompression(index, indexSpec.getDimensionCompression());
  Assert.assertArrayEquals(
      IncrementalIndexTest.getDefaultCombiningAggregatorFactories(),
      index.getMetadata().getAggregators()
  );
  Assert.assertEquals(
      Granularities.NONE,
      index.getMetadata().getQueryGranularity()
  );
}