@Override
public QueryableIndex apply(@Nullable File input)
{
  try {
    return toolbox.getIndexIO().loadIndex(input);
  }
  catch (Exception e) {
    // Throwables.propagate is deprecated; its documented replacement is to
    // rethrow unchecked throwables as-is and wrap checked ones in RuntimeException,
    // which preserves the original behavior exactly.
    Throwables.throwIfUnchecked(e);
    throw new RuntimeException(e);
  }
}
}
/**
 * Loads the segments stored under the two directories and validates that they
 * contain identical data, via the adapter-based overload.
 *
 * @throws IOException if either segment cannot be loaded
 */
public void validateTwoSegments(File dir1, File dir2) throws IOException
{
  // A single try-with-resources closes the indexes in reverse declaration
  // order (index2 first), matching the original nested-try behavior.
  try (
      QueryableIndex index1 = loadIndex(dir1);
      QueryableIndex index2 = loadIndex(dir2)
  ) {
    validateTwoSegments(
        new QueryableIndexIndexableAdapter(index1),
        new QueryableIndexIndexableAdapter(index2)
    );
  }
}
/**
 * Builds an {@link IndexIO} for tests, using the shared JSON mapper and a
 * column configuration with the column cache disabled.
 */
public static IndexIO getTestIndexIO()
{
  // 0 bytes disables the column cache for test runs.
  final ColumnConfig noCacheConfig = new ColumnConfig()
  {
    @Override
    public int columnCacheSizeBytes()
    {
      return 0;
    }
  };
  return new IndexIO(JSON_MAPPER, noCacheConfig);
}
if (isNullRow(vals1) ^ isNullRow(vals2)) { throw notEqualValidationException(dim1Name, vals1, vals2); if (vals1IsList) { if (((List) vals1).size() != 1 || !Objects.equals(((List) vals1).get(0), vals2)) { throw notEqualValidationException(dim1Name, vals1, vals2); throw notEqualValidationException(dim1Name, vals1, vals2); throw notEqualValidationException(dim1Name, vals1, vals2);
@Override
public void run()
{
  // This tool compares exactly two segment directories; anything else is a usage error.
  if (directories.size() != 2) {
    throw new IAE("Please provide two segment directories to compare");
  }
  final Injector injector = makeInjector();
  final IndexIO indexIO = injector.getInstance(IndexIO.class);
  final String dir1 = directories.get(0);
  final String dir2 = directories.get(1);
  try {
    // validateTwoSegments throws if the segments differ, so reaching the log
    // line means they are identical.
    indexIO.validateTwoSegments(new File(dir1), new File(dir2));
    log.info("Segments [%s] and [%s] are identical", dir1, dir2);
  }
  catch (Exception e) {
    // Throwables.propagate is deprecated; rethrow unchecked throwables as-is
    // and wrap checked ones, which matches the deprecated method's behavior.
    Throwables.throwIfUnchecked(e);
    throw new RuntimeException(e);
  }
}
// Flush and close the writer, then verify the written index.drd is within the
// allowed size limit, and record how long the index.drd step took.
writer.close(); IndexIO.checkFileSize(new File(outDir, "index.drd")); log.info("Completed index.drd in %,d millis.", System.currentTimeMillis() - startTime);
/**
 * Converts the segment in {@code inDir} into {@code tmpDir} using INDEX_SPEC,
 * then validates the converted segment against {@code persistTmpDir}.
 *
 * @return the directory containing the converted segment
 */
private File reprocessAndValidate(File inDir, File tmpDir) throws IOException
{
  final File convertedDir = indexMerger.convert(inDir, tmpDir, INDEX_SPEC);
  // NOTE(review): validation compares against the persistTmpDir field rather
  // than inDir — presumably they refer to the same persisted segment; confirm.
  indexIO.validateTwoSegments(persistTmpDir, convertedDir);
  return convertedDir;
}
if (isNullRow(vals1) ^ isNullRow(vals2)) { throw notEqualValidationException(dim1Name, vals1, vals2); if (vals1IsList) { if (((List) vals1).size() != 1 || !Objects.equals(((List) vals1).get(0), vals2)) { throw notEqualValidationException(dim1Name, vals1, vals2); throw notEqualValidationException(dim1Name, vals1, vals2); throw notEqualValidationException(dim1Name, vals1, vals2);
// Flush and close the writer, then verify the written index.drd is within the
// allowed size limit, and record how long the index.drd step took.
writer.close(); IndexIO.checkFileSize(new File(outDir, "index.drd")); log.info("Completed index.drd in %,d millis.", System.currentTimeMillis() - startTime);
@Override
public QueryableIndex apply(@Nullable File input)
{
  try {
    return indexIO.loadIndex(input);
  }
  catch (IOException e) {
    // Throwables.propagate is deprecated. IOException is always checked, so
    // propagate(e) would have wrapped it in a RuntimeException anyway — this
    // is the exact equivalent with the cause preserved.
    throw new RuntimeException(e);
  }
}
}
indexIO.loadIndex(indexMerger.persist(toPersist1, tempDir1, indexSpec, null)) ); indexIO.validateTwoSegments(incrementalAdapter, queryableAdapter); indexIO.loadIndex( indexMerger.mergeQueryableIndex( ImmutableList.of(index1), Assert.assertEquals(3, merged.getColumnNames().size()); indexIO.validateTwoSegments(tempDir1, mergedDir);
@Test
public void testRowValidatorEquals() throws Exception
{
  // Run validation and capture whatever it throws, if anything.
  Exception caught = null;
  try {
    TestHelper.getTestIndexIO().validateTwoSegments(adapter1, adapter2);
  }
  catch (Exception e) {
    caught = e;
  }

  if (exception == null) {
    // No failure expected: any exception is a test failure, so surface it.
    if (caught != null) {
      throw caught;
    }
  } else {
    // A failure of type `exception` (or a subtype) is expected.
    Assert.assertNotNull("Exception was not thrown", caught);
    if (!exception.isAssignableFrom(caught.getClass())) {
      throw caught;
    }
  }
}
}
ObjectMapper mapper = new DefaultObjectMapper(); mapper.registerModule(new SegmentizerModule()); IndexIO indexIO = new IndexIO( mapper, new ColumnConfig()
/**
 * Loads the queryable index stored under {@code parentDir} and wraps it,
 * together with the segment's id, in a {@link QueryableIndexSegment}.
 *
 * @throws SegmentLoadingException if the index cannot be read from disk
 */
@Override
public Segment factorize(DataSegment dataSegment, File parentDir) throws SegmentLoadingException
{
  final QueryableIndex index;
  try {
    index = indexIO.loadIndex(parentDir);
  }
  catch (IOException e) {
    // Keep the original cause and surface its message in the loading exception.
    throw new SegmentLoadingException(e, "%s", e.getMessage());
  }
  return new QueryableIndexSegment(index, dataSegment.getId());
}
}
indexIO.loadIndex(indexMerger.persist(toPersist1, tempDir1, indexSpec, null)) ); indexIO.validateTwoSegments(incrementalAdapter, queryableAdapter); indexIO.loadIndex(indexMerger.convert(tempDir1, convertDir, indexSpec)) ); Assert.assertEquals(4, converted.getColumnNames().size()); indexIO.validateTwoSegments(tempDir1, convertDir);
// Sanity check: validate that the converted segment in outDir matches the
// input segment in inDir; validateTwoSegments throws on any mismatch.
HadoopDruidConverterConfig.INDEX_IO.validateTwoSegments(inDir, outDir);
final Closer closer = factoryAndCloser.rhs; IndexIO indexIO = new IndexIO( mapper, new ColumnConfig()
/**
 * Loads the queryable index for every partition chunk referenced by the given
 * timeline holders, pairing each loaded index with its segment descriptor.
 *
 * @throws IOException if any segment file cannot be loaded
 */
private static List<Pair<QueryableIndex, DataSegment>> loadSegments(
    List<TimelineObjectHolder<String, DataSegment>> timelineObjectHolders,
    Map<DataSegment, File> segmentFileMap,
    IndexIO indexIO
) throws IOException
{
  final List<Pair<QueryableIndex, DataSegment>> result = new ArrayList<>();
  for (TimelineObjectHolder<String, DataSegment> holder : timelineObjectHolders) {
    // Iterate the holder's partition chunks directly.
    for (PartitionChunk<DataSegment> chunk : holder.getObject()) {
      final DataSegment segment = chunk.getObject();
      // Every segment in the timeline must have a corresponding local file.
      final File segmentFile = Preconditions.checkNotNull(
          segmentFileMap.get(segment),
          "File for segment %s",
          segment.getId()
      );
      result.add(Pair.of(indexIO.loadIndex(segmentFile), segment));
    }
  }
  return result;
}
indexIO.loadIndex(indexMerger.append(ImmutableList.of(incrementalAdapter), null, tempDir1, indexSpec, null)) ); final IndexableAdapter queryableAdapter = new QueryableIndexIndexableAdapter(index1); indexIO.validateTwoSegments(incrementalAdapter, queryableAdapter); indexIO.loadIndex( indexMerger.mergeQueryableIndex( ImmutableList.of(index1), Assert.assertEquals(3, merged.getColumnNames().size()); indexIO.validateTwoSegments(tempDir1, mergedDir);
@Override
public void run()
{
  // This tool compares exactly two segment directories; anything else is a usage error.
  if (directories.size() != 2) {
    throw new IAE("Please provide two segment directories to compare");
  }
  final Injector injector = makeInjector();
  final IndexIO indexIO = injector.getInstance(IndexIO.class);
  final String dir1 = directories.get(0);
  final String dir2 = directories.get(1);
  try {
    // validateTwoSegments throws if the segments differ, so reaching the log
    // line means they are identical.
    indexIO.validateTwoSegments(new File(dir1), new File(dir2));
    log.info("Segments [%s] and [%s] are identical", dir1, dir2);
  }
  catch (Exception e) {
    // Throwables.propagate is deprecated; rethrow unchecked throwables as-is
    // and wrap checked ones, which matches the deprecated method's behavior.
    Throwables.throwIfUnchecked(e);
    throw new RuntimeException(e);
  }
}