@Override
public void run()
{
  // Command-line entry point: compares exactly two segment directories and
  // reports whether their contents are identical.
  if (directories.size() != 2) {
    throw new IAE("Please provide two segment directories to compare");
  }
  final Injector injector = makeInjector();
  final IndexIO indexIO = injector.getInstance(IndexIO.class);
  try {
    String dir1 = directories.get(0);
    String dir2 = directories.get(1);
    // Throws if the two segments differ; reaching the log line means they match.
    indexIO.validateTwoSegments(new File(dir1), new File(dir2));
    log.info("Segments [%s] and [%s] are identical", dir1, dir2);
  }
  catch (Exception e) {
    // Guava's Throwables.propagate is deprecated; rethrow unchecked while
    // preserving the original exception as the cause.
    Throwables.throwIfUnchecked(e);
    throw new RuntimeException(e);
  }
}
// Converts the segment in inDir into tmpDir using INDEX_SPEC, then checks the
// converted output against the persisted reference segment.
// NOTE(review): the comparison uses the persistTmpDir field, not the inDir
// parameter — presumably persistTmpDir holds the canonical persisted copy of the
// same data; confirm this is intentional and not a bug (inDir vs persistTmpDir).
private File reprocessAndValidate(File inDir, File tmpDir) throws IOException { final File outDir = indexMerger.convert( inDir, tmpDir, INDEX_SPEC ); indexIO.validateTwoSegments(persistTmpDir, outDir); return outDir; }
/**
 * Loads the segments in the two directories and validates that they are
 * identical, delegating to the adapter-based overload. Both indexes are
 * closed on exit (in reverse order of opening, same as the nested form).
 *
 * @throws IOException if either segment cannot be loaded or closed
 */
public void validateTwoSegments(File dir1, File dir2) throws IOException
{
  try (
      QueryableIndex leftIndex = loadIndex(dir1);
      QueryableIndex rightIndex = loadIndex(dir2)
  ) {
    validateTwoSegments(
        new QueryableIndexIndexableAdapter(leftIndex),
        new QueryableIndexIndexableAdapter(rightIndex)
    );
  }
}
/**
 * Validates adapter1 against adapter2 and checks the outcome against the
 * expected exception class for this parameterization: if {@code exception} is
 * null the validation must succeed; otherwise an exception assignable to
 * {@code exception} must have been thrown.
 */
@Test
public void testRowValidatorEquals() throws Exception
{
  // Capture any failure from the validation so it can be inspected below.
  Exception caught = null;
  try {
    TestHelper.getTestIndexIO().validateTwoSegments(adapter1, adapter2);
  }
  catch (Exception e) {
    caught = e;
  }
  if (exception == null) {
    // No exception expected: surface anything that was thrown.
    if (caught != null) {
      throw caught;
    }
  } else {
    Assert.assertNotNull("Exception was not thrown", caught);
    // Rethrow an exception of the wrong type instead of silently passing.
    if (!exception.isAssignableFrom(caught.getClass())) {
      throw caught;
    }
  }
}
}
// Sanity-check the conversion: throws if the converted segment in outDir does
// not match the input segment in inDir.
HadoopDruidConverterConfig.INDEX_IO.validateTwoSegments(inDir, outDir);
// Check the incremental-index adapter against the queryable-index adapter, then
// compare the persisted segment (tempDir1) with the merged segment (mergedDir).
// NOTE(review): expected column count of 3 comes from this test's schema — TODO
// confirm which columns (not visible from here).
indexIO.validateTwoSegments(incrementalAdapter, queryableAdapter); Assert.assertEquals(3, merged.getColumnNames().size()); indexIO.validateTwoSegments(tempDir1, mergedDir);
// Wrap the loaded queryable index so it can be compared row-for-row with the
// incremental adapter; then compare the persisted segment (tempDir1) with the
// merged segment (mergedDir).
// NOTE(review): expected column count of 3 comes from this test's schema — TODO
// confirm which columns (not visible from here).
final IndexableAdapter queryableAdapter = new QueryableIndexIndexableAdapter(index1); indexIO.validateTwoSegments(incrementalAdapter, queryableAdapter); Assert.assertEquals(3, merged.getColumnNames().size()); indexIO.validateTwoSegments(tempDir1, mergedDir);
// Check the incremental-index adapter against the queryable-index adapter, then
// compare the persisted segment (tempDir1) with the converted segment
// (convertDir). NOTE(review): expected column count of 4 comes from this test's
// schema — TODO confirm which columns (not visible from here).
indexIO.validateTwoSegments(incrementalAdapter, queryableAdapter); Assert.assertEquals(4, converted.getColumnNames().size()); indexIO.validateTwoSegments(tempDir1, convertDir);
// Check the incremental-index adapter against the queryable-index adapter, then
// compare the persisted segment (tempDir1) with the merged segment (mergedDir).
// NOTE(review): expected column count of 3 comes from this test's schema — TODO
// confirm which columns (not visible from here).
indexIO.validateTwoSegments(incrementalAdapter, queryableAdapter); Assert.assertEquals(3, merged.getColumnNames().size()); indexIO.validateTwoSegments(tempDir1, mergedDir);
// Check the incremental-index adapter against the queryable-index adapter, then
// compare the persisted segment (tempDir1) with the converted segment
// (convertDir). NOTE(review): expected column count of 4 comes from this test's
// schema — TODO confirm which columns (not visible from here).
indexIO.validateTwoSegments(incrementalAdapter, queryableAdapter); Assert.assertEquals(4, converted.getColumnNames().size()); indexIO.validateTwoSegments(tempDir1, convertDir);
@Override
public void run()
{
  // Command-line entry point: compares exactly two segment directories and
  // reports whether their contents are identical.
  if (directories.size() != 2) {
    throw new IAE("Please provide two segment directories to compare");
  }
  final Injector injector = makeInjector();
  final IndexIO indexIO = injector.getInstance(IndexIO.class);
  try {
    String dir1 = directories.get(0);
    String dir2 = directories.get(1);
    // Throws if the two segments differ; reaching the log line means they match.
    indexIO.validateTwoSegments(new File(dir1), new File(dir2));
    log.info("Segments [%s] and [%s] are identical", dir1, dir2);
  }
  catch (Exception e) {
    // Guava's Throwables.propagate is deprecated; rethrow unchecked while
    // preserving the original exception as the cause.
    Throwables.throwIfUnchecked(e);
    throw new RuntimeException(e);
  }
}
/**
 * Loads the segments in the two directories and validates that they are
 * identical, delegating to the adapter-based overload. Both indexes are
 * closed on exit (in reverse order of opening, same as the nested form).
 *
 * @throws IOException if either segment cannot be loaded or closed
 */
public void validateTwoSegments(File dir1, File dir2) throws IOException
{
  try (
      QueryableIndex leftIndex = loadIndex(dir1);
      QueryableIndex rightIndex = loadIndex(dir2)
  ) {
    validateTwoSegments(
        new QueryableIndexIndexableAdapter(leftIndex),
        new QueryableIndexIndexableAdapter(rightIndex)
    );
  }
}
// Sanity-check the conversion: throws if the converted segment in outDir does
// not match the input segment in inDir.
HadoopDruidConverterConfig.INDEX_IO.validateTwoSegments(inDir, outDir);