throw new SegmentLoadingException("Unable to kill segment, failed to delete dir [%s]", partitionNumDir.toString()); throw new SegmentLoadingException("Unknown file type[%s]", path); throw new SegmentLoadingException(e, "Unable to kill segment");
// Resolve the final index.zip location and record it in the segment's load spec.
Path finalPath = new Path(
    dataSegmentPusher.getPathForHadoop(),
    dataSegmentPusher.makeIndexPathName(dataSegmentBuilder.build(), DruidStorageHandlerUtils.INDEX_ZIP)
);
DataSegment dataSegment = dataSegmentBuilder.loadSpec(dataSegmentPusher.makeLoadSpec(finalPath.toUri())).build();
Path segmentPath = new Path(
    dataSegmentPusher.getPathForHadoop(),
    dataSegmentPusher.makeIndexPathName(segment, DruidStorageHandlerUtils.INDEX_ZIP)
);
FileUtils.writeStringToFile(new File(segmentPath.toUri()), "dummy");
Path expectedFinalHadoopPath = new Path(
    dataSegmentPusher.getPathForHadoop(),
    dataSegmentPusher.makeIndexPathName(persistedSegment, DruidStorageHandlerUtils.INDEX_ZIP)
);
public SegmentLoader manufacturate(File storageDir) {
  return loader.withConfig(
      new SegmentLoaderConfig().withLocations(Collections.singletonList(new StorageLocationConfig().setPath(storageDir)))
  );
}
@Override
public DataSegment restore(DataSegment segment) throws SegmentLoadingException {
  return getArchiver(segment).restore(segment);
}
@Override
public DataSegment archive(DataSegment segment) throws SegmentLoadingException {
  return getArchiver(segment).archive(segment);
}
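// Context, not part of the original snippet: the two delegating methods above assume a
// getArchiver(segment) helper that selects an archiver by the segment load spec's "type"
// entry (e.g. "hdfs"). A minimal sketch under that assumption; the `archivers` map field
// and the exact message text are illustrative, not taken from this code.
private DataSegmentArchiver getArchiver(DataSegment segment) throws SegmentLoadingException {
  Object type = segment.getLoadSpec().get("type");
  DataSegmentArchiver archiver = archivers.get(type);
  if (archiver == null) {
    throw new SegmentLoadingException("Unknown loader type[%s]", type);
  }
  return archiver;
}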
@Override
public File getSegmentFiles(DataSegment segment) throws SegmentLoadingException {
  // Reuse the location that already holds the segment; otherwise pull it down with retries.
  StorageLocation loc = findStorageLocationIfLoaded(segment);
  String storageDir = DataSegmentPusher.getDefaultStorageDir(segment, false);
  if (loc == null) {
    loc = loadSegmentWithRetry(segment, storageDir);
  }
  loc.addSegment(segment);
  return new File(loc.getPath(), storageDir);
}
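// Assumption: loadSegmentWithRetry, referenced in getSegmentFiles above, tries each configured
// location in order and falls back to the next one on failure. Sketch only; loadInLocation and
// cleanupCacheFiles are assumed helper names, not taken from this code.
private StorageLocation loadSegmentWithRetry(DataSegment segment, String storageDirStr) throws SegmentLoadingException {
  for (StorageLocation loc : getSortedList(locations)) {
    File storageDir = new File(loc.getPath(), storageDirStr);
    try {
      loadInLocation(segment, storageDir);  // download and unzip into this location
      return loc;
    }
    catch (SegmentLoadingException e) {
      cleanupCacheFiles(loc.getPath(), storageDir);  // drop the partial copy, try the next location
    }
  }
  throw new SegmentLoadingException("Failed to load segment [%s] in all locations.", segment.getIdentifier());
}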
// Returns the first configured location that already has the segment on disk, or null if none does.
private StorageLocation findStorageLocationIfLoaded(final DataSegment segment) {
  for (StorageLocation location : getSortedList(locations)) {
    File localStorageDir = new File(location.getPath(), DataSegmentPusher.getDefaultStorageDir(segment, false));
    if (localStorageDir.exists()) {
      return location;
    }
  }
  return null;
}
LocalFileSystem localFileSystem = FileSystem.getLocal(config);
DataSegmentPusher dataSegmentPusher = new LocalDataSegmentPusher(new LocalDataSegmentPusherConfig() {
  @Override
  public File getStorageDirectory() {
    return segmentOutputDir;
  }
});

// Pull the pushed segment back down and load it to verify the round trip.
Assert.assertEquals(1, dataSegmentList.size());
File tmpUnzippedSegmentDir = temporaryFolder.newFolder();
new LocalDataSegmentPuller().getSegmentFiles(dataSegmentList.get(0), tmpUnzippedSegmentDir);
final QueryableIndex queryableIndex = DruidStorageHandlerUtils.INDEX_IO.loadIndex(tmpUnzippedSegmentDir);
@Override
public boolean isSegmentLoaded(final DataSegment segment) {
  return findStorageLocationIfLoaded(segment) != null;
}
Path expectedFinalHadoopPath = new Path(
    dataSegmentPusher.getPathForHadoop(),
    dataSegmentPusher.makeIndexPathName(persistedSegment, DruidStorageHandlerUtils.INDEX_ZIP)
);
Assert.assertEquals(
    ImmutableMap.of("type", "hdfs", "path", expectedFinalHadoopPath.toString()),
    persistedSegment.getLoadSpec()
);