// NOTE: the excerpt begins mid-statement; the assignment target and base path
// below are assumed by analogy with the identical pattern used two statements later.
segmentPath = new Path(dataSegmentPusher.getPathForHadoop(),
    dataSegmentPusher.makeIndexPathName(dataSegmentBuilder.build(), DruidStorageHandlerUtils.INDEX_ZIP));
// Compute the expected final location under the Hadoop working directory and
// verify that the persisted segment's load spec points at it.
expectedFinalHadoopPath = new Path(dataSegmentPusher.getPathForHadoop(),
    dataSegmentPusher.makeIndexPathName(persistedSegment, DruidStorageHandlerUtils.INDEX_ZIP));
Assert.assertEquals(
    ImmutableMap.of("type", "hdfs", "path", expectedFinalHadoopPath.toString()),
    persistedSegment.getLoadSpec());
// Materialize a dummy segment file at the path the pusher derives for this
// segment, then compute the expected final location of the persisted segment.
segmentPath = new Path(dataSegmentPusher.getPathForHadoop(),
    dataSegmentPusher.makeIndexPathName(segment, DruidStorageHandlerUtils.INDEX_ZIP));
FileUtils.writeStringToFile(new File(segmentPath.toUri()), "dummy");
expectedFinalHadoopPath = new Path(dataSegmentPusher.getPathForHadoop(),
    dataSegmentPusher.makeIndexPathName(persistedSegment, DruidStorageHandlerUtils.INDEX_ZIP));
expectedFinalHadoopPath = new Path(dataSegmentPusher.getPathForHadoop(),
    dataSegmentPusher.makeIndexPathName(persistedSegment, DruidStorageHandlerUtils.INDEX_ZIP));
/**
 * Resolves the final file name path for a segment: the pusher's index path
 * name for {@code segmentTemplate}, rooted at {@code basePath}.
 */
public static Path makeFileNamePath(
    final Path basePath,
    final FileSystem fs,
    final DataSegment segmentTemplate,
    final String baseFileName,
    DataSegmentPusher dataSegmentPusher
)
{
  return new Path(
      prependFSIfNullScheme(fs, basePath),
      dataSegmentPusher.makeIndexPathName(segmentTemplate, baseFileName)
  );
}
/**
 * Resolves a temporary path for a segment file while a task attempt is still
 * writing it; the attempt id is suffixed so concurrent attempts cannot collide.
 */
public static Path makeTmpPath(
    final Path basePath,
    final FileSystem fs,
    final DataSegment segmentTemplate,
    final TaskAttemptID taskAttemptID,
    DataSegmentPusher dataSegmentPusher
)
{
  return new Path(
      prependFSIfNullScheme(fs, basePath),
      StringUtils.format(
          "./%s.%d",
          dataSegmentPusher.makeIndexPathName(segmentTemplate, JobHelper.INDEX_ZIP),
          taskAttemptID.getId()
      )
  );
}
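For orientation, here is a minimal sketch of how a caller might combine the two helpers above: write the zipped segment to the attempt-scoped tmp path first, then rename it to the final path once the attempt succeeds. The method name pushSegmentSketch is hypothetical, the import paths assume a recent org.apache.druid package layout, and the fs/basePath/segment/taskAttemptID/pusher values are assumed to be supplied by the surrounding Hadoop indexing job.

import java.io.IOException;
import java.io.OutputStream;
import org.apache.druid.segment.loading.DataSegmentPusher;
import org.apache.druid.timeline.DataSegment;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.TaskAttemptID;

// Hypothetical caller of makeTmpPath/makeFileNamePath: tmp-then-rename publish.
static Path pushSegmentSketch(
    final FileSystem fs,
    final Path basePath,
    final DataSegment segment,
    final TaskAttemptID taskAttemptID,
    final DataSegmentPusher pusher
) throws IOException
{
  final Path tmpPath = JobHelper.makeTmpPath(basePath, fs, segment, taskAttemptID, pusher);
  final Path finalPath = JobHelper.makeFileNamePath(basePath, fs, segment, JobHelper.INDEX_ZIP, pusher);

  // Stream the zipped segment to the attempt-scoped tmp file first.
  try (OutputStream out = fs.create(tmpPath)) {
    // ... zip and write the segment's files into `out` here ...
  }

  // Promote the tmp file to the final location; rename returns false on failure.
  if (!fs.rename(tmpPath, finalPath)) {
    throw new IOException("Failed to rename " + tmpPath + " to " + finalPath);
  }
  return finalPath;
}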