@Test
public void testPathForHadoopAbsolute()
{
  // An absolute storage directory should map directly onto a "file:" URI.
  config.storageDirectory = new File("/druid");

  final LocalDataSegmentPusher pusher = new LocalDataSegmentPusher(config, new ObjectMapper());
  Assert.assertEquals("file:/druid", pusher.getPathForHadoop());
}
// NOTE(review): this span interleaves two variants of the push() body and is
// not self-contained (it ends mid argument list) — the enclosing method and
// the closing arguments are outside this view; confirm against the full file.
// Resolve the final segment directory under the configured base storage dir.
final File outDir = new File(baseStorageDir, this.getStorageDir(segment, useUniquePath));
// Build the pushed DataSegment: load spec points at the output dir, size and
// binary version are taken from the on-disk segment files.
return createDescriptorFile(
    segment.withLoadSpec(makeLoadSpec(outDir.toURI()))
           .withSize(size)
           .withBinaryVersion(SegmentUtils.getVersionFromDir(dataSegmentFile)),
// Second variant: stage into an intermediate directory first, then describe
// the zipped index file (INDEX_FILENAME) rather than the bare directory.
final File tmpOutDir = new File(baseStorageDir, makeIntermediateDir());
log.info("Creating intermediate directory[%s] for segment[%s]", tmpOutDir.toString(), segment.getId());
FileUtils.forceMkdir(tmpOutDir);
// Zip the segment files; the compressed size becomes the segment's size.
final long size = compressSegment(dataSegmentFile, tmpIndexFile);
DataSegment dataSegment = createDescriptorFile(
    segment.withLoadSpec(makeLoadSpec(new File(outDir, INDEX_FILENAME).toURI()))
           .withSize(size)
           .withBinaryVersion(SegmentUtils.getVersionFromDir(dataSegmentFile)),
// Test double: delegates to the real push and records the *returned* segment
// (i.e. the one with the rewritten load spec) so the test can assert on what
// was actually published.
@Override
public DataSegment push(final File dataSegmentFile, final DataSegment segment, final boolean useUniquePath)
    throws IOException
{
  final DataSegment returnSegment = super.push(dataSegmentFile, segment, useUniquePath);
  segments.add(returnSegment);
  return returnSegment;
}
}; // closes an anonymous subclass whose declaration is outside this view
@Test
public void testPush() throws IOException
{
  /* DataSegment - Used to create LoadSpec and Create outDir (Local Deep Storage location in this case)
     File dataSegmentFile - Used to get location of segment files like version.bin, meta.smoosh and xxxxx.smoosh */
  // Push two segments that differ only by version; each must land in its own
  // directory containing both the index archive and the descriptor file.
  final DataSegment dataSegment2 = dataSegment.withVersion("v2");

  final DataSegment pushed1 = localDataSegmentPusher.push(dataSegmentFiles, dataSegment, false);
  final DataSegment pushed2 = localDataSegmentPusher.push(dataSegmentFiles, dataSegment2, false);

  Assert.assertNotNull(pushed1);
  Assert.assertEquals(dataSegment, pushed1);
  Assert.assertNotNull(pushed2);
  Assert.assertEquals(dataSegment2, pushed2);

  // Different versions must never share a storage directory.
  Assert.assertNotEquals(
      localDataSegmentPusher.getStorageDir(dataSegment, false),
      localDataSegmentPusher.getStorageDir(dataSegment2, false)
  );

  for (DataSegment pushed : ImmutableList.of(pushed1, pushed2)) {
    final File segmentDir = new File(
        config.getStorageDirectory(),
        localDataSegmentPusher.getStorageDir(pushed, false)
    );
    Assert.assertTrue(new File(segmentDir, "index.zip").exists());
    Assert.assertTrue(new File(segmentDir, "descriptor.json").exists());
  }
}
/**
 * @deprecated the data source is ignored; use {@link #getPathForHadoop()} instead.
 */
@Deprecated
@Override
public String getPathForHadoop(String dataSource)
{
  // Delegates unconditionally; dataSource is unused.
  return getPathForHadoop();
}
@Before
public void setUp() throws IOException
{
  // Fresh pusher writing into a throwaway temp directory for every test.
  config = new LocalDataSegmentPusherConfig();
  config.storageDirectory = temporaryFolder.newFolder();
  localDataSegmentPusher = new LocalDataSegmentPusher(config, TestHelper.makeJsonMapper());

  // Minimal segment payload: a version.bin holding the big-endian int 0x9.
  dataSegmentFiles = temporaryFolder.newFolder();
  final File versionFile = new File(dataSegmentFiles, "version.bin");
  Files.asByteSink(versionFile).write(Ints.toByteArray(0x9));
}
/**
 * Returns the Hadoop working path; the {@code dataSource} argument is ignored.
 *
 * @deprecated callers should migrate to {@link #getPathForHadoop()}.
 */
@Deprecated
@Override
public String getPathForHadoop(String dataSource)
{
  return getPathForHadoop();
}
// NOTE(review): fragment of an argument list — the enclosing call and the
// remaining arguments are outside this view. A pusher backed by a default
// LocalDataSegmentPusherConfig is passed alongside the action client.
actionClient,
null,
new LocalDataSegmentPusher(
    new LocalDataSegmentPusherConfig()
// NOTE(review): this span interleaves two variants of the push() body and is
// not self-contained (it ends mid argument list). Kept byte-for-byte apart
// from replacing the deprecated DataSegment#getIdentifier() with getId(),
// which is the accessor used elsewhere in this file for the same log line.
// Resolve the final segment directory under the configured base storage dir.
final File outDir = new File(baseStorageDir, this.getStorageDir(segment, useUniquePath));
// Build the pushed DataSegment: load spec points at the output dir, size and
// binary version are taken from the on-disk segment files.
return createDescriptorFile(
    segment.withLoadSpec(makeLoadSpec(outDir.toURI()))
           .withSize(size)
           .withBinaryVersion(SegmentUtils.getVersionFromDir(dataSegmentFile)),
// Second variant: stage into an intermediate directory first, then describe
// the zipped index file (INDEX_FILENAME) rather than the bare directory.
final File tmpOutDir = new File(baseStorageDir, makeIntermediateDir());
log.info("Creating intermediate directory[%s] for segment[%s]", tmpOutDir.toString(), segment.getId());
FileUtils.forceMkdir(tmpOutDir);
// Zip the segment files; the compressed size becomes the segment's size.
final long size = compressSegment(dataSegmentFile, tmpIndexFile);
DataSegment dataSegment = createDescriptorFile(
    segment.withLoadSpec(makeLoadSpec(new File(outDir, INDEX_FILENAME).toURI()))
           .withSize(size)
           .withBinaryVersion(SegmentUtils.getVersionFromDir(dataSegmentFile)),
@Test
public void testPathForHadoopRelative()
{
  // A relative storage directory is resolved against the JVM's working
  // directory before being turned into a "file:" URI.
  config.storageDirectory = new File("druid");

  final String expected = StringUtils.format("file:%s/druid", System.getProperty("user.dir"));
  final LocalDataSegmentPusher pusher = new LocalDataSegmentPusher(config, new ObjectMapper());
  Assert.assertEquals(expected, pusher.getPathForHadoop());
}
}
// Test double: records the segment handed in *before* delegating, i.e. the
// caller's segment rather than the one returned with a rewritten load spec.
// NOTE(review): a sibling override in this file records the returned segment
// instead — confirm which one this particular test relies on.
@Override
public DataSegment push(File file, DataSegment segment, boolean useUniquePath) throws IOException
{
  segments.add(segment);
  return super.push(file, segment, useUniquePath);
}
}; // closes an anonymous subclass whose declaration is outside this view
@Test
public void testPushUseUniquePath() throws IOException
{
  // With useUniquePath=true the load-spec path must contain a 36-character
  // UUID directory component between the partition number and index.zip.
  final DataSegment pushed = localDataSegmentPusher.push(dataSegmentFiles, dataSegment, true);

  final String path = pushed.getLoadSpec().get("path").toString();
  final Pattern expectedPath = Pattern.compile(
      ".*/ds/1970-01-01T00:00:00\\.000Z_1970-01-01T00:00:00\\.001Z/v1/0/[A-Za-z0-9-]{36}/index\\.zip"
  );
  Assert.assertTrue(path, expectedPath.matcher(path).matches());
  Assert.assertTrue(new File(path).exists());
}
@Test
public void testPushCannotCreateDirectory() throws IOException
{
  // Pushing into a read-only parent directory must surface an IOException.
  exception.expect(IOException.class);
  exception.expectMessage("Unable to create directory");

  config.storageDirectory = new File(config.storageDirectory, "xxx");
  Assert.assertTrue(config.storageDirectory.mkdir());
  // Revoke write permission so push() cannot create the segment directory.
  config.storageDirectory.setWritable(false);

  localDataSegmentPusher.push(dataSegmentFiles, dataSegment, false);
}
@Test
public void testLastPushWinsForConcurrentPushes() throws IOException
{
  // Push the same segment coordinates twice with different payloads: the
  // second (last) push must overwrite the first push's index.zip on disk.
  final File replicatedDataSegmentFiles = temporaryFolder.newFolder();
  Files.asByteSink(new File(replicatedDataSegmentFiles, "version.bin")).write(Ints.toByteArray(0x8));

  final DataSegment firstPush = localDataSegmentPusher.push(dataSegmentFiles, dataSegment, false);
  final DataSegment secondPush = localDataSegmentPusher.push(replicatedDataSegmentFiles, dataSegment2, false);

  Assert.assertEquals(dataSegment.getDimensions(), firstPush.getDimensions());
  Assert.assertEquals(dataSegment2.getDimensions(), secondPush.getDimensions());

  // Unzip what actually sits on disk and verify it carries the second
  // push's payload (0x8, not the 0x9 written by the first push).
  final File unzipDir = new File(config.storageDirectory, "unzip");
  FileUtils.forceMkdir(unzipDir);
  CompressionUtils.unzip(
      new File(config.storageDirectory, "/ds/1970-01-01T00:00:00.000Z_1970-01-01T00:00:00.001Z/v1/0/index.zip"),
      unzipDir
  );
  Assert.assertEquals(0x8, Ints.fromByteArray(Files.toByteArray(new File(unzipDir, "version.bin"))));
}