if ( !fileSystem.fileExists( sourceToCompress ) ) List<FileHandle> fileHandles = fileSystem.streamFilesRecursive( sourceToCompress ).collect( toList() ); for ( FileHandle fileHandle : fileHandles ) Path zipFsPath = fileSystem.isDirectory( sourceToCompress ) ? zipFs.getPath( rootPath.relativize( sourcePath ).toString() ) : zipFs.getPath( sourcePath.getFileName().toString() ); if ( zipFsPath.getParent() != null )
/**
 * Creates (or truncates to) a zero-length file named "empty" in the test directory.
 */
private File emptyFile( FileSystemAbstraction fs ) throws IOException
{
    File target = directory.file( "empty" );
    // Remove any leftover content, then recreate as an empty file.
    fs.deleteFile( target );
    fs.create( target ).close();
    return target;
}
/**
 * Ensures {@code dir} exists and is empty: removes it recursively if present, then recreates it.
 */
private static File clean( FileSystemAbstraction fs, File dir ) throws IOException
{
    boolean alreadyThere = fs.fileExists( dir );
    if ( alreadyThere )
    {
        fs.deleteRecursively( dir );
    }
    fs.mkdirs( dir );
    return dir;
}
/**
 * Returns {@code true} only when {@code file} is a directory containing at least one entry.
 */
private boolean isNotEmptyDirectory( File file )
{
    if ( !fileSystem.isDirectory( file ) )
    {
        return false;
    }
    // listFiles may return null (e.g. on I/O problems), which counts as "not a non-empty directory"
    File[] entries = fileSystem.listFiles( file );
    return entries != null && entries.length > 0;
}
/**
 * Checks whether the output file exists and has grown to (or past) the configured
 * rotation threshold. A threshold of zero or less disables rotation.
 */
private boolean rotationThresholdExceeded()
{
    if ( !fileSystem.fileExists( outputFile ) || rotationThresholdBytes <= 0 )
    {
        return false;
    }
    return fileSystem.getFileSize( outputFile ) >= rotationThresholdBytes;
}
/**
 * Creates the index provider store file and writes its initial header record.
 *
 * @param file the store file to create
 * @param fileSystem file system abstraction used for the existence check and channel open
 * @param indexVersion index version to record in the header
 * @throws IllegalArgumentException if a non-empty store file already exists at {@code file}
 * @throws IOException on I/O failure while opening or writing the file
 */
private void create( File file, FileSystemAbstraction fileSystem, long indexVersion ) throws IOException
{
    // Refuse to clobber an existing, non-empty store; an existing zero-length file is fine.
    if ( fileSystem.fileExists( file ) && fileSystem.getFileSize( file ) > 0 )
    {
        // Fixed message grammar: "already exist" -> "already exists".
        throw new IllegalArgumentException( file + " already exists" );
    }
    try ( StoreChannel fileChannel = fileSystem.open( file, OpenMode.READ_WRITE ) )
    {
        write( fileChannel, System.currentTimeMillis(), random.nextLong(), 0, 1, indexVersion );
    }
}
/**
 * Lists the immediate sub-directories of {@code rootFolder} whose names are purely numeric,
 * sorted by {@code FILE_COMPARATOR}. Returns an empty list when the folder cannot be listed.
 */
private List<File> listFolders( File rootFolder )
{
    File[] entries = fileSystem.listFiles( rootFolder );
    if ( entries == null )
    {
        return Collections.emptyList();
    }
    return Stream.of( entries )
            .filter( entry -> fileSystem.isDirectory( entry ) && StringUtils.isNumeric( entry.getName() ) )
            .sorted( FILE_COMPARATOR )
            .collect( toList() );
}
private List<File> listFiles( File dir ) { File[] cachedFiles = pageCache.getCachedFileSystem().listFiles( dir ); File[] fsaFiles = fs.listFiles( dir ); if ( cachedFiles == null && fsaFiles == null ) { // This probably means 'dir' is actually a file, or it does not exist. return null; } Stream<File> files = Stream.concat( ofNullable( cachedFiles ).map( Arrays::stream ).orElse( empty() ), ofNullable( fsaFiles ).map( Arrays::stream ).orElse( empty() ) ); return files.distinct().collect( toList() ); }
/**
 * Persists the configuration of a fulltext index descriptor to the index folder's
 * config file, flushing and forcing the channel so the settings survive a crash.
 *
 * @param descriptor the index descriptor whose settings are written
 * @param indexStorage storage providing the index folder location
 * @param fs file system used to create and write the config file
 * @throws IOException on failure to create or write the settings file
 */
static void saveFulltextIndexSettings( FulltextIndexDescriptor descriptor, PartitionedIndexStorage indexStorage, FileSystemAbstraction fs )
        throws IOException
{
    File indexConfigFile = new File( indexStorage.getIndexFolder(), INDEX_CONFIG_FILE );
    Properties settings = new Properties();
    // Bug fix: this previously called getProperty( key, default ), which only READS a value
    // and never stores it — the eventually-consistent flag was silently dropped from the file.
    settings.setProperty( INDEX_CONFIG_EVENTUALLY_CONSISTENT, Boolean.toString( descriptor.isEventuallyConsistent() ) );
    settings.setProperty( INDEX_CONFIG_ANALYZER, descriptor.analyzerName() );
    settings.setProperty( INDEX_CONFIG_PROPERTY_NAMES, descriptor.propertyNames().stream().collect( Collectors.joining( ", ", "[", "]" ) ) );
    settings.setProperty( "_propertyIds", Arrays.toString( descriptor.properties() ) );
    settings.setProperty( "_name", descriptor.name() );
    settings.setProperty( "_schema_entityType", descriptor.schema().entityType().name() );
    settings.setProperty( "_schema_entityTokenIds", Arrays.toString( descriptor.schema().getEntityTokenIds() ) );
    try ( StoreChannel channel = fs.create( indexConfigFile );
          Writer writer = fs.openAsWriter( indexConfigFile, StandardCharsets.UTF_8, false ) )
    {
        settings.store( writer, "Auto-generated file. Do not modify!" );
        writer.flush();
        // Force the channel so the settings hit durable storage.
        channel.force( true );
    }
}
}
@Test void streamFilesRecursiveMustCreateMissingPathDirectoriesImpliedByFileRename() throws Exception { File a = existingFile( "a" ); File sub = new File( path, "sub" ); // does not exists File target = new File( sub, "b" ); FileHandle handle = fsa.streamFilesRecursive( a ).findAny().get(); handle.rename( target ); assertTrue( fsa.isDirectory( sub ) ); assertTrue( fsa.fileExists( target ) ); }
@Test
void streamFilesRecursiveMustDeleteSubDirectoriesEmptiedByFileDelete() throws Exception
{
    File subDir = existingDirectory( "sub" );
    ensureExists( new File( subDir, "x" ) );

    // Deleting the only file in 'sub' must also remove the now-empty directory itself.
    fsa.streamFilesRecursive( subDir ).forEach( HANDLE_DELETE );

    assertFalse( fsa.isDirectory( subDir ) );
    assertFalse( fsa.fileExists( subDir ) );
}
@Test
public void shouldReturnOldChannelWhenThereIsNoNextChannel() throws IOException
{
    // given
    final ReaderLogVersionBridge bridge = new ReaderLogVersionBridge( logFiles );
    when( channel.getVersion() ).thenReturn( version );
    // Opening the next log file fails with FileNotFoundException, i.e. there is no next version.
    when( fs.open( any( File.class ), eq( OpenMode.READ ) ) ).thenThrow( new FileNotFoundException() );

    // when
    final LogVersionedStoreChannel result = bridge.next( channel );

    // then
    // The bridge must hand back the very same channel, and must not close it.
    assertEquals( channel, result );
    verify( channel, never() ).close();
}
@Test
void streamFilesRecursiveMustThrowWhenDeletingNonExistingFile() throws Exception
{
    File target = existingFile( "a" );
    FileHandle staleHandle = fsa.streamFilesRecursive( target ).findAny().get();
    // Remove the file out from under the handle; deleting via the stale handle must then fail.
    fsa.deleteFile( target );
    assertThrows( NoSuchFileException.class, staleHandle::delete );
}
@Test
public void skipEmptyIndexStorageMigration() throws IOException
{
    // listFiles returning null models an empty/missing index directory.
    when( fs.listFiles( originalIndexStore ) ).thenReturn( null );
    ExplicitIndexProvider indexProviders = getExplicitIndexProvider();
    ExplicitIndexMigrator indexMigrator = new TestExplicitIndexMigrator( fs, indexProviders, logProvider, true );
    indexMigrator.migrate( storeLayout, migrationLayout, progressMonitor, StandardV2_3.STORE_VERSION, StandardV3_0.STORE_VERSION );
    // With nothing to migrate, neither the delete nor the move of index stores may happen.
    verify( fs, never() ).deleteRecursively( originalIndexStore );
    verify( fs, never() ).moveToDirectory( migratedIndexStore, storeLayout.databaseDirectory() );
}
@Test
void streamFilesRecursiveRenameMustNotChangeSourceFileContentsWithReplaceExisting() throws Exception
{
    // Two record files; 'b' is deliberately one page of records longer than 'a'.
    File a = existingFile( "a" );
    File b = existingFile( "b" );
    generateFileWithRecords( a, recordCount );
    generateFileWithRecords( b, recordCount + recordsPerFilePage );

    // Fill 'b' with random data
    try ( StoreChannel channel = fsa.open( b, OpenMode.READ_WRITE ) )
    {
        ThreadLocalRandom rng = ThreadLocalRandom.current();
        int fileSize = (int) channel.size();
        ByteBuffer buffer = ByteBuffer.allocate( fileSize );
        for ( int i = 0; i < fileSize; i++ )
        {
            buffer.put( i, (byte) rng.nextInt() );
        }
        buffer.rewind();
        channel.writeAll( buffer );
    }

    // Do the rename
    FileHandle handle = fsa.streamFilesRecursive( a ).findAny().get();
    handle.rename( b, REPLACE_EXISTING );

    // Then verify that the old random data we put in 'b' has been replaced with the contents of 'a'
    verifyRecordsInFile( b, recordCount );
}
@Before
public void setUp()
{
    // Model a non-empty explicit index store inside the database directory:
    // its parent is the database directory, it is a directory, and it holds one (mocked) file.
    when( originalIndexStore.getParentFile() ).thenReturn( storeLayout.databaseDirectory() );
    when( fs.isDirectory( originalIndexStore ) ).thenReturn( true );
    when( fs.listFiles( originalIndexStore ) ).thenReturn( new File[]{mock( File.class )} );
}
@Test( expected = IllegalArgumentException.class )
public void shouldThrowWhenTryingToCreateFileThatAlreadyExists()
{
    // Given
    FileSystemAbstraction fs = mock( FileSystemAbstraction.class );
    // First existence check reports the file missing; every subsequent check reports it
    // present with a non-zero size. NOTE(review): the exact sequence of fileExists calls
    // this relies on is internal to IndexProviderStore — confirm against its implementation.
    when( fs.fileExists( file ) ).thenReturn( false ).thenReturn( true );
    when( fs.getFileSize( file ) ).thenReturn( 42L );

    // When
    new IndexProviderStore( file, fs, MetaDataStore.versionStringToLong( "3.5" ), false );

    // Then
    // exception is thrown
}
@Test
public void cleanupMigrationDirectory() throws IOException
{
    // The migration target already exists, e.g. left over from a previous partial run.
    when( fs.fileExists( migratedIndexStore ) ).thenReturn( true );
    ExplicitIndexProvider indexProviders = getExplicitIndexProvider();
    ExplicitIndexMigrator indexMigrator = new TestExplicitIndexMigrator( fs, indexProviders, logProvider, true );
    indexMigrator.migrate( storeLayout, migrationLayout, progressMonitor, StandardV2_3.STORE_VERSION, StandardV3_0.STORE_VERSION );
    indexMigrator.cleanup( migrationLayout );
    // cleanup must remove the leftover migrated index store.
    verify( fs ).deleteRecursively( migratedIndexStore );
}
@Test public void shouldReturnTrueWhenASingleFileSizeIsGreaterOrEqualThanMaxSize() { // given long sixteenGigabytes = 16L * 1024 * 1024 * 1024; final FileSizeThreshold threshold = new FileSizeThreshold( fs, sixteenGigabytes ); when( fs.getFileSize( file ) ).thenReturn( sixteenGigabytes ); // when threshold.init(); final boolean result = threshold.reached( file, version, source ); // then assertTrue( result ); }
@Test
void streamFilesRecursiveMustDeleteFiles() throws Exception
{
    File first = existingFile( "a" );
    File second = existingFile( "b" );
    File third = existingFile( "c" );

    // Deleting every handle streamed from the common parent must remove all three files.
    File parent = first.getParentFile();
    fsa.streamFilesRecursive( parent ).forEach( HANDLE_DELETE );

    assertFalse( fsa.fileExists( first ) );
    assertFalse( fsa.fileExists( second ) );
    assertFalse( fsa.fileExists( third ) );
}