/** Delegates directory listing to the wrapped {@code originalFs}; same null/throws contract as the delegate. */
@Override public FileStatus[] listStatus(Path f) throws IOException { return originalFs.listStatus(f); }
/** Delegates directory listing to the wrapped {@code unsafeFileSystem}; same null/throws contract as the delegate. */
@Override public FileStatus[] listStatus(Path f) throws IOException { return unsafeFileSystem.listStatus(f); }
fileStatuses = fileSystem.listStatus(path);
/** * This recreates the new working directory of the recovered RocksDB instance and links/copies the contents from * a local state. */ private void restoreInstanceDirectoryFromPath(Path source) throws IOException { FileSystem fileSystem = source.getFileSystem(); final FileStatus[] fileStatuses = fileSystem.listStatus(source); if (fileStatuses == null) { throw new IOException("Cannot list file statues. Directory " + source + " does not exist."); } for (FileStatus fileStatus : fileStatuses) { final Path filePath = fileStatus.getPath(); final String fileName = filePath.getName(); File restoreFile = new File(source.getPath(), fileName); File targetFile = new File(stateBackend.instanceRocksDBPath.getPath(), fileName); if (fileName.endsWith(SST_FILE_SUFFIX)) { // hardlink'ing the immutable sst-files. Files.createLink(targetFile.toPath(), restoreFile.toPath()); } else { // true copy for all other files. Files.copy(restoreFile.toPath(), targetFile.toPath(), StandardCopyOption.REPLACE_EXISTING); } } }
/**
 * Copies every entry of {@code sourcePath} on {@code sFS} into {@code targetPath}
 * on {@code tFS}, keeping each entry's last path segment and delegating the
 * per-entry work (including recursion into directories) to {@code copy}.
 */
private static void internalCopyDirectory(Path sourcePath, Path targetPath, boolean executable, FileSystem sFS, FileSystem tFS) throws IOException {
	tFS.mkdirs(targetPath);
	final FileStatus[] entries = sFS.listStatus(sourcePath);
	for (FileStatus entry : entries) {
		String remotePath = entry.getPath().toString();
		// directory paths may carry a trailing slash; drop it so the
		// name extraction via lastIndexOf("/") below yields the real name.
		if (entry.isDir() && remotePath.endsWith("/")) {
			remotePath = remotePath.substring(0, remotePath.length() - 1);
		}
		final String childTarget = targetPath + remotePath.substring(remotePath.lastIndexOf("/"));
		copy(entry.getPath(), new Path(childTarget), executable);
	}
}
/**
 * Recursively adds {@code fileOrDirectory} to the zip stream, naming entries
 * relative to {@code rootDir}. Directories become explicit entries ending in
 * {@code '/'} and are descended into; regular files are streamed in full.
 */
private static void addToZip(Path fileOrDirectory, FileSystem fs, Path rootDir, ZipOutputStream out) throws IOException {
	final String relativePath = fileOrDirectory.getPath().replace(rootDir.getPath() + '/', "");
	if (!fs.getFileStatus(fileOrDirectory).isDir()) {
		// regular file: one entry holding the complete file content.
		out.putNextEntry(new ZipEntry(relativePath));
		try (FSDataInputStream in = fs.open(fileOrDirectory)) {
			IOUtils.copyBytes(in, out, false);
		}
		out.closeEntry();
	} else {
		// directory: emit an explicit directory entry, then recurse into children.
		out.putNextEntry(new ZipEntry(relativePath + '/'));
		for (FileStatus child : fs.listStatus(fileOrDirectory)) {
			addToZip(child.getPath(), fs, rootDir, out);
		}
	}
}
/**
 * Lists the files under {@code path} that are not rejected by {@code isFiltered},
 * recording each accepted file's modification time in {@code modificationTimes}.
 * A non-existing path yields an empty list (with a warning), not an exception.
 */
private List<String> listNewFiles(FileSystem fileSystem) throws IOException {
	final List<String> files = new ArrayList<String>();
	final FileStatus[] statuses = fileSystem.listStatus(new Path(path));
	if (statuses == null) {
		// listStatus signals a missing directory with null rather than throwing.
		LOG.warn("Path does not exist: {}", path);
		return files;
	}
	for (FileStatus status : statuses) {
		final Path filePath = status.getPath();
		final String fileName = filePath.getName();
		final long modificationTime = status.getModificationTime();
		if (isFiltered(fileName, modificationTime)) {
			continue;
		}
		files.add(filePath.toString());
		modificationTimes.put(fileName, modificationTime);
	}
	return files;
}
protected List<FileStatus> getFiles() throws IOException { // get all the files that are involved in the splits List<FileStatus> files = new ArrayList<>(); for (Path filePath: getFilePaths()) { final FileSystem fs = filePath.getFileSystem(); final FileStatus pathFile = fs.getFileStatus(filePath); if (pathFile.isDir()) { // input is directory. list all contained files final FileStatus[] partials = fs.listStatus(filePath); for (FileStatus partial : partials) { if (!partial.isDir()) { files.add(partial); } } } else { files.add(pathFile); } } return files; }
jobArchives = refreshFS.listStatus(refreshDir); } catch (IOException e) { LOG.error("Failed to access job archive location for path {}.", refreshDir, e);
statuses = fileSystem.listStatus(path); } catch (IOException e) {
FileStatus[] fss = fs.listStatus(filePath); files.ensureCapacity(files.size() + fss.length);
FileStatus[] fss = fs.listStatus(filePath); files.ensureCapacity(files.size() + fss.length);
for(FileStatus dir: fs.listStatus(path)) { if (dir.isDir()) { if (acceptFile(dir) && enumerateNestedFiles) {
/**
 * Recursively collects a {@code FileCopyTask} for every file below {@code p};
 * {@code rel} is the relative prefix prepended to each task's target name.
 * A null listing (non-existing path) contributes nothing.
 */
private static void getCopyTasks(Path p, String rel, List<FileCopyTask> tasks) throws IOException {
	final FileStatus[] children = p.getFileSystem().listStatus(p);
	if (children == null) {
		return;
	}
	for (FileStatus child : children) {
		final Path childPath = child.getPath();
		if (child.isDir()) {
			getCopyTasks(childPath, rel + childPath.getName() + "/", tasks);
		} else {
			tasks.add(new FileCopyTask(childPath, rel + childPath.getName()));
		}
	}
}
// closing brace of the enclosing class (declaration outside this view)
}
/**
 * Reads every file directly inside {@code directory} and returns a map from
 * file path to its content decoded as UTF-8.
 *
 * <p>Fixes over the previous version: the input stream is closed via
 * try-with-resources (it previously leaked), and the file is read in a loop
 * (a single {@code read} call may legally return fewer bytes than requested).
 *
 * @param directory directory whose direct children are read
 * @return map from file path to UTF-8 decoded content
 * @throws Exception if listing or reading fails
 */
private Map<Path, String> getFileContentByPath(Path directory) throws Exception {
	final Map<Path, String> contents = new HashMap<>();
	final FileStatus[] filesInBucket = getFileSystem().listStatus(directory);
	for (FileStatus file : filesInBucket) {
		final byte[] serContents = new byte[(int) file.getLen()];
		try (FSDataInputStream stream = getFileSystem().open(file.getPath())) {
			// read may return short; loop until the whole file is buffered.
			int offset = 0;
			while (offset < serContents.length) {
				final int read = stream.read(serContents, offset, serContents.length - offset);
				if (read < 0) {
					break; // unexpected EOF; keep what was read so far
				}
				offset += read;
			}
		}
		contents.put(file.getPath(), new String(serContents, StandardCharsets.UTF_8));
	}
	return contents;
}
verify(fs).listStatus(path);
assertEquals(0, fs.listStatus(directory).length); FileStatus[] files = fs.listStatus(directory); assertNotNull(files); assertEquals(3, files.length);
FileStatus[] filestatuses = fs.listStatus(bucketPath); Set<String> paths = new HashSet<>(filestatuses.length); for (FileStatus filestatus : filestatuses) {
FileStatus[] filestatuses = fs.listStatus(bucketPath); Set<String> paths = new HashSet<>(filestatuses.length); for (FileStatus filestatus : filestatuses) {
/**
 * Serializes a bucket state that only carries an in-progress file and verifies
 * that deserialization restores the bucket path and that the in-progress part
 * file (named with the ".test.inprogress" prefix) exists on disk.
 */
@Test
public void testSerializationOnlyInProgress() throws IOException {
	final File testFolder = tempFolder.newFolder();
	final FileSystem fs = FileSystem.get(testFolder.toURI());
	final Path testBucket = new Path(testFolder.getPath(), "test");

	final RecoverableWriter writer = fs.createRecoverableWriter();
	final RecoverableFsDataOutputStream stream = writer.open(testBucket);
	stream.write(IN_PROGRESS_CONTENT.getBytes(Charset.forName("UTF-8")));

	// the resumable produced by persist() is what the bucket state carries.
	final RecoverableWriter.ResumeRecoverable current = stream.persist();

	final BucketState<String> bucketState = new BucketState<>(
		"test", testBucket, Long.MAX_VALUE, current, new HashMap<>());
	final SimpleVersionedSerializer<BucketState<String>> serializer = new BucketStateSerializer<>(
		writer.getResumeRecoverableSerializer(),
		writer.getCommitRecoverableSerializer(),
		SimpleVersionedStringSerializer.INSTANCE
	);
	final byte[] bytes = SimpleVersionedSerialization.writeVersionAndSerialize(serializer, bucketState);

	// to simulate that everything is over for file.
	stream.close();

	final BucketState<String> recoveredState = SimpleVersionedSerialization.readVersionAndDeSerialize(serializer, bytes);
	Assert.assertEquals(testBucket, recoveredState.getBucketPath());

	// exactly one file should exist in the parent: the in-progress part file.
	FileStatus[] statuses = fs.listStatus(testBucket.getParent());
	Assert.assertEquals(1L, statuses.length);
	Assert.assertTrue(
		statuses[0].getPath().getPath().startsWith(
			(new Path(testBucket.getParent(), ".test.inprogress")).toString())
	);
}