Refine search
/**
 * Builds a path for a new, randomly named file inside {@code dir}.
 *
 * @param fs  filesystem used to verify that {@code dir} is a directory
 * @param dir Directory to create file in.
 * @return random filename inside passed <code>dir</code>
 * @throws IOException if {@code dir} exists but is not a directory
 */
static Path getUniqueFile(final FileSystem fs, final Path dir) throws IOException {
  if (!fs.getFileStatus(dir).isDirectory()) {
    throw new IOException("Expecting " + dir.toString() + " to be a directory");
  }
  // Compact unique name: a random UUID with its dashes stripped out.
  final String uniqueName = dash.matcher(UUID.randomUUID().toString()).replaceAll("");
  return new Path(dir, uniqueName);
}
/**
 * Recursively collects the paths of all dictionary slice files reachable from
 * {@code path} into {@code list}.
 *
 * A plain file is recorded only when its name starts with the V1 slice prefix;
 * directories are descended into depth-first.
 */
public void listDictSlicePath(FileSystem fs, FileStatus path, List<Path> list) throws IOException {
  if (!path.isDirectory()) {
    // Leaf node: keep it only if it is a dictionary slice file.
    if (path.getPath().getName().startsWith(GlobalDictHDFSStore.IndexFormatV1.SLICE_PREFIX)) {
      list.add(path.getPath());
    }
    return;
  }
  for (FileStatus child : fs.listStatus(path.getPath())) {
    listDictSlicePath(fs, child, list);
  }
}
/**
 * Adds every plain file found under each entry of the splitter-separated
 * {@code hdfsJarFileList} to the job classpath via the DistributedCache.
 *
 * @param hdfsJarFileList separated list of HDFS paths (directories or files)
 * @param conf            job configuration the classpath entries are added to
 * @throws IOException if a path cannot be listed or added to the cache
 */
private void addHdfsJars(String hdfsJarFileList, Configuration conf) throws IOException {
  for (String jarFile : SPLITTER.split(hdfsJarFileList)) {
    for (FileStatus fileStatus : this.fs.listStatus(new Path(jarFile))) {
      if (fileStatus.isDirectory()) {
        continue; // only plain files can be classpath entries
      }
      // Use the listed status' own path instead of re-deriving it from jarFile:
      // when jarFile names a single file (not a directory), listStatus() returns
      // that file itself and "new Path(jarFile, name)" would wrongly nest the
      // name under the file's own path.
      Path path = fileStatus.getPath();
      LOG.info(String.format("Adding %s to classpath", path));
      DistributedCache.addFileToClassPath(path, conf, this.fs);
    }
  }
}
/**
 * Pushes everything matched by the {@code _segmentPath} glob: matched
 * directories are walked via {@link #pushDir}, plain files are uploaded via
 * {@link #pushOneTarFile}.
 *
 * @throws Exception if the filesystem cannot be reached or an upload fails
 */
public void run() throws Exception {
  Configuration conf = new Configuration();
  FileSystem fs = FileSystem.get(conf);
  Path path = new Path(_segmentPath);
  FileStatus[] fileStatusArr = fs.globStatus(path);
  // globStatus() returns null when the pattern matches nothing; without this
  // guard the for-each below throws a NullPointerException.
  if (fileStatusArr == null) {
    return;
  }
  for (FileStatus fileStatus : fileStatusArr) {
    if (fileStatus.isDirectory()) {
      pushDir(fs, fileStatus.getPath());
    } else {
      pushOneTarFile(fs, fileStatus.getPath());
    }
  }
}
// Returns the space consumed by the file/directory described by f, caching
// answers in directorySpaces. Directories are measured with
// getContentSummary().getSpaceConsumed() (i.e. including replication); plain
// files use their length. Returns -1 when the filesystem query fails (the
// failure is logged at TRACE and otherwise swallowed — best-effort by design).
// NOTE(review): the trailing "} });" closes an enclosing anonymous class that
// lies outside this snippet's view.
private long getSpace(FileStatus f) { Long cached = directorySpaces.get(f); if (cached != null) { return cached; } try { long space = f.isDirectory() ? fs.getContentSummary(f.getPath()).getSpaceConsumed() : f.getLen(); directorySpaces.put(f, space); return space; } catch (IOException e) { LOG.trace("Failed to get space consumed by path={}", f, e); return -1; } } });
// Validates an export target: resolves the filesystem for rootDirExportFile,
// requires the target path to be a directory, and rejects a non-empty one
// (listing with HIDDEN_FILES_PATH_FILTER, so hidden files do not count).
// NOTE(review): snippet is truncated mid-statement — the second
// SemanticException's arguments and the closing braces are outside this view.
try { FileSystem fs = FileSystem.get(rootDirExportFile, conf); Path toPath = new Path(rootDirExportFile.getScheme(), rootDirExportFile.getAuthority(), rootDirExportFile.getPath()); try { FileStatus tgt = fs.getFileStatus(toPath); if (!tgt.isDirectory()) { throw new SemanticException( astRepresentationForErrorMsg + ": " + "Target is not a directory : " + rootDirExportFile); } else { FileStatus[] files = fs.listStatus(toPath, FileUtils.HIDDEN_FILES_PATH_FILTER); if (files != null && files.length != 0) { throw new SemanticException(
@Override protected void processPath(PathData item) throws IOException { if (!item.stat.isDirectory()) { throw new PathIsNotDirectoryException(item.toString()); } if (item.fs.listStatus(item.path).length == 0) { if (!item.fs.delete(item.path, false)) { throw new PathIOException(item.toString()); } } else if (!ignoreNonEmpty) { throw new PathIsNotEmptyDirectoryException(item.toString()); } } }
// Returns the string form of the first plain file in dirInHdfs whose name does
// not start with "_" (so markers like _SUCCESS are skipped). Directories are
// ignored; "first" means first in listStatus() order. If no such file exists,
// the message is logged at ERROR and a RuntimeException is thrown.
// NOTE(review): the final extra "}" closes an enclosing class that is outside
// this snippet's view.
public static String getFirstDataFilePathInDir(String dirInHdfs) throws IOException { FileStatus[] fileStatuses = getFileSystem().listStatus(new Path(dirInHdfs)); for (FileStatus fileStatus : fileStatuses) { Path dataFilePath = fileStatus.getPath(); if (!fileStatus.isDirectory() && !dataFilePath.getName().startsWith("_")) { return dataFilePath.toString(); } } String message = dirInHdfs + " does not contain a valid data file."; LOG.error(message); throw new RuntimeException(message); } }
/**
 * Globs {@code path}, ignoring hidden entries (names starting with "_" or ".")
 * but always admitting the metadata file. When the glob resolves to exactly
 * one directory, that directory's non-hidden children are returned instead.
 *
 * @return the matched statuses, possibly {@code null} when the glob matches
 *         nothing (globStatus semantics are passed through)
 */
public static FileStatus[] matchFilesOrDir(FileSystem fs, Path path) throws IOException {
  // The metadata file is accepted even though its name starts with "_".
  PathFilter globFilter = new PathFilter() {
    @Override
    public boolean accept(Path p) {
      String name = p.getName();
      if (name.equals(EximUtil.METADATA_NAME)) {
        return true;
      }
      return !name.startsWith("_") && !name.startsWith(".");
    }
  };
  FileStatus[] srcs = fs.globStatus(path, globFilter);
  if (srcs == null || srcs.length != 1 || !srcs[0].isDirectory()) {
    return srcs;
  }
  // A single directory matched: descend one level, still skipping hidden files.
  PathFilter childFilter = new PathFilter() {
    @Override
    public boolean accept(Path p) {
      String name = p.getName();
      return !name.startsWith("_") && !name.startsWith(".");
    }
  };
  return fs.listStatus(srcs[0].getPath(), childFilter);
}
/**
 * Pushes everything matched by the {@code _segmentPath} glob: matched
 * directories are walked via {@link #pushDir}, plain files are uploaded via
 * {@link #pushOneTarFile}.
 *
 * @throws Exception if the filesystem cannot be reached or an upload fails
 */
public void run() throws Exception {
  Configuration conf = new Configuration();
  FileSystem fs = FileSystem.get(conf);
  Path path = new Path(_segmentPath);
  FileStatus[] fileStatusArr = fs.globStatus(path);
  // globStatus() returns null when the pattern matches nothing; without this
  // guard the for-each below throws a NullPointerException.
  if (fileStatusArr == null) {
    return;
  }
  for (FileStatus fileStatus : fileStatusArr) {
    if (fileStatus.isDirectory()) {
      pushDir(fs, fileStatus.getPath());
    } else {
      pushOneTarFile(fs, fileStatus.getPath());
    }
  }
}
// NOTE(review): this never consults the underlying filesystem — it constructs
// a brand-new FileStatus, sets only its path, and returns whatever directory
// flag the no-arg constructor left in place. As written the result cannot
// depend on whether the URI actually is a directory; presumably this should
// query the real filesystem for the status of the URI — confirm the intent
// with the owner before relying on it.
@Override public boolean isDirectory(URI uri) { FileStatus fileStatus = new FileStatus(); fileStatus.setPath(new Path(uri)); return fileStatus.isDirectory(); }
/**
 * Recursive helper to log the state of the FS
 *
 * @see #logFileSystemState(FileSystem, Path, Logger)
 */
private static void logFSTree(Logger LOG, final FileSystem fs, final Path root, String prefix)
    throws IOException {
  FileStatus[] entries = listStatus(fs, root, null);
  if (entries == null) {
    return;
  }
  for (FileStatus entry : entries) {
    String name = entry.getPath().getName();
    if (!entry.isDirectory()) {
      LOG.debug(prefix + name);
      continue;
    }
    // Directories get a trailing slash and are descended with a longer prefix.
    LOG.debug(prefix + name + "/");
    logFSTree(LOG, fs, entry.getPath(), prefix + "---");
  }
}
/**
 * Asserts that {@code restoreTmpDir} contains exactly {@code expectedCount}
 * entries and that every one of them is a directory.
 */
private void checkRestoreTmpDir(Configuration conf, String restoreTmpDir, int expectedCount)
    throws IOException {
  FileSystem fs = FileSystem.get(conf);
  FileStatus[] entries = fs.listStatus(new Path(restoreTmpDir));
  assertNotNull(entries);
  assertEquals(entries.length, expectedCount);
  // Length already verified equal to expectedCount, so this covers all entries.
  for (FileStatus entry : entries) {
    assertTrue(entry.isDirectory());
  }
}
/**
 * Recursively uploads every segment tar found under {@code path}:
 * sub-directories are descended into, plain files go through pushOneTarFile().
 */
public void pushDir(FileSystem fs, Path path) throws Exception {
  LOGGER.info("******** Now uploading segments tar from dir: {}", path);
  // Listing uses the original path string with a trailing slash appended.
  for (FileStatus status : fs.listStatus(new Path(path.toString() + "/"))) {
    if (status.isDirectory()) {
      pushDir(fs, status.getPath());
    } else {
      pushOneTarFile(fs, status.getPath());
    }
  }
}
/**
 * Depth-first traversal rooted at {@code path}: every plain file's status is
 * appended to {@code results}; directories are recursed into, not recorded.
 */
private static void walk(List<FileStatus> results, FileSystem fileSystem, Path path)
    throws IOException {
  for (FileStatus entry : fileSystem.listStatus(path)) {
    if (entry.isDirectory()) {
      walk(results, fileSystem, entry.getPath());
    } else {
      results.add(entry);
    }
  }
}
// Builds the includes-config path under datasetDir, then — if the previously
// obtained includesFileStatus is not a directory — opens it and feeds the
// stream through getResolvedConfigKeyPaths(), accumulating into configKeyPaths.
// NOTE(review): snippet is truncated — the try-with-resources body's closing
// braces and any surrounding control flow are outside this view; also,
// includesFile is computed here but the open uses includesFileStatus.getPath(),
// so the relationship between the two is established elsewhere.
Path includesFile = new Path(datasetDir, INCLUDES_CONF_FILE_NAME); if (!includesFileStatus.isDirectory()) { try (InputStream includesConfInStream = this.fs.open(includesFileStatus.getPath())) { configKeyPaths.addAll(getResolvedConfigKeyPaths(includesConfInStream, runtimeConfig));
/**
 * Splits the WAL files under directory {@code p} via WALSplitter, archiving
 * processed logs under the old-logs directory of the WAL root.
 *
 * @throws FileNotFoundException if {@code p} does not exist
 * @throws IOException           if {@code p} exists but is not a directory
 */
private static void split(final Configuration conf, final Path p) throws IOException {
  FileSystem fs = FSUtils.getWALFileSystem(conf);
  if (!fs.exists(p)) {
    throw new FileNotFoundException(p.toString());
  }
  if (!fs.getFileStatus(p).isDirectory()) {
    throw new IOException(p + " is not a directory");
  }
  final Path baseDir = FSUtils.getWALRootDir(conf);
  Path archiveDir = new Path(baseDir, HConstants.HREGION_OLDLOGDIR_NAME);
  // With separate old-log dirs enabled, archive under a per-source subdir.
  boolean separateOldLogDir = conf.getBoolean(AbstractFSWALProvider.SEPARATE_OLDLOGDIR,
      AbstractFSWALProvider.DEFAULT_SEPARATE_OLDLOGDIR);
  if (separateOldLogDir) {
    archiveDir = new Path(archiveDir, p.getName());
  }
  WALSplitter.split(baseDir, p, archiveDir, fs, conf, WALFactory.getInstance(conf));
}
/**
 * Delegates to {@code accept(Path, boolean)} using the status' own path and
 * directory flag.
 */
@Override
public boolean accept(FileStatus f) {
  final Path p = f.getPath();
  final boolean isDir = f.isDirectory();
  return accept(p, isDir);
}
/**
 * Recursively uploads every segment tar found under {@code path}:
 * sub-directories are descended into, plain files go through pushOneTarFile().
 */
public void pushDir(FileSystem fs, Path path) throws Exception {
  LOGGER.info("******** Now uploading segments tar from dir: {}", path);
  // Listing uses the original path string with a trailing slash appended.
  for (FileStatus status : fs.listStatus(new Path(path.toString() + "/"))) {
    if (status.isDirectory()) {
      pushDir(fs, status.getPath());
    } else {
      pushOneTarFile(fs, status.getPath());
    }
  }
}
/**
 * Recursively gathers every file whose name ends with AVRO_SUFFIX under
 * {@code dir} into {@code files}. Directories are descended into; a plain file
 * without the suffix is ignored.
 */
private static void getAllNestedAvroFiles(FileStatus dir, List<FileStatus> files, FileSystem fs)
    throws IOException {
  if (!dir.isDirectory()) {
    if (dir.getPath().getName().endsWith(AVRO_SUFFIX)) {
      files.add(dir);
    }
    return;
  }
  FileStatus[] children = fs.listStatus(dir.getPath());
  if (children == null) {
    return;
  }
  for (FileStatus child : children) {
    getAllNestedAvroFiles(child, files, fs);
  }
}