@Override
public void run() {
    LOG.debug("starting {}", getClass().getSimpleName());
    // Gather every resource and order it by path so the output is stable.
    List<ResourceInfo> entries = list().stream()
            .sorted(Comparator.comparing(ResourceInfo::getPath))
            .collect(Collectors.toList());
    try (PrintWriter writer = outputParameter.open()) {
        if (verboseParameter.isRequired() == false) {
            // terse mode: one path per line
            entries.forEach(entry -> writer.println(entry.getPath()));
        } else {
            // verbose mode: entry count header, then per-entry details
            writer.printf("total %,d%n", entries.size());
            for (ResourceInfo entry : entries) {
                writer.printf("%s%n", entry.getPath());
                writer.printf(" data source: %s%n", entry.getId());
                writer.printf(" directory: %s%n", entry.isDirectory());
            }
        }
    }
}
/**
 * Deletes the resources on the given path, optionally reporting each entry.
 * Directories are only removed when {@code recursive} is set; in verbose mode
 * each entry is printed as either {@code delete:} or {@code skip:}.
 */
private void delete(PrintWriter writer, DirectIoPath dpath) {
    LOG.debug("delete: {} (recursive={})", dpath, recursive);
    if (verboseParameter.isRequired()) {
        for (ResourceInfo info : FileListCommand.list(dpath)) {
            // directories are skipped unless recursive deletion was requested
            boolean deletable = recursive || !info.isDirectory();
            writer.printf(deletable ? "delete: %s%n" : "skip: %s%n", info.getPath());
        }
    }
    long count = delete(dpath);
    if (count == 0) {
        LOG.warn("cannot delete any files: {}", dpath);
    }
}
/**
 * Rejects copy requests where a source directory contains the destination.
 * Copying a directory into one of its own sub-directories would recurse forever,
 * so any source that is the destination or one of its ancestors is an error.
 *
 * @throws CommandConfigurationException if a source is an ancestor of the destination
 */
private void validate(List<ResourceInfo> files, Path destination) {
    // collect the destination itself and every ancestor directory
    Set<Path> ancestors = new HashSet<>();
    Path cursor = qualify(destination);
    while (cursor != null) {
        ancestors.add(cursor);
        cursor = cursor.getParent();
    }
    for (ResourceInfo file : files) {
        Path source = qualify(asHadoopPath(file.getPath()));
        LOG.debug("validate: {} -> {}", source, destination);
        if (ancestors.contains(source)) {
            throw new CommandConfigurationException(MessageFormat.format(
                    "cannot copy directory into its sub-directories: {0} -> {1}",
                    source, destination));
        }
    }
}
// NOTE(review): truncated fragment — the ResourceInfo constructor call below is cut off
// mid-argument-list in this view, so the loop body cannot be reviewed or rewritten here.
// Presumably each FileStatus becomes a ResourceInfo tagged with the data source id and
// its Hadoop path string, with counter tracking progress — confirm against the full file.
for (FileStatus stat : stats) { counter.add(1); ResourceInfo resource = new ResourceInfo( profile.getId(), stat.getPath().toString(),
// NOTE(review): garbled fragment — this line appears to fuse two pieces of copyOnto:
// a duplicate-name grouping/validation pass (the stray "destination file already exists"
// literal looks like the tail of a CommandConfigurationException message) and the actual
// Copy-task submission via executorParameter. The text is not syntactically complete,
// so it is left untouched; reconstruct from the full file before editing.
private void copyOnto(List<ResourceInfo> sources, java.nio.file.Path destination) { sources.stream() .filter(it -> recursive || it.isDirectory() == false) .collect(Collectors.groupingBy(it -> asHadoopPath(it.getPath()).getName())) .forEach((k, v) -> { java.nio.file.Path dst = destination.resolve(k); "destination file already exists: {0} ({1})", dst, src.getPath())); executorParameter.execute(sources.stream() .map(info -> { org.apache.hadoop.fs.Path src = asHadoopPath(info.getPath()); org.apache.hadoop.fs.Path dst = asHadoopPath(destination.resolve(src.getName())); return new Copy(
// NOTE(review): truncated fragment — no enclosing method is visible and the loop is not
// closed in this view. It lists resources matching the pattern under basePath and prints
// each path to stdout; presumably part of a CLI entry point — confirm against the full file.
List<ResourceInfo> list = source.list(basePath, pattern, new Counter()); for (ResourceInfo info : list) { System.out.println(info.getPath());
/**
 * Lists the resources on the given Direct I/O path.
 * <ul>
 * <li> component root: returns a single directory entry for the root itself </li>
 * <li> with a resource pattern: lists entries matching the pattern under the component path </li>
 * <li> otherwise: lists entries matching the component path as a file pattern </li>
 * </ul>
 *
 * @param path the target path
 * @return the matched resources
 * @throws CommandExecutionException if resolving the path fails
 */
static List<ResourceInfo> list(DirectIoPath path) {
    LOG.debug("listing: {} ({})", path, path.getBarePath());
    try {
        if (path.isComponentRoot()) {
            // the component root is a single, always-directory entry
            return Collections.singletonList(new ResourceInfo(
                    path.getSource().getId(),
                    path.getSource().getEntity().path(path.getComponentPath().getPathString()),
                    true));
        } else if (path.getResourcePattern().isPresent()) {
            return path.getSource().getEntity().list(
                    path.getComponentPath().getPathString(),
                    path.getResourcePattern().get(),
                    new Counter());
        } else {
            return path.getSource().getEntity().list(
                    BasePath.EMPTY.getPathString(),
                    path.getComponentPath().asFilePattern(),
                    new Counter());
        }
    } catch (InterruptedException e) {
        // restore the interrupt status before translating into an unchecked exception,
        // so callers up the stack can still observe the interruption
        Thread.currentThread().interrupt();
        throw new CommandExecutionException(MessageFormat.format(
                "error occurred while resolving path: {0} ({1})",
                path, path.getBarePath()), e);
    } catch (IOException e) {
        throw new CommandExecutionException(MessageFormat.format(
                "error occurred while resolving path: {0} ({1})",
                path, path.getBarePath()), e);
    }
}
}
// NOTE(review): garbled fragment — like its sibling overload, this line fuses a
// grouping/duplicate-destination check (the dangling "destination file already exists"
// literal is presumably the tail of an exception message) with the Copy-task submission.
// Not syntactically complete in this view; left untouched — reconstruct from the full file.
private void copyOnto(List<ResourceInfo> sources, Path destination) { sources.stream() .filter(it -> isRecursive() || it.isDirectory() == false) .collect(Collectors.groupingBy(it -> asHadoopPath(it.getPath()).getName())) .forEach((k, v) -> { Path dst = resolve(destination, k); "destination file already exists: {0} ({1})", dst, src.getPath())); executorParameter.execute(sources.stream() .map(source -> { Path src = asHadoopPath(source.getPath()); Path dst = resolve(destination, src.getName()); return new Copy(
/**
 * Copies a single resource onto the given local file destination.
 * The destination must not be a directory (directories take another code path).
 */
private void copyTo(ResourceInfo source, java.nio.file.Path destination) {
    assert Files.isDirectory(destination) == false;
    try (PrintWriter writer = outputParameter.open()) {
        org.apache.hadoop.fs.Path from = asHadoopPath(source.getPath());
        org.apache.hadoop.fs.Path to = asHadoopPath(destination);
        // pair each endpoint with its owning file system and hand the task to the executor
        Copy task = new Copy(
                writer,
                dataSourceParameter.getHadoopFileSystem(from), from,
                dataSourceParameter.getHadoopFileSystem(to), to);
        executorParameter.execute(task);
    }
}
/**
 * Copies a single resource onto the given destination path.
 * The source is resolved into a Hadoop path; the destination is used as-is.
 */
private void copyTo(ResourceInfo source, Path destination) {
    try (PrintWriter writer = outputParameter.open()) {
        org.apache.hadoop.fs.Path from = asHadoopPath(source.getPath());
        Path to = destination;
        // resolve the file system for each endpoint and run the copy task
        executorParameter.execute(new Copy(
                writer,
                dataSourceParameter.getHadoopFileSystem(from), from,
                dataSourceParameter.getHadoopFileSystem(to), to));
    }
}