@Override public OutputStream create(String url, Boolean forceCreateParentDirs) throws IOException { // TODO implement properly. return this.create(url); }
/**
 * Deletes the file or directory at the given URL.
 *
 * @param url               URL of the file or directory to delete
 * @param isRecursiveDelete whether directory contents should be deleted recursively
 * @return whether the deletion was successful, as reported by HDFS
 * @throws IOException if the deletion fails
 */
@Override
public boolean delete(String url, boolean isRecursiveDelete) throws IOException {
    final Path path = new Path(url);
    return this.getHdfs(url).delete(path, isRecursiveDelete);
}

}
/**
 * Obtains a Hadoop {@link org.apache.hadoop.fs.FileSystem} client for the given URI.
 *
 * <p>NOTE(review): a fresh {@link Configuration} is built on every call; Hadoop caches
 * {@code FileSystem} instances internally, but the configuration load is repeated —
 * consider hoisting it if this shows up in profiles.</p>
 *
 * @param uri URI whose scheme/authority select the file system
 * @return the matching {@link org.apache.hadoop.fs.FileSystem} client
 * @throws RheemException if the client cannot be obtained
 */
private org.apache.hadoop.fs.FileSystem getHdfs(String uri) {
    this.ensureInitialized();
    try {
        final Configuration configuration = new Configuration(true);
        return org.apache.hadoop.fs.FileSystem.get(new URI(uri), configuration);
    } catch (URISyntaxException | IOException e) {
        throw new RheemException(String.format("Could not obtain an HDFS client for %s.", uri), e);
    }
}
/**
 * Creates an {@link OutputStream} to the given URL, overwriting any existing file.
 *
 * @param url URL of the file to create
 * @return an {@link OutputStream} writing to the new file
 * @throws IOException if the file cannot be created
 */
@Override
public OutputStream create(String url) throws IOException {
    final Path path = new Path(url);
    // The "true" flag requests overwrite of any pre-existing file.
    return this.getHdfs(url).create(path, true);
}
/**
 * Opens an {@link InputStream} reading the file at the given URL.
 *
 * @param url URL of the file to read
 * @return an {@link InputStream} over the file's contents
 * @throws IOException if the file cannot be opened
 */
@Override
public InputStream open(String url) throws IOException {
    final Path path = new Path(url);
    return this.getHdfs(url).open(path);
}
/**
 * Retrieves the size in bytes of the file at the given URL.
 *
 * @param fileUrl URL of the file to inspect
 * @return the file length in bytes
 * @throws FileNotFoundException if the file status cannot be retrieved; the underlying
 *                               {@link IOException} is attached as the cause
 */
@Override
public long getFileSize(String fileUrl) throws FileNotFoundException {
    try {
        final FileStatus fileStatus = this.getHdfs(fileUrl).getFileStatus(new Path(fileUrl));
        return fileStatus.getLen();
    } catch (IOException e) {
        // FileNotFoundException has no (String, Throwable) constructor, so attach
        // the cause via initCause() instead of silently dropping it.
        final FileNotFoundException fnfe =
                new FileNotFoundException(String.format("Could not access %s.", fileUrl));
        fnfe.initCause(e);
        throw fnfe;
    }
}
/**
 * Lists the children of the directory at the given URL.
 *
 * @param url URL of the directory to list
 * @return the URLs of the direct children as {@link String}s
 * @throws RheemException if the directory cannot be accessed
 */
@Override
public Collection<String> listChildren(String url) {
    try {
        final Path path = new Path(url);
        final FileStatus[] statuses = this.getHdfs(url).listStatus(path);
        return Arrays.stream(statuses)
                .map(fileStatus -> fileStatus.getPath().toString())
                .collect(Collectors.toList());
    } catch (IOException e) {
        throw new RheemException(String.format("Could not access %s.", url), e);
    }
}
/**
 * Tells whether the given URL points to a directory.
 *
 * @param url URL to inspect
 * @return whether the URL designates a directory
 * @throws RheemException if the URL cannot be accessed
 */
@Override
public boolean isDirectory(String url) {
    try {
        return this.getHdfs(url).getFileStatus(new Path(url)).isDirectory();
    } catch (IOException e) {
        throw new RheemException(String.format("Could not access %s.", url), e);
    }
}