Configuration configuration = new Configuration();
FileSystem hdfs = FileSystem.get(new URI("hdfs://localhost:54310"), configuration);
Path file = new Path("hdfs://localhost:54310/s2013/batch/table.html");
if (hdfs.exists(file)) {
    hdfs.delete(file, true);
}
OutputStream os = hdfs.create(file, new Progressable() {
    public void progress() {
        // progress callback, invoked periodically as data is written
        System.out.println("...bytes written");
    }
});
BufferedWriter br = new BufferedWriter(new OutputStreamWriter(os, "UTF-8"));
br.write("Hello World");
br.close();
hdfs.close();
fs.delete(out, true);
FileSystem fs = dir.getFileSystem(getConf());
RemoteIterator<LocatedFileStatus> it = fs.listFiles(dir, false);
while (it.hasNext()) {
    fs.delete(it.next().getPath(), false);
}
// Change
outputDir.getFileSystem(jobConf).delete(outputDir, true);
// to
FileSystem fs = FileSystem.get(jobConf);
fs.delete(outputDir, true);
import org.apache.hadoop.fs.*;
...
Configuration conf = new Configuration();
Path output = new Path("/the/folder/to/delete");
FileSystem hdfs = FileSystem.get(conf);
// delete existing directory
if (hdfs.exists(output)) {
    hdfs.delete(output, true);
}
Job job = Job.getInstance(conf, "word count");
...
Configuration conf = new Configuration();
conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
conf.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
FileSystem hdfs = FileSystem.get(URI.create("hdfs://<namenode-hostname>:<port>"), conf);
boolean isRecursive = true;
hdfs.delete(new Path("/path/to/your/file"), isRecursive);
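FileSystem.delete(Path, boolean) returns a boolean rather than throwing when nothing was removed, so a caller that must know whether the delete actually happened can check the result. A minimal sketch, assuming the hdfs handle from above and a placeholder path:

// Minimal sketch: verify the result of a recursive delete (the path is a placeholder)
Path target = new Path("/path/to/your/file");
if (hdfs.exists(target) && !hdfs.delete(target, true)) {
    throw new IOException("Failed to delete " + target);
}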
public boolean delete() {
    if (isInvalid()) {
        return false;
    }
    return fs.delete(this);
}
// Set the root of the files I will work with in the local file system
String root = getServletContext().getRealPath("/") + "WEB-INF";
// Set the root of the files I will work with in Hadoop DFS
String hroot = "/home/tomcat/output/mrjob";
// Path to the files I will work with
String src = hroot + "/part-00000.avro";
String dest = root + "/classes/avro/result.avro";

// Open the HDFS file system
Configuration hdfsconf = new Configuration();
// Fake address, replace with yours!
hdfsconf.set("fs.default.name", "hdfs://hadoop-host:54310");
hdfsconf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
hdfsconf.set("fs.file.impl", "org.apache.hadoop.fs.LocalFileSystem");
FileSystem hdfs = FileSystem.get(hdfsconf);

// Copy the result to the local file system
hdfs.copyToLocalFile(new Path(src), new Path(dest));
// Delete the result from HDFS
hdfs.delete(new Path(hroot), true);
// Close the file system handle
hdfs.close();
public int run(String[] arg) throws Exception {
    Configuration conf = getConf();
    FileSystem fs = FileSystem.get(conf);

    Job job = new Job(conf);
    job.setJarByClass(WholeFileDriver.class);
    job.setJobName("WholeFile");
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);
    job.setInputFormatClass(WholeFileInputFormat.class);
    job.setMapperClass(WholeFileMapper.class);
    job.setNumReduceTasks(0);

    FileInputFormat.addInputPath(job, new Path(arg[0]));
    Path output = new Path(arg[1]);
    try {
        // remove any previous output before the job runs
        fs.delete(output, true);
    } catch (IOException e) {
        LOG.warn("Failed to delete temporary path", e);
    }
    FileOutputFormat.setOutputPath(job, output);

    boolean ret = job.waitForCompletion(true);
    if (!ret) {
        throw new Exception("Job Failed");
    }
    return 0;
}
public int run(String[] args) throws Exception {
    Configuration c = new Configuration();
    String[] files = new GenericOptionsParser(c, args).getRemainingArgs();
    Path p1 = new Path(files[0]);
    Path p2 = new Path(files[1]);
    Path p3 = new Path(files[2]);
    FileSystem fs = FileSystem.get(c);
    if (fs.exists(p3)) {
        fs.delete(p3, true);
    }
    Job job = new Job(c, "Multiple Job");
    job.setJarByClass(MultipleFiles.class);
    MultipleInputs.addInputPath(job, p1, TextInputFormat.class, MultipleMap1.class);
    MultipleInputs.addInputPath(job, p2, TextInputFormat.class, MultipleMap2.class);
    job.setReducerClass(MultipleReducer.class);
    ...
}
fs.delete(new Path(otherArgs[1]), true);
/**
 * @param inputFiles a glob expression of the files to be merged
 * @param outputFile a destination file path
 * @param deleteSource delete source files after merging
 * @return the path of the merged output file
 * @throws IOException
 */
private static Path mergeTextFiles(String inputFiles, String outputFile,
        boolean deleteSource, boolean deleteDestinationFileIfExist) throws IOException {
    JobConf conf = new JobConf(FileMerger.class);
    FileSystem fs = FileSystem.get(conf);

    Path inputPath = new Path(inputFiles);
    Path outputPath = new Path(outputFile);

    if (deleteDestinationFileIfExist) {
        if (fs.exists(outputPath)) {
            fs.delete(outputPath, false);
            sLogger.info("Warning: removing destination file since it already exists...");
        }
    } else {
        Preconditions.checkArgument(!fs.exists(outputPath),
                new IOException("Destination file already exists..."));
    }

    FileUtil.copyMerge(fs, inputPath, fs, outputPath, deleteSource, conf, FILE_CONTENT_DELIMITER);
    sLogger.info("Successfully merged " + inputPath.toString() + " to " + outputFile);

    return outputPath;
}
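For context, a hypothetical call to the mergeTextFiles helper above might look like the following sketch; the glob and destination path are illustrative placeholders, not values from the original:

// Hypothetical usage of mergeTextFiles (paths are placeholders)
Path merged = mergeTextFiles(
        "/user/data/output/part-*",      // glob of files to merge
        "/user/data/output/merged.txt",  // destination file
        false,                           // keep the source files
        true);                           // overwrite the destination if it already exists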
String outputFile = args[1];
Path outPath = new Path(outputFile);
fs.delete(outPath, true);
FileOutputFormat.setOutputPath(job, new Path(outputFile));
fs.delete(out, true);
try {
    fs = dstFilePath.getFileSystem(conf);
    if (fs.exists(dstFilePath)) {
        fs.delete(dstFilePath, true);
    }
} catch (IOException e1) {
    e1.printStackTrace();
}
fs.delete(out, true);
hdfs.delete(outputDir, true);