this.checkSumDigestValue[chunkId] = CheckSum.getInstance(checkSumType);
this.position[chunkId] = 0;
this.taskIndexFileName[chunkId] = new Path(FileOutputFormat.getOutputPath(conf),
    getStoreName() + "." + Integer.toString(chunkId) + "_" + this.taskId
        + INDEX_FILE_EXTENSION + fileExtension);
this.taskValueFileName[chunkId] = new Path(FileOutputFormat.getOutputPath(conf),
    getStoreName() + "." + Integer.toString(chunkId) + "_"
FileSystem fs = FileOutputFormat.getOutputPath(conf).getFileSystem(conf);
fs.delete(FileOutputFormat.getOutputPath(conf), true);
public void checkOutputSpecs(FileSystem fs, JobConf job) throws IOException {
  Path out = FileOutputFormat.getOutputPath(job);
  if (out == null) {
    if (job.getNumReduceTasks() != 0) {
      throw new InvalidJobConfException("Output directory not set in JobConf.");
    }
    return; // map-only job with no output path: nothing to check (avoids an NPE below)
  }
  if (fs == null) {
    fs = out.getFileSystem(job);
  }
  if (fs.exists(new Path(out, CrawlDatum.PARSE_DIR_NAME))) {
    throw new IOException("Segment already parsed!");
  }
}
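For context, a minimal driver sketch (class name and job wiring are hypothetical, not from the source) showing how the output directory validated above is configured; in the old mapred API, job submission invokes checkOutputSpecs() before any task runs:

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;

public class ParseDriverSketch {
  public static void main(String[] args) throws Exception {
    JobConf job = new JobConf(ParseDriverSketch.class);
    // input paths, mapper, and output format setup omitted for brevity
    // setOutputPath() is the counterpart of the getOutputPath() calls above
    FileOutputFormat.setOutputPath(job, new Path(args[0]));
    JobClient.runJob(job); // submission calls checkOutputSpecs() before tasks launch
  }
}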
Path out = FileOutputFormat.getOutputPath(job);
@Override
public void commitJob(JobContext context) throws IOException {
  JobConf conf = context.getJobConf();
  org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter wrapped =
      new CommitterFailedFirst(FileOutputFormat.getOutputPath(conf), context);
  wrapped.commitJob(context);
}
}
@Override
public void check() {
  // for more details, see https://github.com/stratosphere/stratosphere/pull/531
  Preconditions.checkNotNull(FileOutputFormat.getOutputPath(jobConf),
      "The HadoopDataSink currently expects a correct outputPath.");
}
private void writeFile(JobConf conf, String filename) throws IOException {
  System.out.println("writing file ----" + filename);
  Path outputPath = FileOutputFormat.getOutputPath(conf);
  FileSystem fs = outputPath.getFileSystem(conf);
  fs.create(new Path(outputPath, filename)).close();
}
}
private void markSuccessfulOutputDir(JobConf conf) throws IOException {
  Path outputPath = FileOutputFormat.getOutputPath(conf);
  if (outputPath != null) {
    FileSystem fileSys = outputPath.getFileSystem(conf);
    // create a file in the folder to mark it
    if (fileSys.exists(outputPath)) {
      Path filePath = new Path(outputPath, SUCCEEDED_FILE_NAME);
      fileSys.create(filePath).close();
    }
  }
}
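A possible consumer-side counterpart, assuming SUCCEEDED_FILE_NAME resolves to the conventional "_SUCCESS" marker (the constant's value is not shown in the snippet): downstream code can test for the marker before reading the directory.

// Hypothetical helper; "_SUCCESS" is assumed to match SUCCEEDED_FILE_NAME above.
private boolean isOutputReady(JobConf conf) throws IOException {
  Path outputPath = FileOutputFormat.getOutputPath(conf);
  if (outputPath == null) {
    return false;
  }
  FileSystem fs = outputPath.getFileSystem(conf);
  return fs.exists(new Path(outputPath, "_SUCCESS"));
}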
public void setupJob(JobContext context) throws IOException {
  JobConf conf = context.getJobConf();
  Path outputPath = FileOutputFormat.getOutputPath(conf);
  if (outputPath != null) {
    Path tmpDir = new Path(outputPath, FileOutputCommitter.TEMP_DIR_NAME);
    FileSystem fileSys = tmpDir.getFileSystem(conf);
    if (!fileSys.mkdirs(tmpDir)) {
      LOG.error("Mkdirs failed to create " + tmpDir.toString());
    }
  }
}
public static void makeTempPath( Configuration conf ) throws IOException {
  // create job specific temporary directory in output path
  Path outputPath = FileOutputFormat.getOutputPath( asJobConfInstance( conf ) );

  if( outputPath != null ) {
    Path tmpDir = new Path( outputPath, TEMPORARY_PATH );
    FileSystem fileSys = tmpDir.getFileSystem( conf );

    if( !fileSys.exists( tmpDir ) && !fileSys.mkdirs( tmpDir ) )
      LOG.error( "mkdirs failed to create {}", tmpDir );
  }
}
public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
  InputSplit[] result = new InputSplit[numSplits];
  Path outDir = FileOutputFormat.getOutputPath(job);
  for (int i = 0; i < result.length; ++i) {
    result[i] = new FileSplit(new Path(outDir, "dummy-split-" + i), 0, 1,
        (String[]) null);
  }
  return result;
}
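These dummy splits are a common pattern for map-only generator jobs: each synthetic split exists only to schedule one mapper. A sketch (names illustrative, not from the source) of a matching getRecordReader() that feeds each mapper exactly one empty record:

public RecordReader<Text, Text> getRecordReader(InputSplit split, JobConf job,
    Reporter reporter) throws IOException {
  return new RecordReader<Text, Text>() {
    private boolean done = false;
    public boolean next(Text key, Text value) {
      if (done) {
        return false;
      }
      done = true; // hand the mapper a single empty record, then finish
      return true;
    }
    public Text createKey() { return new Text(); }
    public Text createValue() { return new Text(); }
    public long getPos() { return 0; }
    public void close() { }
    public float getProgress() { return done ? 1.0f : 0.0f; }
  };
}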
@Override
public void cleanupJob(JobContext context) throws IOException {
  System.err.println("---- HERE ----");
  JobConf conf = context.getJobConf();
  Path outputPath = FileOutputFormat.getOutputPath(conf);
  FileSystem fs = outputPath.getFileSystem(conf);
  fs.create(new Path(outputPath, CUSTOM_CLEANUP_FILE_NAME)).close();
}
}
public static Vector retrieveTimesSquaredOutputVector(Configuration conf) throws IOException {
  Path outputPath = FileOutputFormat.getOutputPath(new JobConf(conf));
  Path outputFile = new Path(outputPath, "part-00000");
  SequenceFileValueIterator<VectorWritable> iterator =
      new SequenceFileValueIterator<VectorWritable>(outputFile, true, conf);
  try {
    return iterator.next().get();
  } finally {
    Closeables.close(iterator, true);
  }
}
Path getTempTaskOutputPath(TaskAttemptContext taskContext) throws IOException {
  JobConf conf = taskContext.getJobConf();
  Path outputPath = FileOutputFormat.getOutputPath(conf);
  if (outputPath != null) {
    Path p = new Path(outputPath,
        (FileOutputCommitter.TEMP_DIR_NAME + Path.SEPARATOR + "_"
            + taskContext.getTaskAttemptID().toString()));
    FileSystem fs = p.getFileSystem(conf);
    return p.makeQualified(fs);
  }
  return null;
}
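For orientation, a simplified sketch of what a committer typically does with this temporary path on task commit: promote the attempt's files into the job output directory, then remove the temporary tree. This is a flat-layout illustration only; the real FileOutputCommitter also recurses into subdirectories and handles name collisions.

// Simplified illustration, not the actual FileOutputCommitter logic.
void commitTask(TaskAttemptContext taskContext) throws IOException {
  Path tmp = getTempTaskOutputPath(taskContext);
  if (tmp == null) {
    return;
  }
  JobConf conf = taskContext.getJobConf();
  FileSystem fs = tmp.getFileSystem(conf);
  if (fs.exists(tmp)) {
    Path jobOutput = FileOutputFormat.getOutputPath(conf);
    for (FileStatus status : fs.listStatus(tmp)) {
      // move each task file to its final name under the job output directory
      fs.rename(status.getPath(), new Path(jobOutput, status.getPath().getName()));
    }
    fs.delete(tmp, true); // clean up the attempt directory
  }
}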
public PailRecordWriter(JobConf conf, String unique, Progressable p) throws IOException {
  PailSpec spec = (PailSpec) Utils.getObject(conf, SPEC_ARG);
  Path path = getOutputPath(conf);
  FileSystem fs = path.getFileSystem(conf);
  Pail.create(fs, path.toString(), spec, false);
  // This is a hack to get the work output directory, since it's not exposed
  // directly; the API only provides a path to a particular task file.
  _pail = Pail.create(fs,
      FileOutputFormat.getTaskOutputPath(conf, unique).getParent().toString(),
      spec, false);
  _unique = unique;
}
@Override
public void commitJob(JobContext jobContext) throws IOException {
  super.commitJob(jobContext);
  Configuration conf = ContextUtil.getConfiguration(jobContext);
  Path outputPath = FileOutputFormat.getOutputPath(new JobConf(conf));
  ParquetOutputCommitter.writeMetaDataFile(conf, outputPath);
}
}
/**
 * May only be called once, and should only be called when not running inside a flow.
 *
 * @param conf
 */
public static void cleanupJob( Configuration conf ) throws IOException {
  if( HadoopUtil.isInflow( conf ) )
    return;

  Path outputPath = FileOutputFormat.getOutputPath( asJobConfInstance( conf ) );

  cleanTempPath( conf, outputPath );
}
@Override
public void abortJob(JobContext context, int state) throws IOException {
  JobConf conf = context.getJobConf();
  Path outputPath = FileOutputFormat.getOutputPath(conf);
  FileSystem fs = outputPath.getFileSystem(conf);
  String fileName = (state == JobStatus.FAILED)
      ? TestJobCleanup.ABORT_FAILED_FILE_NAME
      : TestJobCleanup.ABORT_KILLED_FILE_NAME;
  fs.create(new Path(outputPath, fileName)).close();
}
}
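Finally, a hedged sketch of the OutputCommitter lifecycle that the setup/commit/abort hooks above participate in; in a real job the MapReduce framework drives these calls, shown here linearly for illustration:

// Illustration only: the framework, not user code, normally invokes these hooks.
OutputCommitter committer = conf.getOutputCommitter();
committer.setupJob(jobContext);
// task attempts then run, each bracketed by setupTask() and
// commitTask() or abortTask()
if (jobSucceeded) {
  committer.commitJob(jobContext);   // e.g. write success markers or metadata
} else {
  committer.abortJob(jobContext, JobStatus.FAILED); // or JobStatus.KILLED
}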