/**
 * Writes a single float to a new file at {@code path}, overwriting any existing file
 * (the {@code true} flag to {@code create} requests overwrite).
 *
 * @param fs   filesystem to write through
 * @param path destination path of the one-float file
 * @param val  value to write in big-endian {@code DataOutput} float encoding
 */
public static void writeFloat(FileSystem fs, String path, float val) {
  // try-with-resources closes the stream even when writeFloat throws; the
  // original only closed on the happy path and leaked the stream on error.
  try (FSDataOutputStream out = fs.create(new Path(path), true)) {
    out.writeFloat(val);
  } catch (IOException e) {
    // Best-effort by design: preserve the original swallow-and-report contract
    // so existing callers that expect no checked exception keep working.
    e.printStackTrace();
  }
}
@Override public void cleanup(Context context) throws IOException { Configuration conf = context.getConfiguration(); String taskId = conf.get("mapred.task.id"); String path = conf.get("PageRankMassPath"); Preconditions.checkNotNull(taskId); Preconditions.checkNotNull(path); // Write to a file the amount of PageRank mass we've seen in this reducer. FileSystem fs = FileSystem.get(context.getConfiguration()); FSDataOutputStream out = fs.create(new Path(path + "/" + taskId), false); out.writeFloat(totalMass); out.close(); } }
@Override public void cleanup(Context context) throws IOException { Configuration conf = context.getConfiguration(); String taskId = conf.get("mapred.task.id"); String path = conf.get("PageRankMassPath"); Preconditions.checkNotNull(taskId); Preconditions.checkNotNull(path); // Write to a file the amount of PageRank mass we've seen in this reducer. FileSystem fs = FileSystem.get(context.getConfiguration()); FSDataOutputStream out = fs.create(new Path(path + "/" + taskId), false); out.writeFloat(totalMass); out.close(); } }
@Override public void cleanup(Context context) throws IOException, InterruptedException { Configuration conf = context.getConfiguration(); String taskId = conf.get("mapred.task.id"); String path = conf.get("PageRankMassPath"); Preconditions.checkNotNull(taskId); Preconditions.checkNotNull(path); FileSystem fs = FileSystem.get(conf); FSDataOutputStream out = fs.create(new Path(path + "/" + taskId), false); out.writeFloat(totalMass); out.close(); // If the HDFS node structure file is ahead, we want to emit the current node structure. if (hdfsAhead) { hdfsNode.setPageRank(Float.NEGATIVE_INFINITY); context.write(hdfsNid, hdfsNode); hdfsAhead = false; } // We have to write out the rest of the nodes we haven't finished reading yet (i.e., these are // the ones who don't have any messages sent to them) while (reader.next(hdfsNid, hdfsNode)) { hdfsNode.setPageRank(Float.NEGATIVE_INFINITY); context.write(hdfsNid, hdfsNode); } reader.close(); } }
@Override public void cleanup(Context context) throws IOException, InterruptedException { Configuration conf = context.getConfiguration(); String taskId = conf.get("mapred.task.id"); String path = conf.get("PageRankMassPath"); Preconditions.checkNotNull(taskId); Preconditions.checkNotNull(path); FileSystem fs = FileSystem.get(conf); FSDataOutputStream out = fs.create(new Path(path + "/" + taskId), false); out.writeFloat(totalMass); out.close(); // If the HDFS node structure file is ahead, we want to emit the current node structure. if (hdfsAhead) { hdfsNode.setPageRank(Float.NEGATIVE_INFINITY); context.write(hdfsNid, hdfsNode); hdfsAhead = false; } // We have to write out the rest of the nodes we haven't finished reading yet (i.e., these are // the ones who don't have any messages sent to them) while (reader.next(hdfsNid, hdfsNode)) { hdfsNode.setPageRank(Float.NEGATIVE_INFINITY); context.write(hdfsNid, hdfsNode); } reader.close(); } }
/**
 * Serializes this naive Bayes model to {@code naiveBayesModel.bin} under the
 * given output directory: the smoothing parameter {@code alphaI}, then the
 * per-feature/per-label weight vectors, the per-label theta normalizer, and
 * finally each row of the label-by-feature weight matrix.
 *
 * @param output directory to write the model file into
 * @param conf   Hadoop configuration used to resolve the filesystem
 * @throws IOException if the model file cannot be created or written
 */
public void serialize(Path output, Configuration conf) throws IOException {
  FileSystem fs = output.getFileSystem(conf);
  // try-with-resources replaces the try/finally + Closeables.close(out, false)
  // pattern; both propagate IOException and always close the stream. This also
  // matches the style used by the newer serialize variant in this file.
  try (FSDataOutputStream out = fs.create(new Path(output, "naiveBayesModel.bin"))) {
    out.writeFloat(alphaI);
    VectorWritable.writeVector(out, weightsPerFeature);
    VectorWritable.writeVector(out, weightsPerLabel);
    VectorWritable.writeVector(out, perlabelThetaNormalizer);
    for (int row = 0; row < weightsPerLabelAndFeature.numRows(); row++) {
      VectorWritable.writeVector(out, weightsPerLabelAndFeature.viewRow(row));
    }
  }
}
/**
 * Serializes this naive Bayes model to {@code naiveBayesModel.bin} under the
 * given output directory: the smoothing parameter {@code alphaI}, the
 * complementary flag, the per-feature/per-label weight vectors, the per-label
 * theta normalizer (complementary models only), and finally each row of the
 * label-by-feature weight matrix.
 *
 * @param output directory to write the model file into
 * @param conf   Hadoop configuration used to resolve the filesystem
 * @throws IOException if the model file cannot be created or written
 */
public void serialize(Path output, Configuration conf) throws IOException {
  FileSystem fs = output.getFileSystem(conf);
  // try-with-resources replaces the try/finally + Closeables.close(out, false)
  // pattern; both propagate IOException and always close the stream. This also
  // matches the style used by the newer serialize variant in this file.
  try (FSDataOutputStream out = fs.create(new Path(output, "naiveBayesModel.bin"))) {
    out.writeFloat(alphaI);
    out.writeBoolean(isComplementary);
    VectorWritable.writeVector(out, weightsPerFeature);
    VectorWritable.writeVector(out, weightsPerLabel);
    // The theta normalizer is only meaningful (and only deserialized) for
    // complementary models, so it is written conditionally.
    if (isComplementary) {
      VectorWritable.writeVector(out, perlabelThetaNormalizer);
    }
    for (int row = 0; row < weightsPerLabelAndFeature.numRows(); row++) {
      VectorWritable.writeVector(out, weightsPerLabelAndFeature.viewRow(row));
    }
  }
}
/**
 * Writes this naive Bayes model to {@code naiveBayesModel.bin} inside the given
 * output directory. Record order: {@code alphaI}, the complementary flag, the
 * per-feature and per-label weight vectors, the per-label theta normalizer
 * (complementary models only), then every row of the label-by-feature matrix.
 *
 * @param output directory that will contain the model file
 * @param conf   Hadoop configuration used to resolve the filesystem
 * @throws IOException if the model file cannot be created or written
 */
public void serialize(Path output, Configuration conf) throws IOException {
  FileSystem fs = output.getFileSystem(conf);
  Path modelFile = new Path(output, "naiveBayesModel.bin");
  // The stream is closed automatically, propagating any write failure.
  try (FSDataOutputStream out = fs.create(modelFile)) {
    out.writeFloat(alphaI);
    out.writeBoolean(isComplementary);
    VectorWritable.writeVector(out, weightsPerFeature);
    VectorWritable.writeVector(out, weightsPerLabel);
    if (isComplementary) {
      VectorWritable.writeVector(out, perlabelThetaNormalizer);
    }
    int numRows = weightsPerLabelAndFeature.numRows();
    for (int row = 0; row < numRows; row++) {
      VectorWritable.writeVector(out, weightsPerLabelAndFeature.viewRow(row));
    }
  }
}