@Override
public void map(IntWritable key, WeightedVectorWritable val, Context context)
    throws IOException, InterruptedException {
  // By pivoting on the cluster mapping value, we can make sure that each unique
  // cluster goes to its own reducer, since the clusters are numbered from 0 to
  // k-1, where k is the number of clusters.
  outputVector.set(val.getVector());
  context.write(new IntWritable(newClusterMappings.get(key.get())), outputVector);
}
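The mapper above relies on two fields declared elsewhere in its class: the old-to-new cluster ID lookup and a reusable output writable. A minimal sketch of how the enclosing class might wire them up, assuming the mapping is shipped through the job Configuration; the class name ClusterPivotMapper and the key "cluster.mappings" are invented for illustration, not part of the original code:

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.mahout.clustering.WeightedVectorWritable;
import org.apache.mahout.math.VectorWritable;

public class ClusterPivotMapper
    extends Mapper<IntWritable, WeightedVectorWritable, IntWritable, VectorWritable> {

  // Lookup from original cluster IDs to the compacted range 0..k-1.
  private final Map<Integer, Integer> newClusterMappings = new HashMap<Integer, Integer>();
  // Reused across map() calls so we do not allocate a new writable per record.
  private final VectorWritable outputVector = new VectorWritable();

  @Override
  protected void setup(Context context) {
    // Hypothetical encoding: comma-separated "oldId:newId" pairs.
    String encoded = context.getConfiguration().get("cluster.mappings", "");
    if (!encoded.isEmpty()) {
      for (String pair : encoded.split(",")) {
        String[] parts = pair.split(":");
        newClusterMappings.put(Integer.valueOf(parts[0]), Integer.valueOf(parts[1]));
      }
    }
  }

  // map() as shown above goes here.
}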
@Override
public void readFields(DataInput in) throws IOException {
  super.readFields(in);
  int size = in.readInt();
  if (size > 0) {
    properties = new HashMap<Text, Text>();
    for (int i = 0; i < size; i++) {
      Text key = new Text(in.readUTF());
      Text val = new Text(in.readUTF());
      properties.put(key, val);
    }
  }
}
@Override
public void write(DataOutput out) throws IOException {
  super.write(out);
  out.writeInt(properties != null ? properties.size() : 0);
  if (properties != null) {
    for (Map.Entry<Text, Text> entry : properties.entrySet()) {
      out.writeUTF(entry.getKey().toString());
      out.writeUTF(entry.getValue().toString());
    }
  }
}
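write() and readFields() must stay mirror images of each other, or the stream desynchronizes on the next field: the size header written first is what tells readFields() how many key/value pairs to expect. A quick round-trip sketch of that invariant; ClusterProperties and its getProperties() accessor are stand-in names for the Writable these methods belong to, not the actual class:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.Text;

public class RoundTripCheck {
  public static void main(String[] args) throws IOException {
    // Populate a hypothetical instance and serialize it to a byte buffer.
    ClusterProperties original = new ClusterProperties();
    original.getProperties().put(new Text("distanceMeasure"), new Text("cosine"));

    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    original.write(new DataOutputStream(bytes));

    // Deserialize into a fresh instance; the maps should compare equal.
    ClusterProperties copy = new ClusterProperties();
    copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
    System.out.println(original.getProperties().equals(copy.getProperties()));
  }
}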
/**
 * Writes a vector to the cluster directory.
 */
private void writeVectorToCluster(Writer writer, WeightedVectorWritable point)
    throws IOException {
  writer.append(new LongWritable(uniqueVectorId++), new VectorWritable(point.getVector()));
  // Emit a sync marker so readers can resynchronize mid-file.
  writer.sync();
}
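writeVectorToCluster() assumes the caller has already opened one SequenceFile.Writer per cluster, keyed by LongWritable and valued by VectorWritable to match the append() call above. A sketch of that setup, using the older FileSystem-based createWriter() overload; the output path layout and the sample point are assumptions, not taken from the original code:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.mahout.clustering.WeightedVectorWritable;
import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.VectorWritable;

// ... inside the class that owns writeVectorToCluster():
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(conf);
int clusterId = 0; // example value
// Invented layout: one part file per cluster under a base output directory.
Path clusterFile = new Path("clustered-points/cluster-" + clusterId, "part-m-00000");
SequenceFile.Writer writer = SequenceFile.createWriter(
    fs, conf, clusterFile, LongWritable.class, VectorWritable.class);
try {
  WeightedVectorWritable point =
      new WeightedVectorWritable(1.0, new DenseVector(new double[] {0.5, 1.5}));
  writeVectorToCluster(writer, point);
} finally {
  writer.close();
}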