/**
 * Serializes a {@code MapWritable}: a map header carrying the entry count,
 * followed by each key/value pair in the map's iteration order.
 *
 * @param mw the map to serialize
 * @throws IOException if the underlying output fails
 */
public void writeMap(MapWritable mw) throws IOException {
  // The header must be written first so readers know how many pairs follow.
  out.writeMapHeader(mw.size());
  for (Map.Entry<Writable, Writable> pair : mw.entrySet()) {
    Writable key = pair.getKey();
    Writable value = pair.getValue();
    write(key);
    write(value);
  }
}
private boolean metadataEquals(org.apache.hadoop.io.MapWritable otherMetaData) { if (metaData==null || metaData.size() ==0) { return otherMetaData == null || otherMetaData.size() == 0; } if (otherMetaData == null) { // we already know that the current object is not null or empty return false; } HashSet<Entry<Writable, Writable>> set1 = new HashSet<Entry<Writable,Writable>>(metaData.entrySet()); HashSet<Entry<Writable, Writable>> set2 = new HashSet<Entry<Writable,Writable>>(otherMetaData.entrySet()); return set1.equals(set2); }
/**
 * Serializes this record to {@code out}. The byte layout (in order): version
 * byte, status byte, fetch time (long), retries (byte), fetch interval (int),
 * score (float), modified time (long), signature (length byte + bytes, 0 if
 * absent), and metadata (presence boolean + MapWritable payload).
 *
 * @param out destination stream
 * @throws IOException if writing to {@code out} fails
 */
public void write(DataOutput out) throws IOException {
  out.writeByte(CUR_VERSION); // store current version
  out.writeByte(status);
  out.writeLong(fetchTime);
  out.writeByte(retries);
  out.writeInt(fetchInterval);
  out.writeFloat(score);
  out.writeLong(modifiedTime);
  // Signature: single length byte (0 means "no signature") followed by raw bytes.
  // NOTE(review): length is narrowed to one byte — presumably signatures never
  // exceed 127 bytes; confirm against how signature is produced.
  if (signature == null) {
    out.writeByte(0);
  } else {
    out.writeByte(signature.length);
    out.write(signature);
  }
  // Metadata: boolean presence flag, then the map itself only when non-empty.
  if (metaData != null && metaData.size() > 0) {
    out.writeBoolean(true);
    metaData.write(out);
  } else {
    out.writeBoolean(false);
  }
}
/**
 * Renders the map as text, one line per key in the form
 * {@code <key>\t<values>\n}, where values come from
 * {@link #getStringList(Text)}.
 *
 * @return the multi-line string representation
 */
@Override
public String toString() {
  StringBuilder text = new StringBuilder(map.size());
  for (Writable key : map.keySet()) {
    String keyString = key.toString();
    text.append(keyString);
    // Look the values up by a fresh Text key built from the key's string form.
    List<String> values = getStringList(new Text(keyString));
    text.append("\t").append(values).append("\n");
  }
  return text.toString();
}
}
/**
 * Serializes a {@code MapWritable}: a map header carrying the entry count,
 * followed by each key/value pair in the map's iteration order.
 *
 * @param mw the map to serialize
 * @throws IOException if the underlying output fails
 */
public void writeMap(MapWritable mw) throws IOException {
  // The header must be written first so readers know how many pairs follow.
  out.writeMapHeader(mw.size());
  for (Map.Entry<Writable, Writable> pair : mw.entrySet()) {
    Writable key = pair.getKey();
    Writable value = pair.getValue();
    write(key);
    write(value);
  }
}
/**
 * Serializes a {@code MapWritable}: a map header carrying the entry count,
 * followed by each key/value pair in the map's iteration order.
 *
 * @param mw the map to serialize
 * @throws IOException if the underlying output fails
 */
public void writeMap(MapWritable mw) throws IOException {
  // The header must be written first so readers know how many pairs follow.
  out.writeMapHeader(mw.size());
  for (Map.Entry<Writable, Writable> pair : mw.entrySet()) {
    Writable key = pair.getKey();
    Writable value = pair.getValue();
    write(key);
    write(value);
  }
}
/**
 * Serializes a {@code MapWritable}: a map header carrying the entry count,
 * followed by each key/value pair in the map's iteration order.
 *
 * @param mw the map to serialize
 * @throws IOException if the underlying output fails
 */
public void writeMap(MapWritable mw) throws IOException {
  // The header must be written first so readers know how many pairs follow.
  out.writeMapHeader(mw.size());
  for (Map.Entry<Writable, Writable> pair : mw.entrySet()) {
    Writable key = pair.getKey();
    Writable value = pair.getValue();
    write(key);
    write(value);
  }
}
/**
 * Serializes a {@code MapWritable}: a map header carrying the entry count,
 * followed by each key/value pair in the map's iteration order.
 *
 * @param mw the map to serialize
 * @throws IOException if the underlying output fails
 */
public void writeMap(MapWritable mw) throws IOException {
  // The header must be written first so readers know how many pairs follow.
  out.writeMapHeader(mw.size());
  for (Map.Entry<Writable, Writable> pair : mw.entrySet()) {
    Writable key = pair.getKey();
    Writable value = pair.getValue();
    write(key);
    write(value);
  }
}
/**
 * Serializes a {@code MapWritable}: a map header carrying the entry count,
 * followed by each key/value pair in the map's iteration order.
 *
 * @param mw the map to serialize
 * @throws IOException if the underlying output fails
 */
public void writeMap(MapWritable mw) throws IOException {
  // The header must be written first so readers know how many pairs follow.
  out.writeMapHeader(mw.size());
  for (Map.Entry<Writable, Writable> pair : mw.entrySet()) {
    Writable key = pair.getKey();
    Writable value = pair.getValue();
    write(key);
    write(value);
  }
}
/**
 * Serializes a {@code MapWritable}: a map header carrying the entry count,
 * followed by each key/value pair in the map's iteration order.
 *
 * @param mw the map to serialize
 * @throws IOException if the underlying output fails
 */
public void writeMap(MapWritable mw) throws IOException {
  // The header must be written first so readers know how many pairs follow.
  out.writeMapHeader(mw.size());
  for (Map.Entry<Writable, Writable> pair : mw.entrySet()) {
    Writable key = pair.getKey();
    Writable value = pair.getValue();
    write(key);
    write(value);
  }
}
/** * Assert MapWritable does not grow across calls to readFields. * @throws Exception * @see <a href="https://issues.apache.org/jira/browse/HADOOP-2244">HADOOP-2244</a> */ public void testMultipleCallsToReadFieldsAreSafe() throws Exception { // Create an instance and add a key/value. MapWritable m = new MapWritable(); final Text t = new Text(getName()); m.put(t, t); // Get current size of map. Key values are 't'. int count = m.size(); // Now serialize... save off the bytes. ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutputStream dos = new DataOutputStream(baos); m.write(dos); dos.close(); // Now add new values to the MapWritable. m.put(new Text("key1"), new Text("value1")); m.put(new Text("key2"), new Text("value2")); // Now deserialize the original MapWritable. Ensure count and key values // match original state. ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); DataInputStream dis = new DataInputStream(bais); m.readFields(dis); assertEquals(count, m.size()); assertTrue(m.get(t).equals(t)); dis.close(); } }
/** * Assert MapWritable does not grow across calls to readFields. * @throws Exception * @see <a href="https://issues.apache.org/jira/browse/HADOOP-2244">HADOOP-2244</a> */ public void testMultipleCallsToReadFieldsAreSafe() throws Exception { // Create an instance and add a key/value. MapWritable m = new MapWritable(); final Text t = new Text(getName()); m.put(t, t); // Get current size of map. Key values are 't'. int count = m.size(); // Now serialize... save off the bytes. ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutputStream dos = new DataOutputStream(baos); m.write(dos); dos.close(); // Now add new values to the MapWritable. m.put(new Text("key1"), new Text("value1")); m.put(new Text("key2"), new Text("value2")); // Now deserialize the original MapWritable. Ensure count and key values // match original state. ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); DataInputStream dis = new DataInputStream(bais); m.readFields(dis); assertEquals(count, m.size()); assertTrue(m.get(t).equals(t)); dis.close(); }
/** * Assert MapWritable does not grow across calls to readFields. * @throws Exception * @see <a href="https://issues.apache.org/jira/browse/HADOOP-2244">HADOOP-2244</a> */ public void testMultipleCallsToReadFieldsAreSafe() throws Exception { // Create an instance and add a key/value. MapWritable m = new MapWritable(); final Text t = new Text(getName()); m.put(t, t); // Get current size of map. Key values are 't'. int count = m.size(); // Now serialize... save off the bytes. ByteArrayOutputStream baos = new ByteArrayOutputStream(); DataOutputStream dos = new DataOutputStream(baos); m.write(dos); dos.close(); // Now add new values to the MapWritable. m.put(new Text("key1"), new Text("value1")); m.put(new Text("key2"), new Text("value2")); // Now deserialize the original MapWritable. Ensure count and key values // match original state. ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); DataInputStream dis = new DataInputStream(bais); m.readFields(dis); assertEquals(count, m.size()); assertTrue(m.get(t).equals(t)); dis.close(); }