@Override
public void putAll(Map<? extends Writable, ? extends Writable> t) {
  for (Map.Entry<? extends Writable, ? extends Writable> e : t.entrySet()) {
    put(e.getKey(), e.getValue());
  }
}
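// Usage sketch (hypothetical, not from the source): since MapWritable
// implements Map<Writable, Writable>, putAll can merge one instance into
// another entry by entry.
MapWritable src = new MapWritable();
src.put(new Text("k"), new Text("v"));
MapWritable dst = new MapWritable();
dst.putAll(src); // dst now contains ("k" -> "v")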
public MapWritable readMap(MapWritable mw) throws IOException {
  if (mw == null) {
    mw = new MapWritable();
  }
  int length = in.readMapHeader();
  for (int i = 0; i < length; i++) {
    Writable key = read();
    Writable value = read();
    mw.put(key, value);
  }
  return mw;
}
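// Usage sketch (assumed context: called on the enclosing reader, whose "in"
// field supplies the map header and entries): passing null lets readMap
// allocate the result, while passing an existing MapWritable reuses it and
// adds the decoded entries to it.
MapWritable fresh = readMap(null);               // newly allocated result
MapWritable reused = readMap(new MapWritable()); // caller-supplied target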
if ((record != null) && (!record.isEmpty())) {
  for (Entry<String, Object> entry : record.entrySet()) {
    value.put(new Text(entry.getKey()),
        entry.getValue() == null ? NullWritable.get() : new ObjectWritable(entry.getValue()));
  }
}
@Test
public void testMapFieldExtractorNested() throws Exception {
  ConstantFieldExtractor cfe = new MapWritableFieldExtractor();
  Map<Writable, Writable> m = new MapWritable();
  MapWritable nested = new MapWritable();
  nested.put(new Text("bar"), new Text("found"));
  m.put(new Text("foo"), nested);
  assertEquals(new Text("found"), extract(cfe, "foo.bar", m));
}
public void put(Writable key, Writable value) {
  this.partitionMap.put(key, value);
}
@Test
public void generateEventWritable() throws Exception {
  MapWritable document = new MapWritable();
  document.put(new Text("field"), new Text("value"));

  SerializationEventConverter eventConverter = new SerializationEventConverter();
  SerializationFailure iaeFailure = new SerializationFailure(
      new IllegalArgumentException("garbage"), document, new ArrayList<String>());

  String rawEvent = eventConverter.getRawEvent(iaeFailure);
  assertThat(rawEvent, Matchers.startsWith("org.apache.hadoop.io.MapWritable@"));

  String timestamp = eventConverter.getTimestamp(iaeFailure);
  assertTrue(StringUtils.hasText(timestamp));
  assertTrue(DateUtils.parseDate(timestamp).getTime().getTime() > 1L);

  String exceptionType = eventConverter.renderExceptionType(iaeFailure);
  assertEquals("illegal_argument_exception", exceptionType);

  String exceptionMessage = eventConverter.renderExceptionMessage(iaeFailure);
  assertEquals("garbage", exceptionMessage);

  String eventMessage = eventConverter.renderEventMessage(iaeFailure);
  assertEquals("Could not construct bulk entry from record", eventMessage);
}
public void setServer(String newServer) {
  partitionMap.put(SERVER, new Text(newServer));
}

public void setService(String newService) {
  partitionMap.put(SERVICE, new Text(newService));
}
/**
 * Add all metadata from other CrawlDatum to this CrawlDatum.
 *
 * @param other CrawlDatum
 */
public void putAllMetaData(CrawlDatum other) {
  for (Entry<Writable, Writable> e : other.getMetaData().entrySet()) {
    getMetaData().put(e.getKey(), e.getValue());
  }
}
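// Usage sketch (hypothetical; "oldDatum" and "newDatum" are assumed
// pre-existing records for the same URL): merging metadata from several
// CrawlDatum instances into one. Later calls overwrite duplicate keys.
CrawlDatum merged = new CrawlDatum();
merged.putAllMetaData(oldDatum);
merged.putAllMetaData(newDatum);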
/**
 * Sets configurations for multiple tables at a time.
 *
 * @param implementingClass
 *          the class whose name will be used as a prefix for the property configuration key
 * @param conf
 *          the Hadoop configuration object to configure
 * @param configs
 *          a map of table names to InputTableConfig objects to associate with the job
 * @since 1.6.0
 */
public static void setInputTableConfigs(Class<?> implementingClass, Configuration conf,
    Map<String,org.apache.accumulo.core.client.mapreduce.InputTableConfig> configs) {
  MapWritable mapWritable = new MapWritable();
  for (Map.Entry<String,org.apache.accumulo.core.client.mapreduce.InputTableConfig> tableConfig : configs
      .entrySet())
    mapWritable.put(new Text(tableConfig.getKey()), tableConfig.getValue());
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  try {
    mapWritable.write(new DataOutputStream(baos));
  } catch (IOException e) {
    throw new IllegalStateException("Table configuration could not be serialized.");
  }
  String confKey = enumToConfKey(implementingClass, ScanOpts.TABLE_CONFIGS);
  conf.set(confKey, Base64.getEncoder().encodeToString(baos.toByteArray()));
}
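// Round-trip sketch (hypothetical helper, not from the source): decoding the
// Base64 string stored above back into a MapWritable. MapWritable.readFields
// is the symmetric counterpart of the MapWritable.write call used during
// serialization. Assumes java.io.ByteArrayInputStream and
// java.io.DataInputStream are imported.
static MapWritable decodeTableConfigs(Configuration conf, String confKey) throws IOException {
  byte[] bytes = Base64.getDecoder().decode(conf.get(confKey));
  MapWritable mapWritable = new MapWritable();
  mapWritable.readFields(new DataInputStream(new ByteArrayInputStream(bytes)));
  return mapWritable;
}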
metaData.put(key, oldMetaData.get(key));
/** {@inheritDoc} */
public void putAll(Map<? extends Writable, ? extends Writable> t) {
  for (Map.Entry<? extends Writable, ? extends Writable> e : t.entrySet()) {
    put(e.getKey(), e.getValue());
  }
}
public static final void writeProperties(DataOutput out, Properties props) throws IOException {
  MapWritable propsWritable = new MapWritable();
  for (Entry<Object, Object> prop : props.entrySet()) {
    Writable key = new Text(prop.getKey().toString());
    Writable value = new Text(prop.getValue().toString());
    propsWritable.put(key, value);
  }
  propsWritable.write(out);
}
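// Symmetric read-side sketch (hypothetical, assuming the wire format produced
// by writeProperties above): MapWritable.readFields consumes exactly what
// MapWritable.write emitted, so the Properties can be reconstructed.
public static Properties readProperties(DataInput in) throws IOException {
  MapWritable propsWritable = new MapWritable();
  propsWritable.readFields(in);
  Properties props = new Properties();
  for (Entry<Writable, Writable> prop : propsWritable.entrySet()) {
    props.setProperty(prop.getKey().toString(), prop.getValue().toString());
  }
  return props;
}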
public static void setExecutionHints(Class<?> implementingClass, Configuration conf,
    Map<String,String> hints) {
  MapWritable mapWritable = new MapWritable();
  hints.forEach((k, v) -> mapWritable.put(new Text(k), new Text(v)));
  String key = enumToConfKey(implementingClass, ScanOpts.EXECUTION_HINTS);
  String val = toBase64(mapWritable);
  conf.set(key, val);
}
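// Usage sketch (hypothetical caller; "SomeInputFormat" and the hint key are
// assumptions, not from the source): attaching a scan-time execution hint to
// the job configuration.
Configuration conf = new Configuration();
setExecutionHints(SomeInputFormat.class, conf,
    Collections.singletonMap("scan_type", "background"));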
/**
 * Test that number of "unknown" classes is propagated across multiple copies.
 */
@SuppressWarnings("deprecation")
public void testForeignClass() {
  MapWritable inMap = new MapWritable();
  inMap.put(new Text("key"), new UTF8("value"));
  inMap.put(new Text("key2"), new UTF8("value2"));
  MapWritable outMap = new MapWritable(inMap);
  MapWritable copyOfCopy = new MapWritable(outMap);
  assertEquals(1, copyOfCopy.getNewClasses());
}
@Override
public void remove() throws IOException {
  MapWritable msg = new MapWritable();
  msg.put(GraphJobRunner.FLAG_VERTEX_DECREASE, this.vertexID);
  // Get master task peer.
  String destPeer = GraphJobRunner.getMasterTask(this.getPeer());
  runner.getPeer().send(destPeer, new GraphJobMessage(msg));
  alterVertexCounter(-1);
}