/**
 * @see stream.io.Stream#read()
 *
 * Returns the next item from the in-memory list, or {@code null} once all
 * items have been consumed.
 */
@Override
public Data read() throws Exception {
    // Check for exhaustion first so we do not allocate a fresh item that
    // would immediately be discarded.
    if (pos >= data.size()) {
        return null;
    }
    Data datum = DataFactory.create();
    datum.putAll(data.get(pos++));
    return datum;
}
/**
 * @see stream.io.Stream#read()
 *
 * Returns the next item from the in-memory list, or {@code null} once all
 * items have been consumed.
 */
@Override
public Data read() throws Exception {
    // Check for exhaustion first so we do not allocate a fresh item that
    // would immediately be discarded.
    if (pos >= data.size()) {
        return null;
    }
    Data datum = DataFactory.create();
    datum.putAll(data.get(pos++));
    return datum;
}
/**
 * @see stream.io.Stream#read()
 *
 * Returns the next item from the in-memory list, or {@code null} once all
 * items have been consumed.
 */
@Override
public Data read() throws Exception {
    // Check for exhaustion first so we do not allocate a fresh item that
    // would immediately be discarded.
    if (pos >= data.size()) {
        return null;
    }
    Data datum = DataFactory.create();
    datum.putAll(data.get(pos++));
    return datum;
}
/**
 * @see stream.Processor#process(stream.Data)
 *
 * De-serializes the byte array stored under {@code key} and merges the
 * decoded item's key/value pairs into the input. The input is returned
 * unchanged when the attribute is absent or decoding fails.
 */
public Data process(Data input) {
    byte[] bytes = (byte[]) input.get(key);
    if (bytes != null) {
        try {
            Data item = serializer.decode(bytes);
            input.putAll(item);
        } catch (Exception e) {
            log.error("Failed to de-serialize item: {}", e.getMessage());
            if (log.isDebugEnabled()) {
                // Route the stack trace through the logger instead of
                // dumping it to stderr with printStackTrace().
                log.debug("Stack trace:", e);
            }
        }
    }
    return input;
}
/**
 * @see stream.io.Stream#read()
 *
 * Polls the next element off the internal queue, wrapped in a fresh item.
 * Returns {@code null} when the queue is empty or the configured limit has
 * been exceeded.
 */
@Override
public Data read() throws Exception {
    // NOTE(review): the check uses '>', so with limit = N up to N+1 items
    // can be emitted before the stream stops — confirm this is intended.
    if (limit > 0 && count > limit) {
        return null;
    }
    if (queue.isEmpty()) {
        return null;
    }
    Data result = DataFactory.create();
    Data next = queue.poll();
    if (next != null) {
        result.putAll(next);
    }
    count++;
    return result;
}
/**
 * @see stream.data.DataProcessor#process(stream.data.Data)
 */
@Override
public Data process(Data data) {
    // Guard clauses: nothing to do without an item or a rule-log attribute.
    if (data == null) {
        return data;
    }
    Serializable ruleLog = data.get(key);
    if (ruleLog == null) {
        return data;
    }
    // Parse the rule-log text and merge the extracted fields into the item.
    Map<String, String> parsed = processRuleLog(ruleLog.toString(), new HashMap<String, String>());
    data.putAll(parsed);
    return data;
}
/**
 * @see stream.Processor#process(stream.Data)
 *
 * Decodes the byte array stored under {@code key} with the configured codec
 * and merges the decoded item into the input. Decoding problems are reported
 * but do not abort processing.
 */
@Override
public Data process(Data input) {
    try {
        byte[] data = (byte[]) input.get(key);
        // Guard against a missing attribute instead of handing null to the
        // codec and producing a spurious stack trace.
        if (data != null) {
            Data decoded = codec.decode(data);
            input.putAll(decoded);
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    return input;
}
/**
 * @see stream.Processor#process(stream.Data)
 *
 * Decodes the byte array stored under {@code key} with the configured codec
 * and merges the decoded item into the input. Decoding problems are reported
 * but do not abort processing.
 */
@Override
public Data process(Data input) {
    try {
        byte[] data = (byte[]) input.get(key);
        // Guard against a missing attribute instead of handing null to the
        // codec and producing a spurious stack trace.
        if (data != null) {
            Data decoded = codec.decode(data);
            input.putAll(decoded);
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    return input;
}
/**
 * Reads one item from every registered stream (in addition order) and merges
 * the results into a single item. Returns {@code null} only when none of the
 * streams delivered any data.
 */
@Override
public Data readNext() throws Exception {
    Data merged = DataFactory.create();
    boolean anyData = false;
    for (String id : additionOrder) {
        Data next = streams.get(id).read();
        if (next != null) {
            merged.putAll(next);
            anyData = true;
        }
    }
    return anyData ? merged : null;
}
/**
 * Reads one item from every registered stream (in addition order) and merges
 * the results into a single item. Returns {@code null} only when none of the
 * streams delivered any data.
 */
@Override
public Data readNext() throws Exception {
    Data merged = DataFactory.create();
    boolean anyData = false;
    for (String id : additionOrder) {
        Data next = streams.get(id).read();
        if (next != null) {
            merged.putAll(next);
            anyData = true;
        }
    }
    return anyData ? merged : null;
}
/**
 * @see stream.Processor#process(stream.Data)
 */
@Override
public Data process(Data input) {
    if (queue == null) {
        log.debug("No queue defined, not dequeuing anything...");
        return input;
    }
    if (input == null) {
        input = DataFactory.create();
    }
    Data item = queue.take();
    if (item == null) {
        log.error("Dequeued 'null' from queue - unexpected?!");
    } else {
        log.debug("Merging dequeued item '{}'", item);
        input.putAll(item);
    }
    return input;
}
}
/**
 * @see stream.io.Stream#read()
 *
 * Blocks until an item arrives on the internal queue and returns a copy of
 * it. Returns {@code null} once the underlying socket has been closed.
 */
@Override
public Data read() throws Exception {
    Data received = null;
    do {
        try {
            received = queue.take();
        } catch (InterruptedException ie) {
            // An interrupt only signals end-of-stream when the socket is
            // gone; otherwise we go back to waiting on the queue.
            if (socket.isClosed()) {
                return null;
            }
        }
    } while (received == null);
    Data copy = DataFactory.create();
    copy.putAll(received);
    return copy;
}
/**
 * @see stream.io.Stream#read()
 *
 * Blocks until an item arrives on the internal queue and returns a copy of
 * it. Returns {@code null} once the underlying socket has been closed.
 */
@Override
public Data read() throws Exception {
    Data received = null;
    do {
        try {
            received = queue.take();
        } catch (InterruptedException ie) {
            // An interrupt only signals end-of-stream when the socket is
            // gone; otherwise we go back to waiting on the queue.
            if (socket.isClosed()) {
                return null;
            }
        }
    } while (received == null);
    Data copy = DataFactory.create();
    copy.putAll(received);
    return copy;
}
/**
 * @see stream.io.Stream#read()
 *
 * Blocks until an item arrives on the internal queue and returns a copy of
 * it. Returns {@code null} when the socket has been closed or the end-of-file
 * sentinel is dequeued.
 */
@Override
public Data read() throws Exception {
    Data received = null;
    do {
        try {
            received = queue.take();
        } catch (InterruptedException ie) {
            // An interrupt only signals end-of-stream when the socket is
            // gone; otherwise we go back to waiting on the queue.
            if (socket.isClosed()) {
                return null;
            }
        }
    } while (received == null);
    // Identity comparison on purpose: 'eof' is a sentinel object that marks
    // the end of the stream.
    if (received == eof) {
        return null;
    }
    Data copy = DataFactory.create();
    copy.putAll(received);
    return copy;
}
/**
 * @see org.apache.hadoop.mapred.RecordReader#next(java.lang.Object,
 *      java.lang.Object)
 */
public boolean next(Key key, Data value) throws IOException {
    try {
        Data item = stream.read();
        if (item == null) {
            return false;
        }
        // Fill the caller-supplied value object in place, as the
        // RecordReader contract requires.
        value.clear();
        value.putAll(item);
        return true;
    } catch (Exception e) {
        throw new IOException(e);
    }
}
}
/**
 * @see org.apache.hadoop.mapred.RecordReader#next(java.lang.Object,
 *      java.lang.Object)
 */
public boolean next(Key key, Data value) throws IOException {
    try {
        Data item = stream.read();
        if (item == null) {
            return false;
        }
        // Fill the caller-supplied value object in place, as the
        // RecordReader contract requires.
        value.clear();
        value.putAll(item);
        return true;
    } catch (Exception e) {
        throw new IOException(e);
    }
}
}
/**
 * @see stream.Processor#process(stream.Data)
 *
 * De-serializes the (optionally gzip-compressed) byte array stored under
 * {@code key} and merges the decoded item into the input. The input is
 * returned unchanged when the attribute is absent or decoding fails.
 */
public Data process(Data input) {
    byte[] bytes = (byte[]) input.get(key);
    if (bytes != null) {
        try {
            if (gzip) {
                // Decompress before handing the payload to the serializer.
                bytes = gunzip(bytes);
            }
            Data item = serializer.decode(bytes);
            input.putAll(item);
        } catch (Exception e) {
            log.error("Failed to de-serialize item: {}", e.getMessage());
            if (log.isDebugEnabled()) {
                // Route the stack trace through the logger instead of
                // dumping it to stderr with printStackTrace().
                log.debug("Stack trace:", e);
            }
        }
    }
    return input;
}
/**
 * @see stream.data.DataProcessor#process(stream.data.Data)
 *
 * Parses the header text stored under {@code section} and merges the parsed
 * key/value pairs into the item. Items without that attribute pass through
 * unchanged.
 */
@Override
public Data process(Data data) {
    // Fetch the attribute once instead of twice.
    Serializable raw = data.get(section);
    if (raw != null) {
        Map<String, String> headers = parseHeader(raw.toString());
        data.putAll(headers);
    }
    return data;
}
/**
 * @see stream.io.Stream#read()
 *
 * Produces the next synthetic item by delegating to {@code generate()} and
 * copying the generated values into a fresh item.
 */
@Override
public Data readNext() throws Exception {
    Data item = DataFactory.create();
    Data generated = this.generate();
    // NOTE(review): clear() looks redundant on a freshly created item —
    // confirm DataFactory.create() returns an empty item.
    item.clear();
    item.putAll(generated);
    count++;
    return item;
}
/**
 * @see stream.io.Stream#read()
 *
 * Produces the next synthetic item by delegating to {@code generate()} and
 * copying the generated values into a fresh item.
 */
@Override
public Data readNext() throws Exception {
    Data item = DataFactory.create();
    Data generated = this.generate();
    // NOTE(review): clear() looks redundant on a freshly created item —
    // confirm DataFactory.create() returns an empty item.
    item.clear();
    item.putAll(generated);
    count++;
    return item;
}