@Override
public void map(AvroWrapper<IN> wrapper, NullWritable value, OutputCollector<KO, VO> collector,
    Reporter reporter) throws IOException {
  // Lazily build the collector wrapper on the first record; the framework
  // hands us the OutputCollector only at map() time, not at configure() time.
  if (this.out == null) {
    this.out = new MapCollector<>(collector, isMapOnly);
  }
  // Unwrap the Avro datum and delegate to the user-supplied mapper.
  mapper.map(wrapper.datum(), out, reporter);
}
// Bridges the old-API JobConf lifecycle hook into the Configurable contract.
// NOTE(review): this line appears truncated in this chunk — the body opens and
// calls super.setConf(conf) but the closing brace(s) are not visible here;
// confirm the rest of the method against the full file.
@Override public void configure(JobConf conf) { super.setConf(conf);
@Override
public void close() throws IOException {
  // Give the wrapped mapper a chance to flush and release its resources.
  mapper.close();
}
@Override
public void setConf(Configuration conf) {
  super.setConf(conf);
  // conf is null during reflective construction; only read settings once a
  // real configuration is supplied.
  if (conf != null) {
    _itemKey = conf.get(ITEM_KEY, "");
  }
}
// Delegating-mapper pattern for multi-input jobs: on the first record, the
// concrete AvroMapper class is recovered from the TaggedInputSplit that
// produced this split and instantiated reflectively; every datum is then
// forwarded to that delegate.
// NOTE(review): the cast to AvroMapper<IN,OUT> is unchecked — presumably
// guaranteed by how the mapper class is registered per input path; verify.
// The trailing extra '}' closes the enclosing class, whose header is outside
// this chunk.
@Override public void map(AvroWrapper<IN> wrapper, NullWritable value, OutputCollector<KO, VO> collector, Reporter reporter) throws IOException { if (mapper == null) { TaggedInputSplit is = (TaggedInputSplit) reporter.getInputSplit(); Class<? extends AvroMapper> mapperClass = is.getMapperClass(); mapper = (AvroMapper<IN,OUT>) ReflectionUtils.newInstance(mapperClass, conf); } if (out == null) out = new MapCollector<OUT,K,V,KO,VO>(collector, isMapOnly); mapper.map(wrapper.datum(), out, reporter); } }
@Override
public void close() throws IOException {
  // Forward shutdown to the delegate so it can clean up its own state.
  this.mapper.close();
}
/**
 * Parses the comma-separated {@code LAMBDA} regularization values from the job
 * configuration into {@code _lambdaSet}.
 *
 * @param conf job configuration; ignored when {@code null} (reflective construction)
 * @throws IllegalStateException if the {@code LAMBDA} property is not set,
 *         instead of the opaque NullPointerException the old code produced
 * @throws NumberFormatException if any entry is not a parseable float
 */
@Override
public void setConf(Configuration conf) {
  super.setConf(conf);
  if (conf == null) {
    return;
  }
  String lambdas = conf.get(LAMBDA);
  if (lambdas == null) {
    // Fail fast with a clear message rather than NPE-ing on split() below.
    throw new IllegalStateException("Required configuration property missing: " + LAMBDA);
  }
  _lambdaSet = new HashSet<Float>();
  for (String l : lambdas.split(",")) {
    // Float.parseFloat tolerates leading/trailing whitespace around each entry.
    _lambdaSet.add(Float.parseFloat(l));
  }
}
/**
 * Forwards one Avro input datum to the wrapped mapper, lazily creating the
 * output collector on first use (the OutputCollector only becomes available
 * here, not at configuration time).
 */
@Override
public void map(AvroWrapper<IN> wrapper, NullWritable value, OutputCollector<KO,VO> collector,
    Reporter reporter) throws IOException {
  if (this.out == null) {
    // Diamond operator: type arguments are inferred from the field's
    // declaration, matching the style used elsewhere in this file.
    this.out = new MapCollector<>(collector, isMapOnly);
  }
  mapper.map(wrapper.datum(), out, reporter);
}
/**
 * Parses the comma-separated {@code LAMBDA} regularization values from the job
 * configuration into {@code _lambdaSet}.
 *
 * @param conf job configuration; ignored when {@code null} (reflective construction)
 * @throws IllegalStateException if the {@code LAMBDA} property is not set,
 *         instead of the opaque NullPointerException the old code produced
 * @throws NumberFormatException if any entry is not a parseable float
 */
@Override
public void setConf(Configuration conf) {
  super.setConf(conf);
  if (conf == null) {
    return;
  }
  String lambdas = conf.get(LAMBDA);
  if (lambdas == null) {
    // Fail fast with a clear message rather than NPE-ing on split() below.
    throw new IllegalStateException("Required configuration property missing: " + LAMBDA);
  }
  _lambdaSet = new HashSet<Float>();
  for (String l : lambdas.split(",")) {
    // Float.parseFloat tolerates leading/trailing whitespace around each entry.
    _lambdaSet.add(Float.parseFloat(l));
  }
}
@Override
public void map(AvroWrapper<IN> wrapper, NullWritable value, OutputCollector<KO,VO> collector,
    Reporter reporter) throws IOException {
  // First record: wrap the framework collector. The raw MapCollector is kept
  // as in the original; the field's generic declaration is not visible here.
  if (out == null) {
    out = new MapCollector(collector);
  }
  mapper.map(wrapper.datum(), out, reporter);
}
@Override
public void setConf(Configuration conf) {
  super.setConf(conf);
  // Null during reflective construction — nothing to read yet.
  if (conf == null) {
    return;
  }
  // Pull job settings into fields; statement order (including the log line)
  // is preserved from the original.
  _mapkey = conf.get(MAP_KEY, "");
  _nblocks = conf.getInt(NUM_BLOCKS, 0);
  _logger.info("nblocks=" + _nblocks);
  _ignoreValue = conf.getBoolean(IGNORE_FEATURE_VALUE, false);
  _numClickReplicates = conf.getInt(NUM_CLICK_REPLICATES, 1);
}
// Delegating-mapper pattern for multi-input jobs: on the first record, the
// concrete AvroMapper class is recovered from the TaggedInputSplit that
// produced this split and instantiated reflectively; every datum is then
// forwarded to that delegate.
// NOTE(review): the cast to AvroMapper<IN,OUT> is unchecked — presumably
// guaranteed by how the mapper class is registered per input path; verify.
// The trailing extra '}' closes the enclosing class, whose header is outside
// this chunk.
@Override public void map(AvroWrapper<IN> wrapper, NullWritable value, OutputCollector<KO, VO> collector, Reporter reporter) throws IOException { if (mapper == null) { TaggedInputSplit is = (TaggedInputSplit) reporter.getInputSplit(); Class<? extends AvroMapper> mapperClass = is.getMapperClass(); mapper = (AvroMapper<IN,OUT>) ReflectionUtils.newInstance(mapperClass, conf); } if (out == null) out = new MapCollector<OUT,K,V,KO,VO>(collector, isMapOnly); mapper.map(wrapper.datum(), out, reporter); } }
/**
 * Loads the lambda/rho map from the distributed cache into
 * {@code _lambdaRhoConsumer}.
 *
 * @param conf job configuration; ignored when {@code null} (reflective construction)
 */
@Override
public void setConf(Configuration conf) {
  super.setConf(conf);
  if (conf == null) {
    return;
  }
  // Hoisted: the path is read once instead of on every use.
  String lambdaRhoPath = conf.get(LAMBDA_RHO_MAP);
  AvroDistributedCacheFileReader lambdaRhoReader =
      new AvroDistributedCacheFileReader(new JobConf(conf));
  try {
    lambdaRhoReader.build(lambdaRhoPath, _lambdaRhoConsumer);
    _lambdaRhoConsumer.done();
  } catch (IOException e) {
    // Log with context instead of printStackTrace(); preserves the original
    // best-effort behavior (execution continues) while keeping the cause.
    _logger.error("Failed to load lambda-rho map from " + lambdaRhoPath, e);
  }
  _logger.info("lambda file:" + lambdaRhoPath);
  _logger.info("Loaded " + String.valueOf(_lambdaRhoConsumer.get().size()) + " lambdas.");
}
/**
 * Loads the trained model from the distributed cache into {@code _modelConsumer}
 * and reads the test-time settings (lambda, binary-feature flag) from the job
 * configuration.
 *
 * @param conf job configuration; ignored when {@code null} (reflective construction)
 */
@Override
public void setConf(Configuration conf) {
  super.setConf(conf);
  if (conf == null) {
    return;
  }
  _outputSchema = AvroJob.getOutputSchema(conf);
  // Hoisted: the model path is read once and reused in the error message.
  String modelPath = conf.get(MODEL_PATH);
  AvroDistributedCacheFileReader modelReader =
      new AvroDistributedCacheFileReader(new JobConf(conf));
  try {
    modelReader.build(modelPath, _modelConsumer);
    _modelConsumer.done();
  } catch (IOException e) {
    // Log with context instead of printStackTrace(); preserves the original
    // best-effort behavior (execution continues) while keeping the cause.
    _logger.error("Failed to load model from " + modelPath, e);
  }
  _lambda = conf.getFloat(LAMBDA, 0);
  _ignoreValue = conf.getBoolean(BINARY_FEATURE, false);
  _logger.info("Loaded the model for test, size:" + _modelConsumer.get().size());
}