/**
 * Looks up the converter for the supplied {@code PType} by delegating to the
 * wrapped {@code target}.
 *
 * @param ptype the PType whose converter is requested
 * @return whatever converter the underlying target supplies for {@code ptype}
 */
@Override
public Converter<?, ?, ?, ?> getConverter(PType<?> ptype) {
  return target.getConverter(ptype);
}
// Serialize the Hadoop Configuration to bytes and broadcast it through the
// SparkContext; the broadcast handle is stored on the runtime context
// (presumably so executors can rebuild the conf — TODO confirm against caller).
// NOTE(review): mid-method fragment — the enclosing method and the close of the
// `if (t instanceof MapReduceTarget)` block are outside this view.
// The TODO below was a line-comment in the original file; as a /* */ comment it
// no longer swallows the code that follows it on this collapsed line.
getRuntimeContext().setConf(sparkContext.broadcast(WritableUtils.toByteArray(conf))); if (t instanceof MapReduceTarget) { /* TODO: check this earlier */ Converter c = t.getConverter(ptype); IdentityFn ident = IdentityFn.getInstance(); JavaPairRDD<?, ?> outRDD;
// NOTE(review): byte-identical duplicate of the previous fragment — likely an
// extraction artifact; verify against the original file before deduplicating.
// Broadcasts the serialized Configuration and registers the handle on the
// runtime context, then begins the MapReduceTarget-specific output path.
// The TODO below was a line-comment in the original file; as a /* */ comment it
// no longer swallows the code that follows it on this collapsed line.
getRuntimeContext().setConf(sparkContext.broadcast(WritableUtils.toByteArray(conf))); if (t instanceof MapReduceTarget) { /* TODO: check this earlier */ Converter c = t.getConverter(ptype); IdentityFn ident = IdentityFn.getInstance(); JavaPairRDD<?, ?> outRDD;
// Lazily build an output DoNode for this target: take the PType from the tail
// of the node path, create an output node keyed by the target's string form and
// its converter for that PType, then let the output handler configure it.
// NOTE(review): this span repeats the same four statements twice verbatim and
// leaves both `if` braces unclosed — this looks like an extraction artifact of
// a single occurrence inside a larger method; confirm against the original file.
if (node == null) { PType<?> ptype = nodePath.tail().getPType(); node = DoNode.createOutputNode(target.toString(), target.getConverter(ptype), ptype); outputHandler.configureNode(node, target); if (node == null) { PType<?> ptype = nodePath.tail().getPType(); node = DoNode.createOutputNode(target.toString(), target.getConverter(ptype), ptype); outputHandler.configureNode(node, target);