@Override
public boolean accept(OutputHandler handler, PType<?> ptype) {
  return target.accept(handler, ptype);
}
@Override
public <S> SourceTarget<S> asSourceTarget(PType<S> ptype) {
  return target.asSourceTarget(ptype);
}
@Override
public Converter<?, ?, ?, ?> getConverter(PType<?> ptype) {
  return target.getConverter(ptype);
}
@SuppressWarnings("unchecked") public void write(PCollection<?> pcollection, Target target, Target.WriteMode writeMode) { if (pcollection instanceof BaseGroupedTable) { pcollection = ((BaseGroupedTable<?, ?>) pcollection).ungroup(); } else if (pcollection instanceof BaseUnionCollection || pcollection instanceof BaseUnionTable) { pcollection = pcollection.parallelDo("UnionCollectionWrapper", (MapFn) IdentityFn.<Object> getInstance(), pcollection.getPType()); } boolean exists = target.handleExisting(writeMode, ((PCollectionImpl) pcollection).getLastModifiedAt(), getConfiguration()); if (exists && writeMode == Target.WriteMode.CHECKPOINT) { SourceTarget<?> st = target.asSourceTarget(pcollection.getPType()); if (st == null) { throw new CrunchRuntimeException("Target " + target + " does not support checkpointing"); } else { ((PCollectionImpl) pcollection).materializeAt(st); } return; } else if (writeMode != Target.WriteMode.APPEND && targetInCurrentRun(target)) { throw new CrunchRuntimeException("Target " + target + " is already written in current run." + " Use WriteMode.APPEND in order to write additional data to it."); } // Need special handling for append targets in the case of materialization if (writeMode == Target.WriteMode.APPEND) { appendedTargets.add(target); } addOutput((PCollectionImpl<?>) pcollection, target); }
@Override
public void handleExisting(WriteMode strategy, Configuration conf) {
  target.handleExisting(strategy, conf);
}
}
@Override
public Target outputConf(String key, String value) {
  target.outputConf(key, value);
  return this;
}
@Override
public boolean handleExisting(WriteMode strategy, long lastModifiedAt, Configuration conf) {
  return target.handleExisting(strategy, lastModifiedAt, conf);
}
/**
 * Configure the given output target to be compressed using the given codec.
 */
@SuppressWarnings("unchecked")
public static <T extends Target> T compress(T target, Class<? extends CompressionCodec> codecClass) {
  return (T) target.outputConf("mapred.output.compress", "true")
      .outputConf("mapred.output.compression.codec", codecClass.getCanonicalName());
}
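// Usage sketch (illustrative): wrapping a text target with Snappy compression
// via the helper above. SnappyCodec is org.apache.hadoop.io.compress.SnappyCodec;
// the path and the method name are made up for the example.
static Target snappyTextTarget() {
  return compress(To.textFile("/tmp/out"), SnappyCodec.class);
}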
@Override
public void write(PCollection<?> collection, Target target, Target.WriteMode writeMode) {
  target.handleExisting(writeMode, -1, getConfiguration());
  if (writeMode != Target.WriteMode.APPEND && activeTargets.contains(target)) {
    throw new CrunchRuntimeException("Target " + target + " is already written in the current run."
        + " Use WriteMode.APPEND in order to write additional data to it.");
  }
  // ...
}
getRuntimeContext().setConf(sparkContext.broadcast(WritableUtils.toByteArray(conf)));
if (t instanceof MapReduceTarget) { // TODO: check this earlier
  Converter c = t.getConverter(ptype);
  IdentityFn ident = IdentityFn.getInstance();
  JavaPairRDD<?, ?> outRDD;
  // ...
}
/**
 * Configure the given output target to be compressed using Gzip.
 */
@SuppressWarnings("unchecked")
public static <T extends Target> T gzip(T target) {
  return (T) compress(target, GzipCodec.class)
      .outputConf(AvroJob.OUTPUT_CODEC, DataFileConstants.DEFLATE_CODEC);
}
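// Usage sketch (illustrative): gzip() above also sets the Avro container codec
// to deflate, since Avro data files do not support gzip, so the same helper is
// usable for text and Avro targets alike. Path and method name are made up.
static Target gzippedTarget() {
  return gzip(To.textFile("/tmp/out"));
}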
@Override
public <S> SourceTarget<S> asSourceTarget(PType<S> ptype) {
  if (ptype != null && ptype.equals(source.getType())) {
    return (SourceTarget<S>) this;
  }
  return target.asSourceTarget(ptype);
}
@Override
public void write(PCollection<?> collection, Target target, Target.WriteMode writeMode) {
  target.handleExisting(writeMode, getConfiguration());
  if (writeMode != WriteMode.APPEND && activeTargets.contains(target)) {
    throw new CrunchRuntimeException("Target " + target + " is already written in the current run."
        + " Use WriteMode.APPEND in order to write additional data to it.");
  }
  // ...
}
@Override
public SourceTarget<T> conf(String key, String value) {
  source.inputConf(key, value);
  target.outputConf(key, value);
  return this;
}
}
private InputCollection<?> handleSplitTarget(PCollectionImpl<?> splitTarget) {
  if (!outputs.containsKey(splitTarget)) {
    outputs.put(splitTarget, Sets.<Target> newHashSet());
  }

  // Look for an existing target that can also serve as a source; a plain
  // Target that can be converted is remembered so it can be swapped for its
  // SourceTarget equivalent.
  SourceTarget srcTarget = null;
  Target targetToReplace = null;
  for (Target t : outputs.get(splitTarget)) {
    if (t instanceof SourceTarget) {
      srcTarget = (SourceTarget<?>) t;
      break;
    } else {
      srcTarget = t.asSourceTarget(splitTarget.getPType());
      if (srcTarget != null) {
        targetToReplace = t;
        break;
      }
    }
  }
  if (targetToReplace != null) {
    outputs.get(splitTarget).remove(targetToReplace);
  } else if (srcTarget == null) {
    // No readable target exists: create a temporary intermediate output.
    srcTarget = pipeline.createIntermediateOutput(splitTarget.getPType());
  }
  outputs.get(splitTarget).add(srcTarget);
  splitTarget.materializeAt(srcTarget);

  // Read the materialized split point back as an input collection.
  return (InputCollection<?>) pipeline.read(srcTarget);
}
}
public void configureNode(DoNode node, Target target) {
  workingNode = node;
  target.accept(this, node.getPType());
}