/**
 * Converts this target into a {@link SourceTarget} for the given type by
 * delegating directly to the wrapped {@code target}; this wrapper adds no
 * source/target semantics of its own.
 */
@Override
public <S> SourceTarget<S> asSourceTarget(PType<S> ptype) {
  return target.asSourceTarget(ptype);
}
/**
 * Pure delegation: asks the underlying {@code target} for a
 * {@link SourceTarget} view of the requested {@link PType}.
 *
 * @param ptype the type requested by the caller
 * @return whatever the wrapped target produces (may be {@code null} if the
 *         wrapped target does not support the conversion — depends on its
 *         contract, not visible here)
 */
@Override
public <S> SourceTarget<S> asSourceTarget(PType<S> ptype) {
  return target.asSourceTarget(ptype);
}
/**
 * Returns this instance as the {@link SourceTarget} when the requested type
 * matches the wrapped source's type; otherwise delegates to the underlying
 * {@code target}.
 *
 * @param ptype the requested type; may be {@code null}, in which case we
 *              always delegate
 */
@Override
public <S> SourceTarget<S> asSourceTarget(PType<S> ptype) {
  if (ptype != null && ptype.equals(source.getType())) {
    // Safe: ptype equals source.getType(), so S is the element type this
    // instance was constructed for. Suppress on the narrowest scope.
    @SuppressWarnings("unchecked")
    SourceTarget<S> self = (SourceTarget<S>) this;
    return self;
  }
  return target.asSourceTarget(ptype);
}
// Materializes a split point so downstream jobs can re-read it as an input.
// Strategy: scan the targets already registered for splitTarget —
//   (1) if one is already a SourceTarget, reuse it as-is;
//   (2) otherwise, if a target can be converted via asSourceTarget, replace
//       that target with its SourceTarget view;
//   (3) if neither exists, create a fresh intermediate output for the PType.
// The chosen SourceTarget is (re)added to the output set, the collection is
// pinned to it via materializeAt, and a new InputCollection reading from it
// is returned.
// NOTE(review): srcTarget is a raw SourceTarget — presumably deliberate, since
// splitTarget is a PCollectionImpl<?> and a wildcard capture would not feed
// materializeAt cleanly; confirm before "fixing" the raw type. The trailing
// extra '}' closes the enclosing class, which is outside this view.
private InputCollection<?> handleSplitTarget(PCollectionImpl<?> splitTarget) { if (!outputs.containsKey(splitTarget)) { outputs.put(splitTarget, Sets.<Target> newHashSet()); } SourceTarget srcTarget = null; Target targetToReplace = null; for (Target t : outputs.get(splitTarget)) { if (t instanceof SourceTarget) { srcTarget = (SourceTarget<?>) t; break; } else { srcTarget = t.asSourceTarget(splitTarget.getPType()); if (srcTarget != null) { targetToReplace = t; break; } } } if (targetToReplace != null) { outputs.get(splitTarget).remove(targetToReplace); } else if (srcTarget == null) { srcTarget = pipeline.createIntermediateOutput(splitTarget.getPType()); } outputs.get(splitTarget).add(srcTarget); splitTarget.materializeAt(srcTarget); return (InputCollection<?>) pipeline.read(srcTarget); } }
// Ensures splitTarget is backed by a readable SourceTarget and returns an
// InputCollection over it. Lazily initializes the output set, then prefers in
// order: an existing SourceTarget in the set; a registered Target convertible
// via asSourceTarget (the original Target is swapped out for its converted
// form); or a newly created intermediate output. Finally registers the chosen
// SourceTarget, materializes the collection there, and reads it back.
// NOTE(review): the raw 'SourceTarget' local appears intentional (generics
// workaround for the wildcard PCollectionImpl<?>) — do not convert blindly.
// The final extra '}' terminates the enclosing class, not visible here.
private InputCollection<?> handleSplitTarget(PCollectionImpl<?> splitTarget) { if (!outputs.containsKey(splitTarget)) { outputs.put(splitTarget, Sets.<Target> newHashSet()); } SourceTarget srcTarget = null; Target targetToReplace = null; for (Target t : outputs.get(splitTarget)) { if (t instanceof SourceTarget) { srcTarget = (SourceTarget<?>) t; break; } else { srcTarget = t.asSourceTarget(splitTarget.getPType()); if (srcTarget != null) { targetToReplace = t; break; } } } if (targetToReplace != null) { outputs.get(splitTarget).remove(targetToReplace); } else if (srcTarget == null) { srcTarget = pipeline.createIntermediateOutput(splitTarget.getPType()); } outputs.get(splitTarget).add(srcTarget); splitTarget.materializeAt(srcTarget); return (InputCollection<?>) pipeline.read(srcTarget); } }
// NOTE(review): incomplete fragment — this is the middle of the target-scan
// loop seen in handleSplitTarget above (convert t to a SourceTarget and mark
// it for replacement); the enclosing method is not fully visible here.
break; } else { srcTarget = t.asSourceTarget(splitTarget.getPType()); if (srcTarget != null) { targetToReplace = t;
// NOTE(review): incomplete fragment — mid-method excerpt. Appears to mark a
// collection as materialized, or convert a Target into a SourceTarget and
// materialize the collection there when conversion succeeds; the surrounding
// loop/method is outside this view — confirm against the full file.
materialized = true; } else { SourceTarget st = t.asSourceTarget(c.getPType()); if (st != null) { c.materializeAt(st);
// NOTE(review): duplicate of the fragment above — same mid-method excerpt
// (flag materialization, else try t.asSourceTarget(c.getPType()) and
// materialize at the converted SourceTarget). Enclosing scope not visible.
materialized = true; } else { SourceTarget st = t.asSourceTarget(c.getPType()); if (st != null) { c.materializeAt(st);
// Writes a PCollection to a Target under the given WriteMode.
// Normalization first: a grouped table is ungrouped, and a union
// collection/table is funneled through an identity parallelDo so it becomes a
// single concrete collection before writing.
// Then target.handleExisting decides what to do with pre-existing output:
//   - CHECKPOINT + existing output: the collection is simply materialized at
//     the target's SourceTarget view (no rewrite); throws if the target
//     cannot act as a source (checkpointing unsupported).
//   - non-APPEND modes: writing the same target twice in one run is rejected
//     with a CrunchRuntimeException.
//   - APPEND: the target is remembered in appendedTargets so later
//     materialization logic can special-case it.
// Finally the (pcollection, target) pair is registered via addOutput.
// NOTE(review): the raw PCollectionImpl/MapFn casts are covered by the
// method-level @SuppressWarnings("unchecked") — presumably deliberate given
// the wildcard-typed parameter; confirm before tightening.
@SuppressWarnings("unchecked") public void write(PCollection<?> pcollection, Target target, Target.WriteMode writeMode) { if (pcollection instanceof BaseGroupedTable) { pcollection = ((BaseGroupedTable<?, ?>) pcollection).ungroup(); } else if (pcollection instanceof BaseUnionCollection || pcollection instanceof BaseUnionTable) { pcollection = pcollection.parallelDo("UnionCollectionWrapper", (MapFn) IdentityFn.<Object> getInstance(), pcollection.getPType()); } boolean exists = target.handleExisting(writeMode, ((PCollectionImpl) pcollection).getLastModifiedAt(), getConfiguration()); if (exists && writeMode == Target.WriteMode.CHECKPOINT) { SourceTarget<?> st = target.asSourceTarget(pcollection.getPType()); if (st == null) { throw new CrunchRuntimeException("Target " + target + " does not support checkpointing"); } else { ((PCollectionImpl) pcollection).materializeAt(st); } return; } else if (writeMode != Target.WriteMode.APPEND && targetInCurrentRun(target)) { throw new CrunchRuntimeException("Target " + target + " is already written in current run." + " Use WriteMode.APPEND in order to write additional data to it."); } // Need special handling for append targets in the case of materialization if (writeMode == Target.WriteMode.APPEND) { appendedTargets.add(target); } addOutput((PCollectionImpl<?>) pcollection, target); }