// Replace originalTask with newTask inside a ConditionalTask's parallel lists
// (Spark execution path). listTask(i) corresponds to listWork(i).
ConditionalWork conditionalWork = conditionalTask.getWork();
SparkWork originWork = originalTask.getWork();
SparkWork newWork = newTask.getWork();
List<Task<? extends Serializable>> listTask = conditionalTask.getListTasks();
List<Serializable> listWork = (List<Serializable>) conditionalWork.getListWorks();
// Position of the task being replaced; -1 if not found — TODO confirm callers
// guarantee originalTask is in the list.
int taskIndex = listTask.indexOf(originalTask);
ConditionalResolver resolver = conditionalTask.getResolver();
if (resolver instanceof ConditionalResolverSkewJoin) {
  // NOTE(review): truncated fragment — the expression ".getResolverCtx()" is
  // invoked on, and the assignment producing "context", are missing here;
  // confirm against the complete file.
  .getResolverCtx();
  HashMap<Path, Task<? extends Serializable>> bigKeysDirToTaskMap = context
      .getDirToTaskMap();
/**
 * Reports completion of this conditional task: true only when every parent
 * task is done and every candidate task (the resolved subset once the
 * resolver has run, otherwise the full candidate list) is done.
 */
@Override
public boolean done() {
  boolean allDone = true;
  List<Task<? extends Serializable>> parents = getParentTasks();
  if (parents != null) {
    for (Task<? extends Serializable> parent : parents) {
      allDone = allDone && parent.done();
    }
  }
  // Once resolved, only the tasks the resolver actually selected matter.
  List<Task<? extends Serializable>> candidates = resolved ? resTasks : getListTasks();
  if (allDone && candidates != null) {
    for (Task<? extends Serializable> candidate : candidates) {
      allDone = allDone && candidate.done();
    }
  }
  return allDone;
}
// Wire up the merge-files conditional task: the resolver decides at run time
// whether the output under condInputPath is fragmented enough to need a
// merge job, choosing among listTasks.
cndTsk.setListTasks(listTasks);
cndTsk.setResolver(new ConditionalResolverMergeFiles());
ConditionalResolverMergeFilesCtx mrCtx =
    new ConditionalResolverMergeFilesCtx(listTasks, condInputPath.toString());
cndTsk.setResolverCtx(mrCtx);
/**
 * Runs the conditional: asks the resolver which candidate tasks should
 * actually execute, records the selection, then launches them.
 *
 * @param driverContext the driver context used to launch the chosen tasks
 * @return 0 on success, 1 if resolving/launching threw (the exception is
 *         stored via setException for the caller to inspect)
 */
@Override
public int execute(DriverContext driverContext) {
  resTasks = resolver.getTasks(conf, resolverCtx);
  resolved = true;
  int rc = 0;
  try {
    resolveTask(driverContext);
  } catch (Exception e) {
    // Keep the cause for the driver; signal failure through the exit code.
    setException(e);
    rc = 1;
  }
  return rc;
}
/**
 * Add a dependent task on the current conditional task. The task will not be
 * a direct child of the conditional task; it is added as a child of every
 * candidate task instead, so it runs after whichever branch is chosen.
 *
 * @return true if the task got added, false if it already existed
 */
@Override
public boolean addDependentTask(Task<? extends Serializable> dependent) {
  List<Task<? extends Serializable>> branches = getListTasks();
  if (branches == null) {
    return false;
  }
  boolean addedEverywhere = true;
  for (Task<? extends Serializable> branch : branches) {
    // Deliberately non-short-circuiting: every branch must be offered the
    // dependent even after one branch reports it already existed.
    addedEverywhere = branch.addDependentTask(dependent) & addedEverywhere;
  }
  return addedEverywhere;
}
// Build the skew-join conditional task and hand the resolver the mapping
// from big-key directories to the tasks that will process them.
ConditionalTask cndTsk = (ConditionalTask) TaskFactory.get(cndWork, parseCtx.getConf());
cndTsk.setListTasks(listTasks);
cndTsk.setResolver(new ConditionalResolverSkewJoin());
cndTsk.setResolverCtx(new ConditionalResolverSkewJoin.ConditionalResolverSkewJoinCtx(
    bigKeysDirToTaskMap));
List<Task<? extends Serializable>> oldChildTasks = currTask.getChildTasks();
// Re-parent the current task's children under every candidate branch so the
// chosen branch still feeds the original downstream tasks.
// NOTE(review): fragment — the loops below are not closed in this view.
for (Task<? extends Serializable> tsk : cndTsk.getListTasks()) {
  for (Task<? extends Serializable> oldChild : oldChildTasks) {
    tsk.addDependentTask(oldChild);
// Emit query-plan adjacency entries for a conditional task: each candidate
// branch is linked to the conditional task's children in the plan graph.
// NOTE(review): fragment — the loop/if bodies are not closed in this view.
ConditionalTask t = (ConditionalTask) task;
for (Task<? extends Serializable> listTask : t.getListTasks()) {
  if (t.getChildTasks() != null) {
    org.apache.hadoop.hive.ql.plan.api.Adjacency childEntry =
        new org.apache.hadoop.hive.ql.plan.api.Adjacency();
    childEntry.setNode(listTask.getId());
    for (Task<? extends Serializable> childTask : t.getChildTasks()) {
      childEntry.addToChildren(childTask.getId());
      // Avoid revisiting tasks already walked elsewhere in the traversal.
      if (!tasksVisited.contains(childTask)) {
// NOTE(review): truncated fragment — the assignment target for this cast
// (presumably "ConditionalResolverMergeFilesCtx mrCtx =") is missing;
// confirm against the complete file.
(ConditionalResolverMergeFilesCtx) cndTsk.getResolverCtx();
// Carry the dynamic-partition and list-bucketing contexts into the
// merge-files resolver context.
mrCtx.setDPCtx(fsInputDesc.getDynPartCtx());
mrCtx.setLbCtx(fsInputDesc.getLbCtx());
/**
 * Add a dependent task on the current conditional task. The task will not be
 * a direct child of the conditional task; it is added as a child of every
 * candidate task instead, so it runs after whichever branch is chosen.
 *
 * @return true if the task got added, false if it already existed
 */
@Override
public boolean addDependentTask(Task<? extends Serializable> dependent) {
  List<Task<? extends Serializable>> candidates = getListTasks();
  boolean result = false;
  if (candidates != null) {
    result = true;
    for (Task<? extends Serializable> candidate : candidates) {
      // '&' (not '&&') on purpose: each candidate must still be offered the
      // dependent even after an earlier one reported it already existed.
      result &= candidate.addDependentTask(dependent);
    }
  }
  return result;
}
// Emit query-plan adjacency entries for a conditional task: each candidate
// branch is linked to the conditional task's children in the plan graph.
// NOTE(review): fragment — the loop/if bodies are not closed in this view.
ConditionalTask t = (ConditionalTask) task;
for (Task<? extends Serializable> listTask : t.getListTasks()) {
  if (t.getChildTasks() != null) {
    org.apache.hadoop.hive.ql.plan.api.Adjacency childEntry =
        new org.apache.hadoop.hive.ql.plan.api.Adjacency();
    childEntry.setNode(listTask.getId());
    for (Task<? extends Serializable> childTask : t.getChildTasks()) {
      childEntry.addToChildren(childTask.getId());
      // Skip tasks the surrounding traversal has already visited.
      if (!tasksVisited.contains(childTask)) {
/**
 * Resolves which candidate tasks should run and launches them.
 *
 * @param driverContext the driver context used for launching
 * @return 0 on success; 1 if resolution failed (exception recorded via
 *         setException)
 */
@Override
public int execute(DriverContext driverContext) {
  // Resolve first so done()/callers see the selected subset.
  resTasks = resolver.getTasks(conf, resolverCtx);
  resolved = true;
  try {
    resolveTask(driverContext);
    return 0;
  } catch (Exception e) {
    setException(e);
    return 1;
  }
}
// NOTE(review): truncated fragment — the assignment target for this cast
// (presumably "ConditionalResolverMergeFilesCtx mrCtx =") is missing;
// confirm against the complete file.
(ConditionalResolverMergeFilesCtx) cndTsk.getResolverCtx();
// Propagate dynamic-partition and list-bucketing contexts to the resolver.
mrCtx.setDPCtx(fsInputDesc.getDynPartCtx());
mrCtx.setLbCtx(fsInputDesc.getLbCtx());
// Replace originalTask with newTask inside a ConditionalTask's parallel lists
// (Spark execution path). listTask(i) corresponds to listWork(i).
ConditionalWork conditionalWork = conditionalTask.getWork();
SparkWork originWork = originalTask.getWork();
SparkWork newWork = newTask.getWork();
List<Task<? extends Serializable>> listTask = conditionalTask.getListTasks();
List<Serializable> listWork = (List<Serializable>) conditionalWork.getListWorks();
// Index of the task being replaced — TODO confirm originalTask is always
// present in the list.
int taskIndex = listTask.indexOf(originalTask);
ConditionalResolver resolver = conditionalTask.getResolver();
if (resolver instanceof ConditionalResolverSkewJoin) {
  // NOTE(review): truncated fragment — the receiver of ".getResolverCtx()"
  // and the "context" assignment are missing; confirm against the full file.
  .getResolverCtx();
  HashMap<Path, Task<? extends Serializable>> bigKeysDirToTaskMap = context
      .getDirToTaskMap();
// Configure the merge-files conditional: at run time the resolver inspects
// the files under condInputPath and picks which of listTasks to execute.
cndTsk.setListTasks(listTasks);
cndTsk.setResolver(new ConditionalResolverMergeFiles());
ConditionalResolverMergeFilesCtx mrCtx =
    new ConditionalResolverMergeFilesCtx(listTasks, condInputPath.toString());
cndTsk.setResolverCtx(mrCtx);
/**
 * Returns the effective successors of a task for graph traversal: a
 * conditional task's candidate list stands in for its direct children,
 * since its real work happens in whichever candidate the resolver picks.
 */
protected List<Task<?>> getChildTasks(Task<?> task) {
  return task instanceof ConditionalTask
      ? ((ConditionalTask) task).getListTasks()
      : task.getChildTasks();
}
}
/**
 * A conditional task is done only when all of its parents have finished and
 * all of its relevant tasks have finished — the resolver-selected subset if
 * resolution already happened, otherwise the full candidate list.
 */
@Override
public boolean done() {
  boolean finished = true;
  List<Task<? extends Serializable>> parents = getParentTasks();
  if (parents != null) {
    for (Task<? extends Serializable> p : parents) {
      finished = finished && p.done();
    }
  }
  List<Task<? extends Serializable>> relevant;
  if (resolved) {
    relevant = resTasks;
  } else {
    relevant = getListTasks();
  }
  if (finished && relevant != null) {
    for (Task<? extends Serializable> t : relevant) {
      finished = finished && t.done();
    }
  }
  return finished;
}
// Build plan-graph adjacency for a conditional task: link every candidate
// branch to the conditional task's children.
// NOTE(review): fragment — the loop/if bodies are not closed in this view.
ConditionalTask t = (ConditionalTask) task;
for (Task<? extends Serializable> listTask : t.getListTasks()) {
  if (t.getChildTasks() != null) {
    org.apache.hadoop.hive.ql.plan.api.Adjacency childEntry =
        new org.apache.hadoop.hive.ql.plan.api.Adjacency();
    childEntry.setNode(listTask.getId());
    for (Task<? extends Serializable> childTask : t.getChildTasks()) {
      childEntry.addToChildren(childTask.getId());
      // Only descend into children not yet visited by this traversal.
      if (!tasksVisited.contains(childTask)) {
/**
 * Executes the conditional task: the resolver selects the tasks to run,
 * which are then launched through the driver context.
 *
 * @param driverContext driver context used to launch selected tasks
 * @return 0 on success, 1 on failure (cause saved via setException)
 */
@Override
public int execute(DriverContext driverContext) {
  resTasks = resolver.getTasks(conf, resolverCtx);
  resolved = true;
  int exitCode = 0;
  try {
    resolveTask(driverContext);
  } catch (Exception e) {
    setException(e);
    exitCode = 1;
  }
  return exitCode;
}
// NOTE(review): truncated fragment — the assignment target for this cast is
// missing; confirm against the complete file. Also note this variant sets
// only the dynamic-partition context (no setLbCtx) — verify whether the
// list-bucketing context is intentionally omitted here.
(ConditionalResolverMergeFilesCtx) cndTsk.getResolverCtx();
mrCtx.setDPCtx(fsInputDesc.getDynPartCtx());
} else {
  // Substitute the map-reduce task with its local (map-join) variant inside
  // the conditional task's parallel task/work lists.
  List<Task<? extends Serializable>> listTask = conditionalTask.getListTasks();
  ConditionalWork conditionalWork = conditionalTask.getWork();
  int index = listTask.indexOf(currTask);
  listTask.set(index, localTask);
  listWork.set(index, localwork);
  conditionalWork.setListWorks(listWork);
  ConditionalResolver resolver = conditionalTask.getResolver();
  if (resolver instanceof ConditionalResolverSkewJoin) {
    // NOTE(review): truncated fragment — the receiver of ".getResolverCtx()"
    // and the "context" assignment are missing; confirm against the full file.
    .getResolverCtx();
    HashMap<Path, Task<? extends Serializable>> bigKeysDirToTaskMap = context
        .getDirToTaskMap();
    conditionalTask.setResolverCtx(context);
  } else if (resolver instanceof ConditionalResolverCommonJoin) {
    // NOTE(review): same truncation as above for the common-join context.
    .getResolverCtx();
    HashMap<Task<? extends Serializable>, Set<String>> taskToAliases =
        context.getTaskToAliases();
    conditionalTask.setResolverCtx(context);