/**
 * Returns the value of the given field, boxed as an {@code Object}.
 *
 * @param field identifier of the field to read
 * @return the field's current value (may be null if unset)
 * @throws IllegalStateException if {@code field} is not a recognized field
 */
public Object getFieldValue(_Fields field) {
  // A null field triggers an NPE from the switch, matching the original contract.
  switch (field) {
    case NODE:
      return getNode();
    case CHILDREN:
      return getChildren();
    case ADJACENCY_TYPE:
      return getAdjacencyType();
    default:
      throw new IllegalStateException();
  }
}
/**
 * Returns a new {@code Adjacency} that is a deep copy of this instance,
 * delegating to the copy constructor.
 */
public Adjacency deepCopy() {
  Adjacency copy = new Adjacency(this);
  return copy;
}
/**
 * Compares this {@code Adjacency} with an arbitrary object. Equal only when the
 * other object is also an {@code Adjacency} and the field-wise comparison in
 * {@code equals(Adjacency)} succeeds.
 */
@Override
public boolean equals(Object that) {
  // instanceof is false for null, so the original explicit null check was redundant.
  return (that instanceof Adjacency) && this.equals((Adjacency) that);
}
/**
 * Performs a deep copy on <i>other</i>. Only fields that are set on
 * <i>other</i> are copied into this instance.
 */
public Adjacency(Adjacency other) {
  if (other.isSetNode()) {
    this.node = other.node;
  }
  if (other.isSetChildren()) {
    // Copy the list container itself; String elements are immutable and safely shared.
    this.children = new ArrayList<String>(other.children);
  }
  if (other.isSetAdjacencyType()) {
    this.adjacencyType = other.adjacencyType;
  }
}
/**
 * Exports the stage-graph adjacency list of {@code plan} into {@code conf} as
 * {@code mapreduce.workflow.adjacency.<node>} properties mapping each node to
 * its children, so downstream tooling can reconstruct the workflow DAG.
 *
 * <p>Best-effort: failures while reading the query plan are ignored.
 *
 * @param conf configuration that receives the adjacency properties
 * @param plan query plan whose stage graph is exported
 */
public static void setWorkflowAdjacencies(Configuration conf, QueryPlan plan) {
  try {
    Graph stageGraph = plan.getQueryPlan().getStageGraph();
    if (stageGraph == null) {
      return;
    }
    List<Adjacency> adjList = stageGraph.getAdjacencyList();
    if (adjList == null) {
      return;
    }
    for (Adjacency adj : adjList) {
      List<String> children = adj.getChildren();
      if (CollectionUtils.isEmpty(children)) {
        // BUG FIX: was "return", which silently dropped every remaining
        // adjacency as soon as one node had no children. Skip only this node.
        continue;
      }
      conf.setStrings("mapreduce.workflow.adjacency." + adj.getNode(),
          children.toArray(new String[0]));
    }
  } catch (IOException e) {
    // Best-effort export; the adjacency info is advisory only.
    // NOTE(review): consider logging this at debug level rather than dropping it.
  }
}
// NOTE(review): truncated excerpt — the result of the Adjacency allocation is not
// assigned on this line and the for-loop/if bodies are not closed here; presumably
// "entry" holds the new Adjacency and the braces close beyond this view — confirm.
// Visible behavior: marks "entry" as a CONJUNCTIVE adjacency rooted at this
// operator's id, records every child operator's id as a child, and queues any
// child not yet in opsVisited for later traversal.
new org.apache.hadoop.hive.ql.plan.api.Adjacency(); entry.setAdjacencyType(AdjacencyType.CONJUNCTIVE); entry.setNode(op.getOperatorId()); for (Operator<? extends OperatorDesc> childOp : op.getChildOperators()) { entry.addToChildren(childOp.getOperatorId()); if (!opsVisited.contains(childOp)) { opsToVisit.add(childOp);
/**
 * Custom Java-serialization hook: reconstructs this object by reading its
 * Thrift compact-protocol encoding from the stream, translating Thrift
 * failures into {@code IOException}s as the serialization contract requires.
 */
private void readObject(java.io.ObjectInputStream in)
    throws java.io.IOException, ClassNotFoundException {
  org.apache.thrift.transport.TIOStreamTransport transport =
      new org.apache.thrift.transport.TIOStreamTransport(in);
  try {
    read(new org.apache.thrift.protocol.TCompactProtocol(transport));
  } catch (org.apache.thrift.TException e) {
    throw new java.io.IOException(e);
  }
}
/**
 * Returns true if the field corresponding to {@code field} is set (has been
 * assigned a value) and false otherwise.
 *
 * @throws IllegalArgumentException if {@code field} is null
 * @throws IllegalStateException if {@code field} is not a recognized field
 */
public boolean isSet(_Fields field) {
  if (field == null) {
    throw new IllegalArgumentException();
  }
  switch (field) {
    case NODE:
      return isSetNode();
    case CHILDREN:
      return isSetChildren();
    case ADJACENCY_TYPE:
      return isSetAdjacencyType();
    default:
      throw new IllegalStateException();
  }
}
// NOTE(review): truncated excerpt — the result of the Adjacency allocation is not
// assigned on this line and the for-loop/if bodies are not closed here; presumably
// "entry" holds the new Adjacency and the braces close beyond this view — confirm.
// Visible behavior: marks "entry" as a CONJUNCTIVE adjacency rooted at this
// operator's id, records every child operator's id as a child, and queues any
// child not yet in opsVisited for later traversal.
new org.apache.hadoop.hive.ql.plan.api.Adjacency(); entry.setAdjacencyType(AdjacencyType.CONJUNCTIVE); entry.setNode(op.getOperatorId()); for (Operator<? extends OperatorDesc> childOp : op.getChildOperators()) { entry.addToChildren(childOp.getOperatorId()); if (!opsVisited.contains(childOp)) { opsToVisit.add(childOp);
/**
 * Exports the stage-graph adjacency list of {@code plan} into {@code conf} as
 * {@code mapreduce.workflow.adjacency.<node>} properties mapping each node to
 * its children, so downstream tooling can reconstruct the workflow DAG.
 *
 * <p>Best-effort: failures while reading the query plan are ignored.
 *
 * @param conf configuration that receives the adjacency properties
 * @param plan query plan whose stage graph is exported
 */
public static void setWorkflowAdjacencies(Configuration conf, QueryPlan plan) {
  try {
    Graph stageGraph = plan.getQueryPlan().getStageGraph();
    if (stageGraph == null) {
      return;
    }
    List<Adjacency> adjList = stageGraph.getAdjacencyList();
    if (adjList == null) {
      return;
    }
    for (Adjacency adj : adjList) {
      List<String> children = adj.getChildren();
      if (children == null || children.isEmpty()) {
        // BUG FIX: was "return", which silently dropped every remaining
        // adjacency as soon as one node had no children. Skip only this node.
        continue;
      }
      conf.setStrings("mapreduce.workflow.adjacency." + adj.getNode(),
          children.toArray(new String[children.size()]));
    }
  } catch (IOException e) {
    // Best-effort export; the adjacency info is advisory only.
    // NOTE(review): consider logging this at debug level rather than dropping it.
  }
}
/**
 * Custom Java-serialization hook: reconstructs this object by reading its
 * Thrift compact-protocol encoding from the stream, translating Thrift
 * failures into {@code IOException}s as the serialization contract requires.
 */
private void readObject(java.io.ObjectInputStream in)
    throws java.io.IOException, ClassNotFoundException {
  org.apache.thrift.transport.TIOStreamTransport transport =
      new org.apache.thrift.transport.TIOStreamTransport(in);
  try {
    read(new org.apache.thrift.protocol.TCompactProtocol(transport));
  } catch (org.apache.thrift.TException e) {
    throw new java.io.IOException(e);
  }
}
/**
 * Performs a deep copy on <i>other</i>. Only fields that are set on
 * <i>other</i> are copied into this instance.
 */
public Adjacency(Adjacency other) {
  if (other.isSetNode()) {
    this.node = other.node;
  }
  if (other.isSetChildren()) {
    // Copy the list container itself; String elements are immutable and safely shared.
    this.children = new ArrayList<String>(other.children);
  }
  if (other.isSetAdjacencyType()) {
    this.adjacencyType = other.adjacencyType;
  }
}
// NOTE(review): truncated excerpt — braces in this fragment are unbalanced (several
// if/for bodies never close within view) and the first Adjacency allocation is not
// assigned on this line; presumably "listEntry" holds it and the structure closes
// beyond this view — confirm against the full file.
// Visible behavior: for a ConditionalTask, builds DISJUNCTIVE adjacency entries
// (one per list task, each adding its child task ids); for an ordinary task with
// children, builds a CONJUNCTIVE entry. In both paths, tasks not yet in
// tasksVisited are queued into tasksToVisit for later traversal.
new org.apache.hadoop.hive.ql.plan.api.Adjacency(); listEntry.setAdjacencyType(AdjacencyType.DISJUNCTIVE); listEntry.setNode(task.getId()); ConditionalTask t = (ConditionalTask) task; if (t.getChildTasks() != null) { org.apache.hadoop.hive.ql.plan.api.Adjacency childEntry = new org.apache.hadoop.hive.ql.plan.api.Adjacency(); childEntry.setAdjacencyType(AdjacencyType.DISJUNCTIVE); childEntry.setNode(listTask.getId()); childEntry.addToChildren(childTask.getId()); if (!tasksVisited.contains(childTask)) { tasksToVisit.add(childTask); listEntry.addToChildren(listTask.getId()); if (!tasksVisited.contains(listTask)) { tasksToVisit.add(listTask); } else if (task.getChildTasks() != null) { org.apache.hadoop.hive.ql.plan.api.Adjacency entry = new org.apache.hadoop.hive.ql.plan.api.Adjacency(); entry.setAdjacencyType(AdjacencyType.CONJUNCTIVE); entry.setNode(task.getId()); entry.addToChildren(childTask.getId()); if (!tasksVisited.contains(childTask)) { tasksToVisit.add(childTask);
/**
 * Returns the value of the given field, boxed as an {@code Object}.
 *
 * @param field identifier of the field to read
 * @return the field's current value (may be null if unset)
 * @throws IllegalStateException if {@code field} is not a recognized field
 */
public Object getFieldValue(_Fields field) {
  // A null field triggers an NPE from the switch, matching the original contract.
  switch (field) {
    case NODE:
      return getNode();
    case CHILDREN:
      return getChildren();
    case ADJACENCY_TYPE:
      return getAdjacencyType();
    default:
      throw new IllegalStateException();
  }
}
// NOTE(review): truncated excerpt — the loop enclosing this "continue" and the
// method returning "result" are outside this view. The in-loop "return result"
// when a node has no children looks like the same early-exit bug seen in
// setWorkflowAdjacencies (it would abandon all remaining adjacencies); the
// author's own "TODO check!" suggests it should likely be "continue" — verify
// against the full method before changing.
String nodeId = AmbroseHiveUtil.getNodeIdFromNodeName(conf, adj.getNode()); if (!nodeIdToDAGNode.containsKey(nodeId)) { continue; List<String> children = adj.getChildren(); if (children == null || children.isEmpty()) { return result; // TODO check!
/**
 * Returns a new {@code Adjacency} that is a deep copy of this instance,
 * delegating to the copy constructor.
 */
public Adjacency deepCopy() {
  Adjacency copy = new Adjacency(this);
  return copy;
}
/**
 * Compares this {@code Adjacency} with an arbitrary object. Equal only when the
 * other object is also an {@code Adjacency} and the field-wise comparison in
 * {@code equals(Adjacency)} succeeds.
 */
@Override
public boolean equals(Object that) {
  // instanceof is false for null, so the original explicit null check was redundant.
  return (that instanceof Adjacency) && this.equals((Adjacency) that);
}