/**
 * Logical-equivalence check for this descriptor against another operator
 * descriptor. Delegates the common fields to the superclass, then compares
 * the MapJoin-specific state: parent-to-input mapping, key-count explain
 * text, big-table position, and the bucket-map-join flag.
 */
@Override
public boolean isSame(OperatorDesc other) {
  // Guard clause: if the common operator state already differs, stop here.
  if (!super.isSame(other)) {
    return false;
  }
  // super.isSame() returning true implies 'other' is a MapJoinDesc, so the
  // cast below is safe.
  MapJoinDesc that = (MapJoinDesc) other;
  return Objects.equals(getParentToInput(), that.getParentToInput())
      && Objects.equals(getKeyCountsExplainDesc(), that.getKeyCountsExplainDesc())
      && getPosBigTable() == that.getPosBigTable()
      && isBucketMapJoin() == that.isBucketMapJoin();
}
/**
 * Rewrites every MapJoin operator's parent-to-input mapping so that entries
 * pointing at {@code previous}'s work name now point at {@code current}'s.
 * Called when one work unit is substituted for another in the Spark plan.
 *
 * @param previous  the work unit being replaced
 * @param current   the work unit taking its place
 * @param sparkWork the plan whose MapJoin operators must be patched
 */
private void updateReference(BaseWork previous, BaseWork current, SparkWork sparkWork) {
  String previousName = previous.getName();
  String currentName = current.getName();
  // NOTE(review): getAllWork() returns every work unit in the plan, not just
  // children of 'previous' — the original's local name "children" was misleading.
  for (BaseWork work : sparkWork.getAllWork()) {
    for (Operator<?> operator : work.getAllOperators()) {
      if (operator instanceof MapJoinOperator) {
        MapJoinDesc mapJoinDesc = ((MapJoinOperator) operator).getConf();
        // Update values in place via the entry set: avoids the original's
        // get-then-put double lookup, is safe during iteration (setValue is
        // not a structural modification), and tolerates null map values.
        for (Map.Entry<Integer, String> entry : mapJoinDesc.getParentToInput().entrySet()) {
          if (previousName.equals(entry.getValue())) {
            entry.setValue(currentName);
          }
        }
      }
    }
  }
}
/**
 * Rewrites every MapJoin operator's parent-to-input mapping so that entries
 * pointing at {@code previous}'s work name now point at {@code current}'s.
 * Called when one work unit is substituted for another in the Spark plan.
 *
 * @param previous  the work unit being replaced
 * @param current   the work unit taking its place
 * @param sparkWork the plan whose MapJoin operators must be patched
 */
private void updateReference(BaseWork previous, BaseWork current, SparkWork sparkWork) {
  String previousName = previous.getName();
  String currentName = current.getName();
  // NOTE(review): getAllWork() returns every work unit in the plan, not just
  // children of 'previous' — the original's local name "children" was misleading.
  for (BaseWork work : sparkWork.getAllWork()) {
    for (Operator<?> operator : work.getAllOperators()) {
      if (operator instanceof MapJoinOperator) {
        MapJoinDesc mapJoinDesc = ((MapJoinOperator) operator).getConf();
        // Update values in place via the entry set: avoids the original's
        // get-then-put double lookup, is safe during iteration (setValue is
        // not a structural modification), and tolerates null map values.
        for (Map.Entry<Integer, String> entry : mapJoinDesc.getParentToInput().entrySet()) {
          if (previousName.equals(entry.getValue())) {
            entry.setValue(currentName);
          }
        }
      }
    }
  }
}
mapJoinOp.getConf().getParentToInput().put(pos, parentWork.getName());
mapJoinOp.getConf().getParentToInput().put(pos, parentWork.getName());
throws HiveException { Map<Integer, String> parentToInput = desc.getParentToInput(); Map<Integer, Long> parentKeyCounts = desc.getParentKeyCounts();
throws HiveException { Map<Integer, String> parentToInput = desc.getParentToInput(); Map<Integer, Long> parentKeyCounts = desc.getParentKeyCounts();
+ "), pos: " + pos + " --> " + parentWork.getName() + " (" + keyCount + " keys estimated from " + rowCount + " rows, " + bucketCount + " buckets)"); joinConf.getParentToInput().put(pos, parentWork.getName()); if (keyCount != Long.MAX_VALUE) { joinConf.getParentKeyCounts().put(pos, keyCount);
throws HiveException { Map<Integer, String> parentToInput = desc.getParentToInput(); Map<Integer, Long> parentKeyCounts = desc.getParentKeyCounts();
+ ", pos: " + pos + " --> " + parentWork.getName() + " (" + keyCount + " keys estimated from " + rowCount + " rows, " + bucketCount + " buckets)"); joinConf.getParentToInput().put(pos, parentWork.getName()); if (keyCount != Long.MAX_VALUE) { joinConf.getParentKeyCounts().put(pos, keyCount);
throws HiveException { Map<Integer, String> parentToInput = desc.getParentToInput(); Map<Integer, Long> parentKeyCounts = desc.getParentKeyCounts();
/**
 * Structural equality between two MapJoin operators, compared field-by-field
 * through their {@link MapJoinDesc} configurations.
 *
 * <p>Fixes from review: the original compared {@code getKeysString()} twice
 * (redundant second evaluation removed) and wrapped the whole expression in
 * an {@code if (cond) return true; else return false;} anti-idiom.
 *
 * @param op1 first operator; must not be null
 * @param op2 second operator; must not be null
 * @return true when every compared descriptor field matches
 */
@Override
public boolean equals(MapJoinOperator op1, MapJoinOperator op2) {
  Preconditions.checkNotNull(op1);
  Preconditions.checkNotNull(op2);
  MapJoinDesc desc1 = op1.getConf();
  MapJoinDesc desc2 = op2.getConf();
  return compareObject(desc1.getParentToInput(), desc2.getParentToInput())
      && compareString(desc1.getKeyCountsExplainDesc(), desc2.getKeyCountsExplainDesc())
      && compareObject(desc1.getKeysString(), desc2.getKeysString())
      && desc1.getPosBigTable() == desc2.getPosBigTable()
      && desc1.isBucketMapJoin() == desc2.isBucketMapJoin()
      && compareObject(desc1.getFiltersStringMap(), desc2.getFiltersStringMap())
      && compareObject(desc1.getOutputColumnNames(), desc2.getOutputColumnNames())
      && compareObject(desc1.getCondsList(), desc2.getCondsList())
      && desc1.getHandleSkewJoin() == desc2.getHandleSkewJoin()
      && compareString(desc1.getNullSafeString(), desc2.getNullSafeString());
}
}
mapJoinOp.getConf().getParentToInput().put(pos, parentWork.getName());
+ ", pos: " + pos + " --> " + parentWork.getName() + " (" + keyCount + " keys estimated from " + rowCount + " rows, " + bucketCount + " buckets)"); joinConf.getParentToInput().put(pos, parentWork.getName()); if (keyCount != Long.MAX_VALUE) { joinConf.getParentKeyCounts().put(pos, keyCount);
throws HiveException { Map<Integer, String> parentToInput = desc.getParentToInput(); Map<Integer, Long> parentKeyCounts = desc.getParentKeyCounts();
throws HiveException { Map<Integer, String> parentToInput = desc.getParentToInput(); Map<Integer, Long> parentKeyCounts = desc.getParentKeyCounts();