public HiveKVResultCache() {
  writeBuffer = new ObjectPair[IN_MEMORY_NUM_ROWS];
  readBuffer = new ObjectPair[IN_MEMORY_NUM_ROWS];
  for (int i = 0; i < IN_MEMORY_NUM_ROWS; i++) {
    writeBuffer[i] = new ObjectPair<HiveKey, BytesWritable>();
    readBuffer[i] = new ObjectPair<HiveKey, BytesWritable>();
  }
}
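// A hedged illustration of the reuse pattern above: the buffers are allocated once,
// and each slot is later refilled via setFirst/setSecond instead of allocating a new
// pair per row. `someKey` and `someValue` are placeholders, not Hive fields.
ObjectPair<HiveKey, BytesWritable> slot = writeBuffer[0];
slot.setFirst(someKey);
slot.setSecond(someValue);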
/**
 * Creates a pair. Constructor doesn't infer template args but
 * the method does, so the code becomes less ugly.
 */
public static <T1, T2> ObjectPair<T1, T2> create(T1 f, T2 s) {
  return new ObjectPair<T1, T2>(f, s);
}
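// A minimal sketch of the call-site difference the javadoc describes. HiveKey and
// BytesWritable are only illustrative type arguments; `key` and `value` are placeholders.
// Constructor form: the type arguments must be spelled out explicitly.
ObjectPair<HiveKey, BytesWritable> p1 = new ObjectPair<HiveKey, BytesWritable>(key, value);
// Factory form: T1 and T2 are inferred from the arguments.
ObjectPair<HiveKey, BytesWritable> p2 = ObjectPair.create(key, value);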
public KeyValueContainer(String spillLocalDirs) {
  readBuffer = new ObjectPair[IN_MEMORY_NUM_ROWS];
  for (int i = 0; i < IN_MEMORY_NUM_ROWS; i++) {
    readBuffer[i] = new ObjectPair<HiveKey, BytesWritable>();
  }
  try {
    setupOutput(spillLocalDirs);
  } catch (IOException | HiveException e) {
    throw new RuntimeException("Failed to create temporary output file on disk", e);
  }
}
public void addFinalSelectOp(
    SelectOperator sop, Operator<? extends OperatorDesc> sinkOp) {
  String operatorId = sop.getOperatorId();
  if (!finalSelectOps.containsKey(operatorId)) {
    Table table = null;
    if (sinkOp instanceof FileSinkOperator) {
      FileSinkOperator fso = (FileSinkOperator) sinkOp;
      table = fso.getConf().getTable();
    }
    finalSelectOps.put(operatorId,
        new ObjectPair<SelectOperator, Table>(sop, table));
  }
}
private ObjectPair<List<ASTNode>, List<Integer>> getGroupByGroupingSetsForClause(
    QBParseInfo parseInfo, String dest) throws SemanticException {
  List<Integer> groupingSets = new ArrayList<Integer>();
  List<ASTNode> groupByExprs = getGroupByForClause(parseInfo, dest);
  if (parseInfo.getDestRollups().contains(dest)) {
    groupingSets = getGroupingSetsForRollup(groupByExprs.size());
  } else if (parseInfo.getDestCubes().contains(dest)) {
    groupingSets = getGroupingSetsForCube(groupByExprs.size());
  } else if (parseInfo.getDestGroupingSets().contains(dest)) {
    groupingSets = getGroupingSets(groupByExprs, parseInfo, dest);
  }
  return new ObjectPair<List<ASTNode>, List<Integer>>(groupByExprs, groupingSets);
}
if (leftAlias == null && (!node.getNoOuterJoin() || !target.getNoOuterJoin())) {
  return new ObjectPair(-1, null);
}
// ... (several further early exits also return new ObjectPair(-1, null)) ...
return new ObjectPair(res, tgtToNodeExprMap);
ObjectPair<List<ASTNode>, List<Long>> getGroupByGroupingSetsForClause(
    QBParseInfo parseInfo, String dest) throws SemanticException {
  List<Long> groupingSets = new ArrayList<Long>();
  List<ASTNode> groupByExprs = getGroupByForClause(parseInfo, dest);
  if (parseInfo.getDestRollups().contains(dest)) {
    groupingSets = getGroupingSetsForRollup(groupByExprs.size());
  } else if (parseInfo.getDestCubes().contains(dest)) {
    groupingSets = getGroupingSetsForCube(groupByExprs.size());
  } else if (parseInfo.getDestGroupingSets().contains(dest)) {
    groupingSets = getGroupingSets(groupByExprs, parseInfo, dest);
  }
  if (!groupingSets.isEmpty() && groupByExprs.size() > Long.SIZE) {
    throw new SemanticException(ErrorMsg.HIVE_GROUPING_SETS_SIZE_LIMIT.getMsg());
  }
  return new ObjectPair<List<ASTNode>, List<Long>>(groupByExprs, groupingSets);
}
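// Why the Long.SIZE cap above: each grouping set is carried as a long, which can only
// encode membership of up to 64 group-by expressions as a bitmask. A hedged, illustrative
// encoding for GROUP BY a, b (bit i standing for the i-th group-by key; the exact bit
// convention Hive uses may differ):
long setAB = (1L << 0) | (1L << 1); // {a, b}
long setA = (1L << 0);              // {a}
long setNone = 0L;                  // ()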
return new ObjectPair<List<ReduceSinkOperator>, Map<Byte,List<ExprNodeDesc>>>( oldReduceSinkParentOps, keyExprMap);
return new ObjectPair<Long, Integer>(-1L, -1);
// ... (truncated log statement reporting total cores, memory per executor in mb,
//      and memoryFraction) ...
return new ObjectPair<Long, Integer>(Long.valueOf(memoryPerTaskInBytes), Integer.valueOf(totalCores));
} finally {
ObjectPair<Boolean, Integer> subqInfo = new ObjectPair<Boolean, Integer>(false, 0);
public List<Partition> dropPartitions(String dbName, String tblName,
    List<DropTableDesc.PartSpec> partSpecs, PartitionDropOptions dropOptions) throws HiveException {
  try {
    Table tbl = getTable(dbName, tblName);
    List<ObjectPair<Integer, byte[]>> partExprs =
        new ArrayList<ObjectPair<Integer, byte[]>>(partSpecs.size());
    for (DropTableDesc.PartSpec partSpec : partSpecs) {
      partExprs.add(new ObjectPair<Integer, byte[]>(partSpec.getPrefixLength(),
          SerializationUtilities.serializeExpressionToKryo(partSpec.getPartSpec())));
    }
    List<org.apache.hadoop.hive.metastore.api.Partition> tParts = getMSC().dropPartitions(
        dbName, tblName, partExprs, dropOptions);
    return convertFromMetastore(tbl, tParts);
  } catch (NoSuchObjectException e) {
    throw new HiveException("Partition or table doesn't exist.", e);
  } catch (Exception e) {
    throw new HiveException(e.getMessage(), e);
  }
}
return false;

ObjectPair tem = new ObjectPair();
tem.setFirst(ReflectionUtils.copy(job, key, tem.getFirst()));
tem.setSecond(ReflectionUtils.copy(job, value, tem.getSecond()));
private boolean next(Integer current) throws IOException, HiveException {
  if (keyFields == null) {
    byte tag = tagForAlias(alias);
    // joinKeys/joinKeysOI are initialized after making merge queue, so setup lazily at runtime
    keyFields = joinKeys[tag];
    keyFieldOIs = joinKeysObjectInspectors[tag];
  }
  InspectableObject nextRow = segments[current].getNextRow();
  while (nextRow != null) {
    sinkOp.reset();
    if (keys[current] == null) {
      keys[current] = new ObjectPair<List<Object>, InspectableObject>();
    }
    // Pass the row through the operator tree. It is guaranteed that not more than 1 row can
    // be produced from an input row.
    forwardOp.process(nextRow.o, 0);
    nextRow = sinkOp.getResult();
    // It is possible that the row got absorbed in the operator tree.
    if (nextRow.o != null) {
      // todo this should be changed to be evaluated lazily, especially for single segment case
      keys[current].setFirst(JoinUtil.computeKeys(nextRow.o, keyFields, keyFieldOIs));
      keys[current].setSecond(nextRow);
      return true;
    }
    nextRow = segments[current].getNextRow();
  }
  keys[current] = null;
  return false;
}
}