/**
 * Returns the next row in merge order, re-heapifying the segment that
 * supplied the previous row before peeking at the new minimum.
 *
 * @return the value paired with the smallest remaining key, or {@code null}
 *         once every segment is exhausted.
 * @throws IOException if advancing a segment fails.
 */
public final InspectableObject getNextRow() throws IOException {
  // Push the previously-consumed segment back through the priority queue
  // so the heap invariant holds before we read the new top.
  if (currentMinSegment != null) {
    adjustPriorityQueue(currentMinSegment);
  }
  Integer smallest = top();
  if (smallest == null) {
    // Queue drained: log the total once, then signal end-of-stream.
    if (LOG.isInfoEnabled()) {
      LOG.info("MergeQueue forwarded " + counter + " rows");
    }
    return null;
  }
  counter++;
  currentMinSegment = smallest;
  return keys[currentMinSegment].getSecond();
}
/**
 * Fetches the next row in merge order. The segment consumed on the prior
 * call is re-inserted into the priority queue first so the heap stays valid.
 *
 * @return the second element of the minimum key entry, or {@code null} when
 *         no segments remain.
 * @throws IOException if a segment read fails.
 */
public final InspectableObject getNextRow() throws IOException {
  if (currentMinSegment != null) {
    adjustPriorityQueue(currentMinSegment);
  }
  Integer minSegment = top();
  if (minSegment != null) {
    counter++;
    currentMinSegment = minSegment;
    return keys[currentMinSegment].getSecond();
  }
  // All segments exhausted — report the forwarded-row total and stop.
  // NOTE(review): isLogInfoEnabled looks like a cached log-level flag —
  // confirm it tracks LOG.isInfoEnabled().
  if (isLogInfoEnabled) {
    LOG.info("MergeQueue forwarded " + counter + " rows");
  }
  return null;
}
/**
 * Dequeues the earliest buffered record into the caller-supplied key/value
 * objects, then refills that slot with the next record from the reader.
 *
 * @param job
 *          Current job configuration.
 * @param recordreader
 *          Record reader supplying replacement records.
 * @param key
 *          Destination object for the dequeued record's key.
 * @param value
 *          Destination object for the dequeued record's value.
 * @return {@code false} once the reader reaches end of file, otherwise
 *         {@code true}.
 * @throws IOException if the reader fails.
 */
public boolean updateBuffer(JobConf job, RecordReader recordreader,
    WritableComparable key, Writable value) throws IOException {
  // Copy the oldest buffered record out. The copy mutates the caller's
  // objects in place; the local reassignment itself is not visible outside.
  key = ReflectionUtils.copy(job, (WritableComparable) buffer.get(cur).getFirst(), key);
  value = ReflectionUtils.copy(job, (Writable) buffer.get(cur).getSecond(), value);
  // Refill the slot we just drained with the next record, if any.
  boolean notEOF = recordreader.next(buffer.get(cur).getFirst(), buffer.get(cur).getSecond());
  if (notEOF) {
    // Advance the circular cursor only when a new record landed in the slot.
    cur = (cur + 1) % buffer.size();
  }
  return notEOF;
}
/**
 * Evaluates a compiled expression against one partition's column values and
 * converts the result to a plain Java object.
 *
 * @param pair
 *          result object inspector paired with the expression evaluator.
 * @param partColValues
 *          the partition column values to evaluate against.
 * @return the evaluated result as a primitive Java object.
 * @throws HiveException if expression evaluation fails.
 */
public static synchronized Object evaluateExprOnPart(
    ObjectPair<PrimitiveObjectInspector, ExprNodeEvaluator> pair,
    Object partColValues) throws HiveException {
  // NOTE(review): synchronized — presumably the shared evaluator is not
  // thread-safe; confirm before relaxing.
  Object evaluated = pair.getSecond().evaluate(partColValues);
  return pair.getFirst().getPrimitiveJavaObject(evaluated);
}
}
/**
 * Pops the oldest record out of the circular buffer into {@code key} and
 * {@code value}, and reads one replacement record into the freed slot.
 *
 * @param job
 *          Current job configuration.
 * @param recordreader
 *          Record reader providing the replacement record.
 * @param key
 *          Object the dequeued key is copied into.
 * @param value
 *          Object the dequeued value is copied into.
 * @return {@code false} if the reader has reached end of file, otherwise
 *         {@code true}.
 * @throws IOException if reading the next record fails.
 */
public boolean updateBuffer(JobConf job, RecordReader recordreader,
    WritableComparable key, Writable value) throws IOException {
  // Hand the oldest buffered record to the caller; ReflectionUtils.copy
  // mutates the destination objects in place.
  key = ReflectionUtils.copy(job,
      (WritableComparable) buffer.get(cur).getFirst(), key);
  value = ReflectionUtils.copy(job,
      (Writable) buffer.get(cur).getSecond(), value);
  // Reuse the same slot for the next incoming record.
  boolean hasMore = recordreader.next(buffer.get(cur).getFirst(),
      buffer.get(cur).getSecond());
  if (hasMore) {
    cur = (cur + 1) % buffer.size(); // circular advance
  }
  return hasMore;
}
/**
 * Classifies a conjunct of a search condition. Binary nodes are split into
 * left/right operands, each analyzed separately; anything else is analyzed
 * as a single expression.
 *
 * @param conjunct the AST node to classify.
 * @return a {@link Conjunct} carrying the operands and their classifications.
 * @throws SemanticException if expression analysis fails.
 */
Conjunct analyzeConjunct(ASTNode conjunct) throws SemanticException {
  if (conjunct.getChildCount() != 2) {
    // Not a two-operand node: treat the whole conjunct as one expression.
    // Its column info is reused for both sides of the Conjunct.
    ObjectPair<ExprType, ColumnInfo> info = analyzeExpr(conjunct);
    return new Conjunct(conjunct, null, info.getFirst(), null,
        info.getSecond(), info.getSecond());
  }
  ASTNode lhs = (ASTNode) conjunct.getChild(0);
  ASTNode rhs = (ASTNode) conjunct.getChild(1);
  ObjectPair<ExprType, ColumnInfo> lhsInfo = analyzeExpr(lhs);
  ObjectPair<ExprType, ColumnInfo> rhsInfo = analyzeExpr(rhs);
  return new Conjunct(lhs, rhs, lhsInfo.getFirst(), rhsInfo.getFirst(),
      lhsInfo.getSecond(), rhsInfo.getSecond());
}
/**
 * Runs the given expression evaluator over one partition's column values,
 * then uses the paired object inspector to unwrap the result.
 *
 * @param pair
 *          pairs the primitive object inspector with the evaluator.
 * @param partColValues
 *          partition column values fed to the evaluator.
 * @return the unwrapped Java value of the evaluated expression.
 * @throws HiveException on evaluation failure.
 */
public static synchronized Object evaluateExprOnPart(
    ObjectPair<PrimitiveObjectInspector, ExprNodeEvaluator> pair,
    Object partColValues) throws HiveException {
  // NOTE(review): method is synchronized — likely because the evaluator
  // holds mutable state shared across callers; confirm.
  ExprNodeEvaluator evaluator = pair.getSecond();
  PrimitiveObjectInspector inspector = pair.getFirst();
  return inspector.getPrimitiveJavaObject(evaluator.evaluate(partColValues));
}
}
/**
 * Classifies a conjunct of a search condition. An equality node is split
 * into its left/right operands, each analyzed separately; any other node is
 * analyzed as a single expression.
 *
 * @param conjunct the AST node to classify.
 * @return a {@link Conjunct} carrying the operands and their classifications.
 * @throws SemanticException if expression analysis fails.
 */
Conjunct analyzeConjunct(ASTNode conjunct) throws SemanticException {
  if (conjunct.getType() != HiveParser.EQUAL) {
    // Non-equality conjunct: analyze it whole; its column info fills both
    // slots of the resulting Conjunct.
    ObjectPair<ExprType, ColumnInfo> info = analyzeExpr(conjunct);
    return new Conjunct(conjunct, null, info.getFirst(), null,
        info.getSecond(), info.getSecond());
  }
  ASTNode lhs = (ASTNode) conjunct.getChild(0);
  ASTNode rhs = (ASTNode) conjunct.getChild(1);
  ObjectPair<ExprType, ColumnInfo> lhsInfo = analyzeExpr(lhs);
  ObjectPair<ExprType, ColumnInfo> rhsInfo = analyzeExpr(rhs);
  return new Conjunct(lhs, rhs, lhsInfo.getFirst(), rhsInfo.getFirst(),
      lhsInfo.getSecond(), rhsInfo.getSecond());
}
/**
 * Walks a search-condition AST checking subquery placement: counts subquery
 * expressions in {@code subqInfo}'s second slot and flags (first slot) that
 * an OR was seen. Fails once more than one subquery appears in a condition
 * containing an OR.
 *
 * @param searchCond the (sub)condition to walk.
 * @param subqInfo   in/out pair: (OR seen, subquery count so far).
 * @return {@code true} if subquery usage is valid so far, else {@code false}.
 */
private boolean topLevelConjunctCheck(ASTNode searchCond,
    ObjectPair<Boolean, Integer> subqInfo) {
  int nodeType = searchCond.getType();
  if (nodeType == HiveParser.KW_OR) {
    // Record that we are inside a disjunction; multiple subqueries already
    // seen under an OR are rejected immediately.
    subqInfo.setFirst(Boolean.TRUE);
    if (subqInfo.getSecond() > 1) {
      return false;
    }
  }
  if (nodeType == HiveParser.TOK_SUBQUERY_EXPR) {
    subqInfo.setSecond(subqInfo.getSecond() + 1);
    // A second (or later) subquery is only invalid when combined with OR.
    return !(subqInfo.getSecond() > 1 && subqInfo.getFirst());
  }
  // Recurse into all children; any invalid subtree fails the whole check.
  for (int child = 0; child < searchCond.getChildCount(); child++) {
    if (!topLevelConjunctCheck((ASTNode) searchCond.getChild(child), subqInfo)) {
      return false;
    }
  }
  return true;
}
// Copy the incoming value into the pair's second slot (reusing its existing
// object to avoid allocation), then append the pair to the buffer.
// NOTE(review): `tem` is constructed on an unseen preceding line — confirm
// its first slot is populated before this point.
tem.setSecond(ReflectionUtils.copy(job, value, tem.getSecond())); buffer.add(tem);
// The pair's second element holds the grouping sets for this GROUP BY
// clause. NOTE(review): encoded as Long ids here, but a sibling snippet
// uses Integer — confirm which representation this file version expects.
List<Long> groupingSets = grpByExprsGroupingSets.getSecond();
// Store a copy of the current value in the pair (the copy writes into the
// pair's existing second object), then enqueue the pair.
// NOTE(review): `tem` and its first slot are set up on unseen lines — verify.
tem.setSecond(ReflectionUtils.copy(job, value, tem.getSecond())); buffer.add(tem);
// Pull the result schema from the compiled plan, and unpack the final
// select operator plus its destination table from the pair. The destination
// table name and column names are resolved later (unseen lines).
List<FieldSchema> fieldSchemas = plan.getResultSchema().getFieldSchemas(); SelectOperator finalSelOp = pair.getFirst(); org.apache.hadoop.hive.ql.metadata.Table t = pair.getSecond(); String destTableName = null; List<String> colNames = null;
// The pair's second element holds the grouping sets for this GROUP BY
// clause. NOTE(review): encoded as Integer ids here, but a sibling snippet
// uses Long — confirm which representation this file version expects.
List<Integer> groupingSets = grpByExprsGroupingSets.getSecond();
public synchronized void add(HiveKey key, BytesWritable value) { if (writeCursor >= IN_MEMORY_NUM_ROWS) { // Write buffer is full if (!readBufferUsed) { // Read buffer isn't used, switch buffer switchBufferAndResetCursor(); } else { // Need to spill from write buffer to disk try { if (output == null) { setupOutput(); } for (int i = 0; i < IN_MEMORY_NUM_ROWS; i++) { ObjectPair<HiveKey, BytesWritable> pair = writeBuffer[i]; writeHiveKey(output, pair.getFirst()); writeValue(output, pair.getSecond()); pair.setFirst(null); pair.setSecond(null); } writeCursor = 0; } catch (Exception e) { clear(); // Clean up the cache throw new RuntimeException("Failed to spill rows to disk", e); } } } ObjectPair<HiveKey, BytesWritable> pair = writeBuffer[writeCursor++]; pair.setFirst(key); pair.setSecond(value); }
public synchronized void add(HiveKey key, BytesWritable value) { if (writeCursor >= IN_MEMORY_NUM_ROWS) { // Write buffer is full if (!readBufferUsed) { // Read buffer isn't used, switch buffer switchBufferAndResetCursor(); } else { // Need to spill from write buffer to disk try { if (output == null) { setupOutput(); } for (int i = 0; i < IN_MEMORY_NUM_ROWS; i++) { ObjectPair<HiveKey, BytesWritable> pair = writeBuffer[i]; writeHiveKey(output, pair.getFirst()); writeValue(output, pair.getSecond()); pair.setFirst(null); pair.setSecond(null); } writeCursor = 0; } catch (Exception e) { clear(); // Clean up the cache throw new RuntimeException("Failed to spill rows to disk", e); } } } ObjectPair<HiveKey, BytesWritable> pair = writeBuffer[writeCursor++]; pair.setFirst(key); pair.setSecond(value); }
// NOTE(review): grpByExprsGroupingSets appears to be assigned from this call
// on a preceding unseen line (the call result is not captured here) — confirm.
// Reject the query when grouping sets are present: this path (no map-side
// aggregation with multiple GROUP BYs, per the error message) does not
// support them.
getGroupByGroupingSetsForClause(parseInfo, dest); List<Long> groupingSets = grpByExprsGroupingSets.getSecond(); if (!groupingSets.isEmpty()) { throw new SemanticException(ErrorMsg.HIVE_GROUPING_SETS_AGGR_NOMAPAGGR_MULTIGBY.getMsg());
// Replay one spilled key/value pair from the container into the restored
// hash map via the write helper.
// NOTE(review): the meaning of the -1 value stored in the map is not
// visible from this fragment — confirm against HashMapWrapper semantics.
ObjectPair<HiveKey, BytesWritable> pair = kvContainer.next(); Writable key = pair.getFirst(); Writable val = pair.getSecond(); writeHelper.setKeyValue(key, val); restoredHashMap.put(writeHelper, -1);