@Override
public Aggregator[] aggregate(Tuple result) {
    // Reuse the shared aggregator set: clear any state left from the
    // previous row, then fold this row's values in and hand it back.
    final Aggregator[] aggs = aggregators.getAggregators();
    aggregators.reset(aggs);
    aggregators.aggregate(aggs, result);
    return aggs;
}
}
@Override
public Aggregator[] aggregate(Tuple result) {
    // Reset the shared aggregator array before folding in this row,
    // so no state from a previously aggregated row leaks through.
    final Aggregator[] aggs = aggregators.getAggregators();
    aggregators.reset(aggs);
    aggregators.aggregate(aggs, result);
    return aggs;
}
@Override
public Aggregator[] aggregate(Tuple result) {
    // Fetch the shared aggregators, wipe prior state, then apply this row.
    final Aggregator[] perRowAggs = aggregators.getAggregators();
    aggregators.reset(perRowAggs);
    aggregators.aggregate(perRowAggs, result);
    return perRowAggs;
}
private HashMap<ImmutableBytesWritable, Aggregator[]> populateHash() throws SQLException { hash = new HashMap<ImmutableBytesWritable, Aggregator[]>(HASH_AGG_INIT_SIZE, 0.75f); final int aggSize = aggregators.getEstimatedByteSize(); long keySize = 0; for (Tuple result = resultIterator.next(); result != null; result = resultIterator.next()) { ImmutableBytesWritable key = new ImmutableBytesWritable(UNITIALIZED_KEY_BUFFER); key = getGroupingKey(result, key); Aggregator[] rowAggregators = hash.get(key); if (rowAggregators == null) { keySize += key.getSize(); long hashSize = SizedUtil.sizeOfMap(hash.size() + 1, SizedUtil.IMMUTABLE_BYTES_WRITABLE_SIZE, aggSize) + keySize; if (hashSize > memoryChunk.getSize() + CLIENT_HASH_AGG_MEMORY_CHUNK_SIZE) { // This will throw InsufficientMemoryException if necessary memoryChunk.resize(hashSize + CLIENT_HASH_AGG_MEMORY_CHUNK_SIZE); } rowAggregators = aggregators.newAggregators(); hash.put(key, rowAggregators); } aggregators.aggregate(rowAggregators, result); } return hash; }
// Seed the aggregators from the previously returned row, then fold the
// current row into the same group.
rowAggregators = aggregate(previous); aggregators.aggregate(rowAggregators, current);
// NOTE(review): copies the per-call traversed-iterator count into the field —
// presumably consumed by a stats/metrics reader; confirm against surrounding code.
traversedIterators = this.traversedIterator;
@Override
public Tuple next() throws SQLException {
    // Pull the first row of the next group; null means the input is exhausted.
    Tuple result = resultIterator.next();
    if (result == null) {
        return null;
    }
    // First call only: currentKey still references the sentinel buffer
    // (identity compare is intentional), so initialize it from this row.
    if (currentKey.get() == UNITIALIZED_KEY_BUFFER) {
        getGroupingKey(result, currentKey);
    }
    // Reuse the shared aggregator set, clearing state from the previous group.
    Aggregator[] rowAggregators = aggregators.getAggregators();
    aggregators.reset(rowAggregators);
    // Fold in rows while they share the current grouping key. peek() lets the
    // first row of the NEXT group stay in the underlying iterator; its key is
    // captured into nextKey by getGroupingKey.
    while (true) {
        aggregators.aggregate(rowAggregators, result);
        Tuple nextResult = resultIterator.peek();
        if (nextResult == null || !currentKey.equals(getGroupingKey(nextResult, nextKey))) {
            break;
        }
        result = resultIterator.next();
    }
    // Serialize the aggregated values into a single KeyValue keyed by the group key.
    byte[] value = aggregators.toBytes(rowAggregators);
    Tuple tuple = wrapKeyValueAsResult(PhoenixKeyValueUtil.newKeyValue(currentKey, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, AGG_TIMESTAMP, value, 0, value.length));
    // Advance currentKey to the upcoming group's key for the next call.
    currentKey.set(nextKey.get(), nextKey.getOffset(), nextKey.getLength());
    return tuple;
}
@Override
public Aggregator[] aggregate(Tuple result) {
    // Clear any previously accumulated state on the shared aggregators,
    // then aggregate this single row and return the array.
    final Aggregator[] rowAggs = aggregators.getAggregators();
    aggregators.reset(rowAggs);
    aggregators.aggregate(rowAggs, result);
    return rowAggs;
}
}
@Override
public Aggregator[] aggregate(Tuple result) {
    // One-row aggregation: reset shared aggregator state, apply the row.
    final Aggregator[] aggs = aggregators.getAggregators();
    aggregators.reset(aggs);
    aggregators.aggregate(aggs, result);
    return aggs;
}
@Override
public Aggregator[] aggregate(Tuple result) {
    // Grab the shared aggregators, reset them so nothing carries over,
    // then accumulate this row's values.
    final Aggregator[] aggs = aggregators.getAggregators();
    aggregators.reset(aggs);
    aggregators.aggregate(aggs, result);
    return aggs;
}
}
@Override
public Aggregator[] aggregate(Tuple result) {
    // Start from a clean aggregator state before applying the row.
    final Aggregator[] rowAggs = aggregators.getAggregators();
    aggregators.reset(rowAggs);
    aggregators.aggregate(rowAggs, result);
    return rowAggs;
}
@Override
public Aggregator[] aggregate(Tuple result) {
    // Reset-then-aggregate so each call reflects exactly one row.
    final Aggregator[] perRowAggs = aggregators.getAggregators();
    aggregators.reset(perRowAggs);
    aggregators.aggregate(perRowAggs, result);
    return perRowAggs;
}
// Row consumed: discard any pending index mutations for it, fold the row
// into the running aggregation, and record that at least one row matched.
indexMutations.clear(); aggregators.aggregate(rowAggregators, result); hasAny = true;
private HashMap<ImmutableBytesWritable, Aggregator[]> populateHash() throws SQLException { hash = new HashMap<ImmutableBytesWritable, Aggregator[]>(HASH_AGG_INIT_SIZE, 0.75f); final int aggSize = aggregators.getEstimatedByteSize(); long keySize = 0; for (Tuple result = resultIterator.next(); result != null; result = resultIterator.next()) { ImmutableBytesWritable key = new ImmutableBytesWritable(UNITIALIZED_KEY_BUFFER); key = getGroupingKey(result, key); Aggregator[] rowAggregators = hash.get(key); if (rowAggregators == null) { keySize += key.getSize(); long hashSize = SizedUtil.sizeOfMap(hash.size() + 1, SizedUtil.IMMUTABLE_BYTES_WRITABLE_SIZE, aggSize) + keySize; if (hashSize > memoryChunk.getSize() + CLIENT_HASH_AGG_MEMORY_CHUNK_SIZE) { // This will throw InsufficientMemoryException if necessary memoryChunk.resize(hashSize + CLIENT_HASH_AGG_MEMORY_CHUNK_SIZE); } rowAggregators = aggregators.newAggregators(); hash.put(key, rowAggregators); } aggregators.aggregate(rowAggregators, result); } return hash; }
// Re-seed the aggregators from the previous row, then merge the current row
// into the same group's aggregation.
rowAggregators = aggregate(previous); aggregators.aggregate(rowAggregators, current);
// NOTE(review): publishes the local traversed-iterator counter to the field —
// presumably surfaced for metrics; verify against the enclosing method.
traversedIterators = this.traversedIterator;
// Aggregate the previously seen row first, then fold in the current row so
// both contribute to the same aggregator state.
rowAggregators = aggregate(previous); aggregators.aggregate(rowAggregators, current);
// NOTE(review): copies this.traversedIterator into traversedIterators —
// likely a stats handoff; confirm the consumer in surrounding code.
traversedIterators = this.traversedIterator;
@Override
public Tuple next() throws SQLException {
    // First row of the next group; null signals the underlying iterator is done.
    Tuple result = resultIterator.next();
    if (result == null) {
        return null;
    }
    // On the very first call currentKey still points at the sentinel buffer
    // (reference equality is deliberate); seed it from this row's key.
    if (currentKey.get() == UNITIALIZED_KEY_BUFFER) {
        getGroupingKey(result, currentKey);
    }
    // Shared aggregator set — reset to drop the previous group's state.
    Aggregator[] rowAggregators = aggregators.getAggregators();
    aggregators.reset(rowAggregators);
    // Accumulate rows until the grouping key changes. peek() keeps the first
    // row of the following group in the iterator while filling nextKey.
    while (true) {
        aggregators.aggregate(rowAggregators, result);
        Tuple nextResult = resultIterator.peek();
        if (nextResult == null || !currentKey.equals(getGroupingKey(nextResult, nextKey))) {
            break;
        }
        result = resultIterator.next();
    }
    // Emit one KeyValue carrying the serialized aggregates for this group.
    byte[] value = aggregators.toBytes(rowAggregators);
    Tuple tuple = wrapKeyValueAsResult(PhoenixKeyValueUtil.newKeyValue(currentKey, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, AGG_TIMESTAMP, value, 0, value.length));
    // Roll currentKey forward to the next group's key for the following call.
    currentKey.set(nextKey.get(), nextKey.getOffset(), nextKey.getLength());
    return tuple;
}
@Override
public Tuple next() throws SQLException {
    // Fetch the lead row of the next group; null means no more rows.
    Tuple result = resultIterator.next();
    if (result == null) {
        return null;
    }
    // Lazy initialization of currentKey on the first call — the identity
    // compare against the sentinel buffer is intentional.
    if (currentKey.get() == UNITIALIZED_KEY_BUFFER) {
        getGroupingKey(result, currentKey);
    }
    // Reset the shared aggregators so the new group starts from scratch.
    Aggregator[] rowAggregators = aggregators.getAggregators();
    aggregators.reset(rowAggregators);
    // Consume rows with the same grouping key; peek() leaves the next group's
    // first row in place and stores its key into nextKey as a side effect.
    while (true) {
        aggregators.aggregate(rowAggregators, result);
        Tuple nextResult = resultIterator.peek();
        if (nextResult == null || !currentKey.equals(getGroupingKey(nextResult, nextKey))) {
            break;
        }
        result = resultIterator.next();
    }
    // Package the group's aggregate bytes as a single-cell result tuple.
    byte[] value = aggregators.toBytes(rowAggregators);
    Tuple tuple = wrapKeyValueAsResult(PhoenixKeyValueUtil.newKeyValue(currentKey, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, AGG_TIMESTAMP, value, 0, value.length));
    // Prepare for the next call by promoting nextKey to currentKey.
    currentKey.set(nextKey.get(), nextKey.getOffset(), nextKey.getLength());
    return tuple;
}
// This row has been handled: clear its pending index mutations, add it to
// the running aggregation, and note that the aggregation is non-empty.
indexMutations.clear(); aggregators.aggregate(rowAggregators, result); hasAny = true;
// Drop the row's queued index mutations, aggregate the row, and flag that
// at least one row has contributed to the result.
indexMutations.clear(); aggregators.aggregate(rowAggregators, result); hasAny = true;