@Override
public int partition(IFrameTupleAccessor accessor, int tIndex, int nParts) throws HyracksDataException {
    // Partition in a finer-grained space scaled up by 'factor', then divide so that
    // every 'factor' consecutive fine partitions collapse onto one coarse partition.
    int finePartition = delegate.partition(accessor, tIndex, factor * nParts);
    return finePartition / factor;
}
};
@Override
public void open() throws HyracksDataException {
    // Prepare the tuple partition computer before any writer accepts data.
    tpc.initialize();
    // Open every downstream partition writer, recording each one as opened first
    // (presumably so cleanup paths can skip writers that never opened — confirm against close()/fail()).
    for (int p = 0; p < pWriters.length; ++p) {
        isOpen[p] = true;
        pWriters[p].open();
    }
}
@Override
public int partition(IFrameTupleAccessor accessor, int tIndex, int nParts) throws HyracksDataException {
    // Delegate into a partition space 'factor' times larger, then fold groups of
    // 'factor' adjacent fine-grained partitions back into a single result partition.
    int scaled = delegate.partition(accessor, tIndex, factor * nParts);
    return scaled / factor;
}
};
@Override
public int findVictimPartition(IFrameTupleAccessor accessor, int tIndex) throws HyracksDataException {
    // Hash the tuple into its table entry, map that entry to its memory partition,
    // and let the spill policy choose which partition to evict for it.
    int hashEntry = tpc.partition(accessor, tIndex, tableSize);
    int memoryPartition = getPartition(hashEntry);
    return spillPolicy.selectVictimPartition(memoryPartition);
}
};
@Override
public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
    tupleAccessor.reset(buffer);
    // Route each tuple in the incoming frame to its target partition writer.
    // With a single consumer there is nothing to hash — everything goes to partition 0.
    int count = tupleAccessor.getTupleCount();
    for (int t = 0; t < count; ++t) {
        int target = pWriters.length == 1 ? 0 : tpc.partition(tupleAccessor, t, pWriters.length);
        FrameUtils.appendToWriter(pWriters[target], appenders[target], tupleAccessor, t);
    }
}
/**
 * Consumes one build-side frame: partitions every tuple, hands it to
 * {@code processTuple}, and maintains the per-partition tuple counters.
 */
public void build(ByteBuffer buffer) throws HyracksDataException {
    accessorBuild.reset(buffer);
    int count = accessorBuild.getTupleCount();
    for (int t = 0; t < count; ++t) {
        int pid = buildHpc.partition(accessorBuild, t, numOfPartitions);
        processTuple(t, pid);
        // Running count of build tuples per partition.
        buildPSizeInTups[pid]++;
    }
}
@Override
public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
    tupleAccessor.reset(buffer);
    int count = tupleAccessor.getTupleCount();
    for (int t = 0; t < count; ++t) {
        int target = tpc.partition(tupleAccessor, t, consumerPartitionCount);
        // Output frames are allocated lazily, on the first tuple routed to a partition.
        if (!allocatedFrames[target]) {
            allocateFrames(target);
        }
        FrameUtils.appendToWriter(pWriters[target], appenders[target], tupleAccessor, t);
    }
}
public void build(ByteBuffer buffer) throws HyracksDataException { buffers.add(buffer); int bIndex = buffers.size() - 1; accessorBuild.reset(buffer); int tCount = accessorBuild.getTupleCount(); for (int i = 0; i < tCount; ++i) { int entry = tpcBuild.partition(accessorBuild, i, table.getTableSize()); storedTuplePointer.reset(bIndex, i); // If an insertion fails, then tries to insert the same tuple pointer again after compacting the table. if (!table.insert(entry, storedTuplePointer)) { compactTableAndInsertAgain(entry, storedTuplePointer); } } }
/** * Updates the given Header to Content Frame Pointer after calculating the corresponding hash value from the * given tuple pointer. */ private void updateHeaderToContentPointerInHeaderFrame(ITuplePointerAccessor bufferAccessor, ITuplePartitionComputer tpc, TuplePointer hashedTuple, int newContentFrame, int newOffsetInContentFrame) throws HyracksDataException { // Finds the original hash value. We assume that bufferAccessor and tpc is already assigned. bufferAccessor.reset(hashedTuple); int entry = tpc.partition(bufferAccessor, hashedTuple.getTupleIndex(), tableSize); // Finds the location of the hash value in the header frame arrays. int headerFrameIndex = getHeaderFrameIndex(entry); int offsetInHeaderFrame = getHeaderFrameOffset(entry); IntSerDeBuffer headerFrame = headers[headerFrameIndex]; // Updates the hash value. headerFrame.writeInt(offsetInHeaderFrame, newContentFrame); headerFrame.writeInt(offsetInHeaderFrame + 1, newOffsetInContentFrame); }
@Override
public boolean insert(IFrameTupleAccessor accessor, int tIndex) throws HyracksDataException {
    int entry = tpc.partition(accessor, tIndex, tableSize);
    // Walk the entry's chain looking for an existing group with the same key;
    // if found, fold this tuple into that aggregate instead of inserting anew.
    for (int i = 0; i < hashTableForTuplePointer.getTupleCount(entry); i++) {
        hashTableForTuplePointer.getTuplePointer(entry, i, pointer);
        bufferAccessor.reset(pointer);
        if (ftpcInputCompareToAggregate.compare(accessor, tIndex, bufferAccessor) == 0) {
            aggregateExistingTuple(accessor, tIndex, bufferAccessor, pointer.getTupleIndex());
            return true;
        }
    }
    // No matching group key found: start a new aggregate entry for this tuple.
    return insertNewAggregateEntry(entry, accessor, tIndex);
}
/**
 * Joins the probe-side tuple at {@code tid} against matching build-side tuples,
 * appending each qualifying match to the writer. When left-outer and no match is
 * found, appends the probe tuple padded with missing (null) build-side fields.
 * Assumes accessorProbe is already set to the current probe frame.
 */
void join(int tid, IFrameWriter writer) throws HyracksDataException {
    boolean matched = false;
    if (isTableCapacityNotZero) {
        int entry = tpcProbe.partition(accessorProbe, tid, table.getTableSize());
        int chainLength = table.getTupleCount(entry);
        for (int i = 0; i < chainLength; i++) {
            table.getTuplePointer(entry, i, storedTuplePointer);
            int buildFrame = storedTuplePointer.getFrameIndex();
            int buildTuple = storedTuplePointer.getTupleIndex();
            accessorBuild.reset(buffers.get(buildFrame));
            if (tpComparator.compare(accessorProbe, tid, accessorBuild, buildTuple) != 0) {
                continue;
            }
            // Keys match; the row is emitted only if the join predicate also holds.
            if (evaluatePredicate(tid, buildTuple)) {
                matched = true;
                appendToResult(tid, buildTuple, writer);
            }
        }
    }
    if (!matched && isLeftOuter) {
        FrameUtils.appendConcatToWriter(writer, appender, accessorProbe, tid,
                missingTupleBuild.getFieldEndOffsets(), missingTupleBuild.getByteArray(), 0,
                missingTupleBuild.getSize());
    }
}
int pid = probeHpc.partition(accessorProbe, i, numOfPartitions);