/** Number of fields per record, as defined by the wrapped descriptor. */
@Override
public int getFieldCount() {
    return recordDescriptor.getFieldCount();
}
}
/**
 * Total size in bytes of the field-slot array that precedes tuple data:
 * one 4-byte end-offset slot per field.
 */
@Override
public int getFieldSlotsLength() {
    return recordDescriptor.getFieldCount() * 4;
}
/** Number of fields per record, delegated to the record descriptor. */
@Override
public int getFieldCount() {
    return recordDescriptor.getFieldCount();
}
/**
 * Minimum free frame space needed to append one tuple for this descriptor:
 * one 4-byte field-slot per field, plus 4 bytes for the tuple offset entry.
 */
private static int calculateMinFreeSpace(RecordDescriptor recordDescriptor) {
    // + 4 for the tuple offset
    return recordDescriptor.getFieldCount() * 4 + 4;
}
/** Field count of a record, taken from the wrapped descriptor. */
@Override
public int getFieldCount() {
    return recordDescriptor.getFieldCount();
}
/** Field count of a record; simply forwards to the record descriptor. */
@Override
public int getFieldCount() {
    return recordDescriptor.getFieldCount();
}
/** Width (field count) of the final output record descriptor in the chain. */
public int getOutputWidth() {
    RecordDescriptor last = recordDescriptors[recordDescriptors.length - 1];
    return last.getFieldCount();
}
/**
 * Builds the tuple processor for secondary-index creation.
 *
 * @param ctx task context
 * @param partition partition this operator instance runs on
 * @param inputRecDesc descriptor of incoming tuples
 * @param missingWriterFactory produces the writer used to emit MISSING values
 * @param numTagFields number of leading tag fields
 * @param numSecondaryKeys number of secondary-key fields
 * @param numPrimaryKeys number of primary-key fields
 * @param hasBuddyBTree whether the target index keeps a buddy BTree
 * @throws HyracksDataException from the superclass constructor
 */
public LSMSecondaryIndexCreationTupleProcessorNodePushable(IHyracksTaskContext ctx, int partition,
        RecordDescriptor inputRecDesc, IMissingWriterFactory missingWriterFactory, int numTagFields,
        int numSecondaryKeys, int numPrimaryKeys, boolean hasBuddyBTree) throws HyracksDataException {
    super(ctx, partition, inputRecDesc, numTagFields, numSecondaryKeys, numPrimaryKeys, hasBuddyBTree);
    // Buffer that remembers the previous matter tuple between calls.
    this.prevMatterTupleBuilder = new ArrayTupleBuilder(inputRecDesc.getFieldCount());
    // A MISSING writer is only required when secondary keys must be replaced
    // by MISSING values (buddy-BTree case); otherwise it is never used.
    missingWriter = this.hasBuddyBTree ? missingWriterFactory.createMissingWriter() : null;
}
private TupleOuterProduct(RecordDescriptor recordDescriptor, IMissingWriter[] missingWriters) { ta = new FrameTupleAccessor(recordDescriptor); tb = new ArrayTupleBuilder( missingWriters.length + SubplanRuntimeFactory.this.inputRecordDesc.getFieldCount()); this.missingWriters = missingWriters; }
/**
 * Creates a writer that serializes object records into frames and forwards
 * them to {@code frameWriter}.
 *
 * @param ctx task context used to allocate the output frame
 * @param recordDescriptor layout of the records being serialized
 * @param frameWriter downstream consumer of the produced frames
 * @throws HyracksDataException if frame allocation fails
 */
public SerializingDataWriter(IHyracksTaskContext ctx, RecordDescriptor recordDescriptor, IFrameWriter frameWriter)
        throws HyracksDataException {
    this.recordDescriptor = recordDescriptor;
    this.frameWriter = frameWriter;
    tb = new ArrayTupleBuilder(recordDescriptor.getFieldCount());
    tupleAppender = new FrameTupleAppender(new VSizeFrame(ctx));
    // Not open until open() is called by the framework.
    open = false;
}
/**
 * Deserializes every tuple in the frame and writes it as a delimited text
 * line (fields separated by {@code delim}, one tuple per line) to {@code out}.
 *
 * Fix: the original allocated an {@code Object[] record} per tuple that was
 * never populated — only its length was read. The dead allocation is removed
 * and the loop bound hoisted.
 *
 * @param buffer frame containing serialized tuples
 * @throws HyracksDataException if deserialization or writing fails
 */
@Override
public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
    try {
        frameTupleAccessor.reset(buffer);
        int tupleCount = frameTupleAccessor.getTupleCount();
        int fieldCount = recordDescriptor.getFieldCount();
        for (int tIndex = 0; tIndex < tupleCount; tIndex++) {
            // Position the stream at the first data byte of this tuple,
            // just past the field-slot array.
            int start =
                    frameTupleAccessor.getTupleStartOffset(tIndex) + frameTupleAccessor.getFieldSlotsLength();
            bbis.setByteBuffer(buffer, start);
            for (int i = 0; i < fieldCount; ++i) {
                Object instance = recordDescriptor.getFields()[i].deserialize(di);
                if (i == 0) {
                    out.write(String.valueOf(instance));
                } else {
                    out.write(delim + String.valueOf(instance));
                }
            }
            out.write("\n");
        }
    } catch (IOException ex) {
        throw HyracksDataException.create(ex);
    }
}
/**
 * Deserializes the tuple at the current cursor position ({@code tIndex}) into
 * an {@code Object[]} with one entry per field, then advances the cursor.
 *
 * Fix: the debug magic-number check previously called
 * {@code e.printStackTrace()} and swallowed the IOException; it now propagates
 * the failure as a HyracksDataException, matching the file's error-handling
 * idiom.
 *
 * @return the deserialized field values of the current tuple
 * @throws HyracksDataException if deserialization fails or the frame debug
 *         magic value does not match
 */
public Object[] deserializeRecord() throws HyracksDataException {
    // Skip the field-slot array to reach the first data byte of the tuple.
    int start = frameTupleAccessor.getTupleStartOffset(tIndex) + frameTupleAccessor.getFieldSlotsLength();
    bbis.setByteBuffer(buffer, start);
    Object[] record = new Object[recordDescriptor.getFieldCount()];
    for (int i = 0; i < record.length; ++i) {
        Object instance = recordDescriptor.getFields()[i].deserialize(di);
        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace(i + " " + LogRedactionUtil.userData(instance.toString()));
        }
        record[i] = instance;
        if (FrameConstants.DEBUG_FRAME_IO) {
            // In debug mode every field is followed by a magic int; a mismatch
            // means the frame layout is corrupt.
            try {
                if (di.readInt() != FrameConstants.FRAME_FIELD_MAGIC) {
                    throw new HyracksDataException("Field magic mismatch");
                }
            } catch (IOException e) {
                throw HyracksDataException.create(e);
            }
        }
    }
    if (LOGGER.isTraceEnabled()) {
        LOGGER.trace("Read Record tIndex = " + tIndex + ", tupleCount = " + tupleCount);
    }
    ++tIndex;
    return record;
}
/**
 * Forwards a matter (non-deleted) tuple unchanged to the downstream writer.
 */
private void writeMatterTuple(ITupleReference tuple) throws HyracksDataException {
    // simply output the original tuple to the writer
    TupleUtils.copyTuple(tb, tuple, recordDesc.getFieldCount());
    FrameUtils.appendToWriter(writer, appender, tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
}
/**
 * Allocates the frame, fixed-field appender, tuple accessor, and tuple
 * reference used for per-field access over incoming frames.
 *
 * @param ctx task context used to allocate the frame
 * @throws HyracksDataException if frame allocation fails
 */
protected final void initAccessAppendFieldRef(IHyracksTaskContext ctx) throws HyracksDataException {
    tAccess = new FrameTupleAccessor(inputRecordDesc);
    tRef = new FrameTupleReference();
    frame = new VSizeFrame(ctx);
    appender = new FrameFixedFieldTupleAppender(inputRecordDesc.getFieldCount());
    // Start appending into a cleared frame.
    appender.reset(frame, true);
}
/**
 * Opens the downstream writer first, then allocates the accessor, output
 * tuple builder, and appender used while processing frames.
 */
@Override
public void open() throws HyracksDataException {
    writer.open();
    accessor = new FrameTupleAccessor(inputRecDesc);
    builder = new ArrayTupleBuilder(outputRecDesc.getFieldCount());
    // Direct handle to the builder's backing byte array.
    builderData = builder.getFieldData();
    appender = new FrameTupleAppender(new VSizeFrame(ctx), true);
}
/**
 * Opens the operator: registers a per-partition deleted-tuple counter as job
 * state, then allocates the tuple builder and frame appender.
 */
@Override
public void open() throws HyracksDataException {
    super.open();
    // The counter is shared via the task state store so a later activity in
    // the same job can read the per-component deleted-tuple counts.
    deletedTupleCounter = new DeletedTupleCounter(ctx.getJobletContext().getJobId(), partition);
    ctx.setStateObject(deletedTupleCounter);
    try {
        tb = new ArrayTupleBuilder(recordDesc.getFieldCount());
        dos = tb.getDataOutput();
        appender = new FrameTupleAppender(new VSizeFrame(ctx), true);
    } catch (Exception e) {
        throw HyracksDataException.create(e);
    }
}
/**
 * Builds the bulk-load pushable for a secondary index, wiring up the
 * primary/secondary index helpers and the three tuple views used later:
 * the permuted load tuple, the source tuple (tags stripped), and the
 * deleted-key tuple (tags and secondary keys stripped).
 *
 * @throws HyracksDataException if the index dataflow helpers cannot be created
 */
public LSMSecondaryIndexBulkLoadNodePushable(IHyracksTaskContext ctx, int partition,
        RecordDescriptor inputRecDesc, IIndexDataflowHelperFactory primaryIndexHelperFactory,
        IIndexDataflowHelperFactory secondaryIndexHelperFactory, int[] fieldPermutation, int numTagFields,
        int numSecondaryKeys, int numPrimaryKeys, boolean hasBuddyBTree) throws HyracksDataException {
    super(ctx, partition, inputRecDesc, numTagFields, numSecondaryKeys, numPrimaryKeys, hasBuddyBTree);
    this.primaryIndexHelper =
            primaryIndexHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition);
    this.secondaryIndexHelper =
            secondaryIndexHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition);
    this.tuple = new PermutingFrameTupleReference(fieldPermutation);
    // Source tuple: every input field after the leading tag fields, in order.
    int[] sourcePermutation = new int[fieldPermutation.length - numTagFields];
    for (int i = 0; i < sourcePermutation.length; i++) {
        sourcePermutation[i] = i + numTagFields;
    }
    sourceTuple = new PermutingTupleReference(sourcePermutation);
    // Deleted-key tuple: fields after both the tag fields and the secondary
    // keys (i.e. primary keys and any trailing fields such as filters).
    int[] deletedKeyPermutation = new int[inputRecDesc.getFieldCount() - numTagFields - numSecondaryKeys];
    for (int i = 0; i < deletedKeyPermutation.length; i++) {
        deletedKeyPermutation[i] = i + numTagFields + numSecondaryKeys;
    }
    deletedKeyTuple = new PermutingTupleReference(deletedKeyPermutation);
}
/**
 * Processes each tuple in the frame, dispatching on whether it is an
 * anti-matter (delete marker) or matter tuple. Matter tuples are copied into
 * {@code prevMatterTupleBuilder} so the next anti-matter tuple can be
 * compared against the most recent matter tuple.
 */
@Override
public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
    accessor.reset(buffer);
    int tupleCount = accessor.getTupleCount();
    for (int i = 0; i < tupleCount; i++) {
        try {
            // if both previous value and new value are null, then we skip
            tuple.reset(accessor, i);
            if (isAntiMatterTuple(tuple)) {
                processAntiMatterTuple(tuple);
                // An anti-matter tuple invalidates the remembered matter tuple.
                hasPrevMatterTuple = false;
            } else {
                processMatterTuple(tuple);
                // save the matter tuple
                TupleUtils.copyTuple(prevMatterTupleBuilder, tuple, recordDesc.getFieldCount());
                prevMatterTuple.reset(prevMatterTupleBuilder.getFieldEndOffsets(),
                        prevMatterTupleBuilder.getByteArray());
                hasPrevMatterTuple = true;
            }
        } catch (Exception e) {
            throw HyracksDataException.create(e);
        }
    }
}
/**
 * Generates {@code numRecords} synthetic records, one field at a time via
 * {@link #genField}, and pushes them to the downstream writer.
 *
 * Fix: exceptions are now wrapped with {@code HyracksDataException.create(e)}
 * (consistent with the rest of the file) instead of
 * {@code new HyracksDataException(e)}, which avoids double-wrapping when the
 * cause is already a HyracksDataException.
 */
@Override
public void initialize() throws HyracksDataException {
    writer.open();
    try {
        for (int i = 0; i < numRecords; i++) {
            tb.reset();
            for (int j = 0; j < recDesc.getFieldCount(); j++) {
                genField(tb, j);
            }
            if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
                // Frame is full: flush it and retry; a second failure means the
                // single record is larger than a frame.
                appender.flush(writer, true);
                if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
                    throw new HyracksDataException("Record size (" + tb.getSize() + ") larger than frame size ("
                            + appender.getBuffer().capacity() + ")");
                }
            }
        }
        // Push out whatever remains in the last partially-filled frame.
        appender.flush(writer, true);
    } catch (Exception e) {
        writer.fail();
        throw HyracksDataException.create(e);
    } finally {
        writer.close();
    }
}
/**
 * Emits an anti-matter (delete marker) tuple for the given component and
 * records the deletion in the counter. Output layout: [componentPos tag,
 * anti-matter flag, secondary keys (or MISSING placeholders), remaining
 * fields].
 */
private void writeAntiMatterTuple(ITupleReference tuple, int componentPos) throws HyracksDataException {
    deletedTupleCounter.inc(componentPos);
    tb.reset();
    // write tag fields
    tb.addField(IntegerSerializerDeserializer.INSTANCE, componentPos);
    tb.addField(BooleanSerializerDeserializer.INSTANCE, true);
    if (hasBuddyBTree) {
        // the output tuple does not have secondary keys (only primary keys + filter values)
        // write secondary keys (missing)
        for (int i = 0; i < numSecondaryKeys; i++) {
            missingWriter.writeMissing(dos);
            tb.addFieldEndOffset();
        }
    } else {
        // Copy the secondary keys through from the input tuple.
        for (int i = numTagFields; i < numTagFields + numSecondaryKeys; i++) {
            tb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
        }
    }
    // write all remaining fields
    for (int i = numTagFields + numSecondaryKeys; i < recordDesc.getFieldCount(); i++) {
        tb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
    }
    FrameUtils.appendToWriter(writer, appender, tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
}