/**
 * Computes the number of bytes needed to store fields
 * [startField, startField + numFields) of {@code tuple}, including the
 * null-flag bytes and variable-length field-slot bytes for that range.
 */
@Override
public int bytesRequired(ITupleReference tuple, int startField, int numFields) {
    int total = getNullFlagsBytes(tuple, startField, numFields)
            + getFieldSlotsBytes(tuple, startField, numFields);
    final int end = startField + numFields;
    for (int fieldIdx = startField; fieldIdx < end; fieldIdx++) {
        total += tuple.getFieldLength(fieldIdx);
    }
    return total;
}
/**
 * Returns the length of logical field {@code fIdx} by mapping it to the
 * underlying tuple that owns it and that tuple's local field index.
 */
@Override
public int getFieldLength(int fIdx) {
    final int owningTuple = getTupleIndex(fIdx);
    final int localField = getFieldIndex(owningTuple, fIdx);
    return tuples[owningTuple].getFieldLength(localField);
}
/**
 * Sums the slot bytes needed for the payload fields
 * [inputKeyFieldCount, inputTotalFieldCount); only variable-length fields
 * require an encoded length slot.
 */
@Override
protected int getFieldSlotsBytes(ITupleReference tuple) {
    int slotBytes = 0;
    for (int f = inputKeyFieldCount; f < inputTotalFieldCount; f++) {
        if (!typeTraits[f].isFixedLength()) {
            slotBytes += VarLenIntEncoderDecoder.getBytesRequired(tuple.getFieldLength(f));
        }
    }
    return slotBytes;
}
/**
 * Writes the raw bytes of the tuple's single field (field 0) to the output,
 * followed by the {@code newLine} delimiter byte.
 *
 * @param output destination stream
 * @param tuple  tuple whose field 0 is emitted verbatim
 * @throws HyracksDataException wrapping any I/O failure
 */
@Override
public void write(DataOutput output, ITupleReference tuple) throws HyracksDataException {
    byte[] data = tuple.getFieldData(0);
    int start = tuple.getFieldStart(0);
    int len = tuple.getFieldLength(0);
    try {
        output.write(data, start, len);
        output.writeByte(newLine);
    } catch (java.io.IOException e) {
        // Catch only IOException (the checked exception DataOutput declares)
        // instead of the overly-broad Exception, so programming errors such
        // as NullPointerException are not silently masked as data exceptions.
        throw HyracksDataException.create(e);
    }
}
private void writePKValue(ByteBuffer buffer) { if (logSource == LogSource.LOCAL) { for (int i = 0; i < PKFieldCnt; i++) { buffer.put(PKValue.getFieldData(0), PKValue.getFieldStart(PKFields[i]), PKValue.getFieldLength(PKFields[i])); } } else { // since PKValue is already serialized in remote logs, just put it into buffer buffer.put(PKValue.getFieldData(0), 0, PKValueSize); } }
/**
 * Sums the slot bytes needed for fields [startField, startField + numFields);
 * only variable-length fields require an encoded length slot.
 */
protected int getFieldSlotsBytes(ITupleReference tuple, int startField, int numFields) {
    int slotBytes = 0;
    final int end = startField + numFields;
    for (int f = startField; f < end; f++) {
        if (!typeTraits[f].isFixedLength()) {
            slotBytes += VarLenIntEncoderDecoder.getBytesRequired(tuple.getFieldLength(f));
        }
    }
    return slotBytes;
}
/**
 * Deserializes the first {@code min(tuple.getFieldCount(), fields.length)}
 * fields of {@code tuple} using the corresponding serde for each field.
 *
 * @return one deserialized object per field, in field order
 * @throws HyracksDataException if a serde fails
 */
public static Object[] deserializeTuple(ITupleReference tuple, ISerializerDeserializer[] fields)
        throws HyracksDataException {
    final int count = Math.min(tuple.getFieldCount(), fields.length);
    final Object[] values = new Object[count];
    for (int f = 0; f < count; f++) {
        DataInput dataIn = new DataInputStream(new ByteArrayInputStream(tuple.getFieldData(f),
                tuple.getFieldStart(f), tuple.getFieldLength(f)));
        values[f] = fields[f].deserialize(dataIn);
    }
    return values;
}
private void appendPreviousMeta() throws IOException { // if has meta, then append meta if (hasMeta) { dos.write(prevTuple.getFieldData(metaFieldIndex), prevTuple.getFieldStart(metaFieldIndex), prevTuple.getFieldLength(metaFieldIndex)); tb.addFieldEndOffset(); } }
/**
 * Materializes a {@link Feed} from the serialized payload field of the given
 * metadata tuple.
 */
@Override
public Feed getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
    DataInput in = new DataInputStream(new ByteArrayInputStream(
            frameTuple.getFieldData(FEED_PAYLOAD_TUPLE_FIELD_INDEX),
            frameTuple.getFieldStart(FEED_PAYLOAD_TUPLE_FIELD_INDEX),
            frameTuple.getFieldLength(FEED_PAYLOAD_TUPLE_FIELD_INDEX)));
    ARecord feedRecord = recordSerDes.deserialize(in);
    return createFeedFromARecord(feedRecord);
}
/**
 * Lexicographically compares fields [startFieldIndex, startFieldIndex + numFields)
 * of the two tuples, returning the first non-zero comparator result, or 0 if
 * all compared fields are equal.
 */
public int fieldRangeCompare(ITupleReference tupleA, ITupleReference tupleB, int startFieldIndex, int numFields)
        throws HyracksDataException {
    final int end = startFieldIndex + numFields;
    for (int f = startFieldIndex; f < end; f++) {
        final int result = cmps[f].compare(tupleA.getFieldData(f), tupleA.getFieldStart(f),
                tupleA.getFieldLength(f), tupleB.getFieldData(f), tupleB.getFieldStart(f),
                tupleB.getFieldLength(f));
        if (result != 0) {
            return result;
        }
    }
    return 0;
}
/**
 * Materializes a {@link FeedPolicyEntity} from the serialized payload field
 * of the given metadata tuple.
 */
@Override
public FeedPolicyEntity getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
    DataInput in = new DataInputStream(new ByteArrayInputStream(
            frameTuple.getFieldData(FEED_POLICY_PAYLOAD_TUPLE_FIELD_INDEX),
            frameTuple.getFieldStart(FEED_POLICY_PAYLOAD_TUPLE_FIELD_INDEX),
            frameTuple.getFieldLength(FEED_POLICY_PAYLOAD_TUPLE_FIELD_INDEX)));
    ARecord feedPolicyRecord = recordSerDes.deserialize(in);
    return createFeedPolicyFromARecord(feedPolicyRecord);
}
/**
 * Single-field comparator: compares field 0 of both tuples with the
 * configured binary comparator.
 */
@Override
public int compare(ITupleReference tupleA, ITupleReference tupleB) throws HyracksDataException {
    final byte[] aBytes = tupleA.getFieldData(0);
    final byte[] bBytes = tupleB.getFieldData(0);
    return cmp.compare(aBytes, tupleA.getFieldStart(0), tupleA.getFieldLength(0),
            bBytes, tupleB.getFieldStart(0), tupleB.getFieldLength(0));
}
}
/**
 * Materializes a {@link Dataset} from the serialized payload field of the
 * given metadata tuple.
 */
@Override
public Dataset getMetadataEntityFromTuple(ITupleReference frameTuple) throws HyracksDataException {
    DataInput in = new DataInputStream(new ByteArrayInputStream(
            frameTuple.getFieldData(DATASET_PAYLOAD_TUPLE_FIELD_INDEX),
            frameTuple.getFieldStart(DATASET_PAYLOAD_TUPLE_FIELD_INDEX),
            frameTuple.getFieldLength(DATASET_PAYLOAD_TUPLE_FIELD_INDEX)));
    ARecord datasetRecord = recordSerDes.deserialize(in);
    return createDatasetFromARecord(datasetRecord);
}
/**
 * Lexicographically compares the first {@code cmps.length} fields of the two
 * tuples, returning the first non-zero comparator result, or 0 if all
 * compared fields are equal.
 */
public int compare(ITupleReference tupleA, ITupleReference tupleB) throws HyracksDataException {
    for (int f = 0; f < cmps.length; f++) {
        final int result = cmps[f].compare(tupleA.getFieldData(f), tupleA.getFieldStart(f),
                tupleA.getFieldLength(f), tupleB.getFieldData(f), tupleB.getFieldStart(f),
                tupleB.getFieldLength(f));
        if (result != 0) {
            return result;
        }
    }
    return 0;
}
/**
 * Lexicographically compares {@code cmps.length} fields of the two tuples
 * starting at {@code startFieldIndex}; comparator {@code i} is applied to
 * field {@code startFieldIndex + i}.
 */
public int compare(ITupleReference tupleA, ITupleReference tupleB, int startFieldIndex)
        throws HyracksDataException {
    for (int i = 0; i < cmps.length; i++) {
        final int f = startFieldIndex + i;
        final int result = cmps[i].compare(tupleA.getFieldData(f), tupleA.getFieldStart(f),
                tupleA.getFieldLength(f), tupleB.getFieldData(f), tupleB.getFieldStart(f),
                tupleB.getFieldLength(f));
        if (result != 0) {
            return result;
        }
    }
    return 0;
}
/**
 * Materializes a {@link CompactionPolicy} from the serialized payload field
 * of the given metadata tuple.
 */
@Override
public CompactionPolicy getMetadataEntityFromTuple(ITupleReference tuple) throws HyracksDataException {
    DataInput in = new DataInputStream(new ByteArrayInputStream(
            tuple.getFieldData(COMPACTION_POLICY_PAYLOAD_TUPLE_FIELD_INDEX),
            tuple.getFieldStart(COMPACTION_POLICY_PAYLOAD_TUPLE_FIELD_INDEX),
            tuple.getFieldLength(COMPACTION_POLICY_PAYLOAD_TUPLE_FIELD_INDEX)));
    ARecord compactionPolicyRecord = recordSerDes.deserialize(in);
    return createCompactionPolicyFromARecord(compactionPolicyRecord);
}
/**
 * Rebinds this object to a new input tuple and points the tokenizer at the
 * tuple's document field.
 */
public void reset(ITupleReference inputTuple) {
    this.inputTuple = inputTuple;
    final int doc = DOC_FIELD_INDEX;
    tokenizer.reset(inputTuple.getFieldData(doc), inputTuple.getFieldStart(doc),
            inputTuple.getFieldLength(doc));
}
/**
 * Computes the number of bytes needed to store the whole tuple: null-flag
 * bytes, field-slot bytes, and the raw length of every field.
 */
@Override
public int bytesRequired(ITupleReference tuple) {
    int total = getNullFlagsBytes(tuple) + getFieldSlotsBytes(tuple);
    final int fieldCount = tuple.getFieldCount();
    for (int f = 0; f < fieldCount; f++) {
        total += tuple.getFieldLength(f);
    }
    return total;
}
/**
 * Deep-copies the given tuple into {@code lastTupleBuilder} and rebinds
 * {@code lastTuple} to the copied bytes, so the saved value survives after
 * the source tuple's buffer is reused.
 */
protected void saveLastTuple(ITupleReference tuple) throws HyracksDataException {
    lastTupleBuilder.reset();
    final int fieldCount = tuple.getFieldCount();
    for (int f = 0; f < fieldCount; f++) {
        lastTupleBuilder.addField(tuple.getFieldData(f), tuple.getFieldStart(f), tuple.getFieldLength(f));
    }
    lastTuple.reset(lastTupleBuilder.getFieldEndOffsets(), lastTupleBuilder.getByteArray());
}
/**
 * Creates an independent deep copy of the given tuple: every field's bytes
 * are copied into a fresh builder and wrapped in a new
 * {@link ArrayTupleReference}.
 *
 * @return a copy whose bytes do not alias the source tuple's buffers
 */
public static ITupleReference copyTuple(ITupleReference tuple) throws HyracksDataException {
    final int fieldCount = tuple.getFieldCount();
    final ArrayTupleBuilder builder = new ArrayTupleBuilder(fieldCount);
    for (int f = 0; f < fieldCount; f++) {
        builder.addField(tuple.getFieldData(f), tuple.getFieldStart(f), tuple.getFieldLength(f));
    }
    final ArrayTupleReference copy = new ArrayTupleReference();
    copy.reset(builder.getFieldEndOffsets(), builder.getByteArray());
    return copy;
}