// Delegates the field count to the wrapped RecordDescriptor.
// NOTE(review): the trailing '}' closes the enclosing class, whose header is outside this chunk.
@Override public int getFieldCount() { return recordDescriptor.getFieldCount(); } }
/**
 * Creates the {@link RecordDescriptor} describing tuples stored in the external-file index,
 * pairing the external-file serdes with the index's fixed type traits.
 */
public static RecordDescriptor createFileIndexRecordDescriptor() {
    return new RecordDescriptor(
            createExternalFileTupleFieldsSerdes(),
            EXTERNAL_FILE_INDEX_TYPE_TRAITS);
}
protected RecordDescriptor getTaggedRecordDescriptor(RecordDescriptor recDescriptor) { ISerializerDeserializer[] fields = new ISerializerDeserializer[recDescriptor.getFields().length + NUM_TAG_FIELDS]; ITypeTraits[] traits = null; if (recDescriptor.getTypeTraits() != null) { traits = new ITypeTraits[recDescriptor.getTypeTraits().length + NUM_TAG_FIELDS]; } //component position field fields[COMPONENT_POS_OFFSET] = IntegerSerializerDeserializer.INSTANCE; if (traits != null) { traits[COMPONENT_POS_OFFSET] = IntegerPointable.TYPE_TRAITS; } //anti-matter field fields[ANTI_MATTER_OFFSET] = BooleanSerializerDeserializer.INSTANCE; if (traits != null) { traits[ANTI_MATTER_OFFSET] = BooleanPointable.TYPE_TRAITS; } for (int i = NUM_TAG_FIELDS; i < fields.length; i++) { fields[i] = recDescriptor.getFields()[i - NUM_TAG_FIELDS]; if (traits != null && i < traits.length) { traits[i] = recDescriptor.getTypeTraits()[i - NUM_TAG_FIELDS]; } } return new RecordDescriptor(fields, traits); }
/**
 * Writes every tuple in the incoming frame to {@code out}, one line per tuple, with fields
 * separated by {@code delim}.
 *
 * Fix: the original allocated a fresh {@code Object[]} per tuple that was never populated and
 * was read only for its length — the field count is hoisted instead.
 *
 * @param buffer the frame to print
 * @throws HyracksDataException if deserialization or writing fails
 */
@Override public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
    try {
        frameTupleAccessor.reset(buffer);
        final int fieldCount = recordDescriptor.getFieldCount();
        for (int tIndex = 0; tIndex < frameTupleAccessor.getTupleCount(); tIndex++) {
            // Position the stream at the tuple's data area, past the field-slot header.
            int start = frameTupleAccessor.getTupleStartOffset(tIndex)
                    + frameTupleAccessor.getFieldSlotsLength();
            bbis.setByteBuffer(buffer, start);
            for (int i = 0; i < fieldCount; ++i) {
                Object instance = recordDescriptor.getFields()[i].deserialize(di);
                // The delimiter precedes every field except the first.
                if (i == 0) {
                    out.write(String.valueOf(instance));
                } else {
                    out.write(delim + String.valueOf(instance));
                }
            }
            out.write("\n");
        }
    } catch (IOException ex) {
        throw HyracksDataException.create(ex);
    }
}
// NOTE(review): fragment — the for-loop's closing brace and the enclosing method lie outside
// this chunk. The loop copies each field serde and type trait from recordDesc into the
// token/key-pair arrays; the trailing declarations build the token/key-pair descriptor.
for (int k = 0; k < recordDesc.getFieldCount(); k++) { tokenKeyPairFields[k] = recordDesc.getFields()[k]; tokenKeyPairTypeTraits[k] = recordDesc.getTypeTraits()[k]; int tokenOffset = recordDesc.getFieldCount(); RecordDescriptor tokenKeyPairRecDesc = new RecordDescriptor(tokenKeyPairFields, tokenKeyPairTypeTraits); IOperatorDescriptor tokenizerOp;
/**
 * Serializes the upsert-indicator boolean as field 0 of the tuple being built and closes the
 * field in the tuple builder.
 *
 * @param isUpsert true to write {@code ABoolean.TRUE}, false for {@code ABoolean.FALSE}
 * @throws IOException if serialization fails
 */
private void appendUpsertIndicator(boolean isUpsert) throws IOException {
    final ABoolean indicator = isUpsert ? ABoolean.TRUE : ABoolean.FALSE;
    recordDesc.getFields()[0].serialize(indicator, dos);
    tb.addFieldEndOffset();
}
// NOTE(review): fragment — starts mid-expression and ends mid-constructor-call; the enclosing
// method is outside this chunk. It sizes the upsert output arrays as
// input fields + 1 + (2 with a meta part, else 1) + filter fields, copies the input
// traits/serdes shifted by f, and begins constructing the LSM primary-upsert operator.
new IndexDataflowHelperFactory(storageComponentProvider.getStorageManager(), splitsAndConstraint.first); LSMPrimaryUpsertOperatorDescriptor op; ITypeTraits[] outputTypeTraits = new ITypeTraits[inputRecordDesc.getFieldCount() + 1 + (dataset.hasMetaPart() ? 2 : 1) + numFilterFields]; ISerializerDeserializer<?>[] outputSerDes = new ISerializerDeserializer[inputRecordDesc.getFieldCount() + 1 + (dataset.hasMetaPart() ? 2 : 1) + numFilterFields]; IDataFormat dataFormat = metadataProvider.getDataFormat(); f++; for (int j = 0; j < inputRecordDesc.getFieldCount(); j++) { outputTypeTraits[j + f] = inputRecordDesc.getTypeTraits()[j]; outputSerDes[j + f] = inputRecordDesc.getFields()[j]; RecordDescriptor outputRecordDesc = new RecordDescriptor(outputSerDes, outputTypeTraits); op = new LSMPrimaryUpsertOperatorDescriptor(spec, outputRecordDesc, fieldPermutation, idfh, missingWriterFactory, modificationCallbackFactory, searchCallbackFactory,
/**
 * Deserializes the next record from the input stream into {@code record}, one field per slot.
 *
 * @param record destination array; its length determines how many fields are read
 * @return false if the stream is already at end-of-input, true otherwise
 * @throws Exception if deserialization fails
 */
@Override public boolean read(Object[] record) throws Exception {
    // Peek a single byte to detect end-of-stream without consuming data.
    in.mark(1);
    if (in.read() < 0) {
        return false;
    }
    in.reset();
    final ISerializerDeserializer[] serdes = recordDesc.getFields();
    for (int f = 0; f < record.length; f++) {
        record[f] = serdes[f].deserialize(in);
    }
    return true;
}
/**
 * Deserializes the tuple at the current {@code tIndex} into an {@code Object[]} and advances
 * {@code tIndex}.
 *
 * Fix: the original swallowed an {@link IOException} from the frame-magic debug check with
 * {@code printStackTrace()} even though the method declares {@code HyracksDataException};
 * the exception is now propagated, since a failed magic read indicates a corrupt frame.
 *
 * @return the deserialized field values of the current tuple
 * @throws HyracksDataException on deserialization failure or a failed frame-magic read
 */
public Object[] deserializeRecord() throws HyracksDataException {
    // Position the byte stream at the start of the tuple's data area, past the field slots.
    int start = frameTupleAccessor.getTupleStartOffset(tIndex) + frameTupleAccessor.getFieldSlotsLength();
    bbis.setByteBuffer(buffer, start);
    Object[] record = new Object[recordDescriptor.getFieldCount()];
    for (int i = 0; i < record.length; ++i) {
        Object instance = recordDescriptor.getFields()[i].deserialize(di);
        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace(i + " " + LogRedactionUtil.userData(instance.toString()));
        }
        record[i] = instance;
        if (FrameConstants.DEBUG_FRAME_IO) {
            try {
                if (di.readInt() != FrameConstants.FRAME_FIELD_MAGIC) {
                    throw new HyracksDataException("Field magic mismatch");
                }
            } catch (IOException e) {
                // Propagate instead of swallowing: losing this error hides frame corruption.
                throw HyracksDataException.create(e);
            }
        }
    }
    if (LOGGER.isTraceEnabled()) {
        LOGGER.trace("Read Record tIndex = " + tIndex + ", tupleCount = " + tupleCount);
    }
    ++tIndex;
    return record;
}
/** @return the number of fields, as reported by the wrapped record descriptor. */
@Override
public int getFieldCount() {
    return recordDescriptor.getFieldCount();
}
/**
 * Contributes an empty-tuple-source micro operator for {@code op} to the job being built.
 * The operator produces no fields, so its output descriptor is empty.
 */
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    final EmptyTupleSourceRuntimeFactory runtime = new EmptyTupleSourceRuntimeFactory();
    runtime.setSourceLocation(op.getSourceLocation());
    // Zero-field output: the source emits a single empty tuple.
    final RecordDescriptor emptyDesc = new RecordDescriptor(new ISerializerDeserializer[0]);
    builder.contributeMicroOperator(op, runtime, emptyDesc);
}
// NOTE(review): fragment — the loop and method bodies are truncated (unbalanced braces) and
// the offset variable 'f' is declared outside this chunk. The visible code sizes the output
// arrays as input fields + (2 with a meta part, else 1) + filter fields, copies the input
// traits/serdes shifted by f, and returns the combined descriptor.
private RecordDescriptor getUpsertOutRecDesc(RecordDescriptor inputRecordDesc, Dataset dataset, int numFilterFields, ARecordType itemType, ARecordType metaItemType) throws Exception { ITypeTraits[] outputTypeTraits = new ITypeTraits[inputRecordDesc.getFieldCount() + (dataset.hasMetaPart() ? 2 : 1) + numFilterFields]; ISerializerDeserializer<?>[] outputSerDes = new ISerializerDeserializer[inputRecordDesc.getFieldCount() + (dataset.hasMetaPart() ? 2 : 1) + numFilterFields]; f++; for (int j = 0; j < inputRecordDesc.getFieldCount(); j++) { outputTypeTraits[j + f] = inputRecordDesc.getTypeTraits()[j]; outputSerDes[j + f] = inputRecordDesc.getFields()[j]; return new RecordDescriptor(outputSerDes, outputTypeTraits);
// NOTE(review): fragment — unbalanced braces; the enclosing method and most declarations are
// outside this chunk. The visible statements append the primary-key serdes/traits after the
// nested secondary keys, build the enforced and secondary descriptors, and assemble the
// point-MBR variant of the secondary descriptor.
secondaryRecFields[numNestedSecondaryKeyFields + i] = primaryRecDesc.getFields()[i]; secondaryTypeTraits[numNestedSecondaryKeyFields + i] = primaryRecDesc.getTypeTraits()[i]; enforcedRecFields[i] = primaryRecDesc.getFields()[i]; enforcedTypeTraits[i] = primaryRecDesc.getTypeTraits()[i]; enforcedRecDesc = new RecordDescriptor(enforcedRecFields, enforcedTypeTraits); if (numFilterFields > 0) { rtreeFields = new int[numNestedSecondaryKeyFields + numPrimaryKeys]; secondaryRecFields[numPrimaryKeys + numNestedSecondaryKeyFields] = serde; secondaryRecDesc = new RecordDescriptor(secondaryRecFields); primaryKeyFields = new int[numPrimaryKeys]; for (int i = 0; i < primaryKeyFields.length; i++) { recFieldsForPointMBR[idx++] = secondaryRecFields[numNestedSecondaryKeyFields + i]; secondaryRecDescForPointMBR = new RecordDescriptor(recFieldsForPointMBR);
/**
 * Debugging helper: deserializes field {@code fieldsIdx} of {@code tuple} using the serde at
 * {@code descIdx} in {@code recordDescriptor} and prints it to {@code System.err}.
 *
 * Fix: the original caught {@link IOException} and called {@code printStackTrace()} even
 * though the method declares {@code HyracksDataException} (which was otherwise never thrown);
 * the exception is now propagated.
 *
 * @param fta unused; retained for signature compatibility with the sibling overloads
 * @param recordDescriptor supplies the deserializer for the field
 * @param tuple the tuple whose field is printed
 * @param fieldsIdx index of the field within the tuple
 * @param descIdx index of the serde within the record descriptor
 * @throws HyracksDataException if deserialization or stream handling fails
 */
public static void prettyPrint(IFrameTupleAccessor fta, RecordDescriptor recordDescriptor, ITupleReference tuple,
        int fieldsIdx, int descIdx) throws HyracksDataException {
    try (ByteBufferInputStream bbis = new ByteBufferInputStream();
            DataInputStream dis = new DataInputStream(bbis)) {
        StringBuilder sb = new StringBuilder();
        sb.append("[");
        sb.append("f" + fieldsIdx + ":(" + tuple.getFieldStart(fieldsIdx) + ", "
                + (tuple.getFieldLength(fieldsIdx) + tuple.getFieldStart(fieldsIdx)) + ") ");
        sb.append("{");
        ByteBuffer bytebuff = ByteBuffer.wrap(tuple.getFieldData(fieldsIdx));
        bbis.setByteBuffer(bytebuff, tuple.getFieldStart(fieldsIdx));
        sb.append(recordDescriptor.getFields()[descIdx].deserialize(dis));
        sb.append("}");
        sb.append("\n");
        System.err.println(sb.toString());
    } catch (IOException e) {
        // Propagate instead of swallowing — callers already expect HyracksDataException.
        throw HyracksDataException.create(e);
    }
}
/** @return the field count of the underlying record descriptor. */
@Override
public int getFieldCount() {
    return recordDescriptor.getFieldCount();
}
/**
 * Builds a mocked {@link IRecordDescriptorProvider} whose input record descriptor is derived
 * from the primary-key types, for exercising search operators in tests.
 */
public IRecordDescriptorProvider getSearchRecordDescriptorProvider() {
    final int numKeys = primaryKeyTypes.length;
    final ITypeTraits[] keyTraits = new ITypeTraits[numKeys];
    final ISerializerDeserializer<?>[] keySerdes = new ISerializerDeserializer<?>[numKeys];
    for (int i = 0; i < numKeys; i++) {
        keyTraits[i] = TypeTraitProvider.INSTANCE.getTypeTrait(primaryKeyTypes[i]);
        keySerdes[i] = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(primaryKeyTypes[i]);
    }
    final RecordDescriptor searchRecDesc = new RecordDescriptor(keySerdes, keyTraits);
    // Any activity/input index maps to the same descriptor.
    final IRecordDescriptorProvider provider = Mockito.mock(IRecordDescriptorProvider.class);
    Mockito.when(provider.getInputRecordDescriptor(Mockito.any(), Mockito.anyInt())).thenReturn(searchRecDesc);
    return provider;
}
// NOTE(review): fragment — unbalanced braces; the enclosing method and most declarations are
// outside this chunk. The visible statements append the primary-key serdes/traits after the
// nested secondary keys, build the enforced and secondary descriptors, and assemble the
// point-MBR variant of the secondary descriptor.
secondaryRecFields[numNestedSecondaryKeyFields + i] = primaryRecDesc.getFields()[i]; secondaryTypeTraits[numNestedSecondaryKeyFields + i] = primaryRecDesc.getTypeTraits()[i]; enforcedRecFields[i] = primaryRecDesc.getFields()[i]; enforcedTypeTraits[i] = primaryRecDesc.getTypeTraits()[i]; enforcedRecDesc = new RecordDescriptor(enforcedRecFields, enforcedTypeTraits); if (numFilterFields > 0) { rtreeFields = new int[numNestedSecondaryKeyFields + numPrimaryKeys]; secondaryRecFields[numPrimaryKeys + numNestedSecondaryKeyFields] = serde; secondaryRecDesc = new RecordDescriptor(secondaryRecFields); primaryKeyFields = new int[numPrimaryKeys]; for (int i = 0; i < primaryKeyFields.length; i++) { recFieldsForPointMBR[idx++] = secondaryRecFields[numNestedSecondaryKeyFields + i]; secondaryRecDescForPointMBR = new RecordDescriptor(recFieldsForPointMBR);
/**
 * Debugging helper: deserializes every field of {@code tuple} using the serde indices in
 * {@code descF} and prints them to {@code System.err}.
 *
 * Fix: the original caught {@link IOException} and called {@code printStackTrace()} even
 * though the method declares {@code HyracksDataException} (which was otherwise never thrown);
 * the exception is now propagated.
 *
 * @param fta unused; retained for signature compatibility with the sibling overloads
 * @param recordDescriptor supplies the deserializers
 * @param tuple the tuple to print
 * @param descF for each tuple field j, the serde index to deserialize it with
 * @throws HyracksDataException if deserialization or stream handling fails
 */
public static void prettyPrint(IFrameTupleAccessor fta, RecordDescriptor recordDescriptor, ITupleReference tuple,
        int[] descF) throws HyracksDataException {
    try (ByteBufferInputStream bbis = new ByteBufferInputStream();
            DataInputStream dis = new DataInputStream(bbis)) {
        StringBuilder sb = new StringBuilder();
        sb.append("[");
        for (int j = 0; j < descF.length; ++j) {
            sb.append("f" + j + ":(" + tuple.getFieldStart(j) + ", "
                    + (tuple.getFieldLength(j) + tuple.getFieldStart(j)) + ") ");
            sb.append("{");
            ByteBuffer bytebuff = ByteBuffer.wrap(tuple.getFieldData(j));
            bbis.setByteBuffer(bytebuff, tuple.getFieldStart(j));
            sb.append(recordDescriptor.getFields()[descF[j]].deserialize(dis));
            sb.append("}");
        }
        sb.append("\n");
        System.err.println(sb.toString());
    } catch (IOException e) {
        // Propagate instead of swallowing — callers already expect HyracksDataException.
        throw HyracksDataException.create(e);
    }
}
/**
 * @return the total byte length of the tuple's field-slot area: 4 bytes per field.
 *         NOTE(review): the constant 4 presumably matches the per-field slot width used by
 *         the frame layout — confirm against the frame constants.
 */
@Override
public int getFieldSlotsLength() {
    return 4 * recordDescriptor.getFieldCount();
}
/**
 * Builds the output record descriptor for a primary-index search: the key fields, the payload
 * record, and — when {@code metaType} is non-null — the meta record.
 */
public RecordDescriptor getSearchOutputDesc(IAType[] keyTypes, ARecordType recordType, ARecordType metaType) {
    // keys + payload record + optional meta record
    final int numTupleFields = keyTypes.length + 1 + (metaType == null ? 0 : 1);
    final ITypeTraits[] traits =
            createPrimaryIndexTypeTraits(numTupleFields, keyTypes, recordType, metaType);
    final ISerializerDeserializer<?>[] serdes =
            createPrimaryIndexSerdes(numTupleFields, keyTypes, recordType, metaType);
    return new RecordDescriptor(serdes, traits);
}