// NOTE(review): This is the interior of a method whose signature lies outside
// this view (it reads locals/fields such as numNestedSecondaryKeyFields,
// isPointMBR, spec, dataset, index, keyType, secondaryComparatorFactories,
// secondaryRecDesc* and indexDataflowHelperFactory that are declared
// elsewhere). Comments below describe only what these lines show.

// Field permutation consumed by the tree-index bulk-load operator created below.
int[] fieldPermutation = createFieldPermutationForBulkLoadOp(numNestedSecondaryKeyFields);
// When the MBR is a point, only half of the nested secondary key fields are
// counted (presumably min == max coordinates collapse — TODO confirm against
// the enclosing helper).
// NOTE(review): identifier is misspelled ("KeFields" -> "KeyFields"); renaming
// would touch uses outside this view, so it is left unchanged here.
int numNestedSecondaryKeFieldsConsideringPointMBR =
isPointMBR ? numNestedSecondaryKeyFields / 2 : numNestedSecondaryKeyFields;
// NOTE(review): unlike the fragment further down (which guards the cast op
// with isOverridingKeyFieldTypes && !enforcedItemType.equals(itemType)), the
// cast op is applied unconditionally here — verify this asymmetry is intended.
sourceOp = createCastOp(spec, dataset.getDatasetType(), index.isEnforced());
spec.connect(new OneToOneConnectorDescriptor(spec), primaryScanOp, 0, sourceOp, 0);
// Assign op producing the secondary-key fields (point-MBR-adjusted count).
AlgebricksMetaOperatorDescriptor asterixAssignOp = createAssignOp(spec,
numNestedSecondaryKeFieldsConsideringPointMBR, secondaryRecDescConsideringPointMBR);
// Select op that filters out tuples with null secondary keys before loading.
selectOp = createFilterNullsSelectOp(spec, numNestedSecondaryKeFieldsConsideringPointMBR,
secondaryRecDescConsideringPointMBR);
// Sort tuples by a linearized (space-filling-curve style — TODO confirm via
// MetadataProvider.proposeLinearizer) comparator so the bulk load sees
// ordered input.
ExternalSortOperatorDescriptor sortOp = createSortOp(spec,
new IBinaryComparatorFactory[] {
MetadataProvider.proposeLinearizer(keyType, secondaryComparatorFactories.length) },
isPointMBR ? secondaryRecDescForPointMBR : secondaryRecDesc);
// Bulk-load the secondary tree index using the permutation computed above.
TreeIndexBulkLoadOperatorDescriptor secondaryBulkLoadOp = createTreeIndexBulkLoadOp(spec, fieldPermutation,
indexDataflowHelperFactory, GlobalConfig.DEFAULT_TREE_FILL_FACTOR);
// Sink that discards the bulk-load operator's output at the end of the pipeline.
SinkRuntimeFactory sinkRuntimeFactory = new SinkRuntimeFactory();
// NOTE(review): this fragment re-declares primaryScanOp and sourceOp, so it
// belongs to a DIFFERENT method than the lines above — presumably the
// external-dataset variant of the same job builder. It is also truncated:
// the createExternalAssignOp(...) call is cut off mid-statement, so the rest
// of the pipeline is outside this view.

// Scan op over the external dataset that feeds the indexing pipeline.
ExternalScanOperatorDescriptor primaryScanOp = createExternalIndexingOp(spec);
// Default source is the raw scan; a cast op is spliced in only when needed.
AbstractOperatorDescriptor sourceOp = primaryScanOp;
// Insert a cast only when open-type key fields are overridden and the
// enforced record type actually differs from the declared item type.
if (isOverridingKeyFieldTypes && !enforcedItemType.equals(itemType)) {
sourceOp = createCastOp(spec, dataset.getDatasetType(), index.isEnforced());
spec.connect(new OneToOneConnectorDescriptor(spec), primaryScanOp, 0, sourceOp, 0);
AlgebricksMetaOperatorDescriptor asterixAssignOp = createExternalAssignOp(spec,