/**
 * Returns the position of the filter field within a tuple of the dataset's
 * primary index, as a single-element array, or {@code null} when the dataset
 * is external or declares no filter field.
 *
 * @param dataset the dataset whose filter-field position is requested
 * @return a one-element array holding the filter field's index, or {@code null}
 */
public static int[] createFilterFields(Dataset dataset) {
    if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
        return null;
    }
    if (getFilterField(dataset) == null) {
        return null;
    }
    // The filter value sits right after the primary keys and the record field.
    // NOTE(review): unlike createBTreeFieldsWhenThereisAFilter, this does not
    // adjust for a meta part — confirm whether filtered datasets can have one.
    int numPrimaryKeys = dataset.getPrimaryKeys().size();
    return new int[] { numPrimaryKeys + 1 };
}
private static int[] getPrimaryKeyPermutationForUpsert(Dataset dataset) { // upsertIndicatorVar + prev record int f = 2; // add the previous meta second if (dataset.hasMetaPart()) { f++; } // add the previous filter third int numFilterFields = DatasetUtil.getFilterField(dataset) == null ? 0 : 1; if (numFilterFields > 0) { f++; } int numPrimaryKeys = dataset.getPrimaryKeys().size(); int[] pkIndexes = new int[numPrimaryKeys]; for (int i = 0; i < pkIndexes.length; i++) { pkIndexes[i] = f; f++; } return pkIndexes; }
/**
 * Builds the identity permutation covering the B-tree fields (primary keys plus
 * the record field, plus the meta field when present) for a filtered dataset.
 * Returns {@code null} when the dataset is external or has no filter field.
 *
 * @param dataset the dataset whose B-tree field indexes are requested
 * @return the identity mapping {@code [0, 1, ..., n-1]}, or {@code null}
 */
public static int[] createBTreeFieldsWhenThereisAFilter(Dataset dataset) {
    if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
        return null;
    }
    if (getFilterField(dataset) == null) {
        return null;
    }
    // One value field for the record, plus one more when a meta part exists.
    int numValueFields = dataset.hasMetaPart() ? 2 : 1;
    int length = dataset.getPrimaryKeys().size() + numValueFields;
    int[] btreeFields = new int[length];
    for (int idx = 0; idx < length; idx++) {
        btreeFields[idx] = idx;
    }
    return btreeFields;
}
/**
 * Resolves the type traits of the dataset's filter field from the item type.
 * Returns {@code null} when the dataset is external or has no filter field.
 *
 * @param dataset  the dataset whose filter field is examined
 * @param itemType the record type in which the filter field is looked up
 * @return a one-element array with the filter field's type traits, or {@code null}
 * @throws AlgebricksException if the sub-field type cannot be resolved
 */
public static ITypeTraits[] computeFilterTypeTraits(Dataset dataset, ARecordType itemType)
        throws AlgebricksException {
    if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
        return null;
    }
    List<String> filterField = getFilterField(dataset);
    if (filterField == null) {
        return null;
    }
    IAType filterType = itemType.getSubFieldType(filterField);
    return new ITypeTraits[] { TypeTraitProvider.INSTANCE.getTypeTrait(filterType) };
}
/**
 * Builds the binary comparator factory for the dataset's filter field,
 * in ascending order. Returns {@code null} when the dataset is external or
 * declares no filter field.
 *
 * @param dataset                   the dataset whose filter field is examined
 * @param itemType                  the record type in which the filter field is looked up
 * @param comparatorFactoryProvider provider used to obtain the comparator factory
 * @return a one-element factory array for the filter field, or {@code null}
 * @throws AlgebricksException if the sub-field type cannot be resolved
 */
public static IBinaryComparatorFactory[] computeFilterBinaryComparatorFactories(Dataset dataset,
        ARecordType itemType, IBinaryComparatorFactoryProvider comparatorFactoryProvider)
        throws AlgebricksException {
    if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
        return null;
    }
    List<String> filterField = getFilterField(dataset);
    if (filterField == null) {
        return null;
    }
    IAType filterType = itemType.getSubFieldType(filterField);
    // Filter comparisons are always ascending.
    return new IBinaryComparatorFactory[] {
            comparatorFactoryProvider.getBinaryComparatorFactory(filterType, true) };
}
int numFilterFields = DatasetUtil.getFilterField(dataset) == null ? 0 : 1; int numOfAdditionalFields = additionalNonFilterFields == null ? 0 : additionalNonFilterFields.size();
int numFilterFields = DatasetUtil.getFilterField(dataset) == null ? 0 : 1;
ARecordType recType = null; if (dataset != null && dataset.getDatasetType() == DatasetType.INTERNAL) { filterFieldName = DatasetUtil.getFilterField(dataset); IAType itemType = ((MetadataProvider) context.getMetadataProvider()) .findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
Dataset dataset = MetadataManagerUtil.findExistingDataset(mdTxnCtx, dataverseName, datasetName); int numKeys = keys.size(); int numFilterFields = DatasetUtil.getFilterField(dataset) == null ? 0 : 1;
List<String> additionalFilteringField = DatasetUtil.getFilterField(targetDatasource.getDataset()); List<LogicalVariable> additionalFilteringVars; List<Mutable<ILogicalExpression>> additionalFilteringAssignExpressions;
numPrimaryKeys = dataset.getPrimaryKeys().size(); if (dataset.getDatasetType() == DatasetType.INTERNAL) { filterFieldName = DatasetUtil.getFilterField(dataset); if (filterFieldName != null) { numFilterFields = 1;
List<String> additionalFilteringField = DatasetUtil.getFilterField(targetDatasource.getDataset()); List<LogicalVariable> additionalFilteringVars; List<Mutable<ILogicalExpression>> additionalFilteringAssignExpressions;
Dataset dataset = MetadataManagerUtil.findExistingDataset(mdTxnCtx, dataverseName, datasetName); int numKeys = primaryKeys.size() + secondaryKeys.size(); int numFilterFields = DatasetUtil.getFilterField(dataset) == null ? 0 : 1;
int numKeys = numSecondaryKeys + numPrimaryKeys; int numFilterFields = DatasetUtil.getFilterField(dataset) == null ? 0 : 1; int[] fieldPermutation = new int[numKeys + numFilterFields]; int[] modificationCallbackPrimaryKeyFields = new int[primaryKeys.size()];
MetadataManagerUtil.findExistingDataset(mdTxnCtx, dataSource.getId().getDataverseName(), datasetName); int numKeys = keys.size(); int numFilterFields = DatasetUtil.getFilterField(dataset) == null ? 0 : 1;
List<String> filterFieldName = DatasetUtil.getFilterField(dataset); int numPrimaryKeys = primaryKeys.size(); int numSecondaryKeys = secondaryKeys.size();
int[] fieldPermutation, IMissingWriterFactory missingWriterFactory) throws AlgebricksException { int numKeys = dataset.getPrimaryKeys().size(); int numFilterFields = DatasetUtil.getFilterField(dataset) == null ? 0 : 1; ARecordType itemType = (ARecordType) metadataProvider.findType(dataset); ARecordType metaItemType = (ARecordType) metadataProvider.findMetaType(dataset); String filterField = DatasetUtil.getFilterField(dataset).get(0); String[] fieldNames = itemType.getFieldNames(); int i = 0;
String filterField = DatasetUtil.getFilterField(dataset).get(0); String[] fieldNames = itemType.getFieldNames(); int i = 0;