@Override
public String getId() {
    // The index name serves as this entity's identifier.
    return index.getIndexName();
}
/**
 * Removes the given index from the in-memory cache, if present.
 *
 * @param index the index whose dataverse/dataset/name identify the entry to remove
 * @return the removed {@link Index}, or {@code null} if no matching entry was cached
 */
public Index dropIndex(Index index) {
    synchronized (indexes) {
        // Walk dataverse -> dataset -> index name; bail out at the first missing level.
        Map<String, Map<String, Index>> byDataset = indexes.get(index.getDataverseName());
        if (byDataset == null) {
            return null;
        }
        Map<String, Index> byName = byDataset.get(index.getDatasetName());
        return byName == null ? null : byName.remove(index.getIndexName());
    }
}
/**
 * Adds the index to the cache if it is not already present.
 * <p>
 * Intermediate per-dataverse and per-dataset maps are created on demand.
 *
 * @param index the index to cache
 * @return always {@code null}: when the index is absent, {@code put} returns the previous
 *         (nonexistent) mapping, and when it is already present nothing is inserted
 */
private Index addIndexIfNotExistsInternal(Index index) {
    // computeIfAbsent replaces the manual get/null-check/put population of the nested maps.
    Map<String, Index> indexMap = indexes
            .computeIfAbsent(index.getDataverseName(), k -> new HashMap<>())
            .computeIfAbsent(index.getDatasetName(), k -> new HashMap<>());
    if (!indexMap.containsKey(index.getIndexName())) {
        // No prior mapping exists, so put(...) yields null, matching the original contract.
        return indexMap.put(index.getIndexName(), index);
    }
    return null;
}
/**
 * Tells whether the given index is an external dataset's files index.
 *
 * @param index the index to classify
 * @return {@code true} iff the index name equals the conventional files-index name
 *         derived from the dataset name
 */
public static boolean isFileIndex(Index index) {
    String filesIndexName = IndexingConstants.getFilesIndexName(index.getDatasetName());
    return index.getIndexName().equals(filesIndexName);
}
}
@Override
public boolean equals(Object other) {
    if (!(other instanceof Index)) {
        return false;
    }
    // Identity is the fully qualified name: index, dataset, and dataverse
    // (compared in the same short-circuit order as before).
    Index that = (Index) other;
    return indexName.equals(that.getIndexName())
            && datasetName.equals(that.getDatasetName())
            && dataverseName.equals(that.getDataverseName());
}
int result = indexName.compareTo(otherIndex.getIndexName()); if (result != 0) { return result;
/**
 * Rejects an attempt to drop a primary index.
 *
 * @param index     the index being dropped
 * @param sourceLoc source location for error reporting
 * @throws AlgebricksException (as {@link MetadataException} CANNOT_DROP_INDEX) when the
 *                             index is a primary index
 */
private void ensureNonPrimaryIndexDrop(Index index, SourceLocation sourceLoc) throws AlgebricksException {
    if (!index.isPrimaryIndex()) {
        return;
    }
    throw new MetadataException(ErrorCode.CANNOT_DROP_INDEX, sourceLoc, index.getIndexName(),
            index.getDatasetName());
}
/**
 * Determines which fields feed the bloom filter for the given index.
 *
 * @param dataset the owning dataset
 * @param index   the index whose bloom filter fields are requested
 * @return bloom filter field positions for the index
 * @throws AlgebricksException declared for signature compatibility with callers
 */
private static int[] getBloomFilterFields(Dataset dataset, Index index) throws AlgebricksException {
    if (index.isPrimaryIndex()) {
        return dataset.getPrimaryBloomFilterFields();
    }
    if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
        String filesIndexName = IndexingConstants.getFilesIndexName(dataset.getDatasetName());
        if (index.getIndexName().equals(filesIndexName)) {
            // The files index has a fixed bloom filter layout.
            return FilesIndexDescription.BLOOM_FILTER_FIELDS;
        }
        // Other external indexes: single field positioned right after the secondary keys.
        return new int[] { index.getKeyFieldNames().size() };
    }
    // Internal secondary index: the bloom filter covers all key fields [0..numKeys).
    int numKeys = index.getKeyFieldNames().size();
    int[] fields = new int[numKeys];
    for (int i = 0; i < numKeys; i++) {
        fields[i] = i;
    }
    return fields;
}
}
/**
 * Persists the index entity into the metadata INDEX dataset.
 *
 * @param txnId metadata transaction id
 * @param index the index entity to store
 * @throws AlgebricksException if an index with the same name already exists, or on any
 *                             other storage failure (cause preserved)
 */
@Override
public void addIndex(TxnId txnId, Index index) throws AlgebricksException {
    try {
        IndexTupleTranslator translator = tupleTranslatorProvider.getIndexTupleTranslator(txnId, this, true);
        ITupleReference tuple = translator.getTupleFromMetadataEntity(index);
        insertTupleIntoIndex(txnId, MetadataPrimaryIndexes.INDEX_DATASET, tuple);
    } catch (HyracksDataException e) {
        // Map a Hyracks duplicate-key failure to a user-friendly "already exists" error;
        // propagate everything else wrapped, keeping the original cause.
        boolean duplicateKey =
                e.getComponent().equals(ErrorCode.HYRACKS) && e.getErrorCode() == ErrorCode.DUPLICATE_KEY;
        if (duplicateKey) {
            throw new AlgebricksException("An index with name '" + index.getIndexName() + "' already exists.", e);
        }
        throw new AlgebricksException(e);
    }
}
return primaryTypeTraits; } else if (dataset.getDatasetType() == DatasetType.EXTERNAL && index.getIndexName().equals(IndexingConstants.getFilesIndexName(dataset.getDatasetName()))) { return FilesIndexDescription.EXTERNAL_FILE_INDEX_TYPE_TRAITS;
/**
 * Builds a job that recovers all indexes of an external dataset.
 *
 * @param ds               the external dataset
 * @param indexes          the indexes to recover
 * @param metadataProvider provider for splits, constraints, and storage components
 * @return the recovery job specification
 * @throws AlgebricksException if split providers or constraints cannot be resolved
 */
public static JobSpecification buildRecoverOp(Dataset ds, List<Index> indexes, MetadataProvider metadataProvider)
        throws AlgebricksException {
    JobSpecification jobSpec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
    IStorageManager storageManager = metadataProvider.getStorageComponentProvider().getStorageManager();
    ArrayList<IIndexDataflowHelperFactory> helperFactories = new ArrayList<>();
    AlgebricksPartitionConstraint partitionConstraint = null;
    for (Index idx : indexes) {
        IFileSplitProvider splitProvider;
        if (isValidIndexName(idx.getDatasetName(), idx.getIndexName())) {
            Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
                    metadataProvider.getSplitProviderAndConstraints(ds, idx.getIndexName());
            splitProvider = splitsAndConstraint.first;
            // NOTE(review): the constraint of the last valid index wins — presumably all
            // indexes of one external dataset share the same constraint; confirm.
            partitionConstraint = splitsAndConstraint.second;
        } else {
            // Names failing isValidIndexName fall back to the files index splits.
            splitProvider = metadataProvider
                    .getSplitProviderAndConstraints(ds, IndexingConstants.getFilesIndexName(ds.getDatasetName())).first;
        }
        helperFactories.add(new IndexDataflowHelperFactory(storageManager, splitProvider));
    }
    ExternalDatasetIndexesRecoverOperatorDescriptor recoverOp =
            new ExternalDatasetIndexesRecoverOperatorDescriptor(jobSpec, helperFactories);
    jobSpec.addRoot(recoverOp);
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(jobSpec, recoverOp, partitionConstraint);
    jobSpec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
    return jobSpec;
}
/**
 * Builds a job that commits all indexes of an external dataset.
 *
 * @param ds               the external dataset
 * @param indexes          the indexes to commit
 * @param metadataProvider provider for splits, constraints, and storage components
 * @return the commit job specification
 * @throws AlgebricksException if split providers or constraints cannot be resolved
 */
public static JobSpecification buildCommitJob(Dataset ds, List<Index> indexes, MetadataProvider metadataProvider)
        throws AlgebricksException {
    JobSpecification jobSpec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
    IStorageManager storageManager = metadataProvider.getStorageComponentProvider().getStorageManager();
    ArrayList<IIndexDataflowHelperFactory> helperFactories = new ArrayList<>();
    AlgebricksPartitionConstraint partitionConstraint = null;
    for (Index idx : indexes) {
        IFileSplitProvider splitProvider;
        if (isValidIndexName(idx.getDatasetName(), idx.getIndexName())) {
            Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
                    metadataProvider.getSplitProviderAndConstraints(ds, idx.getIndexName());
            splitProvider = splitsAndConstraint.first;
            // NOTE(review): the constraint of the last valid index wins — presumably all
            // indexes of one external dataset share the same constraint; confirm.
            partitionConstraint = splitsAndConstraint.second;
        } else {
            // Names failing isValidIndexName fall back to the files index splits.
            splitProvider = metadataProvider
                    .getSplitProviderAndConstraints(ds, IndexingConstants.getFilesIndexName(ds.getDatasetName())).first;
        }
        helperFactories.add(new IndexDataflowHelperFactory(storageManager, splitProvider));
    }
    ExternalDatasetIndexesCommitOperatorDescriptor commitOp =
            new ExternalDatasetIndexesCommitOperatorDescriptor(jobSpec, helperFactories);
    jobSpec.addRoot(commitOp);
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(jobSpec, commitOp, partitionConstraint);
    jobSpec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
    return jobSpec;
}
/**
 * Builds a job that aborts the pending operation on all indexes of an external dataset.
 *
 * @param ds               the external dataset
 * @param indexes          the indexes to abort
 * @param metadataProvider provider for splits, constraints, and storage components
 * @return the abort job specification
 * @throws AlgebricksException if split providers or constraints cannot be resolved
 */
public static JobSpecification buildAbortOp(Dataset ds, List<Index> indexes, MetadataProvider metadataProvider)
        throws AlgebricksException {
    JobSpecification jobSpec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
    IStorageManager storageManager = metadataProvider.getStorageComponentProvider().getStorageManager();
    ArrayList<IIndexDataflowHelperFactory> helperFactories = new ArrayList<>();
    AlgebricksPartitionConstraint partitionConstraint = null;
    for (Index idx : indexes) {
        IFileSplitProvider splitProvider;
        if (isValidIndexName(idx.getDatasetName(), idx.getIndexName())) {
            Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
                    metadataProvider.getSplitProviderAndConstraints(ds, idx.getIndexName());
            splitProvider = splitsAndConstraint.first;
            // NOTE(review): the constraint of the last valid index wins — presumably all
            // indexes of one external dataset share the same constraint; confirm.
            partitionConstraint = splitsAndConstraint.second;
        } else {
            // Names failing isValidIndexName fall back to the files index splits.
            splitProvider = metadataProvider
                    .getSplitProviderAndConstraints(ds, IndexingConstants.getFilesIndexName(ds.getDatasetName())).first;
        }
        helperFactories.add(new IndexDataflowHelperFactory(storageManager, splitProvider));
    }
    ExternalDatasetIndexesAbortOperatorDescriptor abortOp =
            new ExternalDatasetIndexesAbortOperatorDescriptor(jobSpec, helperFactories);
    jobSpec.addRoot(abortOp);
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(jobSpec, abortOp, partitionConstraint);
    jobSpec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
    return jobSpec;
}
/**
 * Resolves the bloom filter field positions for the given index.
 *
 * @param dataset the owning dataset
 * @param index   the index whose bloom filter fields are requested
 * @return bloom filter field positions
 * @throws AlgebricksException declared for signature compatibility with callers
 */
private static int[] getBloomFilterFields(Dataset dataset, Index index) throws AlgebricksException {
    if (index.isPrimaryIndex()) {
        return dataset.getPrimaryBloomFilterFields();
    }
    int keyCount = index.getKeyFieldNames().size();
    if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
        // The external files index has a fixed bloom filter layout; any other external
        // index uses the single field immediately following its secondary keys.
        boolean isFilesIndex =
                index.getIndexName().equals(IndexingConstants.getFilesIndexName(dataset.getDatasetName()));
        return isFilesIndex ? FilesIndexDescription.BLOOM_FILTER_FIELDS : new int[] { keyCount };
    }
    // Internal secondary index: bloom filter spans every key field position.
    int[] bloomFilterKeyFields = new int[keyCount];
    for (int pos = 0; pos < keyCount; pos++) {
        bloomFilterKeyFields[pos] = pos;
    }
    return bloomFilterKeyFields;
}
}
@Override public JobSpecification buildDropJobSpec(Set<DropOption> options) throws AlgebricksException { JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext()); Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadataProvider.getSplitProviderAndConstraints(dataset, index.getIndexName()); IIndexDataflowHelperFactory dataflowHelperFactory = new IndexDataflowHelperFactory( metadataProvider.getStorageComponentProvider().getStorageManager(), splitsAndConstraint.first); // The index drop operation should be persistent regardless of temp datasets or permanent dataset. IndexDropOperatorDescriptor btreeDrop = new IndexDropOperatorDescriptor(spec, dataflowHelperFactory, options); btreeDrop.setSourceLocation(sourceLoc); AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, btreeDrop, splitsAndConstraint.second); spec.addRoot(btreeDrop); return spec; }
/**
 * Test helper holding the runtime artifacts (splits, type traits, serdes, record
 * descriptor, field permutations) needed to operate on a secondary index.
 *
 * @param primaryIndexInfo the primary index the secondary belongs to
 * @param secondaryIndex   the secondary index metadata entity
 */
public SecondaryIndexInfo(PrimaryIndexInfo primaryIndexInfo, Index secondaryIndex) {
    this.primaryIndexInfo = primaryIndexInfo;
    this.secondaryIndex = secondaryIndex;
    // Test cluster runs on a single NC; use its id for split resolution.
    List<String> nodes = Collections.singletonList(ExecutionTestUtil.integrationUtil.ncs[0].getId());
    CcApplicationContext appCtx =
            (CcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext();
    FileSplit[] indexSplits = SplitsAndConstraintsUtil.getIndexSplits(appCtx.getClusterStateManager(),
            primaryIndexInfo.dataset, secondaryIndex.getIndexName(), nodes);
    fileSplitProvider = new ConstantFileSplitProvider(indexSplits);
    // Hoisted: the same key-type array feeds both traits and serdes construction.
    IAType[] secondaryKeyTypes =
            secondaryIndex.getKeyFieldTypes().toArray(new IAType[secondaryIndex.getKeyFieldTypes().size()]);
    secondaryIndexTypeTraits = createSecondaryIndexTypeTraits(primaryIndexInfo.recordType,
            primaryIndexInfo.metaType, primaryIndexInfo.primaryKeyTypes, secondaryKeyTypes);
    secondaryIndexSerdes = createSecondaryIndexSerdes(primaryIndexInfo.recordType, primaryIndexInfo.metaType,
            primaryIndexInfo.primaryKeyTypes, secondaryKeyTypes);
    rDesc = new RecordDescriptor(secondaryIndexSerdes, secondaryIndexTypeTraits);
    // Inserts use the identity permutation over every secondary-index field.
    insertFieldsPermutations = new int[secondaryIndexTypeTraits.length];
    for (int i = 0; i < insertFieldsPermutations.length; i++) {
        insertFieldsPermutations[i] = i;
    }
    // Primary key fields follow the secondary key fields in the tuple layout.
    primaryKeyIndexes = new int[primaryIndexInfo.primaryKeyIndexes.length];
    for (int i = 0; i < primaryKeyIndexes.length; i++) {
        primaryKeyIndexes[i] = i + secondaryIndex.getKeyFieldNames().size();
    }
}
metaType == null ? null : SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(metaType); Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadataProvider.getSplitProviderAndConstraints(dataset, index.getIndexName()); secondaryFileSplitProvider = secondarySplitsAndConstraint.first; secondaryPartitionConstraint = secondarySplitsAndConstraint.second;
/**
 * Builds the job specification that compacts this secondary index's LSM components.
 *
 * @return the compaction job specification
 * @throws AlgebricksException if splits/constraints cannot be resolved
 */
@Override
public JobSpecification buildCompactJobSpec() throws AlgebricksException {
    JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
    Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
            metadataProvider.getSplitProviderAndConstraints(dataset, index.getIndexName());
    IIndexDataflowHelperFactory helperFactory = new IndexDataflowHelperFactory(
            metadataProvider.getStorageComponentProvider().getStorageManager(), splitsAndConstraint.first);
    LSMTreeIndexCompactOperatorDescriptor compactOp =
            new LSMTreeIndexCompactOperatorDescriptor(spec, helperFactory);
    compactOp.setSourceLocation(sourceLoc);
    // NOTE(review): the constraint comes from the secondaryPartitionConstraint field rather
    // than splitsAndConstraint.second — presumably equivalent for this index; confirm.
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, compactOp,
            secondaryPartitionConstraint);
    spec.addRoot(compactOp);
    spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
    return spec;
}
}
return primaryTypeTraits; } else if (dataset.getDatasetType() == DatasetType.EXTERNAL && index.getIndexName().equals(IndexingConstants.getFilesIndexName(dataset.getDatasetName()))) { return FilesIndexDescription.EXTERNAL_FILE_INDEX_TYPE_TRAITS;
return dataset.getPrimaryComparatorFactories(metadataProvider, recordType, metaType); } else if (dataset.getDatasetType() == DatasetType.EXTERNAL && index.getIndexName().equals(IndexingConstants.getFilesIndexName(dataset.getDatasetName()))) { return FilesIndexDescription.FILES_INDEX_COMP_FACTORIES;