MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
MetadataProvider metadataProvider = new MetadataProvider(appCtx, null);
try {
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
    if (dataset == null) {
        jsonResponse.put("error",
                "Dataset " + datasetName + " does not exist in dataverse " + dataverseName);
        return;
    }
    // Collect the file splits, record type, and primary keys of the dataset.
    FileSplit[] fileSplits = metadataProvider.splitsForIndex(mdTxnCtx, dataset, datasetName);
    ARecordType recordType = (ARecordType) metadataProvider
            .findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
    List<List<String>> primaryKeys = dataset.getPrimaryKeys();
} finally {
    metadataProvider.getLocks().unlock();
}
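/*
 * A minimal sketch of the metadata-access discipline used above, assuming "appCtx" is the
 * CC application context available to the caller: every lookup runs inside a metadata
 * transaction that is committed on success, and the locks tracked by the MetadataProvider
 * are always released in a finally block. The dataverse/dataset names are hypothetical.
 */
MetadataTransactionContext txn = MetadataManager.INSTANCE.beginTransaction();
MetadataProvider provider = new MetadataProvider(appCtx, null);
try {
    provider.setMetadataTxnContext(txn);
    Dataset ds = provider.findDataset("MyDataverse", "MyDataset");
    MetadataManager.INSTANCE.commitTransaction(txn);
} finally {
    provider.getLocks().unlock();
}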
private static int getSortNumFrames(MetadataProvider metadataProvider, SourceLocation sourceLoc)
        throws AlgebricksException {
    return OptimizationConfUtil.getSortNumFrames(metadataProvider.getApplicationContext().getCompilerProperties(),
            metadataProvider.getConfig(), sourceLoc);
}
private IScalarEvaluatorFactory createConstantEvaluatorFactory(ConstantExpression expr, JobGenContext context)
        throws AlgebricksException {
    MetadataProvider metadataProvider = (MetadataProvider) context.getMetadataProvider();
    return metadataProvider.getDataFormat().getConstantEvalFactory(expr.getValue());
}
/**
 * Retrieves the output record type, as defined by "set output-record-type".
 */
public ARecordType findOutputRecordType() throws AlgebricksException {
    return MetadataManagerUtil.findOutputRecordType(mdTxnCtx, getDefaultDataverseName(),
            getProperty("output-record-type"));
}
private static void tryDropDatasetNodegroup(Dataset source, MetadataProvider metadataProvider) throws Exception {
    ICcApplicationContext appCtx = metadataProvider.getApplicationContext();
    String sourceNodeGroup = source.getNodeGroupName();
    appCtx.getMetadataLockManager().acquireNodeGroupWriteLock(metadataProvider.getLocks(), sourceNodeGroup);
    MetadataManager.INSTANCE.dropNodegroup(metadataProvider.getMetadataTxnContext(), sourceNodeGroup, true);
}
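/*
 * Hypothetical caller sketch: when a dataset is dropped, its nodegroup can be dropped as
 * best-effort cleanup. The final boolean passed to dropNodegroup above is assumed to mean
 * "fail silently", i.e. the drop is a no-op if the nodegroup is still in use elsewhere.
 */
try {
    tryDropDatasetNodegroup(dataset, metadataProvider);
} catch (Exception e) {
    // tolerate unexpected failures during cleanup; the nodegroup may be shared (assumption)
}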
public static JobSpecification createDatasetJobSpec(Dataset dataset, MetadataProvider metadataProvider)
        throws AlgebricksException {
    Index index = IndexUtil.getPrimaryIndex(dataset);
    ARecordType itemType = (ARecordType) metadataProvider.findType(dataset);
    ARecordType metaItemType = (ARecordType) metadataProvider.findMetaType(dataset);
    JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
    Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
            metadataProvider.getSplitProviderAndConstraints(dataset);
    FileSplit[] fs = splitsAndConstraint.first.getFileSplits();
    // Log the file splits chosen for the new dataset.
    StringBuilder sb = new StringBuilder();
    for (FileSplit f : fs) {
        sb.append(f).append(' ');
    }
    LOGGER.info("CREATING File Splits: " + sb);
    Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo =
            DatasetUtil.getMergePolicyFactory(dataset, metadataProvider.getMetadataTxnContext());
    IResourceFactory resourceFactory = dataset.getResourceFactory(metadataProvider, index, itemType, metaItemType,
            compactionInfo.first, compactionInfo.second);
    IndexBuilderFactory indexBuilderFactory =
            new IndexBuilderFactory(metadataProvider.getStorageComponentProvider().getStorageManager(),
                    splitsAndConstraint.first, resourceFactory, true);
    IndexCreateOperatorDescriptor indexCreateOp = new IndexCreateOperatorDescriptor(spec, indexBuilderFactory);
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, indexCreateOp,
            splitsAndConstraint.second);
    spec.addRoot(indexCreateOp);
    return spec;
}
final TxnId txnId = metadataProvider.getTxnIdFactory().create();
metadataProvider.setTxnId(txnId);
ILangExpressionToPlanTranslator t =
        translatorFactory.createExpressionToPlanTranslator(metadataProvider, varCounter, externalVars);
CompilerProperties compilerProperties = metadataProvider.getApplicationContext().getCompilerProperties();
Map<String, Object> querySpecificConfig = validateConfig(metadataProvider.getConfig(), sourceLoc);
final PhysicalOptimizationConfig physOptConf =
        OptimizationConfUtil.createPhysicalOptimizationConf(compilerProperties, querySpecificConfig, sourceLoc);
HeuristicCompilerFactoryBuilder builder =
        new HeuristicCompilerFactoryBuilder(OptimizationContextFactory.INSTANCE);
builder.setPhysicalOptimizationConfig(physOptConf);
builder.setLogicalRewrites(ruleSetFactory.getLogicalRewrites(metadataProvider.getApplicationContext()));
builder.setPhysicalRewrites(ruleSetFactory.getPhysicalRewrites(metadataProvider.getApplicationContext()));
IDataFormat format = metadataProvider.getDataFormat();
builder.setExpressionEvalSizeComputer(format.getExpressionEvalSizeComputer());
// Reconstructed: resolve the requested degree of parallelism before choosing locations
// (the getParallelism helper and the config key are assumed from context).
int parallelism = getParallelism((String) querySpecificConfig.get(CompilerProperties.COMPILER_PARALLELISM_KEY),
        compilerProperties.getParallelism());
AlgebricksAbsolutePartitionConstraint computationLocations =
        chooseLocations(clusterInfoCollector, parallelism, metadataProvider.getClusterLocations());
builder.setClusterLocations(computationLocations);
builder.setComparatorFactoryProvider(format.getBinaryComparatorFactoryProvider());
builder.setExpressionRuntimeProvider(
        new ExpressionRuntimeProvider(new QueryLogicalExpressionJobGen(metadataProvider.getFunctionManager())));
builder.setHashFunctionFactoryProvider(format.getBinaryHashFunctionFactoryProvider());
builder.setHashFunctionFamilyProvider(format.getBinaryHashFunctionFamilyProvider());
// The factory must be created only after all builder options are set.
ICompilerFactory compilerFactory = builder.create();
int numKeys = dataset.getPrimaryKeys().size();
int numFilterFields = DatasetUtil.getFilterField(dataset) == null ? 0 : 1;
ARecordType itemType = (ARecordType) metadataProvider.findType(dataset);
ARecordType metaItemType = (ARecordType) metadataProvider.findMetaType(dataset);
Index primaryIndex = metadataProvider.getIndex(dataset.getDataverseName(), dataset.getDatasetName(),
        dataset.getDatasetName());
Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
        metadataProvider.getSplitProviderAndConstraints(dataset);
boolean hasSecondaries =
        metadataProvider.getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName()).size() > 1;
IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
IModificationOperationCallbackFactory modificationCallbackFactory = dataset.getModificationCallbackFactory(
        storageComponentProvider, primaryIndex, IndexOperation.UPSERT, primaryKeyFields);
// Output descriptor sized for the input fields plus the upsert's previous-version fields.
ISerializerDeserializer<?>[] outputSerDes = new ISerializerDeserializer[inputRecordDesc.getFieldCount() + 1
        + (dataset.hasMetaPart() ? 2 : 1) + numFilterFields];
IDataFormat dataFormat = metadataProvider.getDataFormat();
public static JobSpecification buildDropFilesIndexJobSpec(MetadataProvider metadataProvider, Dataset dataset)
        throws AlgebricksException {
    String indexName = IndexingConstants.getFilesIndexName(dataset.getDatasetName());
    JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
    Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
            metadataProvider.getSplitProviderAndConstraints(dataset, indexName);
    IIndexDataflowHelperFactory dataflowHelperFactory = new IndexDataflowHelperFactory(
            metadataProvider.getStorageComponentProvider().getStorageManager(), splitsAndConstraint.first);
    IndexDropOperatorDescriptor btreeDrop = new IndexDropOperatorDescriptor(spec, dataflowHelperFactory);
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, btreeDrop,
            splitsAndConstraint.second);
    spec.addRoot(btreeDrop);
    return spec;
}
public static JobSpecification compactDatasetJobSpec(Dataverse dataverse, String datasetName,
        MetadataProvider metadataProvider) throws AlgebricksException {
    String dataverseName = dataverse.getDataverseName();
    Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
    if (dataset == null) {
        throw new AsterixException("Could not find dataset " + datasetName + " in dataverse " + dataverseName);
    }
    JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
    Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
            metadataProvider.getSplitProviderAndConstraints(dataset);
    IIndexDataflowHelperFactory indexHelperFactory = new IndexDataflowHelperFactory(
            metadataProvider.getStorageComponentProvider().getStorageManager(), splitsAndConstraint.first);
    LSMTreeIndexCompactOperatorDescriptor compactOp =
            new LSMTreeIndexCompactOperatorDescriptor(spec, indexHelperFactory);
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, compactOp,
            splitsAndConstraint.second);
    spec.addRoot(compactOp);
    return spec;
}
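/*
 * Usage sketch: a job spec built this way is only a description; nothing runs until it is
 * submitted to Hyracks. The JobUtils.runJob helper and its exact signature are assumptions
 * here, as is the dataset name.
 */
JobSpecification spec = compactDatasetJobSpec(dataverse, "MyDataset", metadataProvider);
JobUtils.runJob(hcc, spec, true); // true: block until the compaction job completes (assumed flag meaning)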
DataSourceId asid = ((IDataSource<DataSourceId>) scan.getDataSource()).getId();
Dataset dataset = mp.findDataset(asid.getDataverseName(), asid.getDatasourceName());
if (dataset == null) {
    throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, scan.getSourceLocation(),
            asid.getDatasourceName(), asid.getDataverseName());
}
if (pos != null) {
    String tName = dataset.getItemTypeName();
    IAType t = mp.findType(dataset.getItemTypeDataverseName(), tName);
    if (t.getTypeTag() != ATypeTag.OBJECT) {
        return false;
    }
}
List<Index> datasetIndexes = mp.getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName());
boolean hasSecondaryIndex = false;
for (Index index : datasetIndexes) {
    // Reconstructed loop body: any secondary index sets the flag.
    if (index.isSecondaryIndex()) {
        hasSecondaryIndex = true;
        break;
    }
}
// LHS of the first assignment and the loop/filter scopes are reconstructed from the indexed accesses.
enforcedTypeTraits = new ITypeTraits[1 + numPrimaryKeys + (dataset.hasMetaPart() ? 1 : 0) + numFilterFields];
secondaryTypeTraits = new ITypeTraits[numSecondaryKeys + numPrimaryKeys];
ISerializerDeserializerProvider serdeProvider = metadataProvider.getDataFormat().getSerdeProvider();
ITypeTraitProvider typeTraitProvider = metadataProvider.getDataFormat().getTypeTraitProvider();
IBinaryComparatorFactoryProvider comparatorFactoryProvider =
        metadataProvider.getDataFormat().getBinaryComparatorFactoryProvider();
for (int i = 0; i < numSecondaryKeys; i++) {
    int sourceColumn = recordColumn + 1;
    secondaryFieldAccessEvalFactories[i] = metadataProvider.getDataFormat().getFieldAccessEvaluatorFactory(
            metadataProvider.getFunctionManager(), isOverridingKeyTypes ? enforcedItemType : sourceType,
            index.getKeyFieldNames().get(i), sourceColumn, sourceLoc);
    Pair<IAType, Boolean> keyTypePair = Index.getNonNullableOpenFieldType(index.getKeyFieldTypes().get(i),
            index.getKeyFieldNames().get(i), sourceType);
}
if (numFilterFields > 0) {
    secondaryFieldAccessEvalFactories[numSecondaryKeys] =
            metadataProvider.getDataFormat().getFieldAccessEvaluatorFactory(metadataProvider.getFunctionManager(),
                    itemType, filterFieldName, recordColumn, sourceLoc);
    Pair<IAType, Boolean> keyTypePair = Index.getNonNullableKeyFieldType(filterFieldName, itemType);
    IAType type = keyTypePair.first;
}
IStatementRewriter stmtRewriter = rewriterFactory.createStatementRewriter();
MetadataProvider metadataProvider = new MetadataProvider(appCtx, activeDataverse);
metadataProvider.getConfig().putAll(config);
metadataProvider.setWriterFactory(writerFactory);
metadataProvider.setResultSerializerFactoryProvider(resultSerializerFactoryProvider);
metadataProvider.setOutputFile(outputFile);
switch (stmt.getKind()) {
    case SET:
        // (handling elided in the original fragment)
        break;
    case INSERT: // assumed: the InsertStatement cast below implies an INSERT label alongside UPSERT
    case UPSERT:
        if (((InsertStatement) stmt).getReturnExpression() != null) {
            metadataProvider.setResultSetId(new ResultSetId(resultSetIdCounter++));
            metadataProvider.setResultAsyncMode(
                    resultDelivery == ResultDelivery.ASYNC || resultDelivery == ResultDelivery.DEFERRED);
            metadataProvider.setMaxResultReads(maxResultReads);
        }
        break;
    case QUERY:
        metadataProvider.setResultSetId(new ResultSetId(resultSetIdCounter++));
        metadataProvider.setResultAsyncMode(
                resultDelivery == ResultDelivery.ASYNC || resultDelivery == ResultDelivery.DEFERRED);
        metadataProvider.setMaxResultReads(maxResultReads);
        handleQuery(metadataProvider, (Query) stmt, hcc, resultSet, resultDelivery, outMetadata, stats,
                clientContextId, ctx, stmtParams, stmtRewriter);
        break;
}
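/*
 * A minimal refactoring sketch, not part of the original class: the same three result-setup
 * calls recur for every result-producing statement above, so a hypothetical private helper
 * could centralize them. Parameter types are assumptions; resultSetIdCounter is the field
 * already used in the dispatch.
 */
private void configureResultMetadata(MetadataProvider mp, ResultDelivery delivery, long maxResultReads) {
    mp.setResultSetId(new ResultSetId(resultSetIdCounter++));
    mp.setResultAsyncMode(delivery == ResultDelivery.ASYNC || delivery == ResultDelivery.DEFERRED);
    mp.setMaxResultReads(maxResultReads);
}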
String feedName = cfs.getFeedName().getValue();
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
MetadataLockUtil.disconnectFeedBegin(lockManager, metadataProvider.getLocks(), dataverseName,
        dataverseName + "." + datasetName, dataverseName + "." + cfs.getFeedName());
try {
    FeedConnection fc = MetadataManager.INSTANCE.getFeedConnection(metadataProvider.getMetadataTxnContext(),
            dataverseName, feedName, datasetName);
    Dataset ds = metadataProvider.findDataset(dataverseName, datasetName);
    if (ds == null) {
        throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, sourceLoc, datasetName,
                dataverseName);
    }
} catch (Exception e) {
    // Reconstructed catch: abort the metadata transaction before rethrowing, per the handler pattern.
    abort(e, e, mdTxnCtx);
    throw e;
} finally {
    metadataProvider.getLocks().unlock();
}
numNestedSecondaryKeyFields = numDimensions * 2;
int recordColumn = NUM_TAG_FIELDS + numPrimaryKeys;
secondaryFieldAccessEvalFactories = metadataProvider.getDataFormat().createMBRFactory(
        metadataProvider.getFunctionManager(), isOverridingKeyFieldTypes ? enforcedItemType : itemType,
        secondaryKeyFields.get(0), recordColumn, numDimensions, filterFieldName, isPointMBR, sourceLoc);
secondaryComparatorFactories = new IBinaryComparatorFactory[numNestedSecondaryKeyFields];
// Per-field setup; the loop over the nested secondary key fields is reconstructed.
for (int i = 0; i < numNestedSecondaryKeyFields; i++) {
    secondaryTypeTraits[i] = TypeTraitProvider.INSTANCE.getTypeTrait(nestedKeyType);
    valueProviderFactories[i] =
            metadataProvider.getStorageComponentProvider().getPrimitiveValueProviderFactory();
}
String value = (String) metadataProvider.getConfig().get(FunctionUtil.IMPORT_PRIVATE_FUNCTIONS);
boolean includePrivateFunctions = value != null && Boolean.parseBoolean(value.toLowerCase());
Set<CallExpr> functionCalls = functionCollector.getFunctionCalls(expression);
// Loop context reconstructed from the per-call references below.
for (CallExpr functionCall : functionCalls) {
    FunctionSignature signature = functionCall.getFunctionSignature();
    if (signature.getNamespace() == null) {
        signature.setNamespace(metadataProvider.getDefaultDataverseName());
    }
    String namespace = signature.getNamespace();
    if (!namespace.equals(FunctionConstants.ASTERIX_NS)
            && !namespace.equals(AlgebricksBuiltinFunctions.ALGEBRICKS_NS)
            && metadataProvider.findDataverse(namespace) == null) {
        throw new CompilationException(ErrorCode.COMPILATION_ERROR, functionCall.getSourceLocation(),
                "In function call \"" + namespace + "." + signature.getName() + "(...)\", the dataverse \""
                        + namespace + "\" cannot be found!");
    }
    FunctionDecl function;
    try {
        function = lookupUserDefinedFunctionDecl(metadataProvider.getMetadataTxnContext(), signature);
    } catch (AlgebricksException e) {
        throw new CompilationException(e);
    }
}
return functionDecls;
when(metadataProvider.getDefaultDataverseName()).thenReturn(dvName);
when(metadataProvider.getConfig()).thenReturn(config);
when(config.get(FunctionUtil.IMPORT_PRIVATE_FUNCTIONS)).thenReturn("true");
when(metadataProvider.findDataset(anyString(), anyString())).thenReturn(mock(Dataset.class));
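/*
 * Companion sketch: with the stubs above in place, code that consults the MetadataProvider
 * can be exercised and its interactions checked. The unit under test and the expected call
 * count are hypothetical; static imports from org.mockito.Mockito are assumed, as above.
 */
unitUnderTest.run(metadataProvider);
verify(metadataProvider, times(1)).findDataset(anyString(), anyString());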
Dataset dataset = metadataProvider.findDataset(jobGenParams.getDataverseName(), jobGenParams.getDatasetName());
int[] keyIndexes = getKeyIndexes(jobGenParams.getKeyVarList(), inputSchemas);
CompilerProperties compilerProp = metadataProvider.getApplicationContext().getCompilerProperties();
Map<String, Object> queryConfig = metadataProvider.getConfig();
int frameLimit = OptimizationConfUtil.getTextSearchNumFrames(compilerProp, queryConfig, op.getSourceLocation());
CcApplicationContext appCtx =
        (CcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext();
MetadataProvider mdProvider = new MetadataProvider(appCtx, null);
try {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    int pos = skNames.get(i).size() > 1 ? -1 : sourceType.getFieldIndex(skNames.get(i).get(0));
    secondaryFieldAccessEvalFactories[i] =
            mdProvider.getDataFormat().getFieldAccessEvaluatorFactory(mdProvider.getFunctionManager(),
                    sourceType, secondaryIndex.getKeyFieldNames().get(i), pos, null);
} finally {
    // Reconstructed finally: release metadata locks, per the pattern used elsewhere.
    mdProvider.getLocks().unlock();
}
protected Dataverse handleUseDataverseStatement(MetadataProvider metadataProvider, Statement stmt)
        throws Exception {
    DataverseDecl dvd = (DataverseDecl) stmt;
    SourceLocation sourceLoc = dvd.getSourceLocation();
    String dvName = dvd.getDataverseName().getValue();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    lockManager.acquireDataverseReadLock(metadataProvider.getLocks(), dvName);
    try {
        Dataverse dv = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(), dvName);
        if (dv == null) {
            throw new MetadataException(ErrorCode.UNKNOWN_DATAVERSE, sourceLoc, dvName);
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        return dv;
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        throw new MetadataException(ErrorCode.METADATA_ERROR, e, sourceLoc, e.toString());
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
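/*
 * The try/catch/finally shape above is the recurring template for the metadata handlers
 * shown here: commit on success, abort(e, e, mdTxnCtx) on failure so the metadata
 * transaction is rolled back before the error propagates, and unconditional lock release.
 * A minimal hypothetical caller:
 */
Dataverse active = handleUseDataverseStatement(metadataProvider, stmt); // stmt: a parsed DataverseDecl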