/**
 * Looks up a node group in the metadata store and returns the names of its member nodes.
 *
 * @param mdTxnCtx the metadata transaction context used for the lookup
 * @param nodeGroupName the name of the node group to resolve
 * @return the node names belonging to the node group
 * @throws AlgebricksException if the node group cannot be retrieved
 */
public static List<String> findNodes(MetadataTransactionContext mdTxnCtx, String nodeGroupName)
        throws AlgebricksException {
    NodeGroup group = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, nodeGroupName);
    return group.getNodeNames();
}
/**
 * Builds a node domain for the given node group, listing each node name once per
 * partition it hosts (as reported by the cluster state manager).
 *
 * @param clusterStateManager provides the per-node partition counts
 * @param mdTxnCtx the metadata transaction context used for the node group lookup
 * @param nodeGroupName the name of the node group
 * @return a {@link DefaultNodeGroupDomain} over the expanded node/partition list
 * @throws AlgebricksException if the node group cannot be retrieved
 */
public static INodeDomain findNodeDomain(IClusterStateManager clusterStateManager,
        MetadataTransactionContext mdTxnCtx, String nodeGroupName) throws AlgebricksException {
    NodeGroup group = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, nodeGroupName);
    List<String> locations = new ArrayList<>();
    for (String nodeName : group.getNodeNames()) {
        // One entry per partition hosted on this node.
        int remaining = clusterStateManager.getNodePartitionsCount(nodeName);
        while (remaining-- > 0) {
            locations.add(nodeName);
        }
    }
    return new DefaultNodeGroupDomain(locations);
}
/**
 * Calculate an estimate size of the bloom filter. Note that this is an
 * estimation which assumes that the data is going to be uniformly distributed
 * across all partitions.
 *
 * @param dataset the dataset whose cardinality hint is read
 * @return Number of elements that will be used to create a bloom filter per
 *         dataset per partition
 * @throws AlgebricksException if the node group cannot be retrieved
 */
public long getCardinalityPerPartitionHint(Dataset dataset) throws AlgebricksException {
    String numElementsHintString = dataset.getHints().get(DatasetCardinalityHint.NAME);
    long numElementsHint;
    if (numElementsHintString == null) {
        numElementsHint = DatasetCardinalityHint.DEFAULT;
    } else {
        // NOTE(review): a malformed hint value propagates as an unchecked
        // NumberFormatException — confirm that is the intended contract.
        numElementsHint = Long.parseLong(numElementsHintString);
    }
    int numPartitions = 0;
    List<String> nodeGroup =
            MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, dataset.getNodeGroupName()).getNodeNames();
    IClusterStateManager csm = appCtx.getClusterStateManager();
    for (String nd : nodeGroup) {
        numPartitions += csm.getNodePartitionsCount(nd);
    }
    // BUG FIX: the original divided unconditionally; a node group that resolves to
    // zero partitions (empty group, or nodes with no registered partitions) caused
    // an ArithmeticException. Fall back to the undivided hint in that case.
    if (numPartitions == 0) {
        return numElementsHint;
    }
    return numElementsHint / numPartitions;
}
/***
 * Creates a node group that is associated with a new dataset.
 *
 * @param dataverseName,
 *            the dataverse name of the dataset.
 * @param datasetName,
 *            the name of the dataset.
 * @param rebalanceCount
 *            , the rebalance count of the dataset.
 * @param ncNames,
 *            the set of node names.
 * @param metadataProvider,
 *            the metadata provider.
 * @return the name of the created node group.
 * @throws Exception
 */
public static String createNodeGroupForNewDataset(String dataverseName, String datasetName, long rebalanceCount,
        Set<String> ncNames, MetadataProvider metadataProvider) throws Exception {
    ICcApplicationContext appCtx = metadataProvider.getApplicationContext();
    // Base name: "<dataverse>.<dataset>", suffixed with the rebalance count when non-zero.
    String suffix = rebalanceCount == 0L ? "" : "_" + rebalanceCount;
    String nodeGroup = dataverseName + "." + datasetName + suffix;
    MetadataTransactionContext mdTxnCtx = metadataProvider.getMetadataTxnContext();
    appCtx.getMetadataLockManager().acquireNodeGroupWriteLock(metadataProvider.getLocks(), nodeGroup);
    NodeGroup existing = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, nodeGroup);
    if (existing != null) {
        // Name collision: disambiguate with a random UUID and lock the new name too.
        nodeGroup = nodeGroup + "_" + UUID.randomUUID().toString();
        appCtx.getMetadataLockManager().acquireNodeGroupWriteLock(metadataProvider.getLocks(), nodeGroup);
    }
    MetadataManager.INSTANCE.addNodegroup(mdTxnCtx, new NodeGroup(nodeGroup, new ArrayList<>(ncNames)));
    return nodeGroup;
}
/**
 * Resolves the dataset's node group and computes the file splits for the given index
 * across that group's nodes.
 *
 * @param dataset the dataset owning the index
 * @param indexName the name of the index
 * @param mdTxnCtx the metadata transaction context used for the node group lookup
 * @param csm the cluster state manager
 * @return the file splits for the index
 * @throws AlgebricksException if the node group does not exist or metadata access fails
 */
public static FileSplit[] getIndexSplits(Dataset dataset, String indexName, MetadataTransactionContext mdTxnCtx,
        IClusterStateManager csm) throws AlgebricksException {
    try {
        NodeGroup nodeGroup = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, dataset.getNodeGroupName());
        if (nodeGroup == null) {
            throw new AlgebricksException("Couldn't find node group " + dataset.getNodeGroupName());
        }
        return getIndexSplits(csm, dataset, indexName, nodeGroup.getNodeNames());
    } catch (MetadataException me) {
        // Surface metadata failures to the compiler layer with the cause preserved.
        throw new AlgebricksException(me);
    }
}
/**
 * Handles CREATE NODEGROUP: creates the named node group unless it already exists,
 * honoring IF NOT EXISTS. Runs inside its own metadata transaction and holds the
 * node group write lock for the duration.
 *
 * @param metadataProvider the metadata provider for this statement
 * @param stmt the {@code NodegroupDecl} statement
 * @throws Exception on compilation errors or metadata failures
 */
protected void handleCreateNodeGroupStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
    NodegroupDecl stmtCreateNodegroup = (NodegroupDecl) stmt;
    SourceLocation sourceLoc = stmtCreateNodegroup.getSourceLocation();
    String ngName = stmtCreateNodegroup.getNodegroupName().getValue();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    lockManager.acquireNodeGroupWriteLock(metadataProvider.getLocks(), ngName);
    try {
        NodeGroup existing = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, ngName);
        if (existing == null) {
            List<Identifier> ncIdentifiers = stmtCreateNodegroup.getNodeControllerNames();
            List<String> ncNames = new ArrayList<>(ncIdentifiers.size());
            for (Identifier id : ncIdentifiers) {
                ncNames.add(id.getValue());
            }
            MetadataManager.INSTANCE.addNodegroup(mdTxnCtx, new NodeGroup(ngName, ncNames));
        } else if (!stmtCreateNodegroup.getIfNotExists()) {
            throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
                    "A nodegroup with this name " + ngName + " already exists.");
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
final NodeGroup nodegroup = MetadataManager.INSTANCE.getNodegroup(readMdTxn, nodeGroupName);
// BUG FIX: the original tested `nodegroup == null`, which asserts in exactly the
// wrong case. The abort is correct only when the node group is ABSENT, so a
// non-null lookup result is the failure condition (matching the assertion message).
if (nodegroup != null) {
    throw new AssertionError("nodegroup was found after metadata txn was aborted");
/**
 * Handles DROP NODEGROUP: drops the named node group, honoring IF EXISTS.
 * Runs inside its own metadata transaction and holds the node group write lock
 * for the duration.
 *
 * @param metadataProvider the metadata provider for this statement
 * @param stmt the {@code NodeGroupDropStatement}
 * @throws Exception on compilation errors or metadata failures
 */
protected void handleNodegroupDropStatement(MetadataProvider metadataProvider, Statement stmt) throws Exception {
    NodeGroupDropStatement stmtDelete = (NodeGroupDropStatement) stmt;
    SourceLocation sourceLoc = stmtDelete.getSourceLocation();
    String nodegroupName = stmtDelete.getNodeGroupName().getValue();
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    lockManager.acquireNodeGroupWriteLock(metadataProvider.getLocks(), nodegroupName);
    try {
        NodeGroup existing = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, nodegroupName);
        if (existing != null) {
            MetadataManager.INSTANCE.dropNodegroup(mdTxnCtx, nodegroupName, false);
        } else if (!stmtDelete.getIfExists()) {
            throw new CompilationException(ErrorCode.UNKNOWN_NODEGROUP, sourceLoc, nodegroupName);
        }
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
    } catch (Exception e) {
        abort(e, e, mdTxnCtx);
        throw e;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
// Drop the node group that backed this dataset, but only if it still exists.
String nodeGroup = dataset.getNodeGroupName();
// Take the write lock before the existence check so no concurrent DDL races the drop.
lockManager.acquireNodeGroupWriteLock(metadataProvider.getLocks(), nodeGroup);
if (MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, nodeGroup) != null) {
    // NOTE(review): third argument presumably controls silent/forced drop semantics
    // (it is 'false' in the DROP NODEGROUP handler) — confirm against dropNodegroup's contract.
    MetadataManager.INSTANCE.dropNodegroup(mdTxnCtx, nodeGroup, true);
@Test public void abortMetadataTxn() throws Exception { ICcApplicationContext appCtx = (ICcApplicationContext) integrationUtil.getClusterControllerService().getApplicationContext(); final MetadataProvider metadataProvider = new MetadataProvider(appCtx, null); final MetadataTransactionContext mdTxn = MetadataManager.INSTANCE.beginTransaction(); metadataProvider.setMetadataTxnContext(mdTxn); final String nodeGroupName = "ng"; try { final List<String> ngNodes = Arrays.asList("asterix_nc1"); MetadataManager.INSTANCE.addNodegroup(mdTxn, new NodeGroup(nodeGroupName, ngNodes)); MetadataManager.INSTANCE.abortTransaction(mdTxn); } finally { metadataProvider.getLocks().unlock(); } // ensure that the node group was not added final MetadataTransactionContext readMdTxn = MetadataManager.INSTANCE.beginTransaction(); try { final NodeGroup nodegroup = MetadataManager.INSTANCE.getNodegroup(readMdTxn, nodeGroupName); if (nodegroup != null) { throw new AssertionError("nodegroup was found after metadata txn was aborted"); } } finally { MetadataManager.INSTANCE.commitTransaction(readMdTxn); } }
// Verify the node group was removed along with its last dataset: a successful
// lookup here means the cascading drop did not take effect.
final NodeGroup nodegroup = MetadataManager.INSTANCE.getNodegroup(readMdTxn, nodeGroup);
if (nodegroup != null) {
    throw new AssertionError("nodegroup was found after its only dataset was dropped");