/**
 * Creates a copy of this dataset that targets the given node group, used during rebalance.
 * The copy carries a freshly generated alternating dataset id and an incremented rebalance
 * count; every other property is taken unchanged from this dataset.
 *
 * @param targetNodeGroupName the node group the rebalanced dataset will live on
 * @return a new {@code Dataset} instance describing the rebalance target
 */
public Dataset getTargetDatasetForRebalance(String targetNodeGroupName) {
    // Alternate the dataset id so the rebalance target does not collide with the source.
    int rebalanceTargetId = DatasetIdFactory.generateAlternatingDatasetId(this.datasetId);
    int nextRebalanceCount = this.rebalanceCount + 1;
    return new Dataset(this.dataverseName, this.datasetName, this.recordTypeDataverseName, this.recordTypeName,
            this.metaTypeDataverseName, this.metaTypeName, targetNodeGroupName, this.compactionPolicyFactory,
            this.compactionPolicyProperties, this.datasetDetails, this.hints, this.datasetType, rebalanceTargetId,
            this.pendingOp, nextRebalanceCount, this.compressionScheme);
}
/**
 * Records the drop of a dataset: a placeholder entry holding only the identifying fields is
 * added to the dropped cache, and the drop is logged and applied as a metadata logical
 * operation (the {@code false} flag marks it as a non-add operation).
 *
 * @param dataverseName dataverse of the dataset being dropped
 * @param datasetName   name of the dataset being dropped
 */
public void dropDataset(String dataverseName, String datasetName) {
    // Placeholder dataset: all non-identifying fields null, dataset id -1, no pending op.
    final Dataset droppedDataset =
            new Dataset(dataverseName, datasetName, null, null, null, null, null, null, null, null, -1,
                    MetadataUtil.PENDING_NO_OP);
    droppedCache.addDatasetIfNotExists(droppedDataset);
    logAndApply(new MetadataLogicalOperation(droppedDataset, false));
}
/**
 * Inserts the metadata datasets into the physical dataset index. Should be performed
 * only on bootstrap of a new universe.
 *
 * @param mdTxnCtx the metadata transaction context under which the datasets are added
 * @param indexes  the metadata indexes whose backing datasets are inserted
 * @throws AlgebricksException if adding a dataset to the metadata manager fails
 */
public static void insertMetadataDatasets(MetadataTransactionContext mdTxnCtx, IMetadataIndex[] indexes)
        throws AlgebricksException {
    // Enhanced-for avoids the repeated indexed access to the same element in the original loop.
    for (IMetadataIndex index : indexes) {
        // Metadata datasets are BTREE-structured, hash-partitioned on the partitioning expr.
        IDatasetDetails id = new InternalDatasetDetails(FileStructure.BTREE, PartitioningStrategy.HASH,
                index.getPartitioningExpr(), index.getPartitioningExpr(), null, index.getPartitioningExprType(),
                false, null);
        MetadataManager.INSTANCE.addDataset(mdTxnCtx,
                new Dataset(index.getDataverseName(), index.getIndexedDatasetName(), index.getDataverseName(),
                        index.getPayloadRecordType().getTypeName(), index.getNodeGroupName(),
                        GlobalConfig.DEFAULT_COMPACTION_POLICY_NAME,
                        GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES, id, new HashMap<>(),
                        DatasetType.INTERNAL, index.getDatasetId().getId(), MetadataUtil.PENDING_NO_OP));
    }
    if (LOGGER.isInfoEnabled()) {
        LOGGER.info("Finished inserting initial datasets.");
    }
}
handler = new ActiveNotificationHandler(); allDatasets = new ArrayList<>(); firstDataset = new Dataset(dataverseName, "firstDataset", null, null, null, null, null, null, null, null, 0, 0); secondDataset = new Dataset(dataverseName, "secondDataset", null, null, null, null, null, null, null, null, 0, 0); allDatasets.add(firstDataset); allDatasets.add(secondDataset);
assertSuccess(query); Dataset newDataset = new Dataset(dataverseName, "newDataset", null, null, null, null, null, null, null, null, 0, 0); Action addDataset = users[1].addDataset(newDataset, listener);
/**
 * Verifies that adding a dataset while the active entity is RUNNING fails with
 * CANNOT_ADD_DATASET_TO_ACTIVE_ENTITY and leaves the registered dataset set unchanged.
 */
@Test
public void testCreateNewDatasetWhileRunning() throws Exception {
    // Bring the entity up to the RUNNING state first.
    testStartWhenStartSucceed();
    Dataset datasetToAdd =
            new Dataset(dataverseName, "newDataset", null, null, null, null, null, null, null, null, 0, 0);
    Action addAction = users[1].addDataset(datasetToAdd, listener);
    addAction.sync();
    assertFailure(addAction, ErrorCode.CANNOT_ADD_DATASET_TO_ACTIVE_ENTITY);
    Assert.assertEquals(ActivityState.RUNNING, listener.getState());
    Assert.assertEquals(2, listener.getDatasets().size());
    Assert.assertEquals(clusterController.getAllDatasets().size(), listener.getDatasets().size());
}
/**
 * Verifies that after a permanent failure (entity in STOPPED state) a new dataset can be
 * added successfully and the listener's registered dataset count grows to 3.
 */
@Test
public void testCreateNewDatasetWhilePermanentFailure() throws Exception {
    // Drive the entity into the STOPPED state via a failed recovery attempt.
    testRecoveryFailureAfterOneAttemptCompilationFailure();
    Assert.assertEquals(ActivityState.STOPPED, listener.getState());
    Dataset datasetToAdd =
            new Dataset(dataverseName, "newDataset", null, null, null, null, null, null, null, null, 0, 0);
    Action addAction = users[0].addDataset(datasetToAdd, listener);
    addAction.sync();
    assertSuccess(addAction);
    // The entity must remain stopped; only the dataset set changes.
    Assert.assertEquals(ActivityState.STOPPED, listener.getState());
    Assert.assertEquals(3, listener.getDatasets().size());
    Assert.assertEquals(clusterController.getAllDatasets().size(), listener.getDatasets().size());
}
/**
 * Builds a transaction-scoped copy of an external dataset whose details are reset to
 * {@code TransactionState.BEGIN}; every other property is carried over from the source.
 *
 * @param dataset the external dataset to copy (its details must be ExternalDatasetDetails)
 * @return a new {@code Dataset} with transaction state BEGIN
 */
public static Dataset createTransactionDataset(Dataset dataset) {
    ExternalDatasetDetails sourceDetails = (ExternalDatasetDetails) dataset.getDatasetDetails();
    // Same adapter/properties/timestamp, but the transaction state is reset to BEGIN.
    ExternalDatasetDetails beginDetails = new ExternalDatasetDetails(sourceDetails.getAdapter(),
            sourceDetails.getProperties(), sourceDetails.getTimestamp(), TransactionState.BEGIN);
    return new Dataset(dataset.getDataverseName(), dataset.getDatasetName(), dataset.getItemTypeDataverseName(),
            dataset.getItemTypeName(), dataset.getNodeGroupName(), dataset.getCompactionPolicy(),
            dataset.getCompactionPolicyProperties(), beginDetails, dataset.getHints(), DatasetType.EXTERNAL,
            dataset.getDatasetId(), dataset.getPendingOp());
}
/**
 * Verifies that a dataset cannot be added while the entity is recovering, for the case where
 * the recovery attempt subsequently fails at runtime: the add fails with
 * CANNOT_ADD_DATASET_TO_ACTIVE_ENTITY and the registered dataset count stays at 2.
 */
@SuppressWarnings("deprecation")
@Test
public void testCreateNewDatasetDuringRecoveryAttemptThatFailsRuntime() throws Exception {
    // Start the entity successfully so it is running.
    testStartWhenStartSucceed();
    // Configure the next start attempt to fail compilation.
    listener.onStart(Behavior.FAIL_COMPILE);
    WaitForStateSubscriber tempFailSubscriber =
            new WaitForStateSubscriber(listener, EnumSet.of(ActivityState.TEMPORARILY_FAILED));
    // Fail the running job, pushing the entity toward TEMPORARILY_FAILED and recovery.
    clusterController.jobFinish(listener.getJobId(), JobStatus.FAILURE,
            Collections.singletonList(new HyracksDataException("Runtime Failure")));
    // recovery is ongoing; the recovery attempt itself will fail at runtime after stepping.
    listener.onStart(Behavior.STEP_FAIL_RUNTIME);
    tempFailSubscriber.sync();
    WaitForStateSubscriber recoveringSubscriber =
            new WaitForStateSubscriber(listener, EnumSet.of(ActivityState.RECOVERING));
    recoveringSubscriber.sync();
    // Re-arm a subscriber for the second TEMPORARILY_FAILED, reached when recovery fails.
    tempFailSubscriber = new WaitForStateSubscriber(listener, EnumSet.of(ActivityState.TEMPORARILY_FAILED));
    Dataset newDataset =
            new Dataset(dataverseName, "newDataset", null, null, null, null, null, null, null, null, 0, 0);
    // Attempt the add while recovery is in progress.
    Action add = users[1].addDataset(newDataset, listener);
    listener.allowStep();
    tempFailSubscriber.sync();
    add.sync();
    assertFailure(add, ErrorCode.CANNOT_ADD_DATASET_TO_ACTIVE_ENTITY);
    Assert.assertEquals(2, listener.getDatasets().size());
    Assert.assertEquals(clusterController.getAllDatasets().size(), listener.getDatasets().size());
}
/**
 * Verifies that a dataset cannot be added while the entity is recovering, for the case where
 * the recovery attempt subsequently fails at compile time: the add fails with
 * CANNOT_ADD_DATASET_TO_ACTIVE_ENTITY and the registered dataset count stays at 2.
 */
@SuppressWarnings("deprecation")
@Test
public void testCreateNewDatasetDuringRecoveryAttemptThatFailsCompile() throws Exception {
    // Start the entity successfully so it is running.
    testStartWhenStartSucceed();
    // Configure the next start attempt to fail compilation.
    listener.onStart(Behavior.FAIL_COMPILE);
    WaitForStateSubscriber tempFailSubscriber =
            new WaitForStateSubscriber(listener, EnumSet.of(ActivityState.TEMPORARILY_FAILED));
    // Fail the running job, pushing the entity toward TEMPORARILY_FAILED and recovery.
    clusterController.jobFinish(listener.getJobId(), JobStatus.FAILURE,
            Collections.singletonList(new HyracksDataException("Runtime Failure")));
    // recovery is ongoing; the recovery attempt itself will fail to compile after stepping.
    listener.onStart(Behavior.STEP_FAIL_COMPILE);
    tempFailSubscriber.sync();
    WaitForStateSubscriber recoveringSubscriber =
            new WaitForStateSubscriber(listener, EnumSet.of(ActivityState.RECOVERING));
    recoveringSubscriber.sync();
    // Re-arm a subscriber for the second TEMPORARILY_FAILED, reached when recovery fails.
    tempFailSubscriber = new WaitForStateSubscriber(listener, EnumSet.of(ActivityState.TEMPORARILY_FAILED));
    Dataset newDataset =
            new Dataset(dataverseName, "newDataset", null, null, null, null, null, null, null, null, 0, 0);
    // Attempt the add while recovery is in progress.
    Action add = users[1].addDataset(newDataset, listener);
    listener.allowStep();
    tempFailSubscriber.sync();
    add.sync();
    assertFailure(add, ErrorCode.CANNOT_ADD_DATASET_TO_ACTIVE_ENTITY);
    Assert.assertEquals(2, listener.getDatasets().size());
    Assert.assertEquals(clusterController.getAllDatasets().size(), listener.getDatasets().size());
}
/**
 * Verifies that a dataset cannot be added while the entity is recovering, even when the
 * recovery attempt ultimately succeeds: the add fails with
 * CANNOT_ADD_DATASET_TO_ACTIVE_ENTITY and the registered dataset count stays at 2.
 */
@SuppressWarnings("deprecation")
@Test
public void testCreateNewDatasetDuringRecoveryAttemptThatSucceeds() throws Exception {
    // Start the entity successfully so it is running.
    testStartWhenStartSucceed();
    // Configure the next start attempt to fail compilation.
    listener.onStart(Behavior.FAIL_COMPILE);
    WaitForStateSubscriber tempFailSubscriber =
            new WaitForStateSubscriber(listener, EnumSet.of(ActivityState.TEMPORARILY_FAILED));
    // Fail the running job, pushing the entity toward TEMPORARILY_FAILED and recovery.
    clusterController.jobFinish(listener.getJobId(), JobStatus.FAILURE,
            Collections.singletonList(new HyracksDataException("Runtime Failure")));
    // recovery is ongoing; this recovery attempt will succeed after stepping.
    listener.onStart(Behavior.STEP_SUCCEED);
    tempFailSubscriber.sync();
    WaitForStateSubscriber runningSubscriber =
            new WaitForStateSubscriber(listener, EnumSet.of(ActivityState.RUNNING));
    WaitForStateSubscriber recoveringSubscriber =
            new WaitForStateSubscriber(listener, EnumSet.of(ActivityState.RECOVERING));
    recoveringSubscriber.sync();
    Dataset newDataset =
            new Dataset(dataverseName, "newDataset", null, null, null, null, null, null, null, null, 0, 0);
    // Attempt the add while recovery is in progress.
    Action add = users[1].addDataset(newDataset, listener);
    listener.allowStep();
    runningSubscriber.sync();
    add.sync();
    assertFailure(add, ErrorCode.CANNOT_ADD_DATASET_TO_ACTIVE_ENTITY);
    Assert.assertEquals(2, listener.getDatasets().size());
    Assert.assertEquals(clusterController.getAllDatasets().size(), listener.getDatasets().size());
}
new Dataset(dataverseName, datasetName, getItemTypeDataverseName(), getItemTypeName(), getMetaItemTypeDataverseName(), getMetaItemTypeName(), getNodeGroupName(), getCompactionPolicy(), getCompactionPolicyProperties(), getDatasetDetails(), getHints(), new Dataset(dataverseName, datasetName, getItemTypeDataverseName(), getItemTypeName(), getNodeGroupName(), getCompactionPolicy(), getCompactionPolicyProperties(), getDatasetDetails(), getHints(), getDatasetType(), getDatasetId(),
/**
 * Verifies that a dataset added while the entity is STARTING is rejected once the start
 * completes: the start itself succeeds, the add fails with
 * CANNOT_ADD_DATASET_TO_ACTIVE_ENTITY, and the registered dataset count stays at 2.
 */
@Test
public void testCreateNewDatasetWhileStarting() throws Exception {
    Assert.assertEquals(ActivityState.STOPPED, listener.getState());
    // Configure the start to succeed after a manual step.
    listener.onStart(Behavior.STEP_SUCCEED);
    Action startAction = users[0].startActivity(listener);
    WaitForStateSubscriber startingSubscriber =
            new WaitForStateSubscriber(listener, Collections.singleton(ActivityState.STARTING));
    startingSubscriber.sync();
    Dataset datasetToAdd =
            new Dataset(dataverseName, "newDataset", null, null, null, null, null, null, null, null, 0, 0);
    // Attempt the add while the entity is still starting.
    Action addAction = users[1].addDataset(datasetToAdd, listener);
    listener.allowStep();
    startAction.sync();
    assertSuccess(startAction);
    addAction.sync();
    assertFailure(addAction, ErrorCode.CANNOT_ADD_DATASET_TO_ACTIVE_ENTITY);
    Assert.assertEquals(ActivityState.RUNNING, listener.getState());
    Assert.assertEquals(2, listener.getDatasets().size());
    Assert.assertEquals(clusterController.getAllDatasets().size(), listener.getDatasets().size());
}
String compressionScheme = getCompressionScheme(datasetRecord); return new Dataset(dataverseName, datasetName, typeDataverseName, typeName, metaTypeDataverseName, metaTypeName, nodeGroupName, compactionPolicy, compactionPolicyProperties, datasetDetails, hints, datasetType, datasetId, pendingOp, rebalanceCount, compressionScheme);
/**
 * Verifies that a dataset added while the entity is suspending/suspended is rejected: the
 * suspension succeeds, the activity is resumed, the add fails with
 * CANNOT_ADD_DATASET_TO_ACTIVE_ENTITY, and the registered dataset count stays at 2.
 */
@Test
public void testCreateNewDatasetWhileSuspended() throws Exception {
    testStartWhenStartSucceed();
    // suspend
    Assert.assertEquals(ActivityState.RUNNING, listener.getState());
    listener.onStop(Behavior.STEP_SUCCEED);
    Action suspension = users[1].suspendActivity(listener);
    WaitForStateSubscriber subscriber =
            new WaitForStateSubscriber(listener, EnumSet.of(ActivityState.SUSPENDING, ActivityState.SUSPENDED));
    subscriber.sync();
    Dataset newDataset =
            new Dataset(dataverseName, "newDataset", null, null, null, null, null, null, null, null, 0, 0);
    // Attempt the add while the entity is suspending/suspended.
    Action createDatasetAction = users[0].addDataset(newDataset, listener);
    // Two steps are allowed — presumably one for the suspend and one for the subsequent
    // resume; TODO(review): confirm against the listener's step semantics.
    listener.allowStep();
    listener.allowStep();
    suspension.sync();
    assertSuccess(suspension);
    users[1].resumeActivity(listener);
    createDatasetAction.sync();
    assertFailure(createDatasetAction, ErrorCode.CANNOT_ADD_DATASET_TO_ACTIVE_ENTITY);
    Assert.assertEquals(ActivityState.RUNNING, listener.getState());
    Assert.assertEquals(2, listener.getDatasets().size());
    Assert.assertEquals(clusterController.getAllDatasets().size(), listener.getDatasets().size());
}
List<List<String>> partitioningKeys = new ArrayList<>(); partitioningKeys.add(Collections.singletonList("key")); Dataset dataset = new Dataset(DATAVERSE_NAME, DATASET_NAME, DATAVERSE_NAME, DATA_TYPE_NAME, NODE_GROUP_NAME, NoMergePolicyFactory.NAME, null, new InternalDatasetDetails(null, InternalDatasetDetails.PartitioningStrategy.HASH,
/**
 * Adds a test dataset named {@code "ds_" + datasetPostfix} to the metadata, copying most
 * properties from the given source dataset, then either commits or aborts the metadata
 * transaction depending on the {@code abort} flag.
 *
 * @param appCtx         the CC application context used to create the metadata provider
 * @param source         dataset whose dataverse, node group, details, and hints are copied
 * @param datasetPostfix suffix for the new dataset's name; also used as its dataset id
 * @param abort          when true the metadata transaction is aborted instead of committed
 * @throws Exception if the metadata transaction cannot be started, applied, or finished
 */
private void addDataset(ICcApplicationContext appCtx, Dataset source, int datasetPostfix, boolean abort)
        throws Exception {
    // NOTE(review): the item type name is taken from source.getDatasetType().name()
    // (e.g. "INTERNAL"), not from the source's item type — confirm this is intentional.
    Dataset dataset = new Dataset(source.getDataverseName(), "ds_" + datasetPostfix, source.getDataverseName(),
            source.getDatasetType().name(), source.getNodeGroupName(), NoMergePolicyFactory.NAME, null,
            source.getDatasetDetails(), source.getHints(), DatasetConfig.DatasetType.INTERNAL, datasetPostfix, 0);
    MetadataProvider metadataProvider = new MetadataProvider(appCtx, null);
    final MetadataTransactionContext writeTxn = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(writeTxn);
    try {
        MetadataManager.INSTANCE.addDataset(writeTxn, dataset);
        if (abort) {
            MetadataManager.INSTANCE.abortTransaction(writeTxn);
        } else {
            MetadataManager.INSTANCE.commitTransaction(writeTxn);
        }
    } finally {
        // Always release the metadata locks, whether the transaction committed or aborted.
        metadataProvider.getLocks().unlock();
    }
}
}
dataset = new Dataset(dataverseName, datasetName, itemTypeDataverseName, itemTypeName, metaItemTypeDataverseName, metaItemTypeName, ngName, compactionPolicy, compactionPolicyProperties, datasetDetails, dd.getHints(), dsType, DatasetIdFactory.generateDatasetId(),