private boolean isDatasetExplorable(DatasetId datasetInstance) {
  // Datasets in the system namespace and the internal queue/stream tables are never explorable
  return !NamespaceId.SYSTEM.getNamespace().equals(datasetInstance.getNamespace())
    && !"system.queue.config".equals(datasetInstance.getDataset())
    && !datasetInstance.getDataset().startsWith("system.sharded.queue")
    && !datasetInstance.getDataset().startsWith("system.queue")
    && !datasetInstance.getDataset().startsWith("system.stream");
}
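// A minimal, self-contained sketch (not from the codebase; the class name and dataset
// names are hypothetical) showing which dataset ids the filter above accepts. It assumes
// the standard co.cask.cdap.proto.id.DatasetId(namespace, name) constructor and that the
// system namespace is named "system".
import co.cask.cdap.proto.id.DatasetId;
import co.cask.cdap.proto.id.NamespaceId;

public class ExplorableFilterSketch {
  // Mirrors isDatasetExplorable above
  static boolean isExplorable(DatasetId id) {
    return !NamespaceId.SYSTEM.getNamespace().equals(id.getNamespace())
      && !"system.queue.config".equals(id.getDataset())
      && !id.getDataset().startsWith("system.sharded.queue")
      && !id.getDataset().startsWith("system.queue")
      && !id.getDataset().startsWith("system.stream");
  }

  public static void main(String[] args) {
    System.out.println(isExplorable(new DatasetId("default", "purchases")));           // true
    System.out.println(isExplorable(new DatasetId("system", "purchases")));            // false: system namespace
    System.out.println(isExplorable(new DatasetId("default", "system.queue.config"))); // false: internal table
  }
}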
@VisibleForTesting
void clear() throws IOException {
  // Reset the preview store by dropping and recreating the backing LevelDB table
  service.dropTable(PREVIEW_TABLE_ID.getDataset());
  service.ensureTableExists(PREVIEW_TABLE_ID.getDataset());
}
@Inject
DefaultPreviewStore(LevelDBTableService service) {
  try {
    this.service = service;
    service.ensureTableExists(PREVIEW_TABLE_ID.getDataset());
    this.table = new LevelDBTableCore(PREVIEW_TABLE_ID.getDataset(), service);
  } catch (IOException e) {
    throw new RuntimeException("Error creating preview table", e);
  }
}
public String getTableName(DatasetId datasetID, Map<String, String> properties) {
  // An explicit explore table name in the dataset properties takes precedence
  if (properties != null) {
    String tableName = ExploreProperties.getExploreTableName(properties);
    if (tableName != null) {
      return tableName;
    }
  }
  // Otherwise derive the table name from the dataset name
  return String.format("dataset_%s", cleanTableName(datasetID.getDataset()));
}
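// A hedged usage sketch (fragment; names are hypothetical) of the naming rule above:
// an explicit explore table name in the properties wins, otherwise the name is derived
// from the dataset name. The key "explore.table.name" is assumed here for illustration;
// the canonical key is whatever ExploreProperties.getExploreTableName reads.
Map<String, String> props = ImmutableMap.of("explore.table.name", "my_custom_table");
String explicit = getTableName(NamespaceId.DEFAULT.dataset("purchases"), props); // -> "my_custom_table"
String derived = getTableName(NamespaceId.DEFAULT.dataset("purchases"), null);   // -> "dataset_purchases" (after cleanTableName)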
public void dropPartition(DatasetId datasetInstance, DatasetSpecification spec, PartitionKey key)
  throws ExploreException, SQLException {
  if (!exploreEnabled) {
    return;
  }
  ListenableFuture<Void> futureSuccess = exploreClient.dropPartition(datasetInstance, spec, key);
  handleExploreFuture(futureSuccess, "drop", "partition", datasetInstance.getDataset());
}
public void addPartition(DatasetId datasetInstance, DatasetSpecification spec, PartitionKey key, String location)
  throws ExploreException, SQLException {
  if (!exploreEnabled) {
    return;
  }
  ListenableFuture<Void> futureSuccess = exploreClient.addPartition(datasetInstance, spec, key, location);
  handleExploreFuture(futureSuccess, "add", "partition", datasetInstance.getDataset());
}
/**
 * Disables ad-hoc exploration of the given {@link co.cask.cdap.api.data.batch.RecordScannable}.
 *
 * @param datasetInstance dataset instance id.
 */
public void disableExploreDataset(DatasetId datasetInstance) throws ExploreException, SQLException {
  if (!(exploreEnabled && isDatasetExplorable(datasetInstance))) {
    return;
  }
  ListenableFuture<Void> futureSuccess = exploreClient.disableExploreDataset(datasetInstance);
  handleExploreFuture(futureSuccess, "disable", "dataset", datasetInstance.getDataset());
}
@Override
public List<TriggerInfo> getTriggerInfos(TriggerInfoContext context) {
  TriggerInfo triggerInfo = new DefaultPartitionTriggerInfo(
    dataset.getNamespace(), dataset.getDataset(), numPartitions,
    getPartitionsCount(context.getNotifications()));
  return Collections.singletonList(triggerInfo);
}
private MetadataDataset getMetadataDataset(DatasetContext context, DatasetId datasetId)
  throws IOException, DatasetManagementException {
  // Dataset names containing "business" map to the USER metadata scope; everything else is SYSTEM
  MetadataScope scope = datasetId.getDataset().contains("business")
    ? MetadataScope.USER : MetadataScope.SYSTEM;
  return DatasetsUtil.getOrCreateDataset(
    context, dsFramework, datasetId, MetadataDataset.class.getName(),
    DatasetProperties.builder().add(MetadataDatasetDefinition.SCOPE_KEY, scope.name()).build());
}
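// Illustration of the scope rule above (the dataset names here are hypothetical): an id
// whose name contains "business" resolves to USER-scoped metadata, anything else to SYSTEM.
DatasetId businessMeta = NamespaceId.SYSTEM.dataset("business.metadata"); // -> MetadataScope.USER
DatasetId systemMeta = NamespaceId.SYSTEM.dataset("system.metadata");     // -> MetadataScope.SYSTEM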
/**
 * Enables ad-hoc exploration of the given {@link co.cask.cdap.api.data.batch.RecordScannable}.
 *
 * @param datasetInstance dataset instance id.
 */
public void enableExploreDataset(DatasetId datasetInstance) throws ExploreException, SQLException {
  if (!(exploreEnabled && isDatasetExplorable(datasetInstance))) {
    return;
  }
  ListenableFuture<Void> futureSuccess = exploreClient.enableExploreDataset(datasetInstance);
  handleExploreFuture(futureSuccess, "enable", "dataset", datasetInstance.getDataset());
}
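// A hedged sketch of the enable/disable call pattern (the facade instance name is
// hypothetical): both methods are no-ops unless explore is enabled in the configuration
// AND the dataset passes isDatasetExplorable, so system datasets never reach the
// ExploreClient. Both block on the returned future via handleExploreFuture, so failures
// surface to the caller as ExploreException/SQLException.
DatasetId userDataset = NamespaceId.DEFAULT.dataset("purchases");
exploreFacade.enableExploreDataset(userDataset);   // registers the dataset's explore table
exploreFacade.disableExploreDataset(userDataset);  // removes it again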
@Override
public void delete(byte[] row, byte[][] columns) {
  v3HBaseTable.delete(row, columns);
  // Best-effort mirror of the delete to the v2 table while it still exists;
  // v2 failures are handled rather than propagated
  if (v2HBaseTable != null) {
    try {
      v2HBaseTable.delete(row, columns);
    } catch (Exception e) {
      handleV2TableException(e, "Delete", getV2MetricsTableDatasetId().getDataset());
    }
  }
}
@Override
public void validate() {
  ProtoTrigger.validateNotNull(getDataset(), "dataset");
  ProtoTrigger.validateNotNull(getDataset().getNamespace(), "dataset namespace");
  ProtoTrigger.validateNotNull(getDataset().getDataset(), "dataset name");
  ProtoTrigger.validateInRange(getNumPartitions(), "number of partitions", 1, null);
}
private List<Job> getAllJobs() {
  return Transactionals.execute(transactional, context -> {
    JobQueueDataset jobQueue = context.getDataset(
      Schedulers.JOB_QUEUE_DATASET_ID.getNamespace(), Schedulers.JOB_QUEUE_DATASET_ID.getDataset());
    // Materialize the scan result before the transaction closes
    try (CloseableIterator<Job> iterator = jobQueue.fullScan()) {
      return Lists.newArrayList(iterator);
    }
  });
}
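// A hedged sketch of the same pattern for a single-job lookup; getFirstJob is
// hypothetical, but the shape mirrors getAllJobs above: Transactionals.execute runs the
// lambda inside one transaction, and the CloseableIterator is closed via
// try-with-resources before the result escapes the transaction.
private Job getFirstJob() {
  return Transactionals.execute(transactional, context -> {
    JobQueueDataset jobQueue = context.getDataset(
      Schedulers.JOB_QUEUE_DATASET_ID.getNamespace(), Schedulers.JOB_QUEUE_DATASET_ID.getDataset());
    try (CloseableIterator<Job> iterator = jobQueue.fullScan()) {
      return iterator.hasNext() ? iterator.next() : null;
    }
  });
}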