/**
 * Creates an admin for the given queue type.
 *
 * @param type the type of queue this admin manages
 */
public AbstractQueueAdmin(QueueConstants.QueueType type) {
  // TODO: system-scoped prefix is needed because queues do not follow dataset semantics fully (yet)
  this.type = type;
  this.unqualifiedTableNamePrefix = NamespaceId.SYSTEM.getEntityName() + "." + type;
}
/**
 * Creates a factory for stream file consumers.
 *
 * @param cConf the CDAP configuration
 * @param streamAdmin admin for stream operations
 * @param stateStoreFactory factory for consumer state stores
 */
protected AbstractStreamFileConsumerFactory(CConfiguration cConf, StreamAdmin streamAdmin,
                                            StreamConsumerStateStoreFactory stateStoreFactory) {
  this.cConf = cConf;
  this.streamAdmin = streamAdmin;
  this.stateStoreFactory = stateStoreFactory;
  // System-scoped prefix of the form "<system-ns>.stream"
  this.tablePrefix = NamespaceId.SYSTEM.getEntityName() + "." + QueueConstants.QueueType.STREAM;
}
/**
 * Looks up the namespace metadata stored under the given namespace id.
 *
 * @param id id of the namespace
 * @return the stored {@link NamespaceMeta}, or {@code null} when no entry exists
 */
@Nullable
public NamespaceMeta get(NamespaceId id) {
  String namespaceName = id.getEntityName();
  return getFirst(getNamespaceKey(namespaceName), NamespaceMeta.class);
}
/**
 * Lists the specifications of all streams registered in the given namespace.
 *
 * @param namespaceId namespace to list streams for
 * @return an immutable list of stream specifications
 */
@Override
public List<StreamSpecification> listStreams(NamespaceId namespaceId) throws Exception {
  ImmutableList.Builder<StreamSpecification> specs = ImmutableList.builder();
  // Iterating the shared map requires holding its monitor for a consistent view.
  synchronized (streams) {
    for (String streamName : streams.get(namespaceId.getEntityName())) {
      specs.add(new StreamSpecification.Builder().setName(streamName).create());
    }
  }
  return specs.build();
}
/**
 * Deletes all rows stored under the given namespace from the dataset.
 *
 * @param id id of the namespace to delete
 */
public void delete(NamespaceId id) {
  String namespaceName = id.getEntityName();
  deleteAll(getNamespaceKey(namespaceName));
}
private Map<String, String> createContext(HandlerInfo handlerInfo) { // todo: really inefficient to call this on the intense data flow path return ImmutableMap.of( Constants.Metrics.Tag.NAMESPACE, NamespaceId.SYSTEM.getEntityName(), Constants.Metrics.Tag.COMPONENT, serviceName, Constants.Metrics.Tag.HANDLER, getSimpleName(handlerInfo.getHandlerName()), Constants.Metrics.Tag.METHOD, handlerInfo.getMethodName()); }
private String toCDAPManagedHBaseNamespace(NamespaceId namespace) { // Handle backward compatibility to not add the prefix for default namespace // TODO: CDAP-1601 - Conditional should be removed when we have a way to upgrade user datasets return NamespaceId.DEFAULT.getEntityName().equals(namespace.getNamespace()) ? namespace.getNamespace() : tablePrefix + "_" + namespace.getNamespace(); }
private String toCDAPManagedHBaseNamespace(NamespaceId namespace) { // Handle backward compatibility to not add the prefix for default namespace // TODO: CDAP-1601 - Conditional should be removed when we have a way to upgrade user datasets return NamespaceId.DEFAULT.getEntityName().equals(namespace.getNamespace()) ? namespace.getNamespace() : tablePrefix + "_" + namespace.getNamespace(); }
/**
 * Fetches the number of active explore queries in the given namespace.
 *
 * @param namespace namespace to count queries in
 * @return the active query count reported by the explore service
 * @throws ExploreException if the service does not respond with HTTP 200
 */
@Override
public int getActiveQueryCount(NamespaceId namespace) throws ExploreException {
  String resource =
    String.format("namespaces/%s/data/explore/queries/count", namespace.getEntityName());
  HttpResponse response = doGet(resource);
  if (response.getResponseCode() != HttpURLConnection.HTTP_OK) {
    throw new ExploreException("Cannot get list of queries. Reason: " + response);
  }
  Map<String, String> body =
    parseJson(response, new TypeToken<Map<String, String>>() { }.getType());
  return Integer.parseInt(body.get("count"));
}
/**
 * Reads the total value of a Spark program metric in the default namespace.
 *
 * @param applicationId the application containing the Spark program
 * @param sparkId the Spark program id
 * @param metricName the metric to read
 * @return the total counter value for the metric
 */
private long getSparkMetric(String applicationId, String sparkId, String metricName)
  throws Exception {
  Map<String, String> tags = ImmutableMap.of(
    Constants.Metrics.Tag.NAMESPACE, NamespaceId.DEFAULT.getEntityName(),
    Constants.Metrics.Tag.APP, applicationId,
    Constants.Metrics.Tag.SPARK, sparkId);
  return getTotalCounter(tags, metricName);
}
/**
 * Lists the explore queries in the given namespace.
 *
 * @param namespace namespace to list queries for
 * @return the queries reported by the explore service
 * @throws ExploreException if the service does not respond with HTTP 200
 */
@Override
public List<QueryInfo> getQueries(NamespaceId namespace) throws ExploreException, SQLException {
  String resource =
    String.format("namespaces/%s/data/explore/queries/", namespace.getEntityName());
  HttpResponse response = doGet(resource);
  if (response.getResponseCode() != HttpURLConnection.HTTP_OK) {
    throw new ExploreException("Cannot get list of queries. Reason: " + response);
  }
  return parseJson(response, QUERY_INFO_LIST_TYPE);
}
/**
 * Retrieves all explore queries for a namespace from the explore service.
 *
 * @param namespace namespace whose queries are listed
 * @return the parsed list of query infos
 * @throws ExploreException when the HTTP call does not return 200 OK
 */
@Override
public List<QueryInfo> getQueries(NamespaceId namespace) throws ExploreException, SQLException {
  String namespaceName = namespace.getEntityName();
  HttpResponse response =
    doGet(String.format("namespaces/%s/data/explore/queries/", namespaceName));
  if (response.getResponseCode() == HttpURLConnection.HTTP_OK) {
    return parseJson(response, QUERY_INFO_LIST_TYPE);
  }
  throw new ExploreException("Cannot get list of queries. Reason: " + response);
}
/**
 * Builds the MDS key for a dataset instance row.
 *
 * @param namespaceId namespace of the instance
 * @param instanceName instance name, or {@code null} to get the prefix key covering all instances
 * @return the composed MDS key
 */
private MDSKey getInstanceKey(NamespaceId namespaceId, @Nullable String instanceName) {
  MDSKey.Builder keyBuilder = new MDSKey.Builder()
    .add(INSTANCE_PREFIX)
    .add(namespaceId.getEntityName());
  if (instanceName != null) {
    keyBuilder.add(instanceName);
  }
  return keyBuilder.build();
}
}
/**
 * Asks the explore service to remove the given namespace.
 *
 * @param namespace namespace to remove
 * @return the handle of the delete operation
 * @throws ExploreException if the service does not respond with HTTP 200
 */
@Override
public QueryHandle deleteNamespace(NamespaceId namespace) throws ExploreException, SQLException {
  String resource = String.format("data/explore/namespaces/%s", namespace.getEntityName());
  HttpResponse response = doDelete(resource);
  if (response.getResponseCode() != HttpURLConnection.HTTP_OK) {
    throw new ExploreException("Cannot remove a namespace. Reason: " + response);
  }
  return QueryHandle.fromId(parseResponseAsMap(response, "handle"));
}
/**
 * Composes the MDS key for a dataset instance entry.
 *
 * @param namespaceId namespace of the instance
 * @param instanceName instance name; when {@code null} the key is the namespace-wide prefix
 * @return the built MDS key
 */
private MDSKey getInstanceKey(NamespaceId namespaceId, @Nullable String instanceName) {
  MDSKey.Builder builder =
    new MDSKey.Builder().add(INSTANCE_PREFIX).add(namespaceId.getEntityName());
  return instanceName == null ? builder.build() : builder.add(instanceName).build();
}
}
// NOTE(review): this is a fragment of a larger expression — the anonymous class header and
// the enclosing executor invocation begin before this chunk, so only comments are added here.
// Lists every StreamSpecification stored under the (TYPE_STREAM, namespace-name) key prefix.
@Override public List<StreamSpecification> call(DatasetContext context) throws Exception { return getMetadataStore(context).list(new MDSKey.Builder().add(TYPE_STREAM, namespaceId.getEntityName()).build(), StreamSpecification.class); } }, Exception.class);
/**
 * Starts the dataset op executor HTTP service and registers it with service discovery.
 */
@Override
protected void startUp() throws Exception {
  // Route this service's logs under the system namespace before anything else logs.
  LoggingContextAccessor.setLoggingContext(
    new ServiceLoggingContext(NamespaceId.SYSTEM.getEntityName(),
                              Constants.Logging.COMPONENT_NAME,
                              Constants.Service.DATASET_EXECUTOR));
  LOG.info("Starting DatasetOpExecutorService...");
  httpService.start();
  cancellable = discoveryService.register(
    ResolvingDiscoverable.of(
      new Discoverable(Constants.Service.DATASET_EXECUTOR, httpService.getBindAddress())));
  LOG.info("DatasetOpExecutorService started successfully on {}", httpService.getBindAddress());
}
/**
 * Starts the HTTP service and announces the dataset executor via service discovery.
 */
@Override
protected void startUp() throws Exception {
  // Set the logging context first so subsequent log lines carry the system-namespace tags.
  ServiceLoggingContext loggingContext =
    new ServiceLoggingContext(NamespaceId.SYSTEM.getEntityName(),
                              Constants.Logging.COMPONENT_NAME,
                              Constants.Service.DATASET_EXECUTOR);
  LoggingContextAccessor.setLoggingContext(loggingContext);
  LOG.info("Starting DatasetOpExecutorService...");
  httpService.start();
  Discoverable target =
    new Discoverable(Constants.Service.DATASET_EXECUTOR, httpService.getBindAddress());
  cancellable = discoveryService.register(ResolvingDiscoverable.of(target));
  LOG.info("DatasetOpExecutorService started successfully on {}", httpService.getBindAddress());
}
/**
 * Renders a TableId as the fully qualified HBase table name string.
 * Tables in the default namespace keep an unqualified name for backward compatibility.
 *
 * @param tableId the table id, must not be null
 * @return the HBase table name, namespace-qualified with ':' unless in the default namespace
 */
@Override
protected String getTableNameAsString(TableId tableId) {
  Preconditions.checkArgument(tableId != null, "TableId should not be null.");
  String prefix = cConf.get(Constants.Dataset.TABLE_PREFIX);
  String hBaseTableName = HTableNameConverter.toHBaseTableName(prefix, tableId);
  if (NamespaceId.DEFAULT.getEntityName().equals(tableId.getNamespace())) {
    return hBaseTableName;
  }
  return tableId.getNamespace() + ":" + hBaseTableName;
}
/**
 * Starts the streams HTTP service and registers it with service discovery.
 */
@Override
protected void startUp() throws Exception {
  // Tag this service's logging with the system namespace before starting anything.
  LoggingContextAccessor.setLoggingContext(
    new ServiceLoggingContext(NamespaceId.SYSTEM.getEntityName(),
                              Constants.Logging.COMPONENT_NAME,
                              Constants.Service.STREAMS));
  httpService.start();
  Discoverable streamsService =
    new Discoverable(Constants.Service.STREAMS, httpService.getBindAddress());
  discoverable = ResolvingDiscoverable.of(streamsService);
  cancellable = discoveryService.register(discoverable);
}