/**
 * Adds a Dataset instance, created automatically if absent in the CDAP namespace. If the Dataset
 * instance already exists, this updates the existing instance with the new properties.
 * See {@link co.cask.cdap.api.dataset.DatasetDefinition} for details.
 *
 * @param datasetName name of the dataset instance to create or update
 * @param typeName name of the dataset type
 * @param properties dataset instance properties
 */
@Beta
void createDataset(String datasetName, String typeName, DatasetProperties properties);
/**
 * Spark program execution context. User Spark program can interact with CDAP through this context.
 * The class carries no members of its own; all functionality is inherited from
 * {@link JavaSparkExecutionContextBase}.
 */
@Beta
public abstract class JavaSparkExecutionContext extends JavaSparkExecutionContextBase {

}
/**
 * Returns the state of the workflow. This method can be used from {@link AbstractWorkflow#destroy}
 * to determine the status of the {@link Workflow}.
 *
 * @return a {@link ProgramState} representing the state of the workflow
 */
@Beta
ProgramState getState();
/**
 * Defines an interface for the Spark job.
 */
@Beta
public interface Spark {

  /**
   * Configures a {@link Spark} job using the given {@link SparkConfigurer}.
   *
   * @param configurer the {@link SparkConfigurer} used to configure this job
   */
  void configure(SparkConfigurer configurer);
}
/**
 * Implementation of {@link DatasetModule} announces dataset types and other components to the system.
 */
@Beta
public interface DatasetModule {

  /**
   * Registers dataset types and other components in the system.
   *
   * @param registry instance of {@link DatasetDefinitionRegistry} to be used for registering components
   */
  void register(DatasetDefinitionRegistry registry);
}
/**
 * Returns a {@link Scanner} as specified by the given {@link Scan}.
 *
 * @param scan a {@link Scan} instance describing the scan to perform
 * @return instance of {@link Scanner}
 */
@Beta
Scanner scan(Scan scan);
/**
 * Spark program execution context. User Spark program can interact with CDAP through this context.
 * The class carries no members of its own; all functionality is inherited from
 * {@link JavaSparkExecutionContextBase}.
 */
@Beta
public abstract class JavaSparkExecutionContext extends JavaSparkExecutionContextBase {

}
/**
 * Adds a {@link DatasetModule} to be deployed automatically (if absent in the CDAP namespace)
 * during application deployment.
 *
 * @param moduleName name of the module to deploy
 * @param moduleClass class of the module
 */
@Beta
void addDatasetModule(String moduleName, Class<? extends DatasetModule> moduleClass);
/**
 * Returns a {@link DataTracer} for the given name.
 *
 * @param dataTracerName the name of the logger using which the debug information will be logged
 * @return a {@link DataTracer} to perform data trace operations
 */
@Beta
DataTracer getDataTracer(String dataTracerName);
}
/**
 * Returns an immutable {@link Map} of node ids to {@link WorkflowNodeState}. This can be used
 * from {@link AbstractWorkflow#destroy} method to determine the status of all nodes
 * executed by the Workflow in the current run.
 *
 * @return an immutable map from node id to the {@link WorkflowNodeState} of that node
 */
@Beta
Map<String, WorkflowNodeState> getNodeStates();
/**
 * Deploys a {@link DatasetModule}.
 *
 * @param datasetModuleId the dataset module id
 * @param datasetModule the module class to deploy
 * @throws Exception if the dataset module could not be deployed
 */
@Beta
void deployDatasetModule(DatasetModuleId datasetModuleId,
                         Class<? extends DatasetModule> datasetModule) throws Exception;
/**
 * Interface for a stage that supports a destroy call for resources cleanup.
 */
@Beta
public interface Destroyable {

  /**
   * Invoked for resources cleanup.
   */
  void destroy();
}
/**
 * Exception thrown when attempting an operation on a topic when a topic of that name does not exist.
 */
@Beta
public class TopicNotFoundException extends MessagingException {

  /**
   * Creates the exception for the missing topic.
   *
   * @param namespace namespace of the topic
   * @param topic name of the topic that does not exist
   */
  public TopicNotFoundException(String namespace, String topic) {
    // Message embeds the fully-qualified "namespace:topic" identity for easier diagnosis.
    super(namespace, topic, "Topic " + namespace + ":" + topic + " does not exist.");
  }
}
/**
 * Call this method to consolidate the field lineage operations at Workflow level, rather than
 * emitting them from nodes running inside the Workflow. This method should be called from
 * {@link AbstractWorkflow#initialize} method, at which point no node has been executed yet.
 * Calling this method means the Workflow is taking responsibility for emitting the field
 * operations. In the {@link AbstractWorkflow#destroy} method of the Workflow, field operations
 * will be available as {@link WorkflowNodeState} by calling the {@link #getNodeStates} method.
 * If the workflow does not call the {@link LineageRecorder#record} method, then no field lineage
 * will be emitted.
 */
@Beta
void enableFieldLineageConsolidation();
}
/**
 * Reads the group name for files and directories from the given properties.
 *
 * @param properties the properties to read the group name from
 * @return the name of the group for files and directories, or {@code null} if the
 *         property is not set
 */
@Beta
public static String getFileGroup(Map<String, String> properties) {
  final String group = properties.get(PROPERTY_FILES_GROUP);
  return group;
}
/**
 * Sets the default permissions for files and directories.
 *
 * @param permissions the permission string to apply to files and directories
 * @return this builder, for chaining
 */
@Beta
public Builder setFilePermissions(String permissions) {
  this.add(PROPERTY_FILES_PERMISSIONS, permissions);
  return this;
}
/**
 * Sets the name of the group for files and directories.
 *
 * @param group the group name to apply to files and directories
 * @return this builder, for chaining
 */
@Beta
public Builder setFileGroup(String group) {
  this.add(PROPERTY_FILES_GROUP, group);
  return this;
}
}
/**
 * Sets the table permissions as a map from user name to a permission string.
 *
 * @param permissions map from user name to permission string
 * @return this builder, for chaining
 */
@Beta
public Builder setTablePermissions(Map<String, String> permissions) {
  // Delegate to the shared helper so the property encoding lives in one place.
  TableProperties.setTablePermissions(this, permissions);
  return this;
}
}
/**
 * Sets the table permissions as a map from user name to a permission string.
 *
 * @param permissions map from user name to permission string
 * @return this builder, for chaining
 */
@SuppressWarnings("unchecked")
@Beta
public B setTablePermissions(Map<String, String> permissions) {
  // Delegate to the shared helper so the property encoding lives in one place;
  // the cast to B is safe by the builder's self-type contract.
  TableProperties.setTablePermissions(this, permissions);
  return (B) this;
}
}
/**
 * Adds a {@link DatasetModule} through the underlying configurer.
 *
 * @param moduleName name of the module to deploy
 * @param moduleClass class of the module
 * @see DatasetConfigurer#addDatasetModule(String, Class)
 */
@Beta
protected final void addDatasetModule(String moduleName, Class<? extends DatasetModule> moduleClass) {
  getConfigurer().addDatasetModule(moduleName, moduleClass);
}