/**
 * Adds a dataset instance, first translating the given dataset id through the
 * local-dataset name mapping, then delegating to the wrapped framework.
 *
 * @param datasetTypeName name of the dataset type to instantiate
 * @param datasetInstanceId id to be mapped before delegation
 * @param props properties for the new instance
 * @param ownerPrincipal Kerberos principal owning the instance, or null
 * @throws IOException if the underlying framework fails with an I/O error
 * @throws DatasetManagementException if the underlying framework rejects the request
 */
@Override
public void addInstance(String datasetTypeName, DatasetId datasetInstanceId, DatasetProperties props,
                        @Nullable KerberosPrincipalId ownerPrincipal)
  throws IOException, DatasetManagementException {
  DatasetId mappedId = getMappedDatasetInstance(datasetInstanceId);
  super.addInstance(datasetTypeName, mappedId, props, ownerPrincipal);
}
// When running inside a Workflow (workflowInfo != null), wrap the framework so dataset
// names are mapped onto the run's local-dataset instances; otherwise use it unchanged.
DatasetFramework programDatasetFramework = workflowInfo == null ? datasetFramework : NameMappedDatasetFramework.createFromWorkflowProgramInfo(datasetFramework, workflowInfo, contextConfig.getApplicationSpecification());
/**
 * Deletes this Workflow run's local dataset instances, skipping any marked to be
 * kept. Each deletion is retried with a fixed delay; a deletion that ultimately
 * fails is logged as a warning and does not abort the rest of the cleanup.
 */
private void deleteLocalDatasets() {
  for (final Map.Entry<String, String> mapping : datasetFramework.getDatasetNameMapping().entrySet()) {
    if (keepLocal(mapping.getKey())) {
      continue;
    }
    final String localName = mapping.getValue();
    final DatasetId localDatasetId = new DatasetId(workflowRunId.getNamespace(), localName);
    LOG.debug("Deleting Workflow local dataset instance: {}", localName);
    try {
      Retries.runWithRetries(
        () -> datasetFramework.deleteInstance(localDatasetId),
        RetryStrategies.fixDelay(Constants.Retry.LOCAL_DATASET_OPERATION_RETRY_DELAY_SECONDS, TimeUnit.SECONDS));
    } catch (Exception e) {
      // Best-effort cleanup: log and continue with the remaining local datasets.
      LOG.warn("Failed to delete the Workflow local dataset instance {}", localName, e);
    }
  }
}
/**
 * Creates a new instance based on the given {@link WorkflowProgramInfo}.
 *
 * @param datasetFramework the framework to wrap
 * @param info identifies the Workflow whose local datasets are mapped
 * @param appSpec application specification holding the Workflow's local dataset specs
 * @return a framework that maps the Workflow's local dataset names for this run
 */
public static NameMappedDatasetFramework createFromWorkflowProgramInfo(DatasetFramework datasetFramework,
                                                                       WorkflowProgramInfo info,
                                                                       ApplicationSpecification appSpec) {
  Set<String> localDatasetNames = appSpec.getWorkflows().get(info.getName()).getLocalDatasetSpecs().keySet();
  String runId = info.getRunId().getId();
  return new NameMappedDatasetFramework(datasetFramework, localDatasetNames, runId);
}
private void createLocalDatasets() throws IOException, DatasetManagementException { final KerberosPrincipalId principalId = ProgramRunners.getApplicationPrincipal(programOptions); for (final Map.Entry<String, String> entry : datasetFramework.getDatasetNameMapping().entrySet()) { final String localInstanceName = entry.getValue(); final DatasetId instanceId = new DatasetId(workflowRunId.getNamespace(), localInstanceName); final DatasetCreationSpec instanceSpec = workflowSpec.getLocalDatasetSpecs().get(entry.getKey()); LOG.debug("Adding Workflow local dataset instance: {}", localInstanceName); try { Retries.callWithRetries(new Retries.Callable<Void, Exception>() { @Override public Void call() throws Exception { DatasetProperties properties = addLocalDatasetProperty(instanceSpec.getProperties(), keepLocal(entry.getKey())); // we have to do this check since addInstance method can only be used when app impersonation is enabled if (principalId != null) { datasetFramework.addInstance(instanceSpec.getTypeName(), instanceId, properties, principalId); } else { datasetFramework.addInstance(instanceSpec.getTypeName(), instanceId, properties); } return null; } }, RetryStrategies.fixDelay(Constants.Retry.LOCAL_DATASET_OPERATION_RETRY_DELAY_SECONDS, TimeUnit.SECONDS)); } catch (IOException | DatasetManagementException e) { throw e; } catch (Exception e) { // this should never happen throw new IllegalStateException(e); } } }
// Inside a Workflow, wrap the framework so dataset names resolve to the run's
// local-dataset instances; standalone runs use the framework directly.
DatasetFramework programDatasetFramework = workflowInfo == null ? datasetFramework : NameMappedDatasetFramework.createFromWorkflowProgramInfo(datasetFramework, workflowInfo, contextConfig.getApplicationSpecification());
/**
 * Removes the Workflow run's local dataset instances. Datasets flagged to be kept
 * are left untouched; each delete is retried with a fixed delay and a persistent
 * failure is only logged so cleanup continues for the remaining datasets.
 */
private void deleteLocalDatasets() {
  for (final Map.Entry<String, String> entry : datasetFramework.getDatasetNameMapping().entrySet()) {
    if (!keepLocal(entry.getKey())) {
      final String instanceName = entry.getValue();
      final DatasetId toDelete = new DatasetId(workflowRunId.getNamespace(), instanceName);
      LOG.debug("Deleting Workflow local dataset instance: {}", instanceName);
      try {
        Retries.runWithRetries(
          () -> datasetFramework.deleteInstance(toDelete),
          RetryStrategies.fixDelay(Constants.Retry.LOCAL_DATASET_OPERATION_RETRY_DELAY_SECONDS,
                                   TimeUnit.SECONDS));
      } catch (Exception e) {
        LOG.warn("Failed to delete the Workflow local dataset instance {}", instanceName, e);
      }
    }
  }
}
/**
 * Creates a new instance based on the given {@link WorkflowProgramInfo}.
 *
 * <p>The returned framework maps dataset names declared as local datasets of the
 * named Workflow onto per-run instances identified by the run id.
 */
public static NameMappedDatasetFramework createFromWorkflowProgramInfo(DatasetFramework datasetFramework,
                                                                       WorkflowProgramInfo info,
                                                                       ApplicationSpecification appSpec) {
  Set<String> declaredLocalDatasets =
    appSpec.getWorkflows().get(info.getName()).getLocalDatasetSpecs().keySet();
  return new NameMappedDatasetFramework(datasetFramework, declaredLocalDatasets, info.getRunId().getId());
}
private void createLocalDatasets() throws IOException, DatasetManagementException { final KerberosPrincipalId principalId = ProgramRunners.getApplicationPrincipal(programOptions); for (final Map.Entry<String, String> entry : datasetFramework.getDatasetNameMapping().entrySet()) { final String localInstanceName = entry.getValue(); final DatasetId instanceId = new DatasetId(workflowRunId.getNamespace(), localInstanceName); final DatasetCreationSpec instanceSpec = workflowSpec.getLocalDatasetSpecs().get(entry.getKey()); LOG.debug("Adding Workflow local dataset instance: {}", localInstanceName); try { Retries.callWithRetries(new Retries.Callable<Void, Exception>() { @Override public Void call() throws Exception { DatasetProperties properties = addLocalDatasetProperty(instanceSpec.getProperties(), keepLocal(entry.getKey())); // we have to do this check since addInstance method can only be used when app impersonation is enabled if (principalId != null) { datasetFramework.addInstance(instanceSpec.getTypeName(), instanceId, properties, principalId); } else { datasetFramework.addInstance(instanceSpec.getTypeName(), instanceId, properties); } return null; } }, RetryStrategies.fixDelay(Constants.Retry.LOCAL_DATASET_OPERATION_RETRY_DELAY_SECONDS, TimeUnit.SECONDS)); } catch (IOException | DatasetManagementException e) { throw e; } catch (Exception e) { // this should never happen throw new IllegalStateException(e); } } }
// Choose the dataset framework for the program: name-mapped when part of a
// Workflow run, the plain framework otherwise.
DatasetFramework programDatasetFramework = workflowInfo == null ? datasetFramework : NameMappedDatasetFramework.createFromWorkflowProgramInfo(datasetFramework, workflowInfo, contextConfig.getApplicationSpecification());
/**
 * Returns the dataset for the given id after applying the local-dataset name
 * mapping; all other arguments are forwarded unchanged to the wrapped framework.
 *
 * @return the dataset instance, or null if it does not exist
 * @throws DatasetManagementException if the underlying framework fails
 * @throws IOException on I/O failure while instantiating the dataset
 */
@Nullable
@Override
public <T extends Dataset> T getDataset(DatasetId datasetInstanceId, Map<String, String> arguments,
                                        @Nullable ClassLoader classLoader,
                                        DatasetClassLoaderProvider classLoaderProvider,
                                        @Nullable Iterable<? extends EntityId> owners, AccessType accessType)
  throws DatasetManagementException, IOException {
  DatasetId mappedId = getMappedDatasetInstance(datasetInstanceId);
  return super.getDataset(mappedId, arguments, classLoader, classLoaderProvider, owners, accessType);
}
// Wrap the framework with Workflow-run name mapping when workflowInfo is present;
// otherwise the unwrapped framework is used.
DatasetFramework programDatasetFramework = workflowInfo == null ? datasetFramework : NameMappedDatasetFramework.createFromWorkflowProgramInfo(datasetFramework, workflowInfo, appSpec);
/**
 * Updates the properties of a dataset instance, resolving the given id through
 * the local-dataset name mapping before delegating to the wrapped framework.
 *
 * @throws DatasetManagementException if the underlying framework rejects the update
 * @throws IOException on I/O failure during the update
 */
@Override
public void updateInstance(DatasetId datasetInstanceId, DatasetProperties props)
  throws DatasetManagementException, IOException {
  DatasetId mappedId = getMappedDatasetInstance(datasetInstanceId);
  super.updateInstance(mappedId, props);
}
// Inside a Workflow run, map dataset names to the run's local-dataset instances;
// otherwise use the framework as-is.
DatasetFramework programDatasetFramework = workflowInfo == null ? datasetFramework : NameMappedDatasetFramework.createFromWorkflowProgramInfo(datasetFramework, workflowInfo, appSpec);
/**
 * Returns the admin for the dataset identified by the mapped instance id,
 * delegating to the wrapped framework.
 *
 * @return the dataset admin, or null if the instance does not exist
 * @throws DatasetManagementException if the underlying framework fails
 * @throws IOException on I/O failure while obtaining the admin
 */
@Nullable
@Override
public <T extends DatasetAdmin> T getAdmin(DatasetId datasetInstanceId, @Nullable ClassLoader classLoader)
  throws DatasetManagementException, IOException {
  DatasetId mappedId = getMappedDatasetInstance(datasetInstanceId);
  return super.getAdmin(mappedId, classLoader);
}
// Select the program's dataset framework: name-mapped for Workflow runs,
// the plain framework otherwise.
DatasetFramework programDatasetFramework = workflowInfo == null ? datasetFramework : NameMappedDatasetFramework.createFromWorkflowProgramInfo(datasetFramework, workflowInfo, appSpec);
/**
 * Delegates to the wrapped framework to obtain a dataset admin, translating the
 * requested dataset id through the local-dataset name mapping first.
 *
 * @return the admin for the mapped instance, or null when absent
 * @throws DatasetManagementException if the underlying framework fails
 * @throws IOException on I/O failure while obtaining the admin
 */
@Nullable
@Override
public <T extends DatasetAdmin> T getAdmin(DatasetId datasetInstanceId, @Nullable ClassLoader classLoader)
  throws DatasetManagementException, IOException {
  return super.getAdmin(getMappedDatasetInstance(datasetInstanceId), classLoader);
}
// A Workflow run gets a name-mapped framework so local datasets resolve to
// per-run instances; other runs keep the original framework.
DatasetFramework programDatasetFramework = workflowInfo == null ? datasetFramework : NameMappedDatasetFramework.createFromWorkflowProgramInfo(datasetFramework, workflowInfo, appSpec);
/**
 * Fetches a dataset via the wrapped framework, substituting the mapped
 * local-dataset instance id for the requested one.
 *
 * @return the dataset, or null if the mapped instance does not exist
 * @throws DatasetManagementException if the underlying framework fails
 * @throws IOException on I/O failure while instantiating the dataset
 */
@Nullable
@Override
public <T extends Dataset> T getDataset(DatasetId datasetInstanceId, Map<String, String> arguments,
                                        @Nullable ClassLoader classLoader,
                                        DatasetClassLoaderProvider classLoaderProvider,
                                        @Nullable Iterable<? extends EntityId> owners, AccessType accessType)
  throws DatasetManagementException, IOException {
  return super.getDataset(getMappedDatasetInstance(datasetInstanceId), arguments, classLoader,
                          classLoaderProvider, owners, accessType);
}
// Use a name-mapped framework when this program runs inside a Workflow
// (workflowInfo != null); otherwise fall back to the plain framework.
DatasetFramework programDatasetFramework = workflowInfo == null ? datasetFramework : NameMappedDatasetFramework.createFromWorkflowProgramInfo(datasetFramework, workflowInfo, appSpec);