@Override
public AnalysisJobMetadata eval(InputStream in) {
    // Parse only the metadata header of the job XML; no full job is built.
    return new JaxbJobReader(configuration).readMetadata(in);
} });
/**
 * Builds an {@link AnalysisJobBuilder} from a job XML stream using an explicit
 * source-column mapping and no variable overrides.
 *
 * @throws NoSuchDatastoreException if the job references an unknown datastore
 */
public AnalysisJobBuilder create(final InputStream inputStream, final SourceColumnMapping sourceColumnMapping)
        throws NoSuchDatastoreException {
    // Delegate to the fully-parameterized overload; null means "no overrides".
    return create(inputStream, sourceColumnMapping, null);
}
/** Reads only the metadata header from the job XML stream. */
@Override
public AnalysisJobMetadata readMetadata(final InputStream inputStream) {
    // Unmarshal the XML into the object model, then extract its metadata.
    return readMetadata(unmarshallJob(inputStream));
}
/**
 * Builds an {@link AnalysisJobBuilder} from a job XML stream with variable
 * overrides and no explicit source-column mapping.
 *
 * @throws NoSuchDatastoreException if the job references an unknown datastore
 */
public AnalysisJobBuilder create(final InputStream inputStream, final Map<String, String> variableOverrides)
        throws NoSuchDatastoreException {
    final JobType unmarshalled = unmarshallJob(inputStream);
    // No mapping supplied — the overload resolves source columns itself.
    return create(unmarshalled, null, variableOverrides);
}
@Override
protected AnalysisJob createWrappedAnalysisJob() {
    // Read the stored job XML and materialize it as an AnalysisJob.
    return analysisJobResource.read(in -> {
        final JaxbJobReader jaxbReader = new JaxbJobReader(getDataCleanerConfiguration());
        // NOTE(review): the boolean presumably relaxes validation of the built
        // job — confirm against AnalysisJobBuilder.toAnalysisJob(boolean).
        return jaxbReader.create(in).toAnalysisJob(false);
    });
}
// NOTE(review): fragment — the enclosing method, the 'file' variable and the
// caught exception 'e' are all defined outside this excerpt, and the
// showConfirmDialog(...) argument list is truncated here.
// Re-reads the job file after a failure, asking the user (via a confirm
// dialog) whether to open the job as a template instead.
final JaxbJobReader reader = new JaxbJobReader(_configuration); try { final AnalysisJobBuilder ajb = reader.create(file); final AnalysisJobMetadata metadata = reader.readMetadata(file); final int result = JOptionPane .showConfirmDialog(null, e.getMessage() + "\n\nDo you wish to open this job as a template?",
/**
 * Builds an {@link AnalysisJobBuilder} from a job XML stream, auto-mapping the
 * job's declared source columns onto the given datastore.
 */
public AnalysisJobBuilder create(final InputStream inputStream, final Map<String, String> variableOverrides,
        final Datastore datastore) {
    final JobType unmarshalled = unmarshallJob(inputStream);
    // Seed the mapping from the job's own metadata, then map onto the datastore.
    final SourceColumnMapping mapping = new SourceColumnMapping(readMetadata(unmarshalled));
    mapping.autoMap(datastore);
    return create(unmarshalled, mapping, variableOverrides);
}
/**
 * Core factory: builds an {@link AnalysisJobBuilder} from an unmarshalled job,
 * an optional (but, if given, fully satisfied) source-column mapping and
 * optional variable overrides.
 *
 * @throws IllegalArgumentException if the job is null or the mapping is incomplete
 * @throws NoSuchDatastoreException if the job references an unknown datastore
 */
public AnalysisJobBuilder create(final JobType job, final SourceColumnMapping sourceColumnMapping,
        final Map<String, String> variableOverrides) throws NoSuchDatastoreException {
    // Guard clauses: a job is mandatory; a supplied mapping must be complete.
    if (job == null) {
        throw new IllegalArgumentException("Job cannot be null");
    }
    if (sourceColumnMapping != null && !sourceColumnMapping.isSatisfied()) {
        throw new IllegalArgumentException("Source column mapping is not satisfied!");
    }

    final Map<String, String> jobVariables = getVariables(job);
    overrideVariables(jobVariables, variableOverrides);

    // Log the job's descriptive metadata, when present.
    final JobMetadataType jobMetadata = job.getJobMetadata();
    if (jobMetadata != null) {
        logger.info("Job name: {}", jobMetadata.getJobName());
        logger.info("Job version: {}", jobMetadata.getJobVersion());
        logger.info("Job description: {}", jobMetadata.getJobDescription());
        logger.info("Author: {}", jobMetadata.getAuthor());
        logger.info("Created date: {}", jobMetadata.getCreatedDate());
        logger.info("Updated date: {}", jobMetadata.getUpdatedDate());
        logger.info("Job metadata properties: {}", getMetadataProperties(jobMetadata));
    }

    final AnalysisJobBuilder jobBuilder = new AnalysisJobBuilder(_configuration);
    try {
        return create(job, sourceColumnMapping, jobMetadata, jobVariables, variableOverrides, jobBuilder);
    } catch (final RuntimeException e) {
        // Don't leak the builder's resources if construction fails.
        FileHelper.safeClose(jobBuilder);
        throw e;
    }
}
// NOTE(review): fragment — this excerpt is garbled: the ImmutableAnalysisJobMetadata
// constructor result is discarded, 'immutableAnalysisJobMetadata' is referenced but
// not defined here, and the if/else structure is cut mid-way. In the full source
// these almost certainly belong to an assignment and surrounding branches; do not
// read this excerpt as complete logic.
// Intent (as far as visible): auto-map the job's source column paths onto the
// datastore, record the job metadata on the builder, otherwise read source columns
// and configure components from the mapping.
final List<String> sourceColumnPaths = getSourceColumnPaths(job); sourceColumnMapping = new SourceColumnMapping(sourceColumnPaths); sourceColumnMapping.autoMap(datastore); new ImmutableAnalysisJobMetadata(metadata.getJobName(), metadata.getJobVersion(), metadata.getJobDescription(), metadata.getAuthor(), convertToDate(metadata.getCreatedDate()), convertToDate(metadata.getUpdatedDate()), datastore.getName(), getSourceColumnPaths(job), getSourceColumnTypes(job), variables, getMetadataProperties(metadata)); analysisJobBuilder.setAnalysisJobMetadata(immutableAnalysisJobMetadata); } else { readSourceColumns(sourceColumnMapping, analysisJobBuilder, source); configureComponents(job, variables, variableOverrides, analysisJobBuilder, inputColumns, sourceColumnMapping);
// NOTE(review): fragment — the method body continues beyond this excerpt.
// Builds an AnalysisJob from this context's stored job definition, applying
// the given variable overrides via a JaxbJobReader.
public AnalysisJob readJob(final Map<String, String> variableOverrides) { final JaxbJobReader jobReader = new JaxbJobReader(_configuration);
// Callback: extracts only the metadata header from the job XML stream.
// NOTE(review): 'jobReader' is captured from the enclosing scope, which is
// outside this excerpt.
@Override public AnalysisJobMetadata eval(InputStream in) { return jobReader.readMetadata(in); } });
// NOTE(review): fragment — the opening 'if' of this else-branch and the
// surrounding method are outside this excerpt.
// Else-branch: recompute the job variables (with overrides applied) and
// recursively configure the components of an output data stream job.
outputStreamColumnPathName = getOutputStreamColumnPath(findSourceColumn.getName(), componentType, componentBuilder, i); } else { final Map<String, String> variables = getVariables(job); overrideVariables(variables, variableOverrides); configureComponents(job, variables, variableOverrides, outputDataStreamJobBuilder, inputColumns, sourceColumnMapping);
// NOTE(review): fragment — the 'throw' below appears unconditional here, which
// would make the following line unreachable; its guard (presumably a null check
// on a transformer output column id) has evidently been elided from this excerpt.
// Intent (as far as visible): look up the builder for a not-yet-configured
// transformer, wire its input columns, and register its output columns by id.
final ComponentBuilder componentBuilder = componentBuilders.get(unconfiguredTransformerKey); applyInputColumns(input, inputColumns, componentBuilder); throw new IllegalStateException("Transformer output column id cannot be null"); registerInputColumn(inputColumns, id, o2);
private void initializeComponentBuilder(final Map<String, String> variables, final StringConverter stringConverter, final Map<ComponentType, ComponentBuilder> componentBuilders, final ComponentType componentType, final ComponentBuilder componentBuilder, final Map<String, InputColumn<?>> inputColumns, final List<ColumnType> columnsTypes) { // shared setting of properties (except for input columns) componentBuilder.setName(componentType.getName()); applyProperties(componentBuilder, componentType.getProperties(), componentType.getMetadataProperties(), stringConverter, variables, inputColumns, columnsTypes); componentBuilders.put(componentType, componentBuilder); }
public AnalysisJobBuilder getAnalysisJobBuilder() { if (_analysisJobBuilder == null) { // set HDFS as default scheme to avoid file resources SystemProperties.setIfNotSpecified(SystemProperties.DEFAULT_RESOURCE_SCHEME, HdfsResource.SCHEME_HDFS); final DataCleanerConfiguration configuration = getConfiguration(); final JaxbJobReader jobReader = new JaxbJobReader(configuration); _analysisJobBuilder = jobReader.create(createInputStream(_analysisJobXml), _customProperties); } applyComponentIndexForKeyLookups(_analysisJobBuilder, new AtomicInteger(0)); return _analysisJobBuilder; }
/**
 * Builds an {@link AnalysisJobBuilder} from a job XML stream with an explicit
 * source-column mapping and variable overrides.
 *
 * @throws NoSuchDatastoreException if the job references an unknown datastore
 */
public AnalysisJobBuilder create(final InputStream inputStream, final SourceColumnMapping sourceColumnMapping,
        final Map<String, String> variableOverrides) throws NoSuchDatastoreException {
    final JobType unmarshalled = unmarshallJob(inputStream);
    return create(unmarshalled, sourceColumnMapping, variableOverrides);
}
/**
 * Reads only the metadata header of a job XML file.
 *
 * @param file the job file to read
 * @return the job's metadata
 * @throws IllegalArgumentException if the file cannot be found or read
 */
public AnalysisJobMetadata readMetadata(final File file) {
    // try-with-resources replaces the manual safeClose() bookkeeping; the
    // stream is closed on every path. A failure on close() is now surfaced
    // (wrapped) instead of being silently swallowed by safeClose().
    try (InputStream inputStream = new BufferedInputStream(new FileInputStream(file))) {
        return readMetadata(inputStream);
    } catch (final java.io.IOException e) {
        // Covers FileNotFoundException (as before) plus read/close failures.
        throw new IllegalArgumentException(e);
    }
}
public AnalysisJob readJob(final HttpServletRequest request) throws IOException { final JaxbJobReader reader = new JaxbJobReader(_configuration); final String jobDefinition = request.getParameter(HttpClusterManager.HTTP_PARAM_JOB_DEF); final InputStream inputStream; if (jobDefinition == null) { // backwards compatibility node inputStream = request.getInputStream(); } else { inputStream = new ByteArrayInputStream(jobDefinition.getBytes()); } try { final AnalysisJobBuilder jobBuilder = reader.create(inputStream); if (_jobInterceptor != null) { _jobInterceptor.intercept(jobBuilder, _configuration); } return jobBuilder.toAnalysisJob(); } finally { FileHelper.safeClose(inputStream); } }
@Override
public AnalysisJobMetadata eval(InputStream in) {
    final JaxbJobReader metadataReader = new JaxbJobReader(configuration);
    // Only the metadata header is parsed from the stream.
    return metadataReader.readMetadata(in);
} });
/**
 * Builds an {@link AnalysisJobBuilder} from an already-unmarshalled job, with
 * no explicit source-column mapping and no variable overrides.
 */
public AnalysisJobBuilder create(final JobType job) {
    return create(job, null, null);
}