/**
 * Writes {@code writeAction}'s output into the named file of the given folder,
 * creating the file if it does not exist yet.
 *
 * @return the (created or updated) repository file
 */
private RepositoryFile createOrUpdateFile(final RepositoryFolder folder, final String filename, final Action<OutputStream> writeAction) {
    // Prefer updating an existing file; only create when nothing is there yet.
    final RepositoryFile existing = folder.getFile(filename);
    if (existing != null) {
        existing.writeFile(writeAction);
        return existing;
    }
    return folder.createFile(filename, writeAction);
}
}
/**
 * Creates the cache and resolves the tenant's configuration file, creating it
 * from the default template when it does not exist yet.
 */
public ConfigurationCache(InjectionManagerFactory injectionManagerFactory, TenantContext tenantContext, Repository repository) {
    _injectionManagerFactory = injectionManagerFactory;
    _tenantContext = tenantContext;
    _repository = repository;

    // Look up the default configuration file in the tenant's root folder;
    // lazily create it on first access.
    final RepositoryFolder tenantFolder = _tenantContext.getTenantRootFolder();
    RepositoryFile configurationFile = tenantFolder.getFile(DataCleanerConfigurationImpl.DEFAULT_FILENAME);
    if (configurationFile == null) {
        configurationFile = tenantFolder.createFile(DataCleanerConfigurationImpl.DEFAULT_FILENAME,
                new WriteDefaultTenantConfigurationAction());
    }
    _file = configurationFile;
}
final RepositoryFile resultFile = resultsFolder.createFile(filename, new Action<OutputStream>() { @Override public void run(OutputStream out) throws Exception {
folder.createFile(filename, writeCallback); } else { existingFile.writeFile(writeCallback);
/**
 * Accepts a multipart job-file upload and stores it in the tenant's job folder.
 *
 * @return a JSON map with status, file type, filename and repository path of the created file
 * @throws IllegalArgumentException when no multipart file was provided
 */
@RolesAllowed(SecurityRoles.JOB_EDITOR)
@RequestMapping(method = RequestMethod.POST, produces = MediaType.APPLICATION_JSON_VALUE, consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
@ResponseBody
public Map<String, String> uploadAnalysisJobToFolderJson(@PathVariable("tenant") final String tenant,
        @RequestParam("file") final MultipartFile file) {
    if (file == null) {
        throw new IllegalArgumentException(
                "No file upload provided. Please provide a multipart file using the 'file' HTTP parameter.");
    }

    // Stream the uploaded bytes into the repository file, always closing the input.
    final Action<OutputStream> writeCallback = out -> {
        final InputStream in = file.getInputStream();
        try {
            FileHelper.copy(in, out);
        } finally {
            FileHelper.safeClose(in);
        }
    };

    final TenantContext context = _contextFactory.getContext(tenant);
    final RepositoryFolder jobsFolder = context.getJobFolder();
    final String filename = file.getOriginalFilename();
    final RepositoryFile jobFile = jobsFolder.createFile(filename, writeCallback);
    logger.info("Created new job from uploaded file: {}", filename);

    final Map<String, String> result = new HashMap<>();
    result.put("status", STATUS_SUCCESS);
    result.put("file_type", jobFile.getType().toString());
    result.put("filename", jobFile.getName());
    result.put("repository_path", jobFile.getQualifiedPath());
    return result;
}
/**
 * Creates an execution logger bound to one execution, writing its log to an
 * XML file in the result folder named after the execution's result id.
 */
public ExecutionLoggerImpl(ExecutionLog execution, RepositoryFolder resultFolder, ApplicationEventPublisher eventPublisher) {
    _execution = execution;
    _resultFolder = resultFolder;
    _eventPublisher = eventPublisher;
    _erronuous = new AtomicBoolean(false);
    _executionLogWriter = new JaxbExecutionLogWriter();
    _log = new StringBuilder();

    // One log file per result id: reuse it when present, otherwise create it
    // with an initial snapshot of the execution state.
    final String logFilename = execution.getResultId() + FileFilters.ANALYSIS_EXECUTION_LOG_XML.getExtension();
    final RepositoryFile existingLogFile = resultFolder.getFile(logFilename);
    if (existingLogFile != null) {
        _logFile = existingLogFile;
    } else {
        _logFile = resultFolder.createFile(logFilename, out -> {
            _executionLogWriter.write(_execution, out);
        });
    }
}
/** * Store configuration to repository in JSON * * @param configuration */ public void store(final ComponentStoreHolder configuration) { logger.info("Store component with id: {}", configuration.getInstanceId()); writeLock.lock(); RepositoryFile configFile = componentsFolder.getFile(configuration.getInstanceId()); if (configFile != null) { // I must delete old file. configFile.delete(); } try { componentsFolder.createFile(configuration.getInstanceId(), new Action<OutputStream>() { @Override public void run(OutputStream fileOutput) throws Exception { String jsonConf = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString( configuration); InputStream jsonConfStream = IOUtils.toInputStream(jsonConf); FileHelper.copy(jsonConfStream, fileOutput); } }); } finally { writeLock.unlock(); } }
final RepositoryFile newJobFile = jobFolder.createFile(newJobFilename, new Action<OutputStream>() { @Override public void run(final OutputStream out) throws Exception {
/**
 * Renames a schedule file: copies the old file's contents to the new name
 * (creating or overwriting the target) and deletes the old file.
 */
private void renameSchedule(final RepositoryFile oldScheduleFile, final String nameInput, final RepositoryFolder jobFolder) {
    final String newScheduleFilename = nameInput + SchedulingServiceImpl.EXTENSION_SCHEDULE_XML;

    // Pipes the old schedule's bytes into whatever output stream we are given.
    final Action<OutputStream> writeScheduleAction = out -> {
        oldScheduleFile.readFile(in -> {
            FileHelper.copy(in, out);
        });
    };

    final RepositoryFile existingTarget = jobFolder.getFile(newScheduleFilename);
    if (existingTarget != null) {
        existingTarget.writeFile(writeScheduleAction);
    } else {
        jobFolder.createFile(newScheduleFilename, writeScheduleAction);
    }
    oldScheduleFile.delete();
}
}
jobFile = jobsFolder.createFile(filename, writeCallback); } else { jobFile = existingJob.getJobFile();
final RepositoryFile file = _resultFolder.createFile(resultFilename, new NoopAction<OutputStream>()); final Resource resource = new RepositoryFileResource(file); final AnalysisResultSaveHandler analysisResultSaveHandler = new AnalysisResultSaveHandler((AnalysisResult) result, resource); _resultFolder.createFile(resultFilename, new Action<OutputStream>() { @Override public void run(OutputStream out) throws Exception {
jobFolder.createFile(newFilename, writeAction);
/**
 * Copies a job's result file from the Hadoop cluster into the tenant's result
 * folder. On any failure (missing result, I/O error) the execution is marked
 * failed via the {@link ExecutionLogger}.
 */
private void getResultFileFromCluster(TenantContext tenantContext, ExecutionLogger executionLogger, String hadoopResultFileName, String jobName) {
    try {
        final HdfsResource resultsResource = new HdfsResource(
                HadoopUtils.getFileSystem().getUri().resolve(hadoopResultFileName).toString());
        // Fix: removed the redundant null check — the resource was just constructed.
        if (resultsResource.isExists()) {
            final RepositoryFolder repositoryResultFolder = tenantContext.getResultFolder();
            final String fileName = HadoopJobExecutionUtils.getUrlReadyJobName(jobName)
                    + FileFilters.ANALYSIS_RESULT_SER.getExtension();
            final Resource resourceFile = repositoryResultFolder.createFile(fileName, null).toResource();
            // Fix: parameterized logging, and the missing space after "to".
            logger.info("Writing the result to {}", resourceFile.getQualifiedPath());
            FileHelper.copy(resultsResource, resourceFile);
        } else {
            // Fix: corrected typo "occured" -> "occurred" in the failure message.
            final String message = "An error has occurred while running the job. The result was not persisted on Hadoop. Please check Hadoop and/or DataCleaner logs";
            executionLogger.setStatusFailed(null, null, new Exception(message));
        }
    } catch (Exception e) {
        executionLogger.setStatusFailed(null, null, e);
    }
}
}
newFile = resultFolder.createFile(newFilename, writeAction); } else { newFile.writeFile(writeAction);
/**
 * Persists a timeline definition as an XML file in the tenant's timelines
 * folder (or in the sub-folder of the timeline's dashboard group).
 *
 * @return an identifier carrying the timeline's name, repository path and group
 */
@Override
public TimelineIdentifier createTimelineDefinition(final TenantIdentifier tenant,
        final TimelineIdentifier timelineIdentifier, final TimelineDefinition timelineDefinition) {
    final String name = timelineIdentifier.getName();
    final DashboardGroup group = timelineIdentifier.getGroup();
    final RepositoryFolder timelinesFolder = _tenantContextFactory.getContext(tenant).getTimelineFolder();

    // Group-less timelines live directly in the top-level timelines folder.
    final RepositoryFolder folder = (group == null) ? timelinesFolder : timelinesFolder.getFolder(group.getName());

    final String fileName = name + FileFilters.ANALYSIS_TIMELINE_XML.getExtension();
    final RepositoryFile file = folder.createFile(fileName, new WriteTimelineAction(timelineDefinition));
    logger.info("Created timeline definition in file: {}", file);
    return new TimelineIdentifier(name, file.getQualifiedPath(), group);
}
/**
 * Persists (creates or overwrites) the XML schedule file for the given
 * schedule definition in the tenant's job folder.
 *
 * @return the same schedule definition, after initialization
 */
@Override
public ScheduleDefinition updateSchedule(final TenantIdentifier tenant, final ScheduleDefinition scheduleDefinition) {
    initializeSchedule(scheduleDefinition);

    final TenantContext context = _tenantContextFactory.getContext(tenant);
    final RepositoryFolder jobsFolder = context.getJobFolder();
    final String filename = scheduleDefinition.getJob().getName() + EXTENSION_SCHEDULE_XML;

    // Serializes the schedule as XML into whichever stream the repository hands us.
    final Action<OutputStream> writeAction = out -> {
        final JaxbScheduleWriter writer = new JaxbScheduleWriter();
        writer.write(scheduleDefinition, out);
    };

    final RepositoryFile file = jobsFolder.getFile(filename);
    if (file != null) {
        file.writeFile(writeAction);
    } else {
        jobsFolder.createFile(filename, writeAction);
    }
    return scheduleDefinition;
}
resultFolder.createFile(newFilename, writeAction); } else { newFile.writeFile(writeAction);