/**
 * Executes the index job for a single transaction: looks up the {@link IndexActionGroup}
 * registered under the transaction id and, if it contains pending actions, performs them
 * while reporting progress.
 *
 * @param progress progress reporter for this job execution; must not be null
 * @param transactionId id of the transaction whose index actions should be processed
 * @return always {@code null} (Void job contract)
 */
@Timed(
    value = "service.index",
    description = "Timing information for the index service.",
    histogram = true)
public Void executeJob(Progress progress, String transactionId) {
  requireNonNull(progress);
  IndexActionGroup actionGroup =
      dataService.findOneById(INDEX_ACTION_GROUP, transactionId, IndexActionGroup.class);
  // Guard clause: nothing registered for this transaction, or an empty group.
  if (actionGroup == null || actionGroup.getCount() <= 0) {
    progress.status(format("No index actions found for transaction id: [{0}]", transactionId));
    return null;
  }
  progress.setProgressMax(actionGroup.getCount());
  progress.status(format("Start indexing for transaction id: [{0}]", transactionId));
  performIndexActions(progress, transactionId);
  progress.status(format("Finished indexing for transaction id: [{0}]", transactionId));
  return null;
}
/**
 * Applies the mappings of a mapping project to a target repository, creating or updating the
 * target entity type as needed.
 *
 * <p>NOTE(review): only the first mapping target of the project is used; presumably projects
 * carry exactly one target — confirm against callers.
 *
 * @param mappingProjectId id of the mapping project to apply
 * @param entityTypeId id of the target entity type / repository
 * @param addSourceAttribute whether a source attribute is added to the target metadata
 * @param packageId package in which the target entity type is created
 * @param label label for the target entity type
 * @param progress progress reporter for this job execution
 * @return result of applying the mappings (as produced by {@code applyMappingsInternal})
 */
@Override
@Transactional
public long applyMappings(
    String mappingProjectId,
    String entityTypeId,
    Boolean addSourceAttribute,
    String packageId,
    String label,
    Progress progress) {
  MappingProject project = getMappingProject(mappingProjectId);
  MappingTarget target = project.getMappingTargets().get(0);
  progress.setProgressMax(calculateMaxProgress(target));
  progress.progress(0, format("Checking target repository [%s]...", entityTypeId));
  EntityType metadata =
      createTargetMetadata(target, entityTypeId, packageId, label, addSourceAttribute);
  Repository<Entity> repository = getTargetRepository(entityTypeId, metadata);
  return applyMappingsInternal(target, repository, progress, project.getDepth());
}
FileMeta fileMeta; try { progress.setProgressMax(3); progress.progress(0, "Connection to Amazon Bucket with accessKey '" + accessKey + "'"); AmazonS3 client = amazonBucketClient.getClient(accessKey, secretKey, region);
+ resultRepositoryName); progress.setProgressMax((int) maxCount);
/**
 * Copies all packages and entity types in the given resource collection, reporting progress
 * through the state's {@link Progress}.
 *
 * @param resourceCollection the packages and entity types to copy
 * @param state shared copy state, also carrying the progress reporter
 */
private void copyResources(ResourceCollection resourceCollection, CopyState state) {
  Progress copyProgress = state.progress();
  copyProgress.setProgressMax(calculateMaxProgress(resourceCollection));
  copyProgress.progress(0, contextMessageSource.getMessage("progress-copy-started"));
  copyPackages(resourceCollection.getPackages(), state);
  copyEntityTypes(resourceCollection.getEntityTypes(), state);
  copyProgress.status(contextMessageSource.getMessage("progress-copy-success"));
}
/**
 * Imports a CSV file defined in the fileIngest entity: downloads the file from the given url,
 * imports it into the target entity type, and attaches the resulting file metadata to the
 * running job execution.
 *
 * @param entityTypeId id of the entity type the file is imported into
 * @param url location to download the CSV file from
 * @param loader loader type; only {@code "CSV"} is supported
 * @param jobExecutionID id of the job execution, used for the download and the file metadata
 * @param progress progress reporter; its job execution must be a {@link FileIngestJobExecution}
 * @return metadata of the downloaded file
 * @throws FileIngestException if the loader is not {@code "CSV"}
 * @see FileIngestJobExecutionMetaData
 */
public FileMeta ingest(
    String entityTypeId, String url, String loader, String jobExecutionID, Progress progress) {
  if (!"CSV".equals(loader)) {
    throw new FileIngestException("Unknown loader '" + loader + "'");
  }

  progress.setProgressMax(2);
  progress.progress(0, "Downloading url '" + url + "'");
  File downloaded = fileStoreDownload.downloadFile(url, jobExecutionID, entityTypeId + ".csv");

  progress.progress(1, "Importing...");
  FileRepositoryCollection collection =
      fileRepositoryCollectionFactory.createFileRepositoryCollection(downloaded);
  ImportService importer = importServiceFactory.getImportService(downloaded, collection);
  EntityImportReport report =
      importer.doImport(collection, MetadataAction.UPSERT, ADD_UPDATE_EXISTING, null);
  progress.status("Ingestion of url '" + url + "' done.");

  // Missing entry in the report means zero entities were imported for this entity type.
  int imported = report.getNrImportedEntitiesMap().getOrDefault(entityTypeId, 0);
  progress.progress(2, "Successfully imported " + imported + " " + entityTypeId + " entities.");

  FileMeta meta = createFileMeta(jobExecutionID, downloaded);
  FileIngestJobExecution jobExecution = (FileIngestJobExecution) progress.getJobExecution();
  jobExecution.setFile(meta);
  dataService.add(FILE_META, meta);
  return meta;
}