@Timed(
    value = "service.index",
    description = "Timing information for the index service.",
    histogram = true)
public Void executeJob(Progress progress, String transactionId) {
  // Fail fast when no progress tracker was supplied.
  requireNonNull(progress);

  IndexActionGroup actionGroup =
      dataService.findOneById(INDEX_ACTION_GROUP, transactionId, IndexActionGroup.class);

  // Nothing to do when the transaction produced no index actions.
  if (actionGroup == null || actionGroup.getCount() <= 0) {
    progress.status(format("No index actions found for transaction id: [{0}]", transactionId));
    return null;
  }

  progress.setProgressMax(actionGroup.getCount());
  progress.status(format("Start indexing for transaction id: [{0}]", transactionId));
  performIndexActions(progress, transactionId);
  progress.status(format("Finished indexing for transaction id: [{0}]", transactionId));
  return null;
}
/** Reports progress and delegates package copying; a no-op for an empty list. */
private void copyPackages(List<Package> packages, CopyState state) {
  if (packages.isEmpty()) {
    return;
  }
  state.progress().status(contextMessageSource.getMessage("progress-copy-packages"));
  packageCopier.copy(packages, state);
}
/**
 * Reports progress and delegates entity type copying. Runs when either the explicit list or the
 * entity types collected from copied packages is non-empty.
 */
private void copyEntityTypes(List<EntityType> entityTypes, CopyState state) {
  boolean nothingToCopy = entityTypes.isEmpty() && state.entityTypesInPackages().isEmpty();
  if (nothingToCopy) {
    return;
  }
  state.progress().status(contextMessageSource.getMessage("progress-copy-entity-types"));
  entityTypeCopier.copy(entityTypes, state);
}
/** Package-private for testability */
long applyMappingToRepo(
    EntityMapping sourceMapping, Repository<Entity> targetRepo, Progress progress, int depth) {
  progress.status(format("Mapping source [%s]...", sourceMapping.getLabel()));

  final AtomicLong mappedCount = new AtomicLong();
  // Adding (rather than upserting) is only safe when the target repository starts out empty.
  final boolean addAllowed = targetRepo.count() == 0;

  dataService
      .getRepository(sourceMapping.getName())
      .forEachBatched(
          batch ->
              processBatch(sourceMapping, targetRepo, progress, mappedCount, addAllowed, batch, depth),
          MAPPING_BATCH_SIZE);

  progress.status(format("Mapped %s [%s] entities.", mappedCount, sourceMapping.getLabel()));
  return mappedCount.get();
}
/**
 * Creates a one-click import job for the file recorded on the given job execution.
 *
 * <p>The returned lambda runs later: it imports the file, records the resulting entity types and
 * their package on the job execution, and reports the created table labels via {@code progress}.
 */
@Override
public Job<List<EntityType>> createJob(OneClickImportJobExecution oneClickImportJobExecution) {
  // Capture the filename eagerly; the job itself executes asynchronously.
  final String filename = oneClickImportJobExecution.getFile();
  return (Progress progress) -> {
    List<EntityType> entityTypes = oneClickImportJob.getEntityType(progress, filename);
    oneClickImportJobExecution.setEntityTypes(createJsonResponse(entityTypes));
    // NOTE(review): assumes at least one entity type was imported — get(0) throws otherwise.
    String packageId = entityTypes.get(0).getPackage().getId();
    oneClickImportJobExecution.setPackage(packageId);
    // Comma-joined labels of all created tables, for the progress message.
    String labels =
        entityTypes.stream().map(EntityType::getLabel).collect(Collectors.joining(","));
    progress.status(format("Created table(s): %s", labels));
    return entityTypes;
  };
}
// Closes an enclosing anonymous class whose declaration starts outside this view.
};
/**
 * Applies the target's mappings to the repository and returns the number of mapped entities.
 *
 * <p>When the target entity type references itself, the mappings are applied a second time so
 * that self-references (which cannot resolve on the first pass) are set correctly.
 */
private long applyMappingsInternal(
    MappingTarget mappingTarget, Repository<Entity> targetRepo, Progress progress, int depth) {
  progress.status("Applying mappings to repository [" + targetRepo.getEntityType().getId() + "]");

  long mappedEntityCount = applyMappingsToRepositories(mappingTarget, targetRepo, progress, depth);

  if (hasSelfReferences(targetRepo.getEntityType())) {
    progress.status(
        "Self reference found, applying the mapping for a second time to set references");
    // Second pass only fixes references; the first pass's count is the one reported.
    applyMappingsToRepositories(mappingTarget, targetRepo, progress, depth);
  }

  progress.status(
      "Done applying mappings to repository [" + targetRepo.getEntityType().getId() + "]");
  return mappedEntityCount;
}
/**
 * Deletes the given resources, partitioning them into packages and entity types first.
 *
 * <p>Entity types are deleted before packages so that package deletion does not encounter
 * still-referenced contents.
 *
 * @throws UnexpectedEnumException for a resource type other than package or entity type
 */
@Override
public Void delete(List<ResourceIdentifier> resources, Progress progress) {
  progress.status(contextMessageSource.getMessage("progress-delete-started"));

  // LinkedHashSet: de-duplicates while keeping the caller's ordering.
  Set<Object> packageIds = new LinkedHashSet<>();
  Set<Object> entityTypeIds = new LinkedHashSet<>();
  for (ResourceIdentifier resource : resources) {
    switch (resource.getType()) {
      case PACKAGE:
        packageIds.add(resource.getId());
        break;
      case ENTITY_TYPE:
      case ENTITY_TYPE_ABSTRACT:
        entityTypeIds.add(resource.getId());
        break;
      default:
        throw new UnexpectedEnumException(resource.getType());
    }
  }

  if (!entityTypeIds.isEmpty()) {
    deleteEntityTypes(entityTypeIds);
  }
  if (!packageIds.isEmpty()) {
    deletePackages(packageIds);
  }

  progress.status(contextMessageSource.getMessage("progress-delete-success"));
  return null;
}
findExtensionFromPossibilities(filename, newHashSet("csv", "xlsx", "zip", "xls")); progress.status("Preparing import"); List<DataCollection> dataCollections = newArrayList(); if (fileExtension == null) { dataCollections.forEach( dataCollection -> { progress.status( "Importing [" + dataCollection.getName() + "] into package [" + packageName + "]"); entityTypes.add(entityService.createEntityType(dataCollection, packageName));
importService.doImport( repositoryCollection, MetadataAction.UPSERT, DataAction.ADD_UPDATE_EXISTING, null); progress.status("Download and import from Amazon Bucket done."); progress.progress( 3,
() -> { long maxCount = dataService.count(inputRepositoryName, new QueryImpl<>()); progress.status( "Matching " + maxCount
/**
 * Exports the given resources as an EMX file and returns its registered file metadata.
 *
 * @throws DownloadFailedException wrapping any runtime failure during collection or export
 */
public FileMeta download(
    List<ResourceIdentifier> resourceIdentifiers, String filename, Progress progress) {
  try {
    ResourceCollection resources = resourceCollector.get(resourceIdentifiers);
    File emxFile = fileStore.getFile(filename);

    // Register the file metadata before writing the export into it.
    FileMeta fileMeta = createFileMeta(emxFile);
    dataService.add(FileMetaMetaData.FILE_META, fileMeta);

    emxExportService.export(
        resources.getEntityTypes(), resources.getPackages(), emxFile.toPath(), progress);

    progress.increment(1);
    progress.status(getMessage("progress-download-success", "Finished preparing download."));
    return fileMeta;
  } catch (RuntimeException exception) {
    // Preserve the cause so the failure is diagnosable from the job execution.
    throw new DownloadFailedException(exception);
  }
}
/** Copies all packages and entity types in the collection, reporting progress throughout. */
private void copyResources(ResourceCollection resourceCollection, CopyState state) {
  Progress copyProgress = state.progress();
  copyProgress.setProgressMax(calculateMaxProgress(resourceCollection));
  copyProgress.progress(0, contextMessageSource.getMessage("progress-copy-started"));

  // Packages first: entity types may live inside the copied packages.
  copyPackages(resourceCollection.getPackages(), state);
  copyEntityTypes(resourceCollection.getEntityTypes(), state);

  copyProgress.status(contextMessageSource.getMessage("progress-copy-success"));
}
/**
 * Imports a csv file defined in the fileIngest entity
 *
 * @see FileIngestJobExecutionMetaData
 */
public FileMeta ingest(
    String entityTypeId, String url, String loader, String jobExecutionID, Progress progress) {
  // Only the CSV loader is supported; "CSV".equals(...) is also null-safe.
  if (!"CSV".equals(loader)) {
    throw new FileIngestException("Unknown loader '" + loader + "'");
  }

  progress.setProgressMax(2);
  progress.progress(0, "Downloading url '" + url + "'");
  File downloadedFile = fileStoreDownload.downloadFile(url, jobExecutionID, entityTypeId + ".csv");

  progress.progress(1, "Importing...");
  FileRepositoryCollection repositoryCollection =
      fileRepositoryCollectionFactory.createFileRepositoryCollection(downloadedFile);
  ImportService importService =
      importServiceFactory.getImportService(downloadedFile, repositoryCollection);
  EntityImportReport importReport =
      importService.doImport(repositoryCollection, MetadataAction.UPSERT, ADD_UPDATE_EXISTING, null);
  progress.status("Ingestion of url '" + url + "' done.");

  // The report may have no entry for this entity type; treat that as zero imports.
  Integer importedEntities = importReport.getNrImportedEntitiesMap().get(entityTypeId);
  int importedCount = importedEntities == null ? 0 : importedEntities;
  progress.progress(2, "Successfully imported " + importedCount + " " + entityTypeId + " entities.");

  // Attach the ingested file to the job execution and persist its metadata.
  FileMeta fileMeta = createFileMeta(jobExecutionID, downloadedFile);
  FileIngestJobExecution jobExecution = (FileIngestJobExecution) progress.getJobExecution();
  jobExecution.setFile(fileMeta);
  dataService.add(FILE_META, fileMeta);
  return fileMeta;
}