/**
 * Groups every module found on any branch of the legacy repository by its
 * branch-independent root directory, paired with the owning organizational unit.
 */
private Map<Partition, List<Module>> getModulesByRootDirAndSpace(final OrganizationalUnit ou,
                                                                 final Repository legacyRepository) {
    final Map<Partition, List<Module>> grouped = new HashMap<>();
    legacyRepository.getBranches()
                    .stream()
                    .map(moduleService::getAllModules)
                    .flatMap(Collection::stream)
                    .forEach(mod -> {
                        final String uri = pathUtil.normalizePath(mod.getRootPath()).toURI();
                        // Drop the scheme (and any user-info prefix) so the same directory
                        // on different branches collapses to one partition key.
                        final String branchlessPath = uri.replaceFirst("^[A-Za-z]+://([^@]+@)?", "");
                        grouped.computeIfAbsent(new Partition(branchlessPath, ou),
                                                unused -> new ArrayList<>())
                               .add(mod);
                    });
    return grouped;
}
/**
 * Re-reads the pom from disk and, if it differs from the supplied instance,
 * resolves and returns the owning module; otherwise returns empty.
 */
private Optional<Module> getModuleIfPomHasChanges(final Path pomPath, final POM pom) {
    final POM onDisk = load(pomPath);
    if (onDisk.equals(pom)) {
        // Nothing changed on disk — no module resolution needed.
        return Optional.empty();
    }
    return Optional.of(moduleService.resolveModule(pomPath));
}
/**
 * Indicates whether the given resource requires a project-level (full) rebuild
 * rather than an incremental one. Here that is the case only for a pom.xml;
 * subclasses may widen the condition.
 */
protected boolean isProjectResourceUpdateNeeded(Path resource) { return moduleService.isPom(resource); }
final Module module = moduleService.resolveModule(resource); final Package pkg = moduleService.resolvePackage(resource); if (module != null && pkg != null) { if (!projectBatchChanges.containsKey(module)) {
@Test
public void newProjectValidated() throws Exception {
    // Stub module creation in VALIDATED deployment mode.
    doReturn(module).when(moduleService)
                    .newModule(eq(repositoryRoot), eq(pom), eq(DeploymentMode.VALIDATED));

    final WorkspaceProject result = workspaceProjectService.newProject(ou, pom, DeploymentMode.VALIDATED);

    // The project must be well-formed and a creation event must have fired.
    assertProject(result);
    verify(newProjectEvent).fire(any());
}
public void updateResource(final Path resource) { //Do nothing if incremental builds are disabled if (!isIncrementalEnabled) { return; } logger.info("Incremental build request received for: " + resource.toURI() + " (updated)."); //The pom.xml cannot be processed incrementally if (isProjectResourceUpdateNeeded(resource)) { scheduleProjectResourceUpdate(resource); } else { //If resource is not within a Package it cannot be used for an incremental build final Package pkg = moduleService.resolvePackage(resource); if (pkg == null) { return; } schedulePackageResourceUpdate(resource); } }
final Module module = moduleService.resolveModule(resource); final Package pkg = moduleService.resolvePackage(resource); if (module != null && pkg != null) { if (!projectBatchChanges.containsKey(module)) {
@Test
public void newProjectForced() throws Exception {
    // Stub module creation in FORCED deployment mode.
    doReturn(module).when(moduleService)
                    .newModule(eq(repositoryRoot), eq(pom), eq(DeploymentMode.FORCED));

    final WorkspaceProject result = workspaceProjectService.newProject(ou, pom, DeploymentMode.FORCED);

    // The project must be well-formed and a creation event must have fired.
    assertProject(result);
    verify(newProjectEvent).fire(any());
}
public void updateResource(final Path resource) { //Do nothing if incremental builds are disabled if (!isIncrementalEnabled) { return; } logger.info("Incremental build request received for: " + resource.toURI() + " (updated)."); //The pom.xml cannot be processed incrementally if (isProjectResourceUpdateNeeded(resource)) { scheduleProjectResourceUpdate(resource); } else { //If resource is not within a Package it cannot be used for an incremental build final Package pkg = moduleService.resolvePackage(resource); if (pkg == null) { return; } schedulePackageResourceUpdate(resource); } }
/**
 * Re-reads the pom from disk and, if it differs from the supplied instance,
 * resolves and returns the owning module; otherwise returns empty.
 */
private Optional<Module> getModuleIfPomHasChanges(final Path pomPath, final POM pom) {
    final POM onDisk = load(pomPath);
    if (onDisk.equals(pom)) {
        // Nothing changed on disk — no module resolution needed.
        return Optional.empty();
    }
    return Optional.of(moduleService.resolveModule(pomPath));
}
/**
 * Groups every module found on any branch of the legacy repository by its
 * branch-independent root directory, paired with the owning organizational unit.
 */
private Map<Partition, List<Module>> getModulesByRootDirAndSpace(final OrganizationalUnit ou,
                                                                 final Repository legacyRepository) {
    final Map<Partition, List<Module>> grouped = new HashMap<>();
    legacyRepository.getBranches()
                    .stream()
                    .map(moduleService::getAllModules)
                    .flatMap(Collection::stream)
                    .forEach(mod -> {
                        final String uri = pathUtil.normalizePath(mod.getRootPath()).toURI();
                        // Drop the scheme (and any user-info prefix) so the same directory
                        // on different branches collapses to one partition key.
                        final String branchlessPath = uri.replaceFirst("^[A-Za-z]+://([^@]+@)?", "");
                        grouped.computeIfAbsent(new Partition(branchlessPath, ou),
                                                unused -> new ArrayList<>())
                               .add(mod);
                    });
    return grouped;
}
@Test
public void newProjectValidated() throws Exception {
    // Stub module creation in VALIDATED deployment mode.
    doReturn(module).when(moduleService)
                    .newModule(eq(repositoryRoot), eq(pom), eq(DeploymentMode.VALIDATED));

    final WorkspaceProject result = workspaceProjectService.newProject(ou, pom, DeploymentMode.VALIDATED);

    // The project must be well-formed and a creation event must have fired.
    assertProject(result);
    verify(newProjectEvent).fire(any());
}
/**
 * Indicates whether the given resource requires a project-level (full) rebuild
 * rather than an incremental one. Here that is the case only for a pom.xml;
 * subclasses may widen the condition.
 */
protected boolean isProjectResourceUpdateNeeded(Path resource) { return moduleService.isPom(resource); }
final Package pkg = moduleService.resolvePackage(resource); if (pkg == null) { return;
private void processBatchResourceChanges(final SessionInfo sessionInfo, final Map<Path, Collection<ResourceChange>> resourceChanges) { Module module; final Map<Module, Path> pendingNotifications = new HashMap<Module, Path>(); for (final Map.Entry<Path, Collection<ResourceChange>> pathCollectionEntry : resourceChanges.entrySet()) { //Only process Project resources module = projectService.resolveModule(pathCollectionEntry.getKey()); if (module == null) { continue; } if (!pendingNotifications.containsKey(module) && isObservableResource(pathCollectionEntry.getKey())) { pendingNotifications.put(module, pathCollectionEntry.getKey()); } else if (isPomFile(pathCollectionEntry.getKey())) { //if the pom.xml comes in the batch events set then use the pom.xml path for the cache invalidation event pendingNotifications.put(module, pathCollectionEntry.getKey()); } } for (final Map.Entry<Module, Path> pendingNotification : pendingNotifications.entrySet()) { invalidateDMOProjectCacheEvent.fire(new InvalidateDMOModuleCacheEvent(sessionInfo, pendingNotification.getKey(), pendingNotification.getValue())); } }
/**
 * Returns all modules on the named branch of the given repository within the
 * given space, or an empty (mutable) collection when the repository or branch
 * does not exist.
 */
@Override
public Collection<Module> getModules(final Space space,
                                     final String repositoryAlias,
                                     final String branchName) {
    checkNotNull("repositoryAlias", repositoryAlias);
    checkNotNull("branchName", branchName);

    final Repository repository = repositoryService.getRepositoryFromSpace(space, repositoryAlias);
    if (repository == null) {
        return new ArrayList<>();
    }
    // Resolve the branch and collect its modules; unknown branch -> empty result.
    return repository.getBranch(branchName)
                     .map(moduleService::getAllModules)
                     .orElseGet(ArrayList::new);
}
@Test
public void newProjectForced() throws Exception {
    // Stub module creation in FORCED deployment mode.
    doReturn(module).when(moduleService)
                    .newModule(eq(repositoryRoot), eq(pom), eq(DeploymentMode.FORCED));

    final WorkspaceProject result = workspaceProjectService.newProject(ou, pom, DeploymentMode.FORCED);

    // The project must be well-formed and a creation event must have fired.
    assertProject(result);
    verify(newProjectEvent).fire(any());
}
/**
 * Extends the base pom.xml check: a kmodule.xml change also requires a full
 * project rebuild rather than an incremental one.
 */
@Override protected boolean isProjectResourceUpdateNeeded(Path resource) { return moduleService.isPom(resource) || kModuleService.isKModule(resource); } }
final Package pkg = moduleService.resolvePackage(resource); if (pkg == null) { return;
private void processBatchResourceChanges(final SessionInfo sessionInfo, final Map<Path, Collection<ResourceChange>> resourceChanges) { Module module; final Map<Module, Path> pendingNotifications = new HashMap<Module, Path>(); for (final Map.Entry<Path, Collection<ResourceChange>> pathCollectionEntry : resourceChanges.entrySet()) { //Only process Project resources module = projectService.resolveModule(pathCollectionEntry.getKey()); if (module == null) { continue; } if (!pendingNotifications.containsKey(module) && isObservableResource(pathCollectionEntry.getKey())) { pendingNotifications.put(module, pathCollectionEntry.getKey()); } else if (isPomFile(pathCollectionEntry.getKey())) { //if the pom.xml comes in the batch events set then use the pom.xml path for the cache invalidation event pendingNotifications.put(module, pathCollectionEntry.getKey()); } } for (final Map.Entry<Module, Path> pendingNotification : pendingNotifications.entrySet()) { invalidateDMOProjectCacheEvent.fire(new InvalidateDMOModuleCacheEvent(sessionInfo, pendingNotification.getKey(), pendingNotification.getValue())); } }