/**
 * Requests the cluster-wide job overview and converts each {@code JobDetails} entry
 * into the lighter-weight {@link JobStatusMessage} representation.
 *
 * @return future collection of status messages for all jobs reported by the overview
 */
@Override
public CompletableFuture<Collection<JobStatusMessage>> listJobs() {
    return sendRequest(JobsOverviewHeaders.getInstance())
        .thenApply(
            jobsOverview -> {
                // Keep only the fields callers of listJobs() care about.
                return jobsOverview.getJobs().stream()
                    .map(
                        jobDetail ->
                            new JobStatusMessage(
                                jobDetail.getJobId(),
                                jobDetail.getJobName(),
                                jobDetail.getStatus(),
                                jobDetail.getStartTime()))
                    .collect(Collectors.toList());
            });
}
/**
 * Test stub: answers every job-details request with a fixed overview containing
 * one RUNNING and one FINISHED job, so callers see both job categories.
 */
@Override
public MultipleJobsDetails process(RequestJobDetails message) {
    final JobDetails runningJob =
        new JobDetails(new JobID(), "job1", 0, 0, 0, JobStatus.RUNNING, 0, new int[9], 0);
    final JobDetails finishedJob =
        new JobDetails(new JobID(), "job2", 0, 0, 0, JobStatus.FINISHED, 0, new int[9], 0);
    return new MultipleJobsDetails(Arrays.asList(runningJob, finishedJob));
}
}
/**
 * Rebuilds the combined job overview file served by the HistoryServer WebFrontend,
 * replicating the JSON response the JobsOverviewHandler would give for both running
 * and finished jobs.
 *
 * <p>Every job archive ships its own joboverview.json with the same structure, but —
 * since jobs are archived individually — each one lists only a single finished job.
 * This method merges all of those per-job overviews into one file.
 *
 * @param webOverviewDir directory holding the per-job overview files
 * @param webDir target web directory the merged overview is written into
 */
private static void updateJobOverview(File webOverviewDir, File webDir) {
    try (JsonGenerator generator =
            jacksonFactory.createGenerator(
                HistoryServer.createOrGetFile(webDir, JobsOverviewHeaders.URL))) {
        final File[] overviewFiles = webOverviewDir.listFiles();
        // listFiles() returns null if the directory does not exist (or is unreadable);
        // in that case nothing is written.
        if (overviewFiles != null) {
            final Collection<JobDetails> mergedJobs = new ArrayList<>(overviewFiles.length);
            for (File overviewFile : overviewFiles) {
                final MultipleJobsDetails singleOverview =
                    mapper.readValue(overviewFile, MultipleJobsDetails.class);
                mergedJobs.addAll(singleOverview.getJobs());
            }
            mapper.writeValue(generator, new MultipleJobsDetails(mergedJobs));
        }
    } catch (IOException ioe) {
        // Best effort: the overview simply stays stale until the next update attempt.
        LOG.error("Failed to update job overview.", ioe);
    }
}
}
/**
 * Blocks until the cluster overview reports at least one connected task manager, zero
 * available slots, and exactly two slots in total (i.e. all slots of this test setup are
 * occupied), retrying every 50 ms until the given timeout elapses.
 *
 * @param dispatcherGateway gateway used to poll the cluster overview
 * @param timeout maximum time to wait; also used as the per-request timeout
 * @throws ExecutionException if polling ultimately fails or the deadline is exceeded
 * @throws InterruptedException if the waiting thread is interrupted
 */
private void waitUntilAllSlotsAreUsed(DispatcherGateway dispatcherGateway, Time timeout) throws ExecutionException, InterruptedException {
    FutureUtils.retrySuccessfulWithDelay(
        // Re-request a fresh overview on every retry attempt.
        () -> dispatcherGateway.requestClusterOverview(timeout),
        // Retry interval.
        Time.milliseconds(50L),
        // Overall deadline derived from the caller-supplied timeout.
        Deadline.fromNow(Duration.ofMillis(timeout.toMilliseconds())),
        // Success predicate: the hard-coded "== 2" matches this test's slot configuration.
        clusterOverview -> clusterOverview.getNumTaskManagersConnected() >= 1 && clusterOverview.getNumSlotsAvailable() == 0 && clusterOverview.getNumSlotsTotal() == 2,
        TestingUtils.defaultScheduledExecutor())
        .get();
}
/**
 * Creates a merged overview: each per-status job-ID list is built by merging the
 * corresponding lists of {@code first} and {@code second} via the {@code combine} helper.
 *
 * @param first first overview to merge
 * @param second second overview to merge
 */
public JobsWithIDsOverview(JobsWithIDsOverview first, JobsWithIDsOverview second) {
    this.jobsRunningOrPending = combine(first.getJobsRunningOrPending(), second.getJobsRunningOrPending());
    this.jobsFinished = combine(first.getJobsFinished(), second.getJobsFinished());
    this.jobsCancelled = combine(first.getJobsCancelled(), second.getJobsCancelled());
    this.jobsFailed = combine(first.getJobsFailed(), second.getJobsFailed());
}
MultipleJobsDetails overview = mapper.readValue(response, MultipleJobsDetails.class); Assert.assertEquals(numJobs + 1, overview.getJobs().size()); } finally { hs.stop();
/** Folds the cluster resource counts into the parent hash using the standard 31-multiplier scheme. */
@Override
public int hashCode() {
    int hash = super.hashCode();
    for (int component : new int[] {numTaskManagersConnected, numSlotsTotal, numSlotsAvailable}) {
        hash = 31 * hash + component;
    }
    return hash;
}
/**
 * Merges this overview with the given one into a new instance; neither input is modified.
 *
 * @param jobsOverview overview to merge with this one
 * @return new overview combining both
 */
public JobsOverview combine(JobsOverview jobsOverview) {
    final JobsOverview merged = new JobsOverview(this, jobsOverview);
    return merged;
}
/**
 * Creates a merged overview whose job-status entries are built from both inputs
 * via the {@code combine} helper.
 *
 * @param first first overview to merge
 * @param second second overview to merge
 */
public JobIdsWithStatusOverview(JobIdsWithStatusOverview first, JobIdsWithStatusOverview second) {
    this.jobsWithStatus = combine(first.getJobsWithStatus(), second.getJobsWithStatus());
}
/**
 * Blocks until the cluster overview reports at least {@code numberOfTaskManagers}
 * connected task managers, polling every 50 ms until {@code timeLeft} elapses.
 *
 * @param numberOfTaskManagers minimum number of connected task managers to wait for
 * @param dispatcherGateway gateway used to poll the cluster overview
 * @param timeLeft maximum time to wait; also used as the per-request timeout
 * @throws ExecutionException if polling ultimately fails or the deadline is exceeded
 * @throws InterruptedException if the waiting thread is interrupted
 */
private void waitForTaskManagers(int numberOfTaskManagers, DispatcherGateway dispatcherGateway, FiniteDuration timeLeft) throws ExecutionException, InterruptedException {
    // Fix: the previous version created a fresh single-thread scheduled executor on every
    // call and never shut it down, leaking a non-daemon thread per invocation.
    final java.util.concurrent.ScheduledExecutorService retryExecutor =
        Executors.newSingleThreadScheduledExecutor();
    try {
        FutureUtils.retrySuccessfulWithDelay(
            // Re-request a fresh overview on every retry attempt.
            () -> dispatcherGateway.requestClusterOverview(Time.milliseconds(timeLeft.toMillis())),
            // Retry interval.
            Time.milliseconds(50L),
            // Overall deadline derived from the remaining time budget.
            org.apache.flink.api.common.time.Deadline.fromNow(Duration.ofMillis(timeLeft.toMillis())),
            clusterOverview -> clusterOverview.getNumTaskManagersConnected() >= numberOfTaskManagers,
            new ScheduledExecutorServiceAdapter(retryExecutor))
            .get();
    } finally {
        // All retries are finished (or failed) once get() returns, so shutting down is safe.
        retryExecutor.shutdown();
    }
}
/**
 * Lists the currently running and finished jobs on the cluster.
 *
 * @return future collection of running and finished jobs
 * @throws Exception if no connection to the cluster could be established
 */
public CompletableFuture<Collection<JobStatusMessage>> listJobs() throws Exception {
    final ActorGateway jobManager = getJobManagerGateway();
    // NOTE(review): the boolean flags on RequestJobDetails are not documented here — verify
    // their meaning against the message class before changing them.
    Future<Object> askFuture = jobManager.ask(new RequestJobDetails(true, false), timeout);
    CompletableFuture<Object> javaFuture = FutureUtils.toJava(askFuture);
    return javaFuture.thenApply((responseMessage) -> {
        // Guard clause: anything but a MultipleJobsDetails answer is a protocol violation.
        if (!(responseMessage instanceof MultipleJobsDetails)) {
            throw new CompletionException(
                new IllegalStateException("Unknown JobManager response of type " + responseMessage.getClass()));
        }
        final Collection<JobDetails> jobDetails = ((MultipleJobsDetails) responseMessage).getJobs();
        final Collection<JobStatusMessage> statusMessages = new ArrayList<>(jobDetails.size());
        for (JobDetails detail : jobDetails) {
            statusMessages.add(new JobStatusMessage(
                detail.getJobId(),
                detail.getJobName(),
                detail.getStatus(),
                detail.getStartTime()));
        }
        return statusMessages;
    });
}
/**
 * Test stub: immediately completes with a canned overview of one RUNNING and one
 * FINISHED job, ignoring the request and the gateway.
 */
@Override
protected CompletableFuture<MultipleJobsDetails> handleRequest(@Nonnull HandlerRequest<EmptyRequestBody, EmptyMessageParameters> request, @Nonnull DispatcherGateway gateway) throws RestHandlerException {
    final JobDetails[] stubJobs = {
        new JobDetails(new JobID(), "job1", 0, 0, 0, JobStatus.RUNNING, 0, new int[9], 0),
        new JobDetails(new JobID(), "job2", 0, 0, 0, JobStatus.FINISHED, 0, new int[9], 0)
    };
    return CompletableFuture.completedFuture(new MultipleJobsDetails(Arrays.asList(stubJobs)));
}
}
/** Combines the resource counters into the superclass hash with the usual 31-based fold. */
@Override
public int hashCode() {
    int hash = super.hashCode();
    for (int field : new int[] {numTaskManagersConnected, numSlotsTotal, numSlotsAvailable}) {
        hash = 31 * hash + field;
    }
    return hash;
}
/**
 * Merges this overview with the given one into a new instance; neither input is modified.
 *
 * @param jobsOverview overview to merge with this one
 * @return new overview combining both
 */
public JobsOverview combine(JobsOverview jobsOverview) {
    final JobsOverview merged = new JobsOverview(this, jobsOverview);
    return merged;
}
tasksPerState[ExecutionState.FAILED.ordinal()] = failed; JobDetails jobDetails = new JobDetails(jobId, name, startTime, endTime, duration, state, lastMod, tasksPerState, numTasks); MultipleJobsDetails multipleJobsDetails = new MultipleJobsDetails(Collections.singleton(jobDetails));
/** Folds the three slot/task-manager counters into the parent hash (31-multiplier scheme). */
@Override
public int hashCode() {
    int hash = super.hashCode();
    for (int counter : new int[] {numTaskManagersConnected, numSlotsTotal, numSlotsAvailable}) {
        hash = 31 * hash + counter;
    }
    return hash;
}
/**
 * Merges this overview with the given one into a new instance; neither input is modified.
 *
 * @param jobsOverview overview to merge with this one
 * @return new overview combining both
 */
public JobsOverview combine(JobsOverview jobsOverview) {
    final JobsOverview merged = new JobsOverview(this, jobsOverview);
    return merged;
}
/** Accumulates the cluster counters on top of the superclass hash, 31-multiplier style. */
@Override
public int hashCode() {
    int hash = super.hashCode();
    for (int value : new int[] {numTaskManagersConnected, numSlotsTotal, numSlotsAvailable}) {
        hash = 31 * hash + value;
    }
    return hash;
}
/** Builds the stored-jobs overview from this store's counters; the running count is always 0 here. */
@Override
public JobsOverview getStoredJobsOverview() {
    final int numRunningJobs = 0;
    return new JobsOverview(numRunningJobs, numFinishedJobs, numCanceledJobs, numFailedJobs);
}
/** Returns an overview built from this store's counters; running jobs are reported as 0 here. */
@Override
public JobsOverview getStoredJobsOverview() {
    final int runningJobCount = 0;
    return new JobsOverview(runningJobCount, numFinishedJobs, numCanceledJobs, numFailedJobs);
}