/**
 * Returns whether persisted results exist for this job: a results store must be
 * attached and the job's output directory must be present on the filesystem.
 */
public boolean hasResults() {
  if (resultsStore == null) {
    return false;
  }
  return resultsStore.jobOutputDirectoryExists(jobId);
}
/**
 * Loads a page of records for this job, fetching the job's current state from
 * the store and delegating to the shared result-reading path.
 */
@Override
public RecordBatches load(int offset, int limit) {
  final JobResult jobResult = store.get(jobId);
  return loadJobData(jobId, jobResult, offset, limit);
}
/**
 * Returns this job's data, preferring the in-memory copy and otherwise
 * rehydrating it from the results store.
 *
 * @throws IllegalStateException if neither in-memory data nor a results store
 *         is available (e.g. on a deserialized Job)
 */
public JobData getData() {
  Preconditions.checkState(data != null || resultsStore != null,
      "not available from deserialized Job");
  return (data != null) ? data : resultsStore.get(jobId);
}
// NOTE(review): this line appears to be a garbled/truncated fragment of a larger method
// (presumably loadJobData): the UserException.dataReadError() builder chain is cut off
// mid-expression and an unrelated later statement has been fused onto it. As written it
// does not parse — recover the full method body from version control before editing.
final Path jobOutputDir = getJobOutputDir(jobId); if (!dfs.isDirectory(jobOutputDir)) { throw UserException.dataReadError() final List<ArrowFileMetadata> resultMetadata = getLastAttempt(job).getResultMetadataList();
/**
 * Resolves the filesystem directory that holds the given job's output files.
 * The directory name is the last component of the job's output table path,
 * rooted under the job store location.
 */
private Path getJobOutputDir(final JobId jobId) {
  final List<String> outputTablePath = getOutputTablePath(jobId);
  final String outputDirName = Iterables.getLast(outputTablePath);
  return new Path(jobStoreLocation, outputDirName);
}
/** Returns the table-function expression used to query this job's results. */
@Override public String getJobResultsTable() { return getJobResultsTableName(jobId); } }
/**
 * Checks whether the output directory for the given job exists on the filesystem.
 *
 * @param jobId id of the job to check
 * @return true if the directory exists; false if it does not, or if the
 *         filesystem could not be queried
 */
@VisibleForTesting
public boolean jobOutputDirectoryExists(JobId jobId) {
  final Path jobOutputDir = getJobOutputDir(jobId);
  try {
    return dfs.exists(jobOutputDir);
  } catch (IOException e) {
    // FIX: previously the IOException was swallowed silently; a filesystem outage
    // looked identical to "no results". Keep the best-effort false return, but
    // leave a trace (matching the logging style used by cleanup()).
    logger.warn("Could not check job output directory : " + jobOutputDir, e);
    return false;
  }
}
private void cleanupJobs(long cutOffTime) { //iterate through the job results and cleanup. final FindByCondition condition = getOldJobsCondition(cutOffTime).setPageSize(MAX_NUMBER_JOBS_TO_FETCH); for (Entry<JobId, JobResult> entry : store.find(condition)) { jobResultsStore.cleanup(entry.getKey()); } } }
/** * Get the output table path for the given id */ private List<String> getOutputTablePath(final JobId jobId) { // Get the information from the store or fallback to using job id as the table name Optional<JobResult> jobResult = Optional.ofNullable(store.get(jobId)); return jobResult .map(result -> getLastAttempt(result).getOutputTableList()) .orElse(Arrays.asList(storageName, jobId.toString())); }
// Wire up the results store over the job-results filesystem plugin.
// NOTE(review): fragment of an enclosing constructor/initializer — the rest of
// the definition is outside this view.
this.jobResultsStore = new JobResultsStore(fileSystemPlugin, store, allocator);
/**
 * Attaches a lazily-loaded JobData view to the job: internal jobs read results
 * back through the results store, external jobs only wait on completion. The
 * view is registered in the results cache before being handed to the job.
 */
private void setupJobData() {
  final JobId id = job.getJobId();
  final JobLoader jobLoader;
  if (isInternal) {
    jobLoader = new InternalJobLoader(exception, completionLatch, id, jobResultsStore, store);
  } else {
    jobLoader = new ExternalJobLoader(completionLatch, exception);
  }
  final JobData result = jobResultsStore.cacheNewJob(id, new JobDataImpl(jobLoader, id));
  job.setData(result);
}
public String getJobResultsTableName(JobId jobId) { // return String.format("TABLE(%s(type => 'arrow'))", PathUtils.constructFullPath(getOutputTablePath(jobId))); }
/** Returns the table expression for this job's results, delegating to the results store. */
@Override public String getJobResultsTable() { return jobResultsStore.getJobResultsTableName(id); } }
/**
 * Recursively deletes the given job's output directory, if present.
 *
 * @param jobId id of the job whose results should be removed
 * @return true if the directory is gone (deleted or never existed);
 *         false if the filesystem operation failed
 */
public boolean cleanup(JobId jobId) {
  final Path jobOutputDir = getJobOutputDir(jobId);
  try {
    if (dfs.exists(jobOutputDir)) {
      dfs.delete(jobOutputDir, true);
      // FIX: use parameterized logging instead of string concatenation so the
      // message is only built when the level is enabled.
      logger.info("Deleted job output directory : {}", jobOutputDir);
    }
    return true;
  } catch (IOException e) {
    logger.warn("Could not delete job output directory : {}", jobOutputDir, e);
    return false;
  }
}
/**
 * Blocks until the job completes, surfaces any recorded failure, then loads the
 * requested page of results from the results store.
 *
 * <p>Ordering matters here: the latch must be awaited before failures are
 * rethrown, and failures must be rethrown before results are read.
 */
@Override public RecordBatches load(int offset, int limit) { try { completionLatch.await(); } catch (InterruptedException ex) { // Restore interrupt status and record the interruption as a job failure.
Thread.currentThread().interrupt(); exception.addException(ex); } // Rethrow any accumulated failure before attempting to read results.
exception.throwNoClearRuntime(); return jobResultsStore.loadJobData(id, store.get(id), offset, limit); }
/**
 * End-to-end check of result cleanup: runs a query, forces results to expire
 * immediately via system options, triggers the cleanup task, and verifies both
 * the output directory and the UI "results available" flag are gone.
 */
@Test public void testJobCleanup() throws Exception { jobsService = (LocalJobsService) l(JobsService.class); SqlQuery ctas = getQueryFromSQL("SHOW SCHEMAS"); Job job = jobsService.submitJob(JobRequest.newBuilder() .setSqlQuery(ctas) .build(), NoOpJobStatusListener.INSTANCE); job.getData().loadIfNecessary(); SabotContext context = l(SabotContext.class); // Expire results immediately: 0 days max age plus a 10 ms debug override.
OptionValue days = OptionValue.createLong(OptionType.SYSTEM, ExecConstants.RESULTS_MAX_AGE_IN_DAYS.getOptionName(), 0); context.getOptionManager().setOption(days); OptionValue millis = OptionValue.createLong(OptionType.SYSTEM, ExecConstants.DEBUG_RESULTS_MAX_AGE_IN_MILLISECONDS.getOptionName(), 10); context.getOptionManager().setOption(millis); Thread.sleep(20); LocalJobsService.CleanupTask cleanupTask = jobsService.new CleanupTask(); cleanupTask.cleanup(); //make sure that the job output directory is gone
assertFalse(jobsService.getJobResultsStore().jobOutputDirectoryExists(job.getJobId())); job = jobsService.getJob(job.getJobId()); assertFalse(JobDetailsUI.of(job).getResultsAvailable()); // Restore defaults so later tests are unaffected by the aggressive expiry.
context.getOptionManager().setOption(OptionValue.createLong(OptionType.SYSTEM, ExecConstants.RESULTS_MAX_AGE_IN_DAYS.getOptionName(), 30)); context.getOptionManager().setOption(OptionValue.createLong(OptionType.SYSTEM, ExecConstants.DEBUG_RESULTS_MAX_AGE_IN_MILLISECONDS.getOptionName(), 0)); }
/**
 * Verifies that loading results for a canceled job raises a DATA_READ
 * UserException. A mock store is used so only loadJobData runs for real
 * (via thenCallRealMethod) against a job whose single attempt is CANCELED.
 */
@Test public void testCancelBeforeLoadingJob() throws Exception { exception.expect(new UserExceptionMatcher(UserBitShared.DremioPBError.ErrorType.DATA_READ, "Could not load results as the query was canceled")); final JobResultsStore jobResultsStore = mock(JobResultsStore.class); JobResult jobResult = new JobResult(); JobAttempt jobAttempt = new JobAttempt(); jobAttempt.setState(JobState.CANCELED); List<JobAttempt> attempts = new ArrayList<JobAttempt>(); attempts.add(jobAttempt); jobResult.setAttemptsList(attempts); // Call the real loadJobData implementation on the otherwise-mocked store.
when(jobResultsStore.loadJobData(new JobId("Canceled Job"),jobResult,0,0)).thenCallRealMethod(); jobResultsStore.loadJobData(new JobId("Canceled Job"),jobResult,0,0); } }