/**
 * Returns a page of results for a completed job.
 *
 * @param id job identifier from the request path
 * @param offset zero-based row offset into the result set (default 0)
 * @param limit maximum number of rows to return (default 100, capped at 500)
 * @return the requested slice of the job's result data
 * @throws BadRequestException if the job exists but has not reached COMPLETED state
 * @throws NotFoundException if no job with the given id exists for the caller
 */
@GET
@Path("/{id}/results")
public JobData getQueryResults(@PathParam("id") String id,
    @QueryParam("offset") @DefaultValue("0") Integer offset,
    @Valid @QueryParam("limit") @DefaultValue("100") Integer limit) {
  Preconditions.checkArgument(limit <= 500, "limit can not exceed 500 rows");
  try {
    Job job = jobs.getJob(new JobId(id), securityContext.getUserPrincipal().getName());
    if (job.getJobAttempt().getState() != JobState.COMPLETED) {
      throw new BadRequestException(
          String.format("Can not fetch details for a job that is in [%s] state.", job.getJobAttempt().getState()));
    }
    return new QueryJobResults(job, offset, limit).getData();
  } catch (JobNotFoundException e) {
    // chain the cause so the original lookup failure is not lost from logs/traces
    throw new NotFoundException(String.format("Could not find a job with id [%s]", id), e);
  }
}
/** Number of output rows recorded on this job's latest attempt. */
public long getRowCount() {
  final JobDetails details = job.getJobAttempt().getDetails();
  return details.getOutputRecords();
}
/** Extracts the join analysis recorded on the given job's latest attempt. */
private JoinAnalysis getJoins(Job job) {
  final JobInfo info = job.getJobAttempt().getInfo();
  return info.getJoinAnalysis();
}
@Override public void recordExtraInfo(String name, byte[] bytes) { //TODO DX-10977 the reflection manager should rely on its own observer to store this information in a separate store if(job.getJobAttempt().getExtraInfoList() == null) { job.getJobAttempt().setExtraInfoList(new ArrayList<ExtraInfo>()); } job.getJobAttempt().getExtraInfoList().add(new ExtraInfo() .setData(ByteString.copyFrom(bytes)) .setName(name)); storeJob(job); super.recordExtraInfo(name, bytes); }
/**
 * Builds the download URL for a UI export job; any other query type yields null.
 */
public static String getDownloadURL(Job job) {
  final JobInfo jobInfo = job.getJobAttempt().getInfo();
  if (jobInfo.getQueryType() != QueryType.UI_EXPORT) {
    return null;
  }
  return format("/job/%s/download", job.getJobId().getId());
}
/**
 * Marks the job as ENQUEUED, copies the request metadata onto the attempt info,
 * persists the job, and notifies the status and external listeners.
 */
@Override
public void queryStarted(UserRequest query, String user) {
  final JobAttempt attempt = job.getJobAttempt();
  attempt.setState(ENQUEUED);

  final JobInfo info = attempt.getInfo();
  info.setRequestType(query.getRequestType());
  info.setSql(query.getSql());
  info.setDescription(query.getDescription());

  storeJob(job);
  statusListener.jobSubmitted(jobId);
  if (externalListenerManager != null) {
    externalListenerManager.queryUpdate(job);
  }
}
/**
 * Polls the job state until it reaches COMPLETED, sleeping 100ms between checks.
 * Fails immediately if the job enters any state other than RUNNING, ENQUEUED,
 * STARTING or COMPLETED (e.g. FAILED or CANCELED).
 */
private void waitForCompletion(Job job) throws Exception {
  while (true) {
    JobState state = job.getJobAttempt().getState();
    // fixed typo in the assertion message ("to success successfully")
    Assert.assertTrue("expected job to complete successfully",
        Arrays.asList(JobState.RUNNING, JobState.ENQUEUED, JobState.STARTING, JobState.COMPLETED).contains(state));
    if (state == JobState.COMPLETED) {
      break;
    }
    // TimeUnit.MILLISECONDS.toMillis(100) was a no-op conversion; sleep takes millis directly
    Thread.sleep(100);
  }
}
}
/**
 * Converts a job into its query-log representation. The cancellation message is
 * populated only when the attempt ended in the CANCELED state.
 */
@Override
public LoggedQuery apply(Job job) {
  final JobAttempt attempt = job.getJobAttempt();
  final JobInfo info = attempt.getInfo();
  final JobState state = attempt.getState();

  final List<String> contextList = info.getContextList();
  final String context = (contextList == null) ? null : contextList.toString();

  String cancellationMessage = null;
  if (state == JobState.CANCELED) {
    cancellationMessage = info.getCancellationInfo().getMessage();
  }

  return new LoggedQuery(
      job.getJobId().getId(),
      context,
      info.getSql(),
      info.getStartTime(),
      info.getFinishTime(),
      state,
      cancellationMessage,
      info.getUser());
}
}
@Override public String toString() { final JobAttempt jobAttempt = getJobAttempt(); return format("{JobId: %s, SQL: %s, Dataset: %s, DatasetVersion: %s}", jobId.getId(), jobAttempt.getInfo().getSql(), PathUtils.constructFullPath(jobAttempt.getInfo().getDatasetPathList()), jobAttempt.getInfo().getDatasetVersion()); //todo }
/** Verifies that a stored job can be found via its parent dataset path. */
@Test
public void testJobParentSearch() throws Exception {
  final Job jobA1 = createJob("A1", asList("space1", "ds1"), "v1", "A", "space1", JobState.COMPLETED,
      "select * from LocalFS1.\"dac-sample1.json\"", 100L, 110L, QueryType.UI_RUN);

  final Origin origin = new Origin("foo", false)
      .setTableList(asList("LocalFS1", "dac-sample1.json"));
  final FieldOrigin fieldOrigin = new FieldOrigin("foo")
      .setOriginsList(asList(origin));
  jobA1.getJobAttempt().getInfo().setFieldOriginsList(asList(fieldOrigin));
  jobsService.storeJob(jobA1);

  final List<Job> jobsForParent = ImmutableList.copyOf(
      jobsService.getJobsForParent(new NamespaceKey(asList("LocalFS1", "dac-sample1.json")), Integer.MAX_VALUE));
  assertFalse(jobsForParent.isEmpty());
}
/**
 * Builds and persists a {@link Refresh} entry describing the materialization
 * produced by this job, then logs where it was written.
 *
 * @param details job details of the refresh job that just ran
 * @param decision the refresh decision (full vs. incremental, series info)
 */
private void createAndSaveRefresh(final JobDetails details, final RefreshDecision decision) {
  final boolean isFull = decision.getAccelerationSettings().getMethod() == RefreshMethod.FULL;
  // FULL refreshes have no incremental update point, so -1 is used as a sentinel updateId
  final long updateId = isFull ? -1L : getUpdateId(job.getJobId(), job.getData());
  final MaterializationMetrics metrics = ReflectionUtils.computeMetrics(job);
  final List<DataPartition> dataPartitions = ReflectionUtils.computeDataPartitions(job.getJobAttempt().getInfo());
  final List<String> refreshPath = ReflectionUtils.getRefreshPath(job.getJobId(), job.getData(), accelerationBasePath);
  final Refresh refresh = ReflectionUtils.createRefresh(reflection.getId(), refreshPath, decision.getSeriesId(),
      decision.getSeriesOrdinal(), updateId, details, metrics, dataPartitions);
  logger.trace("Refresh created: {}", refresh);
  // persist before logging the final location so the debug line reflects stored state
  materializationStore.save(refresh);
  logger.debug("materialization {} was written to {}", ReflectionUtils.getId(materialization),
      PathUtils.constructFullPath(refreshPath));
}
private boolean recordHeader(OutputStream output, JobId id, User user, String submissionId) throws UserNotFoundException, IOException, JobNotFoundException { SupportHeader header = new SupportHeader(); header.setClusterInfo(getClusterInfo()); header.setJob(jobsService.get().getJob(id).getJobAttempt()); Submission submission = new Submission() .setSubmissionId(submissionId) .setDate(System.currentTimeMillis()) .setEmail(user.getEmail()) .setFirst(user.getFirstName()) .setLast(user.getLastName()); header.setSubmission(submission); // record the dremio version that was used to run the query in the header header.setDremioVersion(jobsService.get().getProfile(id, 0).getDremioVersion()); ProtostuffUtil.toJSON(output, header, SupportHeader.getSchema(), false); return true; }
/** Verifies parent-dataset info survives a fetch of the job through the REST API. */
@Test
public void testJobPhysicalDatasetParentTableau() throws Exception {
  populateInitialData();
  final JobsService jobsService = l(JobsService.class);
  final Job job = jobsService.submitJob(JobRequest.newBuilder()
      .setSqlQuery(new SqlQuery("select * from \"LocalFS1\".\"dac-sample1.json\"", USERNAME))
      .setQueryType(QueryType.UI_RUN)
      .build(), NoOpJobStatusListener.INSTANCE);
  job.getData().loadIfNecessary();

  final List<String> expectedParent = Arrays.asList("LocalFS1", "dac-sample1.json");
  assertEquals(1, job.getJobAttempt().getInfo().getParentsList().size());
  assertEquals(expectedParent, job.getJobAttempt().getInfo().getParentsList().get(0).getDatasetPathList());

  @SuppressWarnings("unused")
  JobUI fetched = expectSuccess(getBuilder(getAPIv2().path("job/" + job.getJobId().getId())).buildGet(), JobUI.class);

  // parent info must be unchanged after the job is fetched through the REST API
  assertEquals(1, job.getJobAttempt().getInfo().getParentsList().size());
  assertEquals(expectedParent, job.getJobAttempt().getInfo().getParentsList().get(0).getDatasetPathList());
}
/**
 * Records the attempt outcome on the job. When the query completed successfully,
 * also derives join analysis from the final profile and stores it on the attempt
 * info. Any IOException is collected rather than thrown.
 */
@Override
public void attemptCompletion(UserResult result) {
  try {
    final QueryState queryState = result.getState();
    if (queryState == QueryState.COMPLETED) {
      detailsPopulator.attemptCompleted(result.getProfile());
      final JoinAnalysis joinAnalysis =
          new JoinAnalyzer(result.getProfile(), detailsPopulator.getFinalPrel()).computeJoinAnalysis();
      if (joinAnalysis != null) {
        job.getJobAttempt().getInfo().setJoinAnalysis(joinAnalysis);
      }
    }
    addAttemptToJob(job, queryState, result.getProfile());
  } catch (IOException e) {
    exception.addException(e);
  }
}
@Test // DX-6142 & DX-9432 public void testDownloadWithLimitInDatasetSql() throws Exception { final DatasetPath dsPath = new DatasetPath("DG.testDS"); DatasetUI ds = createDatasetFromSQLAndSave(dsPath,"select * from DG.dsg1 LIMIT 10 --- comment", asList("cp")); Job job = datasetService.prepareDownload(dsPath, ds.getDatasetVersion(), DownloadFormat.CSV, 50, SampleDataPopulator.DEFAULT_USER_NAME); job.getData().loadIfNecessary(); DownloadDataResponse downloadDataResponse = datasetService.downloadData(job.getJobAttempt().getInfo().getDownloadInfo(), SampleDataPopulator.DEFAULT_USER_NAME); final List<TestData> downloadedData = readDataCsv(downloadDataResponse.getInput()); assertEquals(10, downloadedData.size()); for (int i = 0; i < 10; ++i) { assertEquals("user" + i, downloadedData.get(i).getUser()); assertEquals(i%25, downloadedData.get(i).getAge()); assertEquals("address" + i, downloadedData.get(i).getAddress()); } }
/** A job with a single completed attempt has an empty summary and one attempt entry. */
@Test
public void testSingleCompletedAttempt() throws Exception {
  final String attemptId = AttemptIdUtils.toString(new AttemptId());
  final Job job = createJob("A1", Arrays.asList("space1", "ds1"), "v1", "A", "space1", JobState.COMPLETED,
      "select * from LocalFS1.\"dac-sample1.json\"", 100L, 110L, QueryType.UI_RUN);
  job.getJobAttempt().setDetails(new JobDetails());
  job.getJobAttempt().setAttemptId(attemptId);

  final JobDetailsUI detailsUI = new JobDetailsUI(job.getJobId(), job.getJobAttempt().getDetails(),
      JobResource.getPaginationURL(job.getJobId()), job.getAttempts(), JobResource.getDownloadURL(job),
      null, null, null, true, null, null);

  assertEquals("", detailsUI.getAttemptsSummary());
  assertEquals(1, detailsUI.getAttemptDetails().size());

  final AttemptDetailsUI attempt = detailsUI.getAttemptDetails().get(0);
  assertEquals("", attempt.getReason());
  assertEquals(JobState.COMPLETED, attempt.getResult());
  assertEquals("/profiles/" + job.getJobId().getId() + "?attempt=0", attempt.getProfileUrl());
}
/** A job with a single failed attempt still reports an empty summary and no per-attempt reason. */
@Test
public void testSingleFailedAttempt() throws Exception {
  final String attemptId = AttemptIdUtils.toString(new AttemptId());
  final Job job = createJob("A1", Arrays.asList("space1", "ds1"), "v1", "A", "space1", JobState.FAILED,
      "select * from LocalFS1.\"dac-sample1.json\"", 100L, 110L, QueryType.UI_RUN);
  job.getJobAttempt().setDetails(new JobDetails());
  job.getJobAttempt().setAttemptId(attemptId);

  final JobDetailsUI detailsUI = new JobDetailsUI(job.getJobId(), job.getJobAttempt().getDetails(),
      JobResource.getPaginationURL(job.getJobId()), job.getAttempts(), JobResource.getDownloadURL(job),
      new JobFailureInfo("Some error message", JobFailureType.UNKNOWN, null), null, null, false, null, null);

  assertEquals("", detailsUI.getAttemptsSummary());
  assertEquals(1, detailsUI.getAttemptDetails().size());

  final AttemptDetailsUI attempt = detailsUI.getAttemptDetails().get(0);
  assertEquals("", attempt.getReason());
  assertEquals(JobState.FAILED, attempt.getResult());
  assertEquals("/profiles/" + job.getJobId().getId() + "?attempt=0", attempt.getProfileUrl());
}
/**
 * Copies queue/scheduling metadata from the scheduling decision onto the job's
 * info and persists the job. A null decision is a no-op (matching the original
 * behavior, which only stored the job inside the null check).
 */
@Override
public void resourcesScheduled(ResourceSchedulingDecisionInfo resourceSchedulingDecisionInfo) {
  if (resourceSchedulingDecisionInfo == null) {
    return;
  }
  final JobInfo jobInfo = job.getJobAttempt().getInfo();
  if (jobInfo.getResourceSchedulingInfo() == null) {
    jobInfo.setResourceSchedulingInfo(new ResourceSchedulingInfo());
  }
  final ResourceSchedulingInfo schedulingInfo = jobInfo.getResourceSchedulingInfo()
      .setQueueName(resourceSchedulingDecisionInfo.getQueueName())
      .setQueueId(resourceSchedulingDecisionInfo.getQueueId())
      .setResourceSchedulingStart(resourceSchedulingDecisionInfo.getSchedulingStartTimeMs())
      .setResourceSchedulingEnd(resourceSchedulingDecisionInfo.getSchedulingEndTimeMs());
  // NOTE(review): original code dereferenced getResourceSchedulingProperties() unconditionally,
  // risking an NPE when no properties were attached — guard added; confirm upstream guarantees
  if (resourceSchedulingDecisionInfo.getResourceSchedulingProperties() != null) {
    schedulingInfo.setQueryCost(resourceSchedulingDecisionInfo.getResourceSchedulingProperties().getQueryCost());
  }
  storeJob(job);
}
/** Downloading a dataset as CSV must return every row. */
@Test
public void testDownloadCsv() throws Exception {
  final Job job = datasetService.prepareDownload(dsg1DatasetPath, dsg1.getVersion(), DownloadFormat.CSV, -1,
      SampleDataPopulator.DEFAULT_USER_NAME);
  job.getData().loadIfNecessary();
  final DownloadDataResponse response = datasetService.downloadData(job.getJobAttempt().getInfo().getDownloadInfo(),
      SampleDataPopulator.DEFAULT_USER_NAME);
  validateAllRows(readDataCsv(response.getInput()));
}
/** Downloading a dataset as JSON must return every row. */
@Test
public void testDownloadJson() throws Exception {
  final Job job = datasetService.prepareDownload(dsg1DatasetPath, dsg1.getVersion(), DownloadFormat.JSON, -1,
      SampleDataPopulator.DEFAULT_USER_NAME);
  job.getData().loadIfNecessary();
  final DownloadDataResponse response = datasetService.downloadData(job.getJobAttempt().getInfo().getDownloadInfo(),
      SampleDataPopulator.DEFAULT_USER_NAME);
  validateAllRows(readDataJson(response.getInput()));
}