@Override public void recordExtraInfo(String name, byte[] bytes) { //TODO DX-10977 the reflection manager should rely on its own observer to store this information in a separate store if(job.getJobAttempt().getExtraInfoList() == null) { job.getJobAttempt().setExtraInfoList(new ArrayList<ExtraInfo>()); } job.getJobAttempt().getExtraInfoList().add(new ExtraInfo() .setData(ByteString.copyFrom(bytes)) .setName(name)); storeJob(job); super.recordExtraInfo(name, bytes); }
@Test // DX-5119 Index unquoted dataset names along with quoted ones. // TODO (Amit H): DX-1563 We should be using analyzer to match both rather than indexing twice. public void testUnquotedJobFilter() throws Exception { Job jobA1 = createJob("A1", Arrays.asList("Prod-Sample", "ds-1"), "v1", "A", "Prod-Sample", JobState.COMPLETED, "select * from LocalFS1.\"dac-sample1.json\"", 100L, 110L, QueryType.UI_RUN); jobsService.storeJob(jobA1); List<Job> jobs = getAllJobs("ads==Prod-Sample.ds-1", null, null); assertEquals(1, jobs.size()); jobs = getAllJobs("ds==Prod-Sample.ds-1", null, null); assertEquals(0, jobs.size()); }
// NOTE(review): fragment of a larger try/catch block — persists the job, then notifies the
// status listener that metadata collection completed; the catch handler continues outside
// this view. Leaving code untouched because the enclosing method is not visible here.
storeJob(job); statusListener.metadataCollected(metadata); }catch(Exception ex){
@Test
public void testJobParentSearch() throws Exception {
  final Job job = createJob("A1", asList("space1", "ds1"), "v1", "A", "space1", JobState.COMPLETED,
      "select * from LocalFS1.\"dac-sample1.json\"", 100L, 110L, QueryType.UI_RUN);

  // record that the job reads from LocalFS1."dac-sample1.json" so the parent index picks it up
  final Origin origin = new Origin("foo", false)
      .setTableList(asList("LocalFS1", "dac-sample1.json"));
  job.getJobAttempt().getInfo().setFieldOriginsList(asList(
      new FieldOrigin("foo").setOriginsList(asList(origin))));
  jobsService.storeJob(job);

  final List<Job> children = ImmutableList.copyOf(
      jobsService.getJobsForParent(new NamespaceKey(asList("LocalFS1", "dac-sample1.json")),
          Integer.MAX_VALUE));
  assertFalse(children.isEmpty());
}
@Override
public QueryObserver createNewQueryObserver(ExternalId id, UserSession session, UserResponseHandler handler) {
  final JobId jobId = JobsServiceUtil.getExternalIdAsJobId(id);
  final RpcEndpointInfos clientInfos = session.getClientInfos();

  // dataset path/version are not known yet for an externally submitted query
  final JobInfo info = new JobInfo(jobId, "UNKNOWN", "UNKNOWN", QueryTypeUtils.getQueryType(clientInfos))
      .setUser(session.getCredentials().getUserName())
      .setDatasetPathList(Arrays.asList("UNKNOWN"))
      .setStartTime(System.currentTimeMillis());

  final Job job = new Job(jobId, new JobAttempt()
      .setInfo(info)
      .setEndpoint(identity)
      .setDetails(new JobDetails())
      .setState(ENQUEUED));
  storeJob(job);

  final QueryListener listener = new QueryListener(job, handler);
  runningJobs.put(jobId, listener);
  return listener;
}
@Override public void planCompleted(final ExecutionPlan plan) { if (plan != null) { try { builder.addBatchSchema(RootSchemaFinder.getSchema(plan.getRootOperator(), contextProvider.get().getFunctionImplementationRegistry())); } catch (Exception e) { exception.addException(e); } } job.getJobAttempt().setAccelerationDetails( ByteString.copyFrom(detailsPopulator.computeAcceleration())); job.getJobAttempt().setState(STARTING); storeJob(job); if (externalListenerManager != null) { externalListenerManager.queryUpdate(job); } // plan is parallelized after physical planning is done so we need to finalize metadata here finalizeMetadata(); }
@Override
public void queryStarted(UserRequest query, String user) {
  final JobAttempt attempt = job.getJobAttempt();
  attempt.setState(ENQUEUED);
  // capture the request details before the first persist of this attempt
  attempt.getInfo()
      .setRequestType(query.getRequestType())
      .setSql(query.getSql())
      .setDescription(query.getDescription());
  storeJob(job);
  statusListener.jobSubmitted(jobId);
  if (externalListenerManager != null) {
    externalListenerManager.queryUpdate(job);
  }
}
// NOTE(review): fragment of a larger test fixture — persists the prepared jobs and tracks
// the expected per-state counts ("canceled"/"completed") that are presumably asserted later
// in the enclosing method (not visible here).
canceled += 1; jobsService.storeJob(jobA1); jobsService.storeJob(jobA2); jobsService.storeJob(jobA3); jobsService.storeJob(jobA4); jobsService.storeJob(jobA5); completed += 4; jobsService.storeJob(jobB1); jobsService.storeJob(jobB2); jobsService.storeJob(jobB3); jobsService.storeJob(jobB4); jobsService.storeJob(jobB5); jobsService.storeJob(jobC1); jobsService.storeJob(jobC2); jobsService.storeJob(jobC3); jobsService.storeJob(jobD1); completed += 1;
@Override
public void resourcesScheduled(ResourceSchedulingDecisionInfo resourceSchedulingDecisionInfo) {
  // nothing to record without a scheduling decision
  if (resourceSchedulingDecisionInfo == null) {
    return;
  }
  final JobInfo jobInfo = job.getJobAttempt().getInfo();
  ResourceSchedulingInfo schedulingInfo = jobInfo.getResourceSchedulingInfo();
  if (schedulingInfo == null) {
    schedulingInfo = new ResourceSchedulingInfo();
    jobInfo.setResourceSchedulingInfo(schedulingInfo);
  }
  schedulingInfo
      .setQueueName(resourceSchedulingDecisionInfo.getQueueName())
      .setQueueId(resourceSchedulingDecisionInfo.getQueueId())
      .setResourceSchedulingStart(resourceSchedulingDecisionInfo.getSchedulingStartTimeMs())
      .setResourceSchedulingEnd(resourceSchedulingDecisionInfo.getSchedulingEndTimeMs())
      .setQueryCost(resourceSchedulingDecisionInfo.getResourceSchedulingProperties().getQueryCost());
  storeJob(job);
}
@Override
public void execStarted(QueryProfile profile) {
  try (TimedBlock b = Timer.time("execStarted")) {
    b.addID("attempt=" + attemptId);

    // without a profile there is nothing to record; the timed block still closes
    if (profile == null) {
      return;
    }

    final JobAttempt attempt = job.getJobAttempt();
    final JobInfo jobInfo = attempt.getInfo();
    jobInfo.setStartTime(profile.getStart());
    attempt.setState(JobState.RUNNING);

    // may throw IOException while parsing the profile — handled below
    final QueryProfileParser profileParser = new QueryProfileParser(jobId, profile);

    if (profile.getResourceSchedulingProfile() != null) {
      ResourceSchedulingInfo schedulingInfo = jobInfo.getResourceSchedulingInfo();
      if (schedulingInfo == null) {
        schedulingInfo = new ResourceSchedulingInfo();
        jobInfo.setResourceSchedulingInfo(schedulingInfo);
      }
      schedulingInfo.setQueueName(profile.getResourceSchedulingProfile().getQueueName());
      schedulingInfo.setQueueId(profile.getResourceSchedulingProfile().getQueueId());
    }

    attempt.setStats(profileParser.getJobStats());
    attempt.setDetails(profileParser.getJobDetails());
    storeJob(job);

    if (externalListenerManager != null) {
      externalListenerManager.queryUpdate(job);
    }
  } catch (IOException e) {
    exception.addException(e);
  }
}
// NOTE(review): fragment of a larger method — records the parsed job details on the attempt
// and persists the job; the enclosing method body is not visible here.
jobAttempt.setDetails(profileParser.getJobDetails()); storeJob(job);