@POST @Path("file_preview/{path: .*}") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) public JobDataFragment previewFormatSettings(FileFormat fileFormat, @PathParam("path") String path) throws FileNotFoundException, SourceNotFoundException { FilePath filePath = FilePath.fromURLPath(homeName, path); logger.debug("filePath: " + filePath.toPathString()); // TODO, this should be moved to dataset resource and be paginated. SqlQuery query = new SqlQuery(format("select * from table(%s (%s)) limit 500", filePath.toPathString(), fileFormat.toTableOptions()), securityContext.getUserPrincipal().getName()); JobUI job = new JobUI(jobsService.submitJob(JobRequest.newBuilder() .setSqlQuery(query) .setQueryType(QueryType.UI_INITIAL_PREVIEW) .build(), NoOpJobStatusListener.INSTANCE)); return job.getData().truncate(500); }
@POST @Path("file_preview_unsaved/{path: .*}") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) public JobDataFragment previewFormatSettingsStaging(FileFormat fileFormat, @PathParam("path") String path) throws FileNotFoundException, SourceNotFoundException { FilePath filePath = FilePath.fromURLPath(homeName, path); logger.debug("filePath: " + filePath.toPathString()); // use file's location directly to query file String fileLocation = PathUtils.toDottedPath(new org.apache.hadoop.fs.Path(fileFormat.getLocation())); SqlQuery query = new SqlQuery(format("select * from table(%s.%s (%s)) limit 500", SqlUtils.quoteIdentifier(HomeFileSystemStoragePlugin.HOME_PLUGIN_NAME), fileLocation, fileFormat.toTableOptions()), securityContext.getUserPrincipal().getName()); JobUI job = new JobUI(jobsService.submitJob(JobRequest.newBuilder() .setSqlQuery(query) .setQueryType(QueryType.UI_INITIAL_PREVIEW) .build(), NoOpJobStatusListener.INSTANCE)); return job.getData().truncate(500); }
/**
 * Get the preview response of dataset. Dataset could be a physical or virtual dataset.
 *
 * @param limit maximum number of records in the initial response
 * @return initial preview of the dataset's data
 */
@GET @Path("preview") @Produces(APPLICATION_JSON)
public InitialDataPreviewResponse preview(@QueryParam("limit") @DefaultValue("50") Integer limit) {
  final String sql = String.format("select * from %s", datasetPath.toPathString());
  final SqlQuery previewQuery = new SqlQuery(sql, securityContext.getUserPrincipal().getName());
  final JobUI previewJob = new JobUI(jobsService.submitJob(JobRequest.newBuilder()
      .setSqlQuery(previewQuery)
      .setQueryType(QueryType.UI_PREVIEW)
      .build(), NoOpJobStatusListener.INSTANCE));
  try {
    return InitialDataPreviewResponse.of(previewJob.getData().truncate(limit));
  } catch (UserException e) {
    // Surface query failures as an invalid-query response rather than a raw UserException.
    throw DatasetTool.toInvalidQueryException(e, previewQuery.getSql(), ImmutableList.<String>of());
  }
}
/**
 * Polls a REST-submitted job until it completes, then verifies that fetching its
 * results with {@code limit=1000} is rejected with BAD_REQUEST (presumably 1000
 * exceeds the allowed results page size — TODO confirm against the results endpoint).
 */
@Test
public void testGetJobResults() throws InterruptedException {
  JobsService jobs = l(JobsService.class);
  SqlQuery query = new SqlQuery("select * from sys.version", Collections.emptyList(), SystemUser.SYSTEM_USERNAME);
  Job job = jobs.submitJob(JobRequest.newBuilder()
      .setSqlQuery(query)
      .setQueryType(QueryType.REST)
      .build(), NoOpJobStatusListener.INSTANCE);
  String id = job.getJobId().getId();
  while (true) {
    JobStatus status = expectSuccess(getBuilder(getPublicAPI(3).path(JOB_PATH).path(id)).buildGet(), JobStatus.class);
    JobState jobState = status.getJobState();
    // Fail fast if the job reaches any state other than the expected in-flight/completed ones.
    Assert.assertTrue("expected job to complete successfully",
        Arrays.asList(JobState.COMPLETED, JobState.RUNNING, JobState.ENQUEUED, JobState.STARTING).contains(jobState));
    if (jobState == JobState.COMPLETED) {
      expectStatus(Response.Status.BAD_REQUEST,
          getBuilder(getPublicAPI(3).path(JOB_PATH).path(id).path("results").queryParam("limit", 1000)).buildGet());
      break;
    }
    // Fix: the original called TimeUnit.MILLISECONDS.toMillis(100), an identity
    // conversion; sleep through TimeUnit directly instead.
    TimeUnit.MILLISECONDS.sleep(100);
  }
}
/**
 * Plans the given query string and captures the planner's physical-phase output.
 *
 * @param queryString SQL text to plan
 * @return the {@link RelNode} produced by the PHYSICAL planner phase
 */
private RelNode getPlan(final String queryString) {
  final AtomicReference<RelNode> physicalPlan = new AtomicReference<>(null);
  final SqlQuery sqlQuery = new SqlQuery(queryString, DEFAULT_USERNAME);
  final Job planJob = getJobsService().submitJob(JobRequest.newBuilder()
      .setSqlQuery(sqlQuery)
      .setQueryType(QueryType.ACCELERATOR_CREATE)
      .setDatasetPath(DatasetPath.NONE.toNamespaceKey())
      .setDatasetVersion(DatasetVersion.NONE)
      .build(), new NoOpJobStatusListener() {
        @Override
        public void planRelTransform(final PlannerPhase phase, final RelNode before,
            final RelNode after, final long millisTaken) {
          // Capture only the physical phase result; delegate everything to the base listener.
          if (phase == PlannerPhase.PHYSICAL) {
            physicalPlan.set(after);
          }
          super.planRelTransform(phase, before, after, millisTaken);
        }
      });
  // Force execution so the planner callbacks above actually fire.
  planJob.getData().truncate(1);
  return physicalPlan.get();
}
}
@Test
public void testParquetFile() throws Exception {
  final JobsService jobsService = l(JobsService.class);
  final String fileUrlPath = getUrlPath("/singlefile_parquet_dir/0_0_0.parquet");
  final String fileParentUrlPath = getUrlPath("/singlefile_parquet_dir/");

  // Preview the parquet file through the REST API with default format settings.
  final ParquetFileConfig fileConfig = new ParquetFileConfig();
  final JobDataFragment previewData = expectSuccess(
      getBuilder(getAPIv2().path("/source/dacfs_test/file_preview/" + fileUrlPath))
          .buildPost(Entity.json(fileConfig)),
      JobDataFragment.class);
  assertEquals(25, previewData.getReturnedRowCount());
  assertEquals(4, previewData.getColumns().size());

  // Run the same file as a query and expect the identical row/column shape.
  final JobUI runJob = new JobUI(jobsService.submitJob(JobRequest.newBuilder()
      .setSqlQuery(createQuery("/singlefile_parquet_dir/0_0_0.parquet"))
      .setQueryType(QueryType.UI_RUN)
      .build(), NoOpJobStatusListener.INSTANCE));
  final JobDataFragment runData = runJob.getData().truncate(500);
  assertEquals(25, runData.getReturnedRowCount());
  assertEquals(4, runData.getColumns().size());

  checkCounts(fileParentUrlPath, "0_0_0.parquet", true, 1, 0, 0);
}
/**
 * Runs the given query internally and returns a text dump of the first
 * LOGICAL-phase plan produced by the planner (empty string if none was seen).
 */
protected String getQueryPlan(final String query) {
  final AtomicReference<String> capturedPlan = new AtomicReference<>("");
  final Job planJob = getJobsService().submitJob(JobRequest.newBuilder()
      .setSqlQuery(new SqlQuery(query, DEFAULT_USERNAME))
      .setQueryType(QueryType.UI_INTERNAL_RUN)
      .setDatasetPath(DatasetPath.NONE.toNamespaceKey())
      .build(), new NoOpJobStatusListener() {
        @Override
        public void planRelTransform(final PlannerPhase phase, final RelNode before,
            final RelNode after, final long millisTaken) {
          // Keep only the first logical plan; ignore later callbacks.
          if (!Strings.isNullOrEmpty(capturedPlan.get())) {
            return;
          }
          if (phase == PlannerPhase.LOGICAL) {
            capturedPlan.set(RelOptUtil.dumpPlan("", after, SqlExplainFormat.TEXT, SqlExplainLevel.ALL_ATTRIBUTES));
          }
        }
      });
  // Force execution so the listener actually runs.
  planJob.getData().loadIfNecessary();
  return capturedPlan.get();
}
@Test
public void testDsg10External() {
  final Job job = jobsService.submitJob(JobRequest.newBuilder()
      .setSqlQuery(new SqlQuery("select * from DG.dsg10", SampleDataPopulator.DEFAULT_USER_NAME))
      .setQueryType(QueryType.UI_RUN)
      .build(), NoOpJobStatusListener.INSTANCE);
  job.getData().loadIfNecessary();

  // Datasets in dsg10's lineage should each see their job count incremented.
  assertEquals(inc(dsg10), jobsService.getJobsCount(dsg10.toNamespaceKey()));
  assertEquals(inc(dsg9), jobsService.getJobsCount(dsg9.toNamespaceKey()));
  assertEquals(inc(dsg8), jobsService.getJobsCount(dsg8.toNamespaceKey()));
  assertEquals(inc(dsg3), jobsService.getJobsCount(dsg3.toNamespaceKey()));
  assertEquals(inc(dsg2), jobsService.getJobsCount(dsg2.toNamespaceKey()));
  assertEquals(inc(dsg4), jobsService.getJobsCount(dsg4.toNamespaceKey()));
  assertEquals(inc(dsg1), jobsService.getJobsCount(dsg1.toNamespaceKey()));
  assertEquals(inc(sample1), jobsService.getJobsCount(sample1.toNamespaceKey()));
  assertEquals(inc(sample2), jobsService.getJobsCount(sample2.toNamespaceKey()));

  // Unrelated datasets must keep their previous counts.
  assertEquals((int) jobsCount.get(dsg5), jobsService.getJobsCount(dsg5.toNamespaceKey()));
  assertEquals((int) jobsCount.get(dsg6), jobsService.getJobsCount(dsg6.toNamespaceKey()));
  assertEquals((int) jobsCount.get(dsg7), jobsService.getJobsCount(dsg7.toNamespaceKey()));
}
/**
 * Submits a materialization refresh job for the given reflection entry.
 *
 * @param jobsService service the job is submitted to
 * @param namespaceService used to resolve the reflection's dataset path by UUID
 * @param entry reflection being refreshed
 * @param materialization materialization the refresh produces
 * @param sql refresh query text
 * @param jobStatusListener listener notified of job progress
 * @return the submitted job
 */
public static Job submitRefreshJob(JobsService jobsService, NamespaceService namespaceService,
    ReflectionEntry entry, Materialization materialization, String sql, JobStatusListener jobStatusListener) {
  final SqlQuery refreshQuery = new SqlQuery(sql, SYSTEM_USERNAME);
  final NamespaceKey datasetKey =
      new NamespaceKey(namespaceService.findDatasetByUUID(entry.getDatasetId()).getFullPathList());
  final MaterializationSummary summary = new MaterializationSummary()
      .setDatasetId(entry.getDatasetId())
      .setReflectionId(entry.getId().getId())
      .setLayoutVersion(entry.getTag())
      .setMaterializationId(materialization.getId().getId())
      .setReflectionName(entry.getName())
      .setReflectionType(entry.getType().toString());
  return jobsService.submitJob(
      JobRequest.newMaterializationJobBuilder(summary, new SubstitutionSettings(ImmutableList.of()))
          .setSqlQuery(refreshQuery)
          .setQueryType(QueryType.ACCELERATOR_CREATE)
          .setDatasetPath(datasetKey)
          .build(),
      jobStatusListener);
}
@Test
public void testJobPhysicalDatasetParentTableau() throws Exception {
  populateInitialData();
  final JobsService jobsService = l(JobsService.class);
  final Job job = jobsService.submitJob(JobRequest.newBuilder()
      .setSqlQuery(new SqlQuery("select * from \"LocalFS1\".\"dac-sample1.json\"", USERNAME))
      .setQueryType(QueryType.UI_RUN)
      .build(), NoOpJobStatusListener.INSTANCE);
  job.getData().loadIfNecessary();

  assertEquals(1, job.getJobAttempt().getInfo().getParentsList().size());
  assertEquals(Arrays.asList("LocalFS1", "dac-sample1.json"),
      job.getJobAttempt().getInfo().getParentsList().get(0).getDatasetPathList());

  // Fetching the job through the REST API must not disturb its recorded parents.
  @SuppressWarnings("unused")
  JobUI fetched = expectSuccess(getBuilder(getAPIv2().path("job/" + job.getJobId().getId())).buildGet(), JobUI.class);
  assertEquals(1, job.getJobAttempt().getInfo().getParentsList().size());
  assertEquals(Arrays.asList("LocalFS1", "dac-sample1.json"),
      job.getJobAttempt().getInfo().getParentsList().get(0).getDatasetPathList());
}
private DatasetUI setupIteratorTests(String datasetName) throws Exception{ TestSpacesStoragePlugin.setup(getCurrentDremioDaemon()); DatasetUI dataset = getDataset(new DatasetPath(datasetName)); // run dataset twice. We do a run and a preview since subsequent previews won't actually rerun... getPreview(dataset); l(JobsService.class).submitJob(JobRequest.newBuilder() .setSqlQuery(getQueryFromConfig(dataset)) .setQueryType(QueryType.UI_RUN) .setDatasetPath(getDatasetPath(dataset).toNamespaceKey()) .setDatasetVersion(dataset.getDatasetVersion()) .build(), NoOpJobStatusListener.INSTANCE) .getData() .loadIfNecessary(); return dataset; }
/**
 * Runs the given query to completion and returns the profile of its first attempt.
 *
 * @param query SQL text to execute
 * @return the query profile for attempt 0 of the completed job
 * @throws JobNotFoundException if the job cannot be found when fetching the profile
 */
private static QueryProfile getQueryProfile(final String query) throws JobNotFoundException {
  final Job job = getJobsService().submitJob(JobRequest.newBuilder()
      .setSqlQuery(new SqlQuery(query, DEFAULT_USERNAME))
      .setQueryType(QueryType.UI_INTERNAL_RUN)
      .setDatasetPath(DatasetPath.NONE.toNamespaceKey())
      .setDatasetVersion(DatasetVersion.NONE)
      // Use the shared no-op listener instance (consistent with the rest of the
      // file) instead of allocating a fresh one.
      .build(), NoOpJobStatusListener.INSTANCE);
  job.getData().loadIfNecessary();
  return getJobsService().getProfile(job.getJobId(), 0);
}
@Test
public void testDsg1External() {
  final Job job = jobsService.submitJob(JobRequest.newBuilder()
      .setSqlQuery(new SqlQuery("select * from DG.dsg1", SampleDataPopulator.DEFAULT_USER_NAME))
      .setQueryType(QueryType.UI_RUN)
      .build(), NoOpJobStatusListener.INSTANCE);
  job.getData().loadIfNecessary();
  // An external UI run bumps the job count of the dataset and its parent.
  assertEquals(inc(dsg1), jobsService.getJobsCount(dsg1.toNamespaceKey()));
  assertEquals(inc(sample1), jobsService.getJobsCount(sample1.toNamespaceKey()));
}
@Test public void testDsg1Unknown() { Job job = jobsService.submitJob(JobRequest.newBuilder() .setSqlQuery(new SqlQuery("select * from DG.dsg1", SampleDataPopulator.DEFAULT_USER_NAME)) .setQueryType(QueryType.UNKNOWN) .build(), NoOpJobStatusListener.INSTANCE); job.getData().loadIfNecessary(); // unkown jobs are not counted assertEquals((int)jobsCount.get(dsg1), jobsService.getJobsCount(dsg1.toNamespaceKey())); assertEquals((int) jobsCount.get(sample1), jobsService.getJobsCount(sample1.toNamespaceKey())); }
@Deprecated public JobDataFragment previewPhysicalDataset(String table, FileFormat formatOptions) { SqlQuery query = new SqlQuery(format("select * from table(%s (%s))", table, formatOptions.toTableOptions()), null, context.getUserPrincipal().getName()); // We still need to truncate the results to 500 as the preview physical datasets doesn't support pagination yet return new JobUI(jobsService.submitJob(JobRequest.newBuilder() .setSqlQuery(query) .setQueryType(QueryType.UI_INITIAL_PREVIEW) .build(), NoOpJobStatusListener.INSTANCE)).getData().truncate(500); } }
@Test public void testDsg2Internal() { Job job = jobsService.submitJob(JobRequest.newBuilder() .setSqlQuery(new SqlQuery("select * from DG.dsg2", SampleDataPopulator.DEFAULT_USER_NAME)) .setQueryType(QueryType.UI_INTERNAL_PREVIEW) .build(), NoOpJobStatusListener.INSTANCE); job.getData().loadIfNecessary(); // internal jobs don't get counted assertEquals((int) jobsCount.get(dsg2), jobsService.getJobsCount(dsg2.toNamespaceKey())); assertEquals((int) jobsCount.get(sample2), jobsService.getJobsCount(sample2.toNamespaceKey())); }
@Test
public void testDsg2UI() {
  final Job job = jobsService.submitJob(JobRequest.newBuilder()
      .setSqlQuery(new SqlQuery("select * from DG.dsg2", SampleDataPopulator.DEFAULT_USER_NAME))
      .setQueryType(QueryType.UI_RUN)
      .build(), NoOpJobStatusListener.INSTANCE);
  job.getData().loadIfNecessary();
  // A UI run counts against the dataset and its parent.
  assertEquals(inc(dsg2), jobsService.getJobsCount(dsg2.toNamespaceKey()));
  assertEquals(inc(sample2), jobsService.getJobsCount(sample2.toNamespaceKey()));
}
/**
 * Submits the given SQL as a REST job and returns the created job's id.
 *
 * @param sql request carrying SQL text and an optional context
 * @return details identifying the submitted job
 */
@POST
public QueryDetails runQuery(CreateFromSQL sql) {
  final SqlQuery sqlQuery = new SqlQuery(sql.getSql(), sql.getContext(), securityContext);
  final Job submitted = jobs.submitJob(JobRequest.newBuilder()
      .setSqlQuery(sqlQuery)
      .setQueryType(QueryType.REST)
      .build(), NoOpJobStatusListener.INSTANCE);
  return new QueryDetails(submitted.getJobId().getId());
}
}
@POST @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) public JobDataFragment query(CreateFromSQL sql) { SqlQuery query = new SqlQuery(sql.getSql(), sql.getContext(), securityContext); // Pagination is not supported in this API, so we need to truncate the results to 500 records return new JobUI(jobs.submitJob(JobRequest.newBuilder() .setSqlQuery(query) .setQueryType(QueryType.REST) .build(), NoOpJobStatusListener.INSTANCE)).getData().truncate(500); }
/**
 * Sets a system option by running an internal {@code ALTER SYSTEM SET} query
 * and waiting for it to complete.
 *
 * @param optionName name of the option (quoted in the generated SQL)
 * @param optionValue value literal, inserted verbatim into the SQL
 */
protected void setSystemOption(String optionName, String optionValue) {
  final String alterSql = String.format("ALTER SYSTEM SET \"%s\"=%s", optionName, optionValue);
  final Job optionJob = getJobsService().submitJob(JobRequest.newBuilder()
      .setSqlQuery(new SqlQuery(alterSql, DEFAULT_USERNAME))
      .setQueryType(QueryType.UI_INTERNAL_RUN)
      .setDatasetPath(DatasetPath.NONE.toNamespaceKey())
      .build(), NoOpJobStatusListener.INSTANCE);
  // Block until the ALTER takes effect.
  optionJob.getData().loadIfNecessary();
}