@POST @Path("file_preview/{path: .*}") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) public JobDataFragment previewFormatSettings(FileFormat fileFormat, @PathParam("path") String path) throws FileNotFoundException, SourceNotFoundException { FilePath filePath = FilePath.fromURLPath(homeName, path); logger.debug("filePath: " + filePath.toPathString()); // TODO, this should be moved to dataset resource and be paginated. SqlQuery query = new SqlQuery(format("select * from table(%s (%s)) limit 500", filePath.toPathString(), fileFormat.toTableOptions()), securityContext.getUserPrincipal().getName()); JobUI job = new JobUI(jobsService.submitJob(JobRequest.newBuilder() .setSqlQuery(query) .setQueryType(QueryType.UI_INITIAL_PREVIEW) .build(), NoOpJobStatusListener.INSTANCE)); return job.getData().truncate(500); }
@POST @Path("file_preview_unsaved/{path: .*}") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) public JobDataFragment previewFormatSettingsStaging(FileFormat fileFormat, @PathParam("path") String path) throws FileNotFoundException, SourceNotFoundException { FilePath filePath = FilePath.fromURLPath(homeName, path); logger.debug("filePath: " + filePath.toPathString()); // use file's location directly to query file String fileLocation = PathUtils.toDottedPath(new org.apache.hadoop.fs.Path(fileFormat.getLocation())); SqlQuery query = new SqlQuery(format("select * from table(%s.%s (%s)) limit 500", SqlUtils.quoteIdentifier(HomeFileSystemStoragePlugin.HOME_PLUGIN_NAME), fileLocation, fileFormat.toTableOptions()), securityContext.getUserPrincipal().getName()); JobUI job = new JobUI(jobsService.submitJob(JobRequest.newBuilder() .setSqlQuery(query) .setQueryType(QueryType.UI_INITIAL_PREVIEW) .build(), NoOpJobStatusListener.INSTANCE)); return job.getData().truncate(500); }
@POST @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) public JobDataFragment query(CreateFromSQL sql) { SqlQuery query = new SqlQuery(sql.getSql(), sql.getContext(), securityContext); // Pagination is not supported in this API, so we need to truncate the results to 500 records return new JobUI(jobs.submitJob(JobRequest.newBuilder() .setSqlQuery(query) .setQueryType(QueryType.REST) .build(), NoOpJobStatusListener.INSTANCE)).getData().truncate(500); }
// Legacy preview entry point for physical datasets: wraps the table name and
// format options into "select * from table(<name>(<options>))" and runs it as a
// UI_INITIAL_PREVIEW job. Results are truncated to 500 rows because this path
// has no pagination.
// NOTE(review): 'table' is interpolated into the SQL verbatim — assumes callers
// pass a trusted, already-quoted identifier; confirm before reusing elsewhere.
@Deprecated public JobDataFragment previewPhysicalDataset(String table, FileFormat formatOptions) { SqlQuery query = new SqlQuery(format("select * from table(%s (%s))", table, formatOptions.toTableOptions()), null, context.getUserPrincipal().getName()); // We still need to truncate the results to 500 as the preview physical datasets doesn't support pagination yet return new JobUI(jobsService.submitJob(JobRequest.newBuilder() .setSqlQuery(query) .setQueryType(QueryType.UI_INITIAL_PREVIEW) .build(), NoOpJobStatusListener.INSTANCE)).getData().truncate(500); } }
// Fire-and-forget REST endpoint: submits the caller's SQL as a REST-typed job
// and immediately returns the new job's id (wrapped in QueryDetails) without
// waiting for the query to complete or fetching any data.
@POST public QueryDetails runQuery(CreateFromSQL sql) { SqlQuery query = new SqlQuery(sql.getSql(), sql.getContext(), securityContext); Job job = jobs.submitJob(JobRequest.newBuilder() .setSqlQuery(query) .setQueryType(QueryType.REST) .build(), NoOpJobStatusListener.INSTANCE); return new QueryDetails(job.getJobId().getId()); } }
/** Runs the given SQL under the default user's home context and returns the full job data. */
private JobData runExternalQuery(String sql) {
  final SqlQuery externalQuery =
      new SqlQuery(sql, Arrays.asList("@" + DEFAULT_USER_NAME), DEFAULT_USER_NAME);
  final JobRequest request = JobRequest.newBuilder()
      .setSqlQuery(externalQuery)
      .build();
  return new JobUI(jobsService.submitJob(request, NoOpJobStatusListener.INSTANCE)).getData();
}
/** Runs the given SQL against the "cp" context and returns at most 500 rows. */
private JobDataFragment runQuery(String sql) {
  final SqlQuery cpQuery = new SqlQuery(sql, Collections.singletonList("cp"), DEFAULT_USERNAME);
  final JobRequest request = JobRequest.newBuilder()
      .setSqlQuery(cpQuery)
      .build();
  final JobData allData = new JobUI(jobs.submitJob(request, NoOpJobStatusListener.INSTANCE)).getData();
  return allData.truncate(500);
}
/**
 * Sets a Dremio system option via an internal ALTER SYSTEM job and waits for it
 * to finish so the option is in effect when this method returns.
 *
 * @param optionName  option to set (quoted in the SQL to survive reserved words)
 * @param optionValue value, spliced into the statement verbatim
 */
protected void setSystemOption(String optionName, String optionValue) {
  final String alterSql = String.format("ALTER SYSTEM SET \"%s\"=%s", optionName, optionValue);
  final JobRequest request = JobRequest.newBuilder()
      .setSqlQuery(new SqlQuery(alterSql, DEFAULT_USERNAME))
      .setQueryType(QueryType.UI_INTERNAL_RUN)
      .setDatasetPath(DatasetPath.NONE.toNamespaceKey())
      .build();
  final Job alterJob = getJobsService().submitJob(request, NoOpJobStatusListener.INSTANCE);
  // Block until the statement has actually executed.
  alterJob.getData().loadIfNecessary();
}
/**
 * Selects everything from the named home file and asserts the expected shape.
 *
 * @param name    file name
 * @param rows    expected row count
 * @param columns expected column count
 * @param parent  containing folder, or null for the default user's home root
 */
public static void runQuery(String name, int rows, int columns, FolderPath parent) {
  // Resolve the target path: directly under the home root, or inside the parent folder.
  final FilePath target;
  if (parent == null) {
    target = new FilePath(ImmutableList.of(HomeName.getUserHomePath(DEFAULT_USER_NAME).getName(), name));
  } else {
    final List<String> segments = Lists.newArrayList(parent.toPathList());
    segments.add(name);
    target = new FilePath(segments);
  }
  final JobsService jobsService = l(JobsService.class);
  final JobUI submitted = new JobUI(jobsService.submitJob(JobRequest.newBuilder()
      .setSqlQuery(new SqlQuery(format("select * from %s", target.toPathString()), DEFAULT_USER_NAME))
      .build(), NoOpJobStatusListener.INSTANCE));
  // Fetch one extra row so an over-sized result is caught by the count assertion.
  final JobDataFragment data = submitted.getData().truncate(rows + 1);
  assertEquals(rows, data.getReturnedRowCount());
  assertEquals(columns, data.getColumns().size());
}
@Test public void testDsg2Internal() { Job job = jobsService.submitJob(JobRequest.newBuilder() .setSqlQuery(new SqlQuery("select * from DG.dsg2", SampleDataPopulator.DEFAULT_USER_NAME)) .setQueryType(QueryType.UI_INTERNAL_PREVIEW) .build(), NoOpJobStatusListener.INSTANCE); job.getData().loadIfNecessary(); // internal jobs don't get counted assertEquals((int) jobsCount.get(dsg2), jobsService.getJobsCount(dsg2.toNamespaceKey())); assertEquals((int) jobsCount.get(sample2), jobsService.getJobsCount(sample2.toNamespaceKey())); }
@Test public void testDsg1Unknown() { Job job = jobsService.submitJob(JobRequest.newBuilder() .setSqlQuery(new SqlQuery("select * from DG.dsg1", SampleDataPopulator.DEFAULT_USER_NAME)) .setQueryType(QueryType.UNKNOWN) .build(), NoOpJobStatusListener.INSTANCE); job.getData().loadIfNecessary(); // unkown jobs are not counted assertEquals((int)jobsCount.get(dsg1), jobsService.getJobsCount(dsg1.toNamespaceKey())); assertEquals((int) jobsCount.get(sample1), jobsService.getJobsCount(sample1.toNamespaceKey())); }
@Test public void testQueryOnParquetDirWithSingleFile() throws Exception {
  // A parquet directory holding a single file should expose 25 rows and 4 columns.
  final JobsService jobsService = l(JobsService.class);
  final JobUI submitted = new JobUI(jobsService.submitJob(JobRequest.newBuilder()
      .setSqlQuery(createQuery("/singlefile_parquet_dir"))
      .build(), NoOpJobStatusListener.INSTANCE));
  final JobDataFragment data = submitted.getData().truncate(500);
  assertEquals(25, data.getReturnedRowCount());
  assertEquals(4, data.getColumns().size());
}
@Test public void testQueryOnParquetDirWithMetadata() throws Exception { final JobsService jobsService = l(JobsService.class); JobUI job = new JobUI(jobsService.submitJob(JobRequest.newBuilder() .setSqlQuery(createQuery("/nation_ctas")) .build(), NoOpJobStatusListener.INSTANCE)); JobDataFragment jobData = job.getData().truncate(500); assertEquals(50, jobData.getReturnedRowCount()); // extra column for "dir" (t1 and t2 are directories under nation_ctas) assertEquals(5, jobData.getColumns().size()); }
@Test public void testJobPhysicalDatasetParentTableau() throws Exception {
  populateInitialData();
  final JobsService jobsService = l(JobsService.class);
  final Job runJob = jobsService.submitJob(JobRequest.newBuilder()
      .setSqlQuery(new SqlQuery("select * from \"LocalFS1\".\"dac-sample1.json\"", USERNAME))
      .setQueryType(QueryType.UI_RUN)
      .build(), NoOpJobStatusListener.INSTANCE);
  runJob.getData().loadIfNecessary();
  // The physical dataset should be recorded as the job's single parent.
  assertEquals(1, runJob.getJobAttempt().getInfo().getParentsList().size());
  assertEquals(Arrays.asList("LocalFS1", "dac-sample1.json"),
      runJob.getJobAttempt().getInfo().getParentsList().get(0).getDatasetPathList());
  // Fetching the job over REST must not disturb the recorded parent info.
  @SuppressWarnings("unused")
  JobUI fetched = expectSuccess(getBuilder(getAPIv2().path("job/" + runJob.getJobId().getId())).buildGet(), JobUI.class);
  assertEquals(1, runJob.getJobAttempt().getInfo().getParentsList().size());
  assertEquals(Arrays.asList("LocalFS1", "dac-sample1.json"),
      runJob.getJobAttempt().getInfo().getParentsList().get(0).getDatasetPathList());
}
// Queries a single JSON file through the dachdfs_test source and checks the
// result shape: 3 rows and a 2-field schema (per the fixture users.json).
@Test public void testQueryOnFile() throws Exception { final JobsService jobService = l(JobsService.class); Job job = jobService.submitJob(JobRequest.newBuilder() .setSqlQuery(new SqlQuery("SELECT * FROM dachdfs_test.dir1.json.\"users.json\"", SampleDataPopulator.DEFAULT_USER_NAME)) .build(), NoOpJobStatusListener.INSTANCE); JobDataFragment jobData = job.getData().truncate(500); assertEquals(3, jobData.getReturnedRowCount()); assertEquals(2, jobData.getSchema().getFieldCount()); } }
@Test public void testQueryTinyAcqWithHeader() throws Exception {
  final JobsService jobsService = l(JobsService.class);
  // Register a pipe-delimited text format that treats the first line as a header.
  final TextFileConfig fileConfig = new TextFileConfig();
  fileConfig.setFieldDelimiter("|");
  fileConfig.setLineDelimiter("\n");
  fileConfig.setName("tinyacq.txt");
  fileConfig.setExtractHeader(true);
  final String fileUrlPath = getUrlPath("/datasets/tinyacq.txt");
  expectSuccess(getBuilder(getAPIv2().path("/source/dacfs_test/file_format/" + fileUrlPath))
      .buildPut(Entity.json(fileConfig)));
  // Query the file and verify the header produced 23 columns and 500 rows survive truncation.
  final JobUI submitted = new JobUI(jobsService.submitJob(JobRequest.newBuilder()
      .setSqlQuery(createQuery("/datasets/tinyacq.txt"))
      .build(), NoOpJobStatusListener.INSTANCE));
  final JobDataFragment data = submitted.getData().truncate(500);
  assertEquals(23, data.getColumns().size());
  assertEquals(500, data.getReturnedRowCount());
}
/**
 * Runs the given SQL as an internal UI job with no backing dataset, waits for
 * completion, then returns the profile of the first (0th) attempt.
 *
 * @param query SQL to execute as {@code DEFAULT_USERNAME}
 * @return the query profile of attempt 0
 * @throws JobNotFoundException if the job cannot be looked up for its profile
 */
private static QueryProfile getQueryProfile(final String query) throws JobNotFoundException {
  final Job job = getJobsService().submitJob(JobRequest.newBuilder()
      .setSqlQuery(new SqlQuery(query, DEFAULT_USERNAME))
      .setQueryType(QueryType.UI_INTERNAL_RUN)
      .setDatasetPath(DatasetPath.NONE.toNamespaceKey())
      .setDatasetVersion(DatasetVersion.NONE)
      // Use the shared no-op listener instance, consistent with the rest of the file,
      // instead of allocating a fresh listener per call.
      .build(), NoOpJobStatusListener.INSTANCE);
  job.getData().loadIfNecessary();
  return getJobsService().getProfile(job.getJobId(), 0);
}
@Test public void testDsg2UI() {
  // A UI_RUN over DG.dsg2 must increment the job count of the dataset and its parent.
  final Job uiRunJob = jobsService.submitJob(JobRequest.newBuilder()
      .setSqlQuery(new SqlQuery("select * from DG.dsg2", SampleDataPopulator.DEFAULT_USER_NAME))
      .setQueryType(QueryType.UI_RUN)
      .build(), NoOpJobStatusListener.INSTANCE);
  uiRunJob.getData().loadIfNecessary();
  assertEquals(inc(dsg2), jobsService.getJobsCount(dsg2.toNamespaceKey()));
  assertEquals(inc(sample2), jobsService.getJobsCount(sample2.toNamespaceKey()));
}
@Test public void testDsg1External() {
  // A UI_RUN over DG.dsg1 must increment the job count of the dataset and its parent.
  final Job uiRunJob = jobsService.submitJob(JobRequest.newBuilder()
      .setSqlQuery(new SqlQuery("select * from DG.dsg1", SampleDataPopulator.DEFAULT_USER_NAME))
      .setQueryType(QueryType.UI_RUN)
      .build(), NoOpJobStatusListener.INSTANCE);
  uiRunJob.getData().loadIfNecessary();
  assertEquals(inc(dsg1), jobsService.getJobsCount(dsg1.toNamespaceKey()));
  assertEquals(inc(sample1), jobsService.getJobsCount(sample1.toNamespaceKey()));
}
private DatasetUI setupIteratorTests(String datasetName) throws Exception{ TestSpacesStoragePlugin.setup(getCurrentDremioDaemon()); DatasetUI dataset = getDataset(new DatasetPath(datasetName)); // run dataset twice. We do a run and a preview since subsequent previews won't actually rerun... getPreview(dataset); l(JobsService.class).submitJob(JobRequest.newBuilder() .setSqlQuery(getQueryFromConfig(dataset)) .setQueryType(QueryType.UI_RUN) .setDatasetPath(getDatasetPath(dataset).toNamespaceKey()) .setDatasetVersion(dataset.getDatasetVersion()) .build(), NoOpJobStatusListener.INSTANCE) .getData() .loadIfNecessary(); return dataset; }