/**
 * Derives a {@link VirtualDatasetState} from the query and its collected metadata.
 * If SQL extraction fails, a fallback state carrying the raw SQL is produced instead,
 * so callers always receive a usable (if degraded) state.
 */
public VirtualDatasetState instanceExtract(SqlQuery query, QueryMetadata metadata) {
  final RelDataType rowType = metadata.getRowType();

  VirtualDatasetState state;
  try {
    state = extract(query.getSql(), metadata.getSqlNode().get(), rowType);
  } catch (RuntimeException e) {
    // Extraction blew up on this SQL; keep going with a minimal fallback state.
    state = fallback("Error parsing", null, query.getSql(), e);
  }

  // No columns came out of extraction — fill them in from the row type instead.
  final boolean noColumns = state.getColumnsList() == null || state.getColumnsList().isEmpty();
  if (noColumns) {
    populateSemanticFields(rowType, state);
  }

  final Optional<List<String>> referredTables = metadata.getReferredTables();
  if (referredTables.isPresent()) {
    state.setReferredTablesList(referredTables.get());
  }
  state.setContextList(query.getContext());
  return state;
}
/**
 * Returns the initial preview of this dataset (works for both physical and
 * virtual datasets) by submitting a {@code select *} preview job.
 *
 * @param limit maximum number of records to include in the initial response (default 50)
 * @return the truncated initial data preview
 */
@GET
@Path("preview")
@Produces(APPLICATION_JSON)
public InitialDataPreviewResponse preview(@QueryParam("limit") @DefaultValue("50") Integer limit) {
  final String sql = String.format("select * from %s", datasetPath.toPathString());
  final SqlQuery query = new SqlQuery(sql, securityContext.getUserPrincipal().getName());

  final JobRequest request = JobRequest.newBuilder()
      .setSqlQuery(query)
      .setQueryType(QueryType.UI_PREVIEW)
      .build();
  final JobUI job = new JobUI(jobsService.submitJob(request, NoOpJobStatusListener.INSTANCE));

  try {
    return InitialDataPreviewResponse.of(job.getData().truncate(limit));
  } catch (UserException e) {
    // Translate execution failures into the API's invalid-query shape; there is
    // no dataset context for this ad-hoc preview query, hence the empty list.
    throw DatasetTool.toInvalidQueryException(e, query.getSql(), ImmutableList.<String>of());
  }
}
@Override protected QueryMetadata getMetadata(SqlQuery query) { this.job = executor.runQueryWithListener(query, queryType, path, newVersion, collector); try { this.metadata = collector.getMetadata(); } catch (UserException e) { // If the original query fails, let the user knows about throw DatasetTool.toInvalidQueryException(e, query.getSql(), query.getContext(), null); } // If above QueryExecutor finds the query in the job store, QueryMetadata will never be set. // In this case, regenerate QueryMetadata below. if (this.metadata == null) { this.metadata = QueryParser.extract(query, context); } return metadata; }
/**
 * Mock job submission: the first call (the dataset preview) yields a job whose
 * results land in {@code jobResults.previewJob}; the second call (the count
 * query over those results) is checked against the expected filter and returns
 * the expected selection count.
 */
@Override
public JobUI answer(InvocationOnMock invocation) throws Throwable {
  final String sql = invocation.getArgumentAt(0, SqlQuery.class).getSql();
  final Job job = mock(Job.class);
  final JobData jobData = mock(JobData.class);
  when(job.getData()).thenReturn(jobData);

  if ("SELECT * FROM dataset".equals(sql)) {
    // Stage 1: preview job — point the count query at its results table.
    when(jobData.getJobResultsTable()).thenReturn("jobResults.previewJob");
  } else if (sql.contains("jobResults.previewJob")) {
    // Stage 2: count query — must carry the expected filter (or none at all).
    if (expFilter == null) {
      assertFalse(sql, sql.contains("WHERE"));
    } else {
      assertTrue(sql, sql.contains(expFilter));
    }
    final JobDataFragment fragment = mock(JobDataFragment.class);
    when(jobData.truncate(1)).thenReturn(fragment);
    final BatchSchema schema = BatchSchema.newBuilder()
        .addField(new Field("dremio_selection_count", true, new ArrowType.Int(64, true), null))
        .build();
    when(fragment.getSchema()).thenReturn(schema);
    when(fragment.extractValue("dremio_selection_count", 0)).thenReturn(expCount);
  }
  return new JobUI(job);
}
}
/**
 * Builds the {@link JobInfo} record for this job, tagged with the owning space,
 * and attaches materialization or download details when the request type calls
 * for them.
 */
JobInfo asJobInfo(final JobId jobId, final String inSpace) {
  final JobInfo info = new JobInfo(jobId, sqlQuery.getSql(), datasetVersion, queryType)
      .setSpace(inSpace)
      .setUser(username)
      .setStartTime(System.currentTimeMillis())
      .setDatasetPathList(datasetPathComponents)
      .setResultMetadataList(new ArrayList<ArrowFileMetadata>())
      .setContextList(sqlQuery.getContext());

  if (requestType == RequestType.MATERIALIZATION) {
    info.setMaterializationFor(materializationSummary);
  }
  if (requestType == RequestType.DOWNLOAD) {
    final DownloadInfo download = new DownloadInfo()
        .setDownloadId(downloadId)
        .setFileName(fileName);
    info.setDownloadInfo(download);
  }
  return info;
}
final SqlNode sqlNode = parseQueryInternal(converter, query.getSql()); final SqlHandlerConfig config = new SqlHandlerConfig(context, converter, observer, null); NormalHandler handler = new NormalHandler(); PhysicalPlan pp = handler.getPlan(config, query.getSql(), sqlNode); builder.addBatchSchema(pp.getRoot().getSchema(sabotContext.getFunctionImplementationRegistry())); return builder.build(); throw SqlExceptionHelper.validationError(query.getSql(), e) .build(logger); } catch (AccessControlException e) {
for (Job job : jobsForDataset) { if (job.getJobAttempt().getInfo().getQueryType() == queryType && query.getSql().equals(job.getJobAttempt().getInfo().getSql()) && job.getJobAttempt().getState() == JobState.COMPLETED && job.hasResults()) {
/**
 * Creates a new untitled dataset derived from {@code from} and immediately runs
 * its query, persisting the resulting metadata into the new dataset.
 *
 * @param from    the source the untitled dataset is created from
 * @param version the version to assign the new dataset
 * @param context the SQL context for the query
 * @return the initial run response for the new dataset
 */
InitialRunResponse newUntitledAndRun(FromBase from, DatasetVersion version, List<String> context)
    throws DatasetNotFoundException, NamespaceException, DatasetVersionNotFoundException,
        InterruptedException {
  final VirtualDatasetUI newDataset = createNewUntitledMetadataOnly(from, version, context);
  final SqlQuery query =
      new SqlQuery(newDataset.getSql(), newDataset.getState().getContextList(), username());

  // Record the dataset's provenance so its history shows what it was created from.
  final Transform provenance = new Transform(TransformType.createFromParent)
      .setTransformCreateFromParent(new TransformCreateFromParent(from.wrap()));
  newDataset.setLastTransform(provenance);

  final MetadataCollectingJobStatusListener listener = new MetadataCollectingJobStatusListener();
  try {
    final JobUI job =
        executor.runQueryWithListener(query, QueryType.UI_RUN, TMP_DATASET_PATH, version, listener);
    final QueryMetadata queryMetadata = listener.getMetadata();
    applyQueryMetaToDatasetAndSave(queryMetadata, newDataset, query, from);
    return createRunResponse(newDataset, job, newDataset.getVersion());
  } catch (UserException e) {
    throw toInvalidQueryException(e, query.getSql(), context);
  }
}
if (isPrepare) { queryRequest = CreatePreparedStatementReq.newBuilder() .setSqlQuery(jobRequest.getSqlQuery().getSql()) .build(); } else { .setType(UserBitShared.QueryType.SQL) .setSource(SubmissionSource.LOCAL) .setPlan(jobRequest.getSqlQuery().getSql()) .setPriority(QueryPriority.newBuilder() .setWorkloadClass(workloadClass)