/**
 * Creates a copy of this query with the SQL text replaced.
 * The context and username of this instance are carried over unchanged.
 *
 * @param sql the new SQL statement text
 * @return a new {@link SqlQuery} holding the given SQL plus this query's context and username
 */
public SqlQuery cloneWithNewSql(String sql) {
  final SqlQuery copy = new SqlQuery(sql, context, username);
  return copy;
}
/**
 * Returns the initial preview of this dataset's data. The dataset may be either a
 * physical or a virtual dataset.
 *
 * @param limit maximum number of records to include in the initial response (default 50)
 * @return the initial data preview response for the dataset
 */
@GET
@Path("preview")
@Produces(APPLICATION_JSON)
public InitialDataPreviewResponse preview(@QueryParam("limit") @DefaultValue("50") Integer limit) {
  // Select everything; the result is truncated to "limit" rows below.
  final String sql = String.format("select * from %s", datasetPath.toPathString());
  final SqlQuery query = new SqlQuery(sql, securityContext.getUserPrincipal().getName());
  // Submit as a UI preview job; no status listener is needed here.
  final JobUI job = new JobUI(jobsService.submitJob(
      JobRequest.newBuilder()
          .setSqlQuery(query)
          .setQueryType(QueryType.UI_PREVIEW)
          .build(),
      NoOpJobStatusListener.INSTANCE));
  try {
    return InitialDataPreviewResponse.of(job.getData().truncate(limit));
  } catch (UserException e) {
    // Surface engine failures as a structured invalid-query error (no SQL context here).
    throw DatasetTool.toInvalidQueryException(e, query.getSql(), ImmutableList.<String> of());
  }
}
@Override protected QueryMetadata getMetadata(SqlQuery query) { this.job = executor.runQueryWithListener(query, queryType, path, newVersion, collector); try { this.metadata = collector.getMetadata(); } catch (UserException e) { // If the original query fails, let the user knows about throw DatasetTool.toInvalidQueryException(e, query.getSql(), query.getContext(), null); } // If above QueryExecutor finds the query in the job store, QueryMetadata will never be set. // In this case, regenerate QueryMetadata below. if (this.metadata == null) { this.metadata = QueryParser.extract(query, context); } return metadata; }
// NOTE(review): partial view — the statements below are non-contiguous pieces of a larger
// try/catch method (plan the query, capture its batch schema into QueryMetadata) followed
// by stray catch-clause remnants; verify boundaries against the full file.
// Builds QueryMetadata via the namespace service scoped to the query's user, plans the SQL
// with NormalHandler, and records the plan root's batch schema before building.
QueryMetadata.Builder builder = QueryMetadata.builder(sabotContext.getNamespaceService(query.getUsername())); AttemptObserver observer = new MetadataCollectingObserver(builder); final SqlNode sqlNode = parseQueryInternal(converter, query.getSql()); final SqlHandlerConfig config = new SqlHandlerConfig(context, converter, observer, null); NormalHandler handler = new NormalHandler(); PhysicalPlan pp = handler.getPlan(config, query.getSql(), sqlNode); builder.addBatchSchema(pp.getRoot().getSchema(sabotContext.getFunctionImplementationRegistry())); return builder.build(); throw SqlExceptionHelper.validationError(query.getSql(), e) .build(logger); } catch (AccessControlException e) {
// Mockito Answer used by tests: fabricates a JobUI whose mocked JobData reacts to the SQL
// passed in. The initial "SELECT * FROM dataset" query is given a known job-results table;
// follow-up queries that read from that table are checked against expFilter (from the
// enclosing test — a WHERE clause must be present iff expFilter is set) and return a mocked
// fragment exposing a single BIGINT column "dremio_selection_count" whose value is expCount.
// NOTE(review): the trailing brace closes an anonymous class whose header is outside this view.
@Override public JobUI answer(InvocationOnMock invocation) throws Throwable { String query = invocation.getArgumentAt(0, SqlQuery.class).getSql(); Job job = mock(Job.class); JobData jobData = mock(JobData.class); when(job.getData()).thenReturn(jobData); if ("SELECT * FROM dataset".equals(query)) { when(jobData.getJobResultsTable()).thenReturn("jobResults.previewJob"); } else if (query.contains("jobResults.previewJob")) { if (expFilter != null) { assertTrue(query, query.contains(expFilter)); } else { assertFalse(query, query.contains("WHERE")); } JobDataFragment fragment = mock(JobDataFragment.class); when(jobData.truncate(1)).thenReturn(fragment); when(fragment.getSchema()).thenReturn( BatchSchema.newBuilder() .addField(new Field("dremio_selection_count", true, new ArrowType.Int(64, true), null)) .build() ); when(fragment.extractValue("dremio_selection_count", 0)).thenReturn(expCount); } return new JobUI(job); } }
// Prefer the explicitly supplied username; fall back to the one carried by the SQL query.
username = MoreObjects.firstNonNull(username, sqlQuery.getUsername());
/**
 * Builds a fresh {@link SqlConverter} for the given query, resolving the catalog's
 * default schema from the query's context when one is present.
 *
 * @throws UserException (validation) when the query's schema path cannot be resolved
 */
private SqlConverter getNewConverter(QueryContext context, SqlQuery query, AttemptObserver observerForSubstitution) {
  final List<String> sqlContext = query.getContext();
  Catalog catalog = context.getCatalog();
  if (sqlContext != null) {
    final NamespaceKey defaultSchema = new NamespaceKey(sqlContext);
    try {
      catalog = catalog.resolveCatalog(defaultSchema);
    } catch (Exception e) {
      // Boundary: fold any resolution failure into a user-facing validation error.
      throw UserException.validationError(e)
          .message("Unable to resolve schema path [%s]. Failure resolving [%s] portion of path.",
              sqlContext, defaultSchema)
          .build(logger);
    }
  }
  return new SqlConverter(
      context.getPlannerSettings(),
      context.getOperatorTable(),
      context,
      context.getMaterializationProvider(),
      context.getFunctionRegistry(),
      context.getSession(),
      observerForSubstitution,
      catalog,
      context.getSubstitutionProviderFactory(),
      context.getConfig(),
      context.getScanResult());
}
// NOTE(review): mid-call continuation fragment — runs the rewritten dataset SQL as an
// internal UI job and keeps only the first row of the completed result.
datasetQuery.cloneWithNewSql(sb.toString()), QueryType.UI_INTERNAL_RUN, datasetPath, version).getData(); final JobDataFragment dataFragment = completeJobData.truncate(1);
// NOTE(review): fragment of a completed-job lookup (ternary continuation + loop header; the
// loop body runs past this view). Searches recent jobs for the dataset — optionally pinned
// to a version — and matches ones whose query type and SQL are identical, that COMPLETED,
// and whose results are still available.
jobsService.getJobsForDataset(datasetPath.toNamespaceKey(), null, query.getUsername(), MAX_JOBS_TO_SEARCH) : jobsService.getJobsForDataset(datasetPath.toNamespaceKey(), version, query.getUsername(), MAX_JOBS_TO_SEARCH); for (Job job : jobsForDataset) { if (job.getJobAttempt().getInfo().getQueryType() == queryType && query.getSql().equals(job.getJobAttempt().getInfo().getSql()) && job.getJobAttempt().getState() == JobState.COMPLETED && job.hasResults()) {
/**
 * Creates a lightweight {@link QueryContext} for parsing, with a session bound to the
 * query's user and default user properties. Query ids share a fixed major part and an
 * incrementing minor part.
 */
private QueryContext newQueryContext(SqlQuery query) {
  try (TimedBlock b = time("initParser")) {
    final QueryId queryId = QueryId.newBuilder()
        .setPart1(ID_MAJOR)
        .setPart2(ID_MINOR.incrementAndGet())
        .build();
    final UserCredentials credentials = UserCredentials.newBuilder()
        .setUserName(query.getUsername())
        .build();
    final UserSession session = UserSession.Builder.newBuilder()
        .withCredentials(credentials)
        .withUserProperties(UserProperties.getDefaultInstance())
        .withOptionManager(sabotContext.getOptionManager())
        .build();
    return new QueryContext(session, sabotContext, queryId);
  }
}
// Runs the rewritten SQL as an internal UI job against this dataset version and takes its data.
JobData completeJobData = executor.runQuery(datasetQuery.cloneWithNewSql(sb.toString()), QueryType.UI_INTERNAL_RUN, datasetPath, version).getData();
/**
 * Wraps the given SQL text in a {@link SqlQuery} attributed to the default username.
 *
 * @param sql the SQL statement text
 * @return a query for the given SQL under {@code DEFAULT_USERNAME}
 */
protected SqlQuery getQueryFromSQL(String sql) {
  final SqlQuery query = new SqlQuery(sql, DEFAULT_USERNAME);
  return query;
}
/**
 * Creates a new untitled dataset from the given source and immediately runs it.
 * The dataset's metadata is created first, the query is executed as a UI run, and the
 * collected query metadata is applied back to the dataset before it is saved.
 *
 * @throws InterruptedException if the run is interrupted
 */
InitialRunResponse newUntitledAndRun(FromBase from, DatasetVersion version, List<String> context)
    throws DatasetNotFoundException, NamespaceException, DatasetVersionNotFoundException, InterruptedException {
  // Create the untitled dataset (metadata only) and record how it was derived.
  final VirtualDatasetUI newDataset = createNewUntitledMetadataOnly(from, version, context);
  final SqlQuery sqlQuery =
      new SqlQuery(newDataset.getSql(), newDataset.getState().getContextList(), username());
  final Transform createTransform = new Transform(TransformType.createFromParent)
      .setTransformCreateFromParent(new TransformCreateFromParent(from.wrap()));
  newDataset.setLastTransform(createTransform);

  // Run the query; the listener collects its metadata as the job progresses.
  final MetadataCollectingJobStatusListener metadataListener = new MetadataCollectingJobStatusListener();
  try {
    final JobUI job =
        executor.runQueryWithListener(sqlQuery, QueryType.UI_RUN, TMP_DATASET_PATH, version, metadataListener);
    applyQueryMetaToDatasetAndSave(metadataListener.getMetadata(), newDataset, sqlQuery, from);
    return createRunResponse(newDataset, job, newDataset.getVersion());
  } catch (UserException e) {
    // Translate engine failures into an API-level invalid-query error.
    throw toInvalidQueryException(e, sqlQuery.getSql(), context);
  }
}
/**
 * Derives the virtual dataset state for a query from its planning metadata.
 * Falls back to a best-effort state when the SQL cannot be parsed, fills in
 * semantic column info when the parse produced none, and carries over the
 * referred tables and the query's context.
 */
public VirtualDatasetState instanceExtract(SqlQuery query, QueryMetadata metadata) {
  final RelDataType rowType = metadata.getRowType();
  VirtualDatasetState state;
  try {
    state = extract(query.getSql(), metadata.getSqlNode().get(), rowType);
  } catch (RuntimeException e) {
    // Parsing problems degrade to a fallback state instead of failing outright.
    state = fallback("Error parsing", null, query.getSql(), e);
  }
  final boolean noColumns = state.getColumnsList() == null || state.getColumnsList().isEmpty();
  if (noColumns) {
    populateSemanticFields(rowType, state);
  }
  final Optional<List<String>> referredTables = metadata.getReferredTables();
  if (referredTables.isPresent()) {
    state.setReferredTablesList(referredTables.get());
  }
  state.setContextList(query.getContext());
  return state;
}
// NOTE(review): fragment — both the preliminary-values SQL and the histogram SQL read from
// the preview job's results table (aliased dremio_values_table); each is wrapped in its own
// SqlQuery cloned from the dataset query so context/username carry over.
prelimValuesQueryBuilder.append(format(" FROM %s AS dremio_values_table\n", datasetPreviewJob.getData().getJobResultsTable())); final SqlQuery prelimQuery = datasetQuery.cloneWithNewSql(prelimValuesQueryBuilder.toString()); final SqlQuery hgQuery = datasetQuery.cloneWithNewSql(hgQueryBuilder.toString());
// NOTE(review): fragment — builds the log-search query. The end timestamp is padded by
// POST_TIME_BUFFER_MS, LOG_QUERY is filled with the quoted submission id, the time window,
// and a LIKE pattern surrounding the id; the query is scoped to the logs storage plugin
// context and attributed to userId. Variables end/id/userId are declared outside this view.
final String endTime = DateUtility.formatTimeStampMilli.print(end + POST_TIME_BUFFER_MS); final SqlQuery query = new SqlQuery( String.format(LOG_QUERY, SqlUtils.quoteIdentifier(submissionId), startTime, endTime, "%" + id.getId() + "%"), Arrays.asList(LOGS_STORAGE_PLUGIN), userId);
// NOTE(review): non-contiguous fragments — a dataset query is built from the new dataset's
// SQL and saved context, and failures are converted into invalid-query exceptions carrying
// the parent dataset summary. Verify the real statement boundaries against the full file.
final SqlQuery query = new SqlQuery(newDataset.getSql(), newDataset.getState().getContextList(), username()); toInvalidQueryException((UserException) ex, query.getSql(), context, parentSummary); parentDataset, query.getSql(), context, newDataset(newDataset, null).getDatasetType(), parentSummary), ex);
/**
 * Builds the persisted {@link JobInfo} for this job, stamping the current wall-clock time
 * as the start time and attaching materialization or download details when the request
 * type calls for them.
 *
 * @param jobId   the id assigned to the job
 * @param inSpace the space the job's dataset lives in
 */
JobInfo asJobInfo(final JobId jobId, final String inSpace) {
  final JobInfo jobInfo = new JobInfo(jobId, sqlQuery.getSql(), datasetVersion, queryType);
  jobInfo.setSpace(inSpace);
  jobInfo.setUser(username);
  jobInfo.setStartTime(System.currentTimeMillis());
  jobInfo.setDatasetPathList(datasetPathComponents);
  jobInfo.setResultMetadataList(new ArrayList<ArrowFileMetadata>());
  jobInfo.setContextList(sqlQuery.getContext());
  if (requestType == RequestType.MATERIALIZATION) {
    jobInfo.setMaterializationFor(materializationSummary);
  }
  if (requestType == RequestType.DOWNLOAD) {
    final DownloadInfo downloadInfo = new DownloadInfo()
        .setDownloadId(downloadId)
        .setFileName(fileName);
    jobInfo.setDownloadInfo(downloadInfo);
  }
  return jobInfo;
}
// NOTE(review): fragment — runs a count query (keeping only the first result row) and an
// example-generation query as internal UI jobs, then derives per-rule card examples from
// the example job's output.
JobUI countJob = executor.runQuery(datasetSql.cloneWithNewSql(countQuery), QueryType.UI_INTERNAL_RUN, datasetPath, version); JobDataFragment countJobData = countJob.getData().truncate(1); JobUI exGenQueryJob = executor.runQuery(datasetSql.cloneWithNewSql(exGenQuery), QueryType.UI_INTERNAL_RUN, datasetPath, version); List<List<CardExample>> cardsExamples = getExamples(exGenQueryJob, transformRuleWrappers);
/**
 * Builds a "select * from ..." query over the schema path derived from the given file,
 * attributed to the default test username.
 *
 * @param file the file whose schema path is queried
 * @throws IOException if the schema path cannot be derived
 */
private static SqlQuery createQuery(String file) throws IOException {
  final String sql = format("select * from %s", getSchemaPath(file));
  return new SqlQuery(sql, DEFAULT_USERNAME);
}