/**
 * Builds the fixed result set describing the supported client-info properties,
 * deriving one FieldSchema per (name, type) column pair.
 */
public ClientInfoPropertiesResultSet() throws SQLException {
  super(Arrays.asList(COLUMNS), Arrays.asList(COLUMN_TYPES), null);
  // Pair each column name with its declared type to form the table schema.
  List<FieldSchema> fields = new ArrayList<>(COLUMNS.length);
  int idx = 0;
  for (String column : COLUMNS) {
    fields.add(new FieldSchema(column, COLUMN_TYPES[idx], null));
    idx++;
  }
  setSchema(new TableSchema(fields));
}
@Override public TableSchema getResultSetSchema() throws HiveSQLException { // Since compilation is always a blocking RPC call, and schema is ready after compilation, // we can return when are in the RUNNING state. assertState(Arrays.asList(OperationState.RUNNING, OperationState.FINISHED)); if (resultSchema == null) { resultSchema = new TableSchema(driver.getSchema()); } return resultSchema; }
// Publish the schema on this operation; assumes `schema` is non-null here — TODO confirm at the caller.
setSchema(new TableSchema(schema));
// Record whether this operation produced a result set, caching its schema when present.
if (schema != null) { setHasResultSet(true); resultSchema = new TableSchema(schema); } else { setHasResultSet(false); // No result columns: fall back to an empty schema rather than leaving it null.
resultSchema = new TableSchema();
/**
 * Fetches the result-set schema for the given operation over the Thrift CLI
 * service, converting the wire response into a TableSchema.
 */
@Override
public TableSchema getResultSetMetadata(OperationHandle opHandle) throws HiveSQLException {
  try {
    TGetResultSetMetadataResp resp =
        cliService.GetResultSetMetadata(new TGetResultSetMetadataReq(opHandle.toTOperationHandle()));
    checkStatus(resp.getStatus());
    return new TableSchema(resp.getSchema());
  } catch (HiveSQLException e) {
    // Already the right type — propagate untouched.
    throw e;
  } catch (Exception e) {
    // Wrap transport/Thrift failures so callers see a single exception type.
    throw new HiveSQLException(e);
  }
}
public RowSet getOperationLogRowSet(OperationHandle opHandle, FetchOrientation orientation, long maxRows, HiveConf hConf) throws HiveSQLException { TableSchema tableSchema = new TableSchema(getLogSchema()); RowSet rowSet = RowSetFactory.create(tableSchema, getOperation(opHandle).getProtocolVersion(), false); if (hConf.getBoolVar(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED) == false) { LOG.warn("Try to get operation log when hive.server2.logging.operation.enabled is false, no log will be returned. "); return rowSet; } // get the OperationLog object from the operation OperationLog operationLog = getOperation(opHandle).getOperationLog(); if (operationLog == null) { throw new HiveSQLException("Couldn't find log associated with operation handle: " + opHandle); } // read logs List<String> logs; try { logs = operationLog.readOperationLog(isFetchFirst(orientation), maxRows); } catch (SQLException e) { throw new HiveSQLException(e.getMessage(), e.getCause()); } // convert logs to RowSet for (String log : logs) { rowSet.addRow(new String[] { log }); } return rowSet; }
// Full column schema for the table as reported by the metastore.
TableSchema schema = new TableSchema(metastoreClient.getSchema(dbName, table.getTableName()));
// Primary-key constraints for the same table; presumably may be empty for unconstrained tables — TODO confirm.
List<SQLPrimaryKey> primaryKeys = metastoreClient.getPrimaryKeys(new PrimaryKeysRequest(dbName, table.getTableName()));
// Build a single-column STRING schema and insert a new BigQuery table with it.
TableSchema schema = new TableSchema();
List<TableFieldSchema> tableFieldSchema = new ArrayList<TableFieldSchema>();
TableFieldSchema schemaEntry = new TableFieldSchema();
schemaEntry.setName(myFirstFieldName);
schemaEntry.setType("STRING");
tableFieldSchema.add(schemaEntry);
schema.setFields(tableFieldSchema);
// Attach the schema and the fully-qualified table reference (project/dataset/table).
Table table = new Table();
table.setSchema(schema);
TableReference tableRef = new TableReference();
tableRef.setDatasetId(DATASET_ID);
tableRef.setProjectId(PROJECT_ID);
tableRef.setTableId(tableId);
table.setTableReference(tableRef);
try {
  bigquery.tables().insert(PROJECT_ID, DATASET_ID, table).execute();
} catch (IOException e) {
  // NOTE(review): insert failures are silently swallowed here — callers get no signal
  // that the table was not created. At minimum log `e`; confirm whether this is intentional.
}
/**
 * Constructs the static client-info-properties result set, building its
 * TableSchema from the parallel COLUMNS / COLUMN_TYPES arrays.
 */
public ClientInfoPropertiesResultSet() throws SQLException {
  super(Arrays.asList(COLUMNS), Arrays.asList(COLUMN_TYPES), null);
  List<FieldSchema> columnSchemas = new ArrayList<>(COLUMNS.length);
  // COLUMNS and COLUMN_TYPES are index-aligned; walk them together.
  for (int col = 0; col < COLUMNS.length; col++) {
    FieldSchema fs = new FieldSchema(COLUMNS[col], COLUMN_TYPES[col], null);
    columnSchemas.add(fs);
  }
  setSchema(new TableSchema(columnSchemas));
}
@Override public TableSchema getResultSetSchema() throws HiveSQLException { // Since compilation is always a blocking RPC call, and schema is ready after compilation, // we can return when are in the RUNNING state. assertState(Arrays.asList(OperationState.RUNNING, OperationState.FINISHED)); if (resultSchema == null) { resultSchema = new TableSchema(driver.getSchema()); } return resultSchema; }
/**
 * Returns the result-set schema, caching it on first access.
 * Only valid once the operation has FINISHED.
 */
@Override
public TableSchema getResultSetSchema() throws HiveSQLException {
  assertState(OperationState.FINISHED);
  if (resultSchema != null) {
    return resultSchema;
  }
  resultSchema = new TableSchema(driver.getSchema());
  return resultSchema;
}
/**
 * Lazily materializes the result-set schema from the driver; requires the
 * operation to be in the FINISHED state.
 */
@Override
public TableSchema getResultSetSchema() throws HiveSQLException {
  assertState(OperationState.FINISHED);
  if (resultSchema == null) {
    // Cache the driver-provided schema so repeated calls are cheap.
    resultSchema = new TableSchema(driver.getSchema());
  }
  return resultSchema;
}
/**
 * Returns (and caches) this operation's result schema. Callers must wait
 * for the FINISHED state before invoking.
 */
@Override
public TableSchema getResultSetSchema() throws HiveSQLException {
  assertState(OperationState.FINISHED);
  TableSchema cached = resultSchema;
  if (cached == null) {
    cached = new TableSchema(driver.getSchema());
    resultSchema = cached;
  }
  return cached;
}
/**
 * Provides the schema of the finished operation's result set, building it
 * from the driver on first request and reusing it thereafter.
 */
@Override
public TableSchema getResultSetSchema() throws HiveSQLException {
  assertState(OperationState.FINISHED);
  if (resultSchema != null) {
    return resultSchema;
  }
  // First request: convert the driver schema into the CLI TableSchema form.
  resultSchema = new TableSchema(driver.getSchema());
  return resultSchema;
}
/**
 * Retrieves the result-set schema of an operation via the Thrift CLI service.
 */
@Override
public TableSchema getResultSetMetadata(OperationHandle opHandle) throws HiveSQLException {
  try {
    TGetResultSetMetadataReq request = new TGetResultSetMetadataReq(opHandle.toTOperationHandle());
    TGetResultSetMetadataResp response = cliService.GetResultSetMetadata(request);
    checkStatus(response.getStatus());
    return new TableSchema(response.getSchema());
  } catch (HiveSQLException e) {
    throw e;
  } catch (Exception e) {
    // Normalize any other failure into the CLI exception type.
    throw new HiveSQLException(e);
  }
}
/**
 * Asks the remote CLI service for the given operation's result schema and
 * converts the Thrift response into a TableSchema.
 */
@Override
public TableSchema getResultSetMetadata(OperationHandle opHandle) throws HiveSQLException {
  try {
    TGetResultSetMetadataResp resp =
        cliService.GetResultSetMetadata(new TGetResultSetMetadataReq(opHandle.toTOperationHandle()));
    checkStatus(resp.getStatus());
    return new TableSchema(resp.getSchema());
  } catch (HiveSQLException e) {
    throw e;
  } catch (Exception e) {
    throw new HiveSQLException(e);
  }
}
/**
 * Fetches result-set metadata for {@code opHandle} over Thrift, rethrowing
 * HiveSQLExceptions as-is and wrapping everything else.
 */
@Override
public TableSchema getResultSetMetadata(OperationHandle opHandle) throws HiveSQLException {
  TGetResultSetMetadataReq metadataReq = new TGetResultSetMetadataReq(opHandle.toTOperationHandle());
  try {
    TGetResultSetMetadataResp metadataResp = cliService.GetResultSetMetadata(metadataReq);
    checkStatus(metadataResp.getStatus());
    return new TableSchema(metadataResp.getSchema());
  } catch (HiveSQLException e) {
    throw e;
  } catch (Exception e) {
    throw new HiveSQLException(e);
  }
}
/**
 * Returns the schema of the operation's result set as reported by the
 * Thrift CLI service.
 */
@Override
public TableSchema getResultSetMetadata(OperationHandle opHandle) throws HiveSQLException {
  try {
    TGetResultSetMetadataReq req = new TGetResultSetMetadataReq(opHandle.toTOperationHandle());
    TGetResultSetMetadataResp resp = cliService.GetResultSetMetadata(req);
    // Surface server-side errors carried in the response status.
    checkStatus(resp.getStatus());
    return new TableSchema(resp.getSchema());
  } catch (HiveSQLException propagated) {
    throw propagated;
  } catch (Exception wrapped) {
    throw new HiveSQLException(wrapped);
  }
}
/**
 * Obtains the result-set schema for the supplied operation handle through
 * the remote CLI service.
 */
@Override
public TableSchema getResultSetMetadata(OperationHandle opHandle) throws HiveSQLException {
  final TGetResultSetMetadataReq req = new TGetResultSetMetadataReq(opHandle.toTOperationHandle());
  try {
    final TGetResultSetMetadataResp resp = cliService.GetResultSetMetadata(req);
    checkStatus(resp.getStatus());
    return new TableSchema(resp.getSchema());
  } catch (HiveSQLException e) {
    // Preserve the original CLI exception.
    throw e;
  } catch (Exception e) {
    throw new HiveSQLException(e);
  }
}
public RowSet getOperationLogRowSet(OperationHandle opHandle, FetchOrientation orientation, long maxRows) throws HiveSQLException { // get the OperationLog object from the operation OperationLog operationLog = getOperation(opHandle).getOperationLog(); if (operationLog == null) { throw new HiveSQLException("Couldn't find log associated with operation handle: " + opHandle); } // read logs List<String> logs; try { logs = operationLog.readOperationLog(isFetchFirst(orientation), maxRows); } catch (SQLException e) { throw new HiveSQLException(e.getMessage(), e.getCause()); } // convert logs to RowSet TableSchema tableSchema = new TableSchema(getLogSchema()); RowSet rowSet = RowSetFactory.create(tableSchema, getOperation(opHandle).getProtocolVersion()); for (String log : logs) { rowSet.addRow(new String[] {log}); } return rowSet; }