/**
 * Builds the fixed result set describing the supported client info properties.
 *
 * @throws SQLException if the underlying result set cannot be initialized
 */
public ClientInfoPropertiesResultSet() throws SQLException {
  super(Arrays.asList(COLUMNS), Arrays.asList(COLUMN_TYPES), null);
  // Build one FieldSchema per (name, type) pair; COLUMNS and COLUMN_TYPES are parallel arrays.
  List<FieldSchema> fields = new ArrayList<>(COLUMNS.length);
  for (int col = 0; col < COLUMNS.length; col++) {
    fields.add(new FieldSchema(COLUMNS[col], COLUMN_TYPES[col], null));
  }
  setSchema(new TableSchema(fields));
}
// Fragment (incomplete in this view): builds column metadata for one table,
// filtering columns by an optional name pattern and tracking primary-key columns.
TableSchema schema = new TableSchema(metastoreClient.getSchema(dbName, table.getTableName()));
// Primary keys are fetched so PK columns can be distinguished in the output.
List<SQLPrimaryKey> primaryKeys = metastoreClient.getPrimaryKeys(new PrimaryKeysRequest(dbName, table.getTableName()));
// NOTE(review): `key` is declared outside this fragment — presumably this line
// sits inside a loop over `primaryKeys`; verify against the full source.
pkColNames.add(key.getColumn_name().toLowerCase());
for (ColumnDescriptor column : schema.getColumnDescriptors()) {
  // Skip columns whose names do not match the requested pattern.
  if (columnPattern != null && !columnPattern.matcher(column.getName()).matches()) {
    continue;
/**
 * Thrift handler: looks up the result-set schema for the requested operation
 * and returns it, mapping any failure into the response status.
 */
@Override
public TGetResultSetMetadataResp GetResultSetMetadata(TGetResultSetMetadataReq req) throws TException {
  TGetResultSetMetadataResp response = new TGetResultSetMetadataResp();
  try {
    OperationHandle handle = new OperationHandle(req.getOperationHandle());
    TableSchema resultSchema = cliService.getResultSetMetadata(handle);
    response.setSchema(resultSchema.toTTableSchema());
    response.setStatus(OK_STATUS);
  } catch (Exception e) {
    // Errors are reported through the Thrift status rather than thrown.
    LOG.warn("Error getting result set metadata: ", e);
    response.setStatus(HiveSQLException.toTStatus(e));
  }
  return response;
}
public ColumnBasedSet(TableSchema schema) { descriptors = schema.toTypeDescriptors(); columns = new ArrayList<ColumnBuffer>(); for (ColumnDescriptor colDesc : schema.getColumnDescriptors()) { columns.add(new ColumnBuffer(colDesc.getType())); } }
/**
 * Builds a log-message template listing each column of the result schema as a
 * "name={}" placeholder (suitable for parameterized logging).
 *
 * @param type human-readable name of the metadata being returned
 * @param resultSetSchema schema whose column names are listed
 * @return the assembled debug message template
 */
protected String getDebugMessage(final String type, final TableSchema resultSetSchema) {
  // StringJoiner replaces the original's manual first-column flag; fully
  // qualified to avoid touching the file's import block.
  java.util.StringJoiner columns = new java.util.StringJoiner(", ");
  for (ColumnDescriptor column : resultSetSchema.getColumnDescriptors()) {
    columns.add(column.getName() + "={}");
  }
  return "Returning " + type + " metadata: " + columns;
}
}
/**
 * Creates an empty column-oriented row set with one column per schema entry.
 */
public ColumnBasedSet(TableSchema schema) {
  types = schema.toTypes();
  columns = new ArrayList<Column>();
  for (ColumnDescriptor descriptor : schema.getColumnDescriptors()) {
    columns.add(new Column(descriptor.getType()));
  }
}
// Build a single-column STRING schema for the new BigQuery table.
TableSchema schema = new TableSchema();
List<TableFieldSchema> tableFieldSchema = new ArrayList<TableFieldSchema>();
TableFieldSchema schemaEntry = new TableFieldSchema();
schemaEntry.setName(myFirstFieldName);
schemaEntry.setType("STRING");
tableFieldSchema.add(schemaEntry);
schema.setFields(tableFieldSchema);
// Describe the table and its full location (project / dataset / table id).
Table table = new Table();
table.setSchema(schema);
TableReference tableRef = new TableReference();
tableRef.setDatasetId(DATASET_ID);
tableRef.setProjectId(PROJECT_ID);
tableRef.setTableId(tableId);
table.setTableReference(tableRef);
try {
  bigquery.tables().insert(PROJECT_ID, DATASET_ID, table).execute();
} catch (IOException e) {
  // NOTE(review): the insert failure is silently swallowed — presumably a
  // best-effort create (e.g. the table may already exist). Confirm intent;
  // at minimum the exception should be logged.
}
// Read the destination table schema for the load job from a local JSON file.
TableSchema schema = new TableSchema();
schema.setFields(new ArrayList<TableFieldSchema>());
JacksonFactory JACKSON = new JacksonFactory();
// try-with-resources guarantees the stream is closed even if parser creation
// or parsing fails; the original leaked the FileInputStream on error.
try (FileInputStream schemaStream = new FileInputStream("schema.json")) {
  JACKSON.createJsonParser(schemaStream)
      .parseArrayAndClose(schema.getFields(), TableFieldSchema.class, null);
}
schema.setFactory(JACKSON);
// Fully qualify the destination table.
TableReference destTable = new TableReference();
destTable.setProjectId(projectId);
destTable.setDatasetId(datasetId);
destTable.setTableId(tableId);
// The CSV payload is uploaded alongside the job definition.
FileContent content = new FileContent("application/octet-stream", new File(csv));
Job job = new Job();
JobConfiguration config = new JobConfiguration();
JobConfigurationLoad configLoad = new JobConfigurationLoad();
configLoad.setSchema(schema);
configLoad.setDestinationTable(destTable);
configLoad.setEncoding("UTF-8");
configLoad.setCreateDisposition("CREATE_IF_NEEDED");
config.setLoad(configLoad);
job.setConfiguration(config);
// Submit the load job and remember its id for later polling.
Insert insert = bigquery.jobs().insert(projectId, job, content);
insert.setProjectId(projectId);
JobReference jobRef = insert.execute().getJobReference();
String jobId = jobRef.getJobId();
/**
 * Extracts and converts the value at the given 1-based column index from the
 * current row, updating {@code wasNull} as a side effect.
 *
 * @param columnIndex 1-based JDBC column index
 * @return the evaluated column value, or null for SQL NULL
 * @throws SQLException if there is no current row, the row is empty, the index
 *     is out of range, or the value cannot be evaluated
 */
private Object getColumnValue(int columnIndex) throws SQLException {
  if (row == null) {
    throw new SQLException("No row found.");
  }
  if (row.length == 0) {
    throw new SQLException("RowSet does not contain any columns!");
  }
  // Also reject indexes below 1: the original only checked the upper bound,
  // so columnIndex <= 0 fell through to an ArrayIndexOutOfBoundsException.
  if (columnIndex < 1 || columnIndex > row.length) {
    throw new SQLException("Invalid columnIndex: " + columnIndex);
  }
  Type columnType = getSchema().getColumnDescriptorAt(columnIndex - 1).getType();
  try {
    Object evaluated = evaluate(columnType, row[columnIndex - 1]);
    wasNull = evaluated == null;
    return evaluated;
  } catch (Exception e) {
    // Propagate with the cause attached; printStackTrace() bypassed the
    // application's logging and duplicated the information.
    throw new SQLException("Unrecognized column type:" + columnType, e);
  }
}
/** Creates an empty row-oriented set typed by the given result schema. */
public RowBasedSet(TableSchema schema) {
  this.rows = new RemovableList<TRow>();
  this.types = schema.toTypes();
}
/**
 * Instantiates a new Hive in-memory result set.
 *
 * @param hiveHandle handle of the operation whose results are fetched
 * @param client the CLI service client used to fetch rows and metadata
 * @param closeAfterFetch whether the operation should be closed after fetching
 * @throws HiveSQLException if the result set metadata cannot be retrieved
 */
public HiveInMemoryResultSet(OperationHandle hiveHandle, CLIServiceClient client,
    boolean closeAfterFetch) throws HiveSQLException {
  this.client = client;
  this.opHandle = hiveHandle;
  // The field keeps its historical misspelling ("Fecth"); it is declared
  // outside this constructor and cannot be renamed here.
  this.closeAfterFecth = closeAfterFetch;
  this.metadata = client.getResultSetMetadata(opHandle);
  this.numColumns = metadata.getColumnDescriptors().size();
  this.orientation = FetchOrientation.FETCH_FIRST;
}
/**
 * Creates a column-oriented row set, one empty column per schema descriptor.
 */
public ColumnBasedSet(TableSchema schema) {
  types = schema.toTypes();
  columns = new ArrayList<Column>();
  for (ColumnDescriptor descriptor : schema.getColumnDescriptors()) {
    columns.add(new Column(descriptor.getType()));
  }
}
public ColumnBasedSet(TableSchema schema) { descriptors = schema.toTypeDescriptors(); columns = new ArrayList<ColumnBuffer>(); for (ColumnDescriptor colDesc : schema.getColumnDescriptors()) { columns.add(new ColumnBuffer(colDesc.getType())); } }
@Test public void testGetFunctions() throws Exception { SessionHandle sessionHandle = client.openSession("tom", "password"); assertNotNull(sessionHandle); OperationHandle opHandle = client.getFunctions(sessionHandle, null, null, "*"); TableSchema schema = client.getResultSetMetadata(opHandle); ColumnDescriptor columnDesc = schema.getColumnDescriptorAt(0); assertEquals("FUNCTION_CAT", columnDesc.getName()); assertEquals(Type.STRING_TYPE, columnDesc.getType()); columnDesc = schema.getColumnDescriptorAt(1); assertEquals("FUNCTION_SCHEM", columnDesc.getName()); assertEquals(Type.STRING_TYPE, columnDesc.getType()); columnDesc = schema.getColumnDescriptorAt(2); assertEquals("FUNCTION_NAME", columnDesc.getName()); assertEquals(Type.STRING_TYPE, columnDesc.getType()); columnDesc = schema.getColumnDescriptorAt(3); assertEquals("REMARKS", columnDesc.getName()); assertEquals(Type.STRING_TYPE, columnDesc.getType()); columnDesc = schema.getColumnDescriptorAt(4); assertEquals("FUNCTION_TYPE", columnDesc.getName()); assertEquals(Type.INT_TYPE, columnDesc.getType()); columnDesc = schema.getColumnDescriptorAt(5); assertEquals("SPECIFIC_NAME", columnDesc.getName()); assertEquals(Type.STRING_TYPE, columnDesc.getType()); // Cleanup client.closeOperation(opHandle); client.closeSession(sessionHandle); }
/** Creates an empty row-based set for the given result schema. */
public RowBasedSet(TableSchema schema) {
  this.rows = new RemovableList<TRow>();
  this.types = schema.toTypes();
}
@Override public TableSchema getResultSetSchema() throws HiveSQLException { // Since compilation is always a blocking RPC call, and schema is ready after compilation, // we can return when are in the RUNNING state. assertState(Arrays.asList(OperationState.RUNNING, OperationState.FINISHED)); if (resultSchema == null) { resultSchema = new TableSchema(driver.getSchema()); } return resultSchema; }
// Fragment (incomplete in this view): builds column metadata for one table,
// filtering columns by an optional name pattern and tracking primary-key columns.
TableSchema schema = new TableSchema(metastoreClient.getSchema(dbName, table.getTableName()));
// Primary keys are fetched so PK columns can be distinguished in the output.
List<SQLPrimaryKey> primaryKeys = metastoreClient.getPrimaryKeys(new PrimaryKeysRequest(dbName, table.getTableName()));
// NOTE(review): `key` is declared outside this fragment — presumably this line
// sits inside a loop over `primaryKeys`; verify against the full source.
pkColNames.add(key.getColumn_name().toLowerCase());
for (ColumnDescriptor column : schema.getColumnDescriptors()) {
  // Skip columns whose names do not match the requested pattern.
  if (columnPattern != null && !columnPattern.matcher(column.getName()).matches()) {
    continue;
/**
 * Instantiates a new Hive in-memory result set.
 *
 * @param hiveHandle handle of the operation whose results are fetched
 * @param client the CLI service client used to fetch rows and metadata
 * @param closeAfterFetch whether the operation should be closed after fetching
 * @throws HiveSQLException if the result set metadata cannot be retrieved
 */
public HiveInMemoryResultSet(OperationHandle hiveHandle, CLIServiceClient client,
    boolean closeAfterFetch) throws HiveSQLException {
  this.client = client;
  this.opHandle = hiveHandle;
  // The field keeps its historical misspelling ("Fecth"); it is declared
  // outside this constructor and cannot be renamed here.
  this.closeAfterFecth = closeAfterFetch;
  this.metadata = client.getResultSetMetadata(opHandle);
  this.numColumns = metadata.getColumnDescriptors().size();
  this.orientation = FetchOrientation.FETCH_FIRST;
}
/**
 * Creates a column-oriented row set with one empty column per schema entry.
 */
public ColumnBasedSet(TableSchema schema) {
  types = schema.toTypes();
  columns = new ArrayList<Column>();
  for (ColumnDescriptor descriptor : schema.getColumnDescriptors()) {
    columns.add(new Column(descriptor.getType()));
  }
}
/**
 * Thrift handler: resolves the result-set schema for the given operation
 * handle; failures are converted into the response's Thrift status.
 */
@Override
public TGetResultSetMetadataResp GetResultSetMetadata(TGetResultSetMetadataReq req) throws TException {
  TGetResultSetMetadataResp response = new TGetResultSetMetadataResp();
  try {
    TableSchema resultSchema =
        cliService.getResultSetMetadata(new OperationHandle(req.getOperationHandle()));
    response.setSchema(resultSchema.toTTableSchema());
    response.setStatus(OK_STATUS);
  } catch (Exception e) {
    LOG.warn("Error getting result set metadata: ", e);
    response.setStatus(HiveSQLException.toTStatus(e));
  }
  return response;
}