@Override
public void configure(Binder binder)
{
    binder.bind(HiveConnectorId.class).toInstance(new HiveConnectorId(connectorId));
    binder.bind(TypeTranslator.class).toInstance(new HiveTypeTranslator());
    binder.bind(CoercionPolicy.class).to(HiveCoercionPolicy.class).in(Scopes.SINGLETON);
}
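// Hedged sketch, not from this codebase: how a Guice module with a
// configure(Binder) method like the one above is typically installed.
// Guice.createInjector and Injector.getInstance are real com.google.inject
// APIs; DemoModule and the "demo-connector-id" value are illustrative assumptions.
import com.google.inject.Binder;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Module;

public class BindingDemo
{
    static class DemoModule
            implements Module
    {
        @Override
        public void configure(Binder binder)
        {
            // same bind(...).toInstance(...) pattern as the snippet above
            binder.bind(String.class).toInstance("demo-connector-id");
        }
    }

    public static void main(String[] args)
    {
        Injector injector = Guice.createInjector(new DemoModule());
        // every injection point sees the single bound instance
        System.out.println(injector.getInstance(String.class));
    }
}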
@Inject
public SqlStandardAccessControl(
        HiveConnectorId connectorId,
        Function<HiveTransactionHandle, SemiTransactionalHiveMetastore> metastoreProvider)
{
    this.connectorId = requireNonNull(connectorId, "connectorId is null").toString();
    this.metastoreProvider = requireNonNull(metastoreProvider, "metastoreProvider is null");
}
protected final void setup(String databaseName, HiveClientConfig hiveClientConfig, ExtendedHiveMetastore hiveMetastore)
{
    HiveConnectorId connectorId = new HiveConnectorId("hive-test");
    setupHive(connectorId.toString(), databaseName, hiveClientConfig.getTimeZone());
}
public HivePartitionManager(
        HiveConnectorId connectorId,
        DateTimeZone timeZone,
        int maxOutstandingSplits,
        boolean assumeCanonicalPartitionKeys,
        int domainCompactionThreshold)
{
    this.connectorId = requireNonNull(connectorId, "connectorId is null").toString();
    this.timeZone = requireNonNull(timeZone, "timeZone is null");
    checkArgument(maxOutstandingSplits >= 1, "maxOutstandingSplits must be at least 1");
    this.assumeCanonicalPartitionKeys = assumeCanonicalPartitionKeys;
    checkArgument(domainCompactionThreshold >= 1, "domainCompactionThreshold must be at least 1");
    this.domainCompactionThreshold = domainCompactionThreshold;
}
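// Hedged sketch, not from this codebase: the requireNonNull/checkArgument
// validation pattern used by the constructor above, shown in isolation.
// requireNonNull is java.util.Objects; checkArgument is Guava's
// com.google.common.base.Preconditions. ExamplePartitionSettings is invented.
import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Objects.requireNonNull;

public class ExamplePartitionSettings
{
    private final String connectorId;
    private final int domainCompactionThreshold;

    public ExamplePartitionSettings(String connectorId, int domainCompactionThreshold)
    {
        // fails fast with a descriptive message instead of a bare NullPointerException later
        this.connectorId = requireNonNull(connectorId, "connectorId is null");
        // throws IllegalArgumentException with the given message when the check fails
        checkArgument(domainCompactionThreshold >= 1, "domainCompactionThreshold must be at least 1");
        this.domainCompactionThreshold = domainCompactionThreshold;
    }
}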
@Override
public void configure(Binder binder)
{
    binder.bind(HiveConnectorId.class).toInstance(new HiveConnectorId(connectorId));
}
@Inject
public SqlStandardAccessControl(HiveConnectorId connectorId, HiveMetastore metastore, HiveClientConfig hiveClientConfig)
{
    this.connectorId = requireNonNull(connectorId, "connectorId is null").toString();
    this.metastore = requireNonNull(metastore, "metastore is null");
    requireNonNull(hiveClientConfig, "hiveClientConfig is null");
    allowDropTable = hiveClientConfig.getAllowDropTable();
    allowRenameTable = hiveClientConfig.getAllowRenameTable();
}
HiveConnectorId connectorId = new HiveConnectorId("hive-test"); HiveCluster hiveCluster = new TestingHiveCluster(hiveClientConfig, host, port); ExecutorService executor = newCachedThreadPool(daemonThreadsNamed("hive-s3-%s"));
@Override
public List<String> listSchemaNames(final ConnectorSession session)
{
    final List<Database> databases = sourceDao.getByName(connectorId.toString(), false).getDatabases();
    return databases.stream()
            .map(database -> database.getName().toLowerCase(Locale.ENGLISH))
            .collect(Collectors.toList());
}
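// Hedged sketch, not from this codebase: the stream pipeline above reduced to
// its essentials. Passing Locale.ENGLISH avoids locale-sensitive lowercasing
// surprises (e.g. the Turkish dotless i) for identifiers; the sample names are invented.
import java.util.List;
import java.util.Locale;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class LowercaseDemo
{
    public static void main(String[] args)
    {
        List<String> names = Stream.of("Default", "SALES")
                .map(name -> name.toLowerCase(Locale.ENGLISH))
                .collect(Collectors.toList());
        System.out.println(names); // prints [default, sales]
    }
}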
HiveConnectorId connectorId = new HiveConnectorId(connectorName);
HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationUpdater(hiveClientConfig));
@Override
public List<SchemaTableName> listTables(final ConnectorSession session, final String schemaNameOrNull)
{
    final ImmutableList.Builder<SchemaTableName> tableNames = ImmutableList.builder();
    for (String schemaName : listSchemas(session, schemaNameOrNull)) {
        final Database database = databaseDao.getBySourceDatabaseName(connectorId.toString(), schemaName);
        if (database != null) {
            for (Table table : database.getTables()) {
                tableNames.add(new SchemaTableName(schemaName, table.getName().toLowerCase(Locale.ENGLISH)));
            }
        }
    }
    return tableNames.build();
}
@Override
public ConnectorTableHandle getTableHandle(final ConnectorSession session, final SchemaTableName tableName)
{
    return new HiveTableHandle(connectorId.toString(), tableName.getSchemaName(), tableName.getTableName());
}
@Override
public void createSchema(final ConnectorSession session, final ConnectorSchemaMetadata schema)
{
    final String schemaName = schema.getSchemaName();
    Preconditions.checkNotNull(schemaName, "Schema name is null");
    if (databaseDao.getBySourceDatabaseName(connectorId.toString(), schemaName) != null) {
        throw new SchemaAlreadyExistsException(schemaName);
    }
    final Database database = new Database();
    database.setName(schemaName);
    database.setSource(sourceDao.getByName(connectorId.toString()));
    databaseDao.save(database);
}
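// Hedged sketch, not from this codebase: the check-then-create shape of the
// method above, with an in-memory map standing in for databaseDao/sourceDao.
// Note that a separate lookup followed by save, as above, races with concurrent
// creators unless a transaction or unique constraint backs it; putIfAbsent
// closes that window in this toy version.
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class CreateSchemaDemo
{
    private final ConcurrentMap<String, String> schemas = new ConcurrentHashMap<>();

    public void createSchema(String schemaName)
    {
        // atomic check-and-insert: returns the previous value if one existed
        if (schemas.putIfAbsent(schemaName, schemaName) != null) {
            throw new IllegalStateException("Schema already exists: " + schemaName);
        }
    }
}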
@Override
public List<ConnectorTableMetadata> listTableMetadatas(final ConnectorSession session, final String schemaName, final List<String> tableNames)
{
    final List<Table> tables = tableDao.getBySourceDatabaseTableNames(connectorId.toString(), schemaName, tableNames);
    return tables.stream()
            .map(table -> new ConnectorTableDetailMetadata(
                    new SchemaTableName(schemaName, table.getName()),
                    converterUtil.toColumnMetadatas(table),
                    converterUtil.getOwner(table),
                    converterUtil.toStorageInfo(table),
                    null,
                    converterUtil.toAuditInfo(table)))
            .collect(Collectors.toList());
}
private Path getTargetPath(final String schemaName, final String tableName, final SchemaTableName schemaTableName)
{
    final String location = sourceDao.getByName(connectorId.toString()).getThriftUri();
    if (Strings.isNullOrEmpty(location)) {
        throw new PrestoException(HiveErrorCode.HIVE_DATABASE_LOCATION_ERROR,
                String.format("Database '%s' location is not set", schemaName));
    }
    final Path databasePath = new Path(location);
    if (!pathExists(databasePath)) {
        throw new PrestoException(HiveErrorCode.HIVE_DATABASE_LOCATION_ERROR,
                String.format("Database '%s' location does not exist: %s", schemaName, databasePath));
    }
    if (!isDirectory(databasePath)) {
        throw new PrestoException(HiveErrorCode.HIVE_DATABASE_LOCATION_ERROR,
                String.format("Database '%s' location is not a directory: %s", schemaName, databasePath));
    }
    // verify the target directory for the table
    final Path targetPath = new Path(databasePath, tableName);
    if (pathExists(targetPath)) {
        throw new PrestoException(HiveErrorCode.HIVE_PATH_ALREADY_EXISTS,
                String.format("Target directory for table '%s' already exists: %s", schemaTableName, targetPath));
    }
    return targetPath;
}
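// Hedged sketch, not from this codebase: what helpers like pathExists and
// isDirectory in the method above commonly wrap. FileSystem.exists and
// FileStatus.isDirectory are real org.apache.hadoop.fs APIs; obtaining the
// FileSystem from a default Configuration is an assumption for illustration.
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class PathChecks
{
    private static boolean pathExists(FileSystem fileSystem, Path path)
            throws IOException
    {
        return fileSystem.exists(path);
    }

    private static boolean isDirectory(FileSystem fileSystem, Path path)
            throws IOException
    {
        // getFileStatus throws FileNotFoundException if the path is missing,
        // so callers should check pathExists first, as the method above does
        return fileSystem.getFileStatus(path).isDirectory();
    }

    public static void main(String[] args)
            throws IOException
    {
        FileSystem fileSystem = FileSystem.get(new Configuration());
        Path databasePath = new Path("/tmp/warehouse/demo_db");
        System.out.println(pathExists(fileSystem, databasePath));
    }
}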
@Override
public Integer getPartitionCount(final ConnectorTableHandle tableHandle)
{
    final SchemaTableName schemaTableName = HiveUtil.schemaTableName(tableHandle);
    return partitionDao.count(connectorId.toString(), schemaTableName.getSchemaName(), schemaTableName.getTableName());
}
connectorId.toString(), schemaName, tableName,
@Override
@Transactional
public void deletePartitions(final ConnectorTableHandle tableHandle, final List<String> partitionIds)
{
    final SchemaTableName schemaTableName = HiveUtil.schemaTableName(tableHandle);
    partitionDao.deleteByNames(connectorId.toString(), schemaTableName.getSchemaName(), schemaTableName.getTableName(), partitionIds);
}
final SchemaTableName tableName = HiveUtil.schemaTableName(tableHandle);
final Table table = tableDao.getBySourceDatabaseTableName(connectorId.toString(), tableName.getSchemaName(), tableName.getTableName());
if (table == null) {
    partitionDao.deleteByNames(connectorId.toString(), tableName.getSchemaName(), tableName.getTableName(), partitionIdsForDeletes);
@Override
public Map<String, ColumnHandle> getColumnHandles(final ConnectorSession session, final ConnectorTableHandle tableHandle)
{
    final SchemaTableName schemaTableName = HiveUtil.schemaTableName(tableHandle);
    final Table table = tableDao.getBySourceDatabaseTableName(connectorId.toString(), schemaTableName.getSchemaName(), schemaTableName.getTableName());
    if (table == null) {
        throw new TableNotFoundException(schemaTableName);
    }
    final ImmutableMap.Builder<String, ColumnHandle> columnHandles = ImmutableMap.builder();
    for (Field field : getFields(table)) {
        final String type = field.getType();
        final Type prestoType = converterUtil.toType(type);
        final HiveType hiveType = HiveType.toHiveType(prestoType);
        columnHandles.put(field.getName(), new HiveColumnHandle(
                connectorId.toString(),
                field.getName(),
                field.getPos(),
                hiveType,
                prestoType.getTypeSignature(),
                field.getPos(),
                field.isPartitionKey()));
    }
    return columnHandles.build();
}
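// Hedged sketch, not from this codebase: the ImmutableMap.Builder pattern the
// method above uses to assemble its result. Guava's builder rejects null keys
// and values and throws IllegalArgumentException on duplicate keys at build()
// time; the column names and positions here are invented.
import com.google.common.collect.ImmutableMap;
import java.util.Map;

public class ColumnMapDemo
{
    public static void main(String[] args)
    {
        ImmutableMap.Builder<String, Integer> columns = ImmutableMap.builder();
        columns.put("ds", 0);
        columns.put("user_id", 1);
        // build() materializes an immutable map keyed by column name
        Map<String, Integer> byName = columns.build();
        System.out.println(byName);
    }
}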