/** Targets this builder at {@code db}, copying its catalog and database name. */
public TableBuilder inDb(Database db) {
  catName = db.getCatalogName();
  dbName = db.getName();
  return this;
}
/** Targets this builder at {@code db}, copying its catalog and database name. */
public FunctionBuilder inDb(Database db) {
  catName = db.getCatalogName();
  dbName = db.getName();
  return this;
}
/**
 * Reports whether this entity is a placeholder ("dummy") object: the dummy
 * database for {@code DATABASE} entities, a dummy table for {@code TABLE}
 * entities, and never for any other type.
 */
public boolean isDummy() {
  switch (typ) {
    case DATABASE:
      return database.getName().equals(SemanticAnalyzer.DUMMY_DATABASE);
    case TABLE:
      return t.isDummyTable();
    default:
      return false;
  }
}
/** Points this builder at {@code db}, recording its database name and catalog. */
public ISchemaBuilder inDb(Database db) {
  dbName = db.getName();
  catName = db.getCatalogName();
  return this;
}
/**
 * Reacts to a database alteration: when the alter amounts to a rename
 * (catalog or name changed, compared case-insensitively), forwards the
 * old/new identity pair to the transaction handler so transactional
 * bookkeeping follows the renamed database.
 */
@Override
public void onAlterDatabase(AlterDatabaseEvent dbEvent) throws MetaException {
  Database before = dbEvent.getOldDatabase();
  Database after = dbEvent.getNewDatabase();
  // Same catalog and same name (short-circuit preserved): not a rename.
  if (before.getCatalogName().equalsIgnoreCase(after.getCatalogName())
      && before.getName().equalsIgnoreCase(after.getName())) {
    return;
  }
  txnHandler = getTxnHandler();
  txnHandler.onRename(
      before.getCatalogName(), before.getName(), null, null,
      after.getCatalogName(), after.getName(), null, null);
}
/** Builds the JSON notification payload describing the drop of {@code db}. */
@Override
public DropDatabaseMessage buildDropDatabaseMessage(Database db) {
  String droppedDbName = db.getName();
  return new JSONDropDatabaseMessage(
      HCAT_SERVER_URL, HCAT_SERVICE_PRINCIPAL, droppedDbName, now());
}
/**
 * Renames a schema by altering the underlying metastore database. After the
 * alter call, re-reads the old name: if a database with the old name still
 * exists, the metastore silently ignored the rename and the operation is
 * reported as unsupported.
 */
@Override
public void renameDatabase(String databaseName, String newDatabaseName) {
  org.apache.hadoop.hive.metastore.api.Database db = delegate.getDatabase(databaseName)
      .orElseThrow(() -> new SchemaNotFoundException(databaseName));
  db.setName(newDatabaseName);
  delegate.alterDatabase(databaseName, db);
  // Detect metastores that ignore renames rather than failing them.
  delegate.getDatabase(databaseName).ifPresent(survivor -> {
    if (survivor.getName().equals(databaseName)) {
      throw new PrestoException(NOT_SUPPORTED, "Hive metastore does not support renaming schemas");
    }
  });
}
/** Captures the descriptive fields of a metastore {@code Database}. */
HCatDatabase(Database db) {
  dbName = db.getName();
  dbLocation = db.getLocationUri();
  comment = db.getDescription();
  props = db.getParameters();
}
/**
 * Creates a DDL task that re-applies {@code dbObj}'s owner (name and
 * principal type) to the database of the same name.
 */
private Task<? extends Serializable> setOwnerInfoTask(Database dbObj) {
  PrincipalDesc owner = new PrincipalDesc(dbObj.getOwnerName(), dbObj.getOwnerType());
  AlterDatabaseDesc alterDbDesc = new AlterDatabaseDesc(dbObj.getName(), owner, null);
  DDLWork ddlWork = new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc);
  return TaskFactory.get(ddlWork, context.hiveConf);
}
/**
 * Resolves the default filesystem path for a table. External tables land
 * under the external warehouse root when one is configured; all other
 * tables go under the database's own directory. The table name is
 * lower-cased and encoded before being appended.
 */
public Path getDefaultTablePath(Database db, String tableName, boolean isExternal)
    throws MetaException {
  Path dbPath = (isExternal && hasExternalWarehouseRoot())
      ? getDefaultExternalDatabasePath(db.getName())
      : getDatabasePath(db);
  String encodedTable = MetaStoreUtils.encodeTableName(tableName.toLowerCase());
  return getDnsPath(new Path(dbPath, encodedTable));
}
/**
 * Authorizes the required input/output privileges against {@code db},
 * combining user-level and database-level grants, and throws if any
 * required privilege remains unsatisfied.
 */
@Override
public void authorize(Database db, Privilege[] inputRequiredPriv, Privilege[] outputRequiredPriv)
    throws HiveException, AuthorizationException {
  BitSetChecker checker = BitSetChecker.getBitSetChecker(inputRequiredPriv, outputRequiredPriv);
  boolean[] inputsSatisfied = checker.inputCheck;
  boolean[] outputsSatisfied = checker.outputCheck;
  authorizeUserAndDBPriv(db, inputRequiredPriv, outputRequiredPriv,
      inputsSatisfied, outputsSatisfied);
  checkAndThrowAuthorizationException(inputRequiredPriv, outputRequiredPriv,
      inputsSatisfied, outputsSatisfied, db.getName(), null, null, null);
}
@Override public synchronized void createDatabase(Database database) { requireNonNull(database, "database is null"); File directory; if (database.getLocationUri() != null) { directory = new File(URI.create(database.getLocationUri())); } else { // use Hive default naming convention directory = new File(baseDirectory, database.getName() + ".db"); database = database.deepCopy(); database.setLocationUri(directory.toURI().toString()); } checkArgument(!directory.exists(), "Database directory already exists"); checkArgument(isParentDir(directory, baseDirectory), "Database directory must be inside of the metastore base directory"); checkArgument(directory.mkdirs(), "Could not create database directory"); if (databases.putIfAbsent(database.getName(), database) != null) { throw new SchemaAlreadyExistsException(database.getName()); } }
/**
 * Converts a Thrift metastore database into the internal {@code Database}
 * representation. Missing owner information falls back to the PUBLIC role;
 * missing parameters become an empty map; location and description are
 * carried over as optionals.
 */
public static Database fromMetastoreApiDatabase(org.apache.hadoop.hive.metastore.api.Database database) {
  String ownerName = "PUBLIC";
  PrincipalType ownerType = PrincipalType.ROLE;
  if (database.getOwnerName() != null) {
    ownerName = database.getOwnerName();
    ownerType = fromMetastoreApiPrincipalType(database.getOwnerType());
  }
  Map<String, String> parameters = (database.getParameters() == null)
      ? ImmutableMap.of()
      : database.getParameters();
  return Database.builder()
      .setDatabaseName(database.getName())
      .setLocation(Optional.ofNullable(database.getLocationUri()))
      .setOwnerName(ownerName)
      .setOwnerType(ownerType)
      .setComment(Optional.ofNullable(database.getDescription()))
      .setParameters(parameters)
      .build();
}
/**
 * Dropping a database that still contains a function, without cascade,
 * must fail with {@code InvalidOperationException}.
 */
@Test(expected = InvalidOperationException.class)
public void testDropDatabaseWithFunction() throws Exception {
  Database database = testDatabases[0];
  // The function only needs to exist in the database; the returned
  // reference was previously stored in an unused local, now dropped.
  new FunctionBuilder()
      .setDbName(database.getName())
      .setName("test_function")
      .setClass("org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper")
      .create(client, metaStore.getConf());
  // cascade=false: the non-empty database must refuse to be dropped.
  client.dropDatabase(database.getName(), true, true, false);
}
/**
 * Dropping a database that still contains a table, without cascade,
 * must fail with {@code InvalidOperationException}.
 */
@Test(expected = InvalidOperationException.class)
public void testDropDatabaseWithTable() throws Exception {
  Database database = testDatabases[0];
  // The table only needs to exist in the database; the returned
  // reference was previously stored in an unused local, now dropped.
  new TableBuilder()
      .setDbName(database.getName())
      .setTableName("test_table")
      .addCol("test_col", "int")
      .create(client, metaStore.getConf());
  // cascade=false: the non-empty database must refuse to be dropped.
  client.dropDatabase(database.getName(), true, true, false);
}
/** Creates and then drops a database whose name exercises IP-address handling. */
@Test
public void testIpAddress() throws Exception {
  Database db = new DatabaseBuilder()
      .setName("testIpAddressIp")
      .create(msc, conf);
  msc.dropDatabase(db.getName());
}
}
/**
 * Replaces or renames a database in this in-memory store.
 *
 * <p>Same name: the stored entry is replaced with {@code newDatabase}
 * ({@code SchemaNotFoundException} if absent). Different name: the
 * previously stored {@code Database} object is re-registered under the new
 * name ({@code SchemaAlreadyExistsException} if taken), the old entry is
 * removed, and every dependent key map (relations, views, partitions,
 * table privileges) is rewritten to the new schema name.
 *
 * <p>NOTE(review): on the rename path the OLD stored object — not
 * {@code newDatabase} — is kept under the new key, so any other field
 * changes carried by {@code newDatabase} are discarded and the stored
 * object's internal name still reads as the old one. Presumably acceptable
 * for this test/in-memory implementation — confirm against callers.
 */
@Override public synchronized void alterDatabase(String databaseName, Database newDatabase) { String newDatabaseName = newDatabase.getName(); if (databaseName.equals(newDatabaseName)) { if (databases.replace(databaseName, newDatabase) == null) { throw new SchemaNotFoundException(databaseName); } return; } Database database = databases.get(databaseName); if (database == null) { throw new SchemaNotFoundException(databaseName); } if (databases.putIfAbsent(newDatabaseName, database) != null) { throw new SchemaAlreadyExistsException(newDatabaseName); } databases.remove(databaseName); rewriteKeys(relations, name -> new SchemaTableName(newDatabaseName, name.getTableName())); rewriteKeys(views, name -> new SchemaTableName(newDatabaseName, name.getTableName())); rewriteKeys(partitions, name -> name.withSchemaName(newDatabaseName)); rewriteKeys(tablePrivileges, name -> name.withDatabase(newDatabaseName)); }
/**
 * A drop-database notification event must be turned into a drop-db
 * replication task.
 */
@Test
public void testDropDb() throws IOException {
  Database db = new Database();
  db.setName("testdb");
  NotificationEvent event = new NotificationEvent(getEventId(), getTime(),
      HCatConstants.HCAT_DROP_DATABASE_EVENT,
      // Fix: a drop event must carry a drop-database message; the original
      // built a create-database message for a drop event.
      msgFactory.buildDropDatabaseMessage(db).toString());
  event.setDbName(db.getName());
  HCatNotificationEvent hev = new HCatNotificationEvent(event);
  ReplicationTask rtask = ReplicationTask.create(client, hev);
  assertEquals(hev.toString(), rtask.getEvent().toString());
  verifyDropDbReplicationTask(rtask);
}
@Test public void testCreateDb(){ Database db = new Database(); db.setName("testdb"); NotificationEvent event = new NotificationEvent(getEventId(), getTime(), HCatConstants.HCAT_CREATE_DATABASE_EVENT, msgFactory.buildCreateDatabaseMessage(db).toString()); event.setDbName(db.getName()); HCatNotificationEvent hev = new HCatNotificationEvent(event); ReplicationTask rtask = ReplicationTask.create(client,hev); assertEquals(hev.toString(), rtask.getEvent().toString()); verifyCreateDbReplicationTask(rtask); // CREATE DB currently replicated as Noop. }