/**
 * Returns a defensive copy of the given database, or {@code null} when the
 * input itself is {@code null}.
 */
private Database deepCopy(Database database) {
  return (database == null) ? null : new Database(database);
}
/** Creates and returns a deep copy of this database via the Thrift copy constructor. */
public Database deepCopy() {
  Database copy = new Database(this);
  return copy;
}
/**
 * Produces a deep copy of {@code database}; a {@code null} input yields
 * {@code null} rather than throwing.
 */
private Database deepCopy(Database database) {
  if (database == null) {
    return null;
  }
  return new Database(database);
}
/**
 * Creates a builder rooted at the given warehouse folder, starting from an
 * empty {@link Database} instance.
 *
 * @param warehouseFolder folder under which the database content will live
 */
public DatabaseBuilder(File warehouseFolder) {
  this.database = new Database();
  this.warehouseFolder = warehouseFolder;
}
/**
 * Deserializes the stored JSON entry for the database field into a fresh
 * {@link Database} instance.
 *
 * @throws TException if the Thrift deserialization fails
 */
private Database database() throws TException {
  Database target = new Database();
  return deserialize(target, jsonEntry(DBSerializer.FIELD_NAME));
}
/**
 * Mock lookup: records every access, optionally fails with a runtime
 * exception, and only knows about {@code TEST_DATABASE}.
 */
@Override
public Database getDatabase(String name) throws TException {
  accessCount.incrementAndGet();
  if (throwException) {
    throw new RuntimeException();
  }
  // Note: name.equals(...) deliberately kept so a null name still NPEs,
  // matching the original contract.
  if (name.equals(TEST_DATABASE)) {
    return new Database(TEST_DATABASE, null, null, null);
  }
  throw new NoSuchObjectException();
}
private static ReadEntity createReadEntity(String functionName, FunctionInfo functionInfo) throws HiveException { String[] qualFunctionName = FunctionUtils.getQualifiedFunctionNameParts(functionName); // this is only for the purpose of authorization, only the name matters. Database db = new Database(qualFunctionName[0], "", "", null); return new ReadEntity(db, qualFunctionName[1], functionInfo.getClassName(), Type.FUNCTION); }
/**
 * Creates a test database named {@code dbName} (located under /tmp) in the
 * local metastore, logging a warning instead of failing when it already exists.
 */
public void createTestDb(String dbName) throws Exception {
  Database testDb =
      new Database(dbName, "Some description", "/tmp/" + dbName, new HashMap<String, String>());
  try {
    this.localMetastoreClient.createDatabase(testDb);
  } catch (AlreadyExistsException e) {
    log.warn(dbName + " already exits");
  }
}
/** Converts this descriptor into its Thrift metastore {@link Database} form. */
Database toHiveDb() {
  Database result = new Database();
  result.setName(this.dbName);
  result.setDescription(this.comment);
  result.setLocationUri(this.locationUri);
  result.setParameters(this.dbProperties);
  return result;
}
/**
 * Runs every configured authorizer against the database being read,
 * requiring SELECT. No-op when read authorization is disabled; authorization
 * failures surface as InvalidOperationException, other Hive errors as
 * MetaException.
 */
private void authorizeReadDatabase(PreReadDatabaseEvent context)
    throws InvalidOperationException, MetaException {
  if (!isReadAuthzEnabled()) {
    return;
  }
  Privilege[] required = new Privilege[] { Privilege.SELECT };
  try {
    for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
      authorizer.authorize(new Database(context.getDatabase()), required, null);
    }
  } catch (AuthorizationException e) {
    throw invalidOperationException(e);
  } catch (HiveException e) {
    throw metaException(e);
  }
}
/**
 * Ensures the default database exists in the default catalog: if the lookup
 * misses, creates it at the warehouse default path, owned by the PUBLIC role.
 */
private void createDefaultDB_core(RawStore ms) throws MetaException, InvalidObjectException {
  try {
    ms.getDatabase(DEFAULT_CATALOG_NAME, DEFAULT_DATABASE_NAME);
  } catch (NoSuchObjectException e) {
    // Not present yet — build and persist it.
    Database defaultDb = new Database(
        DEFAULT_DATABASE_NAME,
        DEFAULT_DATABASE_COMMENT,
        wh.getDefaultDatabasePath(DEFAULT_DATABASE_NAME).toString(),
        null);
    defaultDb.setOwnerName(PUBLIC);
    defaultDb.setOwnerType(PrincipalType.ROLE);
    defaultDb.setCatalogName(DEFAULT_CATALOG_NAME);
    // Thrift create-time is seconds since the epoch, stored as int.
    defaultDb.setCreateTime((int) (System.currentTimeMillis() / 1000));
    ms.createDatabase(defaultDb);
  }
}
/**
 * Translates the local {@code Database} model into the Hive metastore Thrift
 * representation. Location and comment are optional and only set when present.
 */
public static org.apache.hadoop.hive.metastore.api.Database toMetastoreApiDatabase(Database database) {
  org.apache.hadoop.hive.metastore.api.Database apiDb =
      new org.apache.hadoop.hive.metastore.api.Database();
  apiDb.setName(database.getDatabaseName());
  apiDb.setOwnerName(database.getOwnerName());
  apiDb.setOwnerType(toMetastoreApiPrincipalType(database.getOwnerType()));
  apiDb.setParameters(database.getParameters());
  database.getLocation().ifPresent(apiDb::setLocationUri);
  database.getComment().ifPresent(apiDb::setDescription);
  return apiDb;
}
/**
 * Checks every configured authorizer for the privileges DROP DATABASE
 * requires. Authorization failures surface as InvalidOperationException,
 * other Hive errors as MetaException.
 */
private void authorizeDropDatabase(PreDropDatabaseEvent context)
    throws InvalidOperationException, MetaException {
  Privilege[] inputPrivs = HiveOperation.DROPDATABASE.getInputRequiredPrivileges();
  Privilege[] outputPrivs = HiveOperation.DROPDATABASE.getOutputRequiredPrivileges();
  try {
    for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
      authorizer.authorize(new Database(context.getDatabase()), inputPrivs, outputPrivs);
    }
  } catch (AuthorizationException e) {
    throw invalidOperationException(e);
  } catch (HiveException e) {
    throw metaException(e);
  }
}
/**
 * Checks every configured authorizer for the privileges CREATE DATABASE
 * requires. Authorization failures surface as InvalidOperationException,
 * other Hive errors as MetaException.
 */
private void authorizeCreateDatabase(PreCreateDatabaseEvent context)
    throws InvalidOperationException, MetaException {
  Privilege[] inputPrivs = HiveOperation.CREATEDATABASE.getInputRequiredPrivileges();
  Privilege[] outputPrivs = HiveOperation.CREATEDATABASE.getOutputRequiredPrivileges();
  try {
    for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
      authorizer.authorize(new Database(context.getDatabase()), inputPrivs, outputPrivs);
    }
  } catch (AuthorizationException e) {
    throw invalidOperationException(e);
  } catch (HiveException e) {
    throw metaException(e);
  }
}
/**
 * Checks every configured authorizer, against the pre-alter state of the
 * database, for the privileges ALTER DATABASE (location) requires.
 * Authorization failures surface as InvalidOperationException, other Hive
 * errors as MetaException.
 */
private void authorizeAlterDatabase(PreAlterDatabaseEvent context)
    throws InvalidOperationException, MetaException {
  Privilege[] inputPrivs = HiveOperation.ALTERDATABASE_LOCATION.getInputRequiredPrivileges();
  Privilege[] outputPrivs = HiveOperation.ALTERDATABASE_LOCATION.getOutputRequiredPrivileges();
  try {
    for (HiveMetastoreAuthorizationProvider authorizer : tAuthorizers.get()) {
      authorizer.authorize(new Database(context.getOldDatabase()), inputPrivs, outputPrivs);
    }
  } catch (AuthorizationException e) {
    throw invalidOperationException(e);
  } catch (HiveException e) {
    throw metaException(e);
  }
}
/**
 * Builds an in-memory test Database in the default catalog: description equals
 * the database name, location is file:/tmp, parameters are empty, and the
 * given user owns it.
 */
private Database createTestDb(String dbName, String dbOwner) {
  Database db = new Database(dbName, dbName, "file:/tmp", new HashMap<>());
  db.setOwnerName(dbOwner);
  db.setOwnerType(PrincipalType.USER);
  db.setCatalogName(DEFAULT_CATALOG_NAME);
  db.setCreateTime((int) (System.currentTimeMillis() / 1000));
  return db;
}
/**
 * Authorizes CREATE DATABASE DDL work: when the work describes a database
 * creation, checks the CREATE privilege against a Database built from the
 * descriptor. Other DDL work is ignored here.
 */
@Override
protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext context, Hive hive, DDLWork work)
    throws HiveException {
  CreateDatabaseDesc createDb = work.getCreateDatabaseDesc();
  if (createDb == null) {
    return;
  }
  Database db = new Database(
      createDb.getName(), createDb.getComment(),
      createDb.getLocationUri(), createDb.getDatabaseProperties());
  authorize(db, Privilege.CREATE);
}
}
/**
 * Verifies that getTableMeta returns exactly the one table created in the
 * "hive" catalog, with the expected table and database names.
 */
@Test
public void testGetTableMetaFetchGroup()
    throws MetaException, InvalidObjectException, InvalidOperationException {
  objectStore = createObjectStore();
  Database testDb = new Database(DB1, "description", "locurl", null);
  testDb.setCatalogName("hive");
  objectStore.createDatabase(testDb);
  objectStore.createTable(makeTable(DB1, TBL1));
  List<TableMeta> metas = objectStore.getTableMeta("hive", "*", "*", Collections.emptyList());
  Assert.assertEquals("Number of items for tableMeta is incorrect", 1, metas.size());
  Assert.assertEquals("Table name incorrect", TBL1, metas.get(0).getTableName());
  Assert.assertEquals("Db name incorrect", DB1, metas.get(0).getDbName());
}
@Test public void testCreateDb(){ Database db = new Database(); db.setName("testdb"); NotificationEvent event = new NotificationEvent(getEventId(), getTime(), HCatConstants.HCAT_CREATE_DATABASE_EVENT, msgFactory.buildCreateDatabaseMessage(db).toString()); event.setDbName(db.getName()); HCatNotificationEvent hev = new HCatNotificationEvent(event); ReplicationTask rtask = ReplicationTask.create(client,hev); assertEquals(hev.toString(), rtask.getEvent().toString()); verifyCreateDbReplicationTask(rtask); // CREATE DB currently replicated as Noop. }
/**
 * Verifies that a DROP_DATABASE notification event round-trips into the
 * expected drop-database replication task.
 */
@Test
public void testDropDb() throws IOException {
  Database db = new Database();
  db.setName("testdb");
  // Fix: the DROP_DATABASE event must carry a drop-database message; the
  // previous code built a create-database message (copy/paste from
  // testCreateDb), so the event payload did not match its event type.
  NotificationEvent event = new NotificationEvent(getEventId(), getTime(),
      HCatConstants.HCAT_DROP_DATABASE_EVENT,
      msgFactory.buildDropDatabaseMessage(db).toString());
  event.setDbName(db.getName());
  HCatNotificationEvent hev = new HCatNotificationEvent(event);
  ReplicationTask rtask = ReplicationTask.create(client, hev);
  assertEquals(hev.toString(), rtask.getEvent().toString());
  verifyDropDbReplicationTask(rtask);
}