/**
 * Looks up a database in the cache.
 *
 * @param catName catalog name (used as part of the cache key)
 * @param name database name
 * @return a deep copy of the cached Database (so callers cannot mutate the
 *         cached instance), or {@code null} if it is not cached
 */
public Database getDatabaseFromCache(String catName, String name) {
  Database db = null;
  // Acquire the lock before the try block: if lock() itself failed, the
  // finally would otherwise call unlock() on a lock we never held.
  cacheLock.readLock().lock();
  try {
    // Single map lookup instead of the original get-twice pattern.
    Database cached = databaseCache.get(CacheUtils.buildDbKey(catName, name));
    if (cached != null) {
      db = cached.deepCopy();
    }
  } finally {
    cacheLock.readLock().unlock();
  }
  return db;
}
/**
 * Replaces the old db object with the new one. This will add the new database
 * to the cache if it does not exist.
 *
 * @param catName catalog of the database being replaced
 * @param dbName name of the database being replaced
 * @param newDb the replacement database object
 */
public void alterDatabaseInCache(String catName, String dbName, Database newDb) {
  // Lock before try so finally never unlocks a lock that was not acquired.
  cacheLock.writeLock().lock();
  try {
    removeDatabaseFromCache(catName, dbName);
    // addDatabaseToCache already deep-copies its argument, so the extra
    // deepCopy() the original code made here was redundant.
    addDatabaseToCache(newDb);
    isDatabaseCacheDirty.set(true);
  } finally {
    cacheLock.writeLock().unlock();
  }
}
public void addDatabaseToCache(Database db) { try { cacheLock.writeLock().lock(); Database dbCopy = db.deepCopy(); // ObjectStore also stores db name in lowercase dbCopy.setName(dbCopy.getName().toLowerCase()); dbCopy.setCatalogName(dbCopy.getCatalogName().toLowerCase()); databaseCache.put(CacheUtils.buildDbKey(dbCopy.getCatalogName(), dbCopy.getName()), dbCopy); isDatabaseCacheDirty.set(true); } finally { cacheLock.writeLock().unlock(); } }
public void populateDatabasesInCache(List<Database> databases) { for (Database db : databases) { Database dbCopy = db.deepCopy(); // ObjectStore also stores db name in lowercase dbCopy.setName(dbCopy.getName().toLowerCase()); try { cacheLock.writeLock().lock(); // Since we allow write operations on cache while prewarm is happening: // 1. Don't add databases that were deleted while we were preparing list for prewarm // 2. Skip overwriting exisiting db object // (which is present because it was added after prewarm started) String key = CacheUtils.buildDbKey(dbCopy.getCatalogName().toLowerCase(), dbCopy.getName().toLowerCase()); if (databasesDeletedDuringPrewarm.contains(key)) { continue; } databaseCache.putIfAbsent(key, dbCopy); databasesDeletedDuringPrewarm.clear(); isDatabaseCachePrewarmed = true; } finally { cacheLock.writeLock().unlock(); } } }
@Override public synchronized void createDatabase(Database database) { requireNonNull(database, "database is null"); File directory; if (database.getLocationUri() != null) { directory = new File(URI.create(database.getLocationUri())); } else { // use Hive default naming convention directory = new File(baseDirectory, database.getName() + ".db"); database = database.deepCopy(); database.setLocationUri(directory.toURI().toString()); } checkArgument(!directory.exists(), "Database directory already exists"); checkArgument(isParentDir(directory, baseDirectory), "Database directory must be inside of the metastore base directory"); checkArgument(directory.mkdirs(), "Could not create database directory"); if (databases.putIfAbsent(database.getName(), database) != null) { throw new SchemaAlreadyExistsException(database.getName()); } }
@Test public void testAlterDatabaseCaseInsensitive() throws Exception { Database originalDatabase = testDatabases[0]; Database newDatabase = originalDatabase.deepCopy(); newDatabase.setDescription("Altered database"); // Test in upper case client.alterDatabase(originalDatabase.getName().toUpperCase(), newDatabase); Database alteredDatabase = client.getDatabase(newDatabase.getName()); Assert.assertEquals("Comparing databases", newDatabase, alteredDatabase); // Test in mixed case originalDatabase = testDatabases[2]; newDatabase = originalDatabase.deepCopy(); newDatabase.setDescription("Altered database 2"); client.alterDatabase("TeST_daTAbaSe_TO_FiNd_2", newDatabase); alteredDatabase = client.getDatabase(newDatabase.getName()); Assert.assertEquals("Comparing databases", newDatabase, alteredDatabase); }
@Override public synchronized void createDatabase(Database database) { requireNonNull(database, "database is null"); File directory; if (database.getLocationUri() != null) { directory = new File(URI.create(database.getLocationUri())); } else { // use Hive default naming convention directory = new File(baseDirectory, database.getName() + ".db"); database = database.deepCopy(); database.setLocationUri(directory.toURI().toString()); } checkArgument(!directory.exists(), "Database directory already exists"); checkArgument(isParentDir(directory, baseDirectory), "Database directory must be inside of the metastore base directory"); checkArgument(directory.mkdirs(), "Could not create database directory"); if (databases.putIfAbsent(database.getName(), database) != null) { throw new SchemaAlreadyExistsException(database.getName()); } }
@Override public synchronized void createDatabase(Database database) { requireNonNull(database, "database is null"); File directory; if (database.getLocationUri() != null) { directory = new File(URI.create(database.getLocationUri())); } else { // use Hive default naming convention directory = new File(baseDirectory, database.getName() + ".db"); database = database.deepCopy(); database.setLocationUri(directory.toURI().toString()); } checkArgument(!directory.exists(), "Database directory already exists"); checkArgument(isParentDir(directory, baseDirectory), "Database directory must be inside of the metastore base directory"); checkArgument(directory.mkdirs(), "Could not create database directory"); if (databases.putIfAbsent(database.getName(), database) != null) { throw new SchemaAlreadyExistsException(database.getName()); } }
@Override public synchronized void createDatabase(Database database) { requireNonNull(database, "database is null"); File directory; if (database.getLocationUri() != null) { directory = new File(URI.create(database.getLocationUri())); } else { // use Hive default naming convention directory = new File(baseDirectory, database.getName() + ".db"); database = database.deepCopy(); database.setLocationUri(directory.toURI().toString()); } checkArgument(!directory.exists(), "Database directory already exists"); checkArgument(isParentDir(directory, baseDirectory), "Database directory must be inside of the metastore base directory"); checkArgument(directory.mkdirs(), "Could not create database directory"); if (databases.putIfAbsent(database.getName(), database) != null) { throw new SchemaAlreadyExistsException(database.getName()); } }