private void analyzeLockDatabase(ASTNode ast) throws SemanticException { String dbName = unescapeIdentifier(ast.getChild(0).getText()); String mode = unescapeIdentifier(ast.getChild(1).getText().toUpperCase()); inputs.add(new ReadEntity(getDatabase(dbName))); // Lock database operation is to acquire the lock explicitly, the operation // itself doesn't need to be locked. Set the WriteEntity as WriteType: // DDL_NO_LOCK here, otherwise it will conflict with Hive's transaction. outputs.add(new WriteEntity(getDatabase(dbName), WriteType.DDL_NO_LOCK)); LockDatabaseDesc lockDatabaseDesc = new LockDatabaseDesc(dbName, mode, HiveConf.getVar(conf, ConfVars.HIVEQUERYID)); lockDatabaseDesc.setQueryStr(ctx.getCmd()); DDLWork work = new DDLWork(getInputs(), getOutputs(), lockDatabaseDesc); rootTasks.add(TaskFactory.get(work)); ctx.setNeedLockMgr(true); }
/**
 * Acquires an explicit lock on a database through the configured lock manager.
 *
 * @param hiveDB Hive session used to resolve the database object
 * @param lockDb descriptor carrying the database name, lock mode, query id
 *               and query string
 * @return 0 when the lock was granted, 1 when the lock manager returned no lock
 * @throws HiveException if the database does not exist or locking fails
 */
@Override
public int lockDatabase(Hive hiveDB, LockDatabaseDesc lockDb) throws HiveException {
  HiveLockManager lockMgr = getAndCheckLockManager();

  // Parse the requested mode first; an invalid mode string fails fast here.
  HiveLockMode lockMode = HiveLockMode.valueOf(lockDb.getMode());
  String dbName = lockDb.getDatabaseName();

  Database dbObj = hiveDB.getDatabase(dbName);
  if (dbObj == null) {
    throw new HiveException("Database " + dbName + " does not exist ");
  }

  // Lock payload: query id, creation timestamp, explicit-lock marker and the
  // original query text.
  HiveLockObjectData lockData =
      new HiveLockObjectData(lockDb.getQueryId(),
          String.valueOf(System.currentTimeMillis()),
          "EXPLICIT", lockDb.getQueryStr(), conf);

  HiveLock acquired =
      lockMgr.lock(new HiveLockObject(dbObj.getName(), lockData), lockMode, true);
  return acquired == null ? 1 : 0;
}
/**
 * Acquires an explicit lock on a database via the session's lock manager.
 *
 * @param hiveDB Hive session used to look up the database object
 * @param lockDb descriptor with the database name, lock mode, query id and
 *               query string
 * @return 0 if the lock was obtained, 1 if the lock manager returned null
 * @throws HiveException if the database does not exist or the lock attempt fails
 */
@Override
public int lockDatabase(Hive hiveDB, LockDatabaseDesc lockDb) throws HiveException {
  HiveLockManager lockMgr = getAndCheckLockManager();

  // Translate the mode string into the lock-manager enum up front.
  HiveLockMode requestedMode = HiveLockMode.valueOf(lockDb.getMode());
  String dbName = lockDb.getDatabaseName();

  Database database = hiveDB.getDatabase(dbName);
  if (database == null) {
    throw new HiveException("Database " + dbName + " does not exist ");
  }

  // Payload attached to the lock: query id, creation timestamp,
  // explicit-lock marker, and the query text.
  HiveLockObjectData lockData =
      new HiveLockObjectData(lockDb.getQueryId(),
          String.valueOf(System.currentTimeMillis()),
          "EXPLICIT", lockDb.getQueryStr());

  HiveLock granted =
      lockMgr.lock(new HiveLockObject(database.getName(), lockData), requestedMode, true);
  return granted == null ? 1 : 0;
}
private void analyzeLockDatabase(ASTNode ast) throws SemanticException { String dbName = unescapeIdentifier(ast.getChild(0).getText()); String mode = unescapeIdentifier(ast.getChild(1).getText().toUpperCase()); inputs.add(new ReadEntity(getDatabase(dbName))); // Lock database operation is to acquire the lock explicitly, the operation // itself doesn't need to be locked. Set the WriteEntity as WriteType: // DDL_NO_LOCK here, otherwise it will conflict with Hive's transaction. outputs.add(new WriteEntity(getDatabase(dbName), WriteType.DDL_NO_LOCK)); LockDatabaseDesc lockDatabaseDesc = new LockDatabaseDesc(dbName, mode, HiveConf.getVar(conf, ConfVars.HIVEQUERYID)); lockDatabaseDesc.setQueryStr(ctx.getCmd()); DDLWork work = new DDLWork(getInputs(), getOutputs(), lockDatabaseDesc); rootTasks.add(TaskFactory.get(work, conf)); ctx.setNeedLockMgr(true); }
// NOTE(review): this appears to be an excerpt of a lockDatabase
// implementation rather than a complete method — confirm against the full
// source before relying on it.
HiveLockMode mode = HiveLockMode.valueOf(lockDb.getMode()); // parse the requested lock mode; throws IllegalArgumentException on an unknown mode string
String dbName = lockDb.getDatabaseName();
// Lock payload: query id, creation timestamp, explicit-lock marker, query text.
// NOTE(review): the constructed object is discarded here — presumably the
// surrounding code that passes it to the lock manager was elided; verify.
new HiveLockObjectData(lockDb.getQueryId(), String.valueOf(System.currentTimeMillis()), "EXPLICIT", lockDb.getQueryStr());
private void analyzeLockDatabase(ASTNode ast) throws SemanticException { String dbName = unescapeIdentifier(ast.getChild(0).getText()); String mode = unescapeIdentifier(ast.getChild(1).getText().toUpperCase()); //inputs.add(new ReadEntity(dbName)); //outputs.add(new WriteEntity(dbName)); LockDatabaseDesc lockDatabaseDesc = new LockDatabaseDesc(dbName, mode, HiveConf.getVar(conf, ConfVars.HIVEQUERYID)); lockDatabaseDesc.setQueryStr(ctx.getCmd()); DDLWork work = new DDLWork(getInputs(), getOutputs(), lockDatabaseDesc); rootTasks.add(TaskFactory.get(work, conf)); ctx.setNeedLockMgr(true); }