// NOTE(review): garbled/truncated fragment — two identical copies of the opening of
// verify(...). The body contains four consecutive unreachable `return` statements and
// an orphaned `catch` with no matching `try`, so this cannot compile as-is; it looks
// like line-collapse damage from an extraction/merge. Restore the full method from
// version control rather than editing in place.
private HiveLockObject verify(boolean verify, String[] names, HiveLockObjectData data, HiveConf conf) throws LockException { if (!verify) { return new HiveLockObject(names, data); return new HiveLockObject(tab, data); return new HiveLockObject(new DummyPartition(tab, null, partSpec), data); return new HiveLockObject(partn, data); } catch (Exception e) { throw new LockException(e);
private HiveLockObject verify(boolean verify, String[] names, HiveLockObjectData data, HiveConf conf) throws LockException { if (!verify) { return new HiveLockObject(names, data); return new HiveLockObject(tab, data); return new HiveLockObject(new DummyPartition(tab, null, partSpec), data); return new HiveLockObject(partn, data); } catch (Exception e) { throw new LockException(e);
// NOTE(review): truncated fragment — consecutive unreachable `return` statements and an
// orphaned `catch` logging "Failed to create ZooKeeper object". Presumably the interior
// of a variant of verify(...) that dispatches on lock-path depth (table / dummy
// partition / real partition) — confirm against the original file before editing.
return new HiveLockObject(names, data); return new HiveLockObject(tab, data); return new HiveLockObject(new DummyPartition(tab, path, partSpec), data); return new HiveLockObject(partn, data); } catch (Exception e) { LOG.error("Failed to create ZooKeeper object: " + e);
/**
 * Builds a {@code HiveLockObject} for the given slash-separated lock path,
 * tagging it with the next value of the running counter and the query text.
 */
private HiveLockObject lockObj(String path, String query) {
  String lockId = String.valueOf(++counter);
  HiveLockObjectData objData = new HiveLockObjectData(lockId, null, null, query, conf);
  String[] pathComponents = path.split("/");
  return new HiveLockObject(pathComponents, objData);
}
}
/**
 * Creates a locking object for a table (when partition spec is not provided)
 * or a table partition.
 *
 * @param hiveDB an object to communicate with the metastore
 * @param tableName the table to create the locking object on
 * @param partSpec the spec of a partition to create the locking object on
 * @return the locking object
 * @throws HiveException if the table or the requested partition is missing
 */
public static HiveLockObject createFrom(Hive hiveDB, String tableName,
    Map<String, String> partSpec) throws HiveException {
  Table tbl = hiveDB.getTable(tableName);
  if (tbl == null) {
    throw new HiveException("Table " + tableName + " does not exist ");
  }
  // No partition spec: lock the whole table.
  if (partSpec == null) {
    return new HiveLockObject(tbl, null);
  }
  Partition par = hiveDB.getPartition(tbl, partSpec, false);
  if (par == null) {
    throw new HiveException("Partition " + partSpec + " for table " + tableName + " does not exist");
  }
  return new HiveLockObject(par, null);
}
// NOTE(review): truncated fragment, byte-identical to the earlier residue at L3 —
// unreachable `return`s followed by an orphaned `catch` that logs
// "Failed to create ZooKeeper object". Non-compilable collapse damage; recover the
// surrounding method from version control.
return new HiveLockObject(names, data); return new HiveLockObject(tab, data); return new HiveLockObject(new DummyPartition(tab, path, partSpec), data); return new HiveLockObject(partn, data); } catch (Exception e) { LOG.error("Failed to create ZooKeeper object: " + e);
// NOTE(review): truncated fragment — interleaved `locks.add`/`return locks` statements
// with an unterminated `new HiveLockObject(new DummyPartition(...))` call. Presumably
// the interior of a lock-object collection routine that adds database-, table-,
// partition- and dummy-partition-level locks (note the fully-qualified
// org.apache.hadoop.hive.metastore.utils.MetaStoreUtils here vs. the short name in the
// near-duplicate fragment below) — confirm against the original before editing.
locks.add(new HiveLockObj(new HiveLockObject(db.getName(), lockData), mode)); return locks; locks.add(new HiveLockObj(new HiveLockObject(t, lockData), mode)); mode = HiveLockMode.SHARED; locks.add(new HiveLockObj(new HiveLockObject(t.getDbName(), lockData), mode)); return locks; locks.add(new HiveLockObj(new HiveLockObject(p, lockData), mode)); try { locks.add(new HiveLockObj( new HiveLockObject(new DummyPartition(p.getTable(), p.getTable().getDbName() + "/" + org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.encodeTableName(p.getTable().getTableName()) + "/" + partialName, locks.add(new HiveLockObj(new HiveLockObject(p.getTable(), lockData), mode)); locks.add(new HiveLockObj(new HiveLockObject(p.getTable().getDbName(), lockData), mode));
/**
 * Creates a locking object for a table (when partition spec is not provided)
 * or a table partition.
 *
 * @param hiveDB an object to communicate with the metastore
 * @param tableName the table to create the locking object on
 * @param partSpec the spec of a partition to create the locking object on
 * @return the locking object
 * @throws HiveException if the table or the requested partition is missing
 */
public static HiveLockObject createFrom(Hive hiveDB, String tableName,
    Map<String, String> partSpec) throws HiveException {
  Table tbl = hiveDB.getTable(tableName);
  if (tbl == null) {
    throw new HiveException("Table " + tableName + " does not exist ");
  }

  HiveLockObject obj;
  if (partSpec != null) {
    // Lock the single partition identified by the spec.
    Partition par = hiveDB.getPartition(tbl, partSpec, false);
    if (par == null) {
      throw new HiveException("Partition " + partSpec + " for table " + tableName + " does not exist");
    }
    obj = new HiveLockObject(par, null);
  } else {
    obj = new HiveLockObject(tbl, null);
  }
  return obj;
}
// NOTE(review): truncated fragment — near-duplicate of the residue above, differing only
// in using the short `MetaStoreUtils` name (two file revisions collapsed together?).
// Contains an unterminated `new HiveLockObject(new DummyPartition(...))` call and
// unreachable statements after `return locks;`. Non-compilable; restore from history.
locks.add(new HiveLockObj(new HiveLockObject(db.getName(), lockData), mode)); return locks; locks.add(new HiveLockObj(new HiveLockObject(t, lockData), mode)); mode = HiveLockMode.SHARED; locks.add(new HiveLockObj(new HiveLockObject(t.getDbName(), lockData), mode)); return locks; locks.add(new HiveLockObj(new HiveLockObject(p, lockData), mode)); try { locks.add(new HiveLockObj( new HiveLockObject(new DummyPartition(p.getTable(), p.getTable().getDbName() + "/" + MetaStoreUtils.encodeTableName(p.getTable().getTableName()) + "/" + partialName, locks.add(new HiveLockObj(new HiveLockObject(p.getTable(), lockData), mode)); locks.add(new HiveLockObj(new HiveLockObject(p.getTable().getDbName(), lockData), mode));
@Test
public void testEqualsAndHashCode() {
  // Two HiveLockObjectData instances built from identical fields must compare
  // equal and hash identically; the same must hold for HiveLockObjects that
  // wrap them over the same table name.
  HiveLockObjectData lhsData = new HiveLockObjectData("ID1", "SHARED", "1997-07-01", "select * from mytable", conf);
  HiveLockObjectData rhsData = new HiveLockObjectData("ID1", "SHARED", "1997-07-01", "select * from mytable", conf);
  Assert.assertEquals(lhsData, rhsData);
  Assert.assertEquals(lhsData.hashCode(), rhsData.hashCode());

  HiveLockObject lhs = new HiveLockObject("mytable", lhsData);
  HiveLockObject rhs = new HiveLockObject("mytable", rhsData);
  Assert.assertEquals(lhs, rhs);
  Assert.assertEquals(lhs.hashCode(), rhs.hashCode());
}
/**
 * Releases every lock currently held on the given database.
 *
 * @return 0 on success
 * @throws HiveException if the database does not exist or holds no locks
 */
@Override
public int unlockDatabase(Hive hiveDB, UnlockDatabaseDesc unlockDb) throws HiveException {
  HiveLockManager lockMgr = getAndCheckLockManager();

  String dbName = unlockDb.getDatabaseName();
  Database dbObj = hiveDB.getDatabase(dbName);
  if (dbObj == null) {
    throw new HiveException("Database " + dbName + " does not exist ");
  }

  HiveLockObject obj = new HiveLockObject(dbObj.getName(), null);
  List<HiveLock> heldLocks = lockMgr.getLocks(obj, false, false);
  if (heldLocks == null || heldLocks.isEmpty()) {
    throw new HiveException("Database " + dbName + " is not locked ");
  }

  for (HiveLock heldLock : heldLocks) {
    lockMgr.unlock(heldLock);
  }
  return 0;
}
// NOTE(review): truncated test fragment — a run of `lockObjs.add(...)` statements
// (same path locked SHARED and EXCLUSIVE, duplicates of the SHARED path2 lock) with the
// enclosing test method's header and assertions missing. Presumably exercises lock
// de-duplication / mode-priority logic — confirm against the original test before editing.
lockObjs.add(new HiveLockObj(new HiveLockObject(path1, lockData1), HiveLockMode.SHARED)); String name1 = lockObjs.get(lockObjs.size() - 1).getName(); lockObjs.add(new HiveLockObj(new HiveLockObject(path1, lockData1), HiveLockMode.EXCLUSIVE)); lockObjs.add(new HiveLockObj(new HiveLockObject(path2, lockData2), HiveLockMode.SHARED)); String name2 = lockObjs.get(lockObjs.size() - 1).getName(); lockObjs.add(new HiveLockObj(new HiveLockObject(path2, lockData2), HiveLockMode.SHARED)); lockObjs.add(new HiveLockObj(new HiveLockObject(path2, lockData2), HiveLockMode.SHARED));
/**
 * Prepares the test fixture: a shared lock object plus an in-process ZooKeeper
 * testing server and a connected Curator client.
 */
@Before
public void setup() {
  conf = new HiveConf();
  lockObjData = new HiveLockObjectData("1", "10", "SHARED", "show tables", conf);
  hiveLock = new HiveLockObject(TABLE, lockObjData);
  zLock = new ZooKeeperHiveLock(TABLE_LOCK_PATH, hiveLock, HiveLockMode.SHARED);

  // Retry server allocation until a port binds.
  // NOTE(review): the retry is unbounded — if binding can never succeed this
  // spins forever; consider capping the number of attempts.
  while (server == null) {
    try {
      server = new TestingServer();
      client = CuratorFrameworkFactory.builder()
          .connectString(server.getConnectString())
          .retryPolicy(new RetryOneTime(1))
          .build();
      client.start();
    } catch (Exception e) {
      System.err.println("Getting bind exception - retrying to allocate server");
      server = null;
    }
  }
}
/**
 * Acquires an explicit lock on the given database in the requested mode.
 *
 * @return 0 when the lock was granted, 1 when acquisition failed
 * @throws HiveException if the database does not exist
 */
@Override
public int lockDatabase(Hive hiveDB, LockDatabaseDesc lockDb) throws HiveException {
  HiveLockManager lockMgr = getAndCheckLockManager();

  HiveLockMode mode = HiveLockMode.valueOf(lockDb.getMode());
  String dbName = lockDb.getDatabaseName();

  Database dbObj = hiveDB.getDatabase(dbName);
  if (dbObj == null) {
    throw new HiveException("Database " + dbName + " does not exist ");
  }

  HiveLockObjectData lockData = new HiveLockObjectData(
      lockDb.getQueryId(),
      String.valueOf(System.currentTimeMillis()),
      "EXPLICIT",
      lockDb.getQueryStr(),
      conf);
  HiveLock acquired = lockMgr.lock(new HiveLockObject(dbObj.getName(), lockData), mode, true);
  return acquired == null ? 1 : 0;
}
/**
 * Drops all locks held on the named database.
 *
 * @return 0 on success
 * @throws HiveException if the database is missing or not locked
 */
@Override
public int unlockDatabase(Hive hiveDB, UnlockDatabaseDesc unlockDb) throws HiveException {
  HiveLockManager lockMgr = getAndCheckLockManager();

  String dbName = unlockDb.getDatabaseName();
  Database dbObj = hiveDB.getDatabase(dbName);
  if (dbObj == null) {
    throw new HiveException("Database " + dbName + " does not exist ");
  }

  List<HiveLock> dbLocks =
      lockMgr.getLocks(new HiveLockObject(dbObj.getName(), null), false, false);
  if (dbLocks == null || dbLocks.isEmpty()) {
    throw new HiveException("Database " + dbName + " is not locked ");
  }
  for (HiveLock dbLock : dbLocks) {
    lockMgr.unlock(dbLock);
  }
  return 0;
}
// NOTE(review): garbled/truncated fragment, duplicated twice — two `lockMgr.lock(...)`
// acquisitions (table and partition) with their surrounding control flow missing; the
// stray `tabName + " does not exist");` is the tail of a severed `throw new
// HiveException(...)`. `lck` is also declared twice, so this cannot compile. Restore
// the enclosing lockTable-style method from version control before editing.
HiveLock lck = lockMgr.lock(new HiveLockObject(tbl, lockData), mode, true); if (lck == null) { return 1; tabName + " does not exist"); HiveLock lck = lockMgr.lock(new HiveLockObject(par, lockData), mode, true); if (lck == null) { return 1;
HiveLock lck = lockMgr.lock(new HiveLockObject(tbl, lockData), mode, true); if (lck == null) { return 1; tabName + " does not exist"); HiveLock lck = lockMgr.lock(new HiveLockObject(par, lockData), mode, true); if (lck == null) { return 1;
/**
 * Acquires an explicit lock on the given database in the requested mode.
 *
 * NOTE(review): this copy builds HiveLockObjectData with four arguments while the
 * sibling copy above passes a fifth `conf` argument — likely two revisions of the
 * same method collapsed into one file; verify which constructor overload is current.
 *
 * @return 0 when the lock was granted, 1 when acquisition failed
 * @throws HiveException if the database does not exist
 */
@Override
public int lockDatabase(Hive hiveDB, LockDatabaseDesc lockDb) throws HiveException {
  HiveLockManager lockMgr = getAndCheckLockManager();

  HiveLockMode mode = HiveLockMode.valueOf(lockDb.getMode());
  String dbName = lockDb.getDatabaseName();

  Database dbObj = hiveDB.getDatabase(dbName);
  if (dbObj == null) {
    throw new HiveException("Database " + dbName + " does not exist ");
  }

  HiveLockObjectData lockData = new HiveLockObjectData(
      lockDb.getQueryId(),
      String.valueOf(System.currentTimeMillis()),
      "EXPLICIT",
      lockDb.getQueryStr());
  HiveLock acquired = lockMgr.lock(new HiveLockObject(dbObj.getName(), lockData), mode, true);
  return acquired == null ? 1 : 0;
}
// NOTE(review): truncated test fragment — builds two expected ZooKeeper locks (database
// and table scope, both SHARED, with empty HiveLockObjects) and two LockedDriverState
// instances, but the enclosing test method's header and assertions are missing.
// Confirm against the original test file before editing.
expectedLocks.add(new ZooKeeperHiveLock("default", new HiveLockObject(), HiveLockMode.SHARED)); expectedLocks.add(new ZooKeeperHiveLock("default.table1", new HiveLockObject(), HiveLockMode.SHARED)); LockedDriverState lDrvState = new LockedDriverState(); LockedDriverState lDrvInp = new LockedDriverState();
/**
 * Resolves a table name (and optional partition spec) into the HiveLockObject
 * that identifies the table or the single matching partition.
 *
 * @throws HiveException if the table or the requested partition is missing
 */
private HiveLockObject getHiveObject(String tabName, Map<String, String> partSpec)
    throws HiveException {
  Table tbl = db.getTable(tabName);
  if (tbl == null) {
    throw new HiveException("Table " + tabName + " does not exist ");
  }
  // No partition spec: the lock target is the table itself.
  if (partSpec == null) {
    return new HiveLockObject(tbl, null);
  }
  Partition par = db.getPartition(tbl, partSpec, false);
  if (par == null) {
    throw new HiveException("Partition " + partSpec + " for table " + tabName + " does not exist");
  }
  return new HiveLockObject(par, null);
}