/**
 * Per-test cleanup: closes the metastore connection and wipes the
 * transaction database so the next test starts from a clean slate.
 */
@After
public void tearDown() throws Exception {
  try {
    conn.close();
  } finally {
    // Always clean the txn DB even if closing the connection throws,
    // so one failing test cannot leave dirty state for later tests.
    TxnDbUtil.cleanDb(conf);
  }
}
}
/** Wipes the transaction metastore database after each test case. */
@After
public void tearDown() throws Exception {
  TxnDbUtil.cleanDb(conf);
}
}
/** Wipes the transaction metastore database after each test case. */
@After
public void tearDown() throws Exception {
  TxnDbUtil.cleanDb(conf);
}
/** Drops all transaction metadata after each test so tests stay isolated. */
@After
public void tearDown() throws Exception {
  TxnDbUtil.cleanDb(conf);
}
/** Resets the txn metastore database between tests. */
@After
public void tearDown() throws Exception {
  TxnDbUtil.cleanDb(conf);
}
/**
 * Resets the transaction metastore database for a test run: applies the
 * test configuration values, drops any existing txn metadata, then
 * re-creates the schema.
 *
 * @param conf Hive configuration pointing at the test database
 * @throws Exception if any of the txn DB utility steps fail
 */
public void prepareTransactionDatabase(HiveConf conf) throws Exception {
  TxnDbUtil.setConfValues(conf);
  TxnDbUtil.cleanDb(conf);
  TxnDbUtil.prepDb(conf);
}
/**
 * Per-test setup: builds a HiveConf configured for the txn test database,
 * wipes leftover txn metadata, and creates the metastore client, the txn
 * store handle, and a scratch directory for compactor test tables.
 */
@Before
public void setup() throws Exception {
  conf = new HiveConf();
  TxnDbUtil.setConfValues(conf);
  TxnDbUtil.cleanDb(conf);

  ms = new HiveMetaStoreClient(conf);
  txnHandler = TxnUtils.getTxnStore(conf);

  // Scratch space for test table data. NOTE(review): not registered for
  // deletion here -- presumably removed in tearDown; confirm elsewhere.
  tmpdir = new File(Files.createTempDirectory("compactor_test_table_").toString());
}
/**
 * Per-test cleanup: closes the transaction manager (if one was opened),
 * wipes the txn metastore database, and shuts down the shared thread pool.
 */
@After
public void tearDown() throws Exception {
  try {
    if (txnMgr != null) {
      txnMgr.closeTxnManager();
    }
  } finally {
    // Run the remaining cleanup even when closeTxnManager() throws, so a
    // single failing test does not leave a dirty DB or live threads behind
    // (matches the try/finally teardown pattern used by sibling tests).
    TxnDbUtil.cleanDb(conf);
    ThreadPool.shutdown();
  }
}
// Per-test cleanup: drops test tables and shuts down the driver; the finally
// block then wipes the txn DB and deletes the test data directory so that
// cleanup runs even when the driver teardown throws.
@After public void tearDown() throws Exception { try { if (d != null) { dropTables(); d.close(); d.destroy(); d = null; } } finally { TxnDbUtil.cleanDb(hiveConf); FileUtils.deleteDirectory(new File(getTestDataDir())); } } protected String getWarehouseDir() {
/**
 * Builds the streaming-test fixture: the partition value lists, a HiveConf
 * wired for ACID streaming (raw FS, SQL-standard authorization, UGI
 * propagation, concurrency), a freshly reset txn metastore database, and a
 * metastore client.
 */
public TestStreaming() throws Exception {
  partitionVals = new ArrayList<String>(2);
  partitionVals.add(PART1_CONTINENT);
  partitionVals.add(PART1_COUNTRY);

  partitionVals2 = new ArrayList<String>(1);
  partitionVals2.add(PART1_COUNTRY);

  conf = new HiveConf(this.getClass());
  conf.set("fs.raw.impl", RawFileSystem.class.getName());
  conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  TxnDbUtil.setConfValues(conf);
  conf.setBoolVar(HiveConf.ConfVars.METASTORE_EXECUTE_SET_UGI, true);
  conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, true);

  dbFolder.create();

  // 1) Start from a clean slate (metastore)
  TxnDbUtil.cleanDb(conf);
  TxnDbUtil.prepDb(conf);

  // 2) obtain metastore clients
  msClient = new HiveMetaStoreClient(conf);
}
/**
 * Per-test cleanup: drops test tables and shuts down the driver, then always
 * wipes the txn metastore DB and deletes the test data directory -- even when
 * driver teardown throws -- so one failing test cannot poison the next.
 */
@After
public void tearDown() throws Exception {
  try {
    if (d != null) {
      dropTables();
      d.close();
      d.destroy();
      d = null;
    }
  } finally {
    // cleanDb() used to sit inside the try block and was skipped whenever
    // dropTables()/close() threw; run it unconditionally instead, matching
    // the try/finally teardown pattern used by sibling test classes.
    TxnDbUtil.cleanDb(hiveConf);
    FileUtils.deleteDirectory(new File(TEST_DATA_DIR));
  }
}

@Test
public TestStreaming() throws Exception { partitionVals = new ArrayList<String>(2); partitionVals.add(PART1_CONTINENT); partitionVals.add(PART1_COUNTRY); partitionVals2 = new ArrayList<String>(1); partitionVals2.add(PART1_COUNTRY); conf = new HiveConf(this.getClass()); conf.set("fs.raw.impl", RawFileSystem.class.getName()); conf .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory"); TxnDbUtil.setConfValues(conf); if (metaStoreURI!=null) { conf.setVar(HiveConf.ConfVars.METASTOREURIS, metaStoreURI); } conf.setBoolVar(HiveConf.ConfVars.METASTORE_EXECUTE_SET_UGI, true); conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, true); dbFolder.create(); //1) Start from a clean slate (metastore) TxnDbUtil.cleanDb(conf); TxnDbUtil.prepDb(conf); //2) obtain metastore clients msClient = new HiveMetaStoreClient(conf); }
/**
 * Builds the dynamic-partitioning streaming fixture: a HiveConf wired for
 * ACID streaming with non-strict dynamic partitioning, a database folder, a
 * freshly reset txn metastore database, and a metastore client.
 */
public TestStreamingDynamicPartitioning() throws Exception {
  conf = new HiveConf(this.getClass());
  conf.set("fs.raw.impl", RawFileSystem.class.getName());
  conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  TxnDbUtil.setConfValues(conf);
  conf.setBoolVar(HiveConf.ConfVars.METASTORE_EXECUTE_SET_UGI, true);
  conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, true);
  conf.setVar(HiveConf.ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict");

  dbFolder.create();
  loc1 = dbFolder.newFolder(dbName + ".db").toString();

  // 1) Start from a clean slate (metastore)
  TxnDbUtil.cleanDb(conf);
  TxnDbUtil.prepDb(conf);

  // 2) obtain metastore clients
  msClient = new HiveMetaStoreClient(conf);
}
// Shared setup helper: initializes the HiveConf, wipes the txn metastore DB
// (no-arg cleanDb() -- the existing TODO notes the API gained a conf
// parameter in 3.0), and clears the test data directory.
// NOTE(review): method body appears truncated here; closing brace not visible.
private void setUpInternal() throws Exception { initHiveConf(); TxnDbUtil.cleanDb();//todo: api changed in 3.0 FileUtils.deleteDirectory(new File(getTestDataDir()));
/**
 * Per-test setup: starts a session, builds two drivers (one with an isolated
 * query state, one without), resets the txn metastore database, and
 * initializes the DB-backed transaction manager and txn store handle.
 */
@Before
public void setUp() throws Exception {
  SessionState.start(conf);
  ctx = new Context(conf);

  driver = new Driver(new QueryState.Builder().withHiveConf(conf).nonIsolated().build(), null);
  driver2 = new Driver(new QueryState.Builder().withHiveConf(conf).build(), null);

  TxnDbUtil.cleanDb(conf);
  TxnDbUtil.prepDb(conf);

  SessionState ss = SessionState.get();
  ss.initTxnMgr(conf);
  txnMgr = ss.getTxnMgr();
  // These tests exercise the DB-backed manager specifically.
  Assert.assertTrue(txnMgr instanceof DbTxnManager);
  txnHandler = TxnUtils.getTxnStore(conf);
}

@After