/**
 * Builds a HiveConf pointing at the given metastore, secured with thrift SASL.
 * Returns {@code null} when token auth is disabled, since no conf is needed then.
 */
private HiveConf createHiveConf(String metaStoreURI, boolean tokenAuthEnabled) {
  if (!tokenAuthEnabled) {
    // No token auth -> callers are expected to handle the null conf.
    return null;
  }
  HiveConf conf = new HiveConf();
  conf.setVar(HiveConf.ConfVars.METASTOREURIS, metaStoreURI);
  conf.setBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL, true);
  return conf;
}
/**
 * Creates a HiveConf with authorization V2 enabled, backed by a dummy
 * authorization factory that records the user name it is invoked with.
 *
 * @return the configured HiveConf
 */
private HiveConf getAuthV2HiveConf() {
  HiveConf hiveConf = new HiveConf();
  hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      HiveAuthorizerStoringUserNameFactory.class.getName());
  hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
      SessionStateUserAuthenticator.class.getName());
  return hiveConf;
}
public TestDbTxnManager2() throws Exception {
  // SQL-standard authorizer so authorization-sensitive paths are exercised.
  conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  conf.setBoolVar(HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED, false);
  // Point the conf at the test transaction database.
  TxnDbUtil.setConfValues(conf);
  conf.setVar(HiveConf.ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict");
}

@Before
public TestHiveWriter() throws Exception {
  port = 9083;
  // Kept null so the embedded metastore from HiveSetupUtil is used; assign a
  // thrift URI here to target an external metastore instead.
  metaStoreURI = null;

  int poolSize = 1;
  callTimeoutPool = Executors.newFixedThreadPool(poolSize,
      new ThreadFactoryBuilder().setNameFormat("hiveWriterTest").build());

  // 1) Start metastore
  conf = HiveSetupUtil.getHiveConf();
  TxnDbUtil.setConfValues(conf);
  if (metaStoreURI != null) {
    conf.setVar(HiveConf.ConfVars.METASTOREURIS, metaStoreURI);
  }
}
public TestHiveBolt() throws Exception {
  // Example external metastore URI (derby on local disk):
  //metaStoreURI = "jdbc:derby:;databaseName="+System.getProperty("java.io.tmpdir") +"metastore_db;create=true";
  // Null means the embedded metastore from HiveSetupUtil is used.
  metaStoreURI = null;

  conf = HiveSetupUtil.getHiveConf();
  TxnDbUtil.setConfValues(conf);
  if (metaStoreURI != null) {
    conf.setVar(HiveConf.ConfVars.METASTOREURIS, metaStoreURI);
  }
}
/**
 * Builds a HiveConf for a kerberized metastore: SASL-secured thrift transport,
 * limited connection retries, and concurrency support disabled.
 *
 * @param metaStoreURI            thrift URI of the metastore
 * @param hiveMetaStorePrincipal  kerberos principal of the metastore service
 * @return the configured HiveConf
 */
public HiveConf createHiveConf(String metaStoreURI, String hiveMetaStorePrincipal) throws IOException {
  HiveConf conf = new HiveConf();
  conf.setVar(HiveConf.ConfVars.METASTOREURIS, metaStoreURI);
  conf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
  conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
  conf.setBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL, true);
  conf.set(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname, hiveMetaStorePrincipal);
  return conf;
}
@Test
public void testCreateCandidatePrincipalsUserPatternsDefaultBaseDn() {
  // With only a guid key and a base DN configured, the candidate principal is
  // derived as "<guidKey>=<user>,<baseDn>".
  HiveConf ldapConf = new HiveConf();
  ldapConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GUIDKEY, "sAMAccountName");
  ldapConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_BASEDN, "dc=mycompany,dc=com");

  List<String> expectedPrincipals = Arrays.asList("sAMAccountName=user1,dc=mycompany,dc=com");
  List<String> actualPrincipals = LdapUtils.createCandidatePrincipals(ldapConf, "user1");
  assertEquals(expectedPrincipals, actualPrincipals);
}
@Test
public void testUserMembershipKeyFilterApplyPositiveWithUserId()
    throws AuthenticationException, NamingException, IOException {
  conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_USERMEMBERSHIP_KEY, "memberOf");
  conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER, "Group1,Group2");

  // Membership in the second group alone is enough for the filter to pass.
  when(search.findGroupDn("Group1")).thenReturn("cn=Group1,dc=a,dc=b");
  when(search.findGroupDn("Group2")).thenReturn("cn=Group2,dc=a,dc=b");
  when(search.isUserMemberOfGroup("User1", "cn=Group2,dc=a,dc=b")).thenReturn(true);

  Filter membershipFilter = factory.getInstance(conf);
  // Must not throw for a user that belongs to at least one configured group.
  membershipFilter.apply(search, "User1");
}
public static HiveConf getHiveConf() { HiveConf conf = new HiveConf(); // String metastoreDBLocation = "jdbc:derby:databaseName=/tmp/metastore_db;create=true"; // conf.set("javax.jdo.option.ConnectionDriverName","org.apache.derby.jdbc.EmbeddedDriver"); // conf.set("javax.jdo.option.ConnectionURL",metastoreDBLocation); conf.set("fs.raw.impl", RawFileSystem.class.getName()); conf.setVar(HiveConf.ConfVars.HIVE_TXN_MANAGER, txnMgr); conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, true); return conf; }
/**
 * Builds a vectorized Parquet record reader over {@code file} using the given
 * read schema, with vectorization forced on.
 */
public static VectorizedParquetRecordReader createTestParquetReader(String schemaString, Configuration conf)
    throws IOException, InterruptedException, HiveException {
  conf.set(PARQUET_READ_SCHEMA, schemaString);
  HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_VECTORIZATION_ENABLED, true);
  // NOTE(review): "//tmp" (double slash) matches the value used elsewhere in
  // these tests — confirm it is intentional before normalizing.
  HiveConf.setVar(conf, HiveConf.ConfVars.PLAN, "//tmp");

  Job vectorJob = new Job(conf, "read vector");
  ParquetInputFormat.setInputPaths(vectorJob, file);
  initialVectorizedRowBatchCtx(conf);
  return new VectorizedParquetRecordReader(getFileSplit(vectorJob), new JobConf(conf));
}
@Test
public void testGetInstanceOfGroupMembershipKeyFilter() {
  // A configured group filter should make the factory hand back a
  // GroupMembershipKeyFilter.
  conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER, "G1");
  Filter produced = factory.getInstance(conf);
  assertNotNull(produced);
  assertThat(produced, instanceOf(GroupFilterFactory.GroupMembershipKeyFilter.class));
}
@Test(expected = AuthenticationException.class)
public void testApplyNegative() throws AuthenticationException, NamingException, IOException {
  // "User3" is not in the allow-list, so apply() must reject it.
  conf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_USERFILTER, "User1,User2");
  Filter userFilter = factory.getInstance(conf);
  userFilter.apply(search, "User3");
}
}
@Test
public void testStatCachingQuery() throws Exception {
  // Query-scoped stats persistence: stats should not survive across queries.
  HiveConf hiveConf = env_setup.getTestCtx().hiveConf;
  hiveConf.setVar(ConfVars.HIVE_QUERY_REEXECUTION_STATS_PERSISTENCE, "query");
  hiveConf.setBoolVar(ConfVars.HIVE_QUERY_REEXECUTION_ALWAYS_COLLECT_OPERATOR_STATS, true);
  checkRuntimeStatsReuse(false, false, false);
}
public TestDbTxnManager() throws Exception {
  // SQL-standard authorizer so authorization-sensitive code paths run.
  conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  TxnDbUtil.setConfValues(conf);
  SessionState.start(conf);
  ctx = new Context(conf);
  // Run tearDown once up front so each suite starts from a clean txn state.
  tearDown();
}
/** Builds a non-vectorized driver with SQL-standard authorization enabled. */
private static IDriver createDriver() {
  HiveConf driverConf = env_setup.getTestCtx().hiveConf;
  driverConf.setBoolVar(ConfVars.HIVE_VECTORIZATION_ENABLED, false);
  driverConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  SessionState.start(driverConf);
  return DriverFactory.newDriver(driverConf);
}
}
/**
 * Configures the given whitelist parameter with the supplied regexes, applies
 * the SQL-standard authorization policy, and verifies that exactly the
 * expected parameters remain settable.
 */
private void verifySettability(List<String> paramRegexes, List<String> settableParams,
    ConfVars whiteListParam) throws HiveAuthzPluginException {
  HiveConf processedConf = newAuthEnabledConf();
  // Whitelist entries are joined into a single alternation regex.
  processedConf.setVar(whiteListParam, Joiner.on("|").join(paramRegexes));

  SQLStdHiveAccessController accessController = new SQLStdHiveAccessController(
      null, processedConf, new HadoopDefaultAuthenticator(), getHS2SessionCtx());
  accessController.applyAuthorizationConfigPolicy(processedConf);
  verifyParamSettability(settableParams, processedConf);
}
/**
 * Builds a driver with SQL-standard authorization enabled and concurrency
 * support turned off.
 */
private static IDriver createDriver() {
  HiveConf driverConf = env_setup.getTestCtx().hiveConf;
  driverConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  HiveConf.setBoolVar(driverConf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
  SessionState.start(driverConf);
  return DriverFactory.newDriver(driverConf);
}
@Test
public void testSslIsFalse() {
  // PAM with SSL disabled on the web UI must be rejected at init time.
  thrown.expect(IllegalArgumentException.class);
  thrown.expectMessage(is(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL.varname
      + " has false value. It is recommended to set to true when PAM is used."));

  hiveConf.setVar(ConfVars.HIVE_SERVER2_PAM_SERVICES, "sshd");
  hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL, false);
  hiveServer2 = new HiveServer2();
  hiveServer2.init(hiveConf);
}
public TestVectorNegative() {
  // Arithmetic operations pull their conf from SessionState, so a session
  // must be registered before any expression under test is evaluated.
  SessionState sessionState = new SessionState(new HiveConf());
  sessionState.getConf().setVar(HiveConf.ConfVars.HIVE_COMPAT, "latest");
  SessionState.setCurrentSessionState(sessionState);
}
@BeforeClass
public static void onetimeSetup() throws Exception {
  // Enable SQL-standard authorization, then create the table the tests use.
  HiveConf conf = new HiveConf(TestHooks.class);
  conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");

  Driver driver = createDriver(conf);
  int rc = driver.run("create table t1(i int)").getResponseCode();
  assertEquals("Checking command success", 0, rc);
}