/**
 * A null or empty user name must not count against the per-user connection
 * limit: limit + 1 sessions open without a limit exception being raised.
 */
@Test
public void testInvalidUserName() throws HiveSQLException {
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER, 10);
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_IPADDRESS, 0);
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER_IPADDRESS, 0);
  CLIService service = getService(conf);
  List<SessionHandle> opened = new ArrayList<>();
  try {
    int attempts = limit + 1;
    // null user name: no limit exception expected even past the limit
    for (int n = 0; n < attempts; n++) {
      opened.add(service.openSession(CLIService.SERVER_VERSION, null, "bar", "127.0.0.1", null));
    }
    // empty user name behaves the same way
    for (int n = 0; n < attempts; n++) {
      opened.add(service.openSession(CLIService.SERVER_VERSION, "", "bar", "127.0.0.1", null));
    }
  } finally {
    for (SessionHandle h : opened) {
      service.closeSession(h);
    }
    service.stop();
  }
}
/**
 * Creates a server-side session for an incoming Thrift OpenSession request.
 *
 * Resolves the effective user name and client IP address, negotiates the
 * protocol version, and opens the session either with impersonation (doAs)
 * or directly, depending on server configuration.
 *
 * @param req the Thrift open-session request (user, password, client protocol, overlay config)
 * @param res the Thrift response; updated in place with the negotiated protocol version
 * @return handle of the newly opened session
 * @throws HiveSQLException if the session cannot be opened
 * @throws LoginException if user login fails
 * @throws IOException on I/O failure (e.g. while obtaining the delegation token)
 */
SessionHandle getSessionHandle(TOpenSessionReq req, TOpenSessionResp res)
    throws HiveSQLException, LoginException, IOException {
  String userName = getUserName(req);
  String ipAddress = getIpAddress();
  // Never speak a newer protocol than the client advertises.
  TProtocolVersion protocol = getMinVersion(CLIService.SERVER_VERSION, req.getClient_protocol());
  SessionHandle sessionHandle;
  if (cliService.getHiveConf().getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS)
      && (userName != null)) {
    // doAs enabled and user known: open the session as that user via a delegation token.
    String delegationTokenStr = getDelegationToken(userName);
    sessionHandle = cliService.openSessionWithImpersonation(protocol, userName,
        req.getPassword(), ipAddress, req.getConfiguration(), delegationTokenStr);
  } else {
    sessionHandle = cliService.openSession(protocol, userName, req.getPassword(),
        ipAddress, req.getConfiguration());
  }
  // Tell the client which protocol version was actually negotiated.
  res.setServerProtocolVersion(protocol);
  return sessionHandle;
}
/**
 * A null or empty client ip address must not count against the per-ipaddress
 * connection limit: limit + 1 sessions open without a limit exception.
 */
@Test
public void testInvalidIpaddress() throws HiveSQLException {
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER, 0);
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_IPADDRESS, 10);
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER_IPADDRESS, 0);
  CLIService service = getService(conf);
  List<SessionHandle> opened = new ArrayList<>();
  try {
    int attempts = limit + 1;
    // null ip address: no limit exception expected even past the limit
    for (int n = 0; n < attempts; n++) {
      opened.add(service.openSession(CLIService.SERVER_VERSION, "foo", "bar", null, null));
    }
    // empty ip address behaves the same way
    for (int n = 0; n < attempts; n++) {
      opened.add(service.openSession(CLIService.SERVER_VERSION, "foo", "bar", "", null));
    }
  } finally {
    for (SessionHandle h : opened) {
      service.closeSession(h);
    }
    service.stop();
  }
}
/**
 * Opens a session for a Thrift OpenSession request and records the
 * negotiated protocol version on the response.
 *
 * @param req incoming open-session request
 * @param res response to stamp with the negotiated protocol version
 * @return handle of the newly opened session
 * @throws HiveSQLException if the session cannot be opened
 * @throws LoginException if user login fails
 * @throws IOException on I/O failure
 */
SessionHandle getSessionHandle(TOpenSessionReq req, TOpenSessionResp res)
    throws HiveSQLException, LoginException, IOException {
  final String user = getUserName(req);
  final String clientIp = getIpAddress();
  final TProtocolVersion negotiated =
      getMinVersion(CLIService.SERVER_VERSION, req.getClient_protocol());

  // Impersonation applies only when doAs is on and a user was resolved.
  final boolean doAs =
      cliService.getHiveConf().getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS) && user != null;

  final SessionHandle handle;
  if (doAs) {
    String delegationToken = getDelegationToken(user);
    handle = cliService.openSessionWithImpersonation(negotiated, user, req.getPassword(),
        clientIp, req.getConfiguration(), delegationToken);
  } else {
    handle = cliService.openSession(negotiated, user, req.getPassword(), clientIp,
        req.getConfiguration());
  }
  res.setServerProtocolVersion(negotiated);
  return handle;
}
@Test public void testIncrementAndDecrementConnectionsUser() throws HiveSQLException { conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER, 10); conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_IPADDRESS, 0); conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER_IPADDRESS, 0); CLIService service = getService(conf); List<SessionHandle> sessionHandles = new ArrayList<>(); try { // open 5 connections for (int i = 0; i < limit / 2; i++) { SessionHandle session = service.openSession(CLIService.SERVER_VERSION, "foo", "bar", "127.0.0.1", null); sessionHandles.add(session); } // close them all for (SessionHandle sessionHandle : sessionHandles) { service.closeSession(sessionHandle); } sessionHandles.clear(); // open till limit but not exceed for (int i = 0; i < limit; i++) { SessionHandle session = service.openSession(CLIService.SERVER_VERSION, "ff", "bar", "127.0.0.1", null); sessionHandles.add(session); } } finally { for (SessionHandle sessionHandle : sessionHandles) { service.closeSession(sessionHandle); } service.stop(); } }
/**
 * Creates a server-side session for an incoming Thrift OpenSession request.
 *
 * Determines the effective user and client IP, picks the protocol version
 * both sides support, then opens the session with or without impersonation
 * depending on the doAs setting.
 *
 * @param req the Thrift open-session request (user, password, client protocol, overlay config)
 * @param res the Thrift response; updated in place with the negotiated protocol version
 * @return handle of the newly opened session
 * @throws HiveSQLException if the session cannot be opened
 * @throws LoginException if user login fails
 * @throws IOException on I/O failure (e.g. while obtaining the delegation token)
 */
SessionHandle getSessionHandle(TOpenSessionReq req, TOpenSessionResp res)
    throws HiveSQLException, LoginException, IOException {
  String userName = getUserName(req);
  String ipAddress = getIpAddress();
  // Use the lower of the server's and the client's protocol versions.
  TProtocolVersion protocol = getMinVersion(CLIService.SERVER_VERSION, req.getClient_protocol());
  SessionHandle sessionHandle;
  if (cliService.getHiveConf().getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS)
      && (userName != null)) {
    // doAs enabled: run the session as the requesting user via a delegation token.
    String delegationTokenStr = getDelegationToken(userName);
    sessionHandle = cliService.openSessionWithImpersonation(protocol, userName,
        req.getPassword(), ipAddress, req.getConfiguration(), delegationTokenStr);
  } else {
    sessionHandle = cliService.openSession(protocol, userName, req.getPassword(),
        ipAddress, req.getConfiguration());
  }
  // Report the negotiated version back to the client.
  res.setServerProtocolVersion(protocol);
  return sessionHandle;
}
/**
 * With every limit configured as 0 (disabled), opening limit + 1 sessions
 * for the same user and ip address must succeed.
 */
@Test
public void testNoLimit() throws HiveSQLException {
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER, 0);
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_IPADDRESS, 0);
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER_IPADDRESS, 0);
  CLIService service = getService(conf);
  List<SessionHandle> opened = new ArrayList<>();
  try {
    for (int n = 0; n <= limit; n++) {
      opened.add(service.openSession(CLIService.SERVER_VERSION, "foo", "bar", "127.0.0.1", null));
    }
  } finally {
    for (SessionHandle h : opened) {
      service.closeSession(h);
    }
    service.stop();
  }
}
/**
 * Handles session creation for a Thrift OpenSession request, stamping the
 * response with the protocol version that was negotiated.
 *
 * @param req incoming open-session request
 * @param res response updated with the negotiated protocol version
 * @return handle of the newly opened session
 * @throws HiveSQLException if the session cannot be opened
 * @throws LoginException if user login fails
 * @throws IOException on I/O failure
 */
SessionHandle getSessionHandle(TOpenSessionReq req, TOpenSessionResp res)
    throws HiveSQLException, LoginException, IOException {
  String requestUser = getUserName(req);
  String remoteAddr = getIpAddress();
  TProtocolVersion version =
      getMinVersion(CLIService.SERVER_VERSION, req.getClient_protocol());

  boolean impersonate =
      cliService.getHiveConf().getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS)
          && requestUser != null;

  SessionHandle opened;
  if (!impersonate) {
    opened = cliService.openSession(version, requestUser, req.getPassword(), remoteAddr,
        req.getConfiguration());
  } else {
    // doAs path: fetch a delegation token so the session runs as the request user.
    String token = getDelegationToken(requestUser);
    opened = cliService.openSessionWithImpersonation(version, requestUser, req.getPassword(),
        remoteAddr, req.getConfiguration(), token);
  }
  res.setServerProtocolVersion(version);
  return opened;
}
/**
 * A blank user name combined with a null ip address: with the per-ipaddress
 * limit configured, opening limit + 1 sessions raises no limit exception.
 */
@Test
public void testInvalidUserIpaddress() throws HiveSQLException {
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER, 0);
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_IPADDRESS, 10);
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER_IPADDRESS, 0);
  CLIService service = getService(conf);
  List<SessionHandle> opened = new ArrayList<>();
  try {
    for (int n = 0; n <= limit; n++) {
      opened.add(service.openSession(CLIService.SERVER_VERSION, " ", "bar", null, null));
    }
  } finally {
    for (SessionHandle h : opened) {
      service.closeSession(h);
    }
    service.stop();
  }
}
/**
 * When forwarded addresses are set, the per-ipaddress limit is enforced
 * against the first forwarded address (194.167.0.3 here), not the address
 * the connection arrived from.
 */
@Test
public void testConnectionForwardedIpAddresses() throws HiveSQLException {
  thrown.expect(HiveSQLException.class);
  thrown.expectMessage("Connection limit per ipaddress reached (ipaddress: 194.167.0.3 limit: 10)");
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER, 0);
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_IPADDRESS, 10);
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER_IPADDRESS, 10);
  CLIService service = getService(conf);
  SessionManager.setForwardedAddresses(
      Lists.newArrayList("194.167.0.3", "194.167.0.2", "194.167.0.1"));
  List<SessionHandle> opened = new ArrayList<>();
  try {
    for (int n = 0; n <= limit; n++) {
      opened.add(service.openSession(CLIService.SERVER_VERSION, "foo", "bar", "194.167.0.1", null));
    }
  } finally {
    // Reset thread/static state so other tests see no forwarded addresses.
    SessionManager.setForwardedAddresses(Collections.emptyList());
    for (SessionHandle h : opened) {
      service.closeSession(h);
    }
    service.stop();
  }
}
/**
 * Exceeding the per user:ipaddress limit must raise a HiveSQLException with
 * the expected message.
 */
@Test
public void testConnectionLimitPerUserIpAddress() throws HiveSQLException {
  thrown.expect(HiveSQLException.class);
  thrown.expectMessage("Connection limit per user:ipaddress reached (user:ipaddress: foo:127.0.0.1 limit: 10)");
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER, 0);
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_IPADDRESS, 0);
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER_IPADDRESS, 10);
  CLIService service = getService(conf);
  List<SessionHandle> opened = new ArrayList<>();
  try {
    for (int n = 0; n <= limit; n++) {
      opened.add(service.openSession(CLIService.SERVER_VERSION, "foo", "bar", "127.0.0.1", null));
    }
  } finally {
    for (SessionHandle h : opened) {
      service.closeSession(h);
    }
    service.stop();
  }
}
/**
 * With both the ipaddress (5) and user:ipaddress (10) limits configured,
 * the lower ipaddress limit trips first.
 */
@Test
public void testConnectionMultipleLimitsIPAndUserIP() throws HiveSQLException {
  thrown.expect(HiveSQLException.class);
  thrown.expectMessage("Connection limit per ipaddress reached (ipaddress: 127.0.0.1 limit: 5)");
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER, 0);
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_IPADDRESS, 5);
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER_IPADDRESS, 10);
  CLIService service = getService(conf);
  List<SessionHandle> opened = new ArrayList<>();
  try {
    for (int n = 0; n <= limit; n++) {
      opened.add(service.openSession(CLIService.SERVER_VERSION, "foo", "bar", "127.0.0.1", null));
    }
  } finally {
    for (SessionHandle h : opened) {
      service.closeSession(h);
    }
    service.stop();
  }
}
/**
 * Exceeding the per-ipaddress limit must raise a HiveSQLException with the
 * expected message.
 */
@Test
public void testConnectionLimitPerIpAddress() throws HiveSQLException {
  thrown.expect(HiveSQLException.class);
  thrown.expectMessage("Connection limit per ipaddress reached (ipaddress: 127.0.0.1 limit: 10)");
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER, 0);
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_IPADDRESS, 10);
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER_IPADDRESS, 0);
  CLIService service = getService(conf);
  List<SessionHandle> opened = new ArrayList<>();
  try {
    for (int n = 0; n <= limit; n++) {
      opened.add(service.openSession(CLIService.SERVER_VERSION, "foo", "bar", "127.0.0.1", null));
    }
  } finally {
    for (SessionHandle h : opened) {
      service.closeSession(h);
    }
    service.stop();
  }
}
/**
 * Exceeding the per-user limit must raise a HiveSQLException with the
 * expected message.
 */
@Test
public void testConnectionLimitPerUser() throws HiveSQLException {
  thrown.expect(HiveSQLException.class);
  thrown.expectMessage("Connection limit per user reached (user: foo limit: 10)");
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER, 10);
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_IPADDRESS, 0);
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER_IPADDRESS, 0);
  CLIService service = getService(conf);
  List<SessionHandle> opened = new ArrayList<>();
  try {
    for (int n = 0; n <= limit; n++) {
      opened.add(service.openSession(CLIService.SERVER_VERSION, "foo", "bar", "127.0.0.1", null));
    }
  } finally {
    for (SessionHandle h : opened) {
      service.closeSession(h);
    }
    service.stop();
  }
}
/**
 * With both the user (5) and ipaddress (10) limits configured, the lower
 * per-user limit trips first.
 */
@Test
public void testConnectionMultipleLimitsUserAndIP() throws HiveSQLException {
  thrown.expect(HiveSQLException.class);
  thrown.expectMessage("Connection limit per user reached (user: foo limit: 5)");
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER, 5);
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_IPADDRESS, 10);
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER_IPADDRESS, 0);
  CLIService service = getService(conf);
  List<SessionHandle> opened = new ArrayList<>();
  try {
    for (int n = 0; n <= limit; n++) {
      opened.add(service.openSession(CLIService.SERVER_VERSION, "foo", "bar", "127.0.0.1", null));
    }
  } finally {
    for (SessionHandle h : opened) {
      service.closeSession(h);
    }
    service.stop();
  }
}
/**
 * With both the user (15) and user:ipaddress (10) limits configured, the
 * lower user:ipaddress limit trips first.
 */
@Test
public void testConnectionMultipleLimitsUserIPAndUser() throws HiveSQLException {
  thrown.expect(HiveSQLException.class);
  thrown.expectMessage("Connection limit per user:ipaddress reached (user:ipaddress: foo:127.0.0.1 limit: 10)");
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER, 15);
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_IPADDRESS, 0);
  conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_LIMIT_CONNECTIONS_PER_USER_IPADDRESS, 10);
  CLIService service = getService(conf);
  List<SessionHandle> opened = new ArrayList<>();
  try {
    for (int n = 0; n <= limit; n++) {
      opened.add(service.openSession(CLIService.SERVER_VERSION, "foo", "bar", "127.0.0.1", null));
    }
  } finally {
    for (SessionHandle h : opened) {
      service.closeSession(h);
    }
    service.stop();
  }
}
/**
 * A handle from a stopped service is invalid on a fresh service instance,
 * but can be re-attached via createSessionWithSessionHandle.
 */
@Test
public void testRestore() throws HiveSQLException {
  SessionHandle handle = service.openSession("foo", "bar", null);
  service.stop();
  service = getService();
  // The fresh service must not recognize the old handle yet.
  try {
    service.getSessionManager().getSession(handle);
    Assert.fail("session already exists before restore");
  } catch (HiveSQLException e) {
    Assert.assertTrue(e.getMessage().contains("Invalid SessionHandle"));
  }
  // Restoring the handle makes the session resolvable again.
  service.createSessionWithSessionHandle(handle, "foo", "bar", null);
  Assert.assertNotNull(service.getSessionManager().getSession(handle));
  service.stop();
}
/**
 * Opens a Hive session for the current UGI user (empty password) with the
 * given per-session configuration overlay.
 *
 * @param sessionConf per-session Hive configuration overrides
 * @return handle of the newly opened session
 * @throws HiveSQLException if the session cannot be opened
 */
protected SessionHandle doOpenHiveSession(Map<String, String> sessionConf) throws HiveSQLException {
  try {
    return cliService.openSession(UserGroupInformation.getCurrentUser().getShortUserName(), "",
        sessionConf);
  } catch (IOException e) {
    // Throwables.propagate is deprecated; for a checked IOException it just
    // wrapped in RuntimeException, so do that directly and keep the cause.
    throw new RuntimeException(e);
  }
}
/**
 * Opens a Hive session for the current UGI user (empty password) with the
 * given per-session configuration overlay.
 *
 * @param sessionConf per-session Hive configuration overrides
 * @return handle of the newly opened session
 * @throws HiveSQLException if the session cannot be opened
 */
protected SessionHandle doOpenHiveSession(Map<String, String> sessionConf) throws HiveSQLException {
  try {
    return cliService.openSession(UserGroupInformation.getCurrentUser().getShortUserName(), "",
        sessionConf);
  } catch (IOException e) {
    // Throwables.propagate is deprecated; for a checked IOException it just
    // wrapped in RuntimeException, so do that directly and keep the cause.
    throw new RuntimeException(e);
  }
}
/**
 * Boots an in-process HiveServer2 and captures the pieces the tests need:
 * the embedded CLIService (_client), an open session (_sessionHandle), and
 * write access to the function registry plus a reflective handle to its
 * internal addFunction method for registering pre-built GenericUDF objects.
 */
private void createHiveServer() {
  HiveServer2 server = new HiveServer2();
  server.init(new HiveConf());
  // Locate the CLIService among the server's composed services.
  for (Service service : server.getServices()) {
    if (service instanceof CLIService) {
      _client = (CLIService) service;
    }
  }
  Preconditions.checkNotNull(_client, "CLI service not found in local Hive server");
  try {
    // Anonymous session; no user/password/conf overlay needed for tests.
    _sessionHandle = _client.openSession(null, null, null);
    _functionRegistry = SessionState.getRegistryForWrite();
    // "map_from_entries" UDF is required to create maps with non-primitive key types
    _functionRegistry.registerGenericUDF("map_from_entries", MapFromEntriesWrapper.class);
    // TODO: This is a hack. Hive's public API does not have a way to register an already created
    // GenericUDF object. It only accepts a class name after which the parameterless constructor
    // of the class is called to create a GenericUDF object. This does not work for
    // HiveTestStdUDFWrapper as it accepts the UDF classes as parameters. However, Hive has an
    // internal method which does allow passing GenericUDF objects instead of classes.
    _functionRegistryAddFunctionMethod =
        _functionRegistry.getClass().getDeclaredMethod("addFunction", String.class, FunctionInfo.class);
    // The internal method is non-public; relax access checks for the tests.
    _functionRegistryAddFunctionMethod.setAccessible(true);
  } catch (HiveSQLException | NoSuchMethodException e) {
    throw new RuntimeException(e);
  }
}