// Boots an embedded HiveServer2 from the pre-built hiveConf, then waits a
// fixed 5 seconds for its background services to come up.
// NOTE(review): the sleep is a heuristic startup wait — there is no readiness
// probe here, so a slow host may still race; confirm 5000 ms is sufficient.
private void startHiveServer() throws InterruptedException {
  // Start hive server2
  server = new HiveServer2();
  server.init(hiveConf);
  server.start();
  Thread.sleep(5000);
  System.out.println("## HiveServer started");
}
// Fragment of the HS2 start/retry logic (the enclosing method and the braces
// matching the ones opened below lie outside this view; only comments added).
// Schedule the dangling-scratch-dir cleaner with a random initial delay in
// [0, 600) — presumably seconds, staggering cleanup across instances; confirm
// against the callee. Then (re)create and start the server.
scheduleClearDanglingScratchDir(hiveConf, new Random().nextInt(600));
server = new HiveServer2();
server.init(hiveConf);
server.start();
// Best-effort stop of a half-started server before a retry: any Throwable
// from stop() is logged at INFO and swallowed so the retry can proceed.
if (server != null) {
  try {
    server.stop();
  } catch (Throwable t) {
    LOG.info("Exception caught when calling stop of HiveServer2 before retrying start", t);
// Leadership-loss callback: when this HS2 instance loses HA leadership it
// flips its leader flag, refuses further Hive sessions, and stops or
// disconnects its Tez sessions so another instance can take over.
@Override
public void notLeader() {
  LOG.info("HS2 instance {} LOST LEADERSHIP. Stopping/Disconnecting tez sessions..", hiveServer2.serviceUri);
  hiveServer2.isLeader.set(false);
  hiveServer2.closeAndDisallowHiveSessions();
  hiveServer2.stopOrDisconnectTezSessions();
  LOG.info("Stopped/Disconnected tez sessions.");
  // resolve futures used for testing
  if (HiveConf.getBoolVar(hiveServer2.getHiveConf(), ConfVars.HIVE_IN_TEST)) {
    hiveServer2.notLeaderTestFuture.set(true);
    hiveServer2.resetIsLeaderTestFuture();
  }
}
} // closes the enclosing (listener) class, whose opening is outside this view
// Starts the Tez session pool manager and the workload manager. When a WM
// queue is configured, the active resource plan is fetched first; a fetch
// failure is fatal except in SSL tests, and in-test runs fall back to a
// synthetic default plan. Reconnect (vs. restart) is still TODO.
private void startOrReconnectTezSessions() {
  LOG.info("Starting/Reconnecting tez sessions..");
  // TODO: add tez session reconnect after TEZ-3875
  WMFullResourcePlan resourcePlan = null;
  if (!StringUtils.isEmpty(wmQueue)) {
    try {
      resourcePlan = sessionHive.getActiveResourcePlan();
    } catch (HiveException e) {
      // Only SSL tests tolerate a failed plan fetch; everywhere else it aborts.
      if (!HiveConf.getBoolVar(getHiveConf(), ConfVars.HIVE_IN_TEST_SSL)) {
        throw new RuntimeException(e);
      } else {
        resourcePlan = null; // Ignore errors in SSL tests where the connection is misconfigured.
      }
    }
    if (resourcePlan == null && HiveConf.getBoolVar(
        getHiveConf(), ConfVars.HIVE_IN_TEST)) {
      LOG.info("Creating a default resource plan for test");
      resourcePlan = createTestResourcePlan();
    }
  }
  initAndStartTezSessionPoolManager(resourcePlan);
  initAndStartWorkloadManager(resourcePlan);
}
// Fragment of the config-publishing helper (enclosing method and matching
// braces are outside this view; only comments added). Publishes the transport
// mode, plus the HTTP port in HTTP mode and the Kerberos principal under
// Kerberos auth — presumably for service-discovery registration; confirm
// against the enclosing method.
conf.get(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE.varname));
if (HiveServer2.isHTTPTransportMode(conf)) {
  confsToPublish.put(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT.varname,
      conf.get(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_HTTP_PORT.varname));
if (HiveServer2.isKerberosAuthMode(conf)) {
  confsToPublish.put(HiveConf.ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL.varname,
      conf.get(HiveConf.ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL.varname));
// Tail of an anonymous class (its declaration starts outside this view):
// the run() body simply stops the captured HiveServer2 instance.
@Override
public void run() {
  hiveServer2.stop();
}
};
// Spins up an embedded HiveServer2 with a default HiveConf, locates its
// CLIService, opens a session, and registers the UDFs the tests need —
// including a reflective hook into the function registry (see inline TODO).
private void createHiveServer() {
  HiveServer2 server = new HiveServer2();
  server.init(new HiveConf());
  // Find the CLI service among the server's child services.
  for (Service service : server.getServices()) {
    if (service instanceof CLIService) {
      _client = (CLIService) service;
    }
  }
  Preconditions.checkNotNull(_client, "CLI service not found in local Hive server");
  try {
    // Anonymous session; the writable registry is session-scoped state.
    _sessionHandle = _client.openSession(null, null, null);
    _functionRegistry = SessionState.getRegistryForWrite();
    // "map_from_entries" UDF is required to create maps with non-primitive key types
    _functionRegistry.registerGenericUDF("map_from_entries", MapFromEntriesWrapper.class);
    // TODO: This is a hack. Hive's public API does not have a way to register an already created GenericUDF object
    // It only accepts a class name after which the parameterless constructor of the class is called to create a
    // GenericUDF object. This does not work for HiveTestStdUDFWrapper as it accepts the UDF classes as parameters.
    // However, Hive has an internal method which does allow passing GenericUDF objects instead of classes.
    _functionRegistryAddFunctionMethod =
        _functionRegistry.getClass().getDeclaredMethod("addFunction", String.class, FunctionInfo.class);
    _functionRegistryAddFunctionMethod.setAccessible(true);
  } catch (HiveSQLException | NoSuchMethodException e) {
    throw new RuntimeException(e);
  }
}
// Verifies that configuring PAM services for the Web UI while Web UI SSL is
// disabled makes HiveServer2.init fail fast with the expected
// IllegalArgumentException and message (via the ExpectedException rule).
@Test
public void testSslIsFalse() {
  thrown.expect(IllegalArgumentException.class);
  thrown.expectMessage(is(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL.varname
      + " has false value. It is recommended to set to true when PAM is used."));
  hiveConf.setVar(ConfVars.HIVE_SERVER2_PAM_SERVICES, "sshd");
  hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_WEBUI_USE_SSL, false);
  hiveServer2 = new HiveServer2();
  hiveServer2.init(hiveConf); // expected to throw
}
static void startHiveServer2WithConf(HiveConf hiveConf) throws Exception { hiveServer2.init(hiveConf); // Start HiveServer2 with given config // Fail if server doesn't start try { hiveServer2.start(); } catch (Throwable t) { t.printStackTrace(); fail(); } // Wait for startup to complete Thread.sleep(2000); System.out.println("HiveServer2 started on port " + port); }
// NOTE(review): this span appears to interleave several non-contiguous pieces
// of HiveServer2.init() — service registration, thrift front-end selection
// (HTTP vs. binary, with an OOM hook), init-error wrapping, service-URI
// publishing, HA/compactor setup, and the shutdown hook. Braces do NOT
// balance within this view, so only comments are added; code is untouched.
addService(cliService);
final HiveServer2 hiveServer2 = this;
Runnable oomHook = new Runnable() {
if (isHTTPTransportMode(hiveConf)) {
  thriftCLIService = new ThriftHttpCLIService(cliService, oomHook);
} else {
  thriftCLIService = new ThriftBinaryCLIService(cliService, oomHook);
addService(thriftCLIService);
super.init(hiveConf);
hiveConf.set(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST.varname, getServerHost());
} catch (Throwable t) {
  // Any init failure is escalated as an Error (unrecoverable for the server).
  throw new Error("Unable to initialize HiveServer2", t);
serviceUri = getServerInstanceURI();
addConfsToPublish(hiveConf, confsToPublish, serviceUri);
if (activePassiveHA) {
  hiveConf.set(INSTANCE_URI_CONFIG, serviceUri);
maybeStartCompactorThreads(hiveConf);
} catch (Exception e) {
  throw new RuntimeException(e);
ShutdownHookManager.addShutdownHook(() -> hiveServer2.stop());
/**
 * One-time JUnit fixture setup: picks a free port and constructs (but does
 * not init or start) the shared HiveServer2 and HiveConf instances.
 *
 * @throws java.lang.Exception if port discovery fails
 */
@BeforeClass
public static void setUpBeforeClass() throws Exception {
  // Find a free port
  port = MetaStoreTestUtils.findFreePort();
  hiveServer2 = new HiveServer2();
  hiveConf = new HiveConf();
}
@Override public synchronized void init(HiveConf hiveConf) { cliService = new CLIService(this); addService(cliService); if (isHTTPTransportMode(hiveConf)) { thriftCLIService = new ThriftHttpCLIService(cliService); } else { thriftCLIService = new ThriftBinaryCLIService(cliService); } addService(thriftCLIService); super.init(hiveConf); // Add a shutdown hook for catching SIGTERM & SIGINT final HiveServer2 hiveServer2 = this; Runtime.getRuntime().addShutdownHook(new Thread() { @Override public void run() { hiveServer2.stop(); } }); }
// Fragment of an HS2 start() sequence (braces do not balance in this view;
// only comments added). Without active/passive HA, client sessions are
// allowed immediately; znode payload is the joined "k=v;..." config map when
// config publishing is enabled. The privilege synchronizer and Tez session
// startup are both best-effort: failures are logged, not rethrown.
HiveConf hiveConf = getHiveConf();
if (!serviceDiscovery || !activePassiveHA) {
  allowClientSessions();
boolean publishConfigs =
    hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ZOOKEEPER_PUBLISH_CONFIGS);
String instanceURI = getServerInstanceURI();
String znodeData;
if (publishConfigs) {
  addConfsToPublish(hiveConf, confsToPublish, getServerInstanceURI());
  znodeData = Joiner.on(';').withKeyValueSeparator("=").join(confsToPublish);
} else {
startPrivilegeSynchronizer(hiveConf);
} catch (Exception e) {
  LOG.error("Error starting priviledge synchronizer: ", e);
LOG.info("HS2 interactive HA not enabled. Starting tez sessions..");
try {
  startOrReconnectTezSessions();
} catch (Exception e) {
  LOG.error("Error starting Tez sessions: ", e);
/** Starts the wrapped server; calling it again while running is a no-op. */
@Override
public void start() {
  if (isRunning()) {
    return; // already up — idempotent start
  }
  server.start();
}
// Fragment of an HS2 stop() override (the `try` matching the visible `catch`
// lies outside this view; only comments added). After the composite-service
// stop, the server's znode is removed from ZooKeeper; removal failures are
// logged and swallowed so shutdown continues.
@Override
public synchronized void stop() {
  LOG.info("Shutting down HiveServer2");
  HiveConf hiveConf = this.getHiveConf();
  super.stop();
  removeServerInstanceFromZooKeeper();
} catch (Exception e) {
  LOG.error("Error removing znode for this HiveServer2 instance from ZooKeeper.", e);
// Fragment: selects the thrift worker keep-alive time; in HTTP transport
// mode the HTTP-specific keep-alive config is read (the getTimeVar call is
// cut off mid-statement — its remainder is outside this view).
if (HiveServer2.isHTTPTransportMode(hiveConf)) {
  workerKeepAliveTime = hiveConf.getTimeVar(ConfVars.HIVE_SERVER2_THRIFT_HTTP_WORKER_KEEPALIVE_TIME,
// Fragment of HiveServer2.stop() (braces do not balance in this view; only
// comments added). After the composite-service stop, the HA registry and its
// leader-actions executor are torn down, then Tez sessions are stopped or
// disconnected.
@Override
public synchronized void stop() {
  LOG.info("Shutting down HiveServer2");
  HiveConf hiveConf = this.getHiveConf();
  super.stop();
  if (hs2HARegistry != null) {
    hs2HARegistry.stop();
    shutdownExecutor(leaderActionsExecutorService);
    LOG.info("HS2 HA registry stopped");
    hs2HARegistry = null;
stopOrDisconnectTezSessions();
// Fragment of HiveServer2.stop() (braces do not balance in this view; the
// `try` matching the visible `catch` is outside it; only comments added).
// Tears down the HA registry and its leader-actions executor, removes this
// instance's znode from ZooKeeper (failures logged, not rethrown), then
// stops or disconnects Tez sessions.
@Override
public synchronized void stop() {
  LOG.info("Shutting down HiveServer2");
  HiveConf hiveConf = this.getHiveConf();
  super.stop();
  if (hs2HARegistry != null) {
    hs2HARegistry.stop();
    shutdownExecutor(leaderActionsExecutorService);
    LOG.info("HS2 HA registry stopped");
    hs2HARegistry = null;
removeServerInstanceFromZooKeeper();
} catch (Exception e) {
  LOG.error("Error removing znode for this HiveServer2 instance from ZooKeeper.", e);
stopOrDisconnectTezSessions();
// Fragment of an HS2 start() sequence (braces do not balance in this view;
// only comments added). With service discovery enabled, either the HA
// registry is started or the instance is registered in ZooKeeper; the
// privilege synchronizer and Tez session startup are best-effort (errors
// logged, not rethrown). NOTE(review): "Synchonizer"/"priviledge" typos
// mirror the method name and message declared elsewhere — do not "fix" here.
HiveConf hiveConf = getHiveConf();
if (serviceDiscovery) {
  try {
    LOG.info("HS2 HA registry started");
} else {
  addServerInstanceToZooKeeper(hiveConf, confsToPublish);
startPrivilegeSynchonizer(hiveConf);
} catch (Exception e) {
  LOG.error("Error starting priviledge synchonizer: ", e);
LOG.info("HS2 interactive HA not enabled. Starting tez sessions..");
try {
  startOrReconnectTezSessions();
} catch (Exception e) {
  LOG.error("Error starting Tez sessions: ", e);
/** Stops the shared HiveServer2 instance if one was created; no-op otherwise. */
protected static void stopHiveServer2() throws Exception {
  if (hiveServer2 == null) {
    return; // nothing was started
  }
  hiveServer2.stop();
}