@Override
public HiveServer2Instance getLeader() {
  // Return the first registered instance that currently reports leadership,
  // or null when no instance has claimed the leader role.
  return getAll().stream()
      .filter(HiveServer2Instance::isLeader)
      .findFirst()
      .orElse(null);
}
@Override
public HiveServer2Instance getInstance(final String instanceId) {
  // Look up a registered instance by its worker identity; null when no match exists.
  return getAll().stream()
      .filter(candidate -> candidate.getWorkerIdentity().equals(instanceId))
      .findFirst()
      .orElse(null);
}
@Override
public void execute() {
  try {
    HiveConf hiveConf = new HiveConf();
    // Connect to the ZooKeeper-backed HA registry and dump every registered
    // HiveServer2 peer as JSON to stdout.
    HS2ActivePassiveHARegistry haRegistry = HS2ActivePassiveHARegistryClient.getClient(hiveConf);
    HS2Peers.HS2Instances hs2Instances = new HS2Peers.HS2Instances(haRegistry.getAll());
    String jsonOut = hs2Instances.toJson();
    System.out.println(jsonOut);
  } catch (IOException e) {
    LOG.error("Error listing HiveServer2 HA instances from ZooKeeper", e);
    // FIX: the original concatenated the exception directly onto the sentence
    // ("...ZooKeeperjava.io.IOException..."); add a separator for readable output.
    System.err.println("Error listing HiveServer2 HA instances from ZooKeeper: " + e);
    System.exit(-1);
  }
  System.exit(0);
}
}
@Override public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException { // admin check - // allows when hadoop.security.instrumentation.requires.admin is set to false // when hadoop.security.instrumentation.requires.admin is set to true, checks if hadoop.security.authorization // is true and if the logged in user (via PAM or SPNEGO + kerberos) is in hive.users.in.admin.role list final ServletContext context = getServletContext(); if (!HttpServer.isInstrumentationAccessAllowed(context, request, response)) { LOG.warn("Unauthorized to perform GET action. remoteUser: {}", request.getRemoteUser()); return; } response.setContentType(HttpConstants.CONTENT_TYPE_JSON); response.setHeader(HttpConstants.ACCESS_CONTROL_ALLOW_METHODS, HttpConstants.METHOD_GET); response.setHeader(HttpConstants.ACCESS_CONTROL_ALLOW_ORIGIN, HttpConstants.WILDCARD); ServletContext ctx = getServletContext(); HiveConf hiveConf = (HiveConf) ctx.getAttribute("hiveconf"); HS2ActivePassiveHARegistry hs2Registry = HS2ActivePassiveHARegistryClient.getClient(hiveConf); HS2Instances instances = new HS2Instances(hs2Registry.getAll()); response.getWriter().write(instances.toJson()); response.setStatus(HttpServletResponse.SC_OK); response.flushBuffer(); } }
// Build a HiveConf from the default configuration, connect to the
// ZooKeeper-backed HA registry, and fetch all registered HS2 peers.
HiveConf hiveConf = new HiveConf();
HS2ActivePassiveHARegistry haRegistry = HS2ActivePassiveHARegistryClient.getClient(hiveConf);
Collection<HiveServer2Instance> hs2Instances = haRegistry.getAll();
int sleepMs = 1000;
// Poll the HA registry until some instance reports leadership or the retry
// budget is exhausted. NOTE(review): loop bodies continue past this view;
// presumably a sleep/retry increment follows — confirm against the full file.
while (!foundLeader && retries < maxRetries) {
  for (HiveServer2Instance hiveServer2Instance : haRegistryClient.getAll()) {
    if (hiveServer2Instance.isLeader()) {
      foundLeader = true;
@Override
public HiveServer2Instance getLeader() {
  // Return the first registered instance that currently reports leadership,
  // or null when no instance has claimed the leader role.
  return getAll().stream()
      .filter(HiveServer2Instance::isLeader)
      .findFirst()
      .orElse(null);
}
@Override
public HiveServer2Instance getInstance(final String instanceId) {
  // Look up a registered instance by its worker identity; null when no match exists.
  return getAll().stream()
      .filter(candidate -> candidate.getWorkerIdentity().equals(instanceId))
      .findFirst()
      .orElse(null);
}
@Override
public void execute() {
  try {
    HiveConf hiveConf = new HiveConf();
    // Connect to the ZooKeeper-backed HA registry and dump every registered
    // HiveServer2 peer as JSON to stdout.
    HS2ActivePassiveHARegistry haRegistry = HS2ActivePassiveHARegistryClient.getClient(hiveConf);
    HS2Peers.HS2Instances hs2Instances = new HS2Peers.HS2Instances(haRegistry.getAll());
    String jsonOut = hs2Instances.toJson();
    System.out.println(jsonOut);
  } catch (IOException e) {
    LOG.error("Error listing HiveServer2 HA instances from ZooKeeper", e);
    // FIX: the original concatenated the exception directly onto the sentence
    // ("...ZooKeeperjava.io.IOException..."); add a separator for readable output.
    System.err.println("Error listing HiveServer2 HA instances from ZooKeeper: " + e);
    System.exit(-1);
  }
  System.exit(0);
}
}
@Override public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException { // admin check - // allows when hadoop.security.instrumentation.requires.admin is set to false // when hadoop.security.instrumentation.requires.admin is set to true, checks if hadoop.security.authorization // is true and if the logged in user (via PAM or SPNEGO + kerberos) is in hive.users.in.admin.role list final ServletContext context = getServletContext(); if (!HttpServer.isInstrumentationAccessAllowed(context, request, response)) { LOG.warn("Unauthorized to perform GET action. remoteUser: {}", request.getRemoteUser()); return; } response.setContentType(HttpConstants.CONTENT_TYPE_JSON); response.setHeader(HttpConstants.ACCESS_CONTROL_ALLOW_METHODS, HttpConstants.METHOD_GET); response.setHeader(HttpConstants.ACCESS_CONTROL_ALLOW_ORIGIN, HttpConstants.WILDCARD); ServletContext ctx = getServletContext(); HiveConf hiveConf = (HiveConf) ctx.getAttribute("hiveconf"); HS2ActivePassiveHARegistry hs2Registry = HS2ActivePassiveHARegistryClient.getClient(hiveConf); HS2Instances instances = new HS2Instances(hs2Registry.getAll()); response.getWriter().write(instances.toJson()); response.setStatus(HttpServletResponse.SC_OK); response.flushBuffer(); } }
// Build a HiveConf from the default configuration, connect to the
// ZooKeeper-backed HA registry, and fetch all registered HS2 peers.
HiveConf hiveConf = new HiveConf();
HS2ActivePassiveHARegistry haRegistry = HS2ActivePassiveHARegistryClient.getClient(hiveConf);
Collection<HiveServer2Instance> hs2Instances = haRegistry.getAll();
int sleepMs = 1000;
// Poll the HA registry until some instance reports leadership or the retry
// budget is exhausted. NOTE(review): loop bodies continue past this view;
// presumably a sleep/retry increment follows — confirm against the full file.
while (!foundLeader && retries < maxRetries) {
  for (HiveServer2Instance hiveServer2Instance : haRegistryClient.getAll()) {
    if (hiveServer2Instance.isLeader()) {
      foundLeader = true;