Refine search
/**
 * Creates the ZooKeeper-backed registry for a Tez AM.
 *
 * @param instanceName unique registry id for this AM; stored as {@code registryName}.
 * @param conf configuration supplying the AM registry principal and keytab.
 * @param useSecureZk whether to log in to ZooKeeper through the SASL login context.
 */
private TezAmRegistryImpl(String instanceName, Configuration conf, boolean useSecureZk) {
  // NOTE(review): the first null argument presumably means "no configured ZK namespace
  // override" — confirm against the superclass constructor's parameter list.
  super(instanceName, conf, null, NAMESPACE_PREFIX, USER_SCOPE_PATH_PREFIX, WORKER_PREFIX,
      WORKER_GROUP,
      // Use the SASL login context only when secure ZK was requested.
      useSecureZk ? SASL_LOGIN_CONTEXT_NAME : null,
      HiveConf.getVar(conf, ConfVars.LLAP_TASK_SCHEDULER_AM_REGISTRY_PRINCIPAL),
      HiveConf.getVar(conf, ConfVars.LLAP_TASK_SCHEDULER_AM_REGISTRY_KEYTAB_FILE),
      null); // Always validate ACLs
  this.registryName = instanceName;
  LOG.info("AM Zookeeper Registry is enabled with registryid: " + instanceName);
}
/**
 * Registers the per-query logging context in the MDC (session id, query id and,
 * when operation logging is enabled, the operation log level) so the log layout
 * can stamp them onto every message.
 */
public static void registerLoggingContext(Configuration conf) {
  String sessionId = HiveConf.getVar(conf, HiveConf.ConfVars.HIVESESSIONID);
  String queryId = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID);
  MDC.put(SESSIONID_LOG_KEY, sessionId);
  MDC.put(QUERYID_LOG_KEY, queryId);
  boolean operationLoggingEnabled =
      HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED);
  if (operationLoggingEnabled) {
    // Only meaningful when HS2 operation logging is turned on.
    MDC.put(OPERATIONLOG_LEVEL_KEY,
        HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL));
  }
}
/**
 * Instantiates the metastore authorization providers named (comma-separated) by the
 * given configuration key.
 *
 * @param conf configuration to read the provider class list from.
 * @param authorizationProviderConfKey conf var holding the comma-separated class names.
 * @param authenticator authentication provider handed to each instantiated provider.
 * @return the instantiated providers, or null when the conf value is unset.
 * @throws HiveException if a provider cannot be instantiated.
 */
@SuppressWarnings("unchecked")
public static List<HiveMetastoreAuthorizationProvider> getMetaStoreAuthorizeProviderManagers(
    Configuration conf, HiveConf.ConfVars authorizationProviderConfKey,
    HiveAuthenticationProvider authenticator) throws HiveException {
  String clsStrs = HiveConf.getVar(conf, authorizationProviderConfKey);
  if (clsStrs == null) {
    return null;
  }
  List<HiveMetastoreAuthorizationProvider> authProviders =
      new ArrayList<HiveMetastoreAuthorizationProvider>();
  for (String clsStr : clsStrs.trim().split(",")) {
    // Trim each entry and skip blanks so values like "A, B" or a trailing comma
    // do not trigger a class-load failure on " B" or "".
    clsStr = clsStr.trim();
    if (clsStr.isEmpty()) {
      continue;
    }
    LOG.info("Adding metastore authorization provider: " + clsStr);
    authProviders.add((HiveMetastoreAuthorizationProvider) getAuthorizeProviderManager(conf,
        clsStr, authenticator, false));
  }
  return authProviders;
}
private List<String> getMapSideJoinTables(QB qb) { List<String> cols = new ArrayList<String>(); ASTNode hints = qb.getParseInfo().getHints(); for (int pos = 0; pos < hints.getChildCount(); pos++) { ASTNode hint = (ASTNode) hints.getChild(pos); if (((ASTNode) hint.getChild(0)).getToken().getType() == HintParser.TOK_MAPJOIN) { // the user has specified to ignore mapjoin hint if (!conf.getBoolVar(HiveConf.ConfVars.HIVEIGNOREMAPJOINHINT) && !conf.getVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")) { ASTNode hintTblNames = (ASTNode) hint.getChild(1); int numCh = hintTblNames.getChildCount(); for (int tblPos = 0; tblPos < numCh; tblPos++) { String tblName = ((ASTNode) hintTblNames.getChild(tblPos)).getText() .toLowerCase(); if (!cols.contains(tblName)) { cols.add(tblName); } } } else { queryProperties.setMapJoinRemoved(true); } } } return cols; }
/**
 * Builds a GetTables operation for the given session. The requested client table
 * types (if any) are translated to Hive table types through the table-type
 * mapping selected by HIVE_SERVER2_TABLE_TYPE_MAPPING.
 */
protected GetTablesOperation(HiveSession parentSession, String catalogName, String schemaName,
    String tableName, List<String> tableTypes) {
  super(parentSession, OperationType.GET_TABLES);
  this.catalogName = catalogName;
  this.schemaName = schemaName;
  this.tableName = tableName;
  String mappingName = getParentSession().getHiveConf()
      .getVar(HiveConf.ConfVars.HIVE_SERVER2_TABLE_TYPE_MAPPING);
  tableTypeMapping = TableTypeMappingFactory.getTableTypeMapping(mappingName);
  if (tableTypes == null) {
    // null means "no type filter requested".
    tableTypeList = null;
  } else {
    tableTypeList = new ArrayList<String>();
    for (String requestedType : tableTypes) {
      tableTypeList.addAll(Arrays.asList(tableTypeMapping.mapToHiveType(requestedType.trim())));
    }
  }
  this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
  // String.valueOf prints "null" for an absent filter, matching the old ternary.
  LOG.info("Starting GetTablesOperation with the following parameters: "
      + "catalogName={}, schemaName={}, tableName={}, tableTypes={}",
      catalogName, schemaName, tableName, String.valueOf(tableTypeList));
}
public static boolean isAuthorizationEnabled(Configuration conf) { if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) { return false; } // If the V2 api of authorizer in use, the session state getAuthorizer return null. // Here we disable authorization if we use V2 api or the DefaultHiveAuthorizationProvider // The additional authorization checks happening in hcatalog are designed to // work with storage based authorization (on client side). It should not try doing // additional checks if a V2 authorizer or DefaultHiveAuthorizationProvider is in use. // The recommended configuration is to use storage based authorization in metastore server. // However, if user define a custom V1 authorization, it will be honored. if (SessionState.get().getAuthorizer() == null || DefaultHiveAuthorizationProvider.class.getName().equals(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER))) { LOG.info("Metastore authorizer is skipped for V2 authorizer or" + " DefaultHiveAuthorizationProvider"); return false; } return true; } }
@Override public ParseContext transform(ParseContext pctx) throws SemanticException { boolean enabled = false; String queryEngine = pctx.getConf().getVar(ConfVars.HIVE_EXECUTION_ENGINE); if (queryEngine.equals("tez") && pctx.getConf().getBoolVar(ConfVars.TEZ_DYNAMIC_PARTITION_PRUNING)) { enabled = true; } else if ((queryEngine.equals("spark") && pctx.getConf().isSparkDPPAny())) { enabled = true; } if (!enabled) { return pctx; } Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>(); opRules.put(new RuleRegExp("R1", "(" + TableScanOperator.getOperatorName() + "%" + ".*" + ReduceSinkOperator.getOperatorName() + "%" + JoinOperator.getOperatorName() + "%)"), new JoinSynthetic()); // The dispatcher fires the processor corresponding to the closest matching // rule and passes the context along SyntheticContext context = new SyntheticContext(pctx); Dispatcher disp = new DefaultRuleDispatcher(null, opRules, context); GraphWalker ogw = new PreOrderOnceWalker(disp); // Create a list of top op nodes List<Node> topNodes = new ArrayList<Node>(); topNodes.addAll(pctx.getTopOps().values()); ogw.startWalking(topNodes, null); return pctx; }
/**
 * Rebuilds restrictList from the HIVE_CONF_RESTRICTED_LIST and
 * HIVE_CONF_INTERNAL_VARIABLE_LIST settings, then force-adds the meta variables
 * (including HIVE_CONF_RESTRICTED_LIST itself) so they can never be unrestricted.
 */
private void setupRestrictList() {
  restrictList.clear();
  // Both sources are comma-separated lists of variable names.
  ConfVars[] csvSources = {
      ConfVars.HIVE_CONF_RESTRICTED_LIST, ConfVars.HIVE_CONF_INTERNAL_VARIABLE_LIST };
  for (ConfVars source : csvSources) {
    String csv = this.getVar(source);
    if (csv != null) {
      for (String entry : csv.split(",")) {
        restrictList.add(entry.trim());
      }
    }
  }
  restrictList.add(ConfVars.HIVE_IN_TEST.varname);
  restrictList.add(ConfVars.HIVE_CONF_RESTRICTED_LIST.varname);
  restrictList.add(ConfVars.HIVE_CONF_HIDDEN_LIST.varname);
  restrictList.add(ConfVars.HIVE_CONF_INTERNAL_VARIABLE_LIST.varname);
  restrictList.add(ConfVars.HIVE_SPARK_RSC_CONF_LIST.varname);
}
/**
 * Instantiates the metastore authorization providers named (comma-separated) by the
 * given configuration key.
 *
 * @param conf configuration to read the provider class list from.
 * @param authorizationProviderConfKey conf var holding the comma-separated class names.
 * @param authenticator authentication provider handed to each instantiated provider.
 * @return the instantiated providers, or null when the conf value is unset.
 * @throws HiveException if a provider cannot be instantiated.
 */
@SuppressWarnings("unchecked")
public static List<HiveMetastoreAuthorizationProvider> getMetaStoreAuthorizeProviderManagers(
    Configuration conf, HiveConf.ConfVars authorizationProviderConfKey,
    HiveAuthenticationProvider authenticator) throws HiveException {
  String clsStrs = HiveConf.getVar(conf, authorizationProviderConfKey);
  if (clsStrs == null) {
    return null;
  }
  List<HiveMetastoreAuthorizationProvider> authProviders =
      new ArrayList<HiveMetastoreAuthorizationProvider>();
  for (String clsStr : clsStrs.trim().split(",")) {
    // Trim each entry and skip blanks so values like "A, B" or a trailing comma
    // do not trigger a class-load failure on " B" or "".
    clsStr = clsStr.trim();
    if (clsStr.isEmpty()) {
      continue;
    }
    LOG.info("Adding metastore authorization provider: " + clsStr);
    authProviders.add((HiveMetastoreAuthorizationProvider) getAuthorizeProviderManager(conf,
        clsStr, authenticator, false));
  }
  return authProviders;
}
/**
 * Creates the LLAP daemon ZooKeeper registry.
 *
 * @param instanceName unique registry id for this LLAP cluster instance.
 * @param conf configuration supplying the ZK namespace, Kerberos principal/keytab,
 *        and the ACL validation setting (LLAP_VALIDATE_ACLS).
 */
public LlapZookeeperRegistryImpl(String instanceName, Configuration conf) {
  super(instanceName, conf,
      HiveConf.getVar(conf, ConfVars.LLAP_ZK_REGISTRY_NAMESPACE), NAMESPACE_PREFIX,
      USER_SCOPE_PATH_PREFIX, WORKER_PREFIX, WORKER_GROUP,
      // Only a daemon process uses the SASL login context; other callers pass null.
      LlapProxy.isDaemon() ? SASL_LOGIN_CONTEXT_NAME : null,
      HiveConf.getVar(conf, ConfVars.LLAP_KERBEROS_PRINCIPAL),
      HiveConf.getVar(conf, ConfVars.LLAP_KERBEROS_KEYTAB_FILE),
      ConfVars.LLAP_VALIDATE_ACLS);
  LOG.info("Llap Zookeeper Registry is enabled with registryid: " + instanceName);
}
private List<String> getMapSideJoinTables(QB qb) { List<String> cols = new ArrayList<String>(); ASTNode hints = qb.getParseInfo().getHints(); for (int pos = 0; pos < hints.getChildCount(); pos++) { ASTNode hint = (ASTNode) hints.getChild(pos); if (((ASTNode) hint.getChild(0)).getToken().getType() == HintParser.TOK_MAPJOIN) { // the user has specified to ignore mapjoin hint if (!conf.getBoolVar(HiveConf.ConfVars.HIVEIGNOREMAPJOINHINT) && !conf.getVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")) { ASTNode hintTblNames = (ASTNode) hint.getChild(1); int numCh = hintTblNames.getChildCount(); for (int tblPos = 0; tblPos < numCh; tblPos++) { String tblName = ((ASTNode) hintTblNames.getChild(tblPos)).getText() .toLowerCase(); if (!cols.contains(tblName)) { cols.add(tblName); } } } else { queryProperties.setMapJoinRemoved(true); } } } return cols; }
String baseDir = conf.getVar(ConfVars.HIVE_PROTO_EVENTS_BASE_PATH); if (StringUtils.isBlank(baseDir)) { baseDir = null; eventPerFile = conf.getBoolVar(ConfVars.HIVE_PROTO_FILE_PER_EVENT); LOG.info("Event per file enabled: {}", eventPerFile); DatePartitionedLogger<HiveHookEventProto> tmpLogger = null; try {
/**
 * Returns true when ACL validation is enabled but the management ACL is fully
 * permissive: the allow list is the wildcard and the deny list is empty.
 */
private static boolean isPermissiveManagementAcl(Configuration conf) {
  if (!HiveConf.getBoolVar(conf, ConfVars.LLAP_VALIDATE_ACLS)) {
    return false;
  }
  boolean allowsEveryone = AccessControlList.WILDCARD_ACL_VALUE.equals(
      HiveConf.getVar(conf, ConfVars.LLAP_MANAGEMENT_ACL));
  return allowsEveryone
      && "".equals(HiveConf.getVar(conf, ConfVars.LLAP_MANAGEMENT_ACL_DENY));
}
String patternsString = conf.getVar(var); List<String> result = new ArrayList<>(); if (StringUtils.isBlank(patternsString)) { String defaultBaseDn = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_BASEDN); String guidAttr = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GUIDKEY); if (StringUtils.isNotBlank(defaultBaseDn)) { result.add(guidAttr + "=%s," + defaultBaseDn); for (String pattern : patterns) { if (pattern.contains(",") && pattern.contains("=")) { result.add(pattern); } else { LOG.warn("Unexpected format for " + var + "..ignoring " + pattern);
/**
 * Returns the log trace id used to prefix log lines: the configured
 * HIVE_LOG_TRACE_ID when set, otherwise the supplied default; the result is
 * truncated to at most LOG_PREFIX_LENGTH - 1 characters.
 */
public String getLogIdVar(String defaultValue) {
  String logId = getVar(ConfVars.HIVE_LOG_TRACE_ID);
  if (StringUtils.EMPTY.equals(logId)) {
    LOG.info("Using the default value passed in for log id: {}", defaultValue);
    logId = defaultValue;
  }
  if (logId.length() > LOG_PREFIX_LENGTH) {
    String truncated = logId.substring(0, LOG_PREFIX_LENGTH - 1);
    LOG.warn("The original log id prefix is {} has been truncated to {}", logId, truncated);
    logId = truncated;
  }
  return logId;
}
/**
 * (Re-)establishes a thrift connection to the configured HiveServer2 host/port
 * using plain SASL with the configured client user/password, and rebuilds the
 * {@code base} CLI service client over it. Any previously open transport is
 * closed first.
 *
 * @param conf source of host, port, timeouts and client credentials.
 * @return the newly opened transport (also stored in the {@code transport} field).
 * @throws TTransportException if opening the transport fails.
 */
protected synchronized TTransport connect(HiveConf conf) throws HiveSQLException, TTransportException {
  if (transport != null && transport.isOpen()) {
    transport.close();
  }
  String host = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST);
  int port = conf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_PORT);
  LOG.info("Connecting to " + host + ":" + port);
  transport = new TSocket(host, port);
  // getTimeVar yields seconds; TSocket expects milliseconds.
  ((TSocket) transport).setTimeout((int) conf.getTimeVar(HiveConf.ConfVars.SERVER_READ_SOCKET_TIMEOUT,
      TimeUnit.SECONDS) * 1000);
  try {
    ((TSocket) transport).getSocket().setKeepAlive(conf.getBoolVar(HiveConf.ConfVars.SERVER_TCP_KEEP_ALIVE));
  } catch (SocketException e) {
    // Keep-alive is best effort; connection proceeds without it.
    LOG.error("Error setting keep alive to " + conf.getBoolVar(HiveConf.ConfVars.SERVER_TCP_KEEP_ALIVE), e);
  }
  String userName = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_USER);
  String passwd = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_PASSWORD);
  try {
    // Wrap the raw socket in a plain SASL transport carrying the credentials.
    transport = PlainSaslHelper.getPlainTransport(userName, passwd, transport);
  } catch (SaslException e) {
    // NOTE(review): on SaslException the error is only logged and the un-wrapped
    // socket is opened below — confirm this fallback is intended.
    LOG.error("Error creating plain SASL transport", e);
  }
  TProtocol protocol = new TBinaryProtocol(transport);
  transport.open();
  base = new ThriftCLIServiceClient(new TCLIService.Client(protocol), conf);
  LOG.info("Connected!");
  return transport;
}
/**
 * Whether in-place progress logging can be provided: the execution engine must
 * be tez or spark and HIVE_SERVER2_INPLACE_PROGRESS must be enabled.
 */
public static boolean canProvideProgressLog(HiveConf hiveConf) {
  String engine = hiveConf.getVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE);
  boolean engineSupportsProgress = "tez".equals(engine) || "spark".equals(engine);
  return engineSupportsProgress
      && hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_INPLACE_PROGRESS);
}
/**
 * Loads and instantiates the hook classes named (comma-separated) by the given
 * conf var, using the session-specified class loader.
 *
 * @return one hook instance per configured class; empty when the value is blank.
 * @throws ClassNotFoundException if a configured class cannot be resolved.
 * @throws InstantiationException if a hook cannot be instantiated.
 * @throws IllegalAccessException if a hook constructor is inaccessible.
 */
public static <T extends Hook> List<T> readHooksFromConf(HiveConf conf, HiveConf.ConfVars hookConfVar)
    throws InstantiationException, IllegalAccessException, ClassNotFoundException {
  List<T> hooks = new ArrayList<>();
  String csHooks = conf.getVar(hookConfVar);
  if (Strings.isBlank(csHooks)) {
    return hooks;
  }
  for (String hookClass : csHooks.split(",")) {
    Class<?> clazz =
        Class.forName(hookClass.trim(), true, Utilities.getSessionSpecifiedClassLoader());
    // Cast is unavoidable: the conf only carries class names, not types.
    @SuppressWarnings("unchecked")
    T hook = (T) clazz.newInstance();
    hooks.add(hook);
  }
  return hooks;
}
}
public static String getDaemonLocalDirString(Configuration conf, String workDirsEnvString) { String localDirList = HiveConf.getVar(conf, ConfVars.LLAP_DAEMON_WORK_DIRS); if (localDirList != null && !localDirList.isEmpty()) { LOG.info("Local dirs from Configuration: {}", localDirList); if (!localDirList.equalsIgnoreCase("useYarnEnvDirs") && !StringUtils.isBlank(localDirList)) { LOG.info("Using local dirs from Configuration"); return localDirList; } } // Fallback to picking up the value from environment. if (StringUtils.isNotBlank(workDirsEnvString)) { LOG.info("Using local dirs from environment: {}", workDirsEnvString); return workDirsEnvString; } else { throw new RuntimeException( "Cannot determined local dirs from specified configuration and env. ValueFromConf=" + localDirList + ", ValueFromEnv=" + workDirsEnvString); } }