/**
 * Builds a HiveConf pointing at the given metastore with thrift SASL enabled,
 * but only when token-based authentication is in use.
 *
 * @param metaStoreURI     thrift URI of the Hive metastore
 * @param tokenAuthEnabled whether token authentication is enabled
 * @return a configured HiveConf, or {@code null} when token auth is disabled
 */
private HiveConf createHiveConf(String metaStoreURI, boolean tokenAuthEnabled) {
  if (!tokenAuthEnabled) {
    return null;
  }
  HiveConf conf = new HiveConf();
  conf.setVar(HiveConf.ConfVars.METASTOREURIS, metaStoreURI);
  conf.setBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL, true);
  return conf;
}
/**
 * Builds a HiveConf for talking to a Kerberos-secured metastore.
 *
 * @param metaStoreURI            thrift URI of the Hive metastore
 * @param hiveMetaStorePrincipal  Kerberos principal of the metastore service
 * @return a HiveConf with SASL enabled, concurrency support off, and a
 *         small thrift connection retry count
 * @throws IOException declared for callers; not thrown directly here
 */
public HiveConf createHiveConf(String metaStoreURI, String hiveMetaStorePrincipal) throws IOException {
  HiveConf conf = new HiveConf();
  conf.setVar(HiveConf.ConfVars.METASTOREURIS, metaStoreURI);
  conf.set(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname, hiveMetaStorePrincipal);
  conf.setBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL, true);
  conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
  conf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
  return conf;
}
/**
 * Creates a fresh HiveConf with authorization checking switched on.
 *
 * @return a new HiveConf whose {@code HIVE_AUTHORIZATION_ENABLED} flag is true
 */
private HiveConf newAuthEnabledConf() {
  HiveConf authConf = new HiveConf();
  authConf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
  return authConf;
}
/**
 * Reads the stream reporter prefix from the job configuration and derives the
 * "counter:" and "status:" key prefixes used when parsing reporter lines.
 * NOTE(review): {@code STREAMREPORTERPERFIX} is the spelling used by the
 * ConfVars declaration, not a typo introduced here.
 *
 * @param hconf    job configuration to read the reporter prefix from
 * @param reporter reporter to forward counter/status updates to
 */
CounterStatusProcessor(Configuration hconf, Reporter reporter) {
  this.reporter = reporter;
  this.reporterPrefix = HiveConf.getVar(hconf, HiveConf.ConfVars.STREAMREPORTERPERFIX);
  this.counterPrefix = reporterPrefix + "counter:";
  this.statusPrefix = reporterPrefix + "status:";
}
/**
 * Captures the copy-related limits and test flag from the given configuration.
 *
 * @param distCpDoAsUser user to impersonate when running DistCp copies
 * @param hiveConf       configuration supplying max file count/size and test mode
 */
public CopyUtils(String distCpDoAsUser, HiveConf hiveConf) {
  this.copyAsUser = distCpDoAsUser;
  this.hiveConf = hiveConf;
  this.hiveInTest = hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST);
  this.maxNumberOfFiles = hiveConf.getLongVar(HiveConf.ConfVars.HIVE_EXEC_COPYFILE_MAXNUMFILES);
  this.maxCopyFileSize = hiveConf.getLongVar(HiveConf.ConfVars.HIVE_EXEC_COPYFILE_MAXSIZE);
}
public MemoryCalculator(PhysicalContext pctx) { this.pctx = pctx; this.totalAvailableMemory = HiveConf.getLongVar(pctx.conf, HiveConf.ConfVars.HIVECONVERTJOINNOCONDITIONALTASKTHRESHOLD); this.minimumHashTableSize = HiveConf.getIntVar(pctx.conf, HiveConf.ConfVars.HIVEHYBRIDGRACEHASHJOINMINNUMPARTITIONS) * HiveConf.getIntVar(pctx.conf, HiveConf.ConfVars.HIVEHYBRIDGRACEHASHJOINMINWBSIZE); this.inflationFactor = HiveConf.getFloatVar(pctx.conf, HiveConf.ConfVars.HIVE_HASH_TABLE_INFLATION_FACTOR); }
/**
 * Reports whether HBaseStorageHandler should generate HFiles rather than
 * write directly against the online table. Controlled by the
 * "hive.hbase.generatehfiles" boolean configuration flag.
 *
 * @param conf configuration to consult
 * @return true when HFile generation mode is enabled
 */
public static boolean isHBaseGenerateHFiles(Configuration conf) {
  boolean generateHFiles = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_HBASE_GENERATE_HFILES);
  return generateHFiles;
}
/**
 * Builds a monitor for a remote Spark job, capturing the per-job and per-stage
 * task-count limits from the configuration.
 *
 * @param hiveConf       configuration supplying the task-count caps
 * @param sparkJobStatus status handle of the remote Spark job to monitor
 */
public RemoteSparkJobMonitor(HiveConf hiveConf, RemoteSparkJobStatus sparkJobStatus) {
  super(hiveConf);
  this.hiveConf = hiveConf;
  this.sparkJobStatus = sparkJobStatus;
  this.sparkJobMaxTaskCount = hiveConf.getIntVar(HiveConf.ConfVars.SPARK_JOB_MAX_TASKS);
  this.sparkStageMaxTaskCount = hiveConf.getIntVar(HiveConf.ConfVars.SPARK_STAGE_MAX_TASKS);
}
/**
 * Asserts that assigning {@code newVal} to the restricted config property
 * {@code varName} is rejected: {@code conf.verifyAndSet} must throw
 * {@link IllegalArgumentException}, otherwise the test fails.
 *
 * @param varName name of the config property expected to be restricted
 * @param newVal  value whose assignment is expected to be refused
 */
private void verifyRestriction(String varName, String newVal) {
  try {
    conf.verifyAndSet(varName, newVal);
    // Reaching here means the restricted property was accepted — fail the test.
    fail("Setting config property " + varName + " should fail");
  } catch (IllegalArgumentException e) {
    // the verifyAndSet in this case is expected to fail with the IllegalArgumentException
  }
}
}
/**
 * Blanks out every configuration value whose key appears on the
 * hive.conf.hidden.list by delegating to the two-argument overload with the
 * hidden-key set derived from the same configuration.
 *
 * @param conf configuration object to be scrubbed of sensitive values
 */
public static void stripConfigurations(Configuration conf) {
  stripConfigurations(conf, getHiddenSet(conf));
}
/**
 * Returns the display string for this variable's value type, appending
 * "(TIME)" when the type is STRING and its validator is a TimeValidator.
 * (The {@code instanceof} check already implies the validator is non-null.)
 *
 * @return the type string, possibly suffixed with "(TIME)"
 */
public String typeString() {
  String type = valType.typeString();
  boolean isTimeString = valType == VarType.STRING && validator instanceof TimeValidator;
  if (isTimeString) {
    type += "(TIME)";
  }
  return type;
}
/**
 * Returns the time unit declared by the variable's TimeValidator, if any.
 *
 * @param var configuration variable to inspect
 * @return the validator's TimeUnit, or {@code null} when the variable has no
 *         TimeValidator
 */
public static TimeUnit getDefaultTimeUnit(ConfVars var) {
  if (var.validator instanceof TimeValidator) {
    return ((TimeValidator) var.validator).getTimeUnit();
  }
  return null;
}
/**
 * Strips hidden config entries from configuration by delegating to
 * {@code HiveConfUtil.stripConfigurations} with this instance's hidden-key set.
 *
 * @param conf configuration to be scrubbed of hidden entries in place
 */
public void stripHiddenConfigurations(Configuration conf) {
  HiveConfUtil.stripConfigurations(conf, hiddenSet);
}
private static String findHadoopBinary() { String val = findHadoopHome(); // if can't find hadoop home we can at least try /usr/bin/hadoop val = (val == null ? File.separator + "usr" : val) + File.separator + "bin" + File.separator + "hadoop"; // Launch hadoop command file on windows. return val; }
public CalcitePlanner(QueryState queryState) throws SemanticException { super(queryState); if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_ENABLED)) { runCBO = false; disableSemJoinReordering = false; } }
/**
 * Returns the number of columns configured for RCFile's writer.
 *
 * @param conf configuration to read the column count from
 * @return the configured column count for RCFile writers
 */
public static int getColumnNumber(Configuration conf) {
  int columnNumber = HiveConf.getIntVar(conf, HiveConf.ConfVars.HIVE_RCFILE_COLUMN_NUMBER_CONF);
  return columnNumber;
}
/**
 * Resolves the path of the mapred executable. Uses the detected Hadoop home
 * when available; otherwise returns the bare command name so PATH lookup can
 * find it.
 *
 * @return {@code <home>/bin/mapred} when a Hadoop home is found, else "mapred"
 */
private static String findMapRedBinary() {
  String home = findHadoopHome();
  if (home == null) {
    return "mapred";
  }
  return home + File.separator + "bin" + File.separator + "mapred";
}