private final String getNsOrDefault(String ns) { // This is only needed for old clients not setting NS in requests. // Not clear how to handle this... this is properly a HS2 config but metastore needs its default // value for backward compat, and we don't want it configurable separately because it's also // used in upgrade scripts, were it cannot be configured. return normalizeIdentifier(ns == null ? "default" : ns); }
/**
 * Convenience constructor setting serde name, serialization library and parameters.
 * Each value is interned to cut memory usage of highly repetitive metadata strings.
 */
public SerDeInfo(
  String name,
  String serializationLib,
  Map<String,String> parameters)
{
  this();
  this.parameters =
      org.apache.hadoop.hive.metastore.utils.StringUtils.intern(parameters);
  this.serializationLib =
      org.apache.hadoop.hive.metastore.utils.StringUtils.intern(serializationLib);
  this.name =
      org.apache.hadoop.hive.metastore.utils.StringUtils.intern(name);
}
/***
 * Split uri with fragment into file uri, subdirs, checksum and source cmroot uri.
 * Currently using fileuri#checksum#cmrooturi#subdirs as the format.
 * @param fileURIStr uri with fragment
 * @return array of four entries in encoded order (file uri first); entries past
 *         the file uri are null when the corresponding fragment is absent or empty
 */
public static String[] decodeFileUri(String fileURIStr) {
  String[] uriAndFragment = fileURIStr.split(URI_FRAGMENT_SEPARATOR);
  String[] result = new String[4];
  result[0] = uriAndFragment[0];
  // Optional trailing fragments: copy each only when present and non-empty.
  // (Replaces three copy-pasted if-blocks with one loop.)
  for (int i = 1; i < result.length; i++) {
    if (uriAndFragment.length > i && !StringUtils.isEmpty(uriAndFragment[i])) {
      result[i] = uriAndFragment[i];
    }
  }
  // Parameterized logging avoids building the message when debug is disabled.
  LOG.debug("Reading Encoded URI: {}:: {}:: {}:: {}",
      result[0], result[1], result[2], result[3]);
  return result;
}
LOG.error("Exception when clearing cmroot:" + StringUtils.stringifyException(e));
/**
 * Treat a configuration value as a comma separated list.
 * Looks up the metastore key first, then falls back to the legacy Hive key
 * with the compiled-in default.
 * @param conf configuration to retrieve it from
 * @param var variable to retrieve
 * @return collection of strings. If the value is unset it will return an empty collection.
 */
public static Collection<String> getStringCollection(Configuration conf, ConfVars var) {
  assert var.defaultVal.getClass() == String.class;
  String raw = conf.get(var.varname);
  if (raw == null) {
    // Legacy Hive property name, then the default baked into the ConfVar.
    raw = conf.get(var.hiveName, (String) var.defaultVal);
  }
  return (raw == null) ? Collections.emptySet() : StringUtils.asSet(raw.split(","));
}
static public String checksumFor(Path path, FileSystem fs) throws IOException { // TODO: fs checksum only available on hdfs, need to // find a solution for other fs (eg, local fs, s3, etc) String checksumString = null; FileChecksum checksum = fs.getFileChecksum(path); if (checksum != null) { checksumString = StringUtils.byteToHexString( checksum.getBytes(), 0, checksum.getLength()); } return checksumString; }
/**
 * Private constructor performing one-time initialization of the static
 * change-manager state (enabled flag, cmroot path, ms user/group) when
 * REPLCMENABLED is set in the configuration.
 *
 * NOTE(review): the check-then-set on the static {@code inited} flag is not
 * synchronized in this block — confirm callers serialize construction.
 *
 * @param conf metastore configuration to read REPLCMENABLED / REPLCMDIR from
 * @throws MetaException wrapping any IOException from filesystem access
 */
private ReplChangeManager(Configuration conf) throws MetaException {
  try {
    if (!inited) {
      if (MetastoreConf.getBoolVar(conf, ConfVars.REPLCMENABLED)) {
        ReplChangeManager.enabled = true;
        ReplChangeManager.cmroot = new Path(MetastoreConf.getVar(conf, ConfVars.REPLCMDIR));
        ReplChangeManager.conf = conf;
        FileSystem cmFs = cmroot.getFileSystem(conf);
        // Create cmroot with permission 700 if not exist
        if (!cmFs.exists(cmroot)) {
          cmFs.mkdirs(cmroot);
          // NOTE(review): permission is applied after mkdirs, so the directory
          // briefly exists with umask-derived permissions — confirm acceptable.
          cmFs.setPermission(cmroot, new FsPermission("700"));
        }
        // Cache the current user/group for later ownership handling.
        UserGroupInformation usergroupInfo = UserGroupInformation.getCurrentUser();
        msUser = usergroupInfo.getShortUserName();
        msGroup = usergroupInfo.getPrimaryGroupName();
      }
      // Marked inited even when replication CM is disabled, so this runs once.
      inited = true;
    }
  } catch (IOException e) {
    throw new MetaException(StringUtils.stringifyException(e));
  }
}
/**
 * Checks whether a name matches a '|'-separated list of glob-style subpatterns.
 * '*' matches any sequence and '?' matches exactly one character; matching is
 * case-insensitive and the name is normalized first. '^' and '$' occurring in
 * the pattern are escaped so they match literally.
 *
 * @param name    identifier to test
 * @param pattern '|'-separated glob pattern list
 * @return true when any subpattern matches the normalized name
 */
public static boolean matches(String name, String pattern) {
  String[] subpatterns = pattern.trim().split("\\|");
  for (String subpattern : subpatterns) {
    // Translate glob wildcards to regex, then escape the regex anchors.
    // BUG FIX: the '$' replacement must be "\\\\\\$" (producing the two
    // replacement characters '\' '$'). The previous "\\\\$" left a dangling
    // '$' group reference, so any pattern containing '$' threw
    // IllegalArgumentException("Illegal group reference") from replaceAll.
    subpattern = "(?i)" + subpattern.replaceAll("\\?", ".{1}").replaceAll("\\*", ".*")
        .replaceAll("\\^", "\\\\^").replaceAll("\\$", "\\\\\\$");
    if (Pattern.matches(subpattern, StringUtils.normalizeIdentifier(name))) {
      return true;
    }
  }
  return false;
}
}
} catch (Exception e) { throw new MetaException( org.apache.hadoop.hive.metastore.utils.StringUtils.stringifyException(e));
/**
 * Returns true when the database is marked as a source of replication,
 * i.e. it carries a non-empty replication policy id string.
 */
public static boolean isSourceOfReplication(Database db) {
  assert (db != null);
  return !StringUtils.isEmpty(getReplPolicyIdString(db));
}
/**
 * Lists table names in the given catalog/database, optionally filtered by type,
 * trying direct SQL first and falling back to JDO per the allow flags.
 *
 * NOTE(review): {@code pattern} is only consulted on the JDO path here — the
 * direct-SQL path ignores it; confirm callers pass null pattern when SQL is allowed.
 *
 * @param catName catalog name (normalized before use)
 * @param dbName database name (normalized before use)
 * @param pattern table-name pattern (JDO path only)
 * @param tableType table type filter, or null for all
 */
protected List<String> getTablesInternal(String catName, String dbName,
    String pattern, TableType tableType, boolean allowSql, boolean allowJdo)
    throws MetaException, NoSuchObjectException {
  final String db_name = normalizeIdentifier(dbName);
  final String cat_name = normalizeIdentifier(catName);
  // Consistency fix: pass the normalized db_name to the helper; the original
  // passed the raw dbName here while using normalized names everywhere else
  // (compare getPrimaryKeysInternal, which passes the normalized values).
  return new GetListHelper<String>(cat_name, db_name, null, allowSql, allowJdo) {
    @Override
    protected List<String> getSqlResult(GetHelper<List<String>> ctx) throws MetaException {
      return directSql.getTables(cat_name, db_name, tableType);
    }

    @Override
    protected List<String> getJdoResult(GetHelper<List<String>> ctx)
        throws MetaException, NoSuchObjectException {
      return getTablesInternalViaJdo(cat_name, db_name, pattern, tableType);
    }
  }.run(false);
}
/**
 * Convenience constructor setting column name, type and comment.
 * Each value is interned — schema strings repeat heavily across tables.
 */
public FieldSchema(
  String name,
  String type,
  String comment)
{
  this();
  this.comment =
      org.apache.hadoop.hive.metastore.utils.StringUtils.intern(comment);
  this.type =
      org.apache.hadoop.hive.metastore.utils.StringUtils.intern(type);
  this.name =
      org.apache.hadoop.hive.metastore.utils.StringUtils.intern(name);
}
@Override public List<FieldSchema> readSchema(Table tbl, EnvironmentContext envContext, Configuration conf) throws MetaException { ClassLoader orgHiveLoader = null; try { if (envContext != null) { String addedJars = envContext.getProperties().get("hive.added.jars.path"); if (org.apache.commons.lang.StringUtils.isNotBlank(addedJars)) { //for thread safe orgHiveLoader = conf.getClassLoader(); ClassLoader loader = org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.addToClassPath( orgHiveLoader, org.apache.commons.lang.StringUtils.split(addedJars, ",")); conf.setClassLoader(loader); } } Deserializer s = HiveMetaStoreUtils.getDeserializer(conf, tbl, false); return HiveMetaStoreUtils.getFieldsFromDeserializer(tbl.getTableName(), s); } catch (Exception e) { StringUtils.stringifyException(e); throw new MetaException(e.getMessage()); } finally { if (orgHiveLoader != null) { conf.setClassLoader(orgHiveLoader); } } } }
/**
 * Fetches the primary-key constraints of a table, trying direct SQL first and
 * falling back to JDO (both paths enabled). Input names are normalized before use.
 */
private List<SQLPrimaryKey> getPrimaryKeysInternal(final String catName,
    final String db_name_input, final String tbl_name_input)
    throws MetaException, NoSuchObjectException {
  // Normalize once so both execution paths see identical identifiers.
  final String db_name = normalizeIdentifier(db_name_input);
  final String tbl_name = normalizeIdentifier(tbl_name_input);
  GetListHelper<SQLPrimaryKey> helper =
      new GetListHelper<SQLPrimaryKey>(catName, db_name, tbl_name, true, true) {
        @Override
        protected List<SQLPrimaryKey> getSqlResult(GetHelper<List<SQLPrimaryKey>> ctx)
            throws MetaException {
          return directSql.getPrimaryKeys(catName, db_name, tbl_name);
        }

        @Override
        protected List<SQLPrimaryKey> getJdoResult(GetHelper<List<SQLPrimaryKey>> ctx)
            throws MetaException, NoSuchObjectException {
          return getPrimaryKeysViaJdo(catName, db_name, tbl_name);
        }
      };
  return helper.run(false);
}
/** Sets the database name, interning the value to save memory on repeats. */
public void setDbName(String dbName) {
  String interned = org.apache.hadoop.hive.metastore.utils.StringUtils.intern(dbName);
  this.dbName = interned;
}
throw new MetaException(StringUtils.stringifyException(e));
/**
 * Appends a pattern-match condition on the given field to the query builder.
 * The element list is normalized, split on '|', and handed to appendCondition
 * with bind values collected into {@code parameters}.
 */
private StringBuilder appendPatternCondition(StringBuilder builder, String fieldName,
    String elements, List<String> parameters) {
  String normalized = normalizeIdentifier(elements);
  String[] subElements = normalized.split("\\|");
  return appendCondition(builder, fieldName, subElements, true, parameters);
}
/** Sets the name, interning the value to save memory on repeats. */
public void setName(String name) {
  String interned = org.apache.hadoop.hive.metastore.utils.StringUtils.intern(name);
  this.name = interned;
}
/** Captures the move-database options, normalizing each identifier the same
 *  way the metastore stores them. */
@Override
void setCommandLineArguments(SchemaToolCommandLine cl) {
  dbName = normalizeIdentifier(cl.getOptionValue("moveDatabase"));
  toCatName = normalizeIdentifier(cl.getOptionValue("toCatalog"));
  fromCatName = normalizeIdentifier(cl.getOptionValue("fromCatalog"));
}