/**
 * Reports whether the table schema for the given SerDe is managed by the
 * metastore rather than derived from the SerDe class itself.
 *
 * @param conf Hive configuration to consult.
 * @param serdeLib fully-qualified SerDe class name; may be null or empty.
 * @return {@code true} when no SerDe is given, or when the SerDe is listed
 *         under {@code ConfVars.SERDESUSINGMETASTOREFORSCHEMA}.
 */
public static boolean hasMetastoreBasedSchema(HiveConf conf, String serdeLib) {
  // A blank SerDe always falls back to the metastore-managed schema.
  if (StringUtils.isEmpty(serdeLib)) {
    return true;
  }
  return conf.getStringCollection(ConfVars.SERDESUSINGMETASTOREFORSCHEMA.varname)
      .contains(serdeLib);
}
/**
 * {@inheritDoc}
 *
 * Builds a {@link UserSearchFilter} when either an LDAP group filter or an
 * LDAP user filter is configured; returns {@code null} when neither list has
 * entries, so this filter drops out of the chain.
 */
@Override
public Filter getInstance(HiveConf conf) {
  Collection<String> configuredGroups = conf.getStringCollection(
      HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER.varname);
  Collection<String> configuredUsers = conf.getStringCollection(
      HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_USERFILTER.varname);
  // Participate only when at least one of the two lists is non-empty.
  if (!configuredGroups.isEmpty() || !configuredUsers.isEmpty()) {
    return new UserSearchFilter();
  }
  return null;
}
/**
 * Returns true when the schema for {@code serdeLib} is kept in the metastore:
 * either no SerDe is given, or the SerDe class appears in the
 * {@code ConfVars.SERDESUSINGMETASTOREFORSCHEMA} whitelist.
 */
public static boolean hasMetastoreBasedSchema(HiveConf conf, String serdeLib) {
  return StringUtils.isEmpty(serdeLib) ||
    conf.getStringCollection(ConfVars.SERDESUSINGMETASTOREFORSCHEMA.varname).contains(serdeLib);
}
/**
 * {@inheritDoc}
 *
 * @return a {@link UserFilter} over the configured user list, or {@code null}
 *         when {@code HIVE_SERVER2_PLAIN_LDAP_USERFILTER} is unset/empty.
 */
@Override
public Filter getInstance(HiveConf conf) {
  Collection<String> allowedUsers = conf.getStringCollection(
      HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_USERFILTER.varname);
  // An empty list means this filter is not configured at all.
  return allowedUsers.isEmpty() ? null : new UserFilter(allowedUsers);
}
/**
 * {@inheritDoc}
 *
 * Chooses between the two group-membership strategies based on whether a
 * user-membership key attribute is configured. Returns {@code null} when no
 * group filter is configured at all.
 */
@Override
public Filter getInstance(HiveConf conf) {
  Collection<String> configuredGroups = conf.getStringCollection(
      HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER.varname);
  if (configuredGroups.isEmpty()) {
    return null;
  }
  // NOTE(review): relies on getVar returning null when the key is unset —
  // confirm HIVE_SERVER2_PLAIN_LDAP_USERMEMBERSHIP_KEY has no non-null default.
  String membershipKey =
      conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_USERMEMBERSHIP_KEY);
  return membershipKey == null
      ? new GroupMembershipKeyFilter(configuredGroups)
      : new UserMembershipKeyFilter(configuredGroups);
}
/**
 * Collects HDFS delegation tokens for the logged-in user — from the default
 * filesystem and from every filesystem listed in
 * {@code mapreduce.job.hdfs-servers} — and writes them to a temp token file
 * so a command executed as another user can pick them up.
 *
 * @param conf Hive configuration; also used to resolve the filesystems.
 * @throws HiveException on shim failures while fetching tokens.
 * @throws IOException on filesystem access or token-file write errors.
 */
public SecureCmdDoAs(HiveConf conf) throws HiveException, IOException{
  // Get delegation token for user from filesystem and write the token along with
  // metastore tokens into a file
  String uname = UserGroupInformation.getLoginUser().getShortUserName();
  FileSystem fs = FileSystem.get(conf);
  Credentials cred = new Credentials();
  ShimLoader.getHadoopShims().addDelegationTokens(fs, cred, uname); // ask default fs first
  // Then collect tokens for every additional filesystem the job may touch.
  for (String uri : conf.getStringCollection("mapreduce.job.hdfs-servers")) {
    try {
      ShimLoader.getHadoopShims().addDelegationTokens(
          FileSystem.get(new URI(uri), conf),
          cred, uname);
    } catch (URISyntaxException e) {
      // Best-effort: one malformed entry must not abort token collection.
      LOG.warn("Invalid URI in mapreduce.job.hdfs-servers:["+uri+"], ignoring.", e);
    }
  }
  tokenFile = File.createTempFile("hive_hadoop_delegation_token", null);
  tokenPath = new Path(tokenFile.toURI());
  //write credential with token to file
  cred.writeTokenStorageFile(tokenPath, conf);
}
conf.getStringCollection(HiveConf.ConfVars.HIVE_EXIM_URI_SCHEME_WL.varname); if (!eximSchemes.contains(scheme)) { throw new SemanticException(
/**
 * Fetches delegation tokens for the current login user from the default
 * filesystem and from each entry in {@code mapreduce.job.hdfs-servers}, then
 * persists the combined credentials to a temporary token file.
 *
 * @param conf Hive configuration used to resolve filesystems and write the file.
 * @throws HiveException on shim failures while fetching tokens.
 * @throws IOException on filesystem or token-file errors.
 */
public SecureCmdDoAs(HiveConf conf) throws HiveException, IOException{
  // Get delegation token for user from filesystem and write the token along with
  // metastore tokens into a file
  String uname = UserGroupInformation.getLoginUser().getShortUserName();
  FileSystem fs = FileSystem.get(conf);
  Credentials cred = new Credentials();
  ShimLoader.getHadoopShims().addDelegationTokens(fs, cred, uname); // ask default fs first
  // Additional filesystems the job may need tokens for.
  for (String uri : conf.getStringCollection("mapreduce.job.hdfs-servers")) {
    try {
      ShimLoader.getHadoopShims().addDelegationTokens(
          FileSystem.get(new URI(uri), conf),
          cred, uname);
    } catch (URISyntaxException e) {
      // Deliberately tolerated: skip malformed entries, keep the rest.
      LOG.warn("Invalid URI in mapreduce.job.hdfs-servers:["+uri+"], ignoring.", e);
    }
  }
  tokenFile = File.createTempFile("hive_hadoop_delegation_token", null);
  tokenPath = new Path(tokenFile.toURI());
  //write credential with token to file
  cred.writeTokenStorageFile(tokenPath, conf);
}
conf.getStringCollection(HiveConf.ConfVars.HIVE_EXIM_URI_SCHEME_WL.varname); if (!eximSchemes.contains(scheme)) { throw new SemanticException(
localizeJarForClass(lfs, libDir, className, false); Collection<String> codecs = conf.getStringCollection("io.compression.codecs"); if (codecs != null) { for (String codecClassName : codecs) {
/**
 * {@inheritDoc}
 *
 * @return a {@link UserFilter} over the configured user-filter list, or
 *         {@code null} when {@code HIVE_SERVER2_PLAIN_LDAP_USERFILTER} is
 *         unset/empty (filter not applicable).
 */
@Override
public Filter getInstance(HiveConf conf) {
  Collection<String> userFilter = conf.getStringCollection(
      HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_USERFILTER.varname);
  // No configured users -> this filter does not participate.
  if (userFilter.isEmpty()) {
    return null;
  }
  return new UserFilter(userFilter);
}
/**
 * {@inheritDoc}
 *
 * @return a {@link UserSearchFilter} when a group filter or a user filter is
 *         configured; {@code null} when both lists are empty.
 */
@Override
public Filter getInstance(HiveConf conf) {
  Collection<String> groupFilter = conf.getStringCollection(
      HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER.varname);
  Collection<String> userFilter = conf.getStringCollection(
      HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_USERFILTER.varname);
  // With neither list configured there is nothing to search against.
  if (groupFilter.isEmpty() && userFilter.isEmpty()) {
    return null;
  }
  return new UserSearchFilter();
}
/**
 * True when the schema comes from the metastore: the SerDe is blank, or it is
 * whitelisted in {@code ConfVars.SERDESUSINGMETASTOREFORSCHEMA}.
 */
public static boolean hasMetastoreBasedSchema(HiveConf conf, String serdeLib) {
  return StringUtils.isEmpty(serdeLib) ||
    conf.getStringCollection(ConfVars.SERDESUSINGMETASTOREFORSCHEMA.varname).contains(serdeLib);
}
/**
 * {@inheritDoc}
 *
 * Picks a membership-resolution strategy: when a user-membership key is
 * configured, group membership is read from the user entry
 * ({@link UserMembershipKeyFilter}); otherwise it is read from the group
 * entries ({@link GroupMembershipKeyFilter}). Returns {@code null} when no
 * group filter is configured.
 */
@Override
public Filter getInstance(HiveConf conf) {
  Collection<String> groupFilter = conf.getStringCollection(
      HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_GROUPFILTER.varname);
  if (groupFilter.isEmpty()) {
    return null;
  }
  // NOTE(review): assumes getVar yields null when USERMEMBERSHIP_KEY is unset
  // (i.e. its default is null) — confirm against HiveConf.
  if (conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_USERMEMBERSHIP_KEY) == null) {
    return new GroupMembershipKeyFilter(groupFilter);
  } else {
    return new UserMembershipKeyFilter(groupFilter);
  }
}
Collection<String> eximSchemes = conf.getStringCollection( HiveConf.ConfVars.HIVE_EXIM_URI_SCHEME_WL.varname); if (!eximSchemes.contains(scheme)) {
/**
 * Gathers the login user's delegation tokens (default filesystem plus each
 * filesystem in {@code mapreduce.job.hdfs-servers}) and stores them in a
 * temporary token file for a child process running as a different user.
 *
 * @param conf Hive configuration; resolves filesystems and the token write.
 * @throws HiveException on shim failures while fetching tokens.
 * @throws IOException on filesystem or token-file errors.
 */
public SecureCmdDoAs(HiveConf conf) throws HiveException, IOException{
  // Get delegation token for user from filesystem and write the token along with
  // metastore tokens into a file
  String uname = UserGroupInformation.getLoginUser().getShortUserName();
  FileSystem fs = FileSystem.get(conf);
  Credentials cred = new Credentials();
  ShimLoader.getHadoopShims().addDelegationTokens(fs, cred, uname); // ask default fs first
  // Then any extra filesystems the job is declared to touch.
  for (String uri : conf.getStringCollection("mapreduce.job.hdfs-servers")) {
    try {
      ShimLoader.getHadoopShims().addDelegationTokens(
          FileSystem.get(new URI(uri), conf),
          cred, uname);
    } catch (URISyntaxException e) {
      // Intentional best-effort: a bad entry is logged and skipped.
      LOG.warn("Invalid URI in mapreduce.job.hdfs-servers:["+uri+"], ignoring.", e);
    }
  }
  tokenFile = File.createTempFile("hive_hadoop_delegation_token", null);
  tokenPath = new Path(tokenFile.toURI());
  //write credential with token to file
  cred.writeTokenStorageFile(tokenPath, conf);
}
localizeJarForClass(lfs, libDir, className, false); Collection<String> codecs = conf.getStringCollection("io.compression.codecs"); if (codecs != null) { for (String codecClassName : codecs) {
hiveConf.getStringCollection(ConfVars.SERDESUSINGMETASTOREFORSCHEMA.varname).contains (tbl.getSd().getSerdeInfo().getSerializationLib())) { ret = tbl.getSd().getCols();
hiveConf.getStringCollection(ConfVars.SERDESUSINGMETASTOREFORSCHEMA.varname).contains (tbl.getSd().getSerdeInfo().getSerializationLib())) { ret = tbl.getSd().getCols();