// NOTE(review): fragment — the enclosing method and the if-block's closing
// brace are outside this view.
// Build an encryption shim for this FileSystem and, when the target location
// is NOT inside an HDFS encryption zone, capture its current ownership/ACL
// status — presumably so it can be re-applied after the write; confirm in the
// enclosing method.
HadoopShims.HdfsEncryptionShim shim = ShimLoader.getHadoopShims().createHdfsEncryptionShim(fs, conf);
if (!shim.isPathEncrypted(location)) {
  HdfsUtils.HadoopFileStatus status = new HdfsUtils.HadoopFileStatus(conf, fs, location);
  FileStatus targetStatus = fs.getFileStatus(location);
/**
 * Tells whether the supplied path lies inside an HDFS encryption zone.
 * Only meaningful for HDFS file systems; other schemes yield no shim and
 * therefore report unencrypted.
 *
 * @param path the path whose encryption status is queried
 * @return true when the path is encrypted, false otherwise
 * @throws HiveException when the encryption status cannot be determined
 */
private boolean isPathEncrypted(Path path) throws HiveException {
  try {
    // A null shim means the file system has no encryption support.
    HadoopShims.HdfsEncryptionShim shim =
        SessionState.get().getHdfsEncryptionShim(path.getFileSystem(conf));
    return shim != null && shim.isPathEncrypted(path);
  } catch (Exception e) {
    throw new HiveException("Unable to determine if " + path + " is encrypted: " + e, e);
  }
}
/**
 * Reports whether {@code path} sits inside an HDFS encryption zone
 * (valid only for HDFS files; non-HDFS file systems produce no shim and
 * are treated as unencrypted).
 *
 * @param path the path to check for encryption
 * @return true if the path is encrypted; false if it is not
 * @throws HiveException if an error occurs while checking for encryption
 */
private boolean isPathEncrypted(Path path) throws HiveException {
  try {
    HadoopShims.HdfsEncryptionShim encryptionShim =
        SessionState.get().getHdfsEncryptionShim(path.getFileSystem(conf));
    if (encryptionShim == null) {
      // No shim available: nothing to ask, so the path cannot be encrypted.
      return false;
    }
    return encryptionShim.isPathEncrypted(path);
  } catch (Exception e) {
    throw new HiveException("Unable to determine if " + path + " is encrypted: " + e, e);
  }
}
// NOTE(review): fragment — the guarded statement and enclosing method are
// outside this view.
// The guarded action is needed only when encryption is in play: a shim exists
// (HDFS), at least one endpoint is encrypted, and the two paths do NOT share
// an encryption zone (a rename across zones is not possible).
if (hdfsEncryptionShim != null
    && (hdfsEncryptionShim.isPathEncrypted(srcf) || hdfsEncryptionShim.isPathEncrypted(destf))
    && !hdfsEncryptionShim.arePathsOnSameEncryptionZone(srcf, destf))
// NOTE(review): fragment — the closing braces of this if/method are outside
// this view.
// Data inside an encryption zone cannot be moved to the (unencrypted) trash,
// so a trash-enabled drop of encrypted data must fail fast and point the
// user at the PURGE option.
HadoopShims.HdfsEncryptionShim shim =
    ShimLoader.getHadoopShims().createHdfsEncryptionShim(FileSystem.get(hiveConf), hiveConf);
if (shim.isPathEncrypted(pathToData)) {
  throw new MetaException("Unable to drop " + objectName + " because it is in an encryption zone"
      + " and trash is enabled. Use PURGE option to skip trash.");
// NOTE(review): fragment (duplicate of an earlier snippet) — closing braces
// are outside this view.
// Refuse a trash-enabled drop when the data lives in an encryption zone:
// HDFS cannot move encrypted files into the unencrypted trash directory.
HadoopShims.HdfsEncryptionShim shim =
    ShimLoader.getHadoopShims().createHdfsEncryptionShim(FileSystem.get(hiveConf), hiveConf);
if (shim.isPathEncrypted(pathToData)) {
  throw new MetaException("Unable to drop " + objectName + " because it is in an encryption zone"
      + " and trash is enabled. Use PURGE option to skip trash.");
/**
 * Determines whether {@code path} is inside an HDFS encryption zone
 * (valid only for HDFS files).
 *
 * @param path the path to check for encryption
 * @return true if the path is encrypted; false if it is not
 * @throws HiveException if an error occurs while checking for encryption
 */
private boolean isPathEncrypted(Path path) throws HiveException {
  HadoopShims.HdfsEncryptionShim shim = SessionState.get().getHdfsEncryptionShim();
  if (shim == null) {
    // No encryption shim for this session: treat the path as unencrypted.
    return false;
  }
  try {
    return shim.isPathEncrypted(path);
  } catch (Exception e) {
    throw new HiveException("Unable to determine if " + path + " is encrypted: " + e, e);
  }
}
// NOTE(review): fragment (duplicate of an earlier snippet) — the enclosing
// method and the if-block's closing brace are outside this view.
// Create an encryption shim for this FileSystem; only when the location is
// outside any encryption zone does the code snapshot the file's current
// status — presumably for later permission/ownership propagation; confirm
// in the enclosing method.
HadoopShims.HdfsEncryptionShim shim = ShimLoader.getHadoopShims().createHdfsEncryptionShim(fs, conf);
if (!shim.isPathEncrypted(location)) {
  HdfsUtils.HadoopFileStatus status = new HdfsUtils.HadoopFileStatus(conf, fs, location);
  FileStatus targetStatus = fs.getFileStatus(location);
// NOTE(review): fragment — the enclosing try/method is outside this view.
// A copy (instead of rename) is required only when both file systems expose
// an encryption shim, at least one endpoint is encrypted, and the endpoints
// are not within the same encryption zone.
return srcHdfsEncryptionShim != null && destHdfsEncryptionShim != null
    && (srcHdfsEncryptionShim.isPathEncrypted(srcf) || destHdfsEncryptionShim.isPathEncrypted(destf))
    && !srcHdfsEncryptionShim.arePathsOnSameEncryptionZone(srcf, destf, destHdfsEncryptionShim);
} catch (IOException e) {
/** * If moving across different FileSystems or differnent encryption zone, need to do a File copy instead of rename. * TODO- consider if need to do this for different file authority. * @throws HiveException */ static protected boolean needToCopy(Path srcf, Path destf, FileSystem srcFs, FileSystem destFs) throws HiveException { //Check if different FileSystems if (!FileUtils.equalsFileSystem(srcFs, destFs)) { return true; } //Check if different encryption zones HadoopShims.HdfsEncryptionShim srcHdfsEncryptionShim = SessionState.get().getHdfsEncryptionShim(srcFs); HadoopShims.HdfsEncryptionShim destHdfsEncryptionShim = SessionState.get().getHdfsEncryptionShim(destFs); try { return srcHdfsEncryptionShim != null && destHdfsEncryptionShim != null && (srcHdfsEncryptionShim.isPathEncrypted(srcf) || destHdfsEncryptionShim.isPathEncrypted(destf)) && !srcHdfsEncryptionShim.arePathsOnSameEncryptionZone(srcf, destf, destHdfsEncryptionShim); } catch (IOException e) { throw new HiveException(e); } }
static boolean hasEquivalentEncryption(HadoopShims.HdfsEncryptionShim encryptionShim, Path path1, Path path2) throws IOException { // Assumes these are both qualified paths are in the same FileSystem if (encryptionShim.isPathEncrypted(path1) || encryptionShim.isPathEncrypted(path2)) { if (!encryptionShim.arePathsOnSameEncryptionZone(path1, path2)) { return false; } } return true; } }