/**
 * Resolves the {@link FileSystem} for the given path, delegating to
 * {@code getFs(Path, Configuration)} with this instance's stored {@code conf}.
 *
 * @param f path whose file system is required
 * @return the FileSystem the path maps to
 * @throws MetaException if the file system cannot be resolved
 */
public FileSystem getFs(Path f) throws MetaException { return getFs(f, conf); }
/**
 * Hadoop FileSystem reverse-looks-up paths containing raw IP addresses, while the
 * FileSystem URI always carries the canonical DNS name of the NameNode. Operations on
 * raw-IP paths therefore fail because they do not match the file system URI.
 *
 * This routine solves that by replacing the scheme and authority of a path with the
 * scheme and authority of the FileSystem it maps to.
 *
 * @param path path to be canonicalized
 * @param conf configuration used to resolve the FileSystem
 * @return path carrying the canonical scheme and authority
 * @throws MetaException if the FileSystem cannot be resolved
 */
public static Path getDnsPath(Path path, Configuration conf) throws MetaException {
  FileSystem fs = getFs(path, conf);
  String pathPart = path.toUri().getPath();
  // A URI with no path component (e.g. "hdfs://host") is normalized to the root.
  String normalized = StringUtils.isEmpty(pathPart) ? "/" : pathPart;
  return new Path(fs.getUri().getScheme(), fs.getUri().getAuthority(), normalized);
}
public boolean renameDir(Path sourcePath, Path destPath, boolean needCmRecycle) throws MetaException { try { if (needCmRecycle) { // Copy the source files to cmroot. As the client will move the source files to another // location, we should make a copy of the files to cmroot instead of moving it. cm.recycle(sourcePath, RecycleType.COPY, true); } FileSystem srcFs = getFs(sourcePath); FileSystem destFs = getFs(destPath); return FileUtils.rename(srcFs, destFs, sourcePath, destPath); } catch (Exception ex) { MetaStoreUtils.logAndThrowMetaException(ex); } return false; }
public boolean isWritable(Path path) throws IOException { if (!storageAuthCheck) { // no checks for non-secure hadoop installations return true; } if (path == null) { //what??!! return false; } final FileStatus stat; final FileSystem fs; try { fs = getFs(path); stat = fs.getFileStatus(path); HdfsUtils.checkFileAccess(fs, stat, FsAction.WRITE); return true; } catch (FileNotFoundException fnfe){ // File named by path doesn't exist; nothing to validate. return true; } catch (Exception e) { // all other exceptions are considered as emanating from // unauthorized accesses if (LOG.isDebugEnabled()) { LOG.debug("Exception when checking if path (" + path + ")", e); } return false; } }
/**
 * Determines whether the given path exists and is a directory.
 *
 * @param f path to check
 * @return true if {@code f} exists and is a directory; false if it is missing or a file
 * @throws MetaException if an I/O error occurs while contacting the file system
 */
public boolean isDir(Path f) throws MetaException {
  FileSystem fs;
  try {
    fs = getFs(f);
    FileStatus fstatus = fs.getFileStatus(f);
    // isDirectory() is the non-deprecated replacement for FileStatus.isDir().
    if (!fstatus.isDirectory()) {
      return false;
    }
  } catch (FileNotFoundException e) {
    // A missing path is simply not a directory.
    return false;
  } catch (IOException e) {
    MetaStoreUtils.logAndThrowMetaException(e);
  }
  return true;
}
/**
 * Creates the directory {@code f} (including missing parents) on its file system.
 *
 * @param f directory path to create
 * @return true if the directory was created
 * @throws MetaException wrapping any I/O failure
 */
public boolean mkdirs(Path f) throws MetaException {
  try {
    return FileUtils.mkdir(getFs(f), f);
  } catch (IOException e) {
    MetaStoreUtils.logAndThrowMetaException(e);
    return false; // not reached when logAndThrowMetaException throws
  }
}
/**
 * Reports whether the directory at {@code path} is empty.
 *
 * An empty directory's content summary shows zero files and exactly one directory
 * entry (the directory itself).
 *
 * @param path directory to inspect
 * @return true if the path is an empty directory
 * @throws IOException if the content summary cannot be read
 * @throws MetaException if the file system cannot be resolved
 */
public boolean isEmpty(Path path) throws IOException, MetaException {
  ContentSummary contents = getFs(path).getContentSummary(path);
  // Return the condition directly instead of an if/return-true/return-false chain.
  return contents != null && contents.getFileCount() == 0
      && contents.getDirectoryCount() == 1;
}
/**
 * Deletes the directory {@code f}, optionally recycling its contents into the
 * change-management root first.
 *
 * @param f directory to delete
 * @param recursive whether to delete contents recursively
 * @param ifPurge whether to bypass the trash
 * @param needCmRecycle whether to MOVE-recycle the directory into cmroot before deleting
 * @return the result of the file-system handler's delete
 * @throws MetaException wrapping any recycle failure or file-system resolution failure
 */
public boolean deleteDir(Path f, boolean recursive, boolean ifPurge, boolean needCmRecycle)
    throws MetaException {
  if (needCmRecycle) {
    try {
      cm.recycle(f, RecycleType.MOVE, ifPurge);
    } catch (IOException e) {
      throw new MetaException(org.apache.hadoop.util.StringUtils.stringifyException(e));
    }
  }
  return fsHandler.deleteDir(getFs(f), f, recursive, ifPurge, conf);
}
// Resolve the table's file system through the warehouse helper so the location
// path maps to the configured FileSystem.
Warehouse wh = new Warehouse(conf); Path tblPath = new Path(tbl.getSd().getLocation()); fs = wh.getFs(tblPath);
// Timestamp for naming below. NOTE(review): SimpleDateFormat is not thread-safe;
// presumably this local instance is single-threaded — confirm. java.time's
// DateTimeFormatter would be the modern, thread-safe alternative.
Date now = new Date(); SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss");
// Resolve the table's file system through the warehouse helper built from hiveconf
// so the location path maps to the configured FileSystem.
Warehouse wh = new Warehouse(hiveconf); Path tblPath = new Path(tbl.getSd().getLocation()); fs = wh.getFs(tblPath);
// Timestamp for naming below. NOTE(review): SimpleDateFormat is not thread-safe;
// presumably this local instance is single-threaded — confirm. java.time's
// DateTimeFormatter would be the modern, thread-safe alternative.
Date now = new Date(); SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss");
/**
 * Starts the MetaStoreService. Be aware, as the current MetaStore does not implement
 * clean shutdown, starting MetaStoreService is possible only once per test.
 *
 * Also resolves the warehouse root file system and the current trash directory for
 * use by later assertions.
 *
 * @throws Exception if any Exception occurs
 */
public void start() throws Exception {
  warehouse = new Warehouse(configuration);
  warehouseRootFs = warehouse.getFs(warehouse.getWhRoot());
  trashDir = TrashPolicy.getInstance(configuration, warehouseRootFs).getCurrentTrashDir();
}
// Canonicalize the table location (replace raw-IP scheme/authority with the
// FileSystem's DNS form) before resolving its FileSystem.
tablePath = wh.getDnsPath(new Path(table.getSd().getLocation())); FileSystem fs = wh.getFs(tablePath);
/**
 * Renames {@code sourcePath} to {@code destPath} on the source path's file system,
 * optionally propagating parent-directory permissions to the destination.
 *
 * @param sourcePath existing path to rename
 * @param destPath target path
 * @param inheritPerms whether the destination should inherit its parent's permissions
 * @return true if the rename succeeded
 * @throws MetaException wrapping any failure during the rename
 */
public boolean renameDir(Path sourcePath, Path destPath, boolean inheritPerms)
    throws MetaException {
  try {
    return FileUtils.renameWithPerms(getFs(sourcePath), sourcePath, destPath,
        inheritPerms, conf);
  } catch (Exception e) {
    MetaStoreUtils.logAndThrowMetaException(e);
    return false; // unreachable when logAndThrowMetaException throws, kept for the compiler
  }
}
/**
 * Creates directory {@code f} (and any missing parents) on the file system it maps to.
 *
 * @param f directory path to create
 * @return true on successful creation
 * @throws MetaException wrapping any I/O failure
 */
public boolean mkdirs(Path f) throws MetaException {
  try {
    FileSystem targetFs = getFs(f);
    return FileUtils.mkdir(targetFs, f);
  } catch (IOException e) {
    MetaStoreUtils.logAndThrowMetaException(e);
    return false; // not reached when logAndThrowMetaException throws
  }
}
/**
 * Creates directory {@code f}, inheriting the parent directory's permissions only when
 * both {@code inheritPermCandidate} and HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS are set.
 *
 * @param f directory path to create
 * @param inheritPermCandidate caller's request to inherit permissions
 * @return true on successful creation
 * @throws MetaException wrapping any I/O failure
 */
public boolean mkdirs(Path f, boolean inheritPermCandidate) throws MetaException {
  // Effective only when the config flag AND the caller's request agree.
  boolean inheritPerms = HiveConf.getBoolVar(conf,
      HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS) && inheritPermCandidate;
  try {
    return FileUtils.mkdir(getFs(f), f, inheritPerms, conf);
  } catch (IOException e) {
    MetaStoreUtils.logAndThrowMetaException(e);
    return false; // not reached when logAndThrowMetaException throws
  }
}
/**
 * Creates directory {@code f}. Parent permissions are inherited only when the
 * HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS config flag and {@code inheritPermCandidate}
 * are both true.
 *
 * @param f directory path to create
 * @param inheritPermCandidate caller's request to inherit permissions
 * @return true on successful creation
 * @throws MetaException wrapping any I/O failure
 */
public boolean mkdirs(Path f, boolean inheritPermCandidate) throws MetaException {
  boolean inheritPerms = HiveConf.getBoolVar(conf,
      HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS) && inheritPermCandidate;
  try {
    FileSystem targetFs = getFs(f);
    return FileUtils.mkdir(targetFs, f, inheritPerms, conf);
  } catch (IOException e) {
    MetaStoreUtils.logAndThrowMetaException(e);
    return false; // not reached when logAndThrowMetaException throws
  }
}
/**
 * Reports whether the directory at {@code path} is empty.
 *
 * An empty directory's content summary shows zero files and exactly one directory
 * entry (the directory itself).
 *
 * @param path directory to inspect
 * @return true if the path is an empty directory
 * @throws IOException if the content summary cannot be read
 * @throws MetaException if the file system cannot be resolved
 */
public boolean isEmpty(Path path) throws IOException, MetaException {
  ContentSummary contents = getFs(path).getContentSummary(path);
  // Return the condition directly instead of an if/return-true/return-false chain.
  return contents != null && contents.getFileCount() == 0
      && contents.getDirectoryCount() == 1;
}
/**
 * Deletes directory {@code f} via the file-system handler, first MOVE-recycling it
 * into the change-management root when requested.
 *
 * @param f directory to delete
 * @param recursive whether to delete contents recursively
 * @param ifPurge whether to bypass the trash
 * @param needCmRecycle whether to recycle the directory into cmroot before deleting
 * @return the result of the file-system handler's delete
 * @throws MetaException wrapping any recycle or file-system resolution failure
 */
public boolean deleteDir(Path f, boolean recursive, boolean ifPurge, boolean needCmRecycle)
    throws MetaException {
  if (needCmRecycle) {
    try {
      cm.recycle(f, RecycleType.MOVE, ifPurge);
    } catch (IOException e) {
      throw new MetaException(org.apache.hadoop.util.StringUtils.stringifyException(e));
    }
  }
  FileSystem targetFs = getFs(f);
  return fsHandler.deleteDir(targetFs, f, recursive, ifPurge, conf);
}