@Override
public Path getWorkingDirectory() {
    // Ask the wrapped file system for its working directory and re-wrap
    // the resulting URI in this API's own Path type.
    final java.net.URI workingDirUri = this.fs.getWorkingDirectory().toUri();
    return new Path(workingDirUri);
}
/**
 * Returns the HBase root directory ({@link HConstants#HBASE_DIR}) from the
 * given configuration as a fully qualified {@link Path}.
 *
 * @param c configuration to read the root directory from
 * @return {@link Path} to the hbase root directory, qualified against its
 *         file system's URI and working directory
 * @throws IOException if the directory's file system cannot be resolved
 */
public static Path getRootDir(final Configuration c) throws IOException {
    final Path rootDir = new Path(c.get(HConstants.HBASE_DIR));
    final FileSystem fs = rootDir.getFileSystem(c);
    return rootDir.makeQualified(fs.getUri(), fs.getWorkingDirectory());
}
/**
 * Returns a qualified path object for the {@link FileSystem}'s working
 * directory.
 *
 * @param fs the target FileSystem
 * @return a qualified path object for the FileSystem's working directory
 * @deprecated use {@link #makeQualified(URI, Path)}
 */
@Deprecated
public Path makeQualified(FileSystem fs) {
    // Delegate to the non-deprecated overload with the fs defaults.
    final URI defaultUri = fs.getUri();
    final Path workingDir = fs.getWorkingDirectory();
    return makeQualified(defaultUri, workingDir);
}
/**
 * Gets the qualified root dir of the mob files, i.e. the
 * {@link MobConstants#MOB_DIR_NAME} directory under the HBase root dir.
 *
 * @param conf The current configuration.
 * @return The qualified root dir.
 * @throws IOException if the mob dir's file system cannot be resolved
 */
public static Path getQualifiedMobRootDir(Configuration conf) throws IOException {
    final Path mobRootDir =
        new Path(new Path(conf.get(HConstants.HBASE_DIR)), MobConstants.MOB_DIR_NAME);
    final FileSystem fs = mobRootDir.getFileSystem(conf);
    return mobRootDir.makeQualified(fs.getUri(), fs.getWorkingDirectory());
}
/**
 * Maps a path to its object-store object name: the absolute path with the
 * leading {@code '/'} removed, or the empty string for a scheme-only URI
 * (e.g. a bare bucket root) whose path component is empty.
 *
 * @param path the (possibly relative) path to convert
 * @return the object name for {@code path}
 */
private String pathToObjectName(final Path path) {
    org.apache.hadoop.fs.Path hadoopPath = HadoopFileSystem.toHadoopPath(path);
    // Resolve relative paths against the file system's working directory
    // so the URI below always has an absolute path component.
    if (!hadoopPath.isAbsolute()) {
        hadoopPath = new org.apache.hadoop.fs.Path(fs.getWorkingDirectory(), hadoopPath);
    }
    // Hoisted: the original rebuilt hadoopPath.toUri() three times.
    final java.net.URI uri = hadoopPath.toUri();
    final String uriPath = uri.getPath();
    return uri.getScheme() != null && uriPath.isEmpty() ? "" : uriPath.substring(1);
}
/**
 * Return Qualified Path of the specified family/file.
 *
 * @param familyName Column Family Name
 * @param fileName File Name
 * @return The qualified Path for the specified family/file
 */
Path getStoreFilePath(final String familyName, final String fileName) {
    final Path storeFile = new Path(getStoreDir(familyName), fileName);
    return storeFile.makeQualified(fs.getUri(), fs.getWorkingDirectory());
}
@Override
public Path getQualifiedPath() {
    // Qualify the file's path against the file system's URI and working dir.
    final Path rawPath = this.fileInfo.getPath();
    return rawPath.makeQualified(fs.getUri(), fs.getWorkingDirectory());
}
/**
 * Resolves {@code path} against the file system's working directory,
 * returning it unchanged when it is already absolute.
 *
 * @param fileSystem file system supplying the working directory
 * @param path path to make absolute
 * @return an absolute version of {@code path}
 * @throws IOException declared for API compatibility with callers
 */
public static Path makeAbsolute(FileSystem fileSystem, Path path) throws IOException {
    return path.isAbsolute() ? path : new Path(fileSystem.getWorkingDirectory(), path);
}
/**
 * Checks that the WAL directory is not nested under the HBase root dir.
 * Note: either returns {@code true} or throws — it never returns false;
 * sharing the root dir itself (equal paths) is allowed.
 *
 * @param walDir candidate WAL directory
 * @param c configuration used to resolve the root dir and file system
 * @return always {@code true} when the directory is acceptable
 * @throws IOException if the file system cannot be resolved
 * @throws IllegalStateException if {@code walDir} is under the root dir
 */
private static boolean isValidWALRootDir(Path walDir, final Configuration c) throws IOException {
    final Path rootDir = getRootDir(c);
    final FileSystem fs = walDir.getFileSystem(c);
    final Path qualifiedWalDir = walDir.makeQualified(fs.getUri(), fs.getWorkingDirectory());
    // Only a strictly nested WAL dir is illegal, hence the equality guard.
    if (!qualifiedWalDir.equals(rootDir)
        && qualifiedWalDir.toString().startsWith(rootDir.toString() + "/")) {
        throw new IllegalStateException("Illegal WAL directory specified. "
            + "WAL directories are not permitted to be under the root directory if set.");
    }
    return true;
}
/**
 * Builds a unique (UUID-named) directory path under {@code <working dir>/NAME},
 * creating the parent if needed.
 *
 * @param withDirCreated whether to also create the unique directory itself
 * @return the unique temp directory path
 * @throws IOException if the file system operations fail
 */
private Path generateUniqTempDir(boolean withDirCreated) throws IOException {
    final FileSystem fs = FSUtils.getCurrentFileSystem(getConf());
    final Path parentDir = new Path(fs.getWorkingDirectory(), NAME);
    if (!fs.exists(parentDir)) {
        fs.mkdirs(parentDir);
    }
    final Path uniqueDir = new Path(parentDir, UUID.randomUUID().toString());
    if (withDirCreated) {
        fs.mkdirs(uniqueDir);
    }
    return uniqueDir;
}
/**
 * Returns the HBase WAL root directory ({@value HBASE_WAL_DIR}) from the
 * configuration as a qualified {@link Path}, defaulting to the HBase root
 * dir when no separate WAL dir is configured or the configured one is
 * rejected by {@link #isValidWALRootDir}.
 *
 * @param c configuration
 * @return qualified {@link Path} to the WAL root directory
 * @throws IOException e
 */
public static Path getWALRootDir(final Configuration c) throws IOException {
    final Path walDir = new Path(c.get(HBASE_WAL_DIR, c.get(HConstants.HBASE_DIR)));
    if (!isValidWALRootDir(walDir, c)) {
        return getRootDir(c);
    }
    final FileSystem fs = walDir.getFileSystem(c);
    return walDir.makeQualified(fs.getUri(), fs.getWorkingDirectory());
}
/**
 * Qualifies {@code srcf} against the given file system's working directory
 * and default URI, then strips the scheme and authority from the result.
 *
 * @param srcf path to qualify
 * @param fs file system supplying the default URI and working directory
 * @return the qualified path with scheme and authority removed
 */
private static Path getQualifiedPathWithoutSchemeAndAuthority(Path srcf, FileSystem fs) {
    // Fix: qualify against fs.getUri() rather than srcf.toUri() — a relative
    // or scheme-less srcf yields a default URI with no scheme, so the path
    // was never qualified against the file system it belongs to. The shim
    // strips scheme/authority afterwards, so callers see compatible results.
    Path qualified = srcf.makeQualified(fs.getUri(), fs.getWorkingDirectory());
    return ShimLoader.getHadoopShims().getPathWithoutSchemeAndAuthority(qualified);
}
/**
 * @return Where to write test data on the test filesystem; Returns working directory
 *         for the test filesystem by default
 * @see #setupDataTestDirOnTestFS()
 * @see #getTestFileSystem()
 */
private Path getBaseTestDirOnTestFS() throws IOException {
    // "test-data" directly under the test filesystem's working directory.
    return new Path(getTestFileSystem().getWorkingDirectory(), "test-data");
}
/**
 * Verifies that nested directories are properly copied with a <tt>hdfs://</tt> file
 * system (from a <tt>/absolute/path</tt> source path).
 */
@Test
public void testCopyFromLocalRecursiveWithoutScheme() throws Exception {
    final FileSystem targetFileSystem = hdfsRootPath.getFileSystem(hadoopConfig);
    // Copy into the working directory; the source path carries no scheme.
    testCopyFromLocalRecursive(
        targetFileSystem, targetFileSystem.getWorkingDirectory(), tempFolder, false);
}
/**
 * Verifies that nested directories are properly copied with a <tt>hdfs://</tt> file
 * system (from a <tt>file:///absolute/path</tt> source path).
 */
@Test
public void testCopyFromLocalRecursiveWithScheme() throws Exception {
    final FileSystem targetFileSystem = hdfsRootPath.getFileSystem(hadoopConfig);
    // Copy into the working directory; the source path includes its scheme.
    testCopyFromLocalRecursive(
        targetFileSystem, targetFileSystem.getWorkingDirectory(), tempFolder, true);
}
@Test
public void test_RelativeJavaIoTmpDir_CoercedTo_AbsolutePath() {
    final FileSystem localFileSystem = new LocalFileSystem();
    final String systemJavaIoTmpDir = makeVarName(SYSTEM, "java.io.tmpdir");
    // Working dir is derived from user.dir, so it can be read once up front.
    final Path workingDir = localFileSystem.getWorkingDirectory();

    // A "./" style relative tmpdir must be resolved against the working dir.
    System.setProperty("java.io.tmpdir", "./relativePath");
    assertEquals(new Path(workingDir, "./relativePath").toString(),
        SystemVariables.substitute(systemJavaIoTmpDir));

    // A multi-segment relative tmpdir must be resolved the same way.
    System.setProperty("java.io.tmpdir", "this/is/a/relative/path");
    assertEquals(new Path(workingDir, "this/is/a/relative/path").toString(),
        SystemVariables.substitute(systemJavaIoTmpDir));
}
/** @throws Exception If failed. */
@Test
public void testGetWorkingDirectory() throws Exception {
    final Path dir = new Path("/tmp/some/dir");
    fs.mkdirs(dir);
    fs.setWorkingDirectory(dir);
    // The FS may prefix scheme/authority, so only the path suffix is checked.
    final String workingDir = fs.getWorkingDirectory().toString();
    assertTrue(workingDir.endsWith("/tmp/some/dir"));
}
/** @throws Exception If failed. */
@Test
public void testGetWorkingDirectoryIfDefault() throws Exception {
    // Without an explicit setWorkingDirectory, the default is the user home.
    final String workingDir = fs.getWorkingDirectory().toString();
    assertTrue(workingDir.endsWith("/user/" + getClientFsUser()));
}
@Test
public void testEmptyLogDir() throws Exception {
    LOG.info("testEmptyLogDir");
    slm = new SplitLogManager(master, conf);
    final FileSystem fs = TEST_UTIL.getTestFileSystem();
    final Path logRoot = new Path(fs.getWorkingDirectory(), HConstants.HREGION_LOGDIR_NAME);
    final Path emptyLogDirPath =
        new Path(logRoot, ServerName.valueOf("emptyLogDir", 1, 1).toString());
    fs.mkdirs(emptyLogDirPath);
    // Splitting an empty WAL directory should succeed and delete the dir.
    slm.splitLogDistributed(emptyLogDirPath);
    assertFalse(fs.exists(emptyLogDirPath));
}
@BeforeClass public static void setUpOneTime() throws Exception { fs = new LocalFileSystem(); fs.initialize(fs.getWorkingDirectory().toUri(), new Configuration()); HiveConf hiveConf = new HiveConf(); hiveConf.setInt(HCatConstants.HCAT_HIVE_CLIENT_EXPIRY_TIME, 0); // Hack to initialize cache with 0 expiry time causing it to return a new hive client every time // Otherwise the cache doesn't play well with the second test method with the client gets closed() in the // tearDown() of the previous test HCatUtil.getHiveMetastoreClient(hiveConf); MapCreate.writeCount = 0; MapRead.readCount = 0; }