Refine search
/**
 * Creates the randomized staging directory under {@code baseDir} and opens
 * its permissions via {@code PERM_ALL_ACCESS}.
 *
 * @param baseDir   parent directory of the staging area
 * @param user      requesting user (unused here; kept for interface parity)
 * @param randomDir name of the randomized child directory
 * @return path of the newly created staging directory
 * @throws IOException if creation or chmod fails
 */
private Path createStagingDir(Path baseDir, User user, String randomDir) throws IOException {
    Path stagingPath = new Path(baseDir, randomDir);
    fs.mkdirs(stagingPath, PERM_ALL_ACCESS);
    // NOTE(review): setPermission is repeated after mkdirs, presumably because
    // mkdirs applies the filesystem umask — confirm against the FS contract.
    fs.setPermission(stagingPath, PERM_ALL_ACCESS);
    return stagingPath;
}
/**
 * Creates the randomized staging directory under {@code baseDir} on the sink
 * filesystem and opens its permissions via {@code PERM_ALL_ACCESS}.
 *
 * @param baseDir   parent directory of the staging area
 * @param user      requesting user (unused here; kept for interface parity)
 * @param randomDir name of the randomized child directory
 * @return path of the newly created staging directory
 * @throws IOException if creation or chmod fails
 */
private Path createStagingDir(Path baseDir, User user, String randomDir) throws IOException {
    Path stagingPath = new Path(baseDir, randomDir);
    sinkFs.mkdirs(stagingPath, PERM_ALL_ACCESS);
    // NOTE(review): setPermission is repeated after mkdirs, presumably because
    // mkdirs applies the filesystem umask — confirm against the FS contract.
    sinkFs.setPermission(stagingPath, PERM_ALL_ACCESS);
    return stagingPath;
}
/**
 * Sets the permission bits of {@code path} to {@code mode}.
 *
 * Failures are logged at WARN level and then rethrown to the caller.
 *
 * @param path file or directory to chmod
 * @param mode raw permission bits to apply
 * @throws IOException if the path cannot be resolved or chmod fails
 */
@Override
public void setMode(String path, short mode) throws IOException {
    FileSystem fileSystem = getFs();
    Path target = new Path(path);
    try {
        // Resolve the status first so setPermission is applied to the path as
        // the filesystem reports it (and a missing file surfaces here).
        FileStatus status = fileSystem.getFileStatus(target);
        fileSystem.setPermission(status.getPath(), new FsPermission(mode));
    } catch (IOException e) {
        LOG.warn("Fail to set permission for {} with perm {} : {}", path, mode, e.getMessage());
        throw e;
    }
}
/**
 * Check permissions for bulk load staging directory. This directory has special hidden
 * permissions. Create it if necessary.
 *
 * @throws IOException if the directory cannot be created or its permissions set
 */
private void checkStagingDir() throws IOException {
    Path p = new Path(this.rootdir, HConstants.BULKLOAD_STAGING_DIR_NAME);
    try {
        if (!this.fs.exists(p)) {
            if (!this.fs.mkdirs(p, HiddenDirPerms)) {
                throw new IOException("Failed to create staging directory " + p.toString());
            }
        } else {
            // Directory already exists: re-assert the hidden permissions in
            // case they were changed out of band.
            this.fs.setPermission(p, HiddenDirPerms);
        }
    } catch (IOException e) {
        // FIX: pass the exception to the logger — the original logged only the
        // message text, losing the stack trace from the log output.
        LOG.error("Failed to create or set permission on staging directory " + p.toString(), e);
        throw new IOException(
            "Failed to create or set permission on staging directory " + p.toString(), e);
    }
}
private static FileOutputStream insecureCreateForWrite(File f, int permissions) throws IOException { // If we can't do real security, do a racy exists check followed by an // open and chmod if (f.exists()) { throw new AlreadyExistsException("File " + f + " already exists"); } FileOutputStream fos = new FileOutputStream(f); boolean success = false; try { rawFilesystem.setPermission(new Path(f.getAbsolutePath()), new FsPermission((short)permissions)); success = true; return fos; } finally { if (!success) { fos.close(); } } }
/**
 * Persists a *.metadata file to a specific directory in HDFS.
 *
 * @param directoryPath where to write the metadata file.
 * @param outputFs {@link org.apache.hadoop.fs.FileSystem} where to write the file
 * @param metadataFileName name of the file (including extension)
 * @param metadata {@link voldemort.store.readonly.ReadOnlyStorageMetadata} to persist on HDFS
 * @throws IOException if the FileSystem operations fail
 */
private void writeMetadataFile(Path directoryPath,
                               FileSystem outputFs,
                               String metadataFileName,
                               ReadOnlyStorageMetadata metadata) throws IOException {
    Path metadataPath = new Path(directoryPath, metadataFileName);
    // FIX: try-with-resources — the original leaked the stream whenever
    // setPermission or write threw before the explicit close().
    try (FSDataOutputStream metadataStream = outputFs.create(metadataPath)) {
        outputFs.setPermission(metadataPath, new FsPermission(HADOOP_FILE_PERMISSION));
        // NOTE(review): getBytes() uses the platform default charset; the
        // payload is JSON, so this should likely be UTF-8 explicitly — confirm
        // with readers of this file before changing.
        metadataStream.write(metadata.toJsonString().getBytes());
        metadataStream.flush();
    }
}
int totalHFiles = hfiles.size(); for (int i = 0; i < totalHFiles; i++) { sourceHFilePath = new Path(sourceBaseNamespaceDirPath, hfiles.get(i)); localHFilePath = new Path(stagingDir, sourceHFilePath.getName()); try { FileUtil.copy(sourceFs, sourceHFilePath, sinkFs, localHFilePath, false, conf); + ". Trying to copy from hfile archive directory.", e); sourceHFilePath = new Path(sourceHFileArchiveDirPath, hfiles.get(i)); sinkFs.setPermission(localHFilePath, PERM_ALL_ACCESS);
@Override public void failedBulkLoad(final byte[] family, final String srcPath) throws IOException { try { Path p = new Path(srcPath); if (srcFs == null) { srcFs = FileSystem.newInstance(p.toUri(), conf); Path stageP = new Path(stagingDir, new Path(Bytes.toString(family), p.getName())); fs.setPermission(p, origPermissions.get(srcPath)); } else { LOG.warn("Can't find previous permission for path=" + srcPath);
// Body of a callback run as the requesting user; the enclosing anonymous
// class begins outside this snippet (its closing "} });" is retained below).
@Override
public Map<byte[], List<Path>> run() {
    FileSystem fs = null;
    try {
        fs = FileSystem.get(conf);
        // Pre-create one staging subdirectory per column family and open its
        // permissions (PERM_ALL_ACCESS) so files can be moved into it.
        for (Pair<byte[], String> el : familyPaths) {
            Path stageFamily = new Path(bulkToken, Bytes.toString(el.getFirst()));
            if (!fs.exists(stageFamily)) {
                fs.mkdirs(stageFamily);
                fs.setPermission(stageFamily, PERM_ALL_ACCESS);
            }
        }
        if (fsCreatedListener != null) {
            fsCreatedListener.accept(region);
        }
        //We call bulkLoadHFiles as requesting user
        //To enable access prior to staging
        return region.bulkLoadHFiles(familyPaths, true,
            new SecureBulkLoadListener(fs, bulkToken, conf), request.getCopyFile());
    } catch (Exception e) {
        // NOTE(review): failures are logged and swallowed here; callers must
        // treat a null return as "bulk load failed".
        LOG.error("Failed to complete bulk load", e);
    }
    return null;
} });
/**
 * Private constructor: initializes the static replication change-manager
 * state exactly once, guarded by the {@code inited} flag.
 *
 * When REPLCMENABLED is set, records the cmroot path/conf, creates cmroot
 * with permission 700 if missing, and caches the current metastore
 * user and primary group names.
 *
 * @param conf metastore configuration to read REPLCM settings from
 * @throws MetaException wrapping any IOException from filesystem or UGI calls
 */
private ReplChangeManager(Configuration conf) throws MetaException {
    try {
        if (!inited) {
            if (MetastoreConf.getBoolVar(conf, ConfVars.REPLCMENABLED)) {
                ReplChangeManager.enabled = true;
                ReplChangeManager.cmroot = new Path(MetastoreConf.getVar(conf, ConfVars.REPLCMDIR));
                ReplChangeManager.conf = conf;
                FileSystem cmFs = cmroot.getFileSystem(conf);
                // Create cmroot with permission 700 if not exist
                if (!cmFs.exists(cmroot)) {
                    cmFs.mkdirs(cmroot);
                    cmFs.setPermission(cmroot, new FsPermission("700"));
                }
                UserGroupInformation usergroupInfo = UserGroupInformation.getCurrentUser();
                msUser = usergroupInfo.getShortUserName();
                msGroup = usergroupInfo.getPrimaryGroupName();
            }
            // NOTE(review): inited is set even when REPLCM is disabled, so a
            // later call with REPLCM enabled will not re-initialize.
            inited = true;
        }
    } catch (IOException e) {
        throw new MetaException(StringUtils.stringifyException(e));
    }
}
this.checkSumDigestValue[chunkId] = CheckSum.getInstance(checkSumType); this.position[chunkId] = 0; this.taskIndexFileName[chunkId] = new Path(FileOutputFormat.getOutputPath(conf), getStoreName() + "." + Integer.toString(chunkId) + "_" + this.taskId + INDEX_FILE_EXTENSION + fileExtension); this.taskValueFileName[chunkId] = new Path(FileOutputFormat.getOutputPath(conf), getStoreName() + "." + Integer.toString(chunkId) + "_" fs.setPermission(this.taskIndexFileName[chunkId], new FsPermission(HadoopStoreBuilder.HADOOP_FILE_PERMISSION)); logger.info("Setting permission to 755 for " + this.taskIndexFileName[chunkId]); fs.setPermission(this.taskValueFileName[chunkId], new FsPermission(HadoopStoreBuilder.HADOOP_FILE_PERMISSION)); logger.info("Setting permission to 755 for " + this.taskValueFileName[chunkId]);
@Override public String prepareBulkLoad(final byte[] family, final String srcPath, boolean copyFile) throws IOException { Path p = new Path(srcPath); Path stageP = new Path(stagingDir, new Path(Bytes.toString(family), p.getName())); fs.setPermission(stageP, PERM_ALL_ACCESS); return stageP.toString();
new Path(testTempDirPath, PathUtils.withoutLeadingSeparator(new Path(fileToCreate .getString(TEST_DATA_PATH_LOCAL_KEY)))); this.fs.setPermission(fullFilePath, new FsPermission(fileToCreate.getString(TEST_DATA_PERMISSIONS_KEY)));
Path tmpDir = hfilePath.getParent(); if (!tmpDir.getName().equals(TMP_DIR)) { tmpDir = new Path(tmpDir, TMP_DIR); ColumnFamilyDescriptor familyDesc = table.getDescriptor().getColumnFamily(family); Path botOut = new Path(tmpDir, uniqueName + ".bottom"); Path topOut = new Path(tmpDir, uniqueName + ".top"); splitStoreFile(getConf(), hfilePath, familyDesc, splitKey, botOut, topOut); fs.setPermission(tmpDir, FsPermission.valueOf("-rwxrwxrwx")); fs.setPermission(botOut, FsPermission.valueOf("-rwxrwxrwx")); fs.setPermission(topOut, FsPermission.valueOf("-rwxrwxrwx"));
outputDirName = "node-" + this.nodeId; Path outputDir = new Path(this.outputDir, outputDirName); outputFs.setPermission(outputDir, new FsPermission(HadoopStoreBuilder.HADOOP_FILE_PERMISSION)); logger.info("Setting permission to 755 for " + outputDir); Path checkSumIndexFile = new Path(outputDir, chunkFileName + INDEX_FILE_EXTENSION + CHECKSUM_FILE_EXTENSION); Path checkSumValueFile = new Path(outputDir, chunkFileName + DATA_FILE_EXTENSION + CHECKSUM_FILE_EXTENSION); outputFs.setPermission(checkSumIndexFile, new FsPermission(HadoopStoreBuilder.HADOOP_FILE_PERMISSION)); indexCheckSum.add(CheckSumMetadata.INDEX_FILE_SIZE_IN_BYTES, outputFs.setPermission(checkSumValueFile, new FsPermission(HadoopStoreBuilder.HADOOP_FILE_PERMISSION)); valueCheckSum.add(CheckSumMetadata.DATA_FILE_SIZE_IN_BYTES,
/**
 * Creates a file with a random 10-letter lowercase name, writes a fixed
 * rhyme into it, applies {@code perms}, and marks it for delete-on-exit.
 *
 * @param fs    filesystem to create the file on
 * @param perms permissions to set on the new file
 * @return path of the created file
 * @throws IOException if any filesystem operation fails
 */
private Path createFile(FileSystem fs, FsPermission perms) throws IOException {
    // Random 10-letter lowercase name, created in the working directory.
    StringBuilder buf = new StringBuilder();
    for (int i = 0; i < 10; i++) {
        buf.append((char) (rand.nextInt(26) + 'a'));
    }
    Path p = new Path(buf.toString());
    // FIX: try-with-resources — the original leaked the stream if writeBytes
    // threw before the explicit close().
    try (FSDataOutputStream os = fs.create(p)) {
        os.writeBytes("Mary had a little lamb\nit's fleece was white as snow\nand anywhere that Mary " +
            "went\nthe lamb was sure to go\n");
    }
    fs.setPermission(p, perms);
    fs.deleteOnExit(p);
    return p;
}
/**
 * Verifies that setPermission takes effect even while an output stream to
 * the file is still open.
 *
 * @throws Exception If failed.
 */
@Test
public void testSetPermissionIfOutputStreamIsNotClosed() throws Exception {
    Path fsHome = new Path(primaryFsUri);
    Path file = new Path(fsHome, "myFile");
    FsPermission expected = new FsPermission((short) 123);

    FSDataOutputStream out = fs.create(file);

    // Change permissions before the stream is closed.
    fs.setPermission(file, expected);
    out.close();

    assertEquals(expected, fs.getFileStatus(file).getPermission());
}
/**
 * Tests that HdfsUtils#setFullFileStatus
 * does not throw an exception when setting permissions fails and without recursion.
 */
@Test
public void testSetFullFileStatusFailInheritPerms() throws IOException {
    Configuration conf = new Configuration();
    conf.set("dfs.namenode.acls.enabled", "false");
    HdfsUtils.HadoopFileStatus mockHadoopFileStatus = mock(HdfsUtils.HadoopFileStatus.class);
    FileStatus mockSourceStatus = mock(FileStatus.class);
    FileSystem mockFs = mock(FileSystem.class);
    // FIX: 0777 (octal) — the original used decimal 777, which encodes a
    // nonsensical permission mask. The concrete value does not affect the
    // test outcome (setPermission is stubbed to throw), but octal states the
    // intended rwxrwxrwx.
    when(mockSourceStatus.getPermission()).thenReturn(new FsPermission((short) 0777));
    when(mockHadoopFileStatus.getFileStatus()).thenReturn(mockSourceStatus);
    doThrow(RuntimeException.class).when(mockFs)
        .setPermission(any(Path.class), any(FsPermission.class));
    // Must not propagate the RuntimeException raised by setPermission.
    HdfsUtils.setFullFileStatus(conf, mockHadoopFileStatus, null, mockFs, new Path("fakePath"), false);
    verify(mockFs).setPermission(any(Path.class), any(FsPermission.class));
}
/**
 * Exercises setPermission across a sample of permission bit patterns and
 * verifies each one is reflected by getFileStatus.
 *
 * @throws Exception If failed.
 */
@SuppressWarnings("OctalInteger")
@Test
public void testSetPermission() throws Exception {
    Path fsHome = new Path(primaryFsUri);
    Path file = new Path(fsHome, "/tmp/my");

    fs.create(file).close();

    // Step by 7 to sample the 0000..0777 range without testing all values.
    for (short bits = 0; bits <= 0777; bits += 7) {
        FsPermission expected = new FsPermission(bits);
        fs.setPermission(file, expected);
        assertEquals(expected, fs.getFileStatus(file).getPermission());
    }
}
Path destinationPath = new Path(destination); try { FsPermission permission; destinationPath, permission)); } else { fs.setPermission(destinationPath, permission);