/**
 * A copy constructor. All the fields are copied from the copied {@code igfsFile},
 * but the {@code groupBlockSize} which is specified separately.
 *
 * @param igfsFile The file to copy.
 * @param grpBlockSize Group block size.
 */
public IgfsFileImpl(IgfsFile igfsFile, long grpBlockSize) {
    A.notNull(igfsFile, "igfsFile");

    path = igfsFile.path();

    // Preserve the original ID when copying another IgfsFileImpl, otherwise mint a fresh one.
    fileId = igfsFile instanceof IgfsFileImpl ? ((IgfsFileImpl)igfsFile).fileId : IgniteUuid.randomUuid();

    blockSize = igfsFile.blockSize();
    len = igfsFile.length();

    // Group block size is only meaningful for regular files.
    this.grpBlockSize = igfsFile.isFile() ? grpBlockSize : 0L;

    props = igfsFile.properties();

    accessTime = igfsFile.accessTime();
    modificationTime = igfsFile.modificationTime();

    flags = IgfsUtils.flags(igfsFile.isDirectory(), igfsFile.isFile());
}
/**
 * Convert IGFS file information into Hadoop file status.
 *
 * @param file IGFS file information.
 * @return Hadoop file status.
 */
private FileStatus convert(IgfsFile file) {
    // Owner/group fall back to the current user and the "users" group when the properties are absent.
    String owner = file.property(IgfsUtils.PROP_USER_NAME, user);
    String grp = file.property(IgfsUtils.PROP_GROUP_NAME, "users");

    return new FileStatus(
        file.length(),
        file.isDirectory(),
        dfltReplication,
        file.groupBlockSize(),
        file.modificationTime(),
        file.accessTime(),
        permission(file),
        owner,
        grp,
        convert(file.path())) {
        @Override public String toString() {
            return "FileStatus [path=" + getPath() + ", isDir=" + isDirectory() + ", len=" + getLen() + "]";
        }
    };
}
/**
 * Calculates size of directory or file for given ID.
 *
 * @param file IGFS File object.
 * @param sum Summary object that will collect information.
 * @throws IgniteCheckedException If failed.
 */
private void summaryRecursive(IgfsFile file, IgfsPathSummary sum) throws IgniteCheckedException {
    assert file != null;
    assert sum != null;

    if (!file.isDirectory()) {
        // Regular file: account for the file itself and its length.
        sum.filesCount(sum.filesCount() + 1);
        sum.totalLength(sum.totalLength() + file.length());

        return;
    }

    // Root is never counted as a directory.
    if (!F.eq(IgfsPath.ROOT, file.path()))
        sum.directoriesCount(sum.directoriesCount() + 1);

    // Descend into every child entry.
    for (IgfsFile child : listFiles(file.path()))
        summaryRecursive(child, sum);
}
if (!status.isDirectory() && !curPath.equals(endPath)) throw new IgniteCheckedException("Failed to create path the locally because secondary file " + "system directory structure was modified concurrently and the path is not a directory as " + IgfsEntryInfo curInfo = status.isDirectory() ? IgfsUtils.createDirectory( IgniteUuid.randomUuid(), null, status.properties(), status.accessTime(), status.modificationTime() ) : IgfsUtils.createFile( IgniteUuid.randomUuid(), igfsCtx.configuration().getBlockSize(), status.length(), null, null, igfsCtx.igfs().evictExclude(curPath, false), status.properties(), status.accessTime(), status.modificationTime() );
/**
 * Dump IGFS content.
 *
 * @param igfs IGFS.
 * @param path Path.
 * @throws Exception If failed.
 */
private static void dumpIgfs(IgniteFileSystem igfs, IgfsPath path) throws Exception {
    IgfsFile file = igfs.info(path);

    assert file != null;

    System.out.println(file.path());

    if (!file.isDirectory()) {
        // Regular file: print its content line by line.
        try (BufferedReader br = new BufferedReader(new InputStreamReader(igfs.open(path)))) {
            for (String line = br.readLine(); line != null; line = br.readLine())
                System.out.println(line);
        }
    }
    else {
        // Directory: recurse into every child.
        for (IgfsPath child : igfs.listPaths(path))
            dumpIgfs(igfs, child);
    }
}
log.debug("File descriptor: " + desc); Collection<IgfsBlockLocation> aff = fs.affinity(path, 0, desc.length()); assertFalse("Affinity: " + aff, desc.length() != 0 && aff.isEmpty()); int blockSize = desc.blockSize(); assertEquals("File size", size, desc.length()); assertEquals("Binary block size", CFG_BLOCK_SIZE, blockSize); assertEquals("Type", true, desc.isFile()); assertEquals("Type", false, desc.isDirectory());
/** * Checks file access & modification time equality in the file itself and in the same file found through * the listing of its parent. * * @param fs The file system. * @param p The file path. * * @return Tuple of access and modification times of the file. */ private T2<Long, Long> checkParentListingTime(IgfsSecondaryFileSystem fs, IgfsPath p) { IgfsFile f0 = fs.info(p); T2<Long, Long> t0 = new T2<>(f0.accessTime(), f0.modificationTime()); // Root cannot be seen through the parent listing: if (!F.eq(IgfsPath.ROOT, p)) { assertNotNull(f0); Collection<IgfsFile> listing = fs.listFiles(p.parent()); IgfsFile f1 = null; for (IgfsFile fi : listing) { if (F.eq(fi.path(), p)) { f1 = fi; break; } } assertNotNull(f1); // file should be found in parent listing. T2<Long, Long> t1 = new T2<>(f1.accessTime(), f1.modificationTime()); assertEquals(t0, t1); } return t0; }
/** @throws Exception If failed. */ @Test public void testZeroReplicationFactor() throws Exception { // This test doesn't make sense for any mode except of PRIMARY. if (mode == PRIMARY) { Path igfsHome = new Path(PRIMARY_URI); Path file = new Path(igfsHome, "someFile"); try (FSDataOutputStream out = fs.create(file, (short)0)) { out.write(new byte[1024 * 1024]); } IgniteFileSystem igfs = grid(0).fileSystem("igfs"); IgfsPath filePath = new IgfsPath("/someFile"); IgfsFile fileInfo = igfs.info(filePath); awaitPartitionMapExchange(); Collection<IgfsBlockLocation> locations = igfs.affinity(filePath, 0, fileInfo.length()); assertEquals(1, locations.size()); IgfsBlockLocation location = F.first(locations); assertEquals(1, location.nodeIds().size()); } }
Collection<IgfsBlockLocation> aff = fs.affinity(path, 0, file.length(), args.maxRangeLength()); ", subgrid=" + subgrid + ']'); IgfsJob job = createJob(path, new IgfsFileRange(file.path(), loc.start(), loc.length()), args); ComputeJob jobImpl = igfsProc.createJob(job, fs.name(), file.path(), loc.start(), loc.length(), args.recordResolver()); assert totalLen == file.length();
throw new IgfsPathNotFoundException("File not found: " + path); if (!info.isFile()) throw new IgfsPathIsDirectoryException("Failed to open file (not a file): " + path); new IgfsLazySecondaryFileSystemPositionedReadable(secondaryFs, path, bufSize); long len = info.length(); int blockSize = info.blockSize() > 0 ? info.blockSize() : cfg.getBlockSize(); info.length(), blockSize, blockCnt, true);
/** {@inheritDoc} */
@Override public T2<Long, Long> times(String path) throws IOException {
    IgfsFile info = igfsEx.info(new IgfsPath(path));

    if (info == null)
        throw new IOException("Path not found: " + path);

    long modTime = info.modificationTime();
    long accTime = info.accessTime();

    // Modification time goes first in the resulting tuple.
    return new T2<>(modTime, accTime);
}
/**
 * Test list files routine when the path doesn't exist locally.
 *
 * @throws Exception If failed.
 */
@Test
public void testListFilesPathMissing() throws Exception {
    create(igfsSecondary, paths(DIR, SUBDIR, SUBSUBDIR), paths(FILE));

    Collection<IgfsFile> paths = igfs.listFiles(SUBDIR);

    assert paths != null;
    assert paths.size() == 2;

    // Extract both listed paths; the listing order is unspecified.
    Iterator<IgfsFile> iter = paths.iterator();

    IgfsPath p1 = iter.next().path();
    IgfsPath p2 = iter.next().path();

    // The listing must contain exactly SUBSUBDIR and FILE, in either order.
    assert (SUBSUBDIR.equals(p1) && FILE.equals(p2)) || (FILE.equals(p1) && SUBSUBDIR.equals(p2));
}
assert igfs.info(new IgfsPath("/r")).isFile(); assert igfs.info(new IgfsPath("/k/l")).isFile(); assert igfs.info(new IgfsPath("/k/l")).isFile(); assert igfs.info(new IgfsPath("/k/l")).isFile(); assert igfs.info(new IgfsPath("/k/l")).isFile(); assert igfs.info(new IgfsPath("/x/y")).isDirectory(); assert igfs.info(new IgfsPath("/x/y/f")).isFile(); assert igfs.info(new IgfsPath("/x/y/z/f")).isFile(); assert igfs.info(new IgfsPath("/x/y/z/t/f")).isFile(); assert igfs.info(new IgfsPath("/x/y/z/t/t2/t3/t4/t5/f")).isFile();
/** {@inheritDoc} */
@SuppressWarnings("ConstantConditions")
@Override public Map<String, String> properties(String path) {
    // Info is assumed non-null for paths queried here (hence the suppressed inspection).
    IgfsFile info = igfsEx.info(new IgfsPath(path));

    return info.properties();
}
/**
 * Convert IGFS file attributes into Hadoop permission.
 *
 * @param file File info.
 * @return Hadoop permission.
 */
private FsPermission permission(IgfsFile file) {
    String perm = file.property(IgfsUtils.PROP_PERMISSION, null);

    if (perm != null) {
        try {
            // Permission is stored as an octal string.
            return new FsPermission((short)Integer.parseInt(perm, 8));
        }
        catch (NumberFormatException ignore) {
            // Malformed value — fall through to the default permission.
        }
    }

    return FsPermission.getDefault();
}
/**
 * Test properties management in meta-cache.
 *
 * @throws Exception If failed.
 */
@Test
public void testUpdateProperties() throws Exception {
    IgfsPath p = path("/tmp/my");

    igfs.mkdirs(p);

    Map<String, String> oldProps = igfs.info(p).properties();

    // Add two properties one at a time.
    igfs.update(p, F.asMap("a", "1"));
    igfs.update(p, F.asMap("b", "2"));

    assertEquals("1", igfs.info(p).property("a"));
    assertEquals("2", igfs.info(p).property("b"));

    // Overwrite an existing property.
    igfs.update(p, F.asMap("b", "3"));

    // Expected final state: original properties plus the two updates.
    Map<String, String> expProps = new HashMap<>(oldProps);

    expProps.put("a", "1");
    expProps.put("b", "3");

    assertEquals("3", igfs.info(p).property("b"));
    assertEquals(expProps, igfs.info(p).properties());

    // A missing property falls back to the supplied default.
    assertEquals("5", igfs.info(p).property("c", "5"));

    // Invalid arguments must be rejected with descriptive errors.
    assertUpdatePropertiesFails(null, null, NullPointerException.class, "Ouch! Argument cannot be null");
    assertUpdatePropertiesFails(p, null, NullPointerException.class, "Ouch! Argument cannot be null");
    assertUpdatePropertiesFails(null, F.asMap("x", "9"), NullPointerException.class,
        "Ouch! Argument cannot be null");

    assertUpdatePropertiesFails(p, Collections.<String, String>emptyMap(), IllegalArgumentException.class,
        "Ouch! Argument is invalid");
}
@Override public Object call() throws Exception { for (int cur = cnt.incrementAndGet(); cur < max; cur = cnt.incrementAndGet()) { IgfsFile info = igfs.info(path(cur)); assertNotNull("Expects file exist: " + cur, info); assertTrue("Expects file is a directory: " + cur, info.isDirectory()); } return null; } }, threads, "grid-test-check-directories-exist");
/** * Ensure that a DUAL mode file is not propagated to eviction policy * * @throws Exception If failed. */ @Test public void testFileDualExclusion() throws Exception { start(); evictPlc.setExcludePaths(Collections.singleton(FILE_RMT.toString())); // Create file in primary mode. It must not be propagated to eviction policy. igfsPrimary.create(FILE_RMT, true).close(); checkEvictionPolicy(0, 0); int blockSize = igfsPrimary.info(FILE_RMT).blockSize(); append(FILE_RMT, blockSize); checkEvictionPolicy(0, 0); read(FILE_RMT, 0, blockSize); checkEvictionPolicy(0, 0); }
assertTrue(igfs.info(new IgfsPath("/d/f")).isFile()); igfs.info(SUBSUBDIR).properties().get(IgfsUtils.PROP_PERMISSION));
throw fsException("Failed to open output stream to the file created in " + "the secondary file system because it no longer exists: " + path); else if (status.isDirectory()) throw fsException("Failed to open output stream to the file created in " + "the secondary file system because the path points to a directory: " + path); IgniteUuid.randomUuid(), igfsCtx.configuration().getBlockSize(), status.length(), affKey, createFileLockId(false), igfsCtx.igfs().evictExclude(path, false), status.properties(), status.accessTime(), status.modificationTime() );