// Recursively collect all file paths under `directory`, skipping dotfiles
// and the directory's own index module. Returns a Bluebird promise for a
// flat array of paths.
const recursive = directory =>
  fs.readdirAsync(directory)
    // Drop dotfiles and index.js from the listing.
    .filter(entry => entry.indexOf('.') !== 0 && entry !== 'index.js')
    // Directories recurse (yielding nested arrays); plain files pass through.
    .map(entry => {
      const entryPath = path.join(directory, entry);
      return fs.statAsync(entryPath).then(stat =>
        stat.isDirectory() ? recursive(entryPath) : entryPath
      );
    })
    // Flatten the nested arrays produced by recursive calls.
    .reduce((flat, item) => flat.concat(item), [])
/**
 * Get the names of the supported schools.
 * Lists the entries of `schoolsDir` and keeps only sub-directories.
 * @return {Promise<Array<string>>} short names (IDs) of schools
 *   (fixed: the function is async, so it returns a Promise, not a bare Array)
 */
const dataDirectories = async () => {
  const contents = await fs.readdirAsync(schoolsDir)
  // NOTE(review): lstatSync blocks the event loop per entry — fine for a
  // small, fixed set of schools, but consider an async stat if this
  // directory can grow large.
  return contents.filter(name => fs.lstatSync(path.join(schoolsDir, name)).isDirectory())
}
test('does not move a file into an existing directory', function (t) {
  // Lay out a source file alongside an already-existing, empty destination
  // directory, then attempt the move.
  const fixture = new Tacks(Dir({
    src: File('foo'),
    dest: Dir({})
  }))
  fixture.create(testDir)
  return moveFile('src', 'dest')
    .then(() => fs.readdirAsync('dest'))
    .then(files => {
      t.equal(files.length, 0, 'directory remains empty')
    })
})
/**
 * Auto-register every route module found in the `routes` directory.
 * Each module is expected to export a function that accepts the router.
 * @param {Object} router - router instance passed to each route module
 * @returns {Promise<void>} resolves once all route modules are registered
 */
const routes = (router) => {
  console.log('Initializing routes');
  // Fix: list the same directory we require() from. The original read
  // 'routes' relative to the process CWD but required files from
  // path.join(__dirname, 'routes'), which breaks when the app is launched
  // from a different working directory.
  const routesDir = path.join(__dirname, 'routes');
  return fs.readdirAsync(routesDir)
    .then((dir) => {
      dir.forEach((fileName) => {
        // e.g. filepath becomes <app>/routes/sessions.js
        const filepath = path.join(routesDir, fileName);
        const route = require(filepath);
        route(router);
      });
    })
    .catch((err) => {
      // Fix: the original chain had no .catch, leaving any readdir/require
      // failure as an unhandled rejection. Surface it to the caller.
      console.error('Failed to initialize routes', err);
      throw err;
    });
}
/**
 * Remove a directory's content by running rimraf on each entry.
 * Dot files are skipped.
 * @todo test?
 * @param {string} path directory whose entries get removed
 * @return {Promise} Bluebird promise resolving when every rimraf finishes
 */
var removeDirectoryContent = function(path) {
  return fs.readdirAsync(path)
    // Keep only non-dot entries, then delete each one.
    .filter(noDotFiles)
    .map(function(entry) {
      return rimraf(p.resolve(path, entry))
    })
}
/**
 * Recursively list a directory tree using Bluebird promise chaining.
 * Resolves to a flat array of paths that always includes `root` itself.
 * @param {string} root directory to list
 * @param {Object} options {root, maxDepth, filters}
 * @return {Promise<Array<string>>}
 */
var recursiveReaddir = function(root, options) {
  let items = fs.readdirAsync(root)
  // Apply each configured filter to the promised entry list.
  // NOTE(review): for...in walks keys — assumes options.filters is an array
  // or plain object of predicate functions; confirm with callers.
  for(let i in options.filters) items = items.filter(options.filters[i])
  return items.map(function(f) {
    var path = p.join(root, f)
    return fs.statAsync(path)
      .then(function(stat) {
        // Depth of the current directory relative to the original root.
        // replace() strips only the first occurrence of options.root.
        let depth = root.replace(options.root, '').split(p.sep).length
        // Past maxDepth, stop descending and return the path as-is.
        if(depth > options.maxDepth) return path
        if(stat.isDirectory()) {
          // Recurse; yields a nested array that is flattened below.
          return recursiveReaddir(path, options)
        }
        return path
      })
      // Delegate stat/recursion failures to the shared error handler.
      .catch(gracefulCatch(root, path))
  }).then(function(paths) {
    // The root directory itself is part of the result set.
    paths.push(root)
    // Flatten one level of nesting introduced by recursive calls.
    return [].concat.apply([], paths)
  })
}
/**
 * Handles the path argument and returns filtered paths.
 * @param {string|Array} path directory to list, or an array of paths/promises
 * @param {Object} options {recursive: boolean, filters: Array<Function>}
 * @return {Promise<Array>} filtered paths (made relative when recursive)
 * @throws {TypeError} when path is neither a string nor an array
 */
var paths = function(path, options) {
  let items
  // Fix: strict equality instead of loose `==` for the type check.
  if(typeof path === 'string') {
    if(options.recursive === true) {
      // Recursive listing yields absolute paths; rebase them onto `path`.
      return recursiveReaddir(path, options)
        .map(function(e) { return p.relative(path, e) })
    }
    items = fs.readdirAsync(path)
  } else if(Array.isArray(path)) {
    // Array input may contain promises; normalize to one promise of values.
    items = Promise.all(path)
  } else {
    throw new TypeError('Path must be an array or a string')
  }
  // Apply each configured filter to the promised list.
  for(let i in options.filters) {
    items = items.filter(options.filters[i])
  }
  return items
}
.then(function(stat) { if(stat.isDirectory()) { let items = fs.readdirAsync(path)
test('errors if input stream errors', t => {
  let reportedIntegrity
  const putter = put.stream(CACHE, KEY).on('integrity', i => {
    reportedIntegrity = i
  })
  const stream = fromString(false)
  // Piping a non-string source must reject; the first .then only fires if
  // the pipeline unexpectedly succeeds.
  return pipe(stream, putter)
    .then(() => {
      throw new Error('expected error')
    })
    .catch(err => {
      t.ok(err, 'got an error')
      t.ok(!reportedIntegrity, 'no integrity returned')
      t.match(
        err.message,
        /Invalid non-string/,
        'returns the error from input stream'
      )
      return fs.readdirAsync(testDir)
    })
    .then(files => {
      t.deepEqual(files, [], 'no files created')
    })
})
'user informed about the issue' return fs.readdirAsync(path.join(testDir)) }) .then((dir) => {
// List the apps directory, stat every entry, and load each sub-directory as
// an app. A missing directory is tolerated (logged and treated as empty);
// any other readdir error is rethrown.
readdirAsync(path)
  .then(entities => {
    // Pair every entry with its stat result: [fullPath, Stats].
    return Promise.all(
      entities
        .map(it => `${path}/${it}`)
        .map(it => statAsync(it)
          .then(stat => [ it, stat ])
        )
    )
  }, err => {
    // Only ENOENT (directory absent) is recoverable; rethrow anything else.
    if (err.code !== 'ENOENT') {
      throw err
    }
    logger.error(`directory '${path}' doesn't exist, skipping...`)
    return []
  })
  .then(res => {
    // Load every sub-directory as an app; individual load failures are
    // logged but do not abort loading the remaining apps.
    return Promise.all(
      res.filter(it => it[1].isDirectory())
        .map(it => this.loadApp(it[0])
          .catch(err => {
            logger.error('Unexpected error on loading app', it[0], err.stack)
          })
        )
    )
  })
test('rm.all deletes content and index dirs', t => {
  const fixture = new Tacks(CacheContent({
    [INTEGRITY]: CONTENT
  }))
  fixture.create(CACHE)
  // Seed the cache with an index entry plus unrelated files that rm.all
  // must leave untouched.
  return index.insert(CACHE, KEY, INTEGRITY, {
    metadata: METADATA
  })
    .then(() => fs.mkdirAsync(path.join(CACHE, 'tmp')))
    .then(() => fs.writeFileAsync(path.join(CACHE, 'other.js'), 'hi'))
    .then(() => rm.all(CACHE))
    .then(() => fs.readdirAsync(CACHE))
    .then(files => {
      t.deepEqual(files.sort(), [
        'other.js',
        'tmp'
      ], 'removes content and index directories without touching other stuff')
    })
})
test('correct cache location when using cache config', (t) => {
  const fixture = new Tacks(Dir({
    'package.json': File(PKG),
    'package-lock.json': File(RAW_LOCKFILE)
  }))
  // Start from clean cache/test directories, lay down the fixture in both,
  // then run `npm ci` with an explicit --cache location.
  return Promise.all([rimraf(cacheDir), rimraf(testDir)])
    .then(() => fixture.create(cacheDir))
    .then(() => fixture.create(testDir))
    .then(() => common.npm([
      'ci',
      `--cache=${cacheDir}`,
      '--foo=asdf',
      '--registry', common.registry,
      '--loglevel', 'warn'
    ], EXEC_OPTS))
    .then((ret) => {
      const [code, , stderr] = ret
      t.equal(code, 0, 'command completed without error')
      t.equal(stderr.trim(), '', 'no output on stderr')
      return fs.readdirAsync(path.join(cacheDir, '_cacache'))
    })
    .then((modules) => {
      t.ok(modules, 'should create _cacache folder')
      t.end()
    })
})
'user informed about the issue' return fs.readdirAsync(path.join(testDir)) }) .then((dir) => {
// Recursively collect all file paths under `directory`, skipping dotfiles
// and the incoming handler modules. Returns a Bluebird promise for a flat
// array of paths.
const recursive = directory =>
  fs.readdirAsync(directory)
    // Drop dotfiles plus incoming.js / incoming_packet.js from the listing.
    .filter(name =>
      name.indexOf('.') !== 0 &&
      name !== 'incoming.js' &&
      name !== 'incoming_packet.js'
    )
    // Directories recurse (yielding nested arrays); plain files pass through.
    .map(name => {
      const target = path.join(directory, name);
      return fs.statAsync(target).then(stat =>
        stat.isDirectory() ? recursive(target) : target
      );
    })
    // Flatten the nested arrays produced by recursive calls.
    .reduce((flat, entry) => flat.concat(entry), [])