🐛 Update: Added support for the 'find' command in settings.local.json. Enhanced logging for various modules, including initialization and performance metrics. Improved SQLite database optimization and tracking of user interactions and system processes. 📚
20  network-visualization/node_modules/readdirp/LICENSE  (generated, vendored, Normal file)
@@ -0,0 +1,20 @@
This software is released under the MIT license:

Copyright (c) 2012-2015 Thorsten Lorenz

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
204  network-visualization/node_modules/readdirp/README.md  (generated, vendored, Normal file)
@@ -0,0 +1,204 @@
# readdirp [Build Status](http://travis-ci.org/thlorenz/readdirp)

[npm](https://nodei.co/npm/readdirp/)

Recursive version of [fs.readdir](http://nodejs.org/docs/latest/api/fs.html#fs_fs_readdir_path_callback). Exposes a **stream api**.

```javascript
var readdirp = require('readdirp')
  , path = require('path')
  , es = require('event-stream');

// print out all JavaScript files along with their size

var stream = readdirp({ root: path.join(__dirname), fileFilter: '*.js' });
stream
  .on('warn', function (err) {
    console.error('non-fatal error', err);
    // optionally call stream.destroy() here in order to abort and cause 'close' to be emitted
  })
  .on('error', function (err) { console.error('fatal error', err); })
  .pipe(es.mapSync(function (entry) {
    return { path: entry.path, size: entry.stat.size };
  }))
  .pipe(es.stringify())
  .pipe(process.stdout);
```

Meant to be one of the recursive versions of [fs](http://nodejs.org/docs/latest/api/fs.html) functions, e.g., like [mkdirp](https://github.com/substack/node-mkdirp).

**Table of Contents** *generated with [DocToc](http://doctoc.herokuapp.com/)*

- [Installation](#installation)
- [API](#api)
  - [entry stream](#entry-stream)
  - [options](#options)
  - [entry info](#entry-info)
  - [Filters](#filters)
  - [Callback API](#callback-api)
    - [allProcessed](#allprocessed)
    - [fileProcessed](#fileprocessed)
- [More Examples](#more-examples)
  - [stream api](#stream-api)
  - [stream api pipe](#stream-api-pipe)
  - [grep](#grep)
  - [using callback api](#using-callback-api)
  - [tests](#tests)

# Installation

    npm install readdirp

# API

***var entryStream = readdirp (options)***

Reads the given root recursively and returns a `stream` of [entry infos](#entry-info).

## entry stream

Behaves as follows:

- `emit('data')` passes an [entry info](#entry-info) whenever one is found
- `emit('warn')` passes a non-fatal `Error` that prevents a file/directory from being processed (i.e., if it is
  inaccessible to the user)
- `emit('error')` passes a fatal `Error` which also ends the stream (i.e., when illegal options were passed)
- `emit('end')` called when all entries were found and no more will be emitted (i.e., we are done)
- `emit('close')` called when the stream is destroyed via `stream.destroy()` (which could be useful if you want to
  manually abort even on a non-fatal error) - at that point the stream is no longer `readable` and no more entries,
  warnings or errors are emitted
- to learn more about streams, consult the very detailed
  [Node.js streams documentation](http://nodejs.org/api/stream.html) or the
  [stream-handbook](https://github.com/substack/stream-handbook)
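A minimal sketch of how these events fit together, assuming the `./test/bed` layout used in the examples further below:

```javascript
var readdirp = require('readdirp');

var stream = readdirp({ root: './test/bed' });
stream
  .on('data', function (entry) { console.log(entry.path); })   // one entry info per entry found
  .on('warn', function (err) {
    console.error('non-fatal error, aborting', err);
    stream.destroy();                                          // 'close' will fire, 'end' will not
  })
  .on('error', function (err) { console.error('fatal error', err); })
  .on('end',   function ()    { console.log('all entries emitted'); })
  .on('close', function ()    { console.log('stream destroyed'); });
```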
## options

- **root**: path in which to start reading and recursing into subdirectories

- **fileFilter**: filter to include/exclude files found (see [Filters](#filters) for more)

- **directoryFilter**: filter to include/exclude directories found and to recurse into (see [Filters](#filters) for more)

- **depth**: depth at which to stop recursing even if more subdirectories are found

- **entryType**: determines if data events on the stream should be emitted for `'files'`, `'directories'`, `'both'`, or `'all'`. Setting to `'all'` will also include entries for other types of file descriptors like character devices, unix sockets and named pipes. Defaults to `'files'`.

- **lstat**: if `true`, readdirp uses `fs.lstat` instead of `fs.stat` in order to stat files and includes symlink entries in the stream along with files.
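A minimal sketch combining the options above (the path and filter values are only examples):

```javascript
var readdirp = require('readdirp');

readdirp({
    root            : './test/bed'   // start reading here
  , fileFilter      : '*.js'         // only emit JavaScript files
  , directoryFilter : '!.git'        // do not recurse into .git
  , depth           : 2              // stop recursing two levels down
  , entryType       : 'both'         // emit entries for files and directories
  , lstat           : true           // use fs.lstat so symlinks show up as such
})
  .on('data', function (entry) { console.log(entry.path); });
```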
## entry info

Has the following properties:

- **parentDir**: directory in which entry was found (relative to given root)
- **fullParentDir**: full path to parent directory
- **name**: name of the file/directory
- **path**: path to the file/directory (relative to given root)
- **fullPath**: full path to the file/directory found
- **stat**: built-in [stat object](http://nodejs.org/docs/v0.4.9/api/fs.html#fs.Stats)
- **Example**: (assuming root was `/User/dev/readdirp`)

        parentDir     : 'test/bed/root_dir1',
        fullParentDir : '/User/dev/readdirp/test/bed/root_dir1',
        name          : 'root_dir1_subdir1',
        path          : 'test/bed/root_dir1/root_dir1_subdir1',
        fullPath      : '/User/dev/readdirp/test/bed/root_dir1/root_dir1_subdir1',
        stat          : [ ... ]

## Filters

There are three different ways to specify filters for files and directories, respectively.

- **function**: a function that takes an entry info as a parameter and returns `true` to include or `false` to exclude the entry

- **glob string**: a string (e.g., `*.js`) which is matched using [minimatch](https://github.com/isaacs/minimatch), so go there for more
  information.

  Globstars (`**`) are not supported, since specifying a recursive pattern for an already recursive function doesn't make sense.

  Negated globs (as explained in the minimatch documentation) are allowed, e.g., `!*.txt` matches everything but text files.

- **array of glob strings**: either need to be all inclusive or all exclusive (negated) patterns, otherwise an error is thrown.

  `[ '*.json', '*.js' ]` includes all JavaScript and JSON files.

  `[ '!.git', '!node_modules' ]` includes all directories except `.git` and `node_modules`.

Directories that do not pass a filter will not be recursed into.
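A short sketch showing each filter form in use (the paths and filter values are only examples):

```javascript
var readdirp = require('readdirp');

// glob string file filter combined with negated directory filters
readdirp({ root: './test/bed', fileFilter: '*.json', directoryFilter: [ '!.git', '!node_modules' ] })
  .on('data', function (entry) { /* JSON files found outside .git and node_modules */ });

// function filter: include only files larger than 1 kB, based on the entry info's stat
readdirp({ root: './test/bed', fileFilter: function (entry) { return entry.stat.size > 1024; } })
  .on('data', function (entry) { /* files larger than 1 kB */ });
```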
## Callback API

Although the stream api is recommended, readdirp also exposes a callback-based api.

***readdirp (options, callback1 [, callback2])***

If callback2 is given, callback1 functions as the **fileProcessed** callback, and callback2 as the **allProcessed** callback.

If only callback1 is given, it functions as the **allProcessed** callback.

### allProcessed

- function with err and res parameters, e.g., `function (err, res) { ... }`
- **err**: array of errors that occurred during the operation; **res may still be present, even if errors occurred**
- **res**: collection of file/directory [entry infos](#entry-info)

### fileProcessed

- function with an [entry info](#entry-info) parameter, e.g., `function (entryInfo) { ... }`
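The [More Examples](#more-examples) section below shows the two-callback form; the single-callback form, where the only callback acts as **allProcessed**, is sketched here (path and filter are only examples):

```javascript
var readdirp = require('readdirp');

// only one callback given, so it is treated as the allProcessed callback
readdirp({ root: './test/bed', fileFilter: '*.js' }, function (errors, res) {
  if (errors) {
    // errors collected during the operation; res may still be present
    errors.forEach(function (err) { console.error('error', err); });
  }
  if (res) {
    // res.files and res.directories hold the collected entry infos
    console.log('found %d files', res.files.length);
  }
});
```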
# More Examples

`on('error', ..)`, `on('warn', ..)` and `on('end', ..)` handling omitted for brevity

```javascript
var readdirp = require('readdirp');

// Glob file filter
readdirp({ root: './test/bed', fileFilter: '*.js' })
  .on('data', function (entry) {
    // do something with each JavaScript file entry
  });

// Combined glob file filters
readdirp({ root: './test/bed', fileFilter: [ '*.js', '*.json' ] })
  .on('data', function (entry) {
    // do something with each JavaScript and JSON file entry
  });

// Combined negated directory filters
readdirp({ root: './test/bed', directoryFilter: [ '!.git', '!*modules' ] })
  .on('data', function (entry) {
    // do something with each file entry found outside '.git' or any modules directory
  });

// Function directory filter
readdirp({ root: './test/bed', directoryFilter: function (di) { return di.name.length === 9; } })
  .on('data', function (entry) {
    // do something with each file entry found inside directories whose name has length 9
  });

// Limiting depth
readdirp({ root: './test/bed', depth: 1 })
  .on('data', function (entry) {
    // do something with each file entry found up to 1 subdirectory deep
  });

// Callback api
readdirp({ root: '.' }, function (fileInfo) {
  // do something with file entry here
}, function (err, res) {
  // all done, move on or do final step for all file entries here
});
```

Try more examples by following the [instructions](https://github.com/paulmillr/readdirp/blob/master/examples/Readme.md)
on how to get going.

## tests

The [readdirp tests](https://github.com/paulmillr/readdirp/blob/master/test/readdirp.js) will also give you a good idea of how things work.
50  network-visualization/node_modules/readdirp/package.json  (generated, vendored, Normal file)
@@ -0,0 +1,50 @@
{
  "author": "Thorsten Lorenz <thlorenz@gmx.de> (thlorenz.com)",
  "name": "readdirp",
  "description": "Recursive version of fs.readdir with streaming api.",
  "version": "2.2.1",
  "homepage": "https://github.com/paulmillr/readdirp",
  "repository": {
    "type": "git",
    "url": "git://github.com/paulmillr/readdirp.git"
  },
  "engines": {
    "node": ">=0.10"
  },
  "files": [
    "readdirp.js",
    "stream-api.js"
  ],
  "keywords": [
    "recursive",
    "fs",
    "stream",
    "streams",
    "readdir",
    "filesystem",
    "find",
    "filter"
  ],
  "main": "readdirp.js",
  "scripts": {
    "test-main": "(cd test && set -e; for t in ./*.js; do node $t; done)",
    "test-0.10": "nave use 0.10 npm run test-main",
    "test-0.12": "nave use 0.12 npm run test-main",
    "test-4": "nave use 4.4 npm run test-main",
    "test-6": "nave use 6.2 npm run test-main",
    "test-all": "npm run test-main && npm run test-0.10 && npm run test-0.12 && npm run test-4 && npm run test-6",
    "test": "npm run test-main"
  },
  "dependencies": {
    "graceful-fs": "^4.1.11",
    "micromatch": "^3.1.10",
    "readable-stream": "^2.0.2"
  },
  "devDependencies": {
    "nave": "^0.5.1",
    "proxyquire": "^1.7.9",
    "tap": "1.3.2",
    "through2": "^2.0.0"
  },
  "license": "MIT"
}
294  network-visualization/node_modules/readdirp/readdirp.js  (generated, vendored, Normal file)
@@ -0,0 +1,294 @@
'use strict';

var fs = require('graceful-fs')
  , path = require('path')
  , micromatch = require('micromatch').isMatch
  , toString = Object.prototype.toString
  ;


// Standard helpers
function isFunction (obj) {
  return toString.call(obj) === '[object Function]';
}

function isString (obj) {
  return toString.call(obj) === '[object String]';
}

function isUndefined (obj) {
  return obj === void 0;
}

/**
 * Main function which ends up calling readdirRec and reads all files and directories in given root recursively.
 * @param { Object }   opts       Options to specify root (start directory), filters and recursion depth
 * @param { function } callback1  When callback2 is given calls back for each processed file - function (fileInfo) { ... },
 *                                when callback2 is not given, it behaves like explained in callback2
 * @param { function } callback2  Calls back once all files have been processed with an array of errors and file infos
 *                                function (err, fileInfos) { ... }
 */
function readdir(opts, callback1, callback2) {
  var stream
    , handleError
    , handleFatalError
    , errors = []
    , readdirResult = {
        directories: []
      , files: []
      }
    , fileProcessed
    , allProcessed
    , realRoot
    , aborted = false
    , paused = false
    ;

  // If no callbacks were given we will use a streaming interface
  if (isUndefined(callback1)) {
    var api          = require('./stream-api')();
    stream           = api.stream;
    callback1        = api.processEntry;
    callback2        = api.done;
    handleError      = api.handleError;
    handleFatalError = api.handleFatalError;

    stream.on('close', function () { aborted = true; });
    stream.on('pause', function () { paused = true; });
    stream.on('resume', function () { paused = false; });
  } else {
    handleError      = function (err) { errors.push(err); };
    handleFatalError = function (err) {
      handleError(err);
      allProcessed(errors, null);
    };
  }

  if (isUndefined(opts)){
    handleFatalError(new Error (
      'Need to pass at least one argument: opts! \n' +
      'https://github.com/paulmillr/readdirp#options'
      )
    );
    return stream;
  }

  opts.root            = opts.root            || '.';
  opts.fileFilter      = opts.fileFilter      || function() { return true; };
  opts.directoryFilter = opts.directoryFilter || function() { return true; };
  opts.depth           = typeof opts.depth === 'undefined' ? 999999999 : opts.depth;
  opts.entryType       = opts.entryType       || 'files';

  var statfn = opts.lstat === true ? fs.lstat.bind(fs) : fs.stat.bind(fs);

  if (isUndefined(callback2)) {
    fileProcessed = function() { };
    allProcessed = callback1;
  } else {
    fileProcessed = callback1;
    allProcessed = callback2;
  }

  function normalizeFilter (filter) {

    if (isUndefined(filter)) return undefined;

    function isNegated (filters) {

      function negated(f) {
        return f.indexOf('!') === 0;
      }

      var some = filters.some(negated);
      if (!some) {
        return false;
      } else {
        if (filters.every(negated)) {
          return true;
        } else {
          // if we detect illegal filters, bail out immediately
          throw new Error(
            'Cannot mix negated with non negated glob filters: ' + filters + '\n' +
            'https://github.com/paulmillr/readdirp#filters'
          );
        }
      }
    }

    // Turn all filters into a function
    if (isFunction(filter)) {

      return filter;

    } else if (isString(filter)) {

      return function (entryInfo) {
        return micromatch(entryInfo.name, filter.trim());
      };

    } else if (filter && Array.isArray(filter)) {

      if (filter) filter = filter.map(function (f) {
        return f.trim();
      });

      return isNegated(filter) ?
        // use AND to concat multiple negated filters
        function (entryInfo) {
          return filter.every(function (f) {
            return micromatch(entryInfo.name, f);
          });
        }
        :
        // use OR to concat multiple inclusive filters
        function (entryInfo) {
          return filter.some(function (f) {
            return micromatch(entryInfo.name, f);
          });
        };
    }
  }

  function processDir(currentDir, entries, callProcessed) {
    if (aborted) return;
    var total = entries.length
      , processed = 0
      , entryInfos = []
      ;

    fs.realpath(currentDir, function(err, realCurrentDir) {
      if (aborted) return;
      if (err) {
        handleError(err);
        callProcessed(entryInfos);
        return;
      }

      var relDir = path.relative(realRoot, realCurrentDir);

      if (entries.length === 0) {
        callProcessed([]);
      } else {
        entries.forEach(function (entry) {

          var fullPath = path.join(realCurrentDir, entry)
            , relPath  = path.join(relDir, entry);

          statfn(fullPath, function (err, stat) {
            if (err) {
              handleError(err);
            } else {
              entryInfos.push({
                  name          : entry
                , path          : relPath        // relative to root
                , fullPath      : fullPath

                , parentDir     : relDir         // relative to root
                , fullParentDir : realCurrentDir

                , stat          : stat
              });
            }
            processed++;
            if (processed === total) callProcessed(entryInfos);
          });
        });
      }
    });
  }

  function readdirRec(currentDir, depth, callCurrentDirProcessed) {
    var args = arguments;
    if (aborted) return;
    if (paused) {
      setImmediate(function () {
        readdirRec.apply(null, args);
      })
      return;
    }

    fs.readdir(currentDir, function (err, entries) {
      if (err) {
        handleError(err);
        callCurrentDirProcessed();
        return;
      }

      processDir(currentDir, entries, function(entryInfos) {

        var subdirs = entryInfos
          .filter(function (ei) { return ei.stat.isDirectory() && opts.directoryFilter(ei); });

        subdirs.forEach(function (di) {
          if(opts.entryType === 'directories' || opts.entryType === 'both' || opts.entryType === 'all') {
            fileProcessed(di);
          }
          readdirResult.directories.push(di);
        });

        entryInfos
          .filter(function(ei) {
            var isCorrectType = opts.entryType === 'all' ?
              !ei.stat.isDirectory() : ei.stat.isFile() || ei.stat.isSymbolicLink();
            return isCorrectType && opts.fileFilter(ei);
          })
          .forEach(function (fi) {
            if(opts.entryType === 'files' || opts.entryType === 'both' || opts.entryType === 'all') {
              fileProcessed(fi);
            }
            readdirResult.files.push(fi);
          });

        var pendingSubdirs = subdirs.length;

        // Be done if no more subfolders exist or we reached the maximum desired depth
        if(pendingSubdirs === 0 || depth === opts.depth) {
          callCurrentDirProcessed();
        } else {
          // recurse into subdirs, keeping track of which ones are done
          // and call back once all are processed
          subdirs.forEach(function (subdir) {
            readdirRec(subdir.fullPath, depth + 1, function () {
              pendingSubdirs = pendingSubdirs - 1;
              if(pendingSubdirs === 0) {
                callCurrentDirProcessed();
              }
            });
          });
        }
      });
    });
  }

  // Validate and normalize filters
  try {
    opts.fileFilter = normalizeFilter(opts.fileFilter);
    opts.directoryFilter = normalizeFilter(opts.directoryFilter);
  } catch (err) {
    // if we detect illegal filters, bail out immediately
    handleFatalError(err);
    return stream;
  }

  // If filters were valid get on with the show
  fs.realpath(opts.root, function(err, res) {
    if (err) {
      handleFatalError(err);
      return stream;
    }

    realRoot = res;
    readdirRec(opts.root, 0, function () {
      // All errors are collected into the errors array
      if (errors.length > 0) {
        allProcessed(errors, readdirResult);
      } else {
        allProcessed(null, readdirResult);
      }
    });
  });

  return stream;
}

module.exports = readdir;
98  network-visualization/node_modules/readdirp/stream-api.js  (generated, vendored, Normal file)
@@ -0,0 +1,98 @@
'use strict';

var stream = require('readable-stream');
var util = require('util');

var Readable = stream.Readable;

module.exports = ReaddirpReadable;

util.inherits(ReaddirpReadable, Readable);

function ReaddirpReadable (opts) {
  if (!(this instanceof ReaddirpReadable)) return new ReaddirpReadable(opts);

  opts = opts || {};

  opts.objectMode = true;
  Readable.call(this, opts);

  // backpressure not implemented at this point
  this.highWaterMark = Infinity;

  this._destroyed = false;
  this._paused = false;
  this._warnings = [];
  this._errors = [];

  this._pauseResumeErrors();
}

var proto = ReaddirpReadable.prototype;

proto._pauseResumeErrors = function () {
  var self = this;
  self.on('pause', function () { self._paused = true });
  self.on('resume', function () {
    if (self._destroyed) return;
    self._paused = false;

    self._warnings.forEach(function (err) { self.emit('warn', err) });
    self._warnings.length = 0;

    self._errors.forEach(function (err) { self.emit('error', err) });
    self._errors.length = 0;
  })
}

// called for each entry
proto._processEntry = function (entry) {
  if (this._destroyed) return;
  this.push(entry);
}

proto._read = function () { }

proto.destroy = function () {
  // when stream is destroyed it will emit nothing further, not even errors or warnings
  this.push(null);
  this.readable = false;
  this._destroyed = true;
  this.emit('close');
}

proto._done = function () {
  this.push(null);
}

// we emit errors and warnings async since we may handle errors like invalid args
// within the initial event loop before any event listeners subscribed
proto._handleError = function (err) {
  var self = this;
  setImmediate(function () {
    if (self._paused) return self._warnings.push(err);
    if (!self._destroyed) self.emit('warn', err);
  });
}

proto._handleFatalError = function (err) {
  var self = this;
  setImmediate(function () {
    if (self._paused) return self._errors.push(err);
    if (!self._destroyed) self.emit('error', err);
  });
}

function createStreamAPI () {
  var stream = new ReaddirpReadable();

  return {
      stream           : stream
    , processEntry     : stream._processEntry.bind(stream)
    , done             : stream._done.bind(stream)
    , handleError      : stream._handleError.bind(stream)
    , handleFatalError : stream._handleFatalError.bind(stream)
  };
}

module.exports = createStreamAPI;