publishing v1 of action

Paul Merlin
2019-09-21 16:11:55 +02:00
parent 91baa89272
commit 2d5ca45eab
569 changed files with 61688 additions and 2 deletions

13
node_modules/unzipper/.circleci/config.yml generated vendored Normal file

@@ -0,0 +1,13 @@
version: 2
jobs:
build:
docker:
- image: circleci/node:10.15
working_directory: ~/build
steps:
- checkout
- run: npm install
- run: npm t
- store_artifacts:
path: coverage/lcov-report
destination: coverage

1
node_modules/unzipper/.gitattributes generated vendored Normal file

@@ -0,0 +1 @@
text eol=lf

14
node_modules/unzipper/.travis.yml generated vendored Normal file

@@ -0,0 +1,14 @@
language: node_js
node_js:
- "10"
- "8"
- "7"
- "6"
- "5"
- "4"
- "0.11"
- "0.10"
- "0.12"
before_install:
- if [[ `npm -v` < 5 ]]; then npm cache clean; fi
- if [[ `npm -v` != 3* ]]; then npm i -g npm@3; fi

25
node_modules/unzipper/LICENSE generated vendored Normal file

@@ -0,0 +1,25 @@
Copyright (c) 2012 - 2013 Near Infinity Corporation
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
---
Commits in this fork are (c) Ziggy Jonsson (ziggy.jonsson.nyc@gmail.com)
and fall under same licence structure as the original repo (MIT)

316
node_modules/unzipper/README.md generated vendored Normal file

@@ -0,0 +1,316 @@
[![NPM Version][npm-image]][npm-url]
[![NPM Downloads][downloads-image]][downloads-url]
[![Build Status][travis-image]][travis-url]
[![Coverage][coverage-image]][coverage-url]
[npm-image]: https://img.shields.io/npm/v/unzipper.svg
[npm-url]: https://npmjs.org/package/unzipper
[travis-image]: https://api.travis-ci.org/ZJONSSON/node-unzipper.png?branch=master
[travis-url]: https://travis-ci.org/ZJONSSON/node-unzipper?branch=master
[downloads-image]: https://img.shields.io/npm/dm/unzipper.svg
[downloads-url]: https://npmjs.org/package/unzipper
[coverage-image]: https://3tjjj5abqi.execute-api.us-east-1.amazonaws.com/prod/node-unzipper/badge
[coverage-url]: https://3tjjj5abqi.execute-api.us-east-1.amazonaws.com/prod/node-unzipper/url
# unzipper
This is an active fork of and drop-in replacement for [node-unzip](https://github.com/EvanOxfeld/node-unzip) that addresses the following issues:
* finish/close events are not always triggered, particularly when the input stream is slower than the receiver
* all files are buffered into memory before being passed on as entries
The structure of this fork is similar to the original, but it uses Promises and the inherent guarantees provided by node streams to ensure a low memory footprint, and it emits finish/close events at the end of processing. The new `Parser` will push any parsed `entries` downstream if you pipe from it, while still supporting the legacy `entry` event.
Breaking change: the new `Parser` will not automatically drain entries if there are no listeners or pipes in place.
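For example, a minimal consumer now has to explicitly drain every entry it does not use (autodrain is described below):
```js
fs.createReadStream('path/to/archive.zip')
.pipe(unzipper.Parse())
.on('entry', entry => entry.autodrain());
```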
Unzipper provides simple APIs similar to [node-tar](https://github.com/isaacs/node-tar) for parsing and extracting zip files.
There are no added compiled dependencies - inflation is handled by node.js's built-in zlib support.
Please note: methods that use the Central Directory instead of parsing the entire file can be found under [`Open`](#open).
Chrome extension files (.crx) are zipfiles with an [extra header](http://www.adambarth.com/experimental/crx/docs/crx.html) at the start of the file. Unzipper will parse .crx files with the streaming methods (`Parse` and `ParseOne`). The `Open` methods will check for `crx` headers and parse crx files, but only if you provide `crx: true` in the options.
## Installation
```bash
$ npm install unzipper
```
## Quick Examples
### Extract to a directory
```js
fs.createReadStream('path/to/archive.zip')
.pipe(unzipper.Extract({ path: 'output/path' }));
```
Extract emits the 'close' event once the zip's contents have been fully extracted to disk. `Extract` uses [fstream.Writer](https://www.npmjs.com/package/fstream) and therefore needs an absolute path to the destination directory. This directory will be automatically created if it doesn't already exist.
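For example, to continue only after extraction has finished:
```js
fs.createReadStream('path/to/archive.zip')
.pipe(unzipper.Extract({ path: 'output/path' }))
.on('close', () => console.log('extraction complete'));
```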
### Parse zip file contents
Process each zip file entry or pipe entries to another stream.
__Important__: If you do not intend to consume an entry stream's raw data, call autodrain() to dispose of the entry's
contents. Otherwise the stream will halt. `.autodrain()` returns an empty stream that provides `error` and `finish` events.
Additionally, you can call `.autodrain().promise()` to get a promise that settles with the success or failure of the autodrain.
```js
// If you want to handle autodrain errors you can either:
entry.autodrain().promise().catch(handleError);
// or
entry.autodrain().on('error', handleError);
```
Here is a quick example:
```js
fs.createReadStream('path/to/archive.zip')
.pipe(unzipper.Parse())
.on('entry', function (entry) {
const fileName = entry.path;
const type = entry.type; // 'Directory' or 'File'
const size = entry.vars.uncompressedSize; // There is also compressedSize;
if (fileName === "this IS the file I'm looking for") {
entry.pipe(fs.createWriteStream('output/path'));
} else {
entry.autodrain();
}
});
```
### Parse zip by piping entries downstream
If you `pipe` from unzipper, the downstream components will receive each `entry` for further processing. This allows for clean pipelines transforming zipfiles into unzipped data.
Example using `stream.Transform`:
```js
fs.createReadStream('path/to/archive.zip')
.pipe(unzipper.Parse())
.pipe(stream.Transform({
objectMode: true,
transform: function(entry,e,cb) {
const fileName = entry.path;
const type = entry.type; // 'Directory' or 'File'
const size = entry.vars.uncompressedSize; // There is also compressedSize;
if (fileName === "this IS the file I'm looking for") {
entry.pipe(fs.createWriteStream('output/path'))
.on('finish',cb);
} else {
entry.autodrain();
cb();
}
}
}));
```
Example using [etl](https://www.npmjs.com/package/etl):
```js
fs.createReadStream('path/to/archive.zip')
.pipe(unzipper.Parse())
.pipe(etl.map(entry => {
if (entry.path == "this IS the file I'm looking for")
return entry
.pipe(etl.toFile('output/path'))
.promise();
else
entry.autodrain();
}))
```
### Parse a single file and pipe contents
`unzipper.ParseOne([regex])` is a convenience method that unzips only one file from the archive and pipes its contents down (not the entry itself). If no search criteria is specified, the first file in the archive will be unzipped. Otherwise, each filename will be compared to the criteria and the first one to match will be unzipped and piped down. If no file matches, the stream will end without any content.
Example:
```js
fs.createReadStream('path/to/archive.zip')
.pipe(unzipper.ParseOne())
.pipe(fs.createWriteStream('firstFile.txt'));
```
### Buffering the content of an entry into memory
While the recommended strategy for consuming the unzipped contents is to use streams, it is sometimes convenient to get the full buffered contents of each file. Each `entry` provides a `.buffer` function that consumes the entry by buffering the contents into memory and returning a promise for the complete buffer.
```js
fs.createReadStream('path/to/archive.zip')
.pipe(unzipper.Parse())
.pipe(etl.map(async entry => {
if (entry.path == "this IS the file I'm looking for") {
const content = await entry.buffer();
await fs.promises.writeFile('output/path', content);
}
else {
entry.autodrain();
}
}))
```
### Parse.promise() syntax sugar
The parser emits `finish` and `error` events like any other stream. The parser additionally provides a promise wrapper around those two events to allow easy folding into existing Promise-based structures.
Example:
```js
fs.createReadStream('path/to/archive.zip')
.pipe(unzipper.Parse())
.on('entry', entry => entry.autodrain())
.promise()
.then( () => console.log('done'), e => console.log('error',e));
```
### Parse zip created by DOS ZIP or Windows ZIP Folders
Archives created by legacy tools usually have filenames encoded with the IBM PC (Windows OEM) character set.
You can decode such filenames with your preferred character set:
```js
const il = require('iconv-lite');
fs.createReadStream('path/to/archive.zip')
.pipe(unzipper.Parse())
.on('entry', function (entry) {
// if the legacy zip tool followed the ZIP spec then this flag will be set
const isUnicode = entry.props.flags.isUnicode;
// decode "non-unicode" filename from OEM Cyrillic character set
const fileName = isUnicode ? entry.path : il.decode(entry.props.pathBuffer, 'cp866');
const type = entry.type; // 'Directory' or 'File'
const size = entry.vars.uncompressedSize; // There is also compressedSize;
if (fileName === "Текстовый файл.txt") {
entry.pipe(fs.createWriteStream(fileName));
} else {
entry.autodrain();
}
});
```
## Open
The previous methods rely on the entire zipfile being received through a pipe. The Open methods take a different approach: they load the central directory first (located at the end of the zipfile) and provide the ability to pick and choose which files to extract, even extracting them in parallel. The Open methods return a promise on the contents of the directory, with the individual `files` listed in an array. Each file element has the following methods:
* `stream([password])` - returns a stream of the unzipped content which can be piped to any destination
* `buffer([password])` - returns a promise on the buffered content of the file
If the file is encrypted you will have to supply a password to decrypt it; otherwise you can leave it blank.
Unlike `adm-zip` the Open methods will never read the entire zipfile into buffer.
The last argument is an optional `options` object where you can specify `tailSize` (default 80 bytes), i.e. how many bytes to read at the end of the zipfile to locate the endOfCentralDirectory. This location can vary depending on the zip64 extensible data sector size. Additionally, you can supply the option `crx: true`, which will check for a crx header and parse the file accordingly by shifting all file offsets by the length of the crx header.
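A minimal sketch combining these options (the archive name, password and `tailSize` value here are placeholders):
```js
async function main() {
const directory = await unzipper.Open.file('path/to/extension.crx', { crx: true, tailSize: 1024 });
// a password is only needed if the entry is encrypted
const content = await directory.files[0].buffer('secret');
console.log(content.length);
}
main();
```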
### Open.file([path], [options])
Returns a Promise to the central directory information with methods to extract individual files. `start` and `end` options are used to avoid reading the whole file.
Example:
```js
async function main() {
const directory = await unzipper.Open.file('path/to/archive.zip');
console.log('directory', directory);
return new Promise( (resolve, reject) => {
directory.files[0]
.stream()
.pipe(fs.createWriteStream('firstFile'))
.on('error',reject)
.on('finish',resolve)
});
}
main();
```
### Open.url([requestLibrary], [url | params], [options])
This function will return a Promise to the central directory information from a URL pointing to a zipfile. Range headers are used to avoid reading the whole file. Unzipper does not ship with a request library, so you will have to provide one as the first argument.
Live Example: (extracts a tiny xml file from the middle of a 500MB zipfile)
```js
const request = require('request');
const unzipper = require('unzipper');
async function main() {
const directory = await unzipper.Open.url(request,'http://www2.census.gov/geo/tiger/TIGER2015/ZCTA5/tl_2015_us_zcta510.zip');
const file = directory.files.find(d => d.path === 'tl_2015_us_zcta510.shp.iso.xml');
const content = await file.buffer();
console.log(content.toString());
}
main();
```
This function takes a second parameter which can either be a string containing the `url` to request, or an `options` object to invoke the supplied `request` library with. This can be used when other request options are required, such as custom headers or authentication to a third party service.
```js
const request = require('google-oauth-jwt').requestWithJWT();
const googleStorageOptions = {
url: `https://www.googleapis.com/storage/v1/b/m-bucket-name/o/my-object-name`,
qs: { alt: 'media' },
jwt: {
email: google.storage.credentials.client_email,
key: google.storage.credentials.private_key,
scopes: ['https://www.googleapis.com/auth/devstorage.read_only']
}
};
async function getFile(req, res, next) {
const directory = await unzipper.Open.url(request, googleStorageOptions);
const file = directory.files.find((file) => file.path === 'my-filename');
return file.stream().pipe(res);
}
```
### Open.s3([aws-sdk], [params], [options])
This function will return a Promise to the central directory information from a zipfile on S3. Range headers are used to avoid reading the whole file. Unzipper does not ship with the aws-sdk, so you have to provide an instantiated client as the first argument. The params object requires `Bucket` and `Key` to fetch the correct file.
Example:
```js
const unzipper = require('unzipper');
const AWS = require('aws-sdk');
const s3Client = new AWS.S3(config);
async function main() {
const directory = await unzipper.Open.s3(s3Client,{Bucket: 'unzipper', Key: 'archive.zip'});
return new Promise( (resolve, reject) => {
directory.files[0]
.stream()
.pipe(fs.createWriteStream('firstFile'))
.on('error',reject)
.on('finish',resolve)
});
}
main();
```
### Open.buffer(buffer, [options])
If you already have the zip file in-memory as a buffer, you can open the contents directly.
Example:
```js
// never use readFileSync - only used here to simplify the example
const buffer = fs.readFileSync('path/to/archive.zip');
async function main() {
const directory = await unzipper.Open.buffer(buffer);
console.log('directory',directory);
// ...
}
main();
```
### Open.[method].extract()
The directory object returned from `Open.[method]` provides an `extract` method which extracts all the files to a specified `path`, with an optional `concurrency` (default: 1).
Example (with concurrency of 5):
```js
unzipper.Open.file('path/to/archive.zip')
.then(d => d.extract({path: '/extraction/path', concurrency: 5}));
```
## Licenses
See LICENSE

12
node_modules/unzipper/lib/Buffer.js generated vendored Normal file

@@ -0,0 +1,12 @@
var Buffer = require('buffer').Buffer;
// Backwards compatibility for node versions < 8
if (Buffer.from === undefined) {
Buffer.from = function (a, b, c) {
return new Buffer(a, b, c)
};
Buffer.alloc = Buffer.from;
}
module.exports = Buffer;

25
node_modules/unzipper/lib/BufferStream.js generated vendored Normal file

@@ -0,0 +1,25 @@
var Promise = require('bluebird');
var Stream = require('stream');
var Buffer = require('./Buffer');
// Backwards compatibility for node versions < 8
if (!Stream.Writable || !Stream.Writable.prototype.destroy)
Stream = require('readable-stream');
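// Consume an entry stream, concatenating its chunks, and resolve with the complete Buffer once the stream finishes.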
module.exports = function(entry) {
return new Promise(function(resolve,reject) {
var buffer = Buffer.from(''),
bufferStream = Stream.Transform()
.on('finish',function() {
resolve(buffer);
})
.on('error',reject);
bufferStream._transform = function(d,e,cb) {
buffer = Buffer.concat([buffer,d]);
cb();
};
entry.on('error',reject)
.pipe(bufferStream);
});
};

72
node_modules/unzipper/lib/Decrypt.js generated vendored Normal file

@@ -0,0 +1,72 @@
var bigInt = require('big-integer');
var Stream = require('stream');
// Backwards compatibility for node versions < 8
if (!Stream.Writable || !Stream.Writable.prototype.destroy)
Stream = require('readable-stream');
var table;
function generateTable() {
var poly = 0xEDB88320,c,n,k;
table = [];
for (n = 0; n < 256; n++) {
c = n;
for (k = 0; k < 8; k++)
c = (c & 1) ? poly ^ (c >>> 1) : c >>> 1;
table[n] = c >>> 0;
}
}
function crc(ch,crc) {
if (!table)
generateTable();
if (ch.charCodeAt)
ch = ch.charCodeAt(0);
return (bigInt(crc).shiftRight(8).and(0xffffff)).xor(table[bigInt(crc).xor(ch).and(0xff)]).value;
}
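// Traditional PKWARE ("ZipCrypto") decryption; the initial keys below are the
// spec constants 0x12345678, 0x23456789 and 0x34567890.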
function Decrypt() {
if (!(this instanceof Decrypt))
return new Decrypt();
this.key0 = 305419896;
this.key1 = 591751049;
this.key2 = 878082192;
}
Decrypt.prototype.update = function(h) {
this.key0 = crc(h,this.key0);
this.key1 = bigInt(this.key0).and(255).and(4294967295).add(this.key1)
this.key1 = bigInt(this.key1).multiply(134775813).add(1).and(4294967295).value;
this.key2 = crc(bigInt(this.key1).shiftRight(24).and(255), this.key2);
}
Decrypt.prototype.decryptByte = function(c) {
var k = bigInt(this.key2).or(2);
c = c ^ bigInt(k).multiply(bigInt(k^1)).shiftRight(8).and(255);
this.update(c);
return c;
};
Decrypt.prototype.stream = function() {
var stream = Stream.Transform(),
self = this;
stream._transform = function(d,e,cb) {
for (var i = 0; i<d.length;i++) {
d[i] = self.decryptByte(d[i]);
}
this.push(d);
cb();
};
return stream;
};
module.exports = Decrypt;

19
node_modules/unzipper/lib/NoopStream.js generated vendored Normal file

@@ -0,0 +1,19 @@
var Stream = require('stream');
var util = require('util');
// Backwards compatibility for node versions < 8
if (!Stream.Writable || !Stream.Writable.prototype.destroy)
Stream = require('readable-stream');
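// A Transform that discards everything written to it; entry.autodrain() pipes entries here.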
function NoopStream() {
if (!(this instanceof NoopStream)) {
return new NoopStream();
}
Stream.Transform.call(this);
}
util.inherits(NoopStream,Stream.Transform);
NoopStream.prototype._transform = function(d,e,cb) { cb() ;};
module.exports = NoopStream;

224
node_modules/unzipper/lib/Open/directory.js generated vendored Normal file

@@ -0,0 +1,224 @@
var binary = require('binary');
var PullStream = require('../PullStream');
var unzip = require('./unzip');
var Promise = require('bluebird');
var BufferStream = require('../BufferStream');
var parseExtraField = require('../parseExtraField');
var Buffer = require('../Buffer');
var path = require('path');
var Writer = require('fstream').Writer;
var parseDateTime = require('../parseDateTime');
var signature = Buffer.alloc(4);
signature.writeUInt32LE(0x06054b50,0);
function getCrxHeader(source) {
var sourceStream = source.stream(0).pipe(PullStream());
return sourceStream.pull(4).then(function(data) {
var signature = data.readUInt32LE(0);
if (signature === 0x34327243) {
var crxHeader;
return sourceStream.pull(12).then(function(data) {
crxHeader = binary.parse(data)
.word32lu('version')
.word32lu('pubKeyLength')
.word32lu('signatureLength')
.vars;
}).then(function() {
return sourceStream.pull(crxHeader.pubKeyLength +crxHeader.signatureLength);
}).then(function(data) {
crxHeader.publicKey = data.slice(0,crxHeader.pubKeyLength);
crxHeader.signature = data.slice(crxHeader.pubKeyLength);
crxHeader.size = 16 + crxHeader.pubKeyLength +crxHeader.signatureLength;
return crxHeader;
});
}
});
}
// Zip64 File Format Notes: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
function getZip64CentralDirectory(source, zip64CDL) {
var d64loc = binary.parse(zip64CDL)
.word32lu('signature')
.word32lu('diskNumber')
.word64lu('offsetToStartOfCentralDirectory')
.word32lu('numberOfDisks')
.vars;
if (d64loc.signature != 0x07064b50) {
throw new Error('invalid zip64 end of central dir locator signature (0x07064b50): 0x' + d64loc.signature.toString(16));
}
var dir64 = PullStream();
source.stream(d64loc.offsetToStartOfCentralDirectory).pipe(dir64);
return dir64.pull(56)
}
// Zip64 File Format Notes: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
function parseZip64DirRecord (dir64record) {
var vars = binary.parse(dir64record)
.word32lu('signature')
.word64lu('sizeOfCentralDirectory')
.word16lu('version')
.word16lu('versionsNeededToExtract')
.word32lu('diskNumber')
.word32lu('diskStart')
.word64lu('numberOfRecordsOnDisk')
.word64lu('numberOfRecords')
.word64lu('sizeOfCentralDirectory')
.word64lu('offsetToStartOfCentralDirectory')
.vars;
if (vars.signature != 0x06064b50) {
throw new Error('invalid zip64 end of central dir locator signature (0x06064b50): 0x0' + vars.signature.toString(16));
}
return vars
}
module.exports = function centralDirectory(source, options) {
var endDir = PullStream(),
records = PullStream(),
tailSize = (options && options.tailSize) || 80,
sourceSize,
crxHeader,
startOffset,
vars;
if (options && options.crx)
crxHeader = getCrxHeader(source);
return source.size()
.then(function(size) {
sourceSize = size;
source.stream(Math.max(0,size-tailSize))
.on('error', function (error) { endDir.emit('error', error) })
.pipe(endDir);
return endDir.pull(signature);
})
.then(function() {
return Promise.props({directory: endDir.pull(22), crxHeader: crxHeader});
})
.then(function(d) {
var data = d.directory;
startOffset = d.crxHeader && d.crxHeader.size || 0;
vars = binary.parse(data)
.word32lu('signature')
.word16lu('diskNumber')
.word16lu('diskStart')
.word16lu('numberOfRecordsOnDisk')
.word16lu('numberOfRecords')
.word32lu('sizeOfCentralDirectory')
.word32lu('offsetToStartOfCentralDirectory')
.word16lu('commentLength')
.vars;
// Is this zip file using zip64 format? Use same check as Go:
// https://github.com/golang/go/blob/master/src/archive/zip/reader.go#L503
// For zip64 files, need to find zip64 central directory locator header to extract
// relative offset for zip64 central directory record.
if (vars.numberOfRecords == 0xffff || vars.sizeOfCentralDirectory == 0xffffffff ||
vars.offsetToStartOfCentralDirectory == 0xffffffff) {
// Offset to zip64 CDL is 20 bytes before normal CDR
const zip64CDLSize = 20
const zip64CDLOffset = sourceSize - (tailSize - endDir.match + zip64CDLSize)
const zip64CDLStream = PullStream();
source.stream(zip64CDLOffset).pipe(zip64CDLStream);
return zip64CDLStream.pull(zip64CDLSize)
.then(function (d) { return getZip64CentralDirectory(source, d) })
.then(function (dir64record) {
vars = parseZip64DirRecord(dir64record)
})
} else {
vars.offsetToStartOfCentralDirectory += startOffset;
}
})
.then(function() {
source.stream(vars.offsetToStartOfCentralDirectory).pipe(records);
vars.extract = function(opts) {
if (!opts || !opts.path) throw new Error('PATH_MISSING');
return vars.files.then(function(files) {
return Promise.map(files, function(entry) {
if (entry.type == 'Directory') return;
// to avoid zip slip (writing outside of the destination), we resolve
// the target path, and make sure it's nested in the intended
// destination, or not extract it otherwise.
var extractPath = path.join(opts.path, entry.path);
if (extractPath.indexOf(opts.path) != 0) {
return;
}
var writer = opts.getWriter ? opts.getWriter({path: extractPath}) : Writer({ path: extractPath });
return new Promise(function(resolve, reject) {
entry.stream(opts.password)
.on('error',reject)
.pipe(writer)
.on('close',resolve)
.on('error',reject);
});
}, opts.concurrency > 1 ? {concurrency: opts.concurrency || undefined} : undefined);
});
};
vars.files = Promise.mapSeries(Array(vars.numberOfRecords),function() {
return records.pull(46).then(function(data) {
var vars = binary.parse(data)
.word32lu('signature')
.word16lu('versionMadeBy')
.word16lu('versionsNeededToExtract')
.word16lu('flags')
.word16lu('compressionMethod')
.word16lu('lastModifiedTime')
.word16lu('lastModifiedDate')
.word32lu('crc32')
.word32lu('compressedSize')
.word32lu('uncompressedSize')
.word16lu('fileNameLength')
.word16lu('extraFieldLength')
.word16lu('fileCommentLength')
.word16lu('diskNumber')
.word16lu('internalFileAttributes')
.word32lu('externalFileAttributes')
.word32lu('offsetToLocalFileHeader')
.vars;
vars.offsetToLocalFileHeader += startOffset;
vars.lastModifiedDateTime = parseDateTime(vars.lastModifiedDate, vars.lastModifiedTime);
return records.pull(vars.fileNameLength).then(function(fileNameBuffer) {
vars.pathBuffer = fileNameBuffer;
vars.path = fileNameBuffer.toString('utf8');
vars.isUnicode = (vars.flags & 0x800) !== 0; // general purpose bit 11: UTF-8 encoded filename
return records.pull(vars.extraFieldLength);
})
.then(function(extraField) {
vars.extra = parseExtraField(extraField, vars);
return records.pull(vars.fileCommentLength);
})
.then(function(comment) {
vars.comment = comment;
vars.type = (vars.uncompressedSize === 0 && /[\/\\]$/.test(vars.path)) ? 'Directory' : 'File';
vars.stream = function(_password) {
return unzip(source, vars.offsetToLocalFileHeader,_password, vars);
};
vars.buffer = function(_password) {
return BufferStream(vars.stream(_password));
};
return vars;
});
});
});
return Promise.props(vars);
});
};

97
node_modules/unzipper/lib/Open/index.js generated vendored Normal file

@@ -0,0 +1,97 @@
var fs = require('graceful-fs');
var Promise = require('bluebird');
var directory = require('./directory');
var Stream = require('stream');
// Backwards compatibility for node versions < 8
if (!Stream.Writable || !Stream.Writable.prototype.destroy)
Stream = require('readable-stream');
module.exports = {
buffer: function(buffer, options) {
var source = {
stream: function(offset, length) {
var stream = Stream.PassThrough();
stream.end(buffer.slice(offset, length));
return stream;
},
size: function() {
return Promise.resolve(buffer.length);
}
};
return directory(source, options);
},
file: function(filename, options) {
var source = {
stream: function(offset,length) {
return fs.createReadStream(filename,{start: offset, end: length && offset+length});
},
size: function() {
return new Promise(function(resolve,reject) {
fs.stat(filename,function(err,d) {
if (err)
reject(err);
else
resolve(d.size);
});
});
}
};
return directory(source, options);
},
url: function(request, params, options) {
if (typeof params === 'string')
params = {url: params};
if (!params.url)
throw 'URL missing';
params.headers = params.headers || {};
var source = {
stream : function(offset,length) {
var options = Object.create(params);
options.headers = Object.create(params.headers);
options.headers.range = 'bytes='+offset+'-' + (length ? length : '');
return request(options);
},
size: function() {
return new Promise(function(resolve,reject) {
var req = request(params);
req.on('response',function(d) {
req.abort();
if (!d.headers['content-length'])
reject(new Error('Missing content length header'));
else
resolve(d.headers['content-length']);
}).on('error',reject);
});
}
};
return directory(source, options);
},
s3 : function(client,params, options) {
var source = {
size: function() {
return new Promise(function(resolve,reject) {
client.headObject(params, function(err,d) {
if (err)
reject(err);
else
resolve(d.ContentLength);
});
});
},
stream: function(offset,length) {
var d = {};
for (var key in params)
d[key] = params[key];
d.Range = 'bytes='+offset+'-' + (length ? length : '');
return client.getObject(d).createReadStream();
}
};
return directory(source, options);
}
};

124
node_modules/unzipper/lib/Open/unzip.js generated vendored Normal file

@@ -0,0 +1,124 @@
var Promise = require('bluebird');
var Decrypt = require('../Decrypt');
var PullStream = require('../PullStream');
var Stream = require('stream');
var binary = require('binary');
var zlib = require('zlib');
var parseExtraField = require('../parseExtraField');
var Buffer = require('../Buffer');
var parseDateTime = require('../parseDateTime');
// Backwards compatibility for node versions < 8
if (!Stream.Writable || !Stream.Writable.prototype.destroy)
Stream = require('readable-stream');
module.exports = function unzip(source,offset,_password, directoryVars) {
var file = PullStream(),
entry = Stream.PassThrough();
var req = source.stream(offset);
req.pipe(file).on('error', function(e) {
entry.emit('error', e);
});
entry.vars = file.pull(30)
.then(function(data) {
var vars = binary.parse(data)
.word32lu('signature')
.word16lu('versionsNeededToExtract')
.word16lu('flags')
.word16lu('compressionMethod')
.word16lu('lastModifiedTime')
.word16lu('lastModifiedDate')
.word32lu('crc32')
.word32lu('compressedSize')
.word32lu('uncompressedSize')
.word16lu('fileNameLength')
.word16lu('extraFieldLength')
.vars;
vars.lastModifiedDateTime = parseDateTime(vars.lastModifiedDate, vars.lastModifiedTime);
return file.pull(vars.fileNameLength)
.then(function(fileName) {
vars.fileName = fileName.toString('utf8');
return file.pull(vars.extraFieldLength);
})
.then(function(extraField) {
var checkEncryption;
vars.extra = parseExtraField(extraField, vars);
// Ignore local file header vars if the directory vars are available
if (directoryVars && directoryVars.compressedSize) vars = directoryVars;
if (vars.flags & 0x01) checkEncryption = file.pull(12)
.then(function(header) {
if (!_password)
throw new Error('MISSING_PASSWORD');
var decrypt = Decrypt();
String(_password).split('').forEach(function(d) {
decrypt.update(d);
});
for (var i=0; i < header.length; i++)
header[i] = decrypt.decryptByte(header[i]);
vars.decrypt = decrypt;
vars.compressedSize -= 12;
var check = (vars.flags & 0x8) ? (vars.lastModifiedTime >> 8) & 0xff : (vars.crc32 >> 24) & 0xff;
if (header[11] !== check)
throw new Error('BAD_PASSWORD');
return vars;
});
return Promise.resolve(checkEncryption)
.then(function() {
entry.emit('vars',vars);
return vars;
});
});
});
entry.vars.then(function(vars) {
var fileSizeKnown = !(vars.flags & 0x08) || vars.compressedSize > 0,
eof;
var inflater = vars.compressionMethod ? zlib.createInflateRaw() : Stream.PassThrough();
if (fileSizeKnown) {
entry.size = vars.uncompressedSize;
eof = vars.compressedSize;
} else {
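// sizes unknown (general purpose bit 3 set): stream until the data descriptor signature below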
eof = Buffer.alloc(4);
eof.writeUInt32LE(0x08074b50, 0);
}
var stream = file.stream(eof);
if (vars.decrypt)
stream = stream.pipe(vars.decrypt.stream());
stream
.pipe(inflater)
.on('error',function(err) { entry.emit('error',err);})
.pipe(entry)
.on('finish', function() {
if (req.abort)
req.abort();
else if (req.close)
req.close();
else if (req.push)
req.push();
else
console.log('warning - unable to close stream');
});
})
.catch(function(e) {
entry.emit('error',e);
});
return entry;
};

145
node_modules/unzipper/lib/PullStream.js generated vendored Normal file

@@ -0,0 +1,145 @@
var Stream = require('stream');
var Promise = require('bluebird');
var util = require('util');
var Buffer = require('./Buffer');
var strFunction = 'function';
// Backwards compatibility for node versions < 8
if (!Stream.Writable || !Stream.Writable.prototype.destroy)
Stream = require('readable-stream');
function PullStream() {
if (!(this instanceof PullStream))
return new PullStream();
Stream.Duplex.call(this,{decodeStrings:false, objectMode:true});
this.buffer = Buffer.from('');
var self = this;
self.on('finish',function() {
self.finished = true;
self.emit('chunk',false);
});
}
util.inherits(PullStream,Stream.Duplex);
PullStream.prototype._write = function(chunk,e,cb) {
this.buffer = Buffer.concat([this.buffer,chunk]);
this.cb = cb;
this.emit('chunk');
};
// The `eof` parameter is interpreted as `file_length` if its type is number;
// otherwise (i.e. a buffer) it is interpreted as a pattern signaling the end of the stream
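// e.g. pull(30) resolves with the next 30 bytes, while pull(signature)
// collects everything up to (and optionally including) the given signature bytes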
PullStream.prototype.stream = function(eof,includeEof) {
var p = Stream.PassThrough();
var done,self= this;
function cb() {
if (typeof self.cb === strFunction) {
var callback = self.cb;
self.cb = undefined;
return callback();
}
}
function pull() {
var packet;
if (self.buffer && self.buffer.length) {
if (typeof eof === 'number') {
packet = self.buffer.slice(0,eof);
self.buffer = self.buffer.slice(eof);
eof -= packet.length;
done = !eof;
} else {
var match = self.buffer.indexOf(eof);
if (match !== -1) {
// store signature match byte offset to allow us to reference
// this for zip64 offset
self.match = match
if (includeEof) match = match + eof.length;
packet = self.buffer.slice(0,match);
self.buffer = self.buffer.slice(match);
done = true;
} else {
var len = self.buffer.length - eof.length;
if (len <= 0) {
cb();
} else {
packet = self.buffer.slice(0,len);
self.buffer = self.buffer.slice(len);
}
}
}
if (packet) p.write(packet,function() {
if (self.buffer.length === 0 || (eof.length && self.buffer.length <= eof.length)) cb();
});
}
if (!done) {
if (self.finished && !this.__ended) {
self.removeListener('chunk',pull);
self.emit('error', new Error('FILE_ENDED'));
this.__ended = true;
return;
}
} else {
self.removeListener('chunk',pull);
p.end();
}
}
self.on('chunk',pull);
pull();
return p;
};
PullStream.prototype.pull = function(eof,includeEof) {
if (eof === 0) return Promise.resolve('');
// If we already have the required data in buffer
// we can resolve the request immediately
if (!isNaN(eof) && this.buffer.length > eof) {
var data = this.buffer.slice(0,eof);
this.buffer = this.buffer.slice(eof);
return Promise.resolve(data);
}
// Otherwise we stream until we have it
var buffer = Buffer.from(''),
self = this;
var concatStream = Stream.Transform();
concatStream._transform = function(d,e,cb) {
buffer = Buffer.concat([buffer,d]);
cb();
};
var rejectHandler;
var pullStreamRejectHandler;
return new Promise(function(resolve,reject) {
rejectHandler = reject;
pullStreamRejectHandler = function(e) {
self.__emittedError = e;
reject(e);
}
if (self.finished)
return reject(new Error('FILE_ENDED'));
self.once('error',pullStreamRejectHandler); // reject any errors from pullstream itself
self.stream(eof,includeEof)
.on('error',reject)
.pipe(concatStream)
.on('finish',function() {resolve(buffer);})
.on('error',reject);
})
.finally(function() {
self.removeListener('error',rejectHandler);
self.removeListener('error',pullStreamRejectHandler);
});
};
PullStream.prototype._read = function(){};
module.exports = PullStream;

55
node_modules/unzipper/lib/extract.js generated vendored Normal file

@@ -0,0 +1,55 @@
module.exports = Extract;
var Parse = require('./parse');
var Writer = require('fstream').Writer;
var path = require('path');
var stream = require('stream');
var duplexer2 = require('duplexer2');
var Promise = require('bluebird');
function Extract (opts) {
// make sure path is normalized before using it
opts.path = path.normalize(opts.path);
var parser = new Parse(opts);
var outStream = new stream.Writable({objectMode: true});
outStream._write = function(entry, encoding, cb) {
if (entry.type == 'Directory') return cb();
// to avoid zip slip (writing outside of the destination), we resolve
// the target path, and make sure it's nested in the intended
// destination, or not extract it otherwise.
var extractPath = path.join(opts.path, entry.path);
if (extractPath.indexOf(opts.path) != 0) {
return cb();
}
const writer = opts.getWriter ? opts.getWriter({path: extractPath}) : Writer({ path: extractPath });
entry.pipe(writer)
.on('error', cb)
.on('close', cb);
};
var extract = duplexer2(parser,outStream);
parser.once('crx-header', function(crxHeader) {
extract.crxHeader = crxHeader;
});
parser
.pipe(outStream)
.on('finish',function() {
extract.emit('close');
});
extract.promise = function() {
return new Promise(function(resolve, reject) {
extract.on('close', resolve);
extract.on('error',reject);
});
};
return extract;
}

276
node_modules/unzipper/lib/parse.js generated vendored Normal file

@@ -0,0 +1,276 @@
var util = require('util');
var zlib = require('zlib');
var Stream = require('stream');
var binary = require('binary');
var Promise = require('bluebird');
var PullStream = require('./PullStream');
var NoopStream = require('./NoopStream');
var BufferStream = require('./BufferStream');
var parseExtraField = require('./parseExtraField');
var Buffer = require('./Buffer');
var parseDateTime = require('./parseDateTime');
// Backwards compatibility for node versions < 8
if (!Stream.Writable || !Stream.Writable.prototype.destroy)
Stream = require('readable-stream');
var endDirectorySignature = Buffer.alloc(4);
endDirectorySignature.writeUInt32LE(0x06054b50, 0);
function Parse(opts) {
if (!(this instanceof Parse)) {
return new Parse(opts);
}
var self = this;
self._opts = opts || { verbose: false };
PullStream.call(self, self._opts);
self.on('finish',function() {
self.emit('close');
});
self._readRecord().catch(function(e) {
if (!self.__emittedError || self.__emittedError !== e)
self.emit('error',e);
});
}
util.inherits(Parse, PullStream);
Parse.prototype._readRecord = function () {
var self = this;
return self.pull(4).then(function(data) {
if (data.length === 0)
return;
var signature = data.readUInt32LE(0);
if (signature === 0x34327243) {
return self._readCrxHeader();
}
if (signature === 0x04034b50) {
return self._readFile();
}
else if (signature === 0x02014b50) {
self.__ended = true;
return self._readCentralDirectoryFileHeader();
}
else if (signature === 0x06054b50) {
return self._readEndOfCentralDirectoryRecord();
}
else if (self.__ended) {
return self.pull(endDirectorySignature).then(function() {
return self._readEndOfCentralDirectoryRecord();
});
}
else
self.emit('error', new Error('invalid signature: 0x' + signature.toString(16)));
});
};
Parse.prototype._readCrxHeader = function() {
var self = this;
return self.pull(12).then(function(data) {
self.crxHeader = binary.parse(data)
.word32lu('version')
.word32lu('pubKeyLength')
.word32lu('signatureLength')
.vars;
return self.pull(self.crxHeader.pubKeyLength + self.crxHeader.signatureLength);
}).then(function(data) {
self.crxHeader.publicKey = data.slice(0,self.crxHeader.pubKeyLength);
self.crxHeader.signature = data.slice(self.crxHeader.pubKeyLength);
self.emit('crx-header',self.crxHeader);
return self._readRecord();
});
};
Parse.prototype._readFile = function () {
var self = this;
return self.pull(26).then(function(data) {
var vars = binary.parse(data)
.word16lu('versionsNeededToExtract')
.word16lu('flags')
.word16lu('compressionMethod')
.word16lu('lastModifiedTime')
.word16lu('lastModifiedDate')
.word32lu('crc32')
.word32lu('compressedSize')
.word32lu('uncompressedSize')
.word16lu('fileNameLength')
.word16lu('extraFieldLength')
.vars;
vars.lastModifiedDateTime = parseDateTime(vars.lastModifiedDate, vars.lastModifiedTime);
if (self.crxHeader) vars.crxHeader = self.crxHeader;
return self.pull(vars.fileNameLength).then(function(fileNameBuffer) {
var fileName = fileNameBuffer.toString('utf8');
var entry = Stream.PassThrough();
var __autodraining = false;
entry.autodrain = function() {
__autodraining = true;
var draining = entry.pipe(NoopStream());
draining.promise = function() {
return new Promise(function(resolve, reject) {
draining.on('finish',resolve);
draining.on('error',reject);
});
};
return draining;
};
entry.buffer = function() {
return BufferStream(entry);
};
entry.path = fileName;
entry.props = {};
entry.props.path = fileName;
entry.props.pathBuffer = fileNameBuffer;
entry.props.flags = {
"isUnicode": (vars.flags & 0x800) !== 0 // general purpose bit 11: UTF-8 encoded filename
};
entry.type = (vars.uncompressedSize === 0 && /[\/\\]$/.test(fileName)) ? 'Directory' : 'File';
if (self._opts.verbose) {
if (entry.type === 'Directory') {
console.log(' creating:', fileName);
} else if (entry.type === 'File') {
if (vars.compressionMethod === 0) {
console.log(' extracting:', fileName);
} else {
console.log(' inflating:', fileName);
}
}
}
return self.pull(vars.extraFieldLength).then(function(extraField) {
var extra = parseExtraField(extraField, vars);
entry.vars = vars;
entry.extra = extra;
self.emit('entry', entry);
if (self._readableState.pipesCount)
self.push(entry);
if (self._opts.verbose)
console.log({
filename:fileName,
vars: vars,
extra: extra
});
var fileSizeKnown = !(vars.flags & 0x08) || vars.compressedSize > 0,
eof;
entry.__autodraining = __autodraining; // expose __autodraining for test purposes
var inflater = (vars.compressionMethod && !__autodraining) ? zlib.createInflateRaw() : Stream.PassThrough();
if (fileSizeKnown) {
entry.size = vars.uncompressedSize;
eof = vars.compressedSize;
} else {
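// sizes unknown (general purpose bit 3 set): stream until the data descriptor signature below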
eof = Buffer.alloc(4);
eof.writeUInt32LE(0x08074b50, 0);
}
self.stream(eof)
.pipe(inflater)
.on('error',function(err) { self.emit('error',err);})
.pipe(entry)
.on('finish', function() {
return fileSizeKnown ? self._readRecord() : self._processDataDescriptor(entry);
});
return null; // This prevents bluebird from throwing "promise created but not returned" warnings
});
});
});
};
Parse.prototype._processDataDescriptor = function (entry) {
var self = this;
self.pull(16).then(function(data) {
var vars = binary.parse(data)
.word32lu('dataDescriptorSignature')
.word32lu('crc32')
.word32lu('compressedSize')
.word32lu('uncompressedSize')
.vars;
entry.size = vars.uncompressedSize;
self._readRecord();
});
};
Parse.prototype._readCentralDirectoryFileHeader = function () {
var self = this;
self.pull(42).then(function(data) {
var vars = binary.parse(data)
.word16lu('versionMadeBy')
.word16lu('versionsNeededToExtract')
.word16lu('flags')
.word16lu('compressionMethod')
.word16lu('lastModifiedTime')
.word16lu('lastModifiedDate')
.word32lu('crc32')
.word32lu('compressedSize')
.word32lu('uncompressedSize')
.word16lu('fileNameLength')
.word16lu('extraFieldLength')
.word16lu('fileCommentLength')
.word16lu('diskNumber')
.word16lu('internalFileAttributes')
.word32lu('externalFileAttributes')
.word32lu('offsetToLocalFileHeader')
.vars;
return self.pull(vars.fileNameLength).then(function(fileName) {
vars.fileName = fileName.toString('utf8');
return self.pull(vars.extraFieldLength);
})
.then(function(extraField) {
return self.pull(vars.fileCommentLength);
})
.then(function(fileComment) {
return self._readRecord();
});
});
};
Parse.prototype._readEndOfCentralDirectoryRecord = function() {
var self = this;
self.pull(18).then(function(data) {
var vars = binary.parse(data)
.word16lu('diskNumber')
.word16lu('diskStart')
.word16lu('numberOfRecordsOnDisk')
.word16lu('numberOfRecords')
.word32lu('sizeOfCentralDirectory')
.word32lu('offsetToStartOfCentralDirectory')
.word16lu('commentLength')
.vars;
self.pull(vars.commentLength).then(function(comment) {
comment = comment.toString('utf8');
self.end();
self.push(null);
});
});
};
Parse.prototype.promise = function() {
var self = this;
return new Promise(function(resolve,reject) {
self.on('finish',resolve);
self.on('error',reject);
});
};
module.exports = Parse;

13
node_modules/unzipper/lib/parseDateTime.js generated vendored Normal file

@@ -0,0 +1,13 @@
// Dates in zip file entries are stored as DosDateTime
// Spec is here: https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-dosdatetimetofiletime
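// e.g. parseDateTime(0x4F35, 0x817B) => 2019-09-21T16:11:54Z
// (illustrative values: 0x4F35 encodes 2019-09-21, 0x817B encodes 16:11:54)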
module.exports = function parseDateTime(date, time) {
const day = date & 0x1F;
const month = date >> 5 & 0x0F;
const year = (date >> 9 & 0x7F) + 1980;
const seconds = time ? (time & 0x1F) * 2 : 0;
const minutes = time ? (time >> 5) & 0x3F : 0;
const hours = time ? (time >> 11): 0;
return new Date(Date.UTC(year, month-1, day, hours, minutes, seconds));
};

37
node_modules/unzipper/lib/parseExtraField.js generated vendored Normal file

@@ -0,0 +1,37 @@
var binary = require('binary');
module.exports = function(extraField, vars) {
var extra;
// Find the ZIP64 header, if present.
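// (the Zip64 Extended Information Extra Field uses header id 0x0001 per the ZIP APPNOTE)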
while(!extra && extraField && extraField.length) {
var candidateExtra = binary.parse(extraField)
.word16lu('signature')
.word16lu('partsize')
.word64lu('uncompressedSize')
.word64lu('compressedSize')
.word64lu('offset')
.word64lu('disknum')
.vars;
if(candidateExtra.signature === 0x0001) {
extra = candidateExtra;
} else {
// Advance the buffer to the next part.
// The total size of this part is the 4 byte header + partsize.
extraField = extraField.slice(candidateExtra.partsize + 4);
}
}
extra = extra || {};
if (vars.compressedSize === 0xffffffff)
vars.compressedSize = extra.compressedSize;
if (vars.uncompressedSize === 0xffffffff)
vars.uncompressedSize= extra.uncompressedSize;
if (vars.offsetToLocalFileHeader === 0xffffffff)
vars.offsetToLocalFileHeader= extra.offset;
return extra;
};

58
node_modules/unzipper/lib/parseOne.js generated vendored Normal file

@@ -0,0 +1,58 @@
var Stream = require('stream');
var Parse = require('./parse');
var duplexer2 = require('duplexer2');
var BufferStream = require('./BufferStream');
// Backwards compatibility for node versions < 8
if (!Stream.Writable || !Stream.Writable.prototype.destroy)
Stream = require('readable-stream');
function parseOne(match,opts) {
var inStream = Stream.PassThrough({objectMode:true});
var outStream = Stream.PassThrough();
var transform = Stream.Transform({objectMode:true});
var re = match instanceof RegExp ? match : (match && new RegExp(match));
var found;
transform._transform = function(entry,e,cb) {
if (found || (re && !re.exec(entry.path))) {
entry.autodrain();
return cb();
} else {
found = true;
out.emit('entry',entry);
entry.on('error',function(e) {
outStream.emit('error',e);
});
entry.pipe(outStream)
.on('error',function(err) {
cb(err);
})
.on('finish',function(d) {
cb(null,d);
});
}
};
inStream.pipe(Parse(opts))
.on('error',function(err) {
outStream.emit('error',err);
})
.pipe(transform)
.on('error',Object) // Silence the error as it's already addressed in the transform
.on('finish',function() {
if (!found)
outStream.emit('error',new Error('PATTERN_NOT_FOUND'));
else
outStream.end();
});
var out = duplexer2(inStream,outStream);
out.buffer = function() {
return BufferStream(outStream);
};
return out;
}
module.exports = parseOne;

9
node_modules/unzipper/node_modules/fstream/.travis.yml generated vendored Normal file

@@ -0,0 +1,9 @@
language: node_js
node_js:
- "6"
- "4"
- "0.10"
- "0.12"
before_install:
- "npm config set spin false"
- "npm install -g npm/npm"

15
node_modules/unzipper/node_modules/fstream/LICENSE generated vendored Normal file

@@ -0,0 +1,15 @@
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

76
node_modules/unzipper/node_modules/fstream/README.md generated vendored Normal file

@@ -0,0 +1,76 @@
Like FS streams, but with stat on them, and supporting directories and
symbolic links, as well as normal files. Also, you can use this to set
the stats on a file, even if you don't change its contents, or to create
a symlink, etc.
So, for example, you can "write" a directory, and it'll call `mkdir`. You
can specify a uid and gid, and it'll call `chown`. You can specify a
`mtime` and `atime`, and it'll call `utimes`. You can call it a symlink
and provide a `linkpath` and it'll call `symlink`.
Note that it won't automatically resolve symbolic links. So, if you
call `fstream.Reader('/some/symlink')` then you'll get an object
that stats and then ends immediately (since it has no data). To follow
symbolic links, do this: `fstream.Reader({path:'/some/symlink', follow:
true })`.
There are various checks to make sure that the bytes emitted are the
same as the intended size, if the size is set.
## Examples
```javascript
fstream
.Writer({ path: "path/to/file"
, mode: 0755
, size: 6
})
.write("hello\n")
.end()
```
This will create the directories if they're missing, and then write
`hello\n` into the file, chmod it to 0755, and assert that 6 bytes have
been written when it's done.
```javascript
fstream
.Writer({ path: "path/to/file"
, mode: 0755
, size: 6
, flags: "a"
})
.write("hello\n")
.end()
```
You can pass flags in, if you want to append to a file.
```javascript
fstream
.Writer({ path: "path/to/symlink"
, linkpath: "./file"
, SymbolicLink: true
, mode: "0755" // octal strings supported
})
.end()
```
If isSymbolicLink is a function, it'll be called, and if it returns
true, then it'll treat it as a symlink. If it's not a function, then
any truish value will make a symlink, or you can set `type:
'SymbolicLink'`, which does the same thing.
Note that the linkpath is relative to the symbolic link location, not
the parent dir or cwd.
```javascript
fstream
.Reader("path/to/dir")
.pipe(fstream.Writer("path/to/other/dir"))
```
This will do like `cp -Rp path/to/dir path/to/other/dir`. If the other
dir exists and isn't a directory, then it'll emit an error. It'll also
set the uid, gid, mode, etc. to be identical. In this way, it's more
like `rsync -a` than simply a copy.

134
node_modules/unzipper/node_modules/fstream/examples/filter-pipe.js generated vendored Normal file

@@ -0,0 +1,134 @@
var fstream = require('../fstream.js')
var path = require('path')
var r = fstream.Reader({
path: path.dirname(__dirname),
filter: function () {
return !this.basename.match(/^\./) &&
!this.basename.match(/^node_modules$/) &&
!this.basename.match(/^deep-copy$/) &&
!this.basename.match(/^filter-copy$/)
}
})
// this writer will only write directories
var w = fstream.Writer({
path: path.resolve(__dirname, 'filter-copy'),
type: 'Directory',
filter: function () {
return this.type === 'Directory'
}
})
var indent = ''
r.on('entry', appears)
r.on('ready', function () {
console.error('ready to begin!', r.path)
})
function appears (entry) {
console.error(indent + 'a %s appears!', entry.type, entry.basename, typeof entry.basename)
if (foggy) {
console.error('FOGGY!')
var p = entry
do {
console.error(p.depth, p.path, p._paused)
p = p.parent
} while (p)
throw new Error('\u001b[mshould not have entries while foggy')
}
indent += '\t'
entry.on('data', missile(entry))
entry.on('end', runaway(entry))
entry.on('entry', appears)
}
var foggy
function missile (entry) {
function liftFog (who) {
if (!foggy) return
if (who) {
console.error('%s breaks the spell!', who && who.path)
} else {
console.error('the spell expires!')
}
console.error('\u001b[mthe fog lifts!\n')
clearTimeout(foggy)
foggy = null
if (entry._paused) entry.resume()
}
if (entry.type === 'Directory') {
var ended = false
entry.once('end', function () { ended = true })
return function (c) {
// throw in some pathological pause()/resume() behavior
// just for extra fun.
process.nextTick(function () {
if (!foggy && !ended) { // && Math.random() < 0.3) {
console.error(indent + '%s casts a spell', entry.basename)
console.error('\na slowing fog comes over the battlefield...\n\u001b[32m')
entry.pause()
entry.once('resume', liftFog)
foggy = setTimeout(liftFog, 1000)
}
})
}
}
return function (c) {
var e = Math.random() < 0.5
console.error(indent + '%s %s for %d damage!',
entry.basename,
e ? 'is struck' : 'fires a chunk',
c.length)
}
}
function runaway (entry) {
return function () {
var e = Math.random() < 0.5
console.error(indent + '%s %s',
entry.basename,
e ? 'turns to flee' : 'is vanquished!')
indent = indent.slice(0, -1)
}
}
w.on('entry', attacks)
// w.on('ready', function () { attacks(w) })
function attacks (entry) {
console.error(indent + '%s %s!', entry.basename,
entry.type === 'Directory' ? 'calls for backup' : 'attacks')
entry.on('entry', attacks)
}
var ended = false
var i = 1
r.on('end', function () {
if (foggy) clearTimeout(foggy)
console.error("\u001b[mIT'S OVER!!")
console.error('A WINNAR IS YOU!')
console.log('ok ' + (i++) + ' A WINNAR IS YOU')
ended = true
// now go through and verify that everything in there is a dir.
var p = path.resolve(__dirname, 'filter-copy')
var checker = fstream.Reader({ path: p })
checker.checker = true
checker.on('child', function (e) {
var ok = e.type === 'Directory'
console.log((ok ? '' : 'not ') + 'ok ' + (i++) +
' should be a dir: ' +
e.path.substr(checker.path.length + 1))
})
})
process.on('exit', function () {
console.log((ended ? '' : 'not ') + 'ok ' + (i) + ' ended')
console.log('1..' + i)
})
r.pipe(w)

118
node_modules/unzipper/node_modules/fstream/examples/pipe.js generated vendored Normal file

@@ -0,0 +1,118 @@
var fstream = require('../fstream.js')
var path = require('path')
var r = fstream.Reader({
path: path.dirname(__dirname),
filter: function () {
return !this.basename.match(/^\./) &&
!this.basename.match(/^node_modules$/) &&
!this.basename.match(/^deep-copy$/)
}
})
var w = fstream.Writer({
path: path.resolve(__dirname, 'deep-copy'),
type: 'Directory'
})
var indent = ''
r.on('entry', appears)
r.on('ready', function () {
console.error('ready to begin!', r.path)
})
function appears (entry) {
console.error(indent + 'a %s appears!', entry.type, entry.basename, typeof entry.basename, entry)
if (foggy) {
console.error('FOGGY!')
var p = entry
do {
console.error(p.depth, p.path, p._paused)
p = p.parent
} while (p)
throw new Error('\u001b[mshould not have entries while foggy')
}
indent += '\t'
entry.on('data', missile(entry))
entry.on('end', runaway(entry))
entry.on('entry', appears)
}
var foggy
function missile (entry) {
function liftFog (who) {
if (!foggy) return
if (who) {
console.error('%s breaks the spell!', who && who.path)
} else {
console.error('the spell expires!')
}
console.error('\u001b[mthe fog lifts!\n')
clearTimeout(foggy)
foggy = null
if (entry._paused) entry.resume()
}
if (entry.type === 'Directory') {
var ended = false
entry.once('end', function () { ended = true })
return function (c) {
// throw in some pathological pause()/resume() behavior
// just for extra fun.
process.nextTick(function () {
if (!foggy && !ended) { // && Math.random() < 0.3) {
console.error(indent + '%s casts a spell', entry.basename)
console.error('\na slowing fog comes over the battlefield...\n\u001b[32m')
entry.pause()
entry.once('resume', liftFog)
foggy = setTimeout(liftFog, 10)
}
})
}
}
return function (c) {
var e = Math.random() < 0.5
console.error(indent + '%s %s for %d damage!',
entry.basename,
e ? 'is struck' : 'fires a chunk',
c.length)
}
}
function runaway (entry) {
return function () {
var e = Math.random() < 0.5
console.error(indent + '%s %s',
entry.basename,
e ? 'turns to flee' : 'is vanquished!')
indent = indent.slice(0, -1)
}
}
w.on('entry', attacks)
// w.on('ready', function () { attacks(w) })
function attacks (entry) {
console.error(indent + '%s %s!', entry.basename,
entry.type === 'Directory' ? 'calls for backup' : 'attacks')
entry.on('entry', attacks)
}
var ended = false
r.on('end', function () {
if (foggy) clearTimeout(foggy)
console.error("\u001b[mIT'S OVER!!")
console.error('A WINNAR IS YOU!')
console.log('ok 1 A WINNAR IS YOU')
ended = true
})
process.on('exit', function () {
console.log((ended ? '' : 'not ') + 'ok 2 ended')
console.log('1..2')
})
r.pipe(w)

68
node_modules/unzipper/node_modules/fstream/examples/reader.js generated vendored Normal file

@@ -0,0 +1,68 @@
var fstream = require('../fstream.js')
var tap = require('tap')
var fs = require('fs')
var path = require('path')
var dir = path.dirname(__dirname)
tap.test('reader test', function (t) {
var children = -1
var gotReady = false
var ended = false
var r = fstream.Reader({
path: dir,
filter: function () {
// return this.parent === r
return this.parent === r || this === r
}
})
r.on('ready', function () {
gotReady = true
children = fs.readdirSync(dir).length
console.error('Setting expected children to ' + children)
t.equal(r.type, 'Directory', 'should be a directory')
})
r.on('entry', function (entry) {
children--
if (!gotReady) {
t.fail('children before ready!')
}
t.equal(entry.dirname, r.path, 'basename is parent dir')
})
r.on('error', function (er) {
t.fail(er)
t.end()
process.exit(1)
})
r.on('end', function () {
t.equal(children, 0, 'should have seen all children')
ended = true
})
var closed = false
r.on('close', function () {
t.ok(ended, 'saw end before close')
t.notOk(closed, 'close should only happen once')
closed = true
t.end()
})
})
tap.test('reader error test', function (t) {
// assumes non-root on a *nix system
var r = fstream.Reader({ path: '/etc/shadow' })
r.once('error', function (er) {
t.ok(true)
t.end()
})
r.on('end', function () {
t.fail('reader ended without error')
t.end()
})
})

27
node_modules/unzipper/node_modules/fstream/examples/symlink-write.js generated vendored Normal file

@@ -0,0 +1,27 @@
var fstream = require('../fstream.js')
var notOpen = false
process.chdir(__dirname)
fstream
.Writer({
path: 'path/to/symlink',
linkpath: './file',
isSymbolicLink: true,
mode: '0755' // octal strings supported
})
.on('close', function () {
notOpen = true
var fs = require('fs')
var s = fs.lstatSync('path/to/symlink')
var isSym = s.isSymbolicLink()
console.log((isSym ? '' : 'not ') + 'ok 1 should be symlink')
var t = fs.readlinkSync('path/to/symlink')
var isTarget = t === './file'
console.log((isTarget ? '' : 'not ') + 'ok 2 should link to ./file')
})
.end()
process.on('exit', function () {
console.log((notOpen ? '' : 'not ') + 'ok 3 should be closed')
console.log('1..3')
})

35
node_modules/unzipper/node_modules/fstream/fstream.js generated vendored Normal file

@@ -0,0 +1,35 @@
exports.Abstract = require('./lib/abstract.js')
exports.Reader = require('./lib/reader.js')
exports.Writer = require('./lib/writer.js')
exports.File = {
Reader: require('./lib/file-reader.js'),
Writer: require('./lib/file-writer.js')
}
exports.Dir = {
Reader: require('./lib/dir-reader.js'),
Writer: require('./lib/dir-writer.js')
}
exports.Link = {
Reader: require('./lib/link-reader.js'),
Writer: require('./lib/link-writer.js')
}
exports.Proxy = {
Reader: require('./lib/proxy-reader.js'),
Writer: require('./lib/proxy-writer.js')
}
exports.Reader.Dir = exports.DirReader = exports.Dir.Reader
exports.Reader.File = exports.FileReader = exports.File.Reader
exports.Reader.Link = exports.LinkReader = exports.Link.Reader
exports.Reader.Proxy = exports.ProxyReader = exports.Proxy.Reader
exports.Writer.Dir = exports.DirWriter = exports.Dir.Writer
exports.Writer.File = exports.FileWriter = exports.File.Writer
exports.Writer.Link = exports.LinkWriter = exports.Link.Writer
exports.Writer.Proxy = exports.ProxyWriter = exports.Proxy.Writer
exports.collect = require('./lib/collect.js')
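
Taken together, these exports make the module polymorphic: `fstream.Reader(props)` and `fstream.Writer(props)` dispatch to the matching Dir/File/Link/Proxy subclass based on the detected type. A minimal usage sketch (directory names are hypothetical):

```js
var fstream = require('fstream')

// Copy a directory tree. The DirReader emits "entry" events, and
// Reader.prototype.pipe feeds each entry into DirWriter.add(),
// pausing the reader whenever add() returns false.
fstream
  .Reader({ path: 'src-dir', type: 'Directory' })
  .pipe(fstream.Writer({ path: 'dest-dir', type: 'Directory' }))
  .on('close', function () {
    console.log('copied src-dir -> dest-dir')
  })
```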

85
node_modules/unzipper/node_modules/fstream/lib/abstract.js generated vendored Normal file
View File

@@ -0,0 +1,85 @@
// the parent class for all fstreams.
module.exports = Abstract
var Stream = require('stream').Stream
var inherits = require('inherits')
function Abstract () {
Stream.call(this)
}
inherits(Abstract, Stream)
Abstract.prototype.on = function (ev, fn) {
if (ev === 'ready' && this.ready) {
process.nextTick(fn.bind(this))
} else {
Stream.prototype.on.call(this, ev, fn)
}
return this
}
Abstract.prototype.abort = function () {
this._aborted = true
this.emit('abort')
}
Abstract.prototype.destroy = function () {}
Abstract.prototype.warn = function (msg, code) {
var self = this
var er = decorate(msg, code, self)
if (!self.listeners('warn').length) {
console.error('%s %s\n' +
'path = %s\n' +
'syscall = %s\n' +
'fstream_type = %s\n' +
'fstream_path = %s\n' +
'fstream_unc_path = %s\n' +
'fstream_class = %s\n' +
'fstream_stack =\n%s\n',
code || 'UNKNOWN',
er.stack,
er.path,
er.syscall,
er.fstream_type,
er.fstream_path,
er.fstream_unc_path,
er.fstream_class,
er.fstream_stack.join('\n'))
} else {
self.emit('warn', er)
}
}
Abstract.prototype.info = function (msg, code) {
this.emit('info', msg, code)
}
Abstract.prototype.error = function (msg, code, th) {
var er = decorate(msg, code, this)
if (th) throw er
else this.emit('error', er)
}
function decorate (er, code, self) {
if (!(er instanceof Error)) er = new Error(er)
er.code = er.code || code
er.path = er.path || self.path
er.fstream_type = er.fstream_type || self.type
er.fstream_path = er.fstream_path || self.path
if (self._path !== self.path) {
er.fstream_unc_path = er.fstream_unc_path || self._path
}
if (self.linkpath) {
er.fstream_linkpath = er.fstream_linkpath || self.linkpath
}
er.fstream_class = er.fstream_class || self.constructor.name
er.fstream_stack = er.fstream_stack ||
new Error().stack.split(/\n/).slice(3).map(function (s) {
return s.replace(/^ {4}at /, '')
})
return er
}
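
One subtlety in `Abstract.prototype.on` above: once an instance has set `this.ready`, a `'ready'` listener attached late is still invoked on the next tick rather than being lost. A small sketch of that behaviour, using a DirWriter (which sets `ready` before emitting; path hypothetical):

```js
var fstream = require('fstream')

var w = fstream.Writer({ path: 'out-dir', type: 'Directory' })
w.on('ready', function () {
  // w.ready is true by now, so this late listener is dispatched
  // via process.nextTick instead of never firing.
  w.on('ready', function () { console.log('late listener still fires') })
})
```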

70
node_modules/unzipper/node_modules/fstream/lib/collect.js generated vendored Normal file
View File

@@ -0,0 +1,70 @@
module.exports = collect
function collect (stream) {
if (stream._collected) return
if (stream._paused) return stream.on('resume', collect.bind(null, stream))
stream._collected = true
stream.pause()
stream.on('data', save)
stream.on('end', save)
var buf = []
function save (b) {
if (typeof b === 'string') b = new Buffer(b)
if (Buffer.isBuffer(b) && !b.length) return
buf.push(b)
}
stream.on('entry', saveEntry)
var entryBuffer = []
function saveEntry (e) {
collect(e)
entryBuffer.push(e)
}
stream.on('proxy', proxyPause)
function proxyPause (p) {
p.pause()
}
// replace the pipe method with a new version that will
// unlock the buffered stuff. if you just call .pipe()
// without a destination, then it'll re-play the events.
stream.pipe = (function (orig) {
return function (dest) {
// console.error(' === open the pipes', dest && dest.path)
// let the entries flow through one at a time.
// Once they're all done, then we can resume completely.
var e = 0
;(function unblockEntry () {
var entry = entryBuffer[e++]
// console.error(" ==== unblock entry", entry && entry.path)
if (!entry) return resume()
entry.on('end', unblockEntry)
if (dest) dest.add(entry)
else stream.emit('entry', entry)
})()
function resume () {
stream.removeListener('entry', saveEntry)
stream.removeListener('data', save)
stream.removeListener('end', save)
stream.pipe = orig
if (dest) stream.pipe(dest)
buf.forEach(function (b) {
if (b) stream.emit('data', b)
else stream.emit('end')
})
stream.resume()
}
return dest
}
})(stream.pipe)
}
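
In other words, `collect` parks a stream: chunks, end/close, and child entries are buffered until the patched `pipe()` is called; `pipe(dest)` drains entries into `dest.add()`, while `pipe()` with no argument simply replays the buffered events. A sketch of the replay path, using the `collect` export from the index above (file name hypothetical):

```js
var fstream = require('fstream')

var entry = fstream.Reader({ path: 'some-file.txt' })
fstream.collect(entry)  // pauses the stream and starts buffering

setTimeout(function () {
  // Attach listeners late, then let the patched pipe() replay the
  // buffered 'data' and 'end' events to them.
  entry.on('data', function (c) { console.log('chunk of', c.length, 'bytes') })
  entry.on('end', function () { console.log('replayed to the end') })
  entry.pipe()
}, 100)
```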

252
node_modules/unzipper/node_modules/fstream/lib/dir-reader.js generated vendored Normal file
View File

@@ -0,0 +1,252 @@
// A thing that emits "entry" events with Reader objects
// Pausing it causes it to stop emitting entry events, and also
// pauses the current entry if there is one.
module.exports = DirReader
var fs = require('graceful-fs')
var inherits = require('inherits')
var path = require('path')
var Reader = require('./reader.js')
var assert = require('assert').ok
inherits(DirReader, Reader)
function DirReader (props) {
var self = this
if (!(self instanceof DirReader)) {
throw new Error('DirReader must be called as constructor.')
}
// should already be established as a Directory type
if (props.type !== 'Directory' || !props.Directory) {
throw new Error('Non-directory type ' + props.type)
}
self.entries = null
self._index = -1
self._paused = false
self._length = -1
if (props.sort) {
this.sort = props.sort
}
Reader.call(this, props)
}
DirReader.prototype._getEntries = function () {
var self = this
// race condition. might pause() before calling _getEntries,
// and then resume, and try to get them a second time.
if (self._gotEntries) return
self._gotEntries = true
fs.readdir(self._path, function (er, entries) {
if (er) return self.error(er)
self.entries = entries
self.emit('entries', entries)
if (self._paused) self.once('resume', processEntries)
else processEntries()
function processEntries () {
self._length = self.entries.length
if (typeof self.sort === 'function') {
self.entries = self.entries.sort(self.sort.bind(self))
}
self._read()
}
})
}
// start walking the dir, and emit an "entry" event for each one.
DirReader.prototype._read = function () {
var self = this
if (!self.entries) return self._getEntries()
if (self._paused || self._currentEntry || self._aborted) {
// console.error('DR paused=%j, current=%j, aborted=%j', self._paused, !!self._currentEntry, self._aborted)
return
}
self._index++
if (self._index >= self.entries.length) {
if (!self._ended) {
self._ended = true
self.emit('end')
self.emit('close')
}
return
}
// ok, handle this one, then.
// save creating a proxy, by stat'ing the thing now.
var p = path.resolve(self._path, self.entries[self._index])
assert(p !== self._path)
assert(self.entries[self._index])
// set this to prevent trying to _read() again in the stat time.
self._currentEntry = p
fs[ self.props.follow ? 'stat' : 'lstat' ](p, function (er, stat) {
if (er) return self.error(er)
var who = self._proxy || self
stat.path = p
stat.basename = path.basename(p)
stat.dirname = path.dirname(p)
var childProps = self.getChildProps.call(who, stat)
childProps.path = p
childProps.basename = path.basename(p)
childProps.dirname = path.dirname(p)
var entry = Reader(childProps, stat)
// console.error("DR Entry", p, stat.size)
self._currentEntry = entry
// "entry" events are for direct entries in a specific dir.
// "child" events are for any and all children at all levels.
// This nomenclature is not completely final.
entry.on('pause', function (who) {
if (!self._paused && !entry._disowned) {
self.pause(who)
}
})
entry.on('resume', function (who) {
if (self._paused && !entry._disowned) {
self.resume(who)
}
})
entry.on('stat', function (props) {
self.emit('_entryStat', entry, props)
if (entry._aborted) return
if (entry._paused) {
entry.once('resume', function () {
self.emit('entryStat', entry, props)
})
} else self.emit('entryStat', entry, props)
})
entry.on('ready', function EMITCHILD () {
// console.error("DR emit child", entry._path)
if (self._paused) {
// console.error(" DR emit child - try again later")
// pause the child, and emit the "entry" event once we drain.
// console.error("DR pausing child entry")
entry.pause(self)
return self.once('resume', EMITCHILD)
}
// skip over sockets. they can't be piped around properly,
// so there's really no sense even acknowledging them.
// if someone really wants to see them, they can listen to
// the "socket" events.
if (entry.type === 'Socket') {
self.emit('socket', entry)
} else {
self.emitEntry(entry)
}
})
var ended = false
entry.on('close', onend)
entry.on('disown', onend)
function onend () {
if (ended) return
ended = true
self.emit('childEnd', entry)
self.emit('entryEnd', entry)
self._currentEntry = null
if (!self._paused) {
self._read()
}
}
// XXX Remove this. Works in node as of 0.6.2 or so.
// Long filenames should not break stuff.
entry.on('error', function (er) {
if (entry._swallowErrors) {
self.warn(er)
entry.emit('end')
entry.emit('close')
} else {
self.emit('error', er)
}
})
// proxy up some events.
;[
'child',
'childEnd',
'warn'
].forEach(function (ev) {
entry.on(ev, self.emit.bind(self, ev))
})
})
}
DirReader.prototype.disown = function (entry) {
entry.emit('beforeDisown')
entry._disowned = true
entry.parent = entry.root = null
if (entry === this._currentEntry) {
this._currentEntry = null
}
entry.emit('disown')
}
DirReader.prototype.getChildProps = function () {
return {
depth: this.depth + 1,
root: this.root || this,
parent: this,
follow: this.follow,
filter: this.filter,
sort: this.props.sort,
hardlinks: this.props.hardlinks
}
}
DirReader.prototype.pause = function (who) {
var self = this
if (self._paused) return
who = who || self
self._paused = true
if (self._currentEntry && self._currentEntry.pause) {
self._currentEntry.pause(who)
}
self.emit('pause', who)
}
DirReader.prototype.resume = function (who) {
var self = this
if (!self._paused) return
who = who || self
self._paused = false
// console.error('DR Emit Resume', self._path)
self.emit('resume', who)
if (self._paused) {
// console.error('DR Re-paused', self._path)
return
}
if (self._currentEntry) {
if (self._currentEntry.resume) self._currentEntry.resume(who)
} else self._read()
}
DirReader.prototype.emitEntry = function (entry) {
this.emit('entry', entry)
this.emit('child', entry)
}

174
node_modules/unzipper/node_modules/fstream/lib/dir-writer.js generated vendored Normal file
View File

@@ -0,0 +1,174 @@
// It is expected that, when .add() returns false, the consumer
// of the DirWriter will pause until a "drain" event occurs. Note
// that this is *almost always going to be the case*, unless the
// thing being written is some sort of unsupported type, and thus
// skipped over.
module.exports = DirWriter
var Writer = require('./writer.js')
var inherits = require('inherits')
var mkdir = require('mkdirp')
var path = require('path')
var collect = require('./collect.js')
inherits(DirWriter, Writer)
function DirWriter (props) {
var self = this
if (!(self instanceof DirWriter)) {
self.error('DirWriter must be called as constructor.', null, true)
}
// should already be established as a Directory type
if (props.type !== 'Directory' || !props.Directory) {
self.error('Non-directory type ' + props.type + ' ' +
JSON.stringify(props), null, true)
}
Writer.call(this, props)
}
DirWriter.prototype._create = function () {
var self = this
mkdir(self._path, Writer.dirmode, function (er) {
if (er) return self.error(er)
// ready to start getting entries!
self.ready = true
self.emit('ready')
self._process()
})
}
// a DirWriter has an add(entry) method, but its .write() doesn't
// do anything. Why a no-op rather than a throw? Because this
// leaves open the door for writing directory metadata for
// gnu/solaris style dumpdirs.
DirWriter.prototype.write = function () {
return true
}
DirWriter.prototype.end = function () {
this._ended = true
this._process()
}
DirWriter.prototype.add = function (entry) {
var self = this
// console.error('\tadd', entry._path, '->', self._path)
collect(entry)
if (!self.ready || self._currentEntry) {
self._buffer.push(entry)
return false
}
// create a new writer, and pipe the incoming entry into it.
if (self._ended) {
return self.error('add after end')
}
self._buffer.push(entry)
self._process()
return this._buffer.length === 0
}
DirWriter.prototype._process = function () {
var self = this
// console.error('DW Process p=%j', self._processing, self.basename)
if (self._processing) return
var entry = self._buffer.shift()
if (!entry) {
// console.error("DW Drain")
self.emit('drain')
if (self._ended) self._finish()
return
}
self._processing = true
// console.error("DW Entry", entry._path)
self.emit('entry', entry)
// ok, add this entry
//
// don't allow recursive copying
var p = entry
var pp
do {
pp = p._path || p.path
if (pp === self.root._path || pp === self._path ||
(pp && pp.indexOf(self._path) === 0)) {
// console.error('DW Exit (recursive)', entry.basename, self._path)
self._processing = false
if (entry._collected) entry.pipe()
return self._process()
}
p = p.parent
} while (p)
// console.error("DW not recursive")
// chop off the entry's root dir, replace with ours
var props = {
parent: self,
root: self.root || self,
type: entry.type,
depth: self.depth + 1
}
pp = entry._path || entry.path || entry.props.path
if (entry.parent) {
pp = pp.substr(entry.parent._path.length + 1)
}
// get rid of any ../../ shenanigans
props.path = path.join(self.path, path.join('/', pp))
// if i have a filter, the child should inherit it.
props.filter = self.filter
// all the rest of the stuff, copy over from the source.
Object.keys(entry.props).forEach(function (k) {
if (!props.hasOwnProperty(k)) {
props[k] = entry.props[k]
}
})
// not sure at this point what kind of writer this is.
var child = self._currentChild = new Writer(props)
child.on('ready', function () {
// console.error("DW Child Ready", child.type, child._path)
// console.error(" resuming", entry._path)
entry.pipe(child)
entry.resume()
})
// XXX Make this work in node.
// Long filenames should not break stuff.
child.on('error', function (er) {
if (child._swallowErrors) {
self.warn(er)
child.emit('end')
child.emit('close')
} else {
self.emit('error', er)
}
})
// we fire _end internally *after* end, so that we don't move on
// until any "end" listeners have had their chance to do stuff.
child.on('close', onend)
var ended = false
function onend () {
if (ended) return
ended = true
// console.error("* DW Child end", child.basename)
self._currentChild = null
self._processing = false
self._process()
}
}
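
The contract in the file's header comment, `add()` returning false until a `'drain'` event, is exactly what `Reader.prototype.pipe` automates; driving a DirWriter by hand looks roughly like this (paths hypothetical):

```js
var fstream = require('fstream')

var w = fstream.Writer({ path: 'out-dir', type: 'Directory' })
var r = fstream.Reader({ path: 'in-dir', type: 'Directory' })

r.on('entry', function (entry) {
  // add() returns false while the writer isn't ready or is busy
  // with another entry; pause the reader and wait for 'drain'.
  if (w.add(entry) === false) {
    r.pause()
    w.once('drain', function () { r.resume() })
  }
})
r.on('end', function () { w.end() })
```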

150
node_modules/unzipper/node_modules/fstream/lib/file-reader.js generated vendored Normal file
View File

@@ -0,0 +1,150 @@
// Basically just a wrapper around an fs.ReadStream
module.exports = FileReader
var fs = require('graceful-fs')
var inherits = require('inherits')
var Reader = require('./reader.js')
var EOF = {EOF: true}
var CLOSE = {CLOSE: true}
inherits(FileReader, Reader)
function FileReader (props) {
// console.error(" FR create", props.path, props.size, new Error().stack)
var self = this
if (!(self instanceof FileReader)) {
throw new Error('FileReader must be called as constructor.')
}
// should already be established as a File type
// XXX Todo: preserve hardlinks by tracking dev+inode+nlink,
// with a HardLinkReader class.
if (!((props.type === 'Link' && props.Link) ||
(props.type === 'File' && props.File))) {
throw new Error('Non-file type ' + props.type)
}
self._buffer = []
self._bytesEmitted = 0
Reader.call(self, props)
}
FileReader.prototype._getStream = function () {
var self = this
var stream = self._stream = fs.createReadStream(self._path, self.props)
if (self.props.blksize) {
stream.bufferSize = self.props.blksize
}
stream.on('open', self.emit.bind(self, 'open'))
stream.on('data', function (c) {
// console.error('\t\t%d %s', c.length, self.basename)
self._bytesEmitted += c.length
// no point saving empty chunks
if (!c.length) {
return
} else if (self._paused || self._buffer.length) {
self._buffer.push(c)
self._read()
} else self.emit('data', c)
})
stream.on('end', function () {
if (self._paused || self._buffer.length) {
// console.error('FR Buffering End', self._path)
self._buffer.push(EOF)
self._read()
} else {
self.emit('end')
}
if (self._bytesEmitted !== self.props.size) {
self.error("Didn't get expected byte count\n" +
'expect: ' + self.props.size + '\n' +
'actual: ' + self._bytesEmitted)
}
})
stream.on('close', function () {
if (self._paused || self._buffer.length) {
// console.error('FR Buffering Close', self._path)
self._buffer.push(CLOSE)
self._read()
} else {
// console.error('FR close 1', self._path)
self.emit('close')
}
})
stream.on('error', function (e) {
self.emit('error', e)
})
self._read()
}
FileReader.prototype._read = function () {
var self = this
// console.error('FR _read', self._path)
if (self._paused) {
// console.error('FR _read paused', self._path)
return
}
if (!self._stream) {
// console.error('FR _getStream calling', self._path)
return self._getStream()
}
// clear out the buffer, if there is one.
if (self._buffer.length) {
// console.error('FR _read has buffer', self._buffer.length, self._path)
var buf = self._buffer
for (var i = 0, l = buf.length; i < l; i++) {
var c = buf[i]
if (c === EOF) {
// console.error('FR Read emitting buffered end', self._path)
self.emit('end')
} else if (c === CLOSE) {
// console.error('FR Read emitting buffered close', self._path)
self.emit('close')
} else {
// console.error('FR Read emitting buffered data', self._path)
self.emit('data', c)
}
if (self._paused) {
// console.error('FR Read Re-pausing at '+i, self._path)
self._buffer = buf.slice(i + 1) // chunk i was already emitted above
return
}
}
self._buffer.length = 0
}
// console.error("FR _read done")
// that's about all there is to it.
}
FileReader.prototype.pause = function (who) {
var self = this
// console.error('FR Pause', self._path)
if (self._paused) return
who = who || self
self._paused = true
if (self._stream) self._stream.pause()
self.emit('pause', who)
}
FileReader.prototype.resume = function (who) {
var self = this
// console.error('FR Resume', self._path)
if (!self._paused) return
who = who || self
self.emit('resume', who)
self._paused = false
if (self._stream) self._stream.resume()
self._read()
}

107
node_modules/unzipper/node_modules/fstream/lib/file-writer.js generated vendored Normal file
View File

@@ -0,0 +1,107 @@
module.exports = FileWriter
var fs = require('graceful-fs')
var Writer = require('./writer.js')
var inherits = require('inherits')
var EOF = {}
inherits(FileWriter, Writer)
function FileWriter (props) {
var self = this
if (!(self instanceof FileWriter)) {
throw new Error('FileWriter must be called as constructor.')
}
// should already be established as a File type
if (props.type !== 'File' || !props.File) {
throw new Error('Non-file type ' + props.type)
}
self._buffer = []
self._bytesWritten = 0
Writer.call(this, props)
}
FileWriter.prototype._create = function () {
var self = this
if (self._stream) return
var so = {}
if (self.props.flags) so.flags = self.props.flags
so.mode = Writer.filemode
if (self._old && self._old.blksize) so.bufferSize = self._old.blksize
self._stream = fs.createWriteStream(self._path, so)
self._stream.on('open', function () {
// console.error("FW open", self._buffer, self._path)
self.ready = true
self._buffer.forEach(function (c) {
if (c === EOF) self._stream.end()
else self._stream.write(c)
})
self.emit('ready')
// give this a kick just in case it needs it.
self.emit('drain')
})
self._stream.on('error', function (er) { self.emit('error', er) })
self._stream.on('drain', function () { self.emit('drain') })
self._stream.on('close', function () {
// console.error('\n\nFW Stream Close', self._path, self.size)
self._finish()
})
}
FileWriter.prototype.write = function (c) {
var self = this
self._bytesWritten += c.length
if (!self.ready) {
if (!Buffer.isBuffer(c) && typeof c !== 'string') {
throw new Error('invalid write data')
}
self._buffer.push(c)
return false
}
var ret = self._stream.write(c)
// console.error('\t-- fw wrote, _stream says', ret, self._stream._queue.length)
// allow 2 buffered writes, because otherwise there's just too
// much stop and go bs.
if (ret === false && self._stream._queue) {
return self._stream._queue.length <= 2
} else {
return ret
}
}
FileWriter.prototype.end = function (c) {
var self = this
if (c) self.write(c)
if (!self.ready) {
self._buffer.push(EOF)
return false
}
return self._stream.end()
}
FileWriter.prototype._finish = function () {
var self = this
if (typeof self.size === 'number' && self._bytesWritten !== self.size) {
self.error(
'Did not get expected byte count.\n' +
'expect: ' + self.size + '\n' +
'actual: ' + self._bytesWritten)
}
Writer.prototype._finish.call(self)
}
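
Because `FileWriter` buffers `write()` calls (and the EOF sentinel) until the underlying `fs.WriteStream` opens, it is safe to start writing immediately after construction; a minimal sketch, path hypothetical:

```js
var fstream = require('fstream')

var fw = fstream.Writer({ path: 'out.txt', type: 'File' })
fw.write('hello ')   // buffered until the 'open' event fires
fw.write('world\n')
fw.end()             // the EOF sentinel is queued the same way
fw.on('close', function () { console.log('flushed to disk') })
```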

33
node_modules/unzipper/node_modules/fstream/lib/get-type.js generated vendored Normal file
View File

@@ -0,0 +1,33 @@
module.exports = getType
function getType (st) {
var types = [
'Directory',
'File',
'SymbolicLink',
'Link', // special for hardlinks from tarballs
'BlockDevice',
'CharacterDevice',
'FIFO',
'Socket'
]
var type
if (st.type && types.indexOf(st.type) !== -1) {
st[st.type] = true
return st.type
}
for (var i = 0, l = types.length; i < l; i++) {
type = types[i]
var is = st[type] || st['is' + type]
if (typeof is === 'function') is = is.call(st)
if (is) {
st[type] = true
st.type = type
return type
}
}
return null
}
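
`getType` normalizes either a plain props object carrying a `type` field or an fs.Stats-like object with `isDirectory()`-style methods, and tags the object with the detected type. A quick sketch (assuming the file lives at `lib/get-type.js`, as the requires elsewhere suggest):

```js
var fs = require('fs')
var getType = require('fstream/lib/get-type.js')

var st = fs.lstatSync('.')
console.log(getType(st))                 // "Directory"; also sets st.Directory = true
console.log(getType({ type: 'File' }))   // "File", via the fast path at the top
```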

53
node_modules/unzipper/node_modules/fstream/lib/link-reader.js generated vendored Normal file
View File

@@ -0,0 +1,53 @@
// Basically just a wrapper around an fs.readlink
//
// XXX: Enhance this to support the Link type, by keeping
// a lookup table of {<dev+inode>:<path>}, so that hardlinks
// can be preserved in tarballs.
module.exports = LinkReader
var fs = require('graceful-fs')
var inherits = require('inherits')
var Reader = require('./reader.js')
inherits(LinkReader, Reader)
function LinkReader (props) {
var self = this
if (!(self instanceof LinkReader)) {
throw new Error('LinkReader must be called as constructor.')
}
if (!((props.type === 'Link' && props.Link) ||
(props.type === 'SymbolicLink' && props.SymbolicLink))) {
throw new Error('Non-link type ' + props.type)
}
Reader.call(self, props)
}
// When piping a LinkReader into a LinkWriter, we have to
// already have the linkpath property set, so that has to
// happen *before* the "ready" event, which means we need to
// override the _stat method.
LinkReader.prototype._stat = function (currentStat) {
var self = this
fs.readlink(self._path, function (er, linkpath) {
if (er) return self.error(er)
self.linkpath = self.props.linkpath = linkpath
self.emit('linkpath', linkpath)
Reader.prototype._stat.call(self, currentStat)
})
}
LinkReader.prototype._read = function () {
var self = this
if (self._paused) return
// basically just a no-op, since we got all the info we need
// from the _stat method
if (!self._ended) {
self.emit('end')
self.emit('close')
self._ended = true
}
}

95
node_modules/unzipper/node_modules/fstream/lib/link-writer.js generated vendored Normal file
View File

@@ -0,0 +1,95 @@
module.exports = LinkWriter
var fs = require('graceful-fs')
var Writer = require('./writer.js')
var inherits = require('inherits')
var path = require('path')
var rimraf = require('rimraf')
inherits(LinkWriter, Writer)
function LinkWriter (props) {
var self = this
if (!(self instanceof LinkWriter)) {
throw new Error('LinkWriter must be called as constructor.')
}
// should already be established as a Link type
if (!((props.type === 'Link' && props.Link) ||
(props.type === 'SymbolicLink' && props.SymbolicLink))) {
throw new Error('Non-link type ' + props.type)
}
if (props.linkpath === '') props.linkpath = '.'
if (!props.linkpath) {
self.error('Need linkpath property to create ' + props.type)
}
Writer.call(this, props)
}
LinkWriter.prototype._create = function () {
// console.error(" LW _create")
var self = this
var hard = self.type === 'Link' || process.platform === 'win32'
var link = hard ? 'link' : 'symlink'
var lp = hard ? path.resolve(self.dirname, self.linkpath) : self.linkpath
// can only change the link path by clobbering
// For hard links, let's just assume that's always the case, since
// there's no good way to read them if we don't already know.
if (hard) return clobber(self, lp, link)
fs.readlink(self._path, function (er, p) {
// only skip creation if it's exactly the same link
if (p && p === lp) return finish(self)
clobber(self, lp, link)
})
}
function clobber (self, lp, link) {
rimraf(self._path, function (er) {
if (er) return self.error(er)
create(self, lp, link)
})
}
function create (self, lp, link) {
fs[link](lp, self._path, function (er) {
// if this is a hard link, and we're in the process of writing out a
// directory, it's very possible that the thing we're linking to
// doesn't exist yet (especially if it was intended as a symlink),
// so swallow ENOENT errors here and just soldier on.
// Additionally, an EPERM or EACCES can happen on win32 if it's trying
// to make a link to a directory. Again, just skip it.
// A better solution would be to have fs.symlink be supported on
// windows in some nice fashion.
if (er) {
if ((er.code === 'ENOENT' ||
er.code === 'EACCES' ||
er.code === 'EPERM') && process.platform === 'win32') {
self.ready = true
self.emit('ready')
self.emit('end')
self.emit('close')
self.end = self._finish = function () {}
} else return self.error(er)
}
finish(self)
})
}
function finish (self) {
self.ready = true
self.emit('ready')
if (self._ended && !self._finished) self._finish()
}
LinkWriter.prototype.end = function () {
// console.error("LW finish in end")
this._ended = true
if (this.ready) {
this._finished = true
this._finish()
}
}
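
For hard links (type `'Link'`), `_create` resolves `linkpath` against the writer's `dirname` and always clobbers, since an existing hard link cannot be inspected cheaply. A sketch, paths hypothetical:

```js
var fstream = require('fstream')

fstream
  .Writer({
    path: 'hardlink-to-file',
    linkpath: './file',  // resolved against dirname because this is a hard link
    type: 'Link'
  })
  .on('close', function () { console.log('hard link created') })
  .end()
```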

95
node_modules/unzipper/node_modules/fstream/lib/proxy-reader.js generated vendored Normal file
View File

@@ -0,0 +1,95 @@
// A reader for when we don't yet know what kind of thing
// the thing is.
module.exports = ProxyReader
var Reader = require('./reader.js')
var getType = require('./get-type.js')
var inherits = require('inherits')
var fs = require('graceful-fs')
inherits(ProxyReader, Reader)
function ProxyReader (props) {
var self = this
if (!(self instanceof ProxyReader)) {
throw new Error('ProxyReader must be called as constructor.')
}
self.props = props
self._buffer = []
self.ready = false
Reader.call(self, props)
}
ProxyReader.prototype._stat = function () {
var self = this
var props = self.props
// stat the thing to see what the proxy should be.
var stat = props.follow ? 'stat' : 'lstat'
fs[stat](props.path, function (er, current) {
var type
if (er || !current) {
type = 'File'
} else {
type = getType(current)
}
props[type] = true
props.type = self.type = type
self._old = current
self._addProxy(Reader(props, current))
})
}
ProxyReader.prototype._addProxy = function (proxy) {
var self = this
if (self._proxyTarget) {
return self.error('proxy already set')
}
self._proxyTarget = proxy
proxy._proxy = self
;[
'error',
'data',
'end',
'close',
'linkpath',
'entry',
'entryEnd',
'child',
'childEnd',
'warn',
'stat'
].forEach(function (ev) {
// console.error('~~ proxy event', ev, self.path)
proxy.on(ev, self.emit.bind(self, ev))
})
self.emit('proxy', proxy)
proxy.on('ready', function () {
// console.error("~~ proxy is ready!", self.path)
self.ready = true
self.emit('ready')
})
// copy before clearing: "calls" would otherwise alias the emptied array
var calls = self._buffer.slice()
self._buffer.length = 0
calls.forEach(function (c) {
proxy[c[0]].apply(proxy, c[1])
})
}
ProxyReader.prototype.pause = function () {
return this._proxyTarget ? this._proxyTarget.pause() : false
}
ProxyReader.prototype.resume = function () {
return this._proxyTarget ? this._proxyTarget.resume() : false
}

111
node_modules/unzipper/node_modules/fstream/lib/proxy-writer.js generated vendored Normal file
View File

@@ -0,0 +1,111 @@
// A writer for when we don't know what kind of thing
// the thing is. That is, it's not explicitly set,
// so we're going to make it whatever the thing already
// is, or "File"
//
// Until then, collect all events.
module.exports = ProxyWriter
var Writer = require('./writer.js')
var getType = require('./get-type.js')
var inherits = require('inherits')
var collect = require('./collect.js')
var fs = require('fs')
inherits(ProxyWriter, Writer)
function ProxyWriter (props) {
var self = this
if (!(self instanceof ProxyWriter)) {
throw new Error('ProxyWriter must be called as constructor.')
}
self.props = props
self._needDrain = false
Writer.call(self, props)
}
ProxyWriter.prototype._stat = function () {
var self = this
var props = self.props
// stat the thing to see what the proxy should be.
var stat = props.follow ? 'stat' : 'lstat'
fs[stat](props.path, function (er, current) {
var type
if (er || !current) {
type = 'File'
} else {
type = getType(current)
}
props[type] = true
props.type = self.type = type
self._old = current
self._addProxy(Writer(props, current))
})
}
ProxyWriter.prototype._addProxy = function (proxy) {
// console.error("~~ set proxy", this.path)
var self = this
if (self._proxy) {
return self.error('proxy already set')
}
self._proxy = proxy
;[
'ready',
'error',
'close',
'pipe',
'drain',
'warn'
].forEach(function (ev) {
proxy.on(ev, self.emit.bind(self, ev))
})
self.emit('proxy', proxy)
var calls = self._buffer
calls.forEach(function (c) {
// console.error("~~ ~~ proxy buffered call", c[0], c[1])
proxy[c[0]].apply(proxy, c[1])
})
self._buffer.length = 0
if (self._needDrain) self.emit('drain')
}
ProxyWriter.prototype.add = function (entry) {
// console.error("~~ proxy add")
collect(entry)
if (!this._proxy) {
this._buffer.push(['add', [entry]])
this._needDrain = true
return false
}
return this._proxy.add(entry)
}
ProxyWriter.prototype.write = function (c) {
// console.error('~~ proxy write')
if (!this._proxy) {
this._buffer.push(['write', [c]])
this._needDrain = true
return false
}
return this._proxy.write(c)
}
ProxyWriter.prototype.end = function (c) {
// console.error('~~ proxy end')
if (!this._proxy) {
this._buffer.push(['end', [c]])
return false
}
return this._proxy.end(c)
}

255
node_modules/unzipper/node_modules/fstream/lib/reader.js generated vendored Normal file
View File

@@ -0,0 +1,255 @@
module.exports = Reader
var fs = require('graceful-fs')
var Stream = require('stream').Stream
var inherits = require('inherits')
var path = require('path')
var getType = require('./get-type.js')
var hardLinks = Reader.hardLinks = {}
var Abstract = require('./abstract.js')
// Must do this *before* loading the child classes
inherits(Reader, Abstract)
var LinkReader = require('./link-reader.js')
function Reader (props, currentStat) {
var self = this
if (!(self instanceof Reader)) return new Reader(props, currentStat)
if (typeof props === 'string') {
props = { path: props }
}
// polymorphism.
// call fstream.Reader(dir) to get a DirReader object, etc.
// Note that, unlike in the Writer case, ProxyReader is going
// to be the *normal* state of affairs, since we rarely know
// the type of a file prior to reading it.
var type
var ClassType
if (props.type && typeof props.type === 'function') {
type = props.type
ClassType = type
} else {
type = getType(props)
ClassType = Reader
}
if (currentStat && !type) {
type = getType(currentStat)
props[type] = true
props.type = type
}
switch (type) {
case 'Directory':
ClassType = require('./dir-reader.js')
break
case 'Link':
// XXX hard links are just files.
// However, it would be good to keep track of files' dev+inode
// and nlink values, and create a HardLinkReader that emits
// a linkpath value of the original copy, so that the tar
// writer can preserve them.
// ClassType = HardLinkReader
// break
case 'File':
ClassType = require('./file-reader.js')
break
case 'SymbolicLink':
ClassType = LinkReader
break
case 'Socket':
ClassType = require('./socket-reader.js')
break
case null:
ClassType = require('./proxy-reader.js')
break
}
if (!(self instanceof ClassType)) {
return new ClassType(props)
}
Abstract.call(self)
if (!props.path) {
self.error('Must provide a path', null, true)
}
self.readable = true
self.writable = false
self.type = type
self.props = props
self.depth = props.depth = props.depth || 0
self.parent = props.parent || null
self.root = props.root || (props.parent && props.parent.root) || self
self._path = self.path = path.resolve(props.path)
if (process.platform === 'win32') {
self.path = self._path = self.path.replace(/\?/g, '_')
if (self._path.length >= 260) {
// how DOES one create files on the moon?
// if the path has spaces in it, then UNC will fail.
self._swallowErrors = true
// if (self._path.indexOf(" ") === -1) {
self._path = '\\\\?\\' + self.path.replace(/\//g, '\\')
// }
}
}
self.basename = props.basename = path.basename(self.path)
self.dirname = props.dirname = path.dirname(self.path)
// these have served their purpose, and are now just noisy clutter
props.parent = props.root = null
// console.error("\n\n\n%s setting size to", props.path, props.size)
self.size = props.size
self.filter = typeof props.filter === 'function' ? props.filter : null
if (props.sort === 'alpha') props.sort = alphasort
// start the ball rolling.
// this will stat the thing, and then call self._read()
// to start reading whatever it is.
// console.error("calling stat", props.path, currentStat)
self._stat(currentStat)
}
function alphasort (a, b) {
return a === b ? 0
: a.toLowerCase() > b.toLowerCase() ? 1
: a.toLowerCase() < b.toLowerCase() ? -1
: a > b ? 1
: -1
}
Reader.prototype._stat = function (currentStat) {
var self = this
var props = self.props
var stat = props.follow ? 'stat' : 'lstat'
// console.error("Reader._stat", self._path, currentStat)
if (currentStat) process.nextTick(statCb.bind(null, null, currentStat))
else fs[stat](self._path, statCb)
function statCb (er, props_) {
// console.error("Reader._stat, statCb", self._path, props_, props_.nlink)
if (er) return self.error(er)
Object.keys(props_).forEach(function (k) {
props[k] = props_[k]
})
// if it's not the expected size, then abort here.
if (undefined !== self.size && props.size !== self.size) {
return self.error('incorrect size')
}
self.size = props.size
var type = getType(props)
var handleHardlinks = props.hardlinks !== false
// special little thing for handling hardlinks.
if (handleHardlinks && type !== 'Directory' && props.nlink && props.nlink > 1) {
var k = props.dev + ':' + props.ino
// console.error("Reader has nlink", self._path, k)
if (hardLinks[k] === self._path || !hardLinks[k]) {
hardLinks[k] = self._path
} else {
// switch into hardlink mode.
type = self.type = self.props.type = 'Link'
self.Link = self.props.Link = true
self.linkpath = self.props.linkpath = hardLinks[k]
// console.error("Hardlink detected, switching mode", self._path, self.linkpath)
// Setting __proto__ would arguably be the "correct"
// approach here, but that just seems too wrong.
self._stat = self._read = LinkReader.prototype._read
}
}
if (self.type && self.type !== type) {
self.error('Unexpected type: ' + type)
}
// if the filter doesn't pass, then just skip over this one.
// still have to emit end so that dir-walking can move on.
if (self.filter) {
var who = self._proxy || self
// special handling for ProxyReaders
if (!self.filter.call(who, who, props)) {
if (!self._disowned) {
self.abort()
self.emit('end')
self.emit('close')
}
return
}
}
// last chance to abort or disown before the flow starts!
var events = ['_stat', 'stat', 'ready']
var e = 0
;(function go () {
if (self._aborted) {
self.emit('end')
self.emit('close')
return
}
if (self._paused && self.type !== 'Directory') {
self.once('resume', go)
return
}
var ev = events[e++]
if (!ev) {
return self._read()
}
self.emit(ev, props)
go()
})()
}
}
Reader.prototype.pipe = function (dest) {
var self = this
if (typeof dest.add === 'function') {
// piping to a multi-compatible, and we've got directory entries.
self.on('entry', function (entry) {
var ret = dest.add(entry)
if (ret === false) {
self.pause()
}
})
}
// console.error("R Pipe apply Stream Pipe")
return Stream.prototype.pipe.apply(this, arguments)
}
Reader.prototype.pause = function (who) {
this._paused = true
who = who || this
this.emit('pause', who)
if (this._stream) this._stream.pause(who)
}
Reader.prototype.resume = function (who) {
this._paused = false
who = who || this
this.emit('resume', who)
if (this._stream) this._stream.resume(who)
this._read()
}
Reader.prototype._read = function () {
this.error('Cannot read unknown type: ' + this.type)
}
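
The filter hook in `_stat` is invoked with the entry itself as `this` (or the ProxyReader wrapping it), so entries can be skipped by name, size, or any stat property; a skipped entry is aborted and still emits end/close so the directory walk moves on. A sketch that skips dotfiles (directory path hypothetical):

```js
var fstream = require('fstream')

fstream
  .Reader({
    path: 'some-dir',
    filter: function () {
      // "this" is the entry being considered; children inherit
      // the filter via DirReader.getChildProps.
      return this.basename.charAt(0) !== '.'
    }
  })
  .on('entry', function (entry) {
    console.log(entry.type, entry.path)
  })
```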

36
node_modules/unzipper/node_modules/fstream/lib/socket-reader.js generated vendored Normal file
View File

@@ -0,0 +1,36 @@
// Just get the stats, and then don't do anything.
// You can't really "read" from a socket. You "connect" to it.
// Mostly, this is here so that reading a dir with a socket in it
// doesn't blow up.
module.exports = SocketReader
var inherits = require('inherits')
var Reader = require('./reader.js')
inherits(SocketReader, Reader)
function SocketReader (props) {
var self = this
if (!(self instanceof SocketReader)) {
throw new Error('SocketReader must be called as constructor.')
}
if (!(props.type === 'Socket' && props.Socket)) {
throw new Error('Non-socket type ' + props.type)
}
Reader.call(self, props)
}
SocketReader.prototype._read = function () {
var self = this
if (self._paused) return
// basically just a no-op, since we got all the info we have
// from the _stat method
if (!self._ended) {
self.emit('end')
self.emit('close')
self._ended = true
}
}

390
node_modules/unzipper/node_modules/fstream/lib/writer.js generated vendored Normal file
View File

@@ -0,0 +1,390 @@
module.exports = Writer
var fs = require('graceful-fs')
var inherits = require('inherits')
var rimraf = require('rimraf')
var mkdir = require('mkdirp')
var path = require('path')
var umask = process.platform === 'win32' ? 0 : process.umask()
var getType = require('./get-type.js')
var Abstract = require('./abstract.js')
// Must do this *before* loading the child classes
inherits(Writer, Abstract)
Writer.dirmode = parseInt('0777', 8) & (~umask)
Writer.filemode = parseInt('0666', 8) & (~umask)
var DirWriter = require('./dir-writer.js')
var LinkWriter = require('./link-writer.js')
var FileWriter = require('./file-writer.js')
var ProxyWriter = require('./proxy-writer.js')
// props is the desired state. current is optionally the current stat,
// provided here so that subclasses can avoid statting the target
// more than necessary.
function Writer (props, current) {
var self = this
if (typeof props === 'string') {
props = { path: props }
}
// polymorphism.
// call fstream.Writer(dir) to get a DirWriter object, etc.
var type = getType(props)
var ClassType = Writer
switch (type) {
case 'Directory':
ClassType = DirWriter
break
case 'File':
ClassType = FileWriter
break
case 'Link':
case 'SymbolicLink':
ClassType = LinkWriter
break
case null:
default:
// Don't know yet what type to create, so we wrap in a proxy.
ClassType = ProxyWriter
break
}
if (!(self instanceof ClassType)) return new ClassType(props)
// now get down to business.
Abstract.call(self)
if (!props.path) self.error('Must provide a path', null, true)
// props is what we want to set.
// set some convenience properties as well.
self.type = props.type
self.props = props
self.depth = props.depth || 0
self.clobber = props.clobber === false ? props.clobber : true
self.parent = props.parent || null
self.root = props.root || (props.parent && props.parent.root) || self
self._path = self.path = path.resolve(props.path)
if (process.platform === 'win32') {
self.path = self._path = self.path.replace(/\?/g, '_')
if (self._path.length >= 260) {
self._swallowErrors = true
self._path = '\\\\?\\' + self.path.replace(/\//g, '\\')
}
}
self.basename = path.basename(props.path)
self.dirname = path.dirname(props.path)
self.linkpath = props.linkpath || null
props.parent = props.root = null
// console.error("\n\n\n%s setting size to", props.path, props.size)
self.size = props.size
if (typeof props.mode === 'string') {
props.mode = parseInt(props.mode, 8)
}
self.readable = false
self.writable = true
// buffer until ready, or while handling another entry
self._buffer = []
self.ready = false
self.filter = typeof props.filter === 'function' ? props.filter : null
// start the ball rolling.
// this checks what's there already, and then calls
// self._create() to call the impl-specific creation stuff.
self._stat(current)
}
// Calling this means that it's something we can't create.
// Just assert that it's already there, otherwise raise a warning.
Writer.prototype._create = function () {
var self = this
fs[self.props.follow ? 'stat' : 'lstat'](self._path, function (er) {
if (er) {
return self.warn('Cannot create ' + self._path + '\n' +
'Unsupported type: ' + self.type, 'ENOTSUP')
}
self._finish()
})
}
Writer.prototype._stat = function (current) {
var self = this
var props = self.props
var stat = props.follow ? 'stat' : 'lstat'
var who = self._proxy || self
if (current) statCb(null, current)
else fs[stat](self._path, statCb)
function statCb (er, current) {
if (self.filter && !self.filter.call(who, who, current)) {
self._aborted = true
self.emit('end')
self.emit('close')
return
}
// if it's not there, great. We'll just create it.
// if it is there, then we'll need to change whatever differs
if (er || !current) {
return create(self)
}
self._old = current
var currentType = getType(current)
// if it's a type change, then we need to clobber or error.
// if it's not a type change, then let the impl take care of it.
if (currentType !== self.type || self.type === 'File' && current.nlink > 1) {
return rimraf(self._path, function (er) {
if (er) return self.error(er)
self._old = null
create(self)
})
}
// otherwise, just handle in the app-specific way
// this creates a fs.WriteStream, or mkdir's, or whatever
create(self)
}
}
function create (self) {
// console.error("W create", self._path, Writer.dirmode)
// XXX Need to clobber non-dirs that are in the way,
// unless { clobber: false } in the props.
mkdir(path.dirname(self._path), Writer.dirmode, function (er, made) {
// console.error("W created", path.dirname(self._path), er)
if (er) return self.error(er)
// later on, we have to set the mode and owner for these
self._madeDir = made
return self._create()
})
}
function endChmod (self, want, current, path, cb) {
var wantMode = want.mode
var chmod = want.follow || self.type !== 'SymbolicLink'
? 'chmod' : 'lchmod'
if (!fs[chmod]) return cb()
if (typeof wantMode !== 'number') return cb()
var curMode = current.mode & parseInt('0777', 8)
wantMode = wantMode & parseInt('0777', 8)
if (wantMode === curMode) return cb()
fs[chmod](path, wantMode, cb)
}
function endChown (self, want, current, path, cb) {
// Don't even try it unless root. Too easy to EPERM.
if (process.platform === 'win32') return cb()
if (!process.getuid || process.getuid() !== 0) return cb()
if (typeof want.uid !== 'number' &&
typeof want.gid !== 'number') return cb()
if (current.uid === want.uid &&
current.gid === want.gid) return cb()
var chown = (self.props.follow || self.type !== 'SymbolicLink')
? 'chown' : 'lchown'
if (!fs[chown]) return cb()
if (typeof want.uid !== 'number') want.uid = current.uid
if (typeof want.gid !== 'number') want.gid = current.gid
fs[chown](path, want.uid, want.gid, cb)
}
function endUtimes (self, want, current, path, cb) {
if (!fs.utimes || process.platform === 'win32') return cb()
var utimes = (want.follow || self.type !== 'SymbolicLink')
? 'utimes' : 'lutimes'
if (utimes === 'lutimes' && !fs[utimes]) {
utimes = 'utimes'
}
if (!fs[utimes]) return cb()
var curA = current.atime
var curM = current.mtime
var meA = want.atime
var meM = want.mtime
if (meA === undefined) meA = curA
if (meM === undefined) meM = curM
if (!isDate(meA)) meA = new Date(meA)
if (!isDate(meM)) meM = new Date(meM)
if (meA.getTime() === curA.getTime() &&
meM.getTime() === curM.getTime()) return cb()
fs[utimes](path, meA, meM, cb)
}
// XXX This function is beastly. Break it up!
Writer.prototype._finish = function () {
var self = this
if (self._finishing) return
self._finishing = true
// console.error(" W Finish", self._path, self.size)
// set up all the things.
// At this point, we're already done writing whatever we've gotta write,
// adding files to the dir, etc.
var todo = 0
var errState = null
var done = false
if (self._old) {
// the times will almost *certainly* have changed.
// adds the utimes syscall, but remove another stat.
self._old.atime = new Date(0)
self._old.mtime = new Date(0)
// console.error(" W Finish Stale Stat", self._path, self.size)
setProps(self._old)
} else {
var stat = self.props.follow ? 'stat' : 'lstat'
// console.error(" W Finish Stating", self._path, self.size)
fs[stat](self._path, function (er, current) {
// console.error(" W Finish Stated", self._path, self.size, current)
if (er) {
// if we're in the process of writing out a
// directory, it's very possible that the thing we're linking to
// doesn't exist yet (especially if it was intended as a symlink),
// so swallow ENOENT errors here and just soldier on.
if (er.code === 'ENOENT' &&
(self.type === 'Link' || self.type === 'SymbolicLink') &&
process.platform === 'win32') {
self.ready = true
self.emit('ready')
self.emit('end')
self.emit('close')
self.end = self._finish = function () {}
return
} else return self.error(er)
}
setProps(self._old = current)
})
}
return
function setProps (current) {
todo += 3
endChmod(self, self.props, current, self._path, next('chmod'))
endChown(self, self.props, current, self._path, next('chown'))
endUtimes(self, self.props, current, self._path, next('utimes'))
}
function next (what) {
return function (er) {
// console.error(" W Finish", what, todo)
if (errState) return
if (er) {
er.fstream_finish_call = what
return self.error(errState = er)
}
if (--todo > 0) return
if (done) return
done = true
// we may still need to set the mode/etc. on some parent dirs
// that were created previously. delay end/close until then.
if (!self._madeDir) return end()
else endMadeDir(self, self._path, end)
function end (er) {
if (er) {
er.fstream_finish_call = 'setupMadeDir'
return self.error(er)
}
// all the props have been set, so we're completely done.
self.emit('end')
self.emit('close')
}
}
}
}
function endMadeDir (self, p, cb) {
var made = self._madeDir
// everything *between* made and path.dirname(self._path)
// needs to be set up. Note that this may just be one dir.
var d = path.dirname(p)
endMadeDir_(self, d, function (er) {
if (er) return cb(er)
if (d === made) {
return cb()
}
endMadeDir(self, d, cb)
})
}
function endMadeDir_ (self, p, cb) {
var dirProps = {}
Object.keys(self.props).forEach(function (k) {
dirProps[k] = self.props[k]
// only make non-readable dirs if explicitly requested.
if (k === 'mode' && self.type !== 'Directory') {
dirProps[k] = dirProps[k] | parseInt('0111', 8)
}
})
var todo = 3
var errState = null
fs.stat(p, function (er, current) {
if (er) return cb(errState = er)
endChmod(self, dirProps, current, p, next)
endChown(self, dirProps, current, p, next)
endUtimes(self, dirProps, current, p, next)
})
function next (er) {
if (errState) return
if (er) return cb(errState = er)
if (--todo === 0) return cb()
}
}
Writer.prototype.pipe = function () {
this.error("Can't pipe from writable stream")
}
Writer.prototype.add = function () {
this.error("Can't add to non-Directory type")
}
Writer.prototype.write = function () {
return true
}
function objectToString (d) {
return Object.prototype.toString.call(d)
}
function isDate (d) {
return typeof d === 'object' && objectToString(d) === '[object Date]'
}
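
After the writing itself, `_finish` reconciles mode, owner, and times against `props` via the endChmod/endChown/endUtimes trio above, so these can simply be passed in at construction; a sketch, path hypothetical:

```js
var fstream = require('fstream')

fstream
  .Writer({
    path: 'script.sh',
    type: 'File',
    mode: '0755'  // octal strings are parsed in the constructor
  })
  .on('close', function () { console.log('written and chmodded') })
  .end('#!/bin/sh\necho hi\n')
```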

64
node_modules/unzipper/node_modules/fstream/package.json generated vendored Normal file
View File

@@ -0,0 +1,64 @@
{
"_args": [
[
"fstream@1.0.12",
"/Users/paul/src/codeartisans/gradle-command-action"
]
],
"_from": "fstream@1.0.12",
"_id": "fstream@1.0.12",
"_inBundle": false,
"_integrity": "sha512-WvJ193OHa0GHPEL+AycEJgxvBEwyfRkN1vhjca23OaPVMCaLCXTd5qAu82AjTcgP1UJmytkOKb63Ypde7raDIg==",
"_location": "/unzipper/fstream",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "fstream@1.0.12",
"name": "fstream",
"escapedName": "fstream",
"rawSpec": "1.0.12",
"saveSpec": null,
"fetchSpec": "1.0.12"
},
"_requiredBy": [
"/unzipper"
],
"_resolved": "https://registry.npmjs.org/fstream/-/fstream-1.0.12.tgz",
"_spec": "1.0.12",
"_where": "/Users/paul/src/codeartisans/gradle-command-action",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
"url": "http://blog.izs.me/"
},
"bugs": {
"url": "https://github.com/npm/fstream/issues"
},
"dependencies": {
"graceful-fs": "^4.1.2",
"inherits": "~2.0.0",
"mkdirp": ">=0.5 0",
"rimraf": "2"
},
"description": "Advanced file system stream things",
"devDependencies": {
"standard": "^4.0.0",
"tap": "^1.2.0"
},
"engines": {
"node": ">=0.6"
},
"homepage": "https://github.com/npm/fstream#readme",
"license": "ISC",
"main": "fstream.js",
"name": "fstream",
"repository": {
"type": "git",
"url": "git+https://github.com/npm/fstream.git"
},
"scripts": {
"test": "standard && tap examples/*.js"
},
"version": "1.0.12"
}

55
node_modules/unzipper/node_modules/readable-stream/.travis.yml generated vendored Normal file
View File

@@ -0,0 +1,55 @@
sudo: false
language: node_js
before_install:
- npm install -g npm@2
- test $NPM_LEGACY && npm install -g npm@latest-3 || npm install npm -g
notifications:
email: false
matrix:
fast_finish: true
include:
- node_js: '0.8'
env:
- TASK=test
- NPM_LEGACY=true
- node_js: '0.10'
env:
- TASK=test
- NPM_LEGACY=true
- node_js: '0.11'
env:
- TASK=test
- NPM_LEGACY=true
- node_js: '0.12'
env:
- TASK=test
- NPM_LEGACY=true
- node_js: 1
env:
- TASK=test
- NPM_LEGACY=true
- node_js: 2
env:
- TASK=test
- NPM_LEGACY=true
- node_js: 3
env:
- TASK=test
- NPM_LEGACY=true
- node_js: 4
env: TASK=test
- node_js: 5
env: TASK=test
- node_js: 6
env: TASK=test
- node_js: 7
env: TASK=test
- node_js: 8
env: TASK=test
- node_js: 9
env: TASK=test
script: "npm run $TASK"
env:
global:
- secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc=
- secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI=

38
node_modules/unzipper/node_modules/readable-stream/CONTRIBUTING.md generated vendored Normal file
View File

@@ -0,0 +1,38 @@
# Developer's Certificate of Origin 1.1
By making a contribution to this project, I certify that:
* (a) The contribution was created in whole or in part by me and I
have the right to submit it under the open source license
indicated in the file; or
* (b) The contribution is based upon previous work that, to the best
of my knowledge, is covered under an appropriate open source
license and I have the right under that license to submit that
work with modifications, whether created in whole or in part
by me, under the same open source license (unless I am
permitted to submit under a different license), as indicated
in the file; or
* (c) The contribution was provided directly to me by some other
person who certified (a), (b) or (c) and I have not modified
it.
* (d) I understand and agree that this project and the contribution
are public and that a record of the contribution (including all
personal information I submit with it, including my sign-off) is
maintained indefinitely and may be redistributed consistent with
this project or the open source license(s) involved.
## Moderation Policy
The [Node.js Moderation Policy] applies to this WG.
## Code of Conduct
The [Node.js Code of Conduct][] applies to this WG.
[Node.js Code of Conduct]:
https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md
[Node.js Moderation Policy]:
https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md

136
node_modules/unzipper/node_modules/readable-stream/GOVERNANCE.md generated vendored Normal file
View File

@@ -0,0 +1,136 @@
### Streams Working Group
The Node.js Streams project is jointly governed by a Working Group (WG)
that is responsible for high-level guidance of the project.
The WG has final authority over this project including:
* Technical direction
* Project governance and process (including this policy)
* Contribution policy
* GitHub repository hosting
* Conduct guidelines
* Maintaining the list of additional Collaborators
For the current list of WG members, see the project
[README.md](./README.md#current-project-team-members).
### Collaborators
The readable-stream GitHub repository is
maintained by the WG and additional Collaborators who are added by the
WG on an ongoing basis.
Individuals making significant and valuable contributions are made
Collaborators and given commit-access to the project. These
individuals are identified by the WG and their addition as
Collaborators is discussed during the WG meeting.
_Note:_ If you make a significant contribution and are not considered
for commit-access, log an issue or contact a WG member directly and it
will be brought up in the next WG meeting.
Modifications of the contents of the readable-stream repository are
made on
a collaborative basis. Anybody with a GitHub account may propose a
modification via pull request and it will be considered by the project
Collaborators. All pull requests must be reviewed and accepted by a
Collaborator with sufficient expertise who is able to take full
responsibility for the change. In the case of pull requests proposed
by an existing Collaborator, an additional Collaborator is required
for sign-off. Consensus should be sought if additional Collaborators
participate and there is disagreement around a particular
modification. See _Consensus Seeking Process_ below for further detail
on the consensus model used for governance.
Collaborators may opt to elevate significant or controversial
modifications, or modifications that have not found consensus to the
WG for discussion by assigning the ***WG-agenda*** tag to a pull
request or issue. The WG should serve as the final arbiter where
required.
For the current list of Collaborators, see the project
[README.md](./README.md#members).
### WG Membership
WG seats are not time-limited. There is no fixed size of the WG.
However, the expected target is between 6 and 12, to ensure adequate
coverage of important areas of expertise, balanced with the ability to
make decisions efficiently.
There is no specific set of requirements or qualifications for WG
membership beyond these rules.
The WG may add additional members to the WG by unanimous consensus.
A WG member may be removed from the WG by voluntary resignation, or by
unanimous consensus of all other WG members.
Changes to WG membership should be posted in the agenda, and may be
suggested as any other agenda item (see "WG Meetings" below).
If an addition or removal is proposed during a meeting, and the full
WG is not in attendance to participate, then the addition or removal
is added to the agenda for the subsequent meeting. This is to ensure
that all members are given the opportunity to participate in all
membership decisions. If a WG member is unable to attend a meeting
where a planned membership decision is being made, then their consent
is assumed.
No more than 1/3 of the WG members may be affiliated with the same
employer. If removal or resignation of a WG member, or a change of
employment by a WG member, creates a situation where more than 1/3 of
the WG membership shares an employer, then the situation must be
immediately remedied by the resignation or removal of one or more WG
members affiliated with the over-represented employer(s).
### WG Meetings
The WG meets occasionally on a Google Hangout On Air. A designated moderator
approved by the WG runs the meeting. Each meeting should be
published to YouTube.
Items are added to the WG agenda that are considered contentious or
are modifications of governance, contribution policy, WG membership,
or release process.
The intention of the agenda is not to approve or review all patches;
that should happen continuously on GitHub and be handled by the larger
group of Collaborators.
Any community member or contributor can ask that something be added to
the next meeting's agenda by logging a GitHub Issue. Any Collaborator,
WG member or the moderator can add the item to the agenda by adding
the ***WG-agenda*** tag to the issue.
Prior to each WG meeting the moderator will share the Agenda with
members of the WG. WG members can add any items they like to the
agenda at the beginning of each meeting. The moderator and the WG
cannot veto or remove items.
The WG may invite persons or representatives from certain projects to
participate in a non-voting capacity.
The moderator is responsible for summarizing the discussion of each
agenda item and sends it as a pull request after the meeting.
### Consensus Seeking Process
The WG follows a
[Consensus
Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making)
decision-making model.
When an agenda item has appeared to reach a consensus the moderator
will ask "Does anyone object?" as a final call for dissent from the
consensus.
If an agenda item cannot reach a consensus a WG member can call for
either a closing vote or a vote to table the issue to the next
meeting. The call for a vote must be seconded by a majority of the WG
or else the discussion will continue. Simple majority wins.
Note that changes to WG membership require a majority consensus. See
"WG Membership" above.

47
node_modules/unzipper/node_modules/readable-stream/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,47 @@
Node.js is licensed for use as follows:
"""
Copyright Node.js contributors. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
"""
This license applies to parts of Node.js originating from the
https://github.com/joyent/node repository:
"""
Copyright Joyent, Inc. and other Node contributors. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
"""

View File

@@ -0,0 +1,58 @@
# readable-stream
***Node-core v8.11.1 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream)
[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/)
[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/)
[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream)
```bash
npm install --save readable-stream
```
***Node-core streams for userland***
This package is a mirror of the Streams2 and Streams3 implementations in
Node-core.
Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.11.1/docs/api/stream.html).
If you want to guarantee a stable streams base, regardless of what version of
Node you, or the users of your libraries, are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core; for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html).
As of version 2.0.0 **readable-stream** uses semantic versioning.
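For example, the swap is a one-line change to the `require` call. The sketch below is an editor's illustration, not part of the upstream docs; the constructor options mirror the core `stream` API:

```js
// Instead of: var Transform = require('stream').Transform;
var Transform = require('readable-stream').Transform;

// A trivial upper-casing transform, written exactly as it would be
// against core streams.
var upper = new Transform({
  transform: function (chunk, encoding, callback) {
    callback(null, chunk.toString().toUpperCase());
  }
});

process.stdin.pipe(upper).pipe(process.stdout);
```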
# Streams Working Group
`readable-stream` is maintained by the Streams Working Group, which
oversees the development and maintenance of the Streams API within
Node.js. The responsibilities of the Streams Working Group include:
* Addressing stream issues on the Node.js issue tracker.
* Authoring and editing stream documentation within the Node.js project.
* Reviewing changes to stream subclasses within the Node.js project.
* Redirecting changes to streams from the Node.js project to this
project.
* Assisting in the implementation of stream providers within Node.js.
* Recommending versions of `readable-stream` to be included in Node.js.
* Messaging about the future of streams to give the community advance
notice of changes.
<a name="members"></a>
## Team Members
* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) &lt;christopher.s.dickinson@gmail.com&gt;
- Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B
* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) &lt;calvin.metcalf@gmail.com&gt;
- Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242
* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) &lt;rod@vagg.org&gt;
- Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D
* **Sam Newman** ([@sonewman](https://github.com/sonewman)) &lt;newmansam@outlook.com&gt;
* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) &lt;mathiasbuus@gmail.com&gt;
* **Domenic Denicola** ([@domenic](https://github.com/domenic)) &lt;d@domenic.me&gt;
* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) &lt;matteo.collina@gmail.com&gt;
- Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E
* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) &lt;shestak.irina@gmail.com&gt;

View File

@@ -0,0 +1,60 @@
# streams WG Meeting 2015-01-30
## Links
* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg
* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106
* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/
## Agenda
Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting.
* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105)
* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101)
* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102)
* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99)
## Minutes
### adopt a charter
* group: +1's all around
### What versioning scheme should be adopted?
* group: +1s 3.0.0
* domenic+group: pulling in patches from other sources where appropriate
* mikeal: version independently, suggesting versions for io.js
* mikeal+domenic: work with TC to notify in advance of changes
### streamline creation of streams
* sam: streamline creation of streams
* domenic: nice simple solution posted
but, we lose the opportunity to change the model
may not be backwards incompatible (double check keys)
**action item:** domenic will check
### remove implicit flowing of streams on(data)
* add isFlowing / isPaused
* mikeal: worrying that we're documenting polyfill methods confuses users
* domenic: more reflective API is probably good, with warning labels for users
* new section for mad scientists (reflective stream access)
* calvin: name the “third state”
* mikeal: maybe borrow the name from whatwg?
* domenic: we're missing the “third state”
* consensus: kind of difficult to name the third state
* mikeal: figure out differences in states / compat
* mathias: always flow on data eliminates third state
* explore what it breaks
**action items:**
* ask isaac for ability to list packages by what public io.js APIs they use (esp. Stream)
* ask rod/build for infrastructure
* **chris**: explore the “flow on data” approach
* add isPaused/isFlowing
* add new docs section
* move isPaused to that section

View File

@@ -0,0 +1 @@
module.exports = require('./lib/_stream_duplex.js');

View File

@@ -0,0 +1 @@
module.exports = require('./readable').Duplex

View File

@@ -0,0 +1,131 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a duplex stream is just a stream that is both readable and writable.
// Since JS doesn't have multiple prototypal inheritance, this class
// prototypally inherits from Readable, and then parasitically from
// Writable.
'use strict';
/*<replacement>*/
var pna = require('process-nextick-args');
/*</replacement>*/
/*<replacement>*/
var objectKeys = Object.keys || function (obj) {
var keys = [];
for (var key in obj) {
keys.push(key);
}
return keys;
};
/*</replacement>*/
module.exports = Duplex;
/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/
var Readable = require('./_stream_readable');
var Writable = require('./_stream_writable');
util.inherits(Duplex, Readable);
{
// avoid scope creep, the keys array can then be collected
var keys = objectKeys(Writable.prototype);
for (var v = 0; v < keys.length; v++) {
var method = keys[v];
if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
}
}
function Duplex(options) {
if (!(this instanceof Duplex)) return new Duplex(options);
Readable.call(this, options);
Writable.call(this, options);
if (options && options.readable === false) this.readable = false;
if (options && options.writable === false) this.writable = false;
this.allowHalfOpen = true;
if (options && options.allowHalfOpen === false) this.allowHalfOpen = false;
this.once('end', onend);
}
Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', {
// making it explicit this property is not enumerable
// because otherwise some prototype manipulation in
// userland will fail
enumerable: false,
get: function () {
return this._writableState.highWaterMark;
}
});
// the no-half-open enforcer
function onend() {
// if we allow half-open state, or if the writable side ended,
// then we're ok.
if (this.allowHalfOpen || this._writableState.ended) return;
// no more data can be written.
// But allow more writes to happen in this tick.
pna.nextTick(onEndNT, this);
}
function onEndNT(self) {
self.end();
}
Object.defineProperty(Duplex.prototype, 'destroyed', {
get: function () {
if (this._readableState === undefined || this._writableState === undefined) {
return false;
}
return this._readableState.destroyed && this._writableState.destroyed;
},
set: function (value) {
// we ignore the value if the stream
// has not been initialized yet
if (this._readableState === undefined || this._writableState === undefined) {
return;
}
// backward compatibility, the user is explicitly
// managing destroyed
this._readableState.destroyed = value;
this._writableState.destroyed = value;
}
});
Duplex.prototype._destroy = function (err, cb) {
this.push(null);
this.end();
pna.nextTick(cb, err);
};

View File

@@ -0,0 +1,47 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a passthrough stream.
// basically just the most minimal sort of Transform stream.
// Every written chunk gets output as-is.
'use strict';
module.exports = PassThrough;
var Transform = require('./_stream_transform');
/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/
util.inherits(PassThrough, Transform);
function PassThrough(options) {
if (!(this instanceof PassThrough)) return new PassThrough(options);
Transform.call(this, options);
}
PassThrough.prototype._transform = function (chunk, encoding, cb) {
cb(null, chunk);
};

File diff suppressed because it is too large

View File

@@ -0,0 +1,214 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a transform stream is a readable/writable stream where you do
// something with the data. Sometimes it's called a "filter",
// but that's not a great name for it, since that implies a thing where
// some bits pass through, and others are simply ignored. (That would
// be a valid example of a transform, of course.)
//
// While the output is causally related to the input, it's not a
// necessarily symmetric or synchronous transformation. For example,
// a zlib stream might take multiple plain-text writes(), and then
// emit a single compressed chunk some time in the future.
//
// Here's how this works:
//
// The Transform stream has all the aspects of the readable and writable
// stream classes. When you write(chunk), that calls _write(chunk,cb)
// internally, and returns false if there's a lot of pending writes
// buffered up. When you call read(), that calls _read(n) until
// there's enough pending readable data buffered up.
//
// In a transform stream, the written data is placed in a buffer. When
// _read(n) is called, it transforms the queued up data, calling the
// buffered _write cb's as it consumes chunks. If consuming a single
// written chunk would result in multiple output chunks, then the first
// outputted bit calls the readcb, and subsequent chunks just go into
// the read buffer, and will cause it to emit 'readable' if necessary.
//
// This way, back-pressure is actually determined by the reading side,
// since _read has to be called to start processing a new chunk. However,
// a pathological inflate type of transform can cause excessive buffering
// here. For example, imagine a stream where every byte of input is
// interpreted as an integer from 0-255, and then results in that many
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
// 1kb of data being output. In this case, you could write a very small
// amount of input, and end up with a very large amount of output. In
// such a pathological inflating mechanism, there'd be no way to tell
// the system to stop doing the transform. A single 4MB write could
// cause the system to run out of memory.
//
// However, even in such a pathological case, only a single written chunk
// would be consumed, and then the rest would wait (un-transformed) until
// the results of the previous transformed chunk were consumed.
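// Illustrative sketch (editor's addition, not part of the original source):
// a Transform that doubles every chunk. `source` and `slowDest` are
// hypothetical streams; because _read() must be called before the next
// queued chunk is transformed, back-pressure from slowDest flows upstream.
//
//   var doubler = new Transform({
//     transform: function (chunk, encoding, cb) {
//       cb(null, Buffer.concat([chunk, chunk]));
//     }
//   });
//   source.pipe(doubler).pipe(slowDest);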
'use strict';
module.exports = Transform;
var Duplex = require('./_stream_duplex');
/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/
util.inherits(Transform, Duplex);
function afterTransform(er, data) {
var ts = this._transformState;
ts.transforming = false;
var cb = ts.writecb;
if (!cb) {
return this.emit('error', new Error('write callback called multiple times'));
}
ts.writechunk = null;
ts.writecb = null;
if (data != null) // single equals check for both `null` and `undefined`
this.push(data);
cb(er);
var rs = this._readableState;
rs.reading = false;
if (rs.needReadable || rs.length < rs.highWaterMark) {
this._read(rs.highWaterMark);
}
}
function Transform(options) {
if (!(this instanceof Transform)) return new Transform(options);
Duplex.call(this, options);
this._transformState = {
afterTransform: afterTransform.bind(this),
needTransform: false,
transforming: false,
writecb: null,
writechunk: null,
writeencoding: null
};
// start out asking for a readable event once data is transformed.
this._readableState.needReadable = true;
// we have implemented the _read method, and done the other things
// that Readable wants before the first _read call, so unset the
// sync guard flag.
this._readableState.sync = false;
if (options) {
if (typeof options.transform === 'function') this._transform = options.transform;
if (typeof options.flush === 'function') this._flush = options.flush;
}
// When the writable side finishes, then flush out anything remaining.
this.on('prefinish', prefinish);
}
function prefinish() {
var _this = this;
if (typeof this._flush === 'function') {
this._flush(function (er, data) {
done(_this, er, data);
});
} else {
done(this, null, null);
}
}
Transform.prototype.push = function (chunk, encoding) {
this._transformState.needTransform = false;
return Duplex.prototype.push.call(this, chunk, encoding);
};
// This is the part where you do stuff!
// override this function in implementation classes.
// 'chunk' is an input chunk.
//
// Call `push(newChunk)` to pass along transformed output
// to the readable side. You may call 'push' zero or more times.
//
// Call `cb(err)` when you are done with this chunk. If you pass
// an error, then that'll put the hurt on the whole operation. If you
// never call cb(), then you'll never get another chunk.
Transform.prototype._transform = function (chunk, encoding, cb) {
throw new Error('_transform() is not implemented');
};
Transform.prototype._write = function (chunk, encoding, cb) {
var ts = this._transformState;
ts.writecb = cb;
ts.writechunk = chunk;
ts.writeencoding = encoding;
if (!ts.transforming) {
var rs = this._readableState;
if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
}
};
// Doesn't matter what the args are here.
// _transform does all the work.
// That we got here means that the readable side wants more data.
Transform.prototype._read = function (n) {
var ts = this._transformState;
if (ts.writechunk !== null && ts.writecb && !ts.transforming) {
ts.transforming = true;
this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
} else {
// mark that we need a transform, so that any data that comes in
// will get processed, now that we've asked for it.
ts.needTransform = true;
}
};
Transform.prototype._destroy = function (err, cb) {
var _this2 = this;
Duplex.prototype._destroy.call(this, err, function (err2) {
cb(err2);
_this2.emit('close');
});
};
function done(stream, er, data) {
if (er) return stream.emit('error', er);
if (data != null) // single equals check for both `null` and `undefined`
stream.push(data);
// if there's nothing in the write buffer, then that means
// that nothing more will ever be provided
if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0');
if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming');
return stream.push(null);
}

View File

@@ -0,0 +1,687 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// A bit simpler than readable streams.
// Implement an async ._write(chunk, encoding, cb), and it'll handle all
// the drain event emission and buffering.
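// Illustrative sketch (editor's addition, not part of the original source):
// supplying only a write function is enough; buffering, the highWaterMark
// check and 'drain' emission are handled here. `storeChunkSomewhere` is a
// hypothetical async operation.
//
//   var sink = new Writable({
//     write: function (chunk, encoding, cb) {
//       storeChunkSomewhere(chunk, cb);
//     }
//   });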
'use strict';
/*<replacement>*/
var pna = require('process-nextick-args');
/*</replacement>*/
module.exports = Writable;
/* <replacement> */
function WriteReq(chunk, encoding, cb) {
this.chunk = chunk;
this.encoding = encoding;
this.callback = cb;
this.next = null;
}
// It looks like a linked list, but it is not:
// there will be only 2 of these for each stream
function CorkedRequest(state) {
var _this = this;
this.next = null;
this.entry = null;
this.finish = function () {
onCorkedFinish(_this, state);
};
}
/* </replacement> */
/*<replacement>*/
var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick;
/*</replacement>*/
/*<replacement>*/
var Duplex;
/*</replacement>*/
Writable.WritableState = WritableState;
/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/
/*<replacement>*/
var internalUtil = {
deprecate: require('util-deprecate')
};
/*</replacement>*/
/*<replacement>*/
var Stream = require('./internal/streams/stream');
/*</replacement>*/
/*<replacement>*/
var Buffer = require('safe-buffer').Buffer;
var OurUint8Array = global.Uint8Array || function () {};
function _uint8ArrayToBuffer(chunk) {
return Buffer.from(chunk);
}
function _isUint8Array(obj) {
return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
}
/*</replacement>*/
var destroyImpl = require('./internal/streams/destroy');
util.inherits(Writable, Stream);
function nop() {}
function WritableState(options, stream) {
Duplex = Duplex || require('./_stream_duplex');
options = options || {};
// Duplex streams are both readable and writable, but share
// the same options object.
// However, some cases require setting options to different
// values for the readable and the writable sides of the duplex stream.
// These options can be provided separately as readableXXX and writableXXX.
var isDuplex = stream instanceof Duplex;
// object stream flag to indicate whether or not this stream
// contains buffers or objects.
this.objectMode = !!options.objectMode;
if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode;
// the point at which write() starts returning false
// Note: 0 is a valid value, means that we always return false if
// the entire buffer is not flushed immediately on write()
var hwm = options.highWaterMark;
var writableHwm = options.writableHighWaterMark;
var defaultHwm = this.objectMode ? 16 : 16 * 1024;
if (hwm || hwm === 0) this.highWaterMark = hwm;
else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;
else this.highWaterMark = defaultHwm;
// cast to ints.
this.highWaterMark = Math.floor(this.highWaterMark);
// if _final has been called
this.finalCalled = false;
// drain event flag.
this.needDrain = false;
// at the start of calling end()
this.ending = false;
// when end() has been called, and returned
this.ended = false;
// when 'finish' is emitted
this.finished = false;
// has it been destroyed
this.destroyed = false;
// should we decode strings into buffers before passing to _write?
// this is here so that some node-core streams can optimize string
// handling at a lower level.
var noDecode = options.decodeStrings === false;
this.decodeStrings = !noDecode;
// Crypto is kind of old and crusty. Historically, its default string
// encoding is 'binary' so we have to make this configurable.
// Everything else in the universe uses 'utf8', though.
this.defaultEncoding = options.defaultEncoding || 'utf8';
// not an actual buffer we keep track of, but a measurement
// of how much we're waiting to get pushed to some underlying
// socket or file.
this.length = 0;
// a flag to see when we're in the middle of a write.
this.writing = false;
// when true all writes will be buffered until .uncork() call
this.corked = 0;
// a flag to be able to tell if the onwrite cb is called immediately,
// or on a later tick. We set this to true at first, because any
// actions that shouldn't happen until "later" should generally also
// not happen before the first write call.
this.sync = true;
// a flag to know if we're processing previously buffered items, which
// may call the _write() callback in the same tick, so that we don't
// end up in an overlapped onwrite situation.
this.bufferProcessing = false;
// the callback that's passed to _write(chunk,cb)
this.onwrite = function (er) {
onwrite(stream, er);
};
// the callback that the user supplies to write(chunk,encoding,cb)
this.writecb = null;
// the amount that is being written when _write is called.
this.writelen = 0;
this.bufferedRequest = null;
this.lastBufferedRequest = null;
// number of pending user-supplied write callbacks
// this must be 0 before 'finish' can be emitted
this.pendingcb = 0;
// emit prefinish if the only thing we're waiting for is _write cbs
// This is relevant for synchronous Transform streams
this.prefinished = false;
// True if the error was already emitted and should not be thrown again
this.errorEmitted = false;
// count buffered requests
this.bufferedRequestCount = 0;
// allocate the first CorkedRequest, there is always
// one allocated and free to use, and we maintain at most two
this.corkedRequestsFree = new CorkedRequest(this);
}
WritableState.prototype.getBuffer = function getBuffer() {
var current = this.bufferedRequest;
var out = [];
while (current) {
out.push(current);
current = current.next;
}
return out;
};
(function () {
try {
Object.defineProperty(WritableState.prototype, 'buffer', {
get: internalUtil.deprecate(function () {
return this.getBuffer();
}, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')
});
} catch (_) {}
})();
// Test _writableState for inheritance to account for Duplex streams,
// whose prototype chain only points to Readable.
var realHasInstance;
if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {
realHasInstance = Function.prototype[Symbol.hasInstance];
Object.defineProperty(Writable, Symbol.hasInstance, {
value: function (object) {
if (realHasInstance.call(this, object)) return true;
if (this !== Writable) return false;
return object && object._writableState instanceof WritableState;
}
});
} else {
realHasInstance = function (object) {
return object instanceof this;
};
}
function Writable(options) {
Duplex = Duplex || require('./_stream_duplex');
// Writable ctor is applied to Duplexes, too.
// `realHasInstance` is necessary because using plain `instanceof`
// would return false, as no `_writableState` property is attached.
// Trying to use the custom `instanceof` for Writable here will also break the
// Node.js LazyTransform implementation, which has a non-trivial getter for
// `_writableState` that would lead to infinite recursion.
if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) {
return new Writable(options);
}
this._writableState = new WritableState(options, this);
// legacy.
this.writable = true;
if (options) {
if (typeof options.write === 'function') this._write = options.write;
if (typeof options.writev === 'function') this._writev = options.writev;
if (typeof options.destroy === 'function') this._destroy = options.destroy;
if (typeof options.final === 'function') this._final = options.final;
}
Stream.call(this);
}
// Otherwise people can pipe Writable streams, which is just wrong.
Writable.prototype.pipe = function () {
this.emit('error', new Error('Cannot pipe, not readable'));
};
function writeAfterEnd(stream, cb) {
var er = new Error('write after end');
// TODO: defer error events consistently everywhere, not just the cb
stream.emit('error', er);
pna.nextTick(cb, er);
}
// Checks that a user-supplied chunk is valid, especially for the particular
// mode the stream is in. Currently this means that `null` is never accepted
// and undefined/non-string values are only allowed in object mode.
function validChunk(stream, state, chunk, cb) {
var valid = true;
var er = false;
if (chunk === null) {
er = new TypeError('May not write null values to stream');
} else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
er = new TypeError('Invalid non-string/buffer chunk');
}
if (er) {
stream.emit('error', er);
pna.nextTick(cb, er);
valid = false;
}
return valid;
}
Writable.prototype.write = function (chunk, encoding, cb) {
var state = this._writableState;
var ret = false;
var isBuf = !state.objectMode && _isUint8Array(chunk);
if (isBuf && !Buffer.isBuffer(chunk)) {
chunk = _uint8ArrayToBuffer(chunk);
}
if (typeof encoding === 'function') {
cb = encoding;
encoding = null;
}
if (isBuf) encoding = 'buffer';
else if (!encoding) encoding = state.defaultEncoding;
if (typeof cb !== 'function') cb = nop;
if (state.ended) writeAfterEnd(this, cb);
else if (isBuf || validChunk(this, state, chunk, cb)) {
state.pendingcb++;
ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);
}
return ret;
};
Writable.prototype.cork = function () {
var state = this._writableState;
state.corked++;
};
Writable.prototype.uncork = function () {
var state = this._writableState;
if (state.corked) {
state.corked--;
if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
}
};
Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
// node::ParseEncoding() requires lower case.
if (typeof encoding === 'string') encoding = encoding.toLowerCase();
if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) {
throw new TypeError('Unknown encoding: ' + encoding);
}
this._writableState.defaultEncoding = encoding;
return this;
};
function decodeChunk(state, chunk, encoding) {
if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
chunk = Buffer.from(chunk, encoding);
}
return chunk;
}
Object.defineProperty(Writable.prototype, 'writableHighWaterMark', {
// making it explicit this property is not enumerable
// because otherwise some prototype manipulation in
// userland will fail
enumerable: false,
get: function () {
return this._writableState.highWaterMark;
}
});
// if we're already writing something, then just put this
// in the queue, and wait our turn. Otherwise, call _write
// If we return false, then we need a drain event, so set that flag.
function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {
if (!isBuf) {
var newChunk = decodeChunk(state, chunk, encoding);
if (chunk !== newChunk) {
isBuf = true;
encoding = 'buffer';
chunk = newChunk;
}
}
var len = state.objectMode ? 1 : chunk.length;
state.length += len;
var ret = state.length < state.highWaterMark;
// we must ensure that previous needDrain will not be reset to false.
if (!ret) state.needDrain = true;
if (state.writing || state.corked) {
var last = state.lastBufferedRequest;
state.lastBufferedRequest = {
chunk: chunk,
encoding: encoding,
isBuf: isBuf,
callback: cb,
next: null
};
if (last) {
last.next = state.lastBufferedRequest;
} else {
state.bufferedRequest = state.lastBufferedRequest;
}
state.bufferedRequestCount += 1;
} else {
doWrite(stream, state, false, len, chunk, encoding, cb);
}
return ret;
}
function doWrite(stream, state, writev, len, chunk, encoding, cb) {
state.writelen = len;
state.writecb = cb;
state.writing = true;
state.sync = true;
if (writev) stream._writev(chunk, state.onwrite);
else stream._write(chunk, encoding, state.onwrite);
state.sync = false;
}
function onwriteError(stream, state, sync, er, cb) {
--state.pendingcb;
if (sync) {
// defer the callback if we are being called synchronously
// to avoid piling up things on the stack
pna.nextTick(cb, er);
// this can emit finish, and it will always happen
// after error
pna.nextTick(finishMaybe, stream, state);
stream._writableState.errorEmitted = true;
stream.emit('error', er);
} else {
// the caller expects the callback to be invoked before the
// 'error' event if the write is async
cb(er);
stream._writableState.errorEmitted = true;
stream.emit('error', er);
// this can emit finish, but finish must
// always follow error
finishMaybe(stream, state);
}
}
function onwriteStateUpdate(state) {
state.writing = false;
state.writecb = null;
state.length -= state.writelen;
state.writelen = 0;
}
function onwrite(stream, er) {
var state = stream._writableState;
var sync = state.sync;
var cb = state.writecb;
onwriteStateUpdate(state);
if (er) onwriteError(stream, state, sync, er, cb);
else {
// Check if we're actually ready to finish, but don't emit yet
var finished = needFinish(state);
if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
clearBuffer(stream, state);
}
if (sync) {
/*<replacement>*/
asyncWrite(afterWrite, stream, state, finished, cb);
/*</replacement>*/
} else {
afterWrite(stream, state, finished, cb);
}
}
}
function afterWrite(stream, state, finished, cb) {
if (!finished) onwriteDrain(stream, state);
state.pendingcb--;
cb();
finishMaybe(stream, state);
}
// Must force callback to be called on nextTick, so that we don't
// emit 'drain' before the write() consumer gets the 'false' return
// value, and has a chance to attach a 'drain' listener.
function onwriteDrain(stream, state) {
if (state.length === 0 && state.needDrain) {
state.needDrain = false;
stream.emit('drain');
}
}
// if there's something in the buffer waiting, then process it
function clearBuffer(stream, state) {
state.bufferProcessing = true;
var entry = state.bufferedRequest;
if (stream._writev && entry && entry.next) {
// Fast case, write everything using _writev()
var l = state.bufferedRequestCount;
var buffer = new Array(l);
var holder = state.corkedRequestsFree;
holder.entry = entry;
var count = 0;
var allBuffers = true;
while (entry) {
buffer[count] = entry;
if (!entry.isBuf) allBuffers = false;
entry = entry.next;
count += 1;
}
buffer.allBuffers = allBuffers;
doWrite(stream, state, true, state.length, buffer, '', holder.finish);
// doWrite is almost always async, defer these to save a bit of time
// as the hot path ends with doWrite
state.pendingcb++;
state.lastBufferedRequest = null;
if (holder.next) {
state.corkedRequestsFree = holder.next;
holder.next = null;
} else {
state.corkedRequestsFree = new CorkedRequest(state);
}
state.bufferedRequestCount = 0;
} else {
// Slow case, write chunks one-by-one
while (entry) {
var chunk = entry.chunk;
var encoding = entry.encoding;
var cb = entry.callback;
var len = state.objectMode ? 1 : chunk.length;
doWrite(stream, state, false, len, chunk, encoding, cb);
entry = entry.next;
state.bufferedRequestCount--;
// if we didn't call the onwrite immediately, then
// it means that we need to wait until it does.
// also, that means that the chunk and cb are currently
// being processed, so move the buffer counter past them.
if (state.writing) {
break;
}
}
if (entry === null) state.lastBufferedRequest = null;
}
state.bufferedRequest = entry;
state.bufferProcessing = false;
}
Writable.prototype._write = function (chunk, encoding, cb) {
cb(new Error('_write() is not implemented'));
};
Writable.prototype._writev = null;
Writable.prototype.end = function (chunk, encoding, cb) {
var state = this._writableState;
if (typeof chunk === 'function') {
cb = chunk;
chunk = null;
encoding = null;
} else if (typeof encoding === 'function') {
cb = encoding;
encoding = null;
}
if (chunk !== null && chunk !== undefined) this.write(chunk, encoding);
// .end() fully uncorks
if (state.corked) {
state.corked = 1;
this.uncork();
}
// ignore unnecessary end() calls.
if (!state.ending && !state.finished) endWritable(this, state, cb);
};
function needFinish(state) {
return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;
}
function callFinal(stream, state) {
stream._final(function (err) {
state.pendingcb--;
if (err) {
stream.emit('error', err);
}
state.prefinished = true;
stream.emit('prefinish');
finishMaybe(stream, state);
});
}
function prefinish(stream, state) {
if (!state.prefinished && !state.finalCalled) {
if (typeof stream._final === 'function') {
state.pendingcb++;
state.finalCalled = true;
pna.nextTick(callFinal, stream, state);
} else {
state.prefinished = true;
stream.emit('prefinish');
}
}
}
function finishMaybe(stream, state) {
var need = needFinish(state);
if (need) {
prefinish(stream, state);
if (state.pendingcb === 0) {
state.finished = true;
stream.emit('finish');
}
}
return need;
}
function endWritable(stream, state, cb) {
state.ending = true;
finishMaybe(stream, state);
if (cb) {
if (state.finished) pna.nextTick(cb);
else stream.once('finish', cb);
}
state.ended = true;
stream.writable = false;
}
function onCorkedFinish(corkReq, state, err) {
var entry = corkReq.entry;
corkReq.entry = null;
while (entry) {
var cb = entry.callback;
state.pendingcb--;
cb(err);
entry = entry.next;
}
if (state.corkedRequestsFree) {
state.corkedRequestsFree.next = corkReq;
} else {
state.corkedRequestsFree = corkReq;
}
}
Object.defineProperty(Writable.prototype, 'destroyed', {
get: function () {
if (this._writableState === undefined) {
return false;
}
return this._writableState.destroyed;
},
set: function (value) {
// we ignore the value if the stream
// has not been initialized yet
if (!this._writableState) {
return;
}
// backward compatibility, the user is explicitly
// managing destroyed
this._writableState.destroyed = value;
}
});
Writable.prototype.destroy = destroyImpl.destroy;
Writable.prototype._undestroy = destroyImpl.undestroy;
Writable.prototype._destroy = function (err, cb) {
this.end();
cb(err);
};

View File

@@ -0,0 +1,79 @@
'use strict';
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
var Buffer = require('safe-buffer').Buffer;
var util = require('util');
function copyBuffer(src, target, offset) {
src.copy(target, offset);
}
module.exports = function () {
function BufferList() {
_classCallCheck(this, BufferList);
this.head = null;
this.tail = null;
this.length = 0;
}
BufferList.prototype.push = function push(v) {
var entry = { data: v, next: null };
if (this.length > 0) this.tail.next = entry;
else this.head = entry;
this.tail = entry;
++this.length;
};
BufferList.prototype.unshift = function unshift(v) {
var entry = { data: v, next: this.head };
if (this.length === 0) this.tail = entry;
this.head = entry;
++this.length;
};
BufferList.prototype.shift = function shift() {
if (this.length === 0) return;
var ret = this.head.data;
if (this.length === 1) this.head = this.tail = null;
else this.head = this.head.next;
--this.length;
return ret;
};
BufferList.prototype.clear = function clear() {
this.head = this.tail = null;
this.length = 0;
};
BufferList.prototype.join = function join(s) {
if (this.length === 0) return '';
var p = this.head;
var ret = '' + p.data;
while (p = p.next) {
ret += s + p.data;
}
return ret;
};
BufferList.prototype.concat = function concat(n) {
if (this.length === 0) return Buffer.alloc(0);
if (this.length === 1) return this.head.data;
var ret = Buffer.allocUnsafe(n >>> 0);
var p = this.head;
var i = 0;
while (p) {
copyBuffer(p.data, ret, i);
i += p.data.length;
p = p.next;
}
return ret;
};
return BufferList;
}();
if (util && util.inspect && util.inspect.custom) {
module.exports.prototype[util.inspect.custom] = function () {
var obj = util.inspect({ length: this.length });
return this.constructor.name + ' ' + obj;
};
}

View File

@@ -0,0 +1,74 @@
'use strict';
/*<replacement>*/
var pna = require('process-nextick-args');
/*</replacement>*/
// undocumented cb() API, needed for core, not for public API
function destroy(err, cb) {
var _this = this;
var readableDestroyed = this._readableState && this._readableState.destroyed;
var writableDestroyed = this._writableState && this._writableState.destroyed;
if (readableDestroyed || writableDestroyed) {
if (cb) {
cb(err);
} else if (err && (!this._writableState || !this._writableState.errorEmitted)) {
pna.nextTick(emitErrorNT, this, err);
}
return this;
}
// we set destroyed to true before firing error callbacks in order
// to make it re-entrance safe in case destroy() is called within callbacks
if (this._readableState) {
this._readableState.destroyed = true;
}
// if this is a duplex stream mark the writable part as destroyed as well
if (this._writableState) {
this._writableState.destroyed = true;
}
this._destroy(err || null, function (err) {
if (!cb && err) {
pna.nextTick(emitErrorNT, _this, err);
if (_this._writableState) {
_this._writableState.errorEmitted = true;
}
} else if (cb) {
cb(err);
}
});
return this;
}
function undestroy() {
if (this._readableState) {
this._readableState.destroyed = false;
this._readableState.reading = false;
this._readableState.ended = false;
this._readableState.endEmitted = false;
}
if (this._writableState) {
this._writableState.destroyed = false;
this._writableState.ended = false;
this._writableState.ending = false;
this._writableState.finished = false;
this._writableState.errorEmitted = false;
}
}
function emitErrorNT(self, err) {
self.emit('error', err);
}
module.exports = {
destroy: destroy,
undestroy: undestroy
};

View File

@@ -0,0 +1 @@
module.exports = require('events').EventEmitter;

View File

@@ -0,0 +1 @@
module.exports = require('stream');

View File

@@ -0,0 +1,84 @@
{
"_args": [
[
"readable-stream@2.3.6",
"/Users/paul/src/codeartisans/gradle-command-action"
]
],
"_from": "readable-stream@2.3.6",
"_id": "readable-stream@2.3.6",
"_inBundle": false,
"_integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==",
"_location": "/unzipper/readable-stream",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "readable-stream@2.3.6",
"name": "readable-stream",
"escapedName": "readable-stream",
"rawSpec": "2.3.6",
"saveSpec": null,
"fetchSpec": "2.3.6"
},
"_requiredBy": [
"/unzipper"
],
"_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz",
"_spec": "2.3.6",
"_where": "/Users/paul/src/codeartisans/gradle-command-action",
"browser": {
"util": false,
"./readable.js": "./readable-browser.js",
"./writable.js": "./writable-browser.js",
"./duplex.js": "./duplex-browser.js",
"./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js"
},
"bugs": {
"url": "https://github.com/nodejs/readable-stream/issues"
},
"dependencies": {
"core-util-is": "~1.0.0",
"inherits": "~2.0.3",
"isarray": "~1.0.0",
"process-nextick-args": "~2.0.0",
"safe-buffer": "~5.1.1",
"string_decoder": "~1.1.1",
"util-deprecate": "~1.0.1"
},
"description": "Streams3, a user-land copy of the stream library from Node.js",
"devDependencies": {
"assert": "^1.4.0",
"babel-polyfill": "^6.9.1",
"buffer": "^4.9.0",
"lolex": "^2.3.2",
"nyc": "^6.4.0",
"tap": "^0.7.0",
"tape": "^4.8.0"
},
"homepage": "https://github.com/nodejs/readable-stream#readme",
"keywords": [
"readable",
"stream",
"pipe"
],
"license": "MIT",
"main": "readable.js",
"name": "readable-stream",
"nyc": {
"include": [
"lib/**.js"
]
},
"repository": {
"type": "git",
"url": "git://github.com/nodejs/readable-stream.git"
},
"scripts": {
"ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js",
"cover": "nyc npm test",
"report": "nyc report --reporter=lcov",
"test": "tap test/parallel/*.js test/ours/*.js && node test/verify-dependencies.js"
},
"version": "2.3.6"
}

View File

@@ -0,0 +1 @@
module.exports = require('./readable').PassThrough

View File

@@ -0,0 +1,7 @@
exports = module.exports = require('./lib/_stream_readable.js');
exports.Stream = exports;
exports.Readable = exports;
exports.Writable = require('./lib/_stream_writable.js');
exports.Duplex = require('./lib/_stream_duplex.js');
exports.Transform = require('./lib/_stream_transform.js');
exports.PassThrough = require('./lib/_stream_passthrough.js');

View File

@@ -0,0 +1,19 @@
var Stream = require('stream');
if (process.env.READABLE_STREAM === 'disable' && Stream) {
module.exports = Stream;
exports = module.exports = Stream.Readable;
exports.Readable = Stream.Readable;
exports.Writable = Stream.Writable;
exports.Duplex = Stream.Duplex;
exports.Transform = Stream.Transform;
exports.PassThrough = Stream.PassThrough;
exports.Stream = Stream;
} else {
exports = module.exports = require('./lib/_stream_readable.js');
exports.Stream = Stream || exports;
exports.Readable = exports;
exports.Writable = require('./lib/_stream_writable.js');
exports.Duplex = require('./lib/_stream_duplex.js');
exports.Transform = require('./lib/_stream_transform.js');
exports.PassThrough = require('./lib/_stream_passthrough.js');
}
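// Usage note (editor's addition, not part of the original source): setting
// READABLE_STREAM=disable in the environment makes this entry point re-export
// Node core's "stream" module instead of the bundled userland implementation:
//
//   READABLE_STREAM=disable node app.js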

View File

@@ -0,0 +1 @@
module.exports = require('./readable').Transform

View File

@@ -0,0 +1 @@
module.exports = require('./lib/_stream_writable.js');

View File

@@ -0,0 +1,8 @@
var Stream = require("stream")
var Writable = require("./lib/_stream_writable.js")
if (process.env.READABLE_STREAM === 'disable') {
module.exports = Stream && Stream.Writable || Writable
} else {
module.exports = Writable
}

View File

@@ -0,0 +1,50 @@
sudo: false
language: node_js
before_install:
- npm install -g npm@2
- test $NPM_LEGACY && npm install -g npm@latest-3 || npm install npm -g
notifications:
email: false
matrix:
fast_finish: true
include:
- node_js: '0.8'
env:
- TASK=test
- NPM_LEGACY=true
- node_js: '0.10'
env:
- TASK=test
- NPM_LEGACY=true
- node_js: '0.11'
env:
- TASK=test
- NPM_LEGACY=true
- node_js: '0.12'
env:
- TASK=test
- NPM_LEGACY=true
- node_js: 1
env:
- TASK=test
- NPM_LEGACY=true
- node_js: 2
env:
- TASK=test
- NPM_LEGACY=true
- node_js: 3
env:
- TASK=test
- NPM_LEGACY=true
- node_js: 4
env: TASK=test
- node_js: 5
env: TASK=test
- node_js: 6
env: TASK=test
- node_js: 7
env: TASK=test
- node_js: 8
env: TASK=test
- node_js: 9
env: TASK=test

View File

@@ -0,0 +1,48 @@
Node.js is licensed for use as follows:
"""
Copyright Node.js contributors. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
"""
This license applies to parts of Node.js originating from the
https://github.com/joyent/node repository:
"""
Copyright Joyent, Inc. and other Node contributors. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
"""

View File

@@ -0,0 +1,47 @@
# string_decoder
***Node-core v8.9.4 string_decoder for userland***
[![NPM](https://nodei.co/npm/string_decoder.png?downloads=true&downloadRank=true)](https://nodei.co/npm/string_decoder/)
[![NPM](https://nodei.co/npm-dl/string_decoder.png?&months=6&height=3)](https://nodei.co/npm/string_decoder/)
```bash
npm install --save string_decoder
```
***Node-core string_decoder for userland***
This package is a mirror of the string_decoder implementation in Node-core.
Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/).
As of version 1.0.0 **string_decoder** uses semantic versioning.
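A minimal usage sketch (an editor's illustration; the API mirrors Node core's `string_decoder`):

```js
var StringDecoder = require('string_decoder').StringDecoder;
var decoder = new StringDecoder('utf8');

// '€' is the three bytes 0xE2 0x82 0xAC in UTF-8. Split across two
// writes, the decoder buffers the partial character instead of
// emitting a replacement character.
var out = decoder.write(Buffer.from([0xE2, 0x82])); // ''
out += decoder.write(Buffer.from([0xAC]));          // '€'
out += decoder.end(); // flushes any remaining partial character
console.log(out);     // '€'
```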
## Previous versions
Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10.
## Update
The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version.
## Streams Working Group
`string_decoder` is maintained by the Streams Working Group, which
oversees the development and maintenance of the Streams API within
Node.js. The responsibilities of the Streams Working Group include:
* Addressing stream issues on the Node.js issue tracker.
* Authoring and editing stream documentation within the Node.js project.
* Reviewing changes to stream subclasses within the Node.js project.
* Redirecting changes to streams from the Node.js project to this
project.
* Assisting in the implementation of stream providers within Node.js.
* Recommending versions of `readable-stream` to be included in Node.js.
* Messaging about the future of streams to give the community advance
notice of changes.
See [readable-stream](https://github.com/nodejs/readable-stream) for
more details.

View File

@@ -0,0 +1,296 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
'use strict';
/*<replacement>*/
var Buffer = require('safe-buffer').Buffer;
/*</replacement>*/
var isEncoding = Buffer.isEncoding || function (encoding) {
encoding = '' + encoding;
switch (encoding && encoding.toLowerCase()) {
case 'hex':
case 'utf8':
case 'utf-8':
case 'ascii':
case 'binary':
case 'base64':
case 'ucs2':
case 'ucs-2':
case 'utf16le':
case 'utf-16le':
case 'raw':
return true;
default:
return false;
}
};
function _normalizeEncoding(enc) {
if (!enc) return 'utf8';
var retried;
while (true) {
switch (enc) {
case 'utf8':
case 'utf-8':
return 'utf8';
case 'ucs2':
case 'ucs-2':
case 'utf16le':
case 'utf-16le':
return 'utf16le';
case 'latin1':
case 'binary':
return 'latin1';
case 'base64':
case 'ascii':
case 'hex':
return enc;
default:
if (retried) return; // undefined
enc = ('' + enc).toLowerCase();
retried = true;
}
}
}
// Do not cache `Buffer.isEncoding` when checking encoding names as some
// modules monkey-patch it to support additional encodings
function normalizeEncoding(enc) {
var nenc = _normalizeEncoding(enc);
if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc);
return nenc || enc;
}
// StringDecoder provides an interface for efficiently splitting a series of
// buffers into a series of JS strings without breaking apart multi-byte
// characters.
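// Illustrative sketch (editor's addition, not part of the original source):
//
//   var decoder = new StringDecoder('utf8');
//   decoder.write(Buffer.from([0xF0, 0x9F])); // first half of a 4-byte char -> ''
//   decoder.end();                            // still incomplete -> '\ufffd'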
exports.StringDecoder = StringDecoder;
function StringDecoder(encoding) {
this.encoding = normalizeEncoding(encoding);
var nb;
switch (this.encoding) {
case 'utf16le':
this.text = utf16Text;
this.end = utf16End;
nb = 4;
break;
case 'utf8':
this.fillLast = utf8FillLast;
nb = 4;
break;
case 'base64':
this.text = base64Text;
this.end = base64End;
nb = 3;
break;
default:
this.write = simpleWrite;
this.end = simpleEnd;
return;
}
this.lastNeed = 0;
this.lastTotal = 0;
this.lastChar = Buffer.allocUnsafe(nb);
}
StringDecoder.prototype.write = function (buf) {
if (buf.length === 0) return '';
var r;
var i;
if (this.lastNeed) {
r = this.fillLast(buf);
if (r === undefined) return '';
i = this.lastNeed;
this.lastNeed = 0;
} else {
i = 0;
}
if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i);
return r || '';
};
StringDecoder.prototype.end = utf8End;
// Returns only complete characters in a Buffer
StringDecoder.prototype.text = utf8Text;
// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer
StringDecoder.prototype.fillLast = function (buf) {
if (this.lastNeed <= buf.length) {
buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);
return this.lastChar.toString(this.encoding, 0, this.lastTotal);
}
buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);
this.lastNeed -= buf.length;
};
// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a
// continuation byte. If an invalid byte is detected, -2 is returned.
function utf8CheckByte(byte) {
if (byte <= 0x7F) return 0;
else if (byte >> 5 === 0x06) return 2;
else if (byte >> 4 === 0x0E) return 3;
else if (byte >> 3 === 0x1E) return 4;
return byte >> 6 === 0x02 ? -1 : -2;
}
// Checks at most 3 bytes at the end of a Buffer in order to detect an
// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)
// needed to complete the UTF-8 character (if applicable) are returned.
function utf8CheckIncomplete(self, buf, i) {
var j = buf.length - 1;
if (j < i) return 0;
var nb = utf8CheckByte(buf[j]);
if (nb >= 0) {
if (nb > 0) self.lastNeed = nb - 1;
return nb;
}
if (--j < i || nb === -2) return 0;
nb = utf8CheckByte(buf[j]);
if (nb >= 0) {
if (nb > 0) self.lastNeed = nb - 2;
return nb;
}
if (--j < i || nb === -2) return 0;
nb = utf8CheckByte(buf[j]);
if (nb >= 0) {
if (nb > 0) {
if (nb === 2) nb = 0;
else self.lastNeed = nb - 3;
}
return nb;
}
return 0;
}
// Validates as many continuation bytes for a multi-byte UTF-8 character as
// needed or are available. If we see a non-continuation byte where we expect
// one, we "replace" the validated continuation bytes we've seen so far with
// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding
// behavior. The continuation byte check is included three times in the case
// where all of the continuation bytes for a character exist in the same buffer.
// It is also done this way as a slight performance increase instead of using a
// loop.
function utf8CheckExtraBytes(self, buf, p) {
if ((buf[0] & 0xC0) !== 0x80) {
self.lastNeed = 0;
return '\ufffd';
}
if (self.lastNeed > 1 && buf.length > 1) {
if ((buf[1] & 0xC0) !== 0x80) {
self.lastNeed = 1;
return '\ufffd';
}
if (self.lastNeed > 2 && buf.length > 2) {
if ((buf[2] & 0xC0) !== 0x80) {
self.lastNeed = 2;
return '\ufffd';
}
}
}
}
// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.
function utf8FillLast(buf) {
var p = this.lastTotal - this.lastNeed;
var r = utf8CheckExtraBytes(this, buf, p);
if (r !== undefined) return r;
if (this.lastNeed <= buf.length) {
buf.copy(this.lastChar, p, 0, this.lastNeed);
return this.lastChar.toString(this.encoding, 0, this.lastTotal);
}
buf.copy(this.lastChar, p, 0, buf.length);
this.lastNeed -= buf.length;
}
// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a
// partial character, the character's bytes are buffered until the required
// number of bytes is available.
function utf8Text(buf, i) {
var total = utf8CheckIncomplete(this, buf, i);
if (!this.lastNeed) return buf.toString('utf8', i);
this.lastTotal = total;
var end = buf.length - (total - this.lastNeed);
buf.copy(this.lastChar, 0, end);
return buf.toString('utf8', i, end);
}
// For UTF-8, a replacement character is added when ending on a partial
// character.
function utf8End(buf) {
var r = buf && buf.length ? this.write(buf) : '';
if (this.lastNeed) return r + '\ufffd';
return r;
}
// UTF-16LE typically needs two bytes per character, but even if we have an even
// number of bytes available, we need to check if we end on a leading/high
// surrogate. In that case, we need to wait for the next two bytes in order to
// decode the last character properly.
function utf16Text(buf, i) {
if ((buf.length - i) % 2 === 0) {
var r = buf.toString('utf16le', i);
if (r) {
var c = r.charCodeAt(r.length - 1);
if (c >= 0xD800 && c <= 0xDBFF) {
this.lastNeed = 2;
this.lastTotal = 4;
this.lastChar[0] = buf[buf.length - 2];
this.lastChar[1] = buf[buf.length - 1];
return r.slice(0, -1);
}
}
return r;
}
this.lastNeed = 1;
this.lastTotal = 2;
this.lastChar[0] = buf[buf.length - 1];
return buf.toString('utf16le', i, buf.length - 1);
}
// For UTF-16LE we do not explicitly append special replacement characters if we
// end on a partial character; we simply let v8 handle that.
function utf16End(buf) {
var r = buf && buf.length ? this.write(buf) : '';
if (this.lastNeed) {
var end = this.lastTotal - this.lastNeed;
return r + this.lastChar.toString('utf16le', 0, end);
}
return r;
}
function base64Text(buf, i) {
var n = (buf.length - i) % 3;
if (n === 0) return buf.toString('base64', i);
this.lastNeed = 3 - n;
this.lastTotal = 3;
if (n === 1) {
this.lastChar[0] = buf[buf.length - 1];
} else {
this.lastChar[0] = buf[buf.length - 2];
this.lastChar[1] = buf[buf.length - 1];
}
return buf.toString('base64', i, buf.length - n);
}
function base64End(buf) {
var r = buf && buf.length ? this.write(buf) : '';
if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed);
return r;
}
// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)
function simpleWrite(buf) {
return buf.toString(this.encoding);
}
function simpleEnd(buf) {
return buf && buf.length ? this.write(buf) : '';
}
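
A quick illustration of the buffering behavior implemented above (a minimal sketch; it assumes Node's built-in string_decoder module, which this vendored copy mirrors): a multi-byte character split across chunks is held back until its remaining bytes arrive, whether it is a UTF-8 sequence, a UTF-16LE surrogate pair, or a partial base64 group.

const { StringDecoder } = require('string_decoder');

// UTF-8: '€' (U+20AC) encodes as three bytes, 0xE2 0x82 0xAC.
const utf8 = new StringDecoder('utf8');
const euro = Buffer.from([0xE2, 0x82, 0xAC]);
console.log(utf8.write(euro.slice(0, 1))); // '' -- lead byte buffered
console.log(utf8.write(euro.slice(1)));    // '€' -- continuation bytes complete it
console.log(utf8.end());                   // '' -- nothing left pending

// UTF-16LE: '😀' (U+1F600) is a surrogate pair, four bytes.
const utf16 = new StringDecoder('utf16le');
const emoji = Buffer.from('😀', 'utf16le');
console.log(utf16.write(emoji.slice(0, 2))); // '' -- lone high surrogate held back
console.log(utf16.write(emoji.slice(2)));    // '😀' -- pair completed

// Base64: output is only emitted in complete 3-byte groups.
const b64 = new StringDecoder('base64');
console.log(b64.write(Buffer.from([0x61, 0x62]))); // '' -- 2 bytes buffered
console.log(b64.write(Buffer.from([0x63])));       // 'YWJj' -- 'abc' encoded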

62
node_modules/unzipper/node_modules/string_decoder/package.json generated vendored Normal file
View File

@@ -0,0 +1,62 @@
{
"_args": [
[
"string_decoder@1.1.1",
"/Users/paul/src/codeartisans/gradle-command-action"
]
],
"_from": "string_decoder@1.1.1",
"_id": "string_decoder@1.1.1",
"_inBundle": false,
"_integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==",
"_location": "/unzipper/string_decoder",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "string_decoder@1.1.1",
"name": "string_decoder",
"escapedName": "string_decoder",
"rawSpec": "1.1.1",
"saveSpec": null,
"fetchSpec": "1.1.1"
},
"_requiredBy": [
"/unzipper/readable-stream"
],
"_resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz",
"_spec": "1.1.1",
"_where": "/Users/paul/src/codeartisans/gradle-command-action",
"bugs": {
"url": "https://github.com/nodejs/string_decoder/issues"
},
"dependencies": {
"safe-buffer": "~5.1.0"
},
"description": "The string_decoder module from Node core",
"devDependencies": {
"babel-polyfill": "^6.23.0",
"core-util-is": "^1.0.2",
"inherits": "^2.0.3",
"tap": "~0.4.8"
},
"homepage": "https://github.com/nodejs/string_decoder",
"keywords": [
"string",
"decoder",
"browser",
"browserify"
],
"license": "MIT",
"main": "lib/string_decoder.js",
"name": "string_decoder",
"repository": {
"type": "git",
"url": "git://github.com/nodejs/string_decoder.git"
},
"scripts": {
"ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js",
"test": "tap test/parallel/*.js && node test/verify-dependencies"
},
"version": "1.1.1"
}

108
node_modules/unzipper/package.json generated vendored Normal file
View File

@@ -0,0 +1,108 @@
{
"_args": [
[
"unzipper@0.10.5",
"/Users/paul/src/codeartisans/gradle-command-action"
]
],
"_from": "unzipper@0.10.5",
"_id": "unzipper@0.10.5",
"_inBundle": false,
"_integrity": "sha512-i5ufkXNjWZYxU/0nKKf6LkvW8kn9YzRvfwuPWjXP+JTFce/8bqeR0gEfbiN2IDdJa6ZU6/2IzFRLK0z1v0uptw==",
"_location": "/unzipper",
"_phantomChildren": {
"core-util-is": "1.0.2",
"graceful-fs": "4.2.2",
"inherits": "2.0.4",
"isarray": "1.0.0",
"mkdirp": "0.5.1",
"process-nextick-args": "2.0.1",
"rimraf": "2.7.1",
"safe-buffer": "5.1.2",
"util-deprecate": "1.0.2"
},
"_requested": {
"type": "version",
"registry": true,
"raw": "unzipper@0.10.5",
"name": "unzipper",
"escapedName": "unzipper",
"rawSpec": "0.10.5",
"saveSpec": null,
"fetchSpec": "0.10.5"
},
"_requiredBy": [
"/"
],
"_resolved": "https://registry.npmjs.org/unzipper/-/unzipper-0.10.5.tgz",
"_spec": "0.10.5",
"_where": "/Users/paul/src/codeartisans/gradle-command-action",
"author": {
"name": "Evan Oxfeld",
"email": "eoxfeld@gmail.com"
},
"bugs": {
"url": "https://github.com/ZJONSSON/node-unzipper/issues"
},
"contributors": [
{
"name": "Ziggy Jonsson",
"email": "ziggy.jonsson.nyc@gmail.com"
},
{
"name": "Evan Oxfeld",
"email": "eoxfeld@gmail.com"
},
{
"name": "Joe Ferner",
"email": "joe.ferner@nearinfinity.com"
}
],
"dependencies": {
"big-integer": "^1.6.17",
"binary": "~0.3.0",
"bluebird": "~3.4.1",
"buffer-indexof-polyfill": "~1.0.0",
"duplexer2": "~0.1.4",
"fstream": "^1.0.12",
"graceful-fs": "^4.2.2",
"listenercount": "~1.0.1",
"readable-stream": "~2.3.6",
"setimmediate": "~1.0.4"
},
"description": "Unzip cross-platform streaming API ",
"devDependencies": {
"aws-sdk": "^2.77.0",
"dirdiff": ">= 0.0.1 < 1",
"iconv-lite": "^0.4.24",
"request": "^2.88.0",
"stream-buffers": ">= 0.2.5 < 1",
"tap": ">= 0.3.0 < 1",
"temp": ">= 0.4.0 < 1"
},
"directories": {
"example": "examples",
"test": "test"
},
"homepage": "https://github.com/ZJONSSON/node-unzipper#readme",
"keywords": [
"zip",
"unzip",
"zlib",
"uncompress",
"archive",
"stream",
"extract"
],
"license": "MIT",
"main": "unzip.js",
"name": "unzipper",
"repository": {
"type": "git",
"url": "git+https://github.com/ZJONSSON/node-unzipper.git"
},
"scripts": {
"test": "tap test/*.js --jobs=10 --coverage-report=html --no-browser"
},
"version": "0.10.5"
}

10
node_modules/unzipper/test.js generated vendored Normal file
View File

@@ -0,0 +1,10 @@
const unzipper = require('./unzip');
async function main() {
const zip = await unzipper.Open.file('./scorm-file-test.zip');
await zip.extract({path:'/home/zjonsson/git/node-unzipper/tmp'});
console.log('done');
}
main().then(console.log,console.log)
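
The smoke test above hard-codes the author's local paths. A parameterized sketch of the same flow (the CLI argument and temp-dir output are illustrative, not part of the package):

const unzipper = require('./unzip');
const os = require('os');
const path = require('path');

async function extractTo(zipPath, outDir) {
  // Open.file reads the central directory first; extract() then unpacks all entries.
  const zip = await unzipper.Open.file(zipPath);
  await zip.extract({path: outDir});
  return outDir;
}

extractTo(process.argv[2], path.join(os.tmpdir(), 'unzipped'))
  .then(dir => console.log('done:', dir), console.error);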

11
node_modules/unzipper/unzip.js generated vendored Normal file
View File

@@ -0,0 +1,11 @@
'use strict';
// Polyfills for node 0.8
require('listenercount');
require('buffer-indexof-polyfill');
require('setimmediate');
exports.Parse = require('./lib/parse');
exports.ParseOne = require('./lib/parseOne');
exports.Extract = require('./lib/extract');
exports.Open = require('./lib/Open');
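
The exports above cover both styles of use: the streaming Parse/ParseOne/Extract pipeline and the promise-based Open interface. A minimal streaming sketch (assuming an archive.zip exists alongside the script; path names are illustrative):

const fs = require('fs');
const unzipper = require('unzipper');

// Pipe a zip stream through Extract to unpack every entry to disk.
fs.createReadStream('./archive.zip')
  .pipe(unzipper.Extract({path: './output'}))
  .on('close', () => console.log('extraction complete'));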