(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
var Protocol = require('bittorrent-protocol');
var stream = require('stream');
var WebSocks = require("./websocks.js").WebSocks;


function start_torrent(torrent_file_blob)
{
    var torrent = new WebsocksTorrent();
    var elem = document.getElementById("ws-server");
    torrent.start(elem.value);
    torrent.addTorrent(torrent_file_blob);
}

function start_downloading()
{
    var elem = document.getElementById("torrent-file");
    var files = elem.files;
    for (var f of files) {
        if (f.slice) {
            start_torrent(f.slice());
        }
    }
}
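// Illustrative sketch (not part of the original bundle): one way the pieces in
// this entry module could fit together. `WebsocksTorrent` used by
// start_torrent() is defined elsewhere in the bundle; this example only shows
// how a bittorrent-protocol Wire might be driven over a WebSocks stream pair.
// The peer host/port and the 20-byte infoHash/peerId arguments are assumptions
// supplied by the caller.
function example_connect_peer(wsurl, host, port, infoHash, peerId) {
    var socks = new WebSocks(wsurl);
    socks.connect(host, port, function(err, send, recv) {
        if (err) { console.log("connect failed: " + err); return; }
        var wire = new Protocol();        // duplex stream speaking the peer wire protocol
        wire.pipe(send);                  // our outgoing bytes -> proxied TCP connection
        recv.pipe(wire);                  // proxied TCP connection -> our protocol parser
        wire.on('handshake', function(ih, pid) {
            console.log("peer handshake from " + pid);
        });
        wire.handshake(infoHash, peerId); // both must be 20 bytes (hex string or Buffer)
    });
}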
|
|
|
},{"./websocks.js":2,"bittorrent-protocol":7,"stream":56}],2:[function(require,module,exports){ |
|
var stream = require('stream'); |
|
|
|
|
|
function WebSocks(wsurl) |
|
{ |
|
this._wsurl = wsurl; |
|
this._ws = null; |
|
this._ouraddr = null; |
|
this._remoteAddr = null; |
|
this._recvbuffer = stream.PassThrough(); |
|
this._sendbuffer = stream.PassThrough(); |
|
} |
|
|
|
WebSocks.prototype.createListener = function(callback) { |
|
var self = this; |
|
self._doAccept(function(err) { |
|
if(err) callback(err, null); |
|
else { |
|
callback(null, self._sendbuffer, self._recvbuffer); |
|
} |
|
}, function(data) { |
|
self._recvbuffer.write(data); |
|
}); |
|
}; |
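// Illustrative sketch (not part of the original module): accepting one inbound
// connection through the websocks proxy. makeWire is expected to return a
// duplex stream (e.g. a bittorrent-protocol Wire created by the caller, since
// that module is not require()'d here).
function example_accept(wsurl, makeWire) {
    var socks = new WebSocks(wsurl);
    socks.createListener(function(err, send, recv) {
        if (err) { console.log("accept failed: " + err); return; }
        var wire = makeWire();
        wire.pipe(send);   // our outgoing bytes -> proxied connection
        recv.pipe(wire);   // proxied connection -> our protocol handler
    });
}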
|
|
|
WebSocks.prototype._doAccept = function(accept_cb, recv_cb) {
    var self = this;
    self._socket(function (err, data) {
        if (!self._remoteAddr)
        {
            if (!data)
            {
                // socket is ready: send the accept handshake
                self._ws.send("accept");
            }
            else
            {
                // handshake reply from the websocks server
                var j = JSON.parse(data);
                if (j.error) {
                    accept_cb(j.error);
                } else if (j.success) {
                    // mark the handshake as complete; the reply's address field
                    // name is an assumption, so fall back to a truthy marker
                    self._remoteAddr = j.destination || true;
                    accept_cb(null);
                } else {
                    // not settled yet: poll again, keeping both callbacks
                    setTimeout(function() {
                        self._doAccept(accept_cb, recv_cb);
                    }, 10);
                }
            }
        }
        else
        {
            recv_cb(err, data);
        }
    });
};
|
|
|
WebSocks.prototype._closed = function()
{
    var self = this;
    // PassThrough streams have no close(); end them instead
    self._recvbuffer.end();
    self._sendbuffer.end();
    self._ws.close();
};
|
|
|
WebSocks.prototype.connect = function(host, port, connect_cb) { |
|
var self = this; |
|
self._socket(function (err, data) { |
|
if(!self._remoteAddr) |
|
{ |
|
if(!data) |
|
{ |
|
// send handshake |
|
self._ws.send("connect "+host+":"+port); |
|
} |
|
else |
|
{ |
|
// this means handshake read |
|
var j = JSON.parse(data); |
|
if(j.error) { |
|
connect_cb(j.error, null, null); |
|
} else if(j.success){ |
|
self._remoteAddr = host; |
|
connect_cb(null, self._sendbuffer, self._recvbuffer); |
|
} else { |
|
connect_cb("failed to connect", null, null); |
|
} |
|
} |
|
} |
|
else |
|
{ |
|
self._recvbuffer.write(data); |
|
} |
|
}); |
|
}; |
|
|
|
WebSocks.prototype._socket = function(cb) { |
|
var self = this; |
|
self._ws = new WebSocket(self._wsurl); |
|
self._ws.onclose = function(ev) { |
|
self._closed(); |
|
}; |
|
|
|
self._ws.onopen = function(ev) { |
|
console.log("connected to websocks"); |
|
self._ws.send("getaddr"); |
|
}; |
|
|
|
self._ws.onmessage = function(ev) { |
|
if(!self._ouraddr) |
|
{ |
|
var j = JSON.parse(ev.data); |
|
if(j.error) { |
|
console.log("connect error: "+j.error); |
|
cb(j.error, null); |
|
return; |
|
} |
|
if(j.success && j.destination) { |
|
self._ouraddr = j.destination; |
|
} else { |
|
cb("failed to connect", null); |
|
return; |
|
} |
|
self._sendbuffer.on("data", function(data) { |
|
self._ws.send(data); |
|
}); |
|
cb(null, null); |
|
} |
|
else |
|
cb(null, ev.data); |
|
}; |
|
}; |
|
|
|
module.exports = { |
|
"WebSocks": WebSocks, |
|
}; |
|
|
|
},{"stream":56}],3:[function(require,module,exports){ |
|
(function (Buffer){ |
|
const INTEGER_START = 0x69 // 'i' |
|
const STRING_DELIM = 0x3A // ':' |
|
const DICTIONARY_START = 0x64 // 'd' |
|
const LIST_START = 0x6C // 'l' |
|
const END_OF_TYPE = 0x65 // 'e' |
|
|
|
/** |
|
* replaces parseInt(buffer.toString('ascii', start, end)). |
|
* For strings with fewer than ~30 characters, this is actually a lot faster.
|
* |
|
* @param {Buffer} data |
|
* @param {Number} start |
|
* @param {Number} end |
|
* @return {Number} calculated number |
|
*/ |
|
function getIntFromBuffer (buffer, start, end) { |
|
var sum = 0 |
|
var sign = 1 |
|
|
|
for (var i = start; i < end; i++) { |
|
var num = buffer[i] |
|
|
|
if (num < 58 && num >= 48) { |
|
sum = sum * 10 + (num - 48) |
|
continue |
|
} |
|
|
|
if (i === start && num === 43) { // + |
|
continue |
|
} |
|
|
|
if (i === start && num === 45) { // - |
|
sign = -1 |
|
continue |
|
} |
|
|
|
if (num === 46) { // . |
|
// it's a float; break here.
|
break |
|
} |
|
|
|
throw new Error('not a number: buffer[' + i + '] = ' + num) |
|
} |
|
|
|
return sum * sign |
|
} |
|
|
|
/** |
|
* Decodes bencoded data. |
|
* |
|
* @param {Buffer} data |
|
* @param {Number} start (optional) |
|
* @param {Number} end (optional) |
|
* @param {String} encoding (optional) |
|
* @return {Object|Array|Buffer|String|Number} |
|
*/ |
|
function decode (data, start, end, encoding) { |
|
if (data == null || data.length === 0) { |
|
return null |
|
} |
|
|
|
if (typeof start !== 'number' && encoding == null) { |
|
encoding = start |
|
start = undefined |
|
} |
|
|
|
if (typeof end !== 'number' && encoding == null) { |
|
encoding = end |
|
end = undefined |
|
} |
|
|
|
decode.position = 0 |
|
decode.encoding = encoding || null |
|
|
|
decode.data = !(Buffer.isBuffer(data)) |
|
? new Buffer(data) |
|
: data.slice(start, end) |
|
|
|
decode.bytes = decode.data.length |
|
|
|
return decode.next() |
|
} |
|
|
|
decode.bytes = 0 |
|
decode.position = 0 |
|
decode.data = null |
|
decode.encoding = null |
|
|
|
decode.next = function () { |
|
switch (decode.data[decode.position]) { |
|
case DICTIONARY_START: |
|
return decode.dictionary() |
|
case LIST_START: |
|
return decode.list() |
|
case INTEGER_START: |
|
return decode.integer() |
|
default: |
|
return decode.buffer() |
|
} |
|
} |
|
|
|
decode.find = function (chr) { |
|
var i = decode.position |
|
var c = decode.data.length |
|
var d = decode.data |
|
|
|
while (i < c) { |
|
if (d[i] === chr) return i |
|
i++ |
|
} |
|
|
|
throw new Error( |
|
'Invalid data: Missing delimiter "' + |
|
String.fromCharCode(chr) + '" [0x' + |
|
chr.toString(16) + ']' |
|
) |
|
} |
|
|
|
decode.dictionary = function () { |
|
decode.position++ |
|
|
|
var dict = {} |
|
|
|
while (decode.data[decode.position] !== END_OF_TYPE) { |
|
dict[decode.buffer()] = decode.next() |
|
} |
|
|
|
decode.position++ |
|
|
|
return dict |
|
} |
|
|
|
decode.list = function () { |
|
decode.position++ |
|
|
|
var lst = [] |
|
|
|
while (decode.data[decode.position] !== END_OF_TYPE) { |
|
lst.push(decode.next()) |
|
} |
|
|
|
decode.position++ |
|
|
|
return lst |
|
} |
|
|
|
decode.integer = function () { |
|
var end = decode.find(END_OF_TYPE) |
|
var number = getIntFromBuffer(decode.data, decode.position + 1, end) |
|
|
|
decode.position += end + 1 - decode.position |
|
|
|
return number |
|
} |
|
|
|
decode.buffer = function () { |
|
var sep = decode.find(STRING_DELIM) |
|
var length = getIntFromBuffer(decode.data, decode.position, sep) |
|
var end = ++sep + length |
|
|
|
decode.position = end |
|
|
|
return decode.encoding |
|
? decode.data.toString(decode.encoding, sep, end) |
|
: decode.data.slice(sep, end) |
|
} |
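// Worked example (not part of the original module): decoding the bencoded
// dictionary "d3:cow3:moo4:spam4:eggse" yields Buffers for the values; passing
// an encoding returns plain strings instead, e.g.
//   decode(Buffer.from('d3:cow3:moo4:spam4:eggse'), 'utf8')
//   // -> { cow: 'moo', spam: 'eggs' }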
|
|
|
module.exports = decode |
|
|
|
}).call(this,require("buffer").Buffer) |
|
},{"buffer":33}],4:[function(require,module,exports){ |
|
var Buffer = require('safe-buffer').Buffer |
|
|
|
/** |
|
* Encodes data in bencode. |
|
* |
|
* @param {Buffer|Array|String|Object|Number|Boolean} data |
|
* @return {Buffer} |
|
*/ |
|
function encode (data, buffer, offset) { |
|
var buffers = [] |
|
var result = null |
|
|
|
encode._encode(buffers, data) |
|
result = Buffer.concat(buffers) |
|
encode.bytes = result.length |
|
|
|
if (Buffer.isBuffer(buffer)) { |
|
result.copy(buffer, offset) |
|
return buffer |
|
} |
|
|
|
return result |
|
} |
|
|
|
encode.bytes = -1 |
|
encode._floatConversionDetected = false |
|
|
|
encode._encode = function (buffers, data) { |
|
if (Buffer.isBuffer(data)) { |
|
buffers.push(Buffer.from(data.length + ':')) |
|
buffers.push(data) |
|
return |
|
} |
|
|
|
if (data == null) { return } |
|
|
|
switch (typeof data) { |
|
case 'string': |
|
encode.buffer(buffers, data) |
|
break |
|
case 'number': |
|
encode.number(buffers, data) |
|
break |
|
case 'object': |
|
data.constructor === Array |
|
? encode.list(buffers, data) |
|
: encode.dict(buffers, data) |
|
break |
|
case 'boolean': |
|
encode.number(buffers, data ? 1 : 0) |
|
break |
|
} |
|
} |
|
|
|
var buffE = Buffer.from('e') |
|
var buffD = Buffer.from('d') |
|
var buffL = Buffer.from('l') |
|
|
|
encode.buffer = function (buffers, data) { |
|
buffers.push(Buffer.from(Buffer.byteLength(data) + ':' + data)) |
|
} |
|
|
|
encode.number = function (buffers, data) { |
|
var maxLo = 0x80000000 |
|
var hi = (data / maxLo) << 0 |
|
var lo = (data % maxLo) << 0 |
|
var val = hi * maxLo + lo |
|
|
|
buffers.push(Buffer.from('i' + val + 'e')) |
|
|
|
if (val !== data && !encode._floatConversionDetected) { |
|
encode._floatConversionDetected = true |
|
console.warn( |
|
'WARNING: Possible data corruption detected with value "' + data + '":', |
|
'Bencoding only defines support for integers, value was converted to "' + val + '"' |
|
) |
|
console.trace() |
|
} |
|
} |
|
|
|
encode.dict = function (buffers, data) { |
|
buffers.push(buffD) |
|
|
|
var j = 0 |
|
var k |
|
// fix for issue #13 - sorted dicts |
|
var keys = Object.keys(data).sort() |
|
var kl = keys.length |
|
|
|
for (; j < kl; j++) { |
|
k = keys[j] |
|
if (data[k] == null) continue |
|
encode.buffer(buffers, k) |
|
encode._encode(buffers, data[k]) |
|
} |
|
|
|
buffers.push(buffE) |
|
} |
|
|
|
encode.list = function (buffers, data) { |
|
var i = 0 |
|
var c = data.length |
|
buffers.push(buffL) |
|
|
|
for (; i < c; i++) { |
|
if (data[i] == null) continue |
|
encode._encode(buffers, data[i]) |
|
} |
|
|
|
buffers.push(buffE) |
|
} |
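// Worked example (not part of the original module):
//   encode({ foo: 42, bar: 'baz' })
// produces the Buffer for "d3:bar3:baz3:fooi42ee" -- keys are emitted in
// sorted order ('bar' before 'foo'), strings are length-prefixed, and
// integers are wrapped in 'i'...'e'.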
|
|
|
module.exports = encode |
|
|
|
},{"safe-buffer":25}],5:[function(require,module,exports){ |
|
var bencode = module.exports |
|
|
|
bencode.encode = require('./encode') |
|
bencode.decode = require('./decode') |
|
|
|
/** |
|
* Determines the amount of bytes |
|
* needed to encode the given value |
|
* @param {Object|Array|Buffer|String|Number|Boolean} value |
|
* @return {Number} byteCount |
|
*/ |
|
bencode.byteLength = bencode.encodingLength = function (value) { |
|
return bencode.encode(value).length |
|
} |
|
|
|
},{"./decode":3,"./encode":4}],6:[function(require,module,exports){ |
|
(function (Buffer){ |
|
var Container = typeof Buffer !== "undefined" ? Buffer //in node, use buffers |
|
: typeof Int8Array !== "undefined" ? Int8Array //in newer browsers, use webgl int8arrays |
|
: function(l){ var a = new Array(l); for(var i = 0; i < l; i++) a[i]=0; }; //else, do something similar |
|
|
|
function BitField(data, opts){ |
|
if(!(this instanceof BitField)) { |
|
return new BitField(data, opts); |
|
} |
|
|
|
if(arguments.length === 0){ |
|
data = 0; |
|
} |
|
|
|
this.grow = opts && (isFinite(opts.grow) && getByteSize(opts.grow) || opts.grow) || 0; |
|
|
|
if(typeof data === "number" || data === undefined){ |
|
data = new Container(getByteSize(data)); |
|
if(data.fill && !data._isBuffer) data.fill(0); // clear node buffers of garbage |
|
} |
|
this.buffer = data; |
|
} |
|
|
|
function getByteSize(num){ |
|
var out = num >> 3; |
|
if(num % 8 !== 0) out++; |
|
return out; |
|
} |
|
|
|
BitField.prototype.get = function(i){ |
|
var j = i >> 3; |
|
return (j < this.buffer.length) && |
|
!!(this.buffer[j] & (128 >> (i % 8))); |
|
}; |
|
|
|
BitField.prototype.set = function(i, b){ |
|
var j = i >> 3; |
|
if (b || arguments.length === 1){ |
|
if (this.buffer.length < j + 1) this._grow(Math.max(j + 1, Math.min(2 * this.buffer.length, this.grow))); |
|
// Set |
|
this.buffer[j] |= 128 >> (i % 8); |
|
} else if (j < this.buffer.length) { |
|
/// Clear |
|
this.buffer[j] &= ~(128 >> (i % 8)); |
|
} |
|
}; |
|
|
|
BitField.prototype._grow = function(length) { |
|
if (this.buffer.length < length && length <= this.grow) { |
|
var newBuffer = new Container(length); |
|
if (newBuffer.fill) newBuffer.fill(0); |
|
if (this.buffer.copy) this.buffer.copy(newBuffer, 0); |
|
else { |
|
for(var i = 0; i < this.buffer.length; i++) { |
|
newBuffer[i] = this.buffer[i]; |
|
} |
|
} |
|
this.buffer = newBuffer; |
|
} |
|
}; |
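// Illustrative sketch (not part of the original module): bits are stored
// most-significant-bit first within each byte, which matches the BitTorrent
// bitfield message layout (piece 0 is the high bit of byte 0).
function example_bitfield() {
    var field = new BitField(16);   // room for 16 pieces
    field.set(0);                   // byte 0 becomes 0b10000000 (0x80)
    field.set(9);                   // byte 1 becomes 0b01000000 (0x40)
    return [field.get(0), field.get(9), field.get(3)]; // [true, true, false]
}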
|
|
|
if(typeof module !== "undefined") module.exports = BitField; |
|
|
|
}).call(this,require("buffer").Buffer) |
|
},{"buffer":33}],7:[function(require,module,exports){ |
|
module.exports = Wire |
|
|
|
var arrayRemove = require('unordered-array-remove') |
|
var bencode = require('bencode') |
|
var BitField = require('bitfield') |
|
var Buffer = require('safe-buffer').Buffer |
|
var debug = require('debug')('bittorrent-protocol') |
|
var extend = require('xtend') |
|
var inherits = require('inherits') |
|
var randombytes = require('randombytes') |
|
var speedometer = require('speedometer') |
|
var stream = require('readable-stream') |
|
|
|
var BITFIELD_GROW = 400000 |
|
var KEEP_ALIVE_TIMEOUT = 55000 |
|
|
|
var MESSAGE_PROTOCOL = Buffer.from('\u0013BitTorrent protocol') |
|
var MESSAGE_KEEP_ALIVE = Buffer.from([0x00, 0x00, 0x00, 0x00]) |
|
var MESSAGE_CHOKE = Buffer.from([0x00, 0x00, 0x00, 0x01, 0x00]) |
|
var MESSAGE_UNCHOKE = Buffer.from([0x00, 0x00, 0x00, 0x01, 0x01]) |
|
var MESSAGE_INTERESTED = Buffer.from([0x00, 0x00, 0x00, 0x01, 0x02]) |
|
var MESSAGE_UNINTERESTED = Buffer.from([0x00, 0x00, 0x00, 0x01, 0x03]) |
|
|
|
var MESSAGE_RESERVED = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00] |
|
var MESSAGE_PORT = [0x00, 0x00, 0x00, 0x03, 0x09, 0x00, 0x00] |
|
|
|
function Request (piece, offset, length, callback) { |
|
this.piece = piece |
|
this.offset = offset |
|
this.length = length |
|
this.callback = callback |
|
} |
|
|
|
inherits(Wire, stream.Duplex) |
|
|
|
function Wire () { |
|
if (!(this instanceof Wire)) return new Wire() |
|
stream.Duplex.call(this) |
|
|
|
this._debugId = randombytes(4).toString('hex') |
|
this._debug('new wire') |
|
|
|
this.peerId = null // remote peer id (hex string) |
|
this.peerIdBuffer = null // remote peer id (buffer) |
|
this.type = null // connection type ('webrtc', 'tcpIncoming', 'tcpOutgoing', 'webSeed') |
|
|
|
this.amChoking = true // are we choking the peer? |
|
this.amInterested = false // are we interested in the peer? |
|
|
|
this.peerChoking = true // is the peer choking us? |
|
this.peerInterested = false // is the peer interested in us? |
|
|
|
// The largest torrent that I know of (the Geocities archive) is ~641 GB and has |
|
// ~41,000 pieces. Therefore, cap bitfield to 10x larger (400,000 bits) to support all |
|
// possible torrents but prevent malicious peers from growing bitfield to fill memory. |
|
this.peerPieces = new BitField(0, { grow: BITFIELD_GROW }) |
|
|
|
this.peerExtensions = {} |
|
|
|
this.requests = [] // outgoing |
|
this.peerRequests = [] // incoming |
|
|
|
this.extendedMapping = {} // number -> string, ex: 1 -> 'ut_metadata' |
|
this.peerExtendedMapping = {} // string -> number, ex: 'ut_metadata' -> 9
|
|
|
// The extended handshake to send, minus the "m" field, which gets automatically |
|
// filled from `this.extendedMapping` |
|
this.extendedHandshake = {} |
|
|
|
this.peerExtendedHandshake = {} // remote peer's extended handshake |
|
|
|
this._ext = {} // string -> function, ex 'ut_metadata' -> ut_metadata() |
|
this._nextExt = 1 |
|
|
|
this.uploaded = 0 |
|
this.downloaded = 0 |
|
this.uploadSpeed = speedometer() |
|
this.downloadSpeed = speedometer() |
|
|
|
this._keepAliveInterval = null |
|
this._timeout = null |
|
this._timeoutMs = 0 |
|
|
|
this.destroyed = false // was the wire ended by calling `destroy`? |
|
this._finished = false |
|
|
|
this._parserSize = 0 // number of needed bytes to parse next message from remote peer |
|
this._parser = null // function to call once `this._parserSize` bytes are available |
|
|
|
this._buffer = [] // incomplete message data |
|
this._bufferSize = 0 // cached total length of buffers in `this._buffer` |
|
|
|
this.on('finish', this._onFinish) |
|
|
|
this._parseHandshake() |
|
} |
|
|
|
/** |
|
* Set whether to send a "keep-alive" ping (sent every 55s) |
|
* @param {boolean} enable |
|
*/ |
|
Wire.prototype.setKeepAlive = function (enable) { |
|
var self = this |
|
self._debug('setKeepAlive %s', enable) |
|
clearInterval(self._keepAliveInterval) |
|
if (enable === false) return |
|
self._keepAliveInterval = setInterval(function () { |
|
self.keepAlive() |
|
}, KEEP_ALIVE_TIMEOUT) |
|
} |
|
|
|
/** |
|
* Set the amount of time to wait before considering a request to be "timed out" |
|
* @param {number} ms |
|
* @param {boolean=} unref (should the timer be unref'd? default: false) |
|
*/ |
|
Wire.prototype.setTimeout = function (ms, unref) { |
|
this._debug('setTimeout ms=%d unref=%s', ms, unref) |
|
this._clearTimeout() |
|
this._timeoutMs = ms |
|
this._timeoutUnref = !!unref |
|
this._updateTimeout() |
|
} |
|
|
|
Wire.prototype.destroy = function () { |
|
if (this.destroyed) return |
|
this.destroyed = true |
|
this._debug('destroy') |
|
this.emit('close') |
|
this.end() |
|
} |
|
|
|
Wire.prototype.end = function () { |
|
this._debug('end') |
|
this._onUninterested() |
|
this._onChoke() |
|
stream.Duplex.prototype.end.apply(this, arguments) |
|
} |
|
|
|
/** |
|
* Use the specified protocol extension. |
|
* @param {function} Extension |
|
*/ |
|
Wire.prototype.use = function (Extension) { |
|
var name = Extension.prototype.name |
|
if (!name) { |
|
throw new Error('Extension class requires a "name" property on the prototype') |
|
} |
|
this._debug('use extension.name=%s', name) |
|
|
|
var ext = this._nextExt |
|
var handler = new Extension(this) |
|
|
|
function noop () {} |
|
|
|
if (typeof handler.onHandshake !== 'function') { |
|
handler.onHandshake = noop |
|
} |
|
if (typeof handler.onExtendedHandshake !== 'function') { |
|
handler.onExtendedHandshake = noop |
|
} |
|
if (typeof handler.onMessage !== 'function') { |
|
handler.onMessage = noop |
|
} |
|
|
|
this.extendedMapping[ext] = name |
|
this._ext[name] = handler |
|
this[name] = handler |
|
|
|
this._nextExt += 1 |
|
} |
|
|
|
// |
|
// OUTGOING MESSAGES |
|
// |
|
|
|
/** |
|
* Message "keep-alive": <len=0000> |
|
*/ |
|
Wire.prototype.keepAlive = function () { |
|
this._debug('keep-alive') |
|
this._push(MESSAGE_KEEP_ALIVE) |
|
} |
|
|
|
/** |
|
* Message: "handshake" <pstrlen><pstr><reserved><info_hash><peer_id> |
|
* @param {Buffer|string} infoHash (as Buffer or *hex* string) |
|
* @param {Buffer|string} peerId |
|
* @param {Object} extensions |
|
*/ |
|
Wire.prototype.handshake = function (infoHash, peerId, extensions) { |
|
var infoHashBuffer, peerIdBuffer |
|
if (typeof infoHash === 'string') { |
|
infoHashBuffer = Buffer.from(infoHash, 'hex') |
|
} else { |
|
infoHashBuffer = infoHash |
|
infoHash = infoHashBuffer.toString('hex') |
|
} |
|
if (typeof peerId === 'string') { |
|
peerIdBuffer = Buffer.from(peerId, 'hex') |
|
} else { |
|
peerIdBuffer = peerId |
|
peerId = peerIdBuffer.toString('hex') |
|
} |
|
|
|
if (infoHashBuffer.length !== 20 || peerIdBuffer.length !== 20) { |
|
throw new Error('infoHash and peerId MUST have length 20') |
|
} |
|
|
|
this._debug('handshake i=%s p=%s exts=%o', infoHash, peerId, extensions) |
|
|
|
var reserved = Buffer.from(MESSAGE_RESERVED) |
|
|
|
// enable extended message |
|
reserved[5] |= 0x10 |
|
|
|
if (extensions && extensions.dht) reserved[7] |= 1 |
|
|
|
this._push(Buffer.concat([MESSAGE_PROTOCOL, reserved, infoHashBuffer, peerIdBuffer])) |
|
this._handshakeSent = true |
|
|
|
if (this.peerExtensions.extended && !this._extendedHandshakeSent) { |
|
// Peer's handshake indicated support already |
|
// (incoming connection) |
|
this._sendExtendedHandshake() |
|
} |
|
} |
|
|
|
/* Peer supports BEP-0010, send extended handshake. |
|
* |
|
* This comes after the 'handshake' event to give the user a chance to populate |
|
* `this.extendedHandshake` and `this.extendedMapping` before the extended handshake |
|
* is sent to the remote peer. |
|
*/ |
|
Wire.prototype._sendExtendedHandshake = function () { |
|
// Create extended message object from registered extensions |
|
var msg = extend(this.extendedHandshake) |
|
msg.m = {} |
|
for (var ext in this.extendedMapping) { |
|
var name = this.extendedMapping[ext] |
|
msg.m[name] = Number(ext) |
|
} |
|
|
|
// Send extended handshake |
|
this.extended(0, bencode.encode(msg)) |
|
this._extendedHandshakeSent = true |
|
} |
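// Worked example (not part of the original module): with one registered
// extension mapped as 1 -> 'ut_metadata', the object bencoded here and sent
// as message id 20 with extension id 0 would look like
//   { m: { ut_metadata: 1 }, ...any fields placed on this.extendedHandshake }
// per BEP-0010.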
|
|
|
/** |
|
* Message "choke": <len=0001><id=0> |
|
*/ |
|
Wire.prototype.choke = function () { |
|
if (this.amChoking) return |
|
this.amChoking = true |
|
this._debug('choke') |
|
while (this.peerRequests.length) { |
|
this.peerRequests.pop() |
|
} |
|
this._push(MESSAGE_CHOKE) |
|
} |
|
|
|
/** |
|
* Message "unchoke": <len=0001><id=1> |
|
*/ |
|
Wire.prototype.unchoke = function () { |
|
if (!this.amChoking) return |
|
this.amChoking = false |
|
this._debug('unchoke') |
|
this._push(MESSAGE_UNCHOKE) |
|
} |
|
|
|
/** |
|
* Message "interested": <len=0001><id=2> |
|
*/ |
|
Wire.prototype.interested = function () { |
|
if (this.amInterested) return |
|
this.amInterested = true |
|
this._debug('interested') |
|
this._push(MESSAGE_INTERESTED) |
|
} |
|
|
|
/** |
|
* Message "uninterested": <len=0001><id=3> |
|
*/ |
|
Wire.prototype.uninterested = function () { |
|
if (!this.amInterested) return |
|
this.amInterested = false |
|
this._debug('uninterested') |
|
this._push(MESSAGE_UNINTERESTED) |
|
} |
|
|
|
/** |
|
* Message "have": <len=0005><id=4><piece index> |
|
* @param {number} index |
|
*/ |
|
Wire.prototype.have = function (index) { |
|
this._debug('have %d', index) |
|
this._message(4, [index], null) |
|
} |
|
|
|
/** |
|
* Message "bitfield": <len=0001+X><id=5><bitfield> |
|
* @param {BitField|Buffer} bitfield |
|
*/ |
|
Wire.prototype.bitfield = function (bitfield) { |
|
this._debug('bitfield') |
|
if (!Buffer.isBuffer(bitfield)) bitfield = bitfield.buffer |
|
this._message(5, [], bitfield) |
|
} |
|
|
|
/** |
|
* Message "request": <len=0013><id=6><index><begin><length> |
|
* @param {number} index |
|
* @param {number} offset |
|
* @param {number} length |
|
* @param {function} cb |
|
*/ |
|
Wire.prototype.request = function (index, offset, length, cb) { |
|
if (!cb) cb = function () {} |
|
if (this._finished) return cb(new Error('wire is closed')) |
|
if (this.peerChoking) return cb(new Error('peer is choking')) |
|
|
|
this._debug('request index=%d offset=%d length=%d', index, offset, length) |
|
|
|
this.requests.push(new Request(index, offset, length, cb)) |
|
this._updateTimeout() |
|
this._message(6, [index, offset, length], null) |
|
} |
|
|
|
/** |
|
* Message "piece": <len=0009+X><id=7><index><begin><block> |
|
* @param {number} index |
|
* @param {number} offset |
|
* @param {Buffer} buffer |
|
*/ |
|
Wire.prototype.piece = function (index, offset, buffer) { |
|
this._debug('piece index=%d offset=%d', index, offset) |
|
this.uploaded += buffer.length |
|
this.uploadSpeed(buffer.length) |
|
this.emit('upload', buffer.length) |
|
this._message(7, [index, offset], buffer) |
|
} |
|
|
|
/** |
|
* Message "cancel": <len=0013><id=8><index><begin><length> |
|
* @param {number} index |
|
* @param {number} offset |
|
* @param {number} length |
|
*/ |
|
Wire.prototype.cancel = function (index, offset, length) { |
|
this._debug('cancel index=%d offset=%d length=%d', index, offset, length) |
|
this._callback( |
|
pull(this.requests, index, offset, length), |
|
new Error('request was cancelled'), |
|
null |
|
) |
|
this._message(8, [index, offset, length], null) |
|
} |
|
|
|
/** |
|
* Message: "port" <len=0003><id=9><listen-port> |
|
* @param {Number} port |
|
*/ |
|
Wire.prototype.port = function (port) { |
|
this._debug('port %d', port) |
|
var message = Buffer.from(MESSAGE_PORT) |
|
message.writeUInt16BE(port, 5) |
|
this._push(message) |
|
} |
|
|
|
/** |
|
* Message: "extended" <len=0005+X><id=20><ext-number><payload> |
|
* @param {number|string} ext |
|
* @param {Object} obj |
|
*/ |
|
Wire.prototype.extended = function (ext, obj) { |
|
this._debug('extended ext=%s', ext) |
|
if (typeof ext === 'string' && this.peerExtendedMapping[ext]) { |
|
ext = this.peerExtendedMapping[ext] |
|
} |
|
if (typeof ext === 'number') { |
|
var extId = Buffer.from([ext]) |
|
var buf = Buffer.isBuffer(obj) ? obj : bencode.encode(obj) |
|
|
|
this._message(20, [], Buffer.concat([extId, buf])) |
|
} else { |
|
throw new Error('Unrecognized extension: ' + ext) |
|
} |
|
} |
|
|
|
/** |
|
* Duplex stream method. Called whenever the remote peer stream wants data. No-op |
|
* since we'll just push data whenever we get it. |
|
*/ |
|
Wire.prototype._read = function () {} |
|
|
|
/** |
|
* Send a message to the remote peer. |
|
*/ |
|
Wire.prototype._message = function (id, numbers, data) { |
|
var dataLength = data ? data.length : 0 |
|
var buffer = Buffer.allocUnsafe(5 + (4 * numbers.length)) |
|
|
|
buffer.writeUInt32BE(buffer.length + dataLength - 4, 0) |
|
buffer[4] = id |
|
for (var i = 0; i < numbers.length; i++) { |
|
buffer.writeUInt32BE(numbers[i], 5 + (4 * i)) |
|
} |
|
|
|
this._push(buffer) |
|
if (data) this._push(data) |
|
} |
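// Worked example (not part of the original module): a "request" for 16 KiB at
// the start of piece 1 is framed by _message(6, [1, 0, 16384], null) as
//   00 00 00 0d  06  00 00 00 01  00 00 00 00  00 00 40 00
// i.e. <len=0013><id=6><index=1><begin=0><length=16384>.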
|
|
|
Wire.prototype._push = function (data) { |
|
if (this._finished) return |
|
return this.push(data) |
|
} |
|
|
|
// |
|
// INCOMING MESSAGES |
|
// |
|
|
|
Wire.prototype._onKeepAlive = function () { |
|
this._debug('got keep-alive') |
|
this.emit('keep-alive') |
|
} |
|
|
|
Wire.prototype._onHandshake = function (infoHashBuffer, peerIdBuffer, extensions) { |
|
var infoHash = infoHashBuffer.toString('hex') |
|
var peerId = peerIdBuffer.toString('hex') |
|
|
|
this._debug('got handshake i=%s p=%s exts=%o', infoHash, peerId, extensions) |
|
|
|
this.peerId = peerId |
|
this.peerIdBuffer = peerIdBuffer |
|
this.peerExtensions = extensions |
|
|
|
this.emit('handshake', infoHash, peerId, extensions) |
|
|
|
var name |
|
for (name in this._ext) { |
|
this._ext[name].onHandshake(infoHash, peerId, extensions) |
|
} |
|
|
|
if (extensions.extended && this._handshakeSent && |
|
!this._extendedHandshakeSent) { |
|
// outgoing connection |
|
this._sendExtendedHandshake() |
|
} |
|
} |
|
|
|
Wire.prototype._onChoke = function () { |
|
this.peerChoking = true |
|
this._debug('got choke') |
|
this.emit('choke') |
|
while (this.requests.length) { |
|
this._callback(this.requests.pop(), new Error('peer is choking'), null) |
|
} |
|
} |
|
|
|
Wire.prototype._onUnchoke = function () { |
|
this.peerChoking = false |
|
this._debug('got unchoke') |
|
this.emit('unchoke') |
|
} |
|
|
|
Wire.prototype._onInterested = function () { |
|
this.peerInterested = true |
|
this._debug('got interested') |
|
this.emit('interested') |
|
} |
|
|
|
Wire.prototype._onUninterested = function () { |
|
this.peerInterested = false |
|
this._debug('got uninterested') |
|
this.emit('uninterested') |
|
} |
|
|
|
Wire.prototype._onHave = function (index) { |
|
if (this.peerPieces.get(index)) return |
|
this._debug('got have %d', index) |
|
|
|
this.peerPieces.set(index, true) |
|
this.emit('have', index) |
|
} |
|
|
|
Wire.prototype._onBitField = function (buffer) { |
|
this.peerPieces = new BitField(buffer) |
|
this._debug('got bitfield') |
|
this.emit('bitfield', this.peerPieces) |
|
} |
|
|
|
Wire.prototype._onRequest = function (index, offset, length) { |
|
var self = this |
|
if (self.amChoking) return |
|
self._debug('got request index=%d offset=%d length=%d', index, offset, length) |
|
|
|
var respond = function (err, buffer) { |
|
if (request !== pull(self.peerRequests, index, offset, length)) return |
|
if (err) return self._debug('error satisfying request index=%d offset=%d length=%d (%s)', index, offset, length, err.message) |
|
self.piece(index, offset, buffer) |
|
} |
|
|
|
var request = new Request(index, offset, length, respond) |
|
self.peerRequests.push(request) |
|
self.emit('request', index, offset, length, respond) |
|
} |
|
|
|
Wire.prototype._onPiece = function (index, offset, buffer) { |
|
this._debug('got piece index=%d offset=%d', index, offset) |
|
this._callback(pull(this.requests, index, offset, buffer.length), null, buffer) |
|
this.downloaded += buffer.length |
|
this.downloadSpeed(buffer.length) |
|
this.emit('download', buffer.length) |
|
this.emit('piece', index, offset, buffer) |
|
} |
|
|
|
Wire.prototype._onCancel = function (index, offset, length) { |
|
this._debug('got cancel index=%d offset=%d length=%d', index, offset, length) |
|
pull(this.peerRequests, index, offset, length) |
|
this.emit('cancel', index, offset, length) |
|
} |
|
|
|
Wire.prototype._onPort = function (port) { |
|
this._debug('got port %d', port) |
|
this.emit('port', port) |
|
} |
|
|
|
Wire.prototype._onExtended = function (ext, buf) { |
|
if (ext === 0) { |
|
var info |
|
try { |
|
info = bencode.decode(buf) |
|
} catch (err) { |
|
this._debug('ignoring invalid extended handshake: %s', err.message || err) |
|
} |
|
|
|
if (!info) return |
|
this.peerExtendedHandshake = info |
|
|
|
var name |
|
if (typeof info.m === 'object') { |
|
for (name in info.m) { |
|
this.peerExtendedMapping[name] = Number(info.m[name].toString()) |
|
} |
|
} |
|
for (name in this._ext) { |
|
if (this.peerExtendedMapping[name]) { |
|
this._ext[name].onExtendedHandshake(this.peerExtendedHandshake) |
|
} |
|
} |
|
this._debug('got extended handshake') |
|
this.emit('extended', 'handshake', this.peerExtendedHandshake) |
|
} else { |
|
if (this.extendedMapping[ext]) { |
|
ext = this.extendedMapping[ext] // friendly name for extension |
|
if (this._ext[ext]) { |
|
// there is a registered extension handler, so call it
|
this._ext[ext].onMessage(buf) |
|
} |
|
} |
|
this._debug('got extended message ext=%s', ext) |
|
this.emit('extended', ext, buf) |
|
} |
|
} |
|
|
|
Wire.prototype._onTimeout = function () { |
|
this._debug('request timed out') |
|
this._callback(this.requests.shift(), new Error('request has timed out'), null) |
|
this.emit('timeout') |
|
} |
|
|
|
/** |
|
* Duplex stream method. Called whenever the remote peer has data for us. Data that the |
|
* remote peer sends gets buffered (i.e. not actually processed) until the right number |
|
* of bytes have arrived, determined by the last call to `this._parse(number, callback)`. |
|
* Once enough bytes have arrived to process the message, the callback function |
|
* (i.e. `this._parser`) gets called with the full buffer of data. |
|
* @param {Buffer} data |
|
* @param {string} encoding |
|
* @param {function} cb |
|
*/ |
|
Wire.prototype._write = function (data, encoding, cb) { |
|
this._bufferSize += data.length |
|
this._buffer.push(data) |
|
|
|
while (this._bufferSize >= this._parserSize) { |
|
var buffer = (this._buffer.length === 1) |
|
? this._buffer[0] |
|
: Buffer.concat(this._buffer) |
|
this._bufferSize -= this._parserSize |
|
this._buffer = this._bufferSize |
|
? [buffer.slice(this._parserSize)] |
|
: [] |
|
this._parser(buffer.slice(0, this._parserSize)) |
|
} |
|
|
|
cb(null) // Signal that we're ready for more data |
|
} |
|
|
|
Wire.prototype._callback = function (request, err, buffer) { |
|
if (!request) return |
|
|
|
this._clearTimeout() |
|
|
|
if (!this.peerChoking && !this._finished) this._updateTimeout() |
|
request.callback(err, buffer) |
|
} |
|
|
|
Wire.prototype._clearTimeout = function () { |
|
if (!this._timeout) return |
|
|
|
clearTimeout(this._timeout) |
|
this._timeout = null |
|
} |
|
|
|
Wire.prototype._updateTimeout = function () { |
|
var self = this |
|
if (!self._timeoutMs || !self.requests.length || self._timeout) return |
|
|
|
self._timeout = setTimeout(function () { |
|
self._onTimeout() |
|
}, self._timeoutMs) |
|
if (self._timeoutUnref && self._timeout.unref) self._timeout.unref() |
|
} |
|
|
|
/** |
|
* Takes a number of bytes that the local peer is waiting to receive from the remote peer |
|
* in order to parse a complete message, and a callback function to be called once enough |
|
* bytes have arrived. |
|
* @param {number} size |
|
* @param {function} parser |
|
*/ |
|
Wire.prototype._parse = function (size, parser) { |
|
this._parserSize = size |
|
this._parser = parser |
|
} |
|
|
|
/** |
|
* Handle the first 4 bytes of a message, to determine the length of bytes that must be |
|
* waited for in order to have the whole message. |
|
* @param {Buffer} buffer |
|
*/ |
|
Wire.prototype._onMessageLength = function (buffer) { |
|
var length = buffer.readUInt32BE(0) |
|
if (length > 0) { |
|
this._parse(length, this._onMessage) |
|
} else { |
|
this._onKeepAlive() |
|
this._parse(4, this._onMessageLength) |
|
} |
|
} |
|
|
|
/** |
|
* Handle a message from the remote peer. |
|
* @param {Buffer} buffer |
|
*/ |
|
Wire.prototype._onMessage = function (buffer) { |
|
this._parse(4, this._onMessageLength) |
|
switch (buffer[0]) { |
|
case 0: |
|
return this._onChoke() |
|
case 1: |
|
return this._onUnchoke() |
|
case 2: |
|
return this._onInterested() |
|
case 3: |
|
return this._onUninterested() |
|
case 4: |
|
return this._onHave(buffer.readUInt32BE(1)) |
|
case 5: |
|
return this._onBitField(buffer.slice(1)) |
|
case 6: |
|
return this._onRequest(buffer.readUInt32BE(1), |
|
buffer.readUInt32BE(5), buffer.readUInt32BE(9)) |
|
case 7: |
|
return this._onPiece(buffer.readUInt32BE(1), |
|
buffer.readUInt32BE(5), buffer.slice(9)) |
|
case 8: |
|
return this._onCancel(buffer.readUInt32BE(1), |
|
buffer.readUInt32BE(5), buffer.readUInt32BE(9)) |
|
case 9: |
|
return this._onPort(buffer.readUInt16BE(1)) |
|
case 20: |
|
return this._onExtended(buffer.readUInt8(1), buffer.slice(2)) |
|
default: |
|
this._debug('got unknown message') |
|
return this.emit('unknownmessage', buffer) |
|
} |
|
} |
|
|
|
Wire.prototype._parseHandshake = function () { |
|
var self = this |
|
self._parse(1, function (buffer) { |
|
var pstrlen = buffer.readUInt8(0) |
|
self._parse(pstrlen + 48, function (handshake) { |
|
var protocol = handshake.slice(0, pstrlen) |
|
if (protocol.toString() !== 'BitTorrent protocol') { |
|
self._debug('Error: wire not speaking BitTorrent protocol (%s)', protocol.toString()) |
|
self.end() |
|
return |
|
} |
|
handshake = handshake.slice(pstrlen) |
|
self._onHandshake(handshake.slice(8, 28), handshake.slice(28, 48), { |
|
dht: !!(handshake[7] & 0x01), // see bep_0005 |
|
extended: !!(handshake[5] & 0x10) // see bep_0010 |
|
}) |
|
self._parse(4, self._onMessageLength) |
|
}) |
|
}) |
|
} |
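// Worked example (not part of the original module): for the standard protocol
// string, pstrlen is 19, so _parseHandshake waits for 19 + 48 = 67 further
// bytes: 19 bytes "BitTorrent protocol", 8 reserved bytes (bit 0x10 of byte 5
// signals extended messaging, bit 0x01 of byte 7 signals DHT), then 20 bytes
// of info-hash and 20 bytes of peer-id, after which parsing switches to the
// 4-byte length prefix of regular messages.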
|
|
|
Wire.prototype._onFinish = function () { |
|
this._finished = true |
|
|
|
this.push(null) // stream cannot be half open, so signal the end of it |
|
while (this.read()) {} // consume and discard the rest of the stream data |
|
|
|
clearInterval(this._keepAliveInterval) |
|
this._parse(Number.MAX_VALUE, function () {}) |
|
while (this.peerRequests.length) { |
|
this.peerRequests.pop() |
|
} |
|
while (this.requests.length) { |
|
this._callback(this.requests.pop(), new Error('wire was closed'), null) |
|
} |
|
} |
|
|
|
Wire.prototype._debug = function () { |
|
var args = [].slice.call(arguments) |
|
args[0] = '[' + this._debugId + '] ' + args[0] |
|
debug.apply(null, args) |
|
} |
|
|
|
function pull (requests, piece, offset, length) { |
|
for (var i = 0; i < requests.length; i++) { |
|
var req = requests[i] |
|
if (req.piece === piece && req.offset === offset && req.length === length) { |
|
arrayRemove(requests, i) |
|
return req |
|
} |
|
} |
|
return null |
|
} |
|
|
|
},{"bencode":5,"bitfield":6,"debug":9,"inherits":11,"randombytes":15,"readable-stream":24,"safe-buffer":25,"speedometer":26,"unordered-array-remove":28,"xtend":30}],8:[function(require,module,exports){ |
|
(function (Buffer){ |
|
// Copyright Joyent, Inc. and other Node contributors. |
|
// |
|
// Permission is hereby granted, free of charge, to any person obtaining a |
|
// copy of this software and associated documentation files (the |
|
// "Software"), to deal in the Software without restriction, including |
|
// without limitation the rights to use, copy, modify, merge, publish, |
|
// distribute, sublicense, and/or sell copies of the Software, and to permit |
|
// persons to whom the Software is furnished to do so, subject to the |
|
// following conditions: |
|
// |
|
// The above copyright notice and this permission notice shall be included |
|
// in all copies or substantial portions of the Software. |
|
// |
|
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS |
|
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
|
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN |
|
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, |
|
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR |
|
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE |
|
// USE OR OTHER DEALINGS IN THE SOFTWARE. |
|
|
|
// NOTE: These type checking functions intentionally don't use `instanceof` |
|
// because it is fragile and can be easily faked with `Object.create()`. |
|
|
|
function isArray(arg) { |
|
if (Array.isArray) { |
|
return Array.isArray(arg); |
|
} |
|
return objectToString(arg) === '[object Array]'; |
|
} |
|
exports.isArray = isArray; |
|
|
|
function isBoolean(arg) { |
|
return typeof arg === 'boolean'; |
|
} |
|
exports.isBoolean = isBoolean; |
|
|
|
function isNull(arg) { |
|
return arg === null; |
|
} |
|
exports.isNull = isNull; |
|
|
|
function isNullOrUndefined(arg) { |
|
return arg == null; |
|
} |
|
exports.isNullOrUndefined = isNullOrUndefined; |
|
|
|
function isNumber(arg) { |
|
return typeof arg === 'number'; |
|
} |
|
exports.isNumber = isNumber; |
|
|
|
function isString(arg) { |
|
return typeof arg === 'string'; |
|
} |
|
exports.isString = isString; |
|
|
|
function isSymbol(arg) { |
|
return typeof arg === 'symbol'; |
|
} |
|
exports.isSymbol = isSymbol; |
|
|
|
function isUndefined(arg) { |
|
return arg === void 0; |
|
} |
|
exports.isUndefined = isUndefined; |
|
|
|
function isRegExp(re) { |
|
return objectToString(re) === '[object RegExp]'; |
|
} |
|
exports.isRegExp = isRegExp; |
|
|
|
function isObject(arg) { |
|
return typeof arg === 'object' && arg !== null; |
|
} |
|
exports.isObject = isObject; |
|
|
|
function isDate(d) { |
|
return objectToString(d) === '[object Date]'; |
|
} |
|
exports.isDate = isDate; |
|
|
|
function isError(e) { |
|
return (objectToString(e) === '[object Error]' || e instanceof Error); |
|
} |
|
exports.isError = isError; |
|
|
|
function isFunction(arg) { |
|
return typeof arg === 'function'; |
|
} |
|
exports.isFunction = isFunction; |
|
|
|
function isPrimitive(arg) { |
|
return arg === null || |
|
typeof arg === 'boolean' || |
|
typeof arg === 'number' || |
|
typeof arg === 'string' || |
|
typeof arg === 'symbol' || // ES6 symbol |
|
typeof arg === 'undefined'; |
|
} |
|
exports.isPrimitive = isPrimitive; |
|
|
|
exports.isBuffer = Buffer.isBuffer; |
|
|
|
function objectToString(o) { |
|
return Object.prototype.toString.call(o); |
|
} |
|
|
|
}).call(this,{"isBuffer":require("../../../../../mnt/storage/contents/local/node/node-v6.9.2-linux-x64/lib/node_modules/browserify/node_modules/is-buffer/index.js")}) |
|
},{"../../../../../mnt/storage/contents/local/node/node-v6.9.2-linux-x64/lib/node_modules/browserify/node_modules/is-buffer/index.js":38}],9:[function(require,module,exports){ |
|
(function (process){ |
|
/** |
|
* This is the web browser implementation of `debug()`. |
|
* |
|
* Expose `debug()` as the module. |
|
*/ |
|
|
|
exports = module.exports = require('./debug'); |
|
exports.log = log; |
|
exports.formatArgs = formatArgs; |
|
exports.save = save; |
|
exports.load = load; |
|
exports.useColors = useColors; |
|
exports.storage = 'undefined' != typeof chrome |
|
&& 'undefined' != typeof chrome.storage |
|
? chrome.storage.local |
|
: localstorage(); |
|
|
|
/** |
|
* Colors. |
|
*/ |
|
|
|
exports.colors = [ |
|
'lightseagreen', |
|
'forestgreen', |
|
'goldenrod', |
|
'dodgerblue', |
|
'darkorchid', |
|
'crimson' |
|
]; |
|
|
|
/** |
|
* Currently only WebKit-based Web Inspectors, Firefox >= v31, |
|
* and the Firebug extension (any Firefox version) are known |
|
* to support "%c" CSS customizations. |
|
* |
|
* TODO: add a `localStorage` variable to explicitly enable/disable colors |
|
*/ |
|
|
|
function useColors() { |
|
// NB: In an Electron preload script, document will be defined but not fully |
|
// initialized. Since we know we're in Chrome, we'll just detect this case |
|
// explicitly |
|
if (typeof window !== 'undefined' && window.process && window.process.type === 'renderer') { |
|
return true; |
|
} |
|
|
|
// is webkit? http://stackoverflow.com/a/16459606/376773 |
|
// document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 |
|
return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || |
|
// is firebug? http://stackoverflow.com/a/398120/376773 |
|
(typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || |
|
// is firefox >= v31? |
|
// https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages |
|
(typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || |
|
// double check webkit in userAgent just in case we are in a worker |
|
(typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); |
|
} |
|
|
|
/** |
|
* Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. |
|
*/ |
|
|
|
exports.formatters.j = function(v) { |
|
try { |
|
return JSON.stringify(v); |
|
} catch (err) { |
|
return '[UnexpectedJSONParseError]: ' + err.message; |
|
} |
|
}; |
|
|
|
|
|
/** |
|
* Colorize log arguments if enabled. |
|
* |
|
* @api public |
|
*/ |
|
|
|
function formatArgs(args) { |
|
var useColors = this.useColors; |
|
|
|
args[0] = (useColors ? '%c' : '') |
|
+ this.namespace |
|
+ (useColors ? ' %c' : ' ') |
|
+ args[0] |
|
+ (useColors ? '%c ' : ' ') |
|
+ '+' + exports.humanize(this.diff); |
|
|
|
if (!useColors) return; |
|
|
|
var c = 'color: ' + this.color; |
|
args.splice(1, 0, c, 'color: inherit') |
|
|
|
// the final "%c" is somewhat tricky, because there could be other |
|
// arguments passed either before or after the %c, so we need to |
|
// figure out the correct index to insert the CSS into |
|
var index = 0; |
|
var lastC = 0; |
|
args[0].replace(/%[a-zA-Z%]/g, function(match) { |
|
if ('%%' === match) return; |
|
index++; |
|
if ('%c' === match) { |
|
// we only are interested in the *last* %c |
|
// (the user may have provided their own) |
|
lastC = index; |
|
} |
|
}); |
|
|
|
args.splice(lastC, 0, c); |
|
} |
|
|
|
/** |
|
* Invokes `console.log()` when available. |
|
* No-op when `console.log` is not a "function". |
|
* |
|
* @api public |
|
*/ |
|
|
|
function log() { |
|
// this hackery is required for IE8/9, where |
|
// the `console.log` function doesn't have 'apply' |
|
return 'object' === typeof console |
|
&& console.log |
|
&& Function.prototype.apply.call(console.log, console, arguments); |
|
} |
|
|
|
/** |
|
* Save `namespaces`. |
|
* |
|
* @param {String} namespaces |
|
* @api private |
|
*/ |
|
|
|
function save(namespaces) { |
|
try { |
|
if (null == namespaces) { |
|
exports.storage.removeItem('debug'); |
|
} else { |
|
exports.storage.debug = namespaces; |
|
} |
|
} catch(e) {} |
|
} |
|
|
|
/** |
|
* Load `namespaces`. |
|
* |
|
* @return {String} returns the previously persisted debug modes |
|
* @api private |
|
*/ |
|
|
|
function load() { |
|
var r; |
|
try { |
|
r = exports.storage.debug; |
|
} catch(e) {} |
|
|
|
// If debug isn't set in LS, and we're in Electron, try to load $DEBUG |
|
if (!r && typeof process !== 'undefined' && 'env' in process) { |
|
r = process.env.DEBUG; |
|
} |
|
|
|
return r; |
|
} |
|
|
|
/** |
|
* Enable namespaces listed in `localStorage.debug` initially. |
|
*/ |
|
|
|
exports.enable(load()); |
|
|
|
/** |
|
* Localstorage attempts to return the localstorage. |
|
* |
|
* This is necessary because safari throws |
|
* when a user disables cookies/localstorage |
|
* and you attempt to access it. |
|
* |
|
* @return {LocalStorage} |
|
* @api private |
|
*/ |
|
|
|
function localstorage() { |
|
try { |
|
return window.localStorage; |
|
} catch (e) {} |
|
} |
|
|
|
}).call(this,require('_process')) |
|
},{"./debug":10,"_process":41}],10:[function(require,module,exports){ |
|
|
|
/** |
|
* This is the common logic for both the Node.js and web browser |
|
* implementations of `debug()`. |
|
* |
|
* Expose `debug()` as the module. |
|
*/ |
|
|
|
exports = module.exports = createDebug.debug = createDebug['default'] = createDebug; |
|
exports.coerce = coerce; |
|
exports.disable = disable; |
|
exports.enable = enable; |
|
exports.enabled = enabled; |
|
exports.humanize = require('ms'); |
|
|
|
/** |
|
* The currently active debug mode names, and names to skip. |
|
*/ |
|
|
|
exports.names = []; |
|
exports.skips = []; |
|
|
|
/** |
|
* Map of special "%n" handling functions, for the debug "format" argument. |
|
* |
|
* Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". |
|
*/ |
|
|
|
exports.formatters = {}; |
|
|
|
/** |
|
* Previous log timestamp. |
|
*/ |
|
|
|
var prevTime; |
|
|
|
/** |
|
* Select a color. |
|
* @param {String} namespace |
|
* @return {Number} |
|
* @api private |
|
*/ |
|
|
|
function selectColor(namespace) { |
|
var hash = 0, i; |
|
|
|
for (i in namespace) { |
|
hash = ((hash << 5) - hash) + namespace.charCodeAt(i); |
|
hash |= 0; // Convert to 32bit integer |
|
} |
|
|
|
return exports.colors[Math.abs(hash) % exports.colors.length]; |
|
} |
|
|
|
/** |
|
* Create a debugger with the given `namespace`. |
|
* |
|
* @param {String} namespace |
|
* @return {Function} |
|
* @api public |
|
*/ |
|
|
|
function createDebug(namespace) { |
|
|
|
function debug() { |
|
// disabled? |
|
if (!debug.enabled) return; |
|
|
|
var self = debug; |
|
|
|
// set `diff` timestamp |
|
var curr = +new Date(); |
|
var ms = curr - (prevTime || curr); |
|
self.diff = ms; |
|
self.prev = prevTime; |
|
self.curr = curr; |
|
prevTime = curr; |
|
|
|
// turn the `arguments` into a proper Array |
|
var args = new Array(arguments.length); |
|
for (var i = 0; i < args.length; i++) { |
|
args[i] = arguments[i]; |
|
} |
|
|
|
args[0] = exports.coerce(args[0]); |
|
|
|
if ('string' !== typeof args[0]) { |
|
// anything else let's inspect with %O |
|
args.unshift('%O'); |
|
} |
|
|
|
// apply any `formatters` transformations |
|
var index = 0; |
|
args[0] = args[0].replace(/%([a-zA-Z%])/g, function(match, format) { |
|
// if we encounter an escaped % then don't increase the array index |
|
if (match === '%%') return match; |
|
index++; |
|
var formatter = exports.formatters[format]; |
|
if ('function' === typeof formatter) { |
|
var val = args[index]; |
|
match = formatter.call(self, val); |
|
|
|
// now we need to remove `args[index]` since it's inlined in the `format` |
|
args.splice(index, 1); |
|
index--; |
|
} |
|
return match; |
|
}); |
|
|
|
// apply env-specific formatting (colors, etc.) |
|
exports.formatArgs.call(self, args); |
|
|
|
var logFn = debug.log || exports.log || console.log.bind(console); |
|
logFn.apply(self, args); |
|
} |
|
|
|
debug.namespace = namespace; |
|
debug.enabled = exports.enabled(namespace); |
|
debug.useColors = exports.useColors(); |
|
debug.color = selectColor(namespace); |
|
|
|
// env-specific initialization logic for debug instances |
|
if ('function' === typeof exports.init) { |
|
exports.init(debug); |
|
} |
|
|
|
return debug; |
|
} |
|
|
|
/** |
|
* Enables a debug mode by namespaces. This can include modes |
|
* separated by a colon and wildcards. |
|
* |
|
* @param {String} namespaces |
|
* @api public |
|
*/ |
|
|
|
function enable(namespaces) { |
|
exports.save(namespaces); |
|
|
|
exports.names = []; |
|
exports.skips = []; |
|
|
|
var split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/); |
|
var len = split.length; |
|
|
|
for (var i = 0; i < len; i++) { |
|
if (!split[i]) continue; // ignore empty strings |
|
namespaces = split[i].replace(/\*/g, '.*?'); |
|
if (namespaces[0] === '-') { |
|
exports.skips.push(new RegExp('^' + namespaces.substr(1) + '$')); |
|
} else { |
|
exports.names.push(new RegExp('^' + namespaces + '$')); |
|
} |
|
} |
|
} |
|
|
|
/** |
|
* Disable debug output. |
|
* |
|
* @api public |
|
*/ |
|
|
|
function disable() { |
|
exports.enable(''); |
|
} |
|
|
|
/** |
|
* Returns true if the given mode name is enabled, false otherwise. |
|
* |
|
* @param {String} name |
|
* @return {Boolean} |
|
* @api public |
|
*/ |
|
|
|
function enabled(name) { |
|
var i, len; |
|
for (i = 0, len = exports.skips.length; i < len; i++) { |
|
if (exports.skips[i].test(name)) { |
|
return false; |
|
} |
|
} |
|
for (i = 0, len = exports.names.length; i < len; i++) { |
|
if (exports.names[i].test(name)) { |
|
return true; |
|
} |
|
} |
|
return false; |
|
} |
|
|
|
/** |
|
* Coerce `val`. |
|
* |
|
* @param {Mixed} val |
|
* @return {Mixed} |
|
* @api private |
|
*/ |
|
|
|
function coerce(val) { |
|
if (val instanceof Error) return val.stack || val.message; |
|
return val; |
|
} |
|
|
|
},{"ms":13}],11:[function(require,module,exports){ |
|
if (typeof Object.create === 'function') { |
|
// implementation from standard node.js 'util' module |
|
module.exports = function inherits(ctor, superCtor) { |
|
ctor.super_ = superCtor |
|
ctor.prototype = Object.create(superCtor.prototype, { |
|
constructor: { |
|
value: ctor, |
|
enumerable: false, |
|
writable: true, |
|
configurable: true |
|
} |
|
}); |
|
}; |
|
} else { |
|
// old school shim for old browsers |
|
module.exports = function inherits(ctor, superCtor) { |
|
ctor.super_ = superCtor |
|
var TempCtor = function () {} |
|
TempCtor.prototype = superCtor.prototype |
|
ctor.prototype = new TempCtor() |
|
ctor.prototype.constructor = ctor |
|
} |
|
} |
|
|
|
},{}],12:[function(require,module,exports){ |
|
var toString = {}.toString; |
|
|
|
module.exports = Array.isArray || function (arr) { |
|
return toString.call(arr) == '[object Array]'; |
|
}; |
|
|
|
},{}],13:[function(require,module,exports){ |
|
/** |
|
* Helpers. |
|
*/ |
|
|
|
var s = 1000; |
|
var m = s * 60; |
|
var h = m * 60; |
|
var d = h * 24; |
|
var y = d * 365.25; |
|
|
|
/** |
|
* Parse or format the given `val`. |
|
* |
|
* Options: |
|
* |
|
* - `long` verbose formatting [false] |
|
* |
|
* @param {String|Number} val |
|
* @param {Object} [options] |
|
* @throws {Error} throw an error if val is not a non-empty string or a number |
|
* @return {String|Number} |
|
* @api public |
|
*/ |
|
|
|
module.exports = function(val, options) { |
|
options = options || {}; |
|
var type = typeof val; |
|
if (type === 'string' && val.length > 0) { |
|
return parse(val); |
|
} else if (type === 'number' && isNaN(val) === false) { |
|
return options.long ? fmtLong(val) : fmtShort(val); |
|
} |
|
throw new Error( |
|
'val is not a non-empty string or a valid number. val=' + |
|
JSON.stringify(val) |
|
); |
|
}; |
|
|
|
/** |
|
* Parse the given `str` and return milliseconds. |
|
* |
|
* @param {String} str |
|
* @return {Number} |
|
* @api private |
|
*/ |
|
|
|
function parse(str) { |
|
str = String(str); |
|
if (str.length > 100) { |
|
return; |
|
} |
|
var match = /^((?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|years?|yrs?|y)?$/i.exec( |
|
str |
|
); |
|
if (!match) { |
|
return; |
|
} |
|
var n = parseFloat(match[1]); |
|
var type = (match[2] || 'ms').toLowerCase(); |
|
switch (type) { |
|
case 'years': |
|
case 'year': |
|
case 'yrs': |
|
case 'yr': |
|
case 'y': |
|
return n * y; |
|
case 'days': |
|
case 'day': |
|
case 'd': |
|
return n * d; |
|
case 'hours': |
|
case 'hour': |
|
case 'hrs': |
|
case 'hr': |
|
case 'h': |
|
return n * h; |
|
case 'minutes': |
|
case 'minute': |
|
case 'mins': |
|
case 'min': |
|
case 'm': |
|
return n * m; |
|
case 'seconds': |
|
case 'second': |
|
case 'secs': |
|
case 'sec': |
|
case 's': |
|
return n * s; |
|
case 'milliseconds': |
|
case 'millisecond': |
|
case 'msecs': |
|
case 'msec': |
|
case 'ms': |
|
return n; |
|
default: |
|
return undefined; |
|
} |
|
} |
|
|
|
/** |
|
* Short format for `ms`. |
|
* |
|
* @param {Number} ms |
|
* @return {String} |
|
* @api private |
|
*/ |
|
|
|
function fmtShort(ms) { |
|
if (ms >= d) { |
|
return Math.round(ms / d) + 'd'; |
|
} |
|
if (ms >= h) { |
|
return Math.round(ms / h) + 'h'; |
|
} |
|
if (ms >= m) { |
|
return Math.round(ms / m) + 'm'; |
|
} |
|
if (ms >= s) { |
|
return Math.round(ms / s) + 's'; |
|
} |
|
return ms + 'ms'; |
|
} |
|
|
|
/** |
|
* Long format for `ms`. |
|
* |
|
* @param {Number} ms |
|
* @return {String} |
|
* @api private |
|
*/ |
|
|
|
function fmtLong(ms) { |
|
return plural(ms, d, 'day') || |
|
plural(ms, h, 'hour') || |
|
plural(ms, m, 'minute') || |
|
plural(ms, s, 'second') || |
|
ms + ' ms'; |
|
} |
|
|
|
/** |
|
* Pluralization helper. |
|
*/ |
|
|
|
function plural(ms, n, name) { |
|
if (ms < n) { |
|
return; |
|
} |
|
if (ms < n * 1.5) { |
|
return Math.floor(ms / n) + ' ' + name; |
|
} |
|
return Math.ceil(ms / n) + ' ' + name + 's'; |
|
} |
|
|
|
},{}],14:[function(require,module,exports){ |
|
(function (process){ |
|
'use strict'; |
|
|
|
if (!process.version || |
|
process.version.indexOf('v0.') === 0 || |
|
process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) { |
|
module.exports = nextTick; |
|
} else { |
|
module.exports = process.nextTick; |
|
} |
|
|
|
function nextTick(fn, arg1, arg2, arg3) { |
|
if (typeof fn !== 'function') { |
|
throw new TypeError('"callback" argument must be a function'); |
|
} |
|
var len = arguments.length; |
|
var args, i; |
|
switch (len) { |
|
case 0: |
|
case 1: |
|
return process.nextTick(fn); |
|
case 2: |
|
return process.nextTick(function afterTickOne() { |
|
fn.call(null, arg1); |
|
}); |
|
case 3: |
|
return process.nextTick(function afterTickTwo() { |
|
fn.call(null, arg1, arg2); |
|
}); |
|
case 4: |
|
return process.nextTick(function afterTickThree() { |
|
fn.call(null, arg1, arg2, arg3); |
|
}); |
|
default: |
|
args = new Array(len - 1); |
|
i = 0; |
|
while (i < args.length) { |
|
args[i++] = arguments[i]; |
|
} |
|
return process.nextTick(function afterTick() { |
|
fn.apply(null, args); |
|
}); |
|
} |
|
} |
|
|
|
}).call(this,require('_process')) |
|
},{"_process":41}],15:[function(require,module,exports){ |
|
(function (process,global){ |
|
'use strict' |
|
|
|
function oldBrowser () { |
|
throw new Error('secure random number generation not supported by this browser\nuse chrome, FireFox or Internet Explorer 11') |
|
} |
|
|
|
var Buffer = require('safe-buffer').Buffer |
|
var crypto = global.crypto || global.msCrypto |
|
|
|
if (crypto && crypto.getRandomValues) { |
|
module.exports = randomBytes |
|
} else { |
|
module.exports = oldBrowser |
|
} |
|
|
|
function randomBytes (size, cb) { |
|
// phantomjs needs to throw |
|
if (size > 65536) throw new Error('requested too many random bytes') |
|
// in case browserify isn't using the Uint8Array version |
|
var rawBytes = new global.Uint8Array(size) |
|
|
|
// This will not work in older browsers. |
|
// See https://developer.mozilla.org/en-US/docs/Web/API/window.crypto.getRandomValues |
|
if (size > 0) { // getRandomValues fails on IE if size == 0 |
|
crypto.getRandomValues(rawBytes) |
|
} |
|
|
|
// XXX: phantomjs doesn't like a buffer being passed here |
|
var bytes = Buffer.from(rawBytes.buffer) |
|
|
|
if (typeof cb === 'function') { |
|
return process.nextTick(function () { |
|
cb(null, bytes) |
|
}) |
|
} |
|
|
|
return bytes |
|
} |
|
|
|
}).call(this,require('_process'),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) |
|
},{"_process":41,"safe-buffer":25}],16:[function(require,module,exports){ |
|
// Copyright Joyent, Inc. and other Node contributors. |
|
// |
|
// Permission is hereby granted, free of charge, to any person obtaining a |
|
// copy of this software and associated documentation files (the |
|
// "Software"), to deal in the Software without restriction, including |
|
// without limitation the rights to use, copy, modify, merge, publish, |
|
// distribute, sublicense, and/or sell copies of the Software, and to permit |
|
// persons to whom the Software is furnished to do so, subject to the |
|
// following conditions: |
|
// |
|
// The above copyright notice and this permission notice shall be included |
|
// in all copies or substantial portions of the Software. |
|
// |
|
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS |
|
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
|
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN |
|
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, |
|
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR |
|
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE |
|
// USE OR OTHER DEALINGS IN THE SOFTWARE. |
|
|
|
// a duplex stream is just a stream that is both readable and writable. |
|
// Since JS doesn't have multiple prototypal inheritance, this class |
|
// prototypally inherits from Readable, and then parasitically from |
|
// Writable. |
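// Illustrative sketch (not part of the original module): a Duplex built from
// this class has two independent sides, e.g.
//
//   var duplex = new Duplex({
//     read: function (n) { this.push('tick'); this.push(null); },
//     write: function (chunk, enc, cb) { console.log('wrote', chunk.toString()); cb(); }
//   });
//   duplex.on('data', function (d) { console.log('read', d.toString()); });
//   duplex.write('hello');
//
// The readable side is driven by read()/'data' and the writable side by
// write()/end(); allowHalfOpen (default true) controls whether the writable
// side is ended automatically once the readable side ends.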
|
|
|
'use strict'; |
|
|
|
/*<replacement>*/ |
|
|
|
var processNextTick = require('process-nextick-args'); |
|
/*</replacement>*/ |
|
|
|
/*<replacement>*/ |
|
var objectKeys = Object.keys || function (obj) { |
|
var keys = []; |
|
for (var key in obj) { |
|
keys.push(key); |
|
}return keys; |
|
}; |
|
/*</replacement>*/ |
|
|
|
module.exports = Duplex; |
|
|
|
/*<replacement>*/ |
|
var util = require('core-util-is'); |
|
util.inherits = require('inherits'); |
|
/*</replacement>*/ |
|
|
|
var Readable = require('./_stream_readable'); |
|
var Writable = require('./_stream_writable'); |
|
|
|
util.inherits(Duplex, Readable); |
|
|
|
var keys = objectKeys(Writable.prototype); |
|
for (var v = 0; v < keys.length; v++) { |
|
var method = keys[v]; |
|
if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; |
|
} |
|
|
|
function Duplex(options) { |
|
if (!(this instanceof Duplex)) return new Duplex(options); |
|
|
|
Readable.call(this, options); |
|
Writable.call(this, options); |
|
|
|
if (options && options.readable === false) this.readable = false; |
|
|
|
if (options && options.writable === false) this.writable = false; |
|
|
|
this.allowHalfOpen = true; |
|
if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; |
|
|
|
this.once('end', onend); |
|
} |
|
|
|
// the no-half-open enforcer |
|
function onend() { |
|
// if we allow half-open state, or if the writable side ended, |
|
// then we're ok. |
|
if (this.allowHalfOpen || this._writableState.ended) return; |
|
|
|
// no more data can be written. |
|
// But allow more writes to happen in this tick. |
|
processNextTick(onEndNT, this); |
|
} |
|
|
|
function onEndNT(self) { |
|
self.end(); |
|
} |
|
|
|
Object.defineProperty(Duplex.prototype, 'destroyed', { |
|
get: function () { |
|
if (this._readableState === undefined || this._writableState === undefined) { |
|
return false; |
|
} |
|
return this._readableState.destroyed && this._writableState.destroyed; |
|
}, |
|
set: function (value) { |
|
// we ignore the value if the stream |
|
// has not been initialized yet |
|
if (this._readableState === undefined || this._writableState === undefined) { |
|
return; |
|
} |
|
|
|
// backward compatibility, the user is explicitly |
|
// managing destroyed |
|
this._readableState.destroyed = value; |
|
this._writableState.destroyed = value; |
|
} |
|
}); |
|
|
|
Duplex.prototype._destroy = function (err, cb) { |
|
this.push(null); |
|
this.end(); |
|
|
|
processNextTick(cb, err); |
|
}; |
|
|
|
function forEach(xs, f) { |
|
for (var i = 0, l = xs.length; i < l; i++) { |
|
f(xs[i], i); |
|
} |
|
} |
|
},{"./_stream_readable":18,"./_stream_writable":20,"core-util-is":8,"inherits":11,"process-nextick-args":14}],17:[function(require,module,exports){ |
|
// Copyright Joyent, Inc. and other Node contributors. |
|
// |
|
// Permission is hereby granted, free of charge, to any person obtaining a |
|
// copy of this software and associated documentation files (the |
|
// "Software"), to deal in the Software without restriction, including |
|
// without limitation the rights to use, copy, modify, merge, publish, |
|
// distribute, sublicense, and/or sell copies of the Software, and to permit |
|
// persons to whom the Software is furnished to do so, subject to the |
|
// following conditions: |
|
// |
|
// The above copyright notice and this permission notice shall be included |
|
// in all copies or substantial portions of the Software. |
|
// |
|
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS |
|
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
|
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN |
|
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, |
|
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR |
|
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE |
|
// USE OR OTHER DEALINGS IN THE SOFTWARE. |
|
|
|
// a passthrough stream. |
|
// basically just the most minimal sort of Transform stream. |
|
// Every written chunk gets output as-is. |
|
|
|
'use strict'; |
|
|
|
module.exports = PassThrough; |
|
|
|
var Transform = require('./_stream_transform'); |
|
|
|
/*<replacement>*/ |
|
var util = require('core-util-is'); |
|
util.inherits = require('inherits'); |
|
/*</replacement>*/ |
|
|
|
util.inherits(PassThrough, Transform); |
|
|
|
function PassThrough(options) { |
|
if (!(this instanceof PassThrough)) return new PassThrough(options); |
|
|
|
Transform.call(this, options); |
|
} |
|
|
|
PassThrough.prototype._transform = function (chunk, encoding, cb) { |
|
cb(null, chunk); |
|
}; |
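// Illustrative usage sketch (not part of the original module): PassThrough is
// useful as an inert pipe segment or a simple in-memory conduit, e.g.
//
//   var pt = new PassThrough();
//   pt.on('data', function (chunk) { console.log(chunk.toString()); });
//   pt.write('hello ');
//   pt.end('world');        // chunks come out exactly as they were written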
|
},{"./_stream_transform":19,"core-util-is":8,"inherits":11}],18:[function(require,module,exports){ |
|
(function (process,global){ |
|
// Copyright Joyent, Inc. and other Node contributors. |
|
// |
|
// Permission is hereby granted, free of charge, to any person obtaining a |
|
// copy of this software and associated documentation files (the |
|
// "Software"), to deal in the Software without restriction, including |
|
// without limitation the rights to use, copy, modify, merge, publish, |
|
// distribute, sublicense, and/or sell copies of the Software, and to permit |
|
// persons to whom the Software is furnished to do so, subject to the |
|
// following conditions: |
|
// |
|
// The above copyright notice and this permission notice shall be included |
|
// in all copies or substantial portions of the Software. |
|
// |
|
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS |
|
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
|
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN |
|
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, |
|
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR |
|
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE |
|
// USE OR OTHER DEALINGS IN THE SOFTWARE. |
|
|
|
'use strict'; |
|
|
|
/*<replacement>*/ |
|
|
|
var processNextTick = require('process-nextick-args'); |
|
/*</replacement>*/ |
|
|
|
module.exports = Readable; |
|
|
|
/*<replacement>*/ |
|
var isArray = require('isarray'); |
|
/*</replacement>*/ |
|
|
|
/*<replacement>*/ |
|
var Duplex; |
|
/*</replacement>*/ |
|
|
|
Readable.ReadableState = ReadableState; |
|
|
|
/*<replacement>*/ |
|
var EE = require('events').EventEmitter; |
|
|
|
var EElistenerCount = function (emitter, type) { |
|
return emitter.listeners(type).length; |
|
}; |
|
/*</replacement>*/ |
|
|
|
/*<replacement>*/ |
|
var Stream = require('./internal/streams/stream'); |
|
/*</replacement>*/ |
|
|
|
// TODO(bmeurer): Change this back to const once hole checks are |
|
// properly optimized away early in Ignition+TurboFan. |
|
/*<replacement>*/ |
|
var Buffer = require('safe-buffer').Buffer; |
|
var OurUint8Array = global.Uint8Array || function () {}; |
|
function _uint8ArrayToBuffer(chunk) { |
|
return Buffer.from(chunk); |
|
} |
|
function _isUint8Array(obj) { |
|
return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; |
|
} |
|
/*</replacement>*/ |
|
|
|
/*<replacement>*/ |
|
var util = require('core-util-is'); |
|
util.inherits = require('inherits'); |
|
/*</replacement>*/ |
|
|
|
/*<replacement>*/ |
|
var debugUtil = require('util'); |
|
var debug = void 0; |
|
if (debugUtil && debugUtil.debuglog) { |
|
debug = debugUtil.debuglog('stream'); |
|
} else { |
|
debug = function () {}; |
|
} |
|
/*</replacement>*/ |
|
|
|
var BufferList = require('./internal/streams/BufferList'); |
|
var destroyImpl = require('./internal/streams/destroy'); |
|
var StringDecoder; |
|
|
|
util.inherits(Readable, Stream); |
|
|
|
var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; |
|
|
|
function prependListener(emitter, event, fn) { |
|
// Sadly this is not cacheable as some libraries bundle their own |
|
// event emitter implementation with them. |
|
if (typeof emitter.prependListener === 'function') { |
|
return emitter.prependListener(event, fn); |
|
} else { |
|
// This is a hack to make sure that our error handler is attached before any |
|
// userland ones. NEVER DO THIS. This is here only because this code needs |
|
// to continue to work with older versions of Node.js that do not include |
|
// the prependListener() method. The goal is to eventually remove this hack. |
|
if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; |
|
} |
|
} |
|
|
|
function ReadableState(options, stream) { |
|
Duplex = Duplex || require('./_stream_duplex'); |
|
|
|
options = options || {}; |
|
|
|
// object stream flag. Used to make read(n) ignore n and to |
|
// make all the buffer merging and length checks go away |
|
this.objectMode = !!options.objectMode; |
|
|
|
if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.readableObjectMode; |
|
|
|
// the point at which it stops calling _read() to fill the buffer |
|
// Note: 0 is a valid value, means "don't call _read preemptively ever" |
|
var hwm = options.highWaterMark; |
|
var defaultHwm = this.objectMode ? 16 : 16 * 1024; |
|
this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; |
|
|
|
// cast to ints. |
|
this.highWaterMark = Math.floor(this.highWaterMark); |
|
|
|
// A linked list is used to store data chunks instead of an array because the |
|
// linked list can remove elements from the beginning faster than |
|
// array.shift() |
|
this.buffer = new BufferList(); |
|
this.length = 0; |
|
this.pipes = null; |
|
this.pipesCount = 0; |
|
this.flowing = null; |
|
this.ended = false; |
|
this.endEmitted = false; |
|
this.reading = false; |
|
|
|
// a flag to be able to tell if the event 'readable'/'data' is emitted |
|
// immediately, or on a later tick. We set this to true at first, because |
|
// any actions that shouldn't happen until "later" should generally also |
|
// not happen before the first read call. |
|
this.sync = true; |
|
|
|
// whenever we return null, then we set a flag to say |
|
// that we're awaiting a 'readable' event emission. |
|
this.needReadable = false; |
|
this.emittedReadable = false; |
|
this.readableListening = false; |
|
this.resumeScheduled = false; |
|
|
|
// has it been destroyed |
|
this.destroyed = false; |
|
|
|
// Crypto is kind of old and crusty. Historically, its default string |
|
// encoding is 'binary' so we have to make this configurable. |
|
// Everything else in the universe uses 'utf8', though. |
|
this.defaultEncoding = options.defaultEncoding || 'utf8'; |
|
|
|
// the number of writers that are awaiting a drain event in .pipe()s |
|
this.awaitDrain = 0; |
|
|
|
// if true, a maybeReadMore has been scheduled |
|
this.readingMore = false; |
|
|
|
this.decoder = null; |
|
this.encoding = null; |
|
if (options.encoding) { |
|
if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; |
|
this.decoder = new StringDecoder(options.encoding); |
|
this.encoding = options.encoding; |
|
} |
|
} |
|
|
|
function Readable(options) { |
|
Duplex = Duplex || require('./_stream_duplex'); |
|
|
|
if (!(this instanceof Readable)) return new Readable(options); |
|
|
|
this._readableState = new ReadableState(options, this); |
|
|
|
// legacy |
|
this.readable = true; |
|
|
|
if (options) { |
|
if (typeof options.read === 'function') this._read = options.read; |
|
|
|
if (typeof options.destroy === 'function') this._destroy = options.destroy; |
|
} |
|
|
|
Stream.call(this); |
|
} |
|
|
|
Object.defineProperty(Readable.prototype, 'destroyed', { |
|
get: function () { |
|
if (this._readableState === undefined) { |
|
return false; |
|
} |
|
return this._readableState.destroyed; |
|
}, |
|
set: function (value) { |
|
// we ignore the value if the stream |
|
// has not been initialized yet |
|
if (!this._readableState) { |
|
return; |
|
} |
|
|
|
// backward compatibility, the user is explicitly |
|
// managing destroyed |
|
this._readableState.destroyed = value; |
|
} |
|
}); |
|
|
|
Readable.prototype.destroy = destroyImpl.destroy; |
|
Readable.prototype._undestroy = destroyImpl.undestroy; |
|
Readable.prototype._destroy = function (err, cb) { |
|
this.push(null); |
|
cb(err); |
|
}; |
|
|
|
// Manually shove something into the read() buffer. |
|
// This returns true if the highWaterMark has not been hit yet, |
|
// similar to how Writable.write() returns true if you should |
|
// write() some more. |
|
Readable.prototype.push = function (chunk, encoding) { |
|
var state = this._readableState; |
|
var skipChunkCheck; |
|
|
|
if (!state.objectMode) { |
|
if (typeof chunk === 'string') { |
|
encoding = encoding || state.defaultEncoding; |
|
if (encoding !== state.encoding) { |
|
chunk = Buffer.from(chunk, encoding); |
|
encoding = ''; |
|
} |
|
skipChunkCheck = true; |
|
} |
|
} else { |
|
skipChunkCheck = true; |
|
} |
|
|
|
return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); |
|
}; |
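// Illustrative sketch (not part of the original module): push() is how a
// Readable implementation feeds the buffer, and its return value is the
// backpressure signal, e.g.
//
//   var counter = 0;
//   var src = new Readable({
//     read: function (n) {
//       var keepGoing = this.push(String(counter++));  // false once the hwm is reached
//       if (counter > 3) this.push(null);              // push(null) signals EOF
//     }
//   });
//   src.on('data', function (d) { console.log(d.toString()); });  // '0' '1' '2' '3'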
|
|
|
// Unshift should *always* be something directly out of read() |
|
Readable.prototype.unshift = function (chunk) { |
|
return readableAddChunk(this, chunk, null, true, false); |
|
}; |
|
|
|
function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { |
|
var state = stream._readableState; |
|
if (chunk === null) { |
|
state.reading = false; |
|
onEofChunk(stream, state); |
|
} else { |
|
var er; |
|
if (!skipChunkCheck) er = chunkInvalid(state, chunk); |
|
if (er) { |
|
stream.emit('error', er); |
|
} else if (state.objectMode || chunk && chunk.length > 0) { |
|
if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { |
|
chunk = _uint8ArrayToBuffer(chunk); |
|
} |
|
|
|
if (addToFront) { |
|
if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); |
|
} else if (state.ended) { |
|
stream.emit('error', new Error('stream.push() after EOF')); |
|
} else { |
|
state.reading = false; |
|
if (state.decoder && !encoding) { |
|
chunk = state.decoder.write(chunk); |
|
if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); |
|
} else { |
|
addChunk(stream, state, chunk, false); |
|
} |
|
} |
|
} else if (!addToFront) { |
|
state.reading = false; |
|
} |
|
} |
|
|
|
return needMoreData(state); |
|
} |
|
|
|
function addChunk(stream, state, chunk, addToFront) { |
|
if (state.flowing && state.length === 0 && !state.sync) { |
|
stream.emit('data', chunk); |
|
stream.read(0); |
|
} else { |
|
// update the buffer info. |
|
state.length += state.objectMode ? 1 : chunk.length; |
|
if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); |
|
|
|
if (state.needReadable) emitReadable(stream); |
|
} |
|
maybeReadMore(stream, state); |
|
} |
|
|
|
function chunkInvalid(state, chunk) { |
|
var er; |
|
if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { |
|
er = new TypeError('Invalid non-string/buffer chunk'); |
|
} |
|
return er; |
|
} |
|
|
|
// if we're not yet past the high water mark, we can push in some more.
|
// Also, if we have no data yet, we can stand some |
|
// more bytes. This is to work around cases where hwm=0, |
|
// such as the repl. Also, if the push() triggered a |
|
// readable event, and the user called read(largeNumber) such that |
|
// needReadable was set, then we ought to push more, so that another |
|
// 'readable' event will be triggered. |
|
function needMoreData(state) { |
|
return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); |
|
} |
|
|
|
Readable.prototype.isPaused = function () { |
|
return this._readableState.flowing === false; |
|
}; |
|
|
|
// backwards compatibility. |
|
Readable.prototype.setEncoding = function (enc) { |
|
if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; |
|
this._readableState.decoder = new StringDecoder(enc); |
|
this._readableState.encoding = enc; |
|
return this; |
|
}; |
|
|
|
// Don't raise the hwm > 8MB |
|
var MAX_HWM = 0x800000; |
|
function computeNewHighWaterMark(n) { |
|
if (n >= MAX_HWM) { |
|
n = MAX_HWM; |
|
} else { |
|
// Get the next highest power of 2 to prevent increasing hwm excessively in |
|
// tiny amounts |
|
n--; |
|
n |= n >>> 1; |
|
n |= n >>> 2; |
|
n |= n >>> 4; |
|
n |= n >>> 8; |
|
n |= n >>> 16; |
|
n++; |
|
} |
|
return n; |
|
} |
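// Illustrative sketch (not part of the original module): the bit-twiddling
// above rounds n up to the next power of two, so the high-water mark grows in
// coarse steps instead of byte-by-byte, e.g.
//
//   computeNewHighWaterMark(100);      // 128
//   computeNewHighWaterMark(16384);    // 16384 (already a power of two)
//   computeNewHighWaterMark(9000000);  // 8388608 (clamped to MAX_HWM, 8MB)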
|
|
|
// This function is designed to be inlinable, so please take care when making |
|
// changes to the function body. |
|
function howMuchToRead(n, state) { |
|
if (n <= 0 || state.length === 0 && state.ended) return 0; |
|
if (state.objectMode) return 1; |
|
if (n !== n) { |
|
// Only flow one buffer at a time |
|
if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; |
|
} |
|
// If we're asking for more than the current hwm, then raise the hwm. |
|
if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); |
|
if (n <= state.length) return n; |
|
// Don't have enough |
|
if (!state.ended) { |
|
state.needReadable = true; |
|
return 0; |
|
} |
|
return state.length; |
|
} |
|
|
|
// you can override either this method, or the async _read(n) below. |
|
Readable.prototype.read = function (n) { |
|
debug('read', n); |
|
n = parseInt(n, 10); |
|
var state = this._readableState; |
|
var nOrig = n; |
|
|
|
if (n !== 0) state.emittedReadable = false; |
|
|
|
// if we're doing read(0) to trigger a readable event, but we |
|
// already have a bunch of data in the buffer, then just trigger |
|
// the 'readable' event and move on. |
|
if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { |
|
debug('read: emitReadable', state.length, state.ended); |
|
if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); |
|
return null; |
|
} |
|
|
|
n = howMuchToRead(n, state); |
|
|
|
// if we've ended, and we're now clear, then finish it up. |
|
if (n === 0 && state.ended) { |
|
if (state.length === 0) endReadable(this); |
|
return null; |
|
} |
|
|
|
// All the actual chunk generation logic needs to be |
|
// *below* the call to _read. The reason is that in certain |
|
// synthetic stream cases, such as passthrough streams, _read |
|
// may be a completely synchronous operation which may change |
|
// the state of the read buffer, providing enough data when |
|
// before there was *not* enough. |
|
// |
|
// So, the steps are: |
|
// 1. Figure out what the state of things will be after we do |
|
// a read from the buffer. |
|
// |
|
// 2. If that resulting state will trigger a _read, then call _read. |
|
// Note that this may be asynchronous, or synchronous. Yes, it is |
|
// deeply ugly to write APIs this way, but that still doesn't mean |
|
// that the Readable class should behave improperly, as streams are |
|
// designed to be sync/async agnostic. |
|
// Take note if the _read call is sync or async (ie, if the read call |
|
// has returned yet), so that we know whether or not it's safe to emit |
|
// 'readable' etc. |
|
// |
|
// 3. Actually pull the requested chunks out of the buffer and return. |
|
|
|
// if we need a readable event, then we need to do some reading. |
|
var doRead = state.needReadable; |
|
debug('need readable', doRead); |
|
|
|
// if we currently have less than the highWaterMark, then also read some |
|
if (state.length === 0 || state.length - n < state.highWaterMark) { |
|
doRead = true; |
|
debug('length less than watermark', doRead); |
|
} |
|
|
|
// however, if we've ended, then there's no point, and if we're already |
|
// reading, then it's unnecessary. |
|
if (state.ended || state.reading) { |
|
doRead = false; |
|
debug('reading or ended', doRead); |
|
} else if (doRead) { |
|
debug('do read'); |
|
state.reading = true; |
|
state.sync = true; |
|
// if the length is currently zero, then we *need* a readable event. |
|
if (state.length === 0) state.needReadable = true; |
|
// call internal read method |
|
this._read(state.highWaterMark); |
|
state.sync = false; |
|
// If _read pushed data synchronously, then `reading` will be false, |
|
// and we need to re-evaluate how much data we can return to the user. |
|
if (!state.reading) n = howMuchToRead(nOrig, state); |
|
} |
|
|
|
var ret; |
|
if (n > 0) ret = fromList(n, state);else ret = null; |
|
|
|
if (ret === null) { |
|
state.needReadable = true; |
|
n = 0; |
|
} else { |
|
state.length -= n; |
|
} |
|
|
|
if (state.length === 0) { |
|
// If we have nothing in the buffer, then we want to know |
|
// as soon as we *do* get something into the buffer. |
|
if (!state.ended) state.needReadable = true; |
|
|
|
// If we tried to read() past the EOF, then emit end on the next tick. |
|
if (nOrig !== n && state.ended) endReadable(this); |
|
} |
|
|
|
if (ret !== null) this.emit('data', ret); |
|
|
|
return ret; |
|
}; |
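// Illustrative sketch (not part of the original module): the pull-style
// counterpart to 'data' events, consuming with 'readable' + read():
//
//   src.on('readable', function () {             // `src` is a hypothetical Readable
//     var chunk;
//     while ((chunk = src.read()) !== null) {
//       handleChunk(chunk);                      // `handleChunk` is hypothetical
//     }
//   });
//
// read() with no argument drains what is currently buffered; read(0) is used
// internally only to prime the buffer and trigger a 'readable' event.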
|
|
|
function onEofChunk(stream, state) { |
|
if (state.ended) return; |
|
if (state.decoder) { |
|
var chunk = state.decoder.end(); |
|
if (chunk && chunk.length) { |
|
state.buffer.push(chunk); |
|
state.length += state.objectMode ? 1 : chunk.length; |
|
} |
|
} |
|
state.ended = true; |
|
|
|
// emit 'readable' now to make sure it gets picked up. |
|
emitReadable(stream); |
|
} |
|
|
|
// Don't emit readable right away in sync mode, because this can trigger |
|
// another read() call => stack overflow. This way, it might trigger |
|
// a nextTick recursion warning, but that's not so bad. |
|
function emitReadable(stream) { |
|
var state = stream._readableState; |
|
state.needReadable = false; |
|
if (!state.emittedReadable) { |
|
debug('emitReadable', state.flowing); |
|
state.emittedReadable = true; |
|
if (state.sync) processNextTick(emitReadable_, stream);else emitReadable_(stream); |
|
} |
|
} |
|
|
|
function emitReadable_(stream) { |
|
debug('emit readable'); |
|
stream.emit('readable'); |
|
flow(stream); |
|
} |
|
|
|
// at this point, the user has presumably seen the 'readable' event, |
|
// and called read() to consume some data. that may have triggered |
|
// in turn another _read(n) call, in which case reading = true if |
|
// it's in progress. |
|
// However, if we're not ended, or reading, and the length < hwm, |
|
// then go ahead and try to read some more preemptively. |
|
function maybeReadMore(stream, state) { |
|
if (!state.readingMore) { |
|
state.readingMore = true; |
|
processNextTick(maybeReadMore_, stream, state); |
|
} |
|
} |
|
|
|
function maybeReadMore_(stream, state) { |
|
var len = state.length; |
|
while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { |
|
debug('maybeReadMore read 0'); |
|
stream.read(0); |
|
    if (len === state.length) {
      // didn't get any data, stop spinning.
      break;
    } else {
      len = state.length;
    }
|
} |
|
state.readingMore = false; |
|
} |
|
|
|
// abstract method. to be overridden in specific implementation classes.
// Implementations deliver data by calling this.push(data) with chunks of at
// most n bytes. for virtual (non-string, non-buffer) streams, "length" is
// somewhat arbitrary, and perhaps not very meaningful.
|
Readable.prototype._read = function (n) { |
|
this.emit('error', new Error('_read() is not implemented')); |
|
}; |
|
|
|
Readable.prototype.pipe = function (dest, pipeOpts) { |
|
var src = this; |
|
var state = this._readableState; |
|
|
|
switch (state.pipesCount) { |
|
case 0: |
|
state.pipes = dest; |
|
break; |
|
case 1: |
|
state.pipes = [state.pipes, dest]; |
|
break; |
|
default: |
|
state.pipes.push(dest); |
|
break; |
|
} |
|
state.pipesCount += 1; |
|
debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); |
|
|
|
var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; |
|
|
|
var endFn = doEnd ? onend : unpipe; |
|
if (state.endEmitted) processNextTick(endFn);else src.once('end', endFn); |
|
|
|
dest.on('unpipe', onunpipe); |
|
function onunpipe(readable, unpipeInfo) { |
|
debug('onunpipe'); |
|
if (readable === src) { |
|
if (unpipeInfo && unpipeInfo.hasUnpiped === false) { |
|
unpipeInfo.hasUnpiped = true; |
|
cleanup(); |
|
} |
|
} |
|
} |
|
|
|
function onend() { |
|
debug('onend'); |
|
dest.end(); |
|
} |
|
|
|
// when the dest drains, it reduces the awaitDrain counter |
|
// on the source. This would be more elegant with a .once() |
|
// handler in flow(), but adding and removing repeatedly is |
|
// too slow. |
|
var ondrain = pipeOnDrain(src); |
|
dest.on('drain', ondrain); |
|
|
|
var cleanedUp = false; |
|
function cleanup() { |
|
debug('cleanup'); |
|
// cleanup event handlers once the pipe is broken |
|
dest.removeListener('close', onclose); |
|
dest.removeListener('finish', onfinish); |
|
dest.removeListener('drain', ondrain); |
|
dest.removeListener('error', onerror); |
|
dest.removeListener('unpipe', onunpipe); |
|
src.removeListener('end', onend); |
|
src.removeListener('end', unpipe); |
|
src.removeListener('data', ondata); |
|
|
|
cleanedUp = true; |
|
|
|
// if the reader is waiting for a drain event from this |
|
// specific writer, then it would cause it to never start |
|
// flowing again. |
|
// So, if this is awaiting a drain, then we just call it now. |
|
// If we don't know, then assume that we are waiting for one. |
|
if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); |
|
} |
|
|
|
// If the user pushes more data while we're writing to dest then we'll end up |
|
// in ondata again. However, we only want to increase awaitDrain once because |
|
// dest will only emit one 'drain' event for the multiple writes. |
|
// => Introduce a guard on increasing awaitDrain. |
|
var increasedAwaitDrain = false; |
|
src.on('data', ondata); |
|
function ondata(chunk) { |
|
debug('ondata'); |
|
increasedAwaitDrain = false; |
|
var ret = dest.write(chunk); |
|
if (false === ret && !increasedAwaitDrain) { |
|
// If the user unpiped during `dest.write()`, it is possible |
|
// to get stuck in a permanently paused state if that write |
|
// also returned false. |
|
// => Check whether `dest` is still a piping destination. |
|
if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { |
|
debug('false write response, pause', src._readableState.awaitDrain); |
|
src._readableState.awaitDrain++; |
|
increasedAwaitDrain = true; |
|
} |
|
src.pause(); |
|
} |
|
} |
|
|
|
// if the dest has an error, then stop piping into it. |
|
// however, don't suppress the throwing behavior for this. |
|
function onerror(er) { |
|
debug('onerror', er); |
|
unpipe(); |
|
dest.removeListener('error', onerror); |
|
if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); |
|
} |
|
|
|
// Make sure our error handler is attached before userland ones. |
|
prependListener(dest, 'error', onerror); |
|
|
|
// Both close and finish should trigger unpipe, but only once. |
|
function onclose() { |
|
dest.removeListener('finish', onfinish); |
|
unpipe(); |
|
} |
|
dest.once('close', onclose); |
|
function onfinish() { |
|
debug('onfinish'); |
|
dest.removeListener('close', onclose); |
|
unpipe(); |
|
} |
|
dest.once('finish', onfinish); |
|
|
|
function unpipe() { |
|
debug('unpipe'); |
|
src.unpipe(dest); |
|
} |
|
|
|
// tell the dest that it's being piped to |
|
dest.emit('pipe', src); |
|
|
|
// start the flow if it hasn't been started already. |
|
if (!state.flowing) { |
|
debug('pipe resume'); |
|
src.resume(); |
|
} |
|
|
|
return dest; |
|
}; |
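// Illustrative usage sketch (not part of the original module):
//
//   src.pipe(dest);                   // `src` and `dest` are hypothetical streams
//   src.pipe(dest, { end: false });   // keep `dest` open after `src` ends
//
// Internally, every false return from dest.write() bumps awaitDrain and pauses
// the source; the matching 'drain' handler (pipeOnDrain below) decrements it
// and resumes flow once all piped destinations have drained.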
|
|
|
function pipeOnDrain(src) { |
|
return function () { |
|
var state = src._readableState; |
|
debug('pipeOnDrain', state.awaitDrain); |
|
if (state.awaitDrain) state.awaitDrain--; |
|
if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { |
|
state.flowing = true; |
|
flow(src); |
|
} |
|
}; |
|
} |
|
|
|
Readable.prototype.unpipe = function (dest) { |
|
var state = this._readableState; |
|
var unpipeInfo = { hasUnpiped: false }; |
|
|
|
// if we're not piping anywhere, then do nothing. |
|
if (state.pipesCount === 0) return this; |
|
|
|
// just one destination. most common case. |
|
if (state.pipesCount === 1) { |
|
// passed in one, but it's not the right one. |
|
if (dest && dest !== state.pipes) return this; |
|
|
|
if (!dest) dest = state.pipes; |
|
|
|
// got a match. |
|
state.pipes = null; |
|
state.pipesCount = 0; |
|
state.flowing = false; |
|
if (dest) dest.emit('unpipe', this, unpipeInfo); |
|
return this; |
|
} |
|
|
|
// slow case. multiple pipe destinations. |
|
|
|
if (!dest) { |
|
// remove all. |
|
var dests = state.pipes; |
|
var len = state.pipesCount; |
|
state.pipes = null; |
|
state.pipesCount = 0; |
|
state.flowing = false; |
|
|
|
for (var i = 0; i < len; i++) { |
|
dests[i].emit('unpipe', this, unpipeInfo); |
|
}return this; |
|
} |
|
|
|
// try to find the right one. |
|
var index = indexOf(state.pipes, dest); |
|
if (index === -1) return this; |
|
|
|
state.pipes.splice(index, 1); |
|
state.pipesCount -= 1; |
|
if (state.pipesCount === 1) state.pipes = state.pipes[0]; |
|
|
|
dest.emit('unpipe', this, unpipeInfo); |
|
|
|
return this; |
|
}; |
|
|
|
// set up data events if they are asked for |
|
// Ensure readable listeners eventually get something |
|
Readable.prototype.on = function (ev, fn) { |
|
var res = Stream.prototype.on.call(this, ev, fn); |
|
|
|
if (ev === 'data') { |
|
// Start flowing on next tick if stream isn't explicitly paused |
|
if (this._readableState.flowing !== false) this.resume(); |
|
} else if (ev === 'readable') { |
|
var state = this._readableState; |
|
if (!state.endEmitted && !state.readableListening) { |
|
state.readableListening = state.needReadable = true; |
|
state.emittedReadable = false; |
|
if (!state.reading) { |
|
processNextTick(nReadingNextTick, this); |
|
} else if (state.length) { |
|
emitReadable(this); |
|
} |
|
} |
|
} |
|
|
|
return res; |
|
}; |
|
Readable.prototype.addListener = Readable.prototype.on; |
|
|
|
function nReadingNextTick(self) { |
|
debug('readable nexttick read 0'); |
|
self.read(0); |
|
} |
|
|
|
// pause() and resume() are remnants of the legacy readable stream API |
|
// If the user uses them, then switch into old mode. |
|
Readable.prototype.resume = function () { |
|
var state = this._readableState; |
|
if (!state.flowing) { |
|
debug('resume'); |
|
state.flowing = true; |
|
resume(this, state); |
|
} |
|
return this; |
|
}; |
|
|
|
function resume(stream, state) { |
|
if (!state.resumeScheduled) { |
|
state.resumeScheduled = true; |
|
processNextTick(resume_, stream, state); |
|
} |
|
} |
|
|
|
function resume_(stream, state) { |
|
if (!state.reading) { |
|
debug('resume read 0'); |
|
stream.read(0); |
|
} |
|
|
|
state.resumeScheduled = false; |
|
state.awaitDrain = 0; |
|
stream.emit('resume'); |
|
flow(stream); |
|
if (state.flowing && !state.reading) stream.read(0); |
|
} |
|
|
|
Readable.prototype.pause = function () { |
|
debug('call pause flowing=%j', this._readableState.flowing); |
|
if (false !== this._readableState.flowing) { |
|
debug('pause'); |
|
this._readableState.flowing = false; |
|
this.emit('pause'); |
|
} |
|
return this; |
|
}; |
|
|
|
function flow(stream) { |
|
var state = stream._readableState; |
|
debug('flow', state.flowing); |
|
while (state.flowing && stream.read() !== null) {} |
|
} |
|
|
|
// wrap an old-style stream as the async data source. |
|
// This is *not* part of the readable stream interface. |
|
// It is an ugly unfortunate mess of history. |
|
Readable.prototype.wrap = function (stream) { |
|
var state = this._readableState; |
|
var paused = false; |
|
|
|
var self = this; |
|
stream.on('end', function () { |
|
debug('wrapped end'); |
|
if (state.decoder && !state.ended) { |
|
var chunk = state.decoder.end(); |
|
if (chunk && chunk.length) self.push(chunk); |
|
} |
|
|
|
self.push(null); |
|
}); |
|
|
|
stream.on('data', function (chunk) { |
|
debug('wrapped data'); |
|
if (state.decoder) chunk = state.decoder.write(chunk); |
|
|
|
// don't skip over falsy values in objectMode |
|
if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; |
|
|
|
var ret = self.push(chunk); |
|
if (!ret) { |
|
paused = true; |
|
stream.pause(); |
|
} |
|
}); |
|
|
|
// proxy all the other methods. |
|
// important when wrapping filters and duplexes. |
|
for (var i in stream) { |
|
if (this[i] === undefined && typeof stream[i] === 'function') { |
|
this[i] = function (method) { |
|
return function () { |
|
return stream[method].apply(stream, arguments); |
|
}; |
|
}(i); |
|
} |
|
} |
|
|
|
// proxy certain important events. |
|
for (var n = 0; n < kProxyEvents.length; n++) { |
|
stream.on(kProxyEvents[n], self.emit.bind(self, kProxyEvents[n])); |
|
} |
|
|
|
// when we try to consume some more bytes, simply unpause the |
|
// underlying stream. |
|
self._read = function (n) { |
|
debug('wrapped _read', n); |
|
if (paused) { |
|
paused = false; |
|
stream.resume(); |
|
} |
|
}; |
|
|
|
return self; |
|
}; |
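// Illustrative usage sketch (not part of the original module): wrap() adapts a
// pre-streams2 "old-style" stream so it can be consumed with the modern API:
//
//   var wrapped = new Readable().wrap(oldStream);   // `oldStream` is hypothetical
//   wrapped.on('data', function (chunk) { console.log(chunk.length); });
//
// Data events from the old stream are re-pushed into this Readable (pausing the
// source whenever push() signals backpressure), 'error'/'close'/etc. are
// proxied, and any extra methods on the old stream are forwarded.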
|
|
|
// exposed for testing purposes only. |
|
Readable._fromList = fromList; |
|
|
|
// Pluck off n bytes from an array of buffers. |
|
// Length is the combined lengths of all the buffers in the list. |
|
// This function is designed to be inlinable, so please take care when making |
|
// changes to the function body. |
|
function fromList(n, state) { |
|
// nothing buffered |
|
if (state.length === 0) return null; |
|
|
|
var ret; |
|
if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { |
|
// read it all, truncate the list |
|
if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); |
|
state.buffer.clear(); |
|
} else { |
|
// read part of list |
|
ret = fromListPartial(n, state.buffer, state.decoder); |
|
} |
|
|
|
return ret; |
|
} |
|
|
|
// Extracts only enough buffered data to satisfy the amount requested. |
|
// This function is designed to be inlinable, so please take care when making |
|
// changes to the function body. |
|
function fromListPartial(n, list, hasStrings) { |
|
var ret; |
|
if (n < list.head.data.length) { |
|
// slice is the same for buffers and strings |
|
ret = list.head.data.slice(0, n); |
|
list.head.data = list.head.data.slice(n); |
|
} else if (n === list.head.data.length) { |
|
// first chunk is a perfect match |
|
ret = list.shift(); |
|
} else { |
|
// result spans more than one buffer |
|
ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); |
|
} |
|
return ret; |
|
} |
|
|
|
// Copies a specified amount of characters from the list of buffered data |
|
// chunks. |
|
// This function is designed to be inlinable, so please take care when making |
|
// changes to the function body. |
|
function copyFromBufferString(n, list) { |
|
var p = list.head; |
|
var c = 1; |
|
var ret = p.data; |
|
n -= ret.length; |
|
while (p = p.next) { |
|
var str = p.data; |
|
var nb = n > str.length ? str.length : n; |
|
if (nb === str.length) ret += str;else ret += str.slice(0, n); |
|
n -= nb; |
|
if (n === 0) { |
|
if (nb === str.length) { |
|
++c; |
|
if (p.next) list.head = p.next;else list.head = list.tail = null; |
|
} else { |
|
list.head = p; |
|
p.data = str.slice(nb); |
|
} |
|
break; |
|
} |
|
++c; |
|
} |
|
list.length -= c; |
|
return ret; |
|
} |
|
|
|
// Copies a specified amount of bytes from the list of buffered data chunks. |
|
// This function is designed to be inlinable, so please take care when making |
|
// changes to the function body. |
|
function copyFromBuffer(n, list) { |
|
var ret = Buffer.allocUnsafe(n); |
|
var p = list.head; |
|
var c = 1; |
|
p.data.copy(ret); |
|
n -= p.data.length; |
|
while (p = p.next) { |
|
var buf = p.data; |
|
var nb = n > buf.length ? buf.length : n; |
|
buf.copy(ret, ret.length - n, 0, nb); |
|
n -= nb; |
|
if (n === 0) { |
|
if (nb === buf.length) { |
|
++c; |
|
if (p.next) list.head = p.next;else list.head = list.tail = null; |
|
} else { |
|
list.head = p; |
|
p.data = buf.slice(nb); |
|
} |
|
break; |
|
} |
|
++c; |
|
} |
|
list.length -= c; |
|
return ret; |
|
} |
|
|
|
function endReadable(stream) { |
|
var state = stream._readableState; |
|
|
|
// If we get here before consuming all the bytes, then that is a |
|
// bug in node. Should never happen. |
|
if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); |
|
|
|
if (!state.endEmitted) { |
|
state.ended = true; |
|
processNextTick(endReadableNT, state, stream); |
|
} |
|
} |
|
|
|
function endReadableNT(state, stream) { |
|
// Check that we didn't get one last unshift. |
|
if (!state.endEmitted && state.length === 0) { |
|
state.endEmitted = true; |
|
stream.readable = false; |
|
stream.emit('end'); |
|
} |
|
} |
|
|
|
function forEach(xs, f) { |
|
for (var i = 0, l = xs.length; i < l; i++) { |
|
f(xs[i], i); |
|
} |
|
} |
|
|
|
function indexOf(xs, x) { |
|
for (var i = 0, l = xs.length; i < l; i++) { |
|
if (xs[i] === x) return i; |
|
} |
|
return -1; |
|
} |
|
}).call(this,require('_process'),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) |
|
},{"./_stream_duplex":16,"./internal/streams/BufferList":21,"./internal/streams/destroy":22,"./internal/streams/stream":23,"_process":41,"core-util-is":8,"events":35,"inherits":11,"isarray":12,"process-nextick-args":14,"safe-buffer":25,"string_decoder/":27,"util":32}],19:[function(require,module,exports){ |
|
// Copyright Joyent, Inc. and other Node contributors. |
|
// |
|
// Permission is hereby granted, free of charge, to any person obtaining a |
|
// copy of this software and associated documentation files (the |
|
// "Software"), to deal in the Software without restriction, including |
|
// without limitation the rights to use, copy, modify, merge, publish, |
|
// distribute, sublicense, and/or sell copies of the Software, and to permit |
|
// persons to whom the Software is furnished to do so, subject to the |
|
// following conditions: |
|
// |
|
// The above copyright notice and this permission notice shall be included |
|
// in all copies or substantial portions of the Software. |
|
// |
|
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS |
|
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
|
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN |
|
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, |
|
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR |
|
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE |
|
// USE OR OTHER DEALINGS IN THE SOFTWARE. |
|
|
|
// a transform stream is a readable/writable stream where you do |
|
// something with the data. Sometimes it's called a "filter", |
|
// but that's not a great name for it, since that implies a thing where |
|
// some bits pass through, and others are simply ignored. (That would |
|
// be a valid example of a transform, of course.) |
|
// |
|
// While the output is causally related to the input, it's not a |
|
// necessarily symmetric or synchronous transformation. For example, |
|
// a zlib stream might take multiple plain-text writes(), and then |
|
// emit a single compressed chunk some time in the future. |
|
// |
|
// Here's how this works: |
|
// |
|
// The Transform stream has all the aspects of the readable and writable |
|
// stream classes. When you write(chunk), that calls _write(chunk,cb) |
|
// internally, and returns false if there's a lot of pending writes |
|
// buffered up. When you call read(), that calls _read(n) until |
|
// there's enough pending readable data buffered up. |
|
// |
|
// In a transform stream, the written data is placed in a buffer. When |
|
// _read(n) is called, it transforms the queued up data, calling the |
|
// buffered _write cb's as it consumes chunks. If consuming a single |
|
// written chunk would result in multiple output chunks, then the first |
|
// outputted bit calls the readcb, and subsequent chunks just go into |
|
// the read buffer, and will cause it to emit 'readable' if necessary. |
|
// |
|
// This way, back-pressure is actually determined by the reading side, |
|
// since _read has to be called to start processing a new chunk. However, |
|
// a pathological inflate type of transform can cause excessive buffering |
|
// here. For example, imagine a stream where every byte of input is |
|
// interpreted as an integer from 0-255, and then results in that many |
|
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in |
|
// 1kb of data being output. In this case, you could write a very small |
|
// amount of input, and end up with a very large amount of output. In |
|
// such a pathological inflating mechanism, there'd be no way to tell |
|
// the system to stop doing the transform. A single 4MB write could |
|
// cause the system to run out of memory. |
|
// |
|
// However, even in such a pathological case, only a single written chunk |
|
// would be consumed, and then the rest would wait (un-transformed) until |
|
// the results of the previous transformed chunk were consumed. |
|
|
|
'use strict'; |
|
|
|
module.exports = Transform; |
|
|
|
var Duplex = require('./_stream_duplex'); |
|
|
|
/*<replacement>*/ |
|
var util = require('core-util-is'); |
|
util.inherits = require('inherits'); |
|
/*</replacement>*/ |
|
|
|
util.inherits(Transform, Duplex); |
|
|
|
function TransformState(stream) { |
|
this.afterTransform = function (er, data) { |
|
return afterTransform(stream, er, data); |
|
}; |
|
|
|
this.needTransform = false; |
|
this.transforming = false; |
|
this.writecb = null; |
|
this.writechunk = null; |
|
this.writeencoding = null; |
|
} |
|
|
|
function afterTransform(stream, er, data) { |
|
var ts = stream._transformState; |
|
ts.transforming = false; |
|
|
|
var cb = ts.writecb; |
|
|
|
if (!cb) { |
|
return stream.emit('error', new Error('write callback called multiple times')); |
|
} |
|
|
|
ts.writechunk = null; |
|
ts.writecb = null; |
|
|
|
if (data !== null && data !== undefined) stream.push(data); |
|
|
|
cb(er); |
|
|
|
var rs = stream._readableState; |
|
rs.reading = false; |
|
if (rs.needReadable || rs.length < rs.highWaterMark) { |
|
stream._read(rs.highWaterMark); |
|
} |
|
} |
|
|
|
function Transform(options) { |
|
if (!(this instanceof Transform)) return new Transform(options); |
|
|
|
Duplex.call(this, options); |
|
|
|
this._transformState = new TransformState(this); |
|
|
|
var stream = this; |
|
|
|
// start out asking for a readable event once data is transformed. |
|
this._readableState.needReadable = true; |
|
|
|
// we have implemented the _read method, and done the other things |
|
// that Readable wants before the first _read call, so unset the |
|
// sync guard flag. |
|
this._readableState.sync = false; |
|
|
|
if (options) { |
|
if (typeof options.transform === 'function') this._transform = options.transform; |
|
|
|
if (typeof options.flush === 'function') this._flush = options.flush; |
|
} |
|
|
|
// When the writable side finishes, then flush out anything remaining. |
|
this.once('prefinish', function () { |
|
if (typeof this._flush === 'function') this._flush(function (er, data) { |
|
done(stream, er, data); |
|
});else done(stream); |
|
}); |
|
} |
|
|
|
Transform.prototype.push = function (chunk, encoding) { |
|
this._transformState.needTransform = false; |
|
return Duplex.prototype.push.call(this, chunk, encoding); |
|
}; |
|
|
|
// This is the part where you do stuff! |
|
// override this function in implementation classes. |
|
// 'chunk' is an input chunk. |
|
// |
|
// Call `push(newChunk)` to pass along transformed output |
|
// to the readable side. You may call 'push' zero or more times. |
|
// |
|
// Call `cb(err)` when you are done with this chunk. If you pass |
|
// an error, then that'll put the hurt on the whole operation. If you |
|
// never call cb(), then you'll never get another chunk. |
|
Transform.prototype._transform = function (chunk, encoding, cb) { |
|
throw new Error('_transform() is not implemented'); |
|
}; |
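// Illustrative sketch (not part of the original module): a minimal concrete
// transform following the contract described above, e.g. an upper-caser:
//
//   var upper = new Transform({
//     transform: function (chunk, encoding, cb) {
//       cb(null, chunk.toString().toUpperCase());   // equivalent to push() + cb()
//     }
//   });
//   upper.on('data', function (d) { console.log(d.toString()); });
//   upper.end('hello');                             // logs 'HELLO'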
|
|
|
Transform.prototype._write = function (chunk, encoding, cb) { |
|
var ts = this._transformState; |
|
ts.writecb = cb; |
|
ts.writechunk = chunk; |
|
ts.writeencoding = encoding; |
|
if (!ts.transforming) { |
|
var rs = this._readableState; |
|
if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); |
|
} |
|
}; |
|
|
|
// Doesn't matter what the args are here. |
|
// _transform does all the work. |
|
// The fact that we got here means that the readable side wants more data.
|
Transform.prototype._read = function (n) { |
|
var ts = this._transformState; |
|
|
|
if (ts.writechunk !== null && ts.writecb && !ts.transforming) { |
|
ts.transforming = true; |
|
this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); |
|
} else { |
|
// mark that we need a transform, so that any data that comes in |
|
// will get processed, now that we've asked for it. |
|
ts.needTransform = true; |
|
} |
|
}; |
|
|
|
Transform.prototype._destroy = function (err, cb) { |
|
var _this = this; |
|
|
|
Duplex.prototype._destroy.call(this, err, function (err2) { |
|
cb(err2); |
|
_this.emit('close'); |
|
}); |
|
}; |
|
|
|
function done(stream, er, data) { |
|
if (er) return stream.emit('error', er); |
|
|
|
if (data !== null && data !== undefined) stream.push(data); |
|
|
|
// if there's nothing in the write buffer, then that means |
|
// that nothing more will ever be provided |
|
var ws = stream._writableState; |
|
var ts = stream._transformState; |
|
|
|
if (ws.length) throw new Error('Calling transform done when ws.length != 0'); |
|
|
|
if (ts.transforming) throw new Error('Calling transform done when still transforming'); |
|
|
|
return stream.push(null); |
|
} |
|
},{"./_stream_duplex":16,"core-util-is":8,"inherits":11}],20:[function(require,module,exports){ |
|
(function (process,global){ |
|
// Copyright Joyent, Inc. and other Node contributors. |
|
// |
|
// Permission is hereby granted, free of charge, to any person obtaining a |
|
// copy of this software and associated documentation files (the |
|
// "Software"), to deal in the Software without restriction, including |
|
// without limitation the rights to use, copy, modify, merge, publish, |
|
// distribute, sublicense, and/or sell copies of the Software, and to permit |
|
// persons to whom the Software is furnished to do so, subject to the |
|
// following conditions: |
|
// |
|
// The above copyright notice and this permission notice shall be included |
|
// in all copies or substantial portions of the Software. |
|
// |
|
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS |
|
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
|
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN |
|
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, |
|
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR |
|
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE |
|
// USE OR OTHER DEALINGS IN THE SOFTWARE. |
|
|
|
// A bit simpler than readable streams. |
|
// Implement an async ._write(chunk, encoding, cb), and it'll handle all |
|
// the drain event emission and buffering. |
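// Illustrative sketch (not part of the original module): the entire contract
// for a user of this class is one asynchronous _write, e.g.
//
//   var sink = new Writable({
//     write: function (chunk, encoding, cb) {
//       console.log('sink got %d bytes', chunk.length);
//       cb();                        // signal that this chunk has been handled
//     }
//   });
//   sink.write('hello');
//   sink.end('world', function () { console.log('all flushed'); });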
|
|
|
'use strict'; |
|
|
|
/*<replacement>*/ |
|
|
|
var processNextTick = require('process-nextick-args'); |
|
/*</replacement>*/ |
|
|
|
module.exports = Writable; |
|
|
|
/* <replacement> */ |
|
function WriteReq(chunk, encoding, cb) { |
|
this.chunk = chunk; |
|
this.encoding = encoding; |
|
this.callback = cb; |
|
this.next = null; |
|
} |
|
|
|
// It looks like a linked list, but it is not:
// there will only ever be 2 of these for each stream
|
function CorkedRequest(state) { |
|
var _this = this; |
|
|
|
this.next = null; |
|
this.entry = null; |
|
this.finish = function () { |
|
onCorkedFinish(_this, state); |
|
}; |
|
} |
|
/* </replacement> */ |
|
|
|
/*<replacement>*/ |
|
var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : processNextTick; |
|
/*</replacement>*/ |
|
|
|
/*<replacement>*/ |
|
var Duplex; |
|
/*</replacement>*/ |
|
|
|
Writable.WritableState = WritableState; |
|
|
|
/*<replacement>*/ |
|
var util = require('core-util-is'); |
|
util.inherits = require('inherits'); |
|
/*</replacement>*/ |
|
|
|
/*<replacement>*/ |
|
var internalUtil = { |
|
deprecate: require('util-deprecate') |
|
}; |
|
/*</replacement>*/ |
|
|
|
/*<replacement>*/ |
|
var Stream = require('./internal/streams/stream'); |
|
/*</replacement>*/ |
|
|
|
/*<replacement>*/ |
|
var Buffer = require('safe-buffer').Buffer; |
|
var OurUint8Array = global.Uint8Array || function () {}; |
|
function _uint8ArrayToBuffer(chunk) { |
|
return Buffer.from(chunk); |
|
} |
|
function _isUint8Array(obj) { |
|
return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; |
|
} |
|
/*</replacement>*/ |
|
|
|
var destroyImpl = require('./internal/streams/destroy'); |
|
|
|
util.inherits(Writable, Stream); |
|
|
|
function nop() {} |
|
|
|
function WritableState(options, stream) { |
|
Duplex = Duplex || require('./_stream_duplex'); |
|
|
|
options = options || {}; |
|
|
|
// object stream flag to indicate whether or not this stream |
|
// contains buffers or objects. |
|
this.objectMode = !!options.objectMode; |
|
|
|
if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.writableObjectMode; |
|
|
|
// the point at which write() starts returning false |
|
// Note: 0 is a valid value, means that we always return false if |
|
// the entire buffer is not flushed immediately on write() |
|
var hwm = options.highWaterMark; |
|
var defaultHwm = this.objectMode ? 16 : 16 * 1024; |
|
this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; |
|
|
|
// cast to ints. |
|
this.highWaterMark = Math.floor(this.highWaterMark); |
|
|
|
// if _final has been called |
|
this.finalCalled = false; |
|
|
|
// drain event flag. |
|
this.needDrain = false; |
|
// at the start of calling end() |
|
this.ending = false; |
|
// when end() has been called, and returned |
|
this.ended = false; |
|
// when 'finish' is emitted |
|
this.finished = false; |
|
|
|
// has it been destroyed |
|
this.destroyed = false; |
|
|
|
// should we decode strings into buffers before passing to _write? |
|
// this is here so that some node-core streams can optimize string |
|
// handling at a lower level. |
|
var noDecode = options.decodeStrings === false; |
|
this.decodeStrings = !noDecode; |
|
|
|
// Crypto is kind of old and crusty. Historically, its default string |
|
// encoding is 'binary' so we have to make this configurable. |
|
// Everything else in the universe uses 'utf8', though. |
|
this.defaultEncoding = options.defaultEncoding || 'utf8'; |
|
|
|
// not an actual buffer we keep track of, but a measurement |
|
// of how much we're waiting to get pushed to some underlying |
|
// socket or file. |
|
this.length = 0; |
|
|
|
// a flag to see when we're in the middle of a write. |
|
this.writing = false; |
|
|
|
// when true all writes will be buffered until .uncork() call |
|
this.corked = 0; |
|
|
|
// a flag to be able to tell if the onwrite cb is called immediately, |
|
// or on a later tick. We set this to true at first, because any |
|
// actions that shouldn't happen until "later" should generally also |
|
// not happen before the first write call. |
|
this.sync = true; |
|
|
|
// a flag to know if we're processing previously buffered items, which |
|
// may call the _write() callback in the same tick, so that we don't |
|
// end up in an overlapped onwrite situation. |
|
this.bufferProcessing = false; |
|
|
|
// the callback that's passed to _write(chunk,cb) |
|
this.onwrite = function (er) { |
|
onwrite(stream, er); |
|
}; |
|
|
|
// the callback that the user supplies to write(chunk,encoding,cb) |
|
this.writecb = null; |
|
|
|
// the amount that is being written when _write is called. |
|
this.writelen = 0; |
|
|
|
this.bufferedRequest = null; |
|
this.lastBufferedRequest = null; |
|
|
|
// number of pending user-supplied write callbacks |
|
// this must be 0 before 'finish' can be emitted |
|
this.pendingcb = 0; |
|
|
|
// emit prefinish if the only thing we're waiting for is _write cbs |
|
// This is relevant for synchronous Transform streams |
|
this.prefinished = false; |
|
|
|
// True if the error was already emitted and should not be thrown again |
|
this.errorEmitted = false; |
|
|
|
// count buffered requests |
|
this.bufferedRequestCount = 0; |
|
|
|
// allocate the first CorkedRequest, there is always |
|
// one allocated and free to use, and we maintain at most two |
|
this.corkedRequestsFree = new CorkedRequest(this); |
|
} |
|
|
|
WritableState.prototype.getBuffer = function getBuffer() { |
|
var current = this.bufferedRequest; |
|
var out = []; |
|
while (current) { |
|
out.push(current); |
|
current = current.next; |
|
} |
|
return out; |
|
}; |
|
|
|
(function () { |
|
try { |
|
Object.defineProperty(WritableState.prototype, 'buffer', { |
|
get: internalUtil.deprecate(function () { |
|
return this.getBuffer(); |
|
}, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') |
|
}); |
|
} catch (_) {} |
|
})(); |
|
|
|
// Test _writableState for inheritance to account for Duplex streams, |
|
// whose prototype chain only points to Readable. |
|
var realHasInstance; |
|
if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { |
|
realHasInstance = Function.prototype[Symbol.hasInstance]; |
|
Object.defineProperty(Writable, Symbol.hasInstance, { |
|
value: function (object) { |
|
if (realHasInstance.call(this, object)) return true; |
|
|
|
return object && object._writableState instanceof WritableState; |
|
} |
|
}); |
|
} else { |
|
realHasInstance = function (object) { |
|
return object instanceof this; |
|
}; |
|
} |
|
|
|
function Writable(options) { |
|
Duplex = Duplex || require('./_stream_duplex'); |
|
|
|
// Writable ctor is applied to Duplexes, too. |
|
// `realHasInstance` is necessary because using plain `instanceof` |
|
// would return false, as no `_writableState` property is attached. |
|
|
|
// Trying to use the custom `instanceof` for Writable here will also break the |
|
// Node.js LazyTransform implementation, which has a non-trivial getter for |
|
// `_writableState` that would lead to infinite recursion. |
|
if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { |
|
return new Writable(options); |
|
} |
|
|
|
this._writableState = new WritableState(options, this); |
|
|
|
// legacy. |
|
this.writable = true; |
|
|
|
if (options) { |
|
if (typeof options.write === 'function') this._write = options.write; |
|
|
|
if (typeof options.writev === 'function') this._writev = options.writev; |
|
|
|
if (typeof options.destroy === 'function') this._destroy = options.destroy; |
|
|
|
if (typeof options.final === 'function') this._final = options.final; |
|
} |
|
|
|
Stream.call(this); |
|
} |
|
|
|
// Otherwise people can pipe Writable streams, which is just wrong. |
|
Writable.prototype.pipe = function () { |
|
this.emit('error', new Error('Cannot pipe, not readable')); |
|
}; |
|
|
|
function writeAfterEnd(stream, cb) { |
|
var er = new Error('write after end'); |
|
// TODO: defer error events consistently everywhere, not just the cb |
|
stream.emit('error', er); |
|
processNextTick(cb, er); |
|
} |
|
|
|
// Checks that a user-supplied chunk is valid, especially for the particular |
|
// mode the stream is in. Currently this means that `null` is never accepted |
|
// and undefined/non-string values are only allowed in object mode. |
|
function validChunk(stream, state, chunk, cb) { |
|
var valid = true; |
|
var er = false; |
|
|
|
if (chunk === null) { |
|
er = new TypeError('May not write null values to stream'); |
|
} else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { |
|
er = new TypeError('Invalid non-string/buffer chunk'); |
|
} |
|
if (er) { |
|
stream.emit('error', er); |
|
processNextTick(cb, er); |
|
valid = false; |
|
} |
|
return valid; |
|
} |
|
|
|
Writable.prototype.write = function (chunk, encoding, cb) { |
|
var state = this._writableState; |
|
var ret = false; |
|
var isBuf = _isUint8Array(chunk) && !state.objectMode; |
|
|
|
if (isBuf && !Buffer.isBuffer(chunk)) { |
|
chunk = _uint8ArrayToBuffer(chunk); |
|
} |
|
|
|
if (typeof encoding === 'function') { |
|
cb = encoding; |
|
encoding = null; |
|
} |
|
|
|
if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; |
|
|
|
if (typeof cb !== 'function') cb = nop; |
|
|
|
if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { |
|
state.pendingcb++; |
|
ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); |
|
} |
|
|
|
return ret; |
|
}; |
|
|
|
Writable.prototype.cork = function () { |
|
var state = this._writableState; |
|
|
|
state.corked++; |
|
}; |
|
|
|
Writable.prototype.uncork = function () { |
|
var state = this._writableState; |
|
|
|
if (state.corked) { |
|
state.corked--; |
|
|
|
if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); |
|
} |
|
}; |
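/*
 * Illustrative sketch (not part of the original bundle; `ws` is a
 * hypothetical instance): cork() bumps a counter so write() only buffers,
 * and the matching uncork() flushes the queue -- when a _writev is
 * supplied, all corked chunks arrive in a single batched call.
 *
 *   var ws = new Writable({
 *     write: function (chunk, enc, cb) { cb(); },
 *     writev: function (chunks, cb) {
 *       // chunks.length -> 2, each entry exposes .chunk and .encoding
 *       cb();
 *     }
 *   });
 *   ws.cork();
 *   ws.write('a');
 *   ws.write('b');
 *   ws.uncork();   // corked reaches 0 and clearBuffer() hands both to _writev
 */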
|
|
|
Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { |
|
// node::ParseEncoding() requires lower case. |
|
if (typeof encoding === 'string') encoding = encoding.toLowerCase(); |
|
if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); |
|
this._writableState.defaultEncoding = encoding; |
|
return this; |
|
}; |
|
|
|
function decodeChunk(state, chunk, encoding) { |
|
if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { |
|
chunk = Buffer.from(chunk, encoding); |
|
} |
|
return chunk; |
|
} |
|
|
|
// if we're already writing something, then just put this |
|
// in the queue, and wait our turn. Otherwise, call _write |
|
// If we return false, then we need a drain event, so set that flag. |
|
function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { |
|
if (!isBuf) { |
|
var newChunk = decodeChunk(state, chunk, encoding); |
|
if (chunk !== newChunk) { |
|
isBuf = true; |
|
encoding = 'buffer'; |
|
chunk = newChunk; |
|
} |
|
} |
|
var len = state.objectMode ? 1 : chunk.length; |
|
|
|
state.length += len; |
|
|
|
var ret = state.length < state.highWaterMark; |
|
// we must ensure that previous needDrain will not be reset to false. |
|
if (!ret) state.needDrain = true; |
|
|
|
if (state.writing || state.corked) { |
|
var last = state.lastBufferedRequest; |
|
state.lastBufferedRequest = { |
|
chunk: chunk, |
|
encoding: encoding, |
|
isBuf: isBuf, |
|
callback: cb, |
|
next: null |
|
}; |
|
if (last) { |
|
last.next = state.lastBufferedRequest; |
|
} else { |
|
state.bufferedRequest = state.lastBufferedRequest; |
|
} |
|
state.bufferedRequestCount += 1; |
|
} else { |
|
doWrite(stream, state, false, len, chunk, encoding, cb); |
|
} |
|
|
|
return ret; |
|
} |
|
|
|
function doWrite(stream, state, writev, len, chunk, encoding, cb) { |
|
state.writelen = len; |
|
state.writecb = cb; |
|
state.writing = true; |
|
state.sync = true; |
|
if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); |
|
state.sync = false; |
|
} |
|
|
|
function onwriteError(stream, state, sync, er, cb) { |
|
--state.pendingcb; |
|
|
|
if (sync) { |
|
// defer the callback if we are being called synchronously |
|
// to avoid piling up things on the stack |
|
processNextTick(cb, er); |
|
// this can emit finish, and it will always happen |
|
// after error |
|
processNextTick(finishMaybe, stream, state); |
|
stream._writableState.errorEmitted = true; |
|
stream.emit('error', er); |
|
} else { |
|
// the caller expects the callback to be invoked before the 'error'

// event when the write completes asynchronously
|
cb(er); |
|
stream._writableState.errorEmitted = true; |
|
stream.emit('error', er); |
|
// this can emit finish, but finish must |
|
// always follow error |
|
finishMaybe(stream, state); |
|
} |
|
} |
|
|
|
function onwriteStateUpdate(state) { |
|
state.writing = false; |
|
state.writecb = null; |
|
state.length -= state.writelen; |
|
state.writelen = 0; |
|
} |
|
|
|
function onwrite(stream, er) { |
|
var state = stream._writableState; |
|
var sync = state.sync; |
|
var cb = state.writecb; |
|
|
|
onwriteStateUpdate(state); |
|
|
|
if (er) onwriteError(stream, state, sync, er, cb);else { |
|
// Check if we're actually ready to finish, but don't emit yet |
|
var finished = needFinish(state); |
|
|
|
if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { |
|
clearBuffer(stream, state); |
|
} |
|
|
|
if (sync) { |
|
/*<replacement>*/ |
|
asyncWrite(afterWrite, stream, state, finished, cb); |
|
/*</replacement>*/ |
|
} else { |
|
afterWrite(stream, state, finished, cb); |
|
} |
|
} |
|
} |
|
|
|
function afterWrite(stream, state, finished, cb) { |
|
if (!finished) onwriteDrain(stream, state); |
|
state.pendingcb--; |
|
cb(); |
|
finishMaybe(stream, state); |
|
} |
|
|
|
// Must force callback to be called on nextTick, so that we don't |
|
// emit 'drain' before the write() consumer gets the 'false' return |
|
// value, and has a chance to attach a 'drain' listener. |
|
function onwriteDrain(stream, state) { |
|
if (state.length === 0 && state.needDrain) { |
|
state.needDrain = false; |
|
stream.emit('drain'); |
|
} |
|
} |
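/*
 * Illustrative sketch (hypothetical producer loop, not from the bundle):
 * when write() returns false the internal buffer is at or above
 * highWaterMark; per onwriteDrain() above, 'drain' fires once the buffered
 * length falls back to zero.
 *
 *   function writeAll(ws, chunks) {
 *     while (chunks.length) {
 *       if (!ws.write(chunks.shift())) {
 *         ws.once('drain', function () { writeAll(ws, chunks); });
 *         return;
 *       }
 *     }
 *     ws.end();
 *   }
 */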
|
|
|
// if there's something in the buffer waiting, then process it |
|
function clearBuffer(stream, state) { |
|
state.bufferProcessing = true; |
|
var entry = state.bufferedRequest; |
|
|
|
if (stream._writev && entry && entry.next) { |
|
// Fast case, write everything using _writev() |
|
var l = state.bufferedRequestCount; |
|
var buffer = new Array(l); |
|
var holder = state.corkedRequestsFree; |
|
holder.entry = entry; |
|
|
|
var count = 0; |
|
var allBuffers = true; |
|
while (entry) { |
|
buffer[count] = entry; |
|
if (!entry.isBuf) allBuffers = false; |
|
entry = entry.next; |
|
count += 1; |
|
} |
|
buffer.allBuffers = allBuffers; |
|
|
|
doWrite(stream, state, true, state.length, buffer, '', holder.finish); |
|
|
|
// doWrite is almost always async, defer these to save a bit of time |
|
// as the hot path ends with doWrite |
|
state.pendingcb++; |
|
state.lastBufferedRequest = null; |
|
if (holder.next) { |
|
state.corkedRequestsFree = holder.next; |
|
holder.next = null; |
|
} else { |
|
state.corkedRequestsFree = new CorkedRequest(state); |
|
} |
|
} else { |
|
// Slow case, write chunks one-by-one |
|
while (entry) { |
|
var chunk = entry.chunk; |
|
var encoding = entry.encoding; |
|
var cb = entry.callback; |
|
var len = state.objectMode ? 1 : chunk.length; |
|
|
|
doWrite(stream, state, false, len, chunk, encoding, cb); |
|
entry = entry.next; |
|
// if we didn't call the onwrite immediately, then |
|
// it means that we need to wait until it does. |
|
// also, that means that the chunk and cb are currently |
|
// being processed, so move the buffer counter past them. |
|
if (state.writing) { |
|
break; |
|
} |
|
} |
|
|
|
if (entry === null) state.lastBufferedRequest = null; |
|
} |
|
|
|
state.bufferedRequestCount = 0; |
|
state.bufferedRequest = entry; |
|
state.bufferProcessing = false; |
|
} |
|
|
|
Writable.prototype._write = function (chunk, encoding, cb) { |
|
cb(new Error('_write() is not implemented')); |
|
}; |
|
|
|
Writable.prototype._writev = null; |
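/*
 * Illustrative sketch (hypothetical sink): the default _write above only
 * errors, so consumers pass write/writev/final/destroy through the options
 * object handled in the Writable constructor.
 *
 *   var lines = [];
 *   var sink = new Writable({
 *     decodeStrings: false,               // hand strings through untouched
 *     write: function (chunk, encoding, cb) {
 *       lines.push(String(chunk));
 *       cb();                             // signal the chunk was consumed
 *     },
 *     final: function (cb) { cb(); }      // runs before 'finish' is emitted
 *   });
 *   sink.write('hello');
 *   sink.end('world');
 */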
|
|
|
Writable.prototype.end = function (chunk, encoding, cb) { |
|
var state = this._writableState; |
|
|
|
if (typeof chunk === 'function') { |
|
cb = chunk; |
|
chunk = null; |
|
encoding = null; |
|
} else if (typeof encoding === 'function') { |
|
cb = encoding; |
|
encoding = null; |
|
} |
|
|
|
if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); |
|
|
|
// .end() fully uncorks |
|
if (state.corked) { |
|
state.corked = 1; |
|
this.uncork(); |
|
} |
|
|
|
// ignore unnecessary end() calls. |
|
if (!state.ending && !state.finished) endWritable(this, state, cb); |
|
}; |
|
|
|
function needFinish(state) { |
|
return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; |
|
} |
|
function callFinal(stream, state) { |
|
stream._final(function (err) { |
|
state.pendingcb--; |
|
if (err) { |
|
stream.emit('error', err); |
|
} |
|
state.prefinished = true; |
|
stream.emit('prefinish'); |
|
finishMaybe(stream, state); |
|
}); |
|
} |
|
function prefinish(stream, state) { |
|
if (!state.prefinished && !state.finalCalled) { |
|
if (typeof stream._final === 'function') { |
|
state.pendingcb++; |
|
state.finalCalled = true; |
|
processNextTick(callFinal, stream, state); |
|
} else { |
|
state.prefinished = true; |
|
stream.emit('prefinish'); |
|
} |
|
} |
|
} |
|
|
|
function finishMaybe(stream, state) { |
|
var need = needFinish(state); |
|
if (need) { |
|
prefinish(stream, state); |
|
if (state.pendingcb === 0) { |
|
state.finished = true; |
|
stream.emit('finish'); |
|
} |
|
} |
|
return need; |
|
} |
|
|
|
function endWritable(stream, state, cb) { |
|
state.ending = true; |
|
finishMaybe(stream, state); |
|
if (cb) { |
|
if (state.finished) processNextTick(cb);else stream.once('finish', cb); |
|
} |
|
state.ended = true; |
|
stream.writable = false; |
|
} |
|
|
|
function onCorkedFinish(corkReq, state, err) { |
|
var entry = corkReq.entry; |
|
corkReq.entry = null; |
|
while (entry) { |
|
var cb = entry.callback; |
|
state.pendingcb--; |
|
cb(err); |
|
entry = entry.next; |
|
} |
|
if (state.corkedRequestsFree) { |
|
state.corkedRequestsFree.next = corkReq; |
|
} else { |
|
state.corkedRequestsFree = corkReq; |
|
} |
|
} |
|
|
|
Object.defineProperty(Writable.prototype, 'destroyed', { |
|
get: function () { |
|
if (this._writableState === undefined) { |
|
return false; |
|
} |
|
return this._writableState.destroyed; |
|
}, |
|
set: function (value) { |
|
// we ignore the value if the stream |
|
// has not been initialized yet |
|
if (!this._writableState) { |
|
return; |
|
} |
|
|
|
// backward compatibility, the user is explicitly |
|
// managing destroyed |
|
this._writableState.destroyed = value; |
|
} |
|
}); |
|
|
|
Writable.prototype.destroy = destroyImpl.destroy; |
|
Writable.prototype._undestroy = destroyImpl.undestroy; |
|
Writable.prototype._destroy = function (err, cb) { |
|
this.end(); |
|
cb(err); |
|
}; |
|
}).call(this,require('_process'),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) |
|
},{"./_stream_duplex":16,"./internal/streams/destroy":22,"./internal/streams/stream":23,"_process":41,"core-util-is":8,"inherits":11,"process-nextick-args":14,"safe-buffer":25,"util-deprecate":29}],21:[function(require,module,exports){ |
|
'use strict'; |
|
|
|
/*<replacement>*/ |
|
|
|
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } |
|
|
|
var Buffer = require('safe-buffer').Buffer; |
|
/*</replacement>*/ |
|
|
|
function copyBuffer(src, target, offset) { |
|
src.copy(target, offset); |
|
} |
|
|
|
module.exports = function () { |
|
function BufferList() { |
|
_classCallCheck(this, BufferList); |
|
|
|
this.head = null; |
|
this.tail = null; |
|
this.length = 0; |
|
} |
|
|
|
BufferList.prototype.push = function push(v) { |
|
var entry = { data: v, next: null }; |
|
if (this.length > 0) this.tail.next = entry;else this.head = entry; |
|
this.tail = entry; |
|
++this.length; |
|
}; |
|
|
|
BufferList.prototype.unshift = function unshift(v) { |
|
var entry = { data: v, next: this.head }; |
|
if (this.length === 0) this.tail = entry; |
|
this.head = entry; |
|
++this.length; |
|
}; |
|
|
|
BufferList.prototype.shift = function shift() { |
|
if (this.length === 0) return; |
|
var ret = this.head.data; |
|
if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; |
|
--this.length; |
|
return ret; |
|
}; |
|
|
|
BufferList.prototype.clear = function clear() { |
|
this.head = this.tail = null; |
|
this.length = 0; |
|
}; |
|
|
|
BufferList.prototype.join = function join(s) { |
|
if (this.length === 0) return ''; |
|
var p = this.head; |
|
var ret = '' + p.data; |
|
while (p = p.next) { |
|
ret += s + p.data; |
|
}

return ret;
|
}; |
|
|
|
BufferList.prototype.concat = function concat(n) { |
|
if (this.length === 0) return Buffer.alloc(0); |
|
if (this.length === 1) return this.head.data; |
|
var ret = Buffer.allocUnsafe(n >>> 0); |
|
var p = this.head; |
|
var i = 0; |
|
while (p) { |
|
copyBuffer(p.data, ret, i); |
|
i += p.data.length; |
|
p = p.next; |
|
} |
|
return ret; |
|
}; |
|
|
|
return BufferList; |
|
}(); |
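/*
 * Illustrative sketch: basic use of the exported list (the `BufferList`
 * binding below is just the local module.exports).
 *
 *   var BufferList = module.exports;
 *   var list = new BufferList();
 *   list.push(Buffer.from('foo'));
 *   list.push(Buffer.from('bar'));
 *   list.length;                  // -> 2 (entries, not bytes)
 *   list.concat(6).toString();    // -> 'foobar' (6 = total byte length)
 *   list.shift().toString();      // -> 'foo'
 */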
|
},{"safe-buffer":25}],22:[function(require,module,exports){ |
|
'use strict'; |
|
|
|
/*<replacement>*/ |
|
|
|
var processNextTick = require('process-nextick-args'); |
|
/*</replacement>*/ |
|
|
|
// undocumented cb() API, needed for core, not for public API |
|
function destroy(err, cb) { |
|
var _this = this; |
|
|
|
var readableDestroyed = this._readableState && this._readableState.destroyed; |
|
var writableDestroyed = this._writableState && this._writableState.destroyed; |
|
|
|
if (readableDestroyed || writableDestroyed) { |
|
if (cb) { |
|
cb(err); |
|
} else if (err && (!this._writableState || !this._writableState.errorEmitted)) { |
|
processNextTick(emitErrorNT, this, err); |
|
} |
|
return; |
|
} |
|
|
|
// we set destroyed to true before firing error callbacks in order |
|
// to make it safe to re-enter in case destroy() is called from within those callbacks
|
|
|
if (this._readableState) { |
|
this._readableState.destroyed = true; |
|
} |
|
|
|
// if this is a duplex stream mark the writable part as destroyed as well |
|
if (this._writableState) { |
|
this._writableState.destroyed = true; |
|
} |
|
|
|
this._destroy(err || null, function (err) { |
|
if (!cb && err) { |
|
processNextTick(emitErrorNT, _this, err); |
|
if (_this._writableState) { |
|
_this._writableState.errorEmitted = true; |
|
} |
|
} else if (cb) { |
|
cb(err); |
|
} |
|
}); |
|
} |
|
|
|
function undestroy() { |
|
if (this._readableState) { |
|
this._readableState.destroyed = false; |
|
this._readableState.reading = false; |
|
this._readableState.ended = false; |
|
this._readableState.endEmitted = false; |
|
} |
|
|
|
if (this._writableState) { |
|
this._writableState.destroyed = false; |
|
this._writableState.ended = false; |
|
this._writableState.ending = false; |
|
this._writableState.finished = false; |
|
this._writableState.errorEmitted = false; |
|
} |
|
} |
|
|
|
function emitErrorNT(self, err) { |
|
self.emit('error', err); |
|
} |
|
|
|
module.exports = { |
|
destroy: destroy, |
|
undestroy: undestroy |
|
}; |
|
},{"process-nextick-args":14}],23:[function(require,module,exports){ |
|
module.exports = require('events').EventEmitter; |
|
|
|
},{"events":35}],24:[function(require,module,exports){ |
|
exports = module.exports = require('./lib/_stream_readable.js'); |
|
exports.Stream = exports; |
|
exports.Readable = exports; |
|
exports.Writable = require('./lib/_stream_writable.js'); |
|
exports.Duplex = require('./lib/_stream_duplex.js'); |
|
exports.Transform = require('./lib/_stream_transform.js'); |
|
exports.PassThrough = require('./lib/_stream_passthrough.js'); |
|
|
|
},{"./lib/_stream_duplex.js":16,"./lib/_stream_passthrough.js":17,"./lib/_stream_readable.js":18,"./lib/_stream_transform.js":19,"./lib/_stream_writable.js":20}],25:[function(require,module,exports){ |
|
/* eslint-disable node/no-deprecated-api */ |
|
var buffer = require('buffer') |
|
var Buffer = buffer.Buffer |
|
|
|
// alternative to using Object.keys for old browsers |
|
function copyProps (src, dst) { |
|
for (var key in src) { |
|
dst[key] = src[key] |
|
} |
|
} |
|
if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { |
|
module.exports = buffer |
|
} else { |
|
// Copy properties from require('buffer') |
|
copyProps(buffer, exports) |
|
exports.Buffer = SafeBuffer |
|
} |
|
|
|
function SafeBuffer (arg, encodingOrOffset, length) { |
|
return Buffer(arg, encodingOrOffset, length) |
|
} |
|
|
|
// Copy static methods from Buffer |
|
copyProps(Buffer, SafeBuffer) |
|
|
|
SafeBuffer.from = function (arg, encodingOrOffset, length) { |
|
if (typeof arg === 'number') { |
|
throw new TypeError('Argument must not be a number') |
|
} |
|
return Buffer(arg, encodingOrOffset, length) |
|
} |
|
|
|
SafeBuffer.alloc = function (size, fill, encoding) { |
|
if (typeof size !== 'number') { |
|
throw new TypeError('Argument must be a number') |
|
} |
|
var buf = Buffer(size) |
|
if (fill !== undefined) { |
|
if (typeof encoding === 'string') { |
|
buf.fill(fill, encoding) |
|
} else { |
|
buf.fill(fill) |
|
} |
|
} else { |
|
buf.fill(0) |
|
} |
|
return buf |
|
} |
|
|
|
SafeBuffer.allocUnsafe = function (size) { |
|
if (typeof size !== 'number') { |
|
throw new TypeError('Argument must be a number') |
|
} |
|
return Buffer(size) |
|
} |
|
|
|
SafeBuffer.allocUnsafeSlow = function (size) { |
|
if (typeof size !== 'number') { |
|
throw new TypeError('Argument must be a number') |
|
} |
|
return buffer.SlowBuffer(size) |
|
} |
|
|
|
},{"buffer":33}],26:[function(require,module,exports){ |
|
var tick = 1 |
|
var maxTick = 65535 |
|
var resolution = 4 |
|
var inc = function () { |
|
tick = (tick + 1) & maxTick |
|
} |
|
|
|
var timer = setInterval(inc, (1000 / resolution) | 0) |
|
if (timer.unref) timer.unref() |
|
|
|
module.exports = function (seconds) { |
|
var size = resolution * (seconds || 5) |
|
var buffer = [0] |
|
var pointer = 1 |
|
var last = (tick - 1) & maxTick |
|
|
|
return function (delta) { |
|
var dist = (tick - last) & maxTick |
|
if (dist > size) dist = size |
|
last = tick |
|
|
|
while (dist--) { |
|
if (pointer === size) pointer = 0 |
|
buffer[pointer] = buffer[pointer === 0 ? size - 1 : pointer - 1] |
|
pointer++ |
|
} |
|
|
|
if (delta) buffer[pointer - 1] += delta |
|
|
|
var top = buffer[pointer - 1] |
|
var btm = buffer.length < size ? 0 : buffer[pointer === size ? 0 : pointer] |
|
|
|
return buffer.length < resolution ? top : (top - btm) * resolution / buffer.length |
|
} |
|
} |
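/*
 * Illustrative sketch (names are hypothetical): the factory above returns a
 * meter that keeps a small circular buffer of totals, sampled `resolution`
 * times per second, and reports a rolling per-second average over roughly
 * the requested window.
 *
 *   var createMeter = module.exports;
 *   var speed = createMeter(5);           // ~5 second window
 *   // on each received chunk:
 *   //   var bytesPerSecond = speed(chunk.length);
 */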
|
|
|
},{}],27:[function(require,module,exports){ |
|
'use strict'; |
|
|
|
var Buffer = require('safe-buffer').Buffer; |
|
|
|
var isEncoding = Buffer.isEncoding || function (encoding) { |
|
encoding = '' + encoding; |
|
switch (encoding && encoding.toLowerCase()) { |
|
case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': |
|
return true; |
|
default: |
|
return false; |
|
} |
|
}; |
|
|
|
function _normalizeEncoding(enc) { |
|
if (!enc) return 'utf8'; |
|
var retried; |
|
while (true) { |
|
switch (enc) { |
|
case 'utf8': |
|
case 'utf-8': |
|
return 'utf8'; |
|
case 'ucs2': |
|
case 'ucs-2': |
|
case 'utf16le': |
|
case 'utf-16le': |
|
return 'utf16le'; |
|
case 'latin1': |
|
case 'binary': |
|
return 'latin1'; |
|
case 'base64': |
|
case 'ascii': |
|
case 'hex': |
|
return enc; |
|
default: |
|
if (retried) return; // undefined |
|
enc = ('' + enc).toLowerCase(); |
|
retried = true; |
|
} |
|
} |
|
}
|
|
|
// Do not cache `Buffer.isEncoding` when checking encoding names as some |
|
// modules monkey-patch it to support additional encodings |
|
function normalizeEncoding(enc) { |
|
var nenc = _normalizeEncoding(enc); |
|
if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); |
|
return nenc || enc; |
|
} |
|
|
|
// StringDecoder provides an interface for efficiently splitting a series of |
|
// buffers into a series of JS strings without breaking apart multi-byte |
|
// characters. |
|
exports.StringDecoder = StringDecoder; |
|
function StringDecoder(encoding) { |
|
this.encoding = normalizeEncoding(encoding); |
|
var nb; |
|
switch (this.encoding) { |
|
case 'utf16le': |
|
this.text = utf16Text; |
|
this.end = utf16End; |
|
nb = 4; |
|
break; |
|
case 'utf8': |
|
this.fillLast = utf8FillLast; |
|
nb = 4; |
|
break; |
|
case 'base64': |
|
this.text = base64Text; |
|
this.end = base64End; |
|
nb = 3; |
|
break; |
|
default: |
|
this.write = simpleWrite; |
|
this.end = simpleEnd; |
|
return; |
|
} |
|
this.lastNeed = 0; |
|
this.lastTotal = 0; |
|
this.lastChar = Buffer.allocUnsafe(nb); |
|
} |
|
|
|
StringDecoder.prototype.write = function (buf) { |
|
if (buf.length === 0) return ''; |
|
var r; |
|
var i; |
|
if (this.lastNeed) { |
|
r = this.fillLast(buf); |
|
if (r === undefined) return ''; |
|
i = this.lastNeed; |
|
this.lastNeed = 0; |
|
} else { |
|
i = 0; |
|
} |
|
if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); |
|
return r || ''; |
|
}; |
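/*
 * Illustrative sketch: a multi-byte UTF-8 character split across chunks is
 * held in `lastChar` until the remaining continuation bytes arrive.
 *
 *   var sd = new StringDecoder('utf8');
 *   var euro = Buffer.from([0xE2, 0x82, 0xAC]);   // '€'
 *   sd.write(euro.slice(0, 1));                   // -> ''  (incomplete)
 *   sd.write(euro.slice(1));                      // -> '€'
 *   sd.end();                                     // -> ''
 */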
|
|
|
StringDecoder.prototype.end = utf8End; |
|
|
|
// Returns only complete characters in a Buffer |
|
StringDecoder.prototype.text = utf8Text; |
|
|
|
// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer |
|
StringDecoder.prototype.fillLast = function (buf) { |
|
if (this.lastNeed <= buf.length) { |
|
buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); |
|
return this.lastChar.toString(this.encoding, 0, this.lastTotal); |
|
} |
|
buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); |
|
this.lastNeed -= buf.length; |
|
}; |
|
|
|
// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a |
|
// continuation byte. |
|
function utf8CheckByte(byte) { |
|
if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; |
|
return -1; |
|
} |
|
|
|
// Checks at most 3 bytes at the end of a Buffer in order to detect an |
|
// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) |
|
// needed to complete the UTF-8 character (if applicable) are returned. |
|
function utf8CheckIncomplete(self, buf, i) { |
|
var j = buf.length - 1; |
|
if (j < i) return 0; |
|
var nb = utf8CheckByte(buf[j]); |
|
if (nb >= 0) { |
|
if (nb > 0) self.lastNeed = nb - 1; |
|
return nb; |
|
} |
|
if (--j < i) return 0; |
|
nb = utf8CheckByte(buf[j]); |
|
if (nb >= 0) { |
|
if (nb > 0) self.lastNeed = nb - 2; |
|
return nb; |
|
} |
|
if (--j < i) return 0; |
|
nb = utf8CheckByte(buf[j]); |
|
if (nb >= 0) { |
|
if (nb > 0) { |
|
if (nb === 2) nb = 0;else self.lastNeed = nb - 3; |
|
} |
|
return nb; |
|
} |
|
return 0; |
|
} |
|
|
|
// Validates as many continuation bytes for a multi-byte UTF-8 character as |
|
// needed or are available. If we see a non-continuation byte where we expect |
|
// one, we "replace" the validated continuation bytes we've seen so far with |
|
// UTF-8 replacement characters ('\ufffd'), to match v8's UTF-8 decoding |
|
// behavior. The continuation byte check is included three times in the case |
|
// where all of the continuation bytes for a character exist in the same buffer. |
|
// It is also done this way as a slight performance increase instead of using a |
|
// loop. |
|
function utf8CheckExtraBytes(self, buf, p) { |
|
if ((buf[0] & 0xC0) !== 0x80) { |
|
self.lastNeed = 0; |
|
return '\ufffd'.repeat(p); |
|
} |
|
if (self.lastNeed > 1 && buf.length > 1) { |
|
if ((buf[1] & 0xC0) !== 0x80) { |
|
self.lastNeed = 1; |
|
return '\ufffd'.repeat(p + 1); |
|
} |
|
if (self.lastNeed > 2 && buf.length > 2) { |
|
if ((buf[2] & 0xC0) !== 0x80) { |
|
self.lastNeed = 2; |
|
return '\ufffd'.repeat(p + 2); |
|
} |
|
} |
|
} |
|
} |
|
|
|
// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. |
|
function utf8FillLast(buf) { |
|
var p = this.lastTotal - this.lastNeed; |
|
var r = utf8CheckExtraBytes(this, buf, p); |
|
if (r !== undefined) return r; |
|
if (this.lastNeed <= buf.length) { |
|
buf.copy(this.lastChar, p, 0, this.lastNeed); |
|
return this.lastChar.toString(this.encoding, 0, this.lastTotal); |
|
} |
|
buf.copy(this.lastChar, p, 0, buf.length); |
|
this.lastNeed -= buf.length; |
|
} |
|
|
|
// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a |
|
// partial character, the character's bytes are buffered until the required |
|
// number of bytes are available. |
|
function utf8Text(buf, i) { |
|
var total = utf8CheckIncomplete(this, buf, i); |
|
if (!this.lastNeed) return buf.toString('utf8', i); |
|
this.lastTotal = total; |
|
var end = buf.length - (total - this.lastNeed); |
|
buf.copy(this.lastChar, 0, end); |
|
return buf.toString('utf8', i, end); |
|
} |
|
|
|
// For UTF-8, a replacement character for each buffered byte of a (partial) |
|
// character needs to be added to the output. |
|
function utf8End(buf) { |
|
var r = buf && buf.length ? this.write(buf) : ''; |
|
if (this.lastNeed) return r + '\ufffd'.repeat(this.lastTotal - this.lastNeed); |
|
return r; |
|
} |
|
|
|
// UTF-16LE typically needs two bytes per character, but even if we have an even |
|
// number of bytes available, we need to check if we end on a leading/high |
|
// surrogate. In that case, we need to wait for the next two bytes in order to |
|
// decode the last character properly. |
|
function utf16Text(buf, i) { |
|
if ((buf.length - i) % 2 === 0) { |
|
var r = buf.toString('utf16le', i); |
|
if (r) { |
|
var c = r.charCodeAt(r.length - 1); |
|
if (c >= 0xD800 && c <= 0xDBFF) { |
|
this.lastNeed = 2; |
|
this.lastTotal = 4; |
|
this.lastChar[0] = buf[buf.length - 2]; |
|
this.lastChar[1] = buf[buf.length - 1]; |
|
return r.slice(0, -1); |
|
} |
|
} |
|
return r; |
|
} |
|
this.lastNeed = 1; |
|
this.lastTotal = 2; |
|
this.lastChar[0] = buf[buf.length - 1]; |
|
return buf.toString('utf16le', i, buf.length - 1); |
|
} |
|
|
|
// For UTF-16LE we do not explicitly append special replacement characters if we |
|
// end on a partial character; we simply let v8 handle that.
|
function utf16End(buf) { |
|
var r = buf && buf.length ? this.write(buf) : ''; |
|
if (this.lastNeed) { |
|
var end = this.lastTotal - this.lastNeed; |
|
return r + this.lastChar.toString('utf16le', 0, end); |
|
} |
|
return r; |
|
} |
|
|
|
function base64Text(buf, i) { |
|
var n = (buf.length - i) % 3; |
|
if (n === 0) return buf.toString('base64', i); |
|
this.lastNeed = 3 - n; |
|
this.lastTotal = 3; |
|
if (n === 1) { |
|
this.lastChar[0] = buf[buf.length - 1]; |
|
} else { |
|
this.lastChar[0] = buf[buf.length - 2]; |
|
this.lastChar[1] = buf[buf.length - 1]; |
|
} |
|
return buf.toString('base64', i, buf.length - n); |
|
} |
|
|
|
function base64End(buf) { |
|
var r = buf && buf.length ? this.write(buf) : ''; |
|
if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); |
|
return r; |
|
} |
|
|
|
// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) |
|
function simpleWrite(buf) { |
|
return buf.toString(this.encoding); |
|
} |
|
|
|
function simpleEnd(buf) { |
|
return buf && buf.length ? this.write(buf) : ''; |
|
} |
|
},{"safe-buffer":25}],28:[function(require,module,exports){ |
|
module.exports = remove |
|
|
|
function remove (arr, i) { |
|
if (i >= arr.length || i < 0) return |
|
var last = arr.pop() |
|
if (i < arr.length) { |
|
var tmp = arr[i] |
|
arr[i] = last |
|
return tmp |
|
} |
|
return last |
|
} |
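/*
 * Illustrative sketch: constant-time removal that swaps the last element
 * into the vacated slot, so ordering is not preserved.
 *
 *   var arr = ['a', 'b', 'c', 'd'];
 *   remove(arr, 1);   // -> 'b'
 *   arr;              // -> ['a', 'd', 'c']
 *   remove(arr, 9);   // -> undefined (out of range, array untouched)
 */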
|
|
|
},{}],29:[function(require,module,exports){ |
|
(function (global){ |
|
|
|
/** |
|
* Module exports. |
|
*/ |
|
|
|
module.exports = deprecate; |
|
|
|
/** |
|
* Mark that a method should not be used. |
|
* Returns a modified function which warns once by default. |
|
* |
|
* If `localStorage.noDeprecation = true` is set, then it is a no-op. |
|
* |
|
* If `localStorage.throwDeprecation = true` is set, then deprecated functions |
|
* will throw an Error when invoked. |
|
* |
|
* If `localStorage.traceDeprecation = true` is set, then deprecated functions |
|
* will invoke `console.trace()` instead of `console.error()`. |
|
* |
|
* @param {Function} fn - the function to deprecate |
|
* @param {String} msg - the string to print to the console when `fn` is invoked |
|
* @returns {Function} a new "deprecated" version of `fn` |
|
* @api public |
|
*/ |
|
|
|
function deprecate (fn, msg) { |
|
if (config('noDeprecation')) { |
|
return fn; |
|
} |
|
|
|
var warned = false; |
|
function deprecated() { |
|
if (!warned) { |
|
if (config('throwDeprecation')) { |
|
throw new Error(msg); |
|
} else if (config('traceDeprecation')) { |
|
console.trace(msg); |
|
} else { |
|
console.warn(msg); |
|
} |
|
warned = true; |
|
} |
|
return fn.apply(this, arguments); |
|
} |
|
|
|
return deprecated; |
|
} |
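/*
 * Illustrative sketch (`oldApi` is a hypothetical name): the wrapper logs
 * the message once via console.warn (or throws/traces when the matching
 * localStorage flags are set) and then calls through to `fn`.
 *
 *   var oldApi = deprecate(function (x) {
 *     return x * 2;
 *   }, 'oldApi() is deprecated, use newApi() instead');
 *
 *   oldApi(2);   // -> 4, warns on this first call
 *   oldApi(3);   // -> 6, silent from now on
 */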
|
|
|
/** |
|
* Checks `localStorage` for boolean values for the given `name`. |
|
* |
|
* @param {String} name |
|
* @returns {Boolean} |
|
* @api private |
|
*/ |
|
|
|
function config (name) { |
|
// accessing global.localStorage can trigger a DOMException in sandboxed iframes |
|
try { |
|
if (!global.localStorage) return false; |
|
} catch (_) { |
|
return false; |
|
} |
|
var val = global.localStorage[name]; |
|
if (null == val) return false; |
|
return String(val).toLowerCase() === 'true'; |
|
} |
|
|
|
}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) |
|
},{}],30:[function(require,module,exports){ |
|
module.exports = extend |
|
|
|
var hasOwnProperty = Object.prototype.hasOwnProperty; |
|
|
|
function extend() { |
|
var target = {} |
|
|
|
for (var i = 0; i < arguments.length; i++) { |
|
var source = arguments[i] |
|
|
|
for (var key in source) { |
|
if (hasOwnProperty.call(source, key)) { |
|
target[key] = source[key] |
|
} |
|
} |
|
} |
|
|
|
return target |
|
} |
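/*
 * Illustrative sketch: a shallow merge onto a fresh object; later arguments
 * win, and nested objects are copied by reference.
 *
 *   var defaults = { retries: 3, timeout: 1000 };
 *   var opts = extend(defaults, { timeout: 5000 });
 *   // opts     -> { retries: 3, timeout: 5000 }
 *   // defaults -> unchanged
 */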
|
|
|
},{}],31:[function(require,module,exports){ |
|
'use strict' |
|
|
|
exports.byteLength = byteLength |
|
exports.toByteArray = toByteArray |
|
exports.fromByteArray = fromByteArray |
|
|
|
var lookup = [] |
|
var revLookup = [] |
|
var Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array |
|
|
|
var code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/' |
|
for (var i = 0, len = code.length; i < len; ++i) { |
|
lookup[i] = code[i] |
|
revLookup[code.charCodeAt(i)] = i |
|
} |
|
|
|
revLookup['-'.charCodeAt(0)] = 62 |
|
revLookup['_'.charCodeAt(0)] = 63 |
|
|
|
function placeHoldersCount (b64) { |
|
var len = b64.length |
|
if (len % 4 > 0) { |
|
throw new Error('Invalid string. Length must be a multiple of 4') |
|
} |
|
|
|
// the number of equal signs (place holders) |
|
// if there are two placeholders, then the two characters before it
|
// represent one byte |
|
// if there is only one, then the three characters before it represent 2 bytes |
|
// this is just a cheap hack to not do indexOf twice |
|
return b64[len - 2] === '=' ? 2 : b64[len - 1] === '=' ? 1 : 0 |
|
} |
|
|
|
function byteLength (b64) { |
|
// decoded size is 3/4 of the base64 length, minus one byte per '=' placeholder
|
return (b64.length * 3 / 4) - placeHoldersCount(b64) |
|
} |
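/*
 * Illustrative sketch: how '=' padding maps to decoded size.
 *
 *   placeHoldersCount('Zm9v')   // -> 0
 *   placeHoldersCount('Zm8=')   // -> 1
 *   placeHoldersCount('Zg==')   // -> 2
 *   byteLength('Zm9v')          // -> 3  ('foo')
 *   byteLength('Zm8=')          // -> 2  ('fo')
 *   byteLength('Zg==')          // -> 1  ('f')
 */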
|
|
|
function toByteArray (b64) { |
|
var i, l, tmp, placeHolders, arr |
|
var len = b64.length |
|
placeHolders = placeHoldersCount(b64) |
|
|
|
arr = new Arr((len * 3 / 4) - placeHolders) |
|
|
|
// if there are placeholders, only get up to the last complete 4 chars |
|
l = placeHolders > 0 ? len - 4 : len |
|
|
|
var L = 0 |
|
|
|
for (i = 0; i < l; i += 4) { |
|
tmp = (revLookup[b64.charCodeAt(i)] << 18) | (revLookup[b64.charCodeAt(i + 1)] << 12) | (revLookup[b64.charCodeAt(i + 2)] << 6) | revLookup[b64.charCodeAt(i + 3)] |
|
arr[L++] = (tmp >> 16) & 0xFF |
|
arr[L++] = (tmp >> 8) & 0xFF |
|
arr[L++] = tmp & 0xFF |
|
} |
|
|
|
if (placeHolders === 2) { |
|
tmp = (revLookup[b64.charCodeAt(i)] << 2) | (revLookup[b64.charCodeAt(i + 1)] >> 4) |
|
arr[L++] = tmp & 0xFF |
|
} else if (placeHolders === 1) { |
|
tmp = (revLookup[b64.charCodeAt(i)] << 10) | (revLookup[b64.charCodeAt(i + 1)] << 4) | (revLookup[b64.charCodeAt(i + 2)] >> 2) |
|
arr[L++] = (tmp >> 8) & 0xFF |
|
arr[L++] = tmp & 0xFF |
|
} |
|
|
|
return arr |
|
} |
|
|
|
function tripletToBase64 (num) { |
|
return lookup[num >> 18 & 0x3F] + lookup[num >> 12 & 0x3F] + lookup[num >> 6 & 0x3F] + lookup[num & 0x3F] |
|
} |
|
|
|
function encodeChunk (uint8, start, end) { |
|
var tmp |
|
var output = [] |
|
for (var i = start; i < end; i += 3) { |
|
tmp = (uint8[i] << 16) + (uint8[i + 1] << 8) + (uint8[i + 2]) |
|
output.push(tripletToBase64(tmp)) |
|
} |
|
return output.join('') |
|
} |
|
|
|
function fromByteArray (uint8) { |
|
var tmp |
|
var len = uint8.length |
|
var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes |
|
var output = '' |
|
var parts = [] |
|
var maxChunkLength = 16383 // must be multiple of 3 |
|
|
|
// go through the array three bytes at a time; we'll deal with the trailing bytes later
|
for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) { |
|
parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength))) |
|
} |
|
|
|
// pad the end with zeros, but make sure to not forget the extra bytes |
|
if (extraBytes === 1) { |
|
tmp = uint8[len - 1] |
|
output += lookup[tmp >> 2] |
|
output += lookup[(tmp << 4) & 0x3F] |
|
output += '==' |
|
} else if (extraBytes === 2) { |
|
tmp = (uint8[len - 2] << 8) + (uint8[len - 1]) |
|
output += lookup[tmp >> 10] |
|
output += lookup[(tmp >> 4) & 0x3F] |
|
output += lookup[(tmp << 2) & 0x3F] |
|
output += '=' |
|
} |
|
|
|
parts.push(output) |
|
|
|
return parts.join('') |
|
} |
|
|
|
},{}],32:[function(require,module,exports){ |
|
|
|
},{}],33:[function(require,module,exports){ |
|
/*! |
|
* The buffer module from node.js, for the browser. |
|
* |
|
* @author Feross Aboukhadijeh <feross@feross.org> <http://feross.org> |
|
* @license MIT |
|
*/ |
|
/* eslint-disable no-proto */ |
|
|
|
'use strict' |
|
|
|
var base64 = require('base64-js') |
|
var ieee754 = require('ieee754') |
|
|
|
exports.Buffer = Buffer |
|
exports.SlowBuffer = SlowBuffer |
|
exports.INSPECT_MAX_BYTES = 50 |
|
|
|
var K_MAX_LENGTH = 0x7fffffff |
|
exports.kMaxLength = K_MAX_LENGTH |
|
|
|
/** |
|
* If `Buffer.TYPED_ARRAY_SUPPORT`: |
|
* === true Use Uint8Array implementation (fastest) |
|
* === false Print warning and recommend using `buffer` v4.x which has an Object |
|
* implementation (most compatible, even IE6) |
|
* |
|
* Browsers that support typed arrays are IE 10+, Firefox 4+, Chrome 7+, Safari 5.1+, |
|
* Opera 11.6+, iOS 4.2+. |
|
* |
|
 * We report that the browser does not support typed arrays if they are not subclassable
|
* using __proto__. Firefox 4-29 lacks support for adding new properties to `Uint8Array` |
|
* (See: https://bugzilla.mozilla.org/show_bug.cgi?id=695438). IE 10 lacks support |
|
* for __proto__ and has a buggy typed array implementation. |
|
*/ |
|
Buffer.TYPED_ARRAY_SUPPORT = typedArraySupport() |
|
|
|
if (!Buffer.TYPED_ARRAY_SUPPORT && typeof console !== 'undefined' && |
|
typeof console.error === 'function') { |
|
console.error( |
|
'This browser lacks typed array (Uint8Array) support which is required by ' + |
|
'`buffer` v5.x. Use `buffer` v4.x if you require old browser support.' |
|
) |
|
} |
|
|
|
function typedArraySupport () { |
|
// Can typed array instances be augmented?
|
try { |
|
var arr = new Uint8Array(1) |
|
arr.__proto__ = {__proto__: Uint8Array.prototype, foo: function () { return 42 }} |
|
return arr.foo() === 42 |
|
} catch (e) { |
|
return false |
|
} |
|
} |
|
|
|
function createBuffer (length) { |
|
if (length > K_MAX_LENGTH) { |
|
throw new RangeError('Invalid typed array length') |
|
} |
|
// Return an augmented `Uint8Array` instance |
|
var buf = new Uint8Array(length) |
|
buf.__proto__ = Buffer.prototype |
|
return buf |
|
} |
|
|
|
/** |
|
* The Buffer constructor returns instances of `Uint8Array` that have their |
|
* prototype changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of |
|
* `Uint8Array`, so the returned instances will have all the node `Buffer` methods |
|
* and the `Uint8Array` methods. Square bracket notation works as expected -- it |
|
* returns a single octet. |
|
* |
|
* The `Uint8Array` prototype remains unmodified. |
|
*/ |
|
|
|
function Buffer (arg, encodingOrOffset, length) { |
|
// Common case. |
|
if (typeof arg === 'number') { |
|
if (typeof encodingOrOffset === 'string') { |
|
throw new Error( |
|
'If encoding is specified then the first argument must be a string' |
|
) |
|
} |
|
return allocUnsafe(arg) |
|
} |
|
return from(arg, encodingOrOffset, length) |
|
} |
|
|
|
// Fix subarray() in ES2016. See: https://github.com/feross/buffer/pull/97 |
|
if (typeof Symbol !== 'undefined' && Symbol.species && |
|
Buffer[Symbol.species] === Buffer) { |
|
Object.defineProperty(Buffer, Symbol.species, { |
|
value: null, |
|
configurable: true, |
|
enumerable: false, |
|
writable: false |
|
}) |
|
} |
|
|
|
Buffer.poolSize = 8192 // not used by this implementation |
|
|
|
function from (value, encodingOrOffset, length) { |
|
if (typeof value === 'number') { |
|
throw new TypeError('"value" argument must not be a number') |
|
} |
|
|
|
if (isArrayBuffer(value)) { |
|
return fromArrayBuffer(value, encodingOrOffset, length) |
|
} |
|
|
|
if (typeof value === 'string') { |
|
return fromString(value, encodingOrOffset) |
|
} |
|
|
|
return fromObject(value) |
|
} |
|
|
|
/** |
|
* Functionally equivalent to Buffer(arg, encoding) but throws a TypeError |
|
* if value is a number. |
|
* Buffer.from(str[, encoding]) |
|
* Buffer.from(array) |
|
* Buffer.from(buffer) |
|
* Buffer.from(arrayBuffer[, byteOffset[, length]]) |
|
**/ |
|
Buffer.from = function (value, encodingOrOffset, length) { |
|
return from(value, encodingOrOffset, length) |
|
} |
|
|
|
// Note: Change prototype *after* Buffer.from is defined to workaround Chrome bug: |
|
// https://github.com/feross/buffer/pull/148 |
|
Buffer.prototype.__proto__ = Uint8Array.prototype |
|
Buffer.__proto__ = Uint8Array |
|
|
|
function assertSize (size) { |
|
if (typeof size !== 'number') { |
|
throw new TypeError('"size" argument must be a number') |
|
} else if (size < 0) { |
|
throw new RangeError('"size" argument must not be negative') |
|
} |
|
} |
|
|
|
function alloc (size, fill, encoding) { |
|
assertSize(size) |
|
if (size <= 0) { |
|
return createBuffer(size) |
|
} |
|
if (fill !== undefined) { |
|
// Only pay attention to encoding if it's a string. This |
|
// prevents accidentally sending in a number that would |
|
// be interpreted as a start offset.
|
return typeof encoding === 'string' |
|
? createBuffer(size).fill(fill, encoding) |
|
: createBuffer(size).fill(fill) |
|
} |
|
return createBuffer(size) |
|
} |
|
|
|
/** |
|
* Creates a new filled Buffer instance. |
|
* alloc(size[, fill[, encoding]]) |
|
**/ |
|
Buffer.alloc = function (size, fill, encoding) { |
|
return alloc(size, fill, encoding) |
|
} |
|
|
|
function allocUnsafe (size) { |
|
assertSize(size) |
|
return createBuffer(size < 0 ? 0 : checked(size) | 0) |
|
} |
|
|
|
/** |
|
* Equivalent to Buffer(num), by default creates a non-zero-filled Buffer instance. |
|
* */ |
|
Buffer.allocUnsafe = function (size) { |
|
return allocUnsafe(size) |
|
} |
|
/** |
|
* Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance. |
|
*/ |
|
Buffer.allocUnsafeSlow = function (size) { |
|
return allocUnsafe(size) |
|
} |
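/*
 * Illustrative sketch: the three allocation helpers defined above.
 *
 *   Buffer.alloc(4)         // <Buffer 00 00 00 00>  (zero-filled)
 *   Buffer.alloc(4, 'a')    // <Buffer 61 61 61 61>  (filled with 'a')
 *   Buffer.allocUnsafe(4)   // contents not guaranteed by the API
 *                           // (this browser build returns a fresh Uint8Array)
 *   Buffer.alloc('4')       // throws TypeError: "size" argument must be a number
 */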
|
|
|
function fromString (string, encoding) { |
|
if (typeof encoding !== 'string' || encoding === '') { |
|
encoding = 'utf8' |
|
} |
|
|
|
if (!Buffer.isEncoding(encoding)) { |
|
throw new TypeError('"encoding" must be a valid string encoding') |
|
} |
|
|
|
var length = byteLength(string, encoding) | 0 |
|
var buf = createBuffer(length) |
|
|
|
var actual = buf.write(string, encoding) |
|
|
|
if (actual !== length) { |
|
// Writing a hex string, for example, that contains invalid characters will |
|
// cause everything after the first invalid character to be ignored. (e.g. |
|
// 'abxxcd' will be treated as 'ab') |
|
buf = buf.slice(0, actual) |
|
} |
|
|
|
return buf |
|
} |
|
|
|
function fromArrayLike (array) { |
|
var length = array.length < 0 ? 0 : checked(array.length) | 0 |
|
var buf = createBuffer(length) |
|
for (var i = 0; i < length; i += 1) { |
|
buf[i] = array[i] & 255 |
|
} |
|
return buf |
|
} |
|
|
|
function fromArrayBuffer (array, byteOffset, length) { |
|
if (byteOffset < 0 || array.byteLength < byteOffset) { |
|
throw new RangeError('\'offset\' is out of bounds') |
|
} |
|
|
|
if (array.byteLength < byteOffset + (length || 0)) { |
|
throw new RangeError('\'length\' is out of bounds') |
|
} |
|
|
|
var buf |
|
if (byteOffset === undefined && length === undefined) { |
|
buf = new Uint8Array(array) |
|
} else if (length === undefined) { |
|
buf = new Uint8Array(array, byteOffset) |
|
} else { |
|
buf = new Uint8Array(array, byteOffset, length) |
|
} |
|
|
|
// Return an augmented `Uint8Array` instance |
|
buf.__proto__ = Buffer.prototype |
|
return buf |
|
} |
|
|
|
function fromObject (obj) { |
|
if (Buffer.isBuffer(obj)) { |
|
var len = checked(obj.length) | 0 |
|
var buf = createBuffer(len) |
|
|
|
if (buf.length === 0) { |
|
return buf |
|
} |
|
|
|
obj.copy(buf, 0, 0, len) |
|
return buf |
|
} |
|
|
|
if (obj) { |
|
if (isArrayBufferView(obj) || 'length' in obj) { |
|
if (typeof obj.length !== 'number' || numberIsNaN(obj.length)) { |
|
return createBuffer(0) |
|
} |
|
return fromArrayLike(obj) |
|
} |
|
|
|
if (obj.type === 'Buffer' && Array.isArray(obj.data)) { |
|
return fromArrayLike(obj.data) |
|
} |
|
} |
|
|
|
throw new TypeError('First argument must be a string, Buffer, ArrayBuffer, Array, or array-like object.') |
|
} |
|
|
|
function checked (length) { |
|
// Note: cannot use `length < K_MAX_LENGTH` here because that fails when |
|
// length is NaN (which is otherwise coerced to zero.) |
|
if (length >= K_MAX_LENGTH) { |
|
throw new RangeError('Attempt to allocate Buffer larger than maximum ' + |
|
'size: 0x' + K_MAX_LENGTH.toString(16) + ' bytes') |
|
} |
|
return length | 0 |
|
} |
|
|
|
function SlowBuffer (length) { |
|
if (+length != length) { // eslint-disable-line eqeqeq |
|
length = 0 |
|
} |
|
return Buffer.alloc(+length) |
|
} |
|
|
|
Buffer.isBuffer = function isBuffer (b) { |
|
return b != null && b._isBuffer === true |
|
} |
|
|
|
Buffer.compare = function compare (a, b) { |
|
if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) { |
|
throw new TypeError('Arguments must be Buffers') |
|
} |
|
|
|
if (a === b) return 0 |
|
|
|
var x = a.length |
|
var y = b.length |
|
|
|
for (var i = 0, len = Math.min(x, y); i < len; ++i) { |
|
if (a[i] !== b[i]) { |
|
x = a[i] |
|
y = b[i] |
|
break |
|
} |
|
} |
|
|
|
if (x < y) return -1 |
|
if (y < x) return 1 |
|
return 0 |
|
} |
|
|
|
Buffer.isEncoding = function isEncoding (encoding) { |
|
switch (String(encoding).toLowerCase()) { |
|
case 'hex': |
|
case 'utf8': |
|
case 'utf-8': |
|
case 'ascii': |
|
case 'latin1': |
|
case 'binary': |
|
case 'base64': |
|
case 'ucs2': |
|
case 'ucs-2': |
|
case 'utf16le': |
|
case 'utf-16le': |
|
return true |
|
default: |
|
return false |
|
} |
|
} |
|
|
|
Buffer.concat = function concat (list, length) { |
|
if (!Array.isArray(list)) { |
|
throw new TypeError('"list" argument must be an Array of Buffers') |
|
} |
|
|
|
if (list.length === 0) { |
|
return Buffer.alloc(0) |
|
} |
|
|
|
var i |
|
if (length === undefined) { |
|
length = 0 |
|
for (i = 0; i < list.length; ++i) { |
|
length += list[i].length |
|
} |
|
} |
|
|
|
var buffer = Buffer.allocUnsafe(length) |
|
var pos = 0 |
|
for (i = 0; i < list.length; ++i) { |
|
var buf = list[i] |
|
if (!Buffer.isBuffer(buf)) { |
|
throw new TypeError('"list" argument must be an Array of Buffers') |
|
} |
|
buf.copy(buffer, pos) |
|
pos += buf.length |
|
} |
|
return buffer |
|
} |
|
|
|
function byteLength (string, encoding) { |
|
if (Buffer.isBuffer(string)) { |
|
return string.length |
|
} |
|
if (isArrayBufferView(string) || isArrayBuffer(string)) { |
|
return string.byteLength |
|
} |
|
if (typeof string !== 'string') { |
|
string = '' + string |
|
} |
|
|
|
var len = string.length |
|
if (len === 0) return 0 |
|
|
|
// Use a for loop to avoid recursion |
|
var loweredCase = false |
|
for (;;) { |
|
switch (encoding) { |
|
case 'ascii': |
|
case 'latin1': |
|
case 'binary': |
|
return len |
|
case 'utf8': |
|
case 'utf-8': |
|
case undefined: |
|
return utf8ToBytes(string).length |
|
case 'ucs2': |
|
case 'ucs-2': |
|
case 'utf16le': |
|
case 'utf-16le': |
|
return len * 2 |
|
case 'hex': |
|
return len >>> 1 |
|
case 'base64': |
|
return base64ToBytes(string).length |
|
default: |
|
if (loweredCase) return utf8ToBytes(string).length // assume utf8 |
|
encoding = ('' + encoding).toLowerCase() |
|
loweredCase = true |
|
} |
|
} |
|
} |
|
Buffer.byteLength = byteLength |
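/*
 * Illustrative sketch: encoded byte length is not the same as string length.
 *
 *   Buffer.byteLength('abc')               // -> 3
 *   Buffer.byteLength('€', 'utf8')         // -> 3  (one char, three UTF-8 bytes)
 *   Buffer.byteLength('abc', 'utf16le')    // -> 6  (len * 2)
 *   Buffer.byteLength('deadbeef', 'hex')   // -> 4  (len >>> 1)
 */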
|
|
|
function slowToString (encoding, start, end) { |
|
var loweredCase = false |
|
|
|
// No need to verify that "this.length <= MAX_UINT32" since it's a read-only |
|
// property of a typed array. |
|
|
|
// This behaves neither like String nor Uint8Array in that we set start/end |
|
// to their upper/lower bounds if the value passed is out of range. |
|
// undefined is handled specially as per ECMA-262 6th Edition, |
|
// Section 13.3.3.7 Runtime Semantics: KeyedBindingInitialization. |
|
if (start === undefined || start < 0) { |
|
start = 0 |
|
} |
|
// Return early if start > this.length. Done here to prevent potential uint32 |
|
// coercion fail below. |
|
if (start > this.length) { |
|
return '' |
|
} |
|
|
|
if (end === undefined || end > this.length) { |
|
end = this.length |
|
} |
|
|
|
if (end <= 0) { |
|
return '' |
|
} |
|
|
|
// Force coercion to uint32. This will also coerce falsy/NaN values to 0.
|
end >>>= 0 |
|
start >>>= 0 |
|
|
|
if (end <= start) { |
|
return '' |
|
} |
|
|
|
if (!encoding) encoding = 'utf8' |
|
|
|
while (true) { |
|
switch (encoding) { |
|
case 'hex': |
|
return hexSlice(this, start, end) |
|
|
|
case 'utf8': |
|
case 'utf-8': |
|
return utf8Slice(this, start, end) |
|
|
|
case 'ascii': |
|
return asciiSlice(this, start, end) |
|
|
|
case 'latin1': |
|
case 'binary': |
|
return latin1Slice(this, start, end) |
|
|
|
case 'base64': |
|
return base64Slice(this, start, end) |
|
|
|
case 'ucs2': |
|
case 'ucs-2': |
|
case 'utf16le': |
|
case 'utf-16le': |
|
return utf16leSlice(this, start, end) |
|
|
|
default: |
|
if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding) |
|
encoding = (encoding + '').toLowerCase() |
|
loweredCase = true |
|
} |
|
} |
|
} |
|
|
|
// This property is used by `Buffer.isBuffer` (and the `is-buffer` npm package) |
|
// to detect a Buffer instance. It's not possible to use `instanceof Buffer` |
|
// reliably in a browserify context because there could be multiple different |
|
// copies of the 'buffer' package in use. This method works even for Buffer |
|
// instances that were created from another copy of the `buffer` package. |
|
// See: https://github.com/feross/buffer/issues/154 |
|
Buffer.prototype._isBuffer = true |
|
|
|
function swap (b, n, m) { |
|
var i = b[n] |
|
b[n] = b[m] |
|
b[m] = i |
|
} |
|
|
|
Buffer.prototype.swap16 = function swap16 () { |
|
var len = this.length |
|
if (len % 2 !== 0) { |
|
throw new RangeError('Buffer size must be a multiple of 16-bits') |
|
} |
|
for (var i = 0; i < len; i += 2) { |
|
swap(this, i, i + 1) |
|
} |
|
return this |
|
} |
|
|
|
Buffer.prototype.swap32 = function swap32 () { |
|
var len = this.length |
|
if (len % 4 !== 0) { |
|
throw new RangeError('Buffer size must be a multiple of 32-bits') |
|
} |
|
for (var i = 0; i < len; i += 4) { |
|
swap(this, i, i + 3) |
|
swap(this, i + 1, i + 2) |
|
} |
|
return this |
|
} |
|
|
|
Buffer.prototype.swap64 = function swap64 () { |
|
var len = this.length |
|
if (len % 8 !== 0) { |
|
throw new RangeError('Buffer size must be a multiple of 64-bits') |
|
} |
|
for (var i = 0; i < len; i += 8) { |
|
swap(this, i, i + 7) |
|
swap(this, i + 1, i + 6) |
|
swap(this, i + 2, i + 5) |
|
swap(this, i + 3, i + 4) |
|
} |
|
return this |
|
} |
|
|
|
Buffer.prototype.toString = function toString () { |
|
var length = this.length |
|
if (length === 0) return '' |
|
if (arguments.length === 0) return utf8Slice(this, 0, length) |
|
return slowToString.apply(this, arguments) |
|
} |
|
|
|
Buffer.prototype.equals = function equals (b) { |
|
if (!Buffer.isBuffer(b)) throw new TypeError('Argument must be a Buffer') |
|
if (this === b) return true |
|
return Buffer.compare(this, b) === 0 |
|
} |
|
|
|
Buffer.prototype.inspect = function inspect () { |
|
var str = '' |
|
var max = exports.INSPECT_MAX_BYTES |
|
if (this.length > 0) { |
|
str = this.toString('hex', 0, max).match(/.{2}/g).join(' ') |
|
if (this.length > max) str += ' ... ' |
|
} |
|
return '<Buffer ' + str + '>' |
|
} |
|
|
|
Buffer.prototype.compare = function compare (target, start, end, thisStart, thisEnd) { |
|
if (!Buffer.isBuffer(target)) { |
|
throw new TypeError('Argument must be a Buffer') |
|
} |
|
|
|
if (start === undefined) { |
|
start = 0 |
|
} |
|
if (end === undefined) { |
|
end = target ? target.length : 0 |
|
} |
|
if (thisStart === undefined) { |
|
thisStart = 0 |
|
} |
|
if (thisEnd === undefined) { |
|
thisEnd = this.length |
|
} |
|
|
|
if (start < 0 || end > target.length || thisStart < 0 || thisEnd > this.length) { |
|
throw new RangeError('out of range index') |
|
} |
|
|
|
if (thisStart >= thisEnd && start >= end) { |
|
return 0 |
|
} |
|
if (thisStart >= thisEnd) { |
|
return -1 |
|
} |
|
if (start >= end) { |
|
return 1 |
|
} |
|
|
|
start >>>= 0 |
|
end >>>= 0 |
|
thisStart >>>= 0 |
|
thisEnd >>>= 0 |
|
|
|
if (this === target) return 0 |
|
|
|
var x = thisEnd - thisStart |
|
var y = end - start |
|
var len = Math.min(x, y) |
|
|
|
var thisCopy = this.slice(thisStart, thisEnd) |
|
var targetCopy = target.slice(start, end) |
|
|
|
for (var i = 0; i < len; ++i) { |
|
if (thisCopy[i] !== targetCopy[i]) { |
|
x = thisCopy[i] |
|
y = targetCopy[i] |
|
break |
|
} |
|
} |
|
|
|
if (x < y) return -1 |
|
if (y < x) return 1 |
|
return 0 |
|
} |
|
|
|
// Finds either the first index of `val` in `buffer` at offset >= `byteOffset`, |
|
// OR the last index of `val` in `buffer` at offset <= `byteOffset`. |
|
// |
|
// Arguments: |
|
// - buffer - a Buffer to search |
|
// - val - a string, Buffer, or number |
|
// - byteOffset - an index into `buffer`; will be clamped to an int32 |
|
// - encoding - an optional encoding, relevant if val is a string
|
// - dir - true for indexOf, false for lastIndexOf |
|
function bidirectionalIndexOf (buffer, val, byteOffset, encoding, dir) { |
|
// Empty buffer means no match |
|
if (buffer.length === 0) return -1 |
|
|
|
// Normalize byteOffset |
|
if (typeof byteOffset === 'string') { |
|
encoding = byteOffset |
|
byteOffset = 0 |
|
} else if (byteOffset > 0x7fffffff) { |
|
byteOffset = 0x7fffffff |
|
} else if (byteOffset < -0x80000000) { |
|
byteOffset = -0x80000000 |
|
} |
|
byteOffset = +byteOffset // Coerce to Number. |
|
if (numberIsNaN(byteOffset)) { |
|
// byteOffset: if it's undefined, null, NaN, "foo", etc., search the whole buffer
|
byteOffset = dir ? 0 : (buffer.length - 1) |
|
} |
|
|
|
// Normalize byteOffset: negative offsets start from the end of the buffer |
|
if (byteOffset < 0) byteOffset = buffer.length + byteOffset |
|
if (byteOffset >= buffer.length) { |
|
if (dir) return -1 |
|
else byteOffset = buffer.length - 1 |
|
} else if (byteOffset < 0) { |
|
if (dir) byteOffset = 0 |
|
else return -1 |
|
} |
|
|
|
// Normalize val |
|
if (typeof val === 'string') { |
|
val = Buffer.from(val, encoding) |
|
} |
|
|
|
// Finally, search either indexOf (if dir is true) or lastIndexOf |
|
if (Buffer.isBuffer(val)) { |
|
// Special case: looking for empty string/buffer always fails |
|
if (val.length === 0) { |
|
return -1 |
|
} |
|
return arrayIndexOf(buffer, val, byteOffset, encoding, dir) |
|
} else if (typeof val === 'number') { |
|
val = val & 0xFF // Search for a byte value [0-255] |
|
if (typeof Uint8Array.prototype.indexOf === 'function') { |
|
if (dir) { |
|
return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset) |
|
} else { |
|
return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset) |
|
} |
|
} |
|
return arrayIndexOf(buffer, [ val ], byteOffset, encoding, dir) |
|
} |
|
|
|
throw new TypeError('val must be string, number or Buffer') |
|
} |
|
|
|
function arrayIndexOf (arr, val, byteOffset, encoding, dir) { |
|
var indexSize = 1 |
|
var arrLength = arr.length |
|
var valLength = val.length |
|
|
|
if (encoding !== undefined) { |
|
encoding = String(encoding).toLowerCase() |
|
if (encoding === 'ucs2' || encoding === 'ucs-2' || |
|
encoding === 'utf16le' || encoding === 'utf-16le') { |
|
if (arr.length < 2 || val.length < 2) { |
|
return -1 |
|
} |
|
indexSize = 2 |
|
arrLength /= 2 |
|
valLength /= 2 |
|
byteOffset /= 2 |
|
} |
|
} |
|
|
|
function read (buf, i) { |
|
if (indexSize === 1) { |
|
return buf[i] |
|
} else { |
|
return buf.readUInt16BE(i * indexSize) |
|
} |
|
} |
|
|
|
var i |
|
if (dir) { |
|
var foundIndex = -1 |
|
for (i = byteOffset; i < arrLength; i++) { |
|
if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) { |
|
if (foundIndex === -1) foundIndex = i |
|
if (i - foundIndex + 1 === valLength) return foundIndex * indexSize |
|
} else { |
|
if (foundIndex !== -1) i -= i - foundIndex |
|
foundIndex = -1 |
|
} |
|
} |
|
} else { |
|
if (byteOffset + valLength > arrLength) byteOffset = arrLength - valLength |
|
for (i = byteOffset; i >= 0; i--) { |
|
var found = true |
|
for (var j = 0; j < valLength; j++) { |
|
if (read(arr, i + j) !== read(val, j)) { |
|
found = false |
|
break |
|
} |
|
} |
|
if (found) return i |
|
} |
|
} |
|
|
|
return -1 |
|
} |
|
|
|
Buffer.prototype.includes = function includes (val, byteOffset, encoding) { |
|
return this.indexOf(val, byteOffset, encoding) !== -1 |
|
} |
|
|
|
Buffer.prototype.indexOf = function indexOf (val, byteOffset, encoding) { |
|
return bidirectionalIndexOf(this, val, byteOffset, encoding, true) |
|
} |
|
|
|
Buffer.prototype.lastIndexOf = function lastIndexOf (val, byteOffset, encoding) { |
|
return bidirectionalIndexOf(this, val, byteOffset, encoding, false) |
|
} |
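/*
 * Illustrative sketch: `val` may be a string (converted with `encoding`),
 * a Buffer, or a byte value 0-255; indexOf searches forward from
 * byteOffset, lastIndexOf searches backward.
 *
 *   var buf = Buffer.from('this is a buffer');
 *   buf.indexOf('is')                    // -> 2
 *   buf.indexOf(Buffer.from('a buf'))    // -> 8
 *   buf.indexOf(0x61)                    // -> 8  (byte value of 'a')
 *   buf.lastIndexOf('is')                // -> 5
 *   buf.includes('buffer')               // -> true
 */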
|
|
|
function hexWrite (buf, string, offset, length) { |
|
offset = Number(offset) || 0 |
|
var remaining = buf.length - offset |
|
if (!length) { |
|
length = remaining |
|
} else { |
|
length = Number(length) |
|
if (length > remaining) { |
|
length = remaining |
|
} |
|
} |
|
|
|
// must be an even number of digits |
|
var strLen = string.length |
|
if (strLen % 2 !== 0) throw new TypeError('Invalid hex string') |
|
|
|
if (length > strLen / 2) { |
|
length = strLen / 2 |
|
} |
|
for (var i = 0; i < length; ++i) { |
|
var parsed = parseInt(string.substr(i * 2, 2), 16) |
|
if (numberIsNaN(parsed)) return i |
|
buf[offset + i] = parsed |
|
} |
|
return i |
|
} |
|
|
|
function utf8Write (buf, string, offset, length) { |
|
return blitBuffer(utf8ToBytes(string, buf.length - offset), buf, offset, length) |
|
} |
|
|
|
function asciiWrite (buf, string, offset, length) { |
|
return blitBuffer(asciiToBytes(string), buf, offset, length) |
|
} |
|
|
|
function latin1Write (buf, string, offset, length) { |
|
return asciiWrite(buf, string, offset, length) |
|
} |
|
|
|
function base64Write (buf, string, offset, length) { |
|
return blitBuffer(base64ToBytes(string), buf, offset, length) |
|
} |
|
|
|
function ucs2Write (buf, string, offset, length) { |
|
return blitBuffer(utf16leToBytes(string, buf.length - offset), buf, offset, length) |
|
} |
|
|
|
Buffer.prototype.write = function write (string, offset, length, encoding) { |
|
// Buffer#write(string) |
|
if (offset === undefined) { |
|
encoding = 'utf8' |
|
length = this.length |
|
offset = 0 |
|
// Buffer#write(string, encoding) |
|
} else if (length === undefined && typeof offset === 'string') { |
|
encoding = offset |
|
length = this.length |
|
offset = 0 |
|
// Buffer#write(string, offset[, length][, encoding]) |
|
} else if (isFinite(offset)) { |
|
offset = offset >>> 0 |
|
if (isFinite(length)) { |
|
length = length >>> 0 |
|
if (encoding === undefined) encoding = 'utf8' |
|
} else { |
|
encoding = length |
|
length = undefined |
|
} |
|
} else { |
|
throw new Error( |
|
'Buffer.write(string, encoding, offset[, length]) is no longer supported' |
|
) |
|
} |
|
|
|
var remaining = this.length - offset |
|
if (length === undefined || length > remaining) length = remaining |
|
|
|
if ((string.length > 0 && (length < 0 || offset < 0)) || offset > this.length) { |
|
throw new RangeError('Attempt to write outside buffer bounds') |
|
} |
|
|
|
if (!encoding) encoding = 'utf8' |
|
|
|
var loweredCase = false |
|
for (;;) { |
|
switch (encoding) { |
|
case 'hex': |
|
return hexWrite(this, string, offset, length) |
|
|
|
case 'utf8': |
|
case 'utf-8': |
|
return utf8Write(this, string, offset, length) |
|
|
|
case 'ascii': |
|
return asciiWrite(this, string, offset, length) |
|
|
|
case 'latin1': |
|
case 'binary': |
|
return latin1Write(this, string, offset, length) |
|
|
|
case 'base64': |
|
// Warning: maxLength not taken into account in base64Write |
|
return base64Write(this, string, offset, length) |
|
|
|
case 'ucs2': |
|
case 'ucs-2': |
|
case 'utf16le': |
|
case 'utf-16le': |
|
return ucs2Write(this, string, offset, length) |
|
|
|
default: |
|
if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding) |
|
encoding = ('' + encoding).toLowerCase() |
|
loweredCase = true |
|
} |
|
} |
|
} |
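// Usage sketch for Buffer#write (assumes Buffer.alloc is defined earlier in
// this module). The return value is the number of bytes actually written,
// which can be smaller than string.length for multi-byte encodings:
//   var b = Buffer.alloc(8)
//   b.write('abc')             // => 3, UTF-8 bytes at offset 0
//   b.write('ffff', 4, 'hex')  // => 2, bytes 0xff 0xff written at offset 4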
|
|
|
Buffer.prototype.toJSON = function toJSON () { |
|
return { |
|
type: 'Buffer', |
|
data: Array.prototype.slice.call(this._arr || this, 0) |
|
} |
|
} |
|
|
|
function base64Slice (buf, start, end) { |
|
if (start === 0 && end === buf.length) { |
|
return base64.fromByteArray(buf) |
|
} else { |
|
return base64.fromByteArray(buf.slice(start, end)) |
|
} |
|
} |
|
|
|
function utf8Slice (buf, start, end) { |
|
end = Math.min(buf.length, end) |
|
var res = [] |
|
|
|
var i = start |
|
while (i < end) { |
|
var firstByte = buf[i] |
|
var codePoint = null |
|
var bytesPerSequence = (firstByte > 0xEF) ? 4 |
|
: (firstByte > 0xDF) ? 3 |
|
: (firstByte > 0xBF) ? 2 |
|
: 1 |
|
|
|
if (i + bytesPerSequence <= end) { |
|
var secondByte, thirdByte, fourthByte, tempCodePoint |
|
|
|
switch (bytesPerSequence) { |
|
case 1: |
|
if (firstByte < 0x80) { |
|
codePoint = firstByte |
|
} |
|
break |
|
case 2: |
|
secondByte = buf[i + 1] |
|
if ((secondByte & 0xC0) === 0x80) { |
|
tempCodePoint = (firstByte & 0x1F) << 0x6 | (secondByte & 0x3F) |
|
if (tempCodePoint > 0x7F) { |
|
codePoint = tempCodePoint |
|
} |
|
} |
|
break |
|
case 3: |
|
secondByte = buf[i + 1] |
|
thirdByte = buf[i + 2] |
|
if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80) { |
|
tempCodePoint = (firstByte & 0xF) << 0xC | (secondByte & 0x3F) << 0x6 | (thirdByte & 0x3F) |
|
if (tempCodePoint > 0x7FF && (tempCodePoint < 0xD800 || tempCodePoint > 0xDFFF)) { |
|
codePoint = tempCodePoint |
|
} |
|
} |
|
break |
|
case 4: |
|
secondByte = buf[i + 1] |
|
thirdByte = buf[i + 2] |
|
fourthByte = buf[i + 3] |
|
if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80 && (fourthByte & 0xC0) === 0x80) { |
|
tempCodePoint = (firstByte & 0xF) << 0x12 | (secondByte & 0x3F) << 0xC | (thirdByte & 0x3F) << 0x6 | (fourthByte & 0x3F) |
|
if (tempCodePoint > 0xFFFF && tempCodePoint < 0x110000) { |
|
codePoint = tempCodePoint |
|
} |
|
} |
|
} |
|
} |
|
|
|
if (codePoint === null) { |
|
// we did not generate a valid codePoint so insert a |
|
// replacement char (U+FFFD) and advance only 1 byte |
|
codePoint = 0xFFFD |
|
bytesPerSequence = 1 |
|
} else if (codePoint > 0xFFFF) { |
|
// encode to utf16 (surrogate pair dance) |
|
codePoint -= 0x10000 |
|
res.push(codePoint >>> 10 & 0x3FF | 0xD800) |
|
codePoint = 0xDC00 | codePoint & 0x3FF |
|
} |
|
|
|
res.push(codePoint) |
|
i += bytesPerSequence |
|
} |
|
|
|
return decodeCodePointsArray(res) |
|
} |
|
|
|
// Based on http://stackoverflow.com/a/22747272/680742, the browser with |
|
// the lowest limit is Chrome, with 0x10000 args. |
|
// We go 1 magnitude less, for safety |
|
var MAX_ARGUMENTS_LENGTH = 0x1000 |
|
|
|
function decodeCodePointsArray (codePoints) { |
|
var len = codePoints.length |
|
if (len <= MAX_ARGUMENTS_LENGTH) { |
|
return String.fromCharCode.apply(String, codePoints) // avoid extra slice() |
|
} |
|
|
|
// Decode in chunks to avoid "call stack size exceeded". |
|
var res = '' |
|
var i = 0 |
|
while (i < len) { |
|
res += String.fromCharCode.apply( |
|
String, |
|
codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH) |
|
) |
|
} |
|
return res |
|
} |
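// Note: String.fromCharCode.apply is limited by the engine's maximum argument
// count, so long results are assembled in MAX_ARGUMENTS_LENGTH-sized chunks.
// The fast path is roughly:
//   String.fromCharCode.apply(String, [0x68, 0x69])   // => 'hi'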
|
|
|
function asciiSlice (buf, start, end) { |
|
var ret = '' |
|
end = Math.min(buf.length, end) |
|
|
|
for (var i = start; i < end; ++i) { |
|
ret += String.fromCharCode(buf[i] & 0x7F) |
|
} |
|
return ret |
|
} |
|
|
|
function latin1Slice (buf, start, end) { |
|
var ret = '' |
|
end = Math.min(buf.length, end) |
|
|
|
for (var i = start; i < end; ++i) { |
|
ret += String.fromCharCode(buf[i]) |
|
} |
|
return ret |
|
} |
|
|
|
function hexSlice (buf, start, end) { |
|
var len = buf.length |
|
|
|
if (!start || start < 0) start = 0 |
|
if (!end || end < 0 || end > len) end = len |
|
|
|
var out = '' |
|
for (var i = start; i < end; ++i) { |
|
out += toHex(buf[i]) |
|
} |
|
return out |
|
} |
|
|
|
function utf16leSlice (buf, start, end) { |
|
var bytes = buf.slice(start, end) |
|
var res = '' |
|
for (var i = 0; i < bytes.length; i += 2) { |
|
res += String.fromCharCode(bytes[i] + (bytes[i + 1] * 256)) |
|
} |
|
return res |
|
} |
|
|
|
Buffer.prototype.slice = function slice (start, end) { |
|
var len = this.length |
|
start = ~~start |
|
end = end === undefined ? len : ~~end |
|
|
|
if (start < 0) { |
|
start += len |
|
if (start < 0) start = 0 |
|
} else if (start > len) { |
|
start = len |
|
} |
|
|
|
if (end < 0) { |
|
end += len |
|
if (end < 0) end = 0 |
|
} else if (end > len) { |
|
end = len |
|
} |
|
|
|
if (end < start) end = start |
|
|
|
var newBuf = this.subarray(start, end) |
|
// Return an augmented `Uint8Array` instance |
|
newBuf.__proto__ = Buffer.prototype |
|
return newBuf |
|
} |
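// Usage sketch: slice() returns a view over the same memory, not a copy
// (assumes Buffer.from is defined earlier in this module):
//   var b = Buffer.from([1, 2, 3, 4])
//   var s = b.slice(1, 3)   // <Buffer 02 03>
//   s[0] = 9
//   b[1]                    // => 9, because s shares b's underlying storage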
|
|
|
/* |
|
* Need to make sure that buffer isn't trying to write out of bounds. |
|
*/ |
|
function checkOffset (offset, ext, length) { |
|
if ((offset % 1) !== 0 || offset < 0) throw new RangeError('offset is not uint') |
|
if (offset + ext > length) throw new RangeError('Trying to access beyond buffer length') |
|
} |
|
|
|
Buffer.prototype.readUIntLE = function readUIntLE (offset, byteLength, noAssert) { |
|
offset = offset >>> 0 |
|
byteLength = byteLength >>> 0 |
|
if (!noAssert) checkOffset(offset, byteLength, this.length) |
|
|
|
var val = this[offset] |
|
var mul = 1 |
|
var i = 0 |
|
while (++i < byteLength && (mul *= 0x100)) { |
|
val += this[offset + i] * mul |
|
} |
|
|
|
return val |
|
} |
|
|
|
Buffer.prototype.readUIntBE = function readUIntBE (offset, byteLength, noAssert) { |
|
offset = offset >>> 0 |
|
byteLength = byteLength >>> 0 |
|
if (!noAssert) { |
|
checkOffset(offset, byteLength, this.length) |
|
} |
|
|
|
var val = this[offset + --byteLength] |
|
var mul = 1 |
|
while (byteLength > 0 && (mul *= 0x100)) { |
|
val += this[offset + --byteLength] * mul |
|
} |
|
|
|
return val |
|
} |
|
|
|
Buffer.prototype.readUInt8 = function readUInt8 (offset, noAssert) { |
|
offset = offset >>> 0 |
|
if (!noAssert) checkOffset(offset, 1, this.length) |
|
return this[offset] |
|
} |
|
|
|
Buffer.prototype.readUInt16LE = function readUInt16LE (offset, noAssert) { |
|
offset = offset >>> 0 |
|
if (!noAssert) checkOffset(offset, 2, this.length) |
|
return this[offset] | (this[offset + 1] << 8) |
|
} |
|
|
|
Buffer.prototype.readUInt16BE = function readUInt16BE (offset, noAssert) { |
|
offset = offset >>> 0 |
|
if (!noAssert) checkOffset(offset, 2, this.length) |
|
return (this[offset] << 8) | this[offset + 1] |
|
} |
|
|
|
Buffer.prototype.readUInt32LE = function readUInt32LE (offset, noAssert) { |
|
offset = offset >>> 0 |
|
if (!noAssert) checkOffset(offset, 4, this.length) |
|
|
|
return ((this[offset]) | |
|
(this[offset + 1] << 8) | |
|
(this[offset + 2] << 16)) + |
|
(this[offset + 3] * 0x1000000) |
|
} |
|
|
|
Buffer.prototype.readUInt32BE = function readUInt32BE (offset, noAssert) { |
|
offset = offset >>> 0 |
|
if (!noAssert) checkOffset(offset, 4, this.length) |
|
|
|
return (this[offset] * 0x1000000) + |
|
((this[offset + 1] << 16) | |
|
(this[offset + 2] << 8) | |
|
this[offset + 3]) |
|
} |
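// Usage sketch: the same bytes read as little- vs big-endian
// (assumes Buffer.from is defined earlier in this module):
//   var b = Buffer.from([0x12, 0x34, 0x56, 0x78])
//   b.readUInt16LE(0)   // => 0x3412
//   b.readUInt16BE(0)   // => 0x1234
//   b.readUInt32BE(0)   // => 0x12345678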
|
|
|
Buffer.prototype.readIntLE = function readIntLE (offset, byteLength, noAssert) { |
|
offset = offset >>> 0 |
|
byteLength = byteLength >>> 0 |
|
if (!noAssert) checkOffset(offset, byteLength, this.length) |
|
|
|
var val = this[offset] |
|
var mul = 1 |
|
var i = 0 |
|
while (++i < byteLength && (mul *= 0x100)) { |
|
val += this[offset + i] * mul |
|
} |
|
mul *= 0x80 |
|
|
|
if (val >= mul) val -= Math.pow(2, 8 * byteLength) |
|
|
|
return val |
|
} |
|
|
|
Buffer.prototype.readIntBE = function readIntBE (offset, byteLength, noAssert) { |
|
offset = offset >>> 0 |
|
byteLength = byteLength >>> 0 |
|
if (!noAssert) checkOffset(offset, byteLength, this.length) |
|
|
|
var i = byteLength |
|
var mul = 1 |
|
var val = this[offset + --i] |
|
while (i > 0 && (mul *= 0x100)) { |
|
val += this[offset + --i] * mul |
|
} |
|
mul *= 0x80 |
|
|
|
if (val >= mul) val -= Math.pow(2, 8 * byteLength) |
|
|
|
return val |
|
} |
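// Worked example of the sign handling above: after accumulating the unsigned
// value, anything at or above 2^(8*byteLength - 1) is shifted down by
// 2^(8*byteLength) to get the two's-complement result
// (assumes Buffer.from is defined earlier in this module):
//   Buffer.from([0xff]).readIntBE(0, 1)        // => -1  (0xff - 0x100)
//   Buffer.from([0xfe, 0xff]).readIntLE(0, 2)  // => -2  (0xfffe - 0x10000)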
|
|
|
Buffer.prototype.readInt8 = function readInt8 (offset, noAssert) { |
|
offset = offset >>> 0 |
|
if (!noAssert) checkOffset(offset, 1, this.length) |
|
if (!(this[offset] & 0x80)) return (this[offset]) |
|
return ((0xff - this[offset] + 1) * -1) |
|
} |
|
|
|
Buffer.prototype.readInt16LE = function readInt16LE (offset, noAssert) { |
|
offset = offset >>> 0 |
|
if (!noAssert) checkOffset(offset, 2, this.length) |
|
var val = this[offset] | (this[offset + 1] << 8) |
|
return (val & 0x8000) ? val | 0xFFFF0000 : val |
|
} |
|
|
|
Buffer.prototype.readInt16BE = function readInt16BE (offset, noAssert) { |
|
offset = offset >>> 0 |
|
if (!noAssert) checkOffset(offset, 2, this.length) |
|
var val = this[offset + 1] | (this[offset] << 8) |
|
return (val & 0x8000) ? val | 0xFFFF0000 : val |
|
} |
|
|
|
Buffer.prototype.readInt32LE = function readInt32LE (offset, noAssert) { |
|
offset = offset >>> 0 |
|
if (!noAssert) checkOffset(offset, 4, this.length) |
|
|
|
return (this[offset]) | |
|
(this[offset + 1] << 8) | |
|
(this[offset + 2] << 16) | |
|
(this[offset + 3] << 24) |
|
} |
|
|
|
Buffer.prototype.readInt32BE = function readInt32BE (offset, noAssert) { |
|
offset = offset >>> 0 |
|
if (!noAssert) checkOffset(offset, 4, this.length) |
|
|
|
return (this[offset] << 24) | |
|
(this[offset + 1] << 16) | |
|
(this[offset + 2] << 8) | |
|
(this[offset + 3]) |
|
} |
|
|
|
Buffer.prototype.readFloatLE = function readFloatLE (offset, noAssert) { |
|
offset = offset >>> 0 |
|
if (!noAssert) checkOffset(offset, 4, this.length) |
|
return ieee754.read(this, offset, true, 23, 4) |
|
} |
|
|
|
Buffer.prototype.readFloatBE = function readFloatBE (offset, noAssert) { |
|
offset = offset >>> 0 |
|
if (!noAssert) checkOffset(offset, 4, this.length) |
|
return ieee754.read(this, offset, false, 23, 4) |
|
} |
|
|
|
Buffer.prototype.readDoubleLE = function readDoubleLE (offset, noAssert) { |
|
offset = offset >>> 0 |
|
if (!noAssert) checkOffset(offset, 8, this.length) |
|
return ieee754.read(this, offset, true, 52, 8) |
|
} |
|
|
|
Buffer.prototype.readDoubleBE = function readDoubleBE (offset, noAssert) { |
|
offset = offset >>> 0 |
|
if (!noAssert) checkOffset(offset, 8, this.length) |
|
return ieee754.read(this, offset, false, 52, 8) |
|
} |
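// Usage sketch: float reads defer to the bundled ieee754 codec. The bytes
// 3f 80 00 00 are the IEEE-754 single-precision encoding of 1.0
// (assumes Buffer.from is defined earlier in this module):
//   Buffer.from([0x3f, 0x80, 0x00, 0x00]).readFloatBE(0)   // => 1
//   Buffer.from([0x00, 0x00, 0x80, 0x3f]).readFloatLE(0)   // => 1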
|
|
|
function checkInt (buf, value, offset, ext, max, min) { |
|
if (!Buffer.isBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance') |
|
if (value > max || value < min) throw new RangeError('"value" argument is out of bounds') |
|
if (offset + ext > buf.length) throw new RangeError('Index out of range') |
|
} |
|
|
|
Buffer.prototype.writeUIntLE = function writeUIntLE (value, offset, byteLength, noAssert) { |
|
value = +value |
|
offset = offset >>> 0 |
|
byteLength = byteLength >>> 0 |
|
if (!noAssert) { |
|
var maxBytes = Math.pow(2, 8 * byteLength) - 1 |
|
checkInt(this, value, offset, byteLength, maxBytes, 0) |
|
} |
|
|
|
var mul = 1 |
|
var i = 0 |
|
this[offset] = value & 0xFF |
|
while (++i < byteLength && (mul *= 0x100)) { |
|
this[offset + i] = (value / mul) & 0xFF |
|
} |
|
|
|
return offset + byteLength |
|
} |
|
|
|
Buffer.prototype.writeUIntBE = function writeUIntBE (value, offset, byteLength, noAssert) { |
|
value = +value |
|
offset = offset >>> 0 |
|
byteLength = byteLength >>> 0 |
|
if (!noAssert) { |
|
var maxBytes = Math.pow(2, 8 * byteLength) - 1 |
|
checkInt(this, value, offset, byteLength, maxBytes, 0) |
|
} |
|
|
|
var i = byteLength - 1 |
|
var mul = 1 |
|
this[offset + i] = value & 0xFF |
|
while (--i >= 0 && (mul *= 0x100)) { |
|
this[offset + i] = (value / mul) & 0xFF |
|
} |
|
|
|
return offset + byteLength |
|
} |
|
|
|
Buffer.prototype.writeUInt8 = function writeUInt8 (value, offset, noAssert) { |
|
value = +value |
|
offset = offset >>> 0 |
|
if (!noAssert) checkInt(this, value, offset, 1, 0xff, 0) |
|
this[offset] = (value & 0xff) |
|
return offset + 1 |
|
} |
|
|
|
Buffer.prototype.writeUInt16LE = function writeUInt16LE (value, offset, noAssert) { |
|
value = +value |
|
offset = offset >>> 0 |
|
if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0) |
|
this[offset] = (value & 0xff) |
|
this[offset + 1] = (value >>> 8) |
|
return offset + 2 |
|
} |
|
|
|
Buffer.prototype.writeUInt16BE = function writeUInt16BE (value, offset, noAssert) { |
|
value = +value |
|
offset = offset >>> 0 |
|
if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0) |
|
this[offset] = (value >>> 8) |
|
this[offset + 1] = (value & 0xff) |
|
return offset + 2 |
|
} |
|
|
|
Buffer.prototype.writeUInt32LE = function writeUInt32LE (value, offset, noAssert) { |
|
value = +value |
|
offset = offset >>> 0 |
|
if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0) |
|
this[offset + 3] = (value >>> 24) |
|
this[offset + 2] = (value >>> 16) |
|
this[offset + 1] = (value >>> 8) |
|
this[offset] = (value & 0xff) |
|
return offset + 4 |
|
} |
|
|
|
Buffer.prototype.writeUInt32BE = function writeUInt32BE (value, offset, noAssert) { |
|
value = +value |
|
offset = offset >>> 0 |
|
if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0) |
|
this[offset] = (value >>> 24) |
|
this[offset + 1] = (value >>> 16) |
|
this[offset + 2] = (value >>> 8) |
|
this[offset + 3] = (value & 0xff) |
|
return offset + 4 |
|
} |
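// Usage sketch: byte layout produced by the LE/BE writers, and the returned
// offset (assumes Buffer.alloc is defined earlier in this module):
//   var b = Buffer.alloc(4)
//   b.writeUInt16LE(0x1234, 0)   // bytes: 34 12 00 00, returns 2
//   b.writeUInt16BE(0x1234, 2)   // bytes: 34 12 12 34, returns 4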
|
|
|
Buffer.prototype.writeIntLE = function writeIntLE (value, offset, byteLength, noAssert) { |
|
value = +value |
|
offset = offset >>> 0 |
|
if (!noAssert) { |
|
var limit = Math.pow(2, (8 * byteLength) - 1) |
|
|
|
checkInt(this, value, offset, byteLength, limit - 1, -limit) |
|
} |
|
|
|
var i = 0 |
|
var mul = 1 |
|
var sub = 0 |
|
this[offset] = value & 0xFF |
|
while (++i < byteLength && (mul *= 0x100)) { |
|
if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) { |
|
sub = 1 |
|
} |
|
this[offset + i] = ((value / mul) >> 0) - sub & 0xFF |
|
} |
|
|
|
return offset + byteLength |
|
} |
|
|
|
Buffer.prototype.writeIntBE = function writeIntBE (value, offset, byteLength, noAssert) { |
|
value = +value |
|
offset = offset >>> 0 |
|
if (!noAssert) { |
|
var limit = Math.pow(2, (8 * byteLength) - 1) |
|
|
|
checkInt(this, value, offset, byteLength, limit - 1, -limit) |
|
} |
|
|
|
var i = byteLength - 1 |
|
var mul = 1 |
|
var sub = 0 |
|
this[offset + i] = value & 0xFF |
|
while (--i >= 0 && (mul *= 0x100)) { |
|
if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) { |
|
sub = 1 |
|
} |
|
this[offset + i] = ((value / mul) >> 0) - sub & 0xFF |
|
} |
|
|
|
return offset + byteLength |
|
} |
|
|
|
Buffer.prototype.writeInt8 = function writeInt8 (value, offset, noAssert) { |
|
value = +value |
|
offset = offset >>> 0 |
|
if (!noAssert) checkInt(this, value, offset, 1, 0x7f, -0x80) |
|
if (value < 0) value = 0xff + value + 1 |
|
this[offset] = (value & 0xff) |
|
return offset + 1 |
|
} |
|
|
|
Buffer.prototype.writeInt16LE = function writeInt16LE (value, offset, noAssert) { |
|
value = +value |
|
offset = offset >>> 0 |
|
if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000) |
|
this[offset] = (value & 0xff) |
|
this[offset + 1] = (value >>> 8) |
|
return offset + 2 |
|
} |
|
|
|
Buffer.prototype.writeInt16BE = function writeInt16BE (value, offset, noAssert) { |
|
value = +value |
|
offset = offset >>> 0 |
|
if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000) |
|
this[offset] = (value >>> 8) |
|
this[offset + 1] = (value & 0xff) |
|
return offset + 2 |
|
} |
|
|
|
Buffer.prototype.writeInt32LE = function writeInt32LE (value, offset, noAssert) { |
|
value = +value |
|
offset = offset >>> 0 |
|
if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000) |
|
this[offset] = (value & 0xff) |
|
this[offset + 1] = (value >>> 8) |
|
this[offset + 2] = (value >>> 16) |
|
this[offset + 3] = (value >>> 24) |
|
return offset + 4 |
|
} |
|
|
|
Buffer.prototype.writeInt32BE = function writeInt32BE (value, offset, noAssert) { |
|
value = +value |
|
offset = offset >>> 0 |
|
if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000) |
|
if (value < 0) value = 0xffffffff + value + 1 |
|
this[offset] = (value >>> 24) |
|
this[offset + 1] = (value >>> 16) |
|
this[offset + 2] = (value >>> 8) |
|
this[offset + 3] = (value & 0xff) |
|
return offset + 4 |
|
} |
|
|
|
function checkIEEE754 (buf, value, offset, ext, max, min) { |
|
if (offset + ext > buf.length) throw new RangeError('Index out of range') |
|
if (offset < 0) throw new RangeError('Index out of range') |
|
} |
|
|
|
function writeFloat (buf, value, offset, littleEndian, noAssert) { |
|
value = +value |
|
offset = offset >>> 0 |
|
if (!noAssert) { |
|
checkIEEE754(buf, value, offset, 4, 3.4028234663852886e+38, -3.4028234663852886e+38) |
|
} |
|
ieee754.write(buf, value, offset, littleEndian, 23, 4) |
|
return offset + 4 |
|
} |
|
|
|
Buffer.prototype.writeFloatLE = function writeFloatLE (value, offset, noAssert) { |
|
return writeFloat(this, value, offset, true, noAssert) |
|
} |
|
|
|
Buffer.prototype.writeFloatBE = function writeFloatBE (value, offset, noAssert) { |
|
return writeFloat(this, value, offset, false, noAssert) |
|
} |
|
|
|
function writeDouble (buf, value, offset, littleEndian, noAssert) { |
|
value = +value |
|
offset = offset >>> 0 |
|
if (!noAssert) { |
|
checkIEEE754(buf, value, offset, 8, 1.7976931348623157E+308, -1.7976931348623157E+308) |
|
} |
|
ieee754.write(buf, value, offset, littleEndian, 52, 8) |
|
return offset + 8 |
|
} |
|
|
|
Buffer.prototype.writeDoubleLE = function writeDoubleLE (value, offset, noAssert) { |
|
return writeDouble(this, value, offset, true, noAssert) |
|
} |
|
|
|
Buffer.prototype.writeDoubleBE = function writeDoubleBE (value, offset, noAssert) { |
|
return writeDouble(this, value, offset, false, noAssert) |
|
} |
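// Usage sketch: doubles round-trip through the ieee754 writer/reader
// (assumes Buffer.alloc is defined earlier in this module):
//   var b = Buffer.alloc(8)
//   b.writeDoubleLE(0.5, 0)   // => 8 (offset just past the write)
//   b.readDoubleLE(0)         // => 0.5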
|
|
|
// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length) |
|
Buffer.prototype.copy = function copy (target, targetStart, start, end) { |
|
if (!start) start = 0 |
|
if (!end && end !== 0) end = this.length |
|
if (targetStart >= target.length) targetStart = target.length |
|
if (!targetStart) targetStart = 0 |
|
if (end > 0 && end < start) end = start |
|
|
|
// Copy 0 bytes; we're done |
|
if (end === start) return 0 |
|
if (target.length === 0 || this.length === 0) return 0 |
|
|
|
// Fatal error conditions |
|
if (targetStart < 0) { |
|
throw new RangeError('targetStart out of bounds') |
|
} |
|
if (start < 0 || start >= this.length) throw new RangeError('sourceStart out of bounds') |
|
if (end < 0) throw new RangeError('sourceEnd out of bounds') |
|
|
|
// Are we oob? |
|
if (end > this.length) end = this.length |
|
if (target.length - targetStart < end - start) { |
|
end = target.length - targetStart + start |
|
} |
|
|
|
var len = end - start |
|
var i |
|
|
|
if (this === target && start < targetStart && targetStart < end) { |
|
// descending copy from end |
|
for (i = len - 1; i >= 0; --i) { |
|
target[i + targetStart] = this[i + start] |
|
} |
|
} else if (len < 1000) { |
|
// ascending copy from start |
|
for (i = 0; i < len; ++i) { |
|
target[i + targetStart] = this[i + start] |
|
} |
|
} else { |
|
Uint8Array.prototype.set.call( |
|
target, |
|
this.subarray(start, start + len), |
|
targetStart |
|
) |
|
} |
|
|
|
return len |
|
} |
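// Usage sketch for Buffer#copy (assumes Buffer.from and Buffer.alloc are
// defined earlier in this module). Returns the number of bytes copied:
//   var src = Buffer.from([1, 2, 3, 4])
//   var dst = Buffer.alloc(4)
//   src.copy(dst, 1, 2)   // => 2; dst is now <Buffer 00 03 04 00>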
|
|
|
// Usage: |
|
// buffer.fill(number[, offset[, end]]) |
|
// buffer.fill(buffer[, offset[, end]]) |
|
// buffer.fill(string[, offset[, end]][, encoding]) |
|
Buffer.prototype.fill = function fill (val, start, end, encoding) { |
|
// Handle string cases: |
|
if (typeof val === 'string') { |
|
if (typeof start === 'string') { |
|
encoding = start |
|
start = 0 |
|
end = this.length |
|
} else if (typeof end === 'string') { |
|
encoding = end |
|
end = this.length |
|
} |
|
if (val.length === 1) { |
|
var code = val.charCodeAt(0) |
|
if (code < 256) { |
|
val = code |
|
} |
|
} |
|
if (encoding !== undefined && typeof encoding !== 'string') { |
|
throw new TypeError('encoding must be a string') |
|
} |
|
if (typeof encoding === 'string' && !Buffer.isEncoding(encoding)) { |
|
throw new TypeError('Unknown encoding: ' + encoding) |
|
} |
|
} else if (typeof val === 'number') { |
|
val = val & 255 |
|
} |
|
|
|
  // Invalid ranges are not set to a default, so we can range-check early.
|
if (start < 0 || this.length < start || this.length < end) { |
|
throw new RangeError('Out of range index') |
|
} |
|
|
|
if (end <= start) { |
|
return this |
|
} |
|
|
|
start = start >>> 0 |
|
end = end === undefined ? this.length : end >>> 0 |
|
|
|
if (!val) val = 0 |
|
|
|
var i |
|
if (typeof val === 'number') { |
|
for (i = start; i < end; ++i) { |
|
this[i] = val |
|
} |
|
} else { |
|
var bytes = Buffer.isBuffer(val) |
|
? val |
|
: new Buffer(val, encoding) |
|
var len = bytes.length |
|
for (i = 0; i < end - start; ++i) { |
|
this[i + start] = bytes[i % len] |
|
} |
|
} |
|
|
|
return this |
|
} |
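// Usage sketch for Buffer#fill (assumes Buffer.alloc is defined earlier in
// this module). A multi-byte fill value is repeated to cover the range:
//   Buffer.alloc(4).fill(0xaa)      // <Buffer aa aa aa aa>
//   Buffer.alloc(5).fill('ab')      // <Buffer 61 62 61 62 61>
//   Buffer.alloc(4).fill(7, 1, 3)   // fills only indexes 1 and 2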
|
|
|
// HELPER FUNCTIONS |
|
// ================ |
|
|
|
var INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g |
|
|
|
function base64clean (str) { |
|
// Node strips out invalid characters like \n and \t from the string, base64-js does not |
|
str = str.trim().replace(INVALID_BASE64_RE, '') |
|
// Node converts strings with length < 2 to '' |
|
if (str.length < 2) return '' |
|
// Node allows for non-padded base64 strings (missing trailing ===), base64-js does not |
|
while (str.length % 4 !== 0) { |
|
str = str + '=' |
|
} |
|
return str |
|
} |
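// Worked example: base64clean normalizes input before handing it to base64-js,
// which (unlike Node) rejects unpadded strings:
//   base64clean(' aGV5YQ\n')   // => 'aGV5YQ==' (trimmed, invalid chars removed,
//                              //    '=' appended until length % 4 === 0)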
|
|
|
function toHex (n) { |
|
if (n < 16) return '0' + n.toString(16) |
|
return n.toString(16) |
|
} |
|
|
|
function utf8ToBytes (string, units) { |
|
units = units || Infinity |
|
var codePoint |
|
var length = string.length |
|
var leadSurrogate = null |
|
var bytes = [] |
|
|
|
for (var i = 0; i < length; ++i) { |
|
codePoint = string.charCodeAt(i) |
|
|
|
// is surrogate component |
|
if (codePoint > 0xD7FF && codePoint < 0xE000) { |
|
// last char was a lead |
|
if (!leadSurrogate) { |
|
// no lead yet |
|
if (codePoint > 0xDBFF) { |
|
// unexpected trail |
|
if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) |
|
continue |
|
} else if (i + 1 === length) { |
|
// unpaired lead |
|
if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) |
|
continue |
|
} |
|
|
|
// valid lead |
|
leadSurrogate = codePoint |
|
|
|
continue |
|
} |
|
|
|
// 2 leads in a row |
|
if (codePoint < 0xDC00) { |
|
if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) |
|
leadSurrogate = codePoint |
|
continue |
|
} |
|
|
|
// valid surrogate pair |
|
codePoint = (leadSurrogate - 0xD800 << 10 | codePoint - 0xDC00) + 0x10000 |
|
} else if (leadSurrogate) { |
|
// valid bmp char, but last char was a lead |
|
if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) |
|
} |
|
|
|
leadSurrogate = null |
|
|
|
// encode utf8 |
|
if (codePoint < 0x80) { |
|
if ((units -= 1) < 0) break |
|
bytes.push(codePoint) |
|
} else if (codePoint < 0x800) { |
|
if ((units -= 2) < 0) break |
|
bytes.push( |
|
codePoint >> 0x6 | 0xC0, |
|
codePoint & 0x3F | 0x80 |
|
) |
|
} else if (codePoint < 0x10000) { |
|
if ((units -= 3) < 0) break |
|
bytes.push( |
|
codePoint >> 0xC | 0xE0, |
|
codePoint >> 0x6 & 0x3F | 0x80, |
|
codePoint & 0x3F | 0x80 |
|
) |
|
} else if (codePoint < 0x110000) { |
|
if ((units -= 4) < 0) break |
|
bytes.push( |
|
codePoint >> 0x12 | 0xF0, |
|
codePoint >> 0xC & 0x3F | 0x80, |
|
codePoint >> 0x6 & 0x3F | 0x80, |
|
codePoint & 0x3F | 0x80 |
|
) |
|
} else { |
|
throw new Error('Invalid code point') |
|
} |
|
} |
|
|
|
return bytes |
|
} |
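// Worked examples for the encoder above: lone or malformed surrogate halves
// become U+FFFD (the bytes EF BF BD), while a valid pair becomes a single
// 4-byte sequence:
//   utf8ToBytes('e')             // => [0x65]
//   utf8ToBytes('\uD83D')        // => [0xEF, 0xBF, 0xBD]  (unpaired lead)
//   utf8ToBytes('\uD83D\uDE00')  // => [0xF0, 0x9F, 0x98, 0x80]  (U+1F600)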
|
|
|
function asciiToBytes (str) { |
|
var byteArray = [] |
|
for (var i = 0; i < str.length; ++i) { |
|
// Node's code seems to be doing this and not & 0x7F.. |
|
byteArray.push(str.charCodeAt(i) & 0xFF) |
|
} |
|
return byteArray |
|
} |
|
|
|
function utf16leToBytes (str, units) { |
|
var c, hi, lo |
|
var byteArray = [] |
|
for (var i = 0; i < str.length; ++i) { |
|
if ((units -= 2) < 0) break |
|
|
|
c = str.charCodeAt(i) |
|
hi = c >> 8 |
|
lo = c % 256 |
|
byteArray.push(lo) |
|
byteArray.push(hi) |
|
} |
|
|
|
return byteArray |
|
} |
|
|
|
function base64ToBytes (str) { |
|
return base64.toByteArray(base64clean(str)) |
|
} |
|
|
|
function blitBuffer (src, dst, offset, length) { |
|
for (var i = 0; i < length; ++i) { |
|
if ((i + offset >= dst.length) || (i >= src.length)) break |
|
dst[i + offset] = src[i] |
|
} |
|
return i |
|
} |
|
|
|
// ArrayBuffers from another context (i.e. an iframe) do not pass the `instanceof` check |
|
// but they should be treated as valid. See: https://github.com/feross/buffer/issues/166 |
|
function isArrayBuffer (obj) { |
|
return obj instanceof ArrayBuffer || |
|
(obj != null && obj.constructor != null && obj.constructor.name === 'ArrayBuffer' && |
|
typeof obj.byteLength === 'number') |
|
} |
|
|
|
// Node 0.10 supports `ArrayBuffer` but lacks `ArrayBuffer.isView` |
|
function isArrayBufferView (obj) { |
|
return (typeof ArrayBuffer.isView === 'function') && ArrayBuffer.isView(obj) |
|
} |
|
|
|
function numberIsNaN (obj) { |
|
return obj !== obj // eslint-disable-line no-self-compare |
|
} |
|
|
|
},{"base64-js":31,"ieee754":36}],34:[function(require,module,exports){ |
|
(function (Buffer){ |
|
// Copyright Joyent, Inc. and other Node contributors. |
|
// |
|
// Permission is hereby granted, free of charge, to any person obtaining a |
|
// copy of this software and associated documentation files (the |
|
// "Software"), to deal in the Software without restriction, including |
|
// without limitation the rights to use, copy, modify, merge, publish, |
|
// distribute, sublicense, and/or sell copies of the Software, and to permit |
|
// persons to whom the Software is furnished to do so, subject to the |
|
// following conditions: |
|
// |
|
// The above copyright notice and this permission notice shall be included |
|
// in all copies or substantial portions of the Software. |
|
// |
|
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS |
|
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
|
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN |
|
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, |
|
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR |
|
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE |
|
// USE OR OTHER DEALINGS IN THE SOFTWARE. |
|
|
|
// NOTE: These type checking functions intentionally don't use `instanceof` |
|
// because it is fragile and can be easily faked with `Object.create()`. |
|
|
|
function isArray(arg) { |
|
if (Array.isArray) { |
|
return Array.isArray(arg); |
|
} |
|
return objectToString(arg) === '[object Array]'; |
|
} |
|
exports.isArray = isArray; |
|
|
|
function isBoolean(arg) { |
|
return typeof arg === 'boolean'; |
|
} |
|
exports.isBoolean = isBoolean; |
|
|
|
function isNull(arg) { |
|
return arg === null; |
|
} |
|
exports.isNull = isNull; |
|
|
|
function isNullOrUndefined(arg) { |
|
return arg == null; |
|
} |
|
exports.isNullOrUndefined = isNullOrUndefined; |
|
|
|
function isNumber(arg) { |
|
return typeof arg === 'number'; |
|
} |
|
exports.isNumber = isNumber; |
|
|
|
function isString(arg) { |
|
return typeof arg === 'string'; |
|
} |
|
exports.isString = isString; |
|
|
|
function isSymbol(arg) { |
|
return typeof arg === 'symbol'; |
|
} |
|
exports.isSymbol = isSymbol; |
|
|
|
function isUndefined(arg) { |
|
return arg === void 0; |
|
} |
|
exports.isUndefined = isUndefined; |
|
|
|
function isRegExp(re) { |
|
return objectToString(re) === '[object RegExp]'; |
|
} |
|
exports.isRegExp = isRegExp; |
|
|
|
function isObject(arg) { |
|
return typeof arg === 'object' && arg !== null; |
|
} |
|
exports.isObject = isObject; |
|
|
|
function isDate(d) { |
|
return objectToString(d) === '[object Date]'; |
|
} |
|
exports.isDate = isDate; |
|
|
|
function isError(e) { |
|
return (objectToString(e) === '[object Error]' || e instanceof Error); |
|
} |
|
exports.isError = isError; |
|
|
|
function isFunction(arg) { |
|
return typeof arg === 'function'; |
|
} |
|
exports.isFunction = isFunction; |
|
|
|
function isPrimitive(arg) { |
|
return arg === null || |
|
typeof arg === 'boolean' || |
|
typeof arg === 'number' || |
|
typeof arg === 'string' || |
|
typeof arg === 'symbol' || // ES6 symbol |
|
typeof arg === 'undefined'; |
|
} |
|
exports.isPrimitive = isPrimitive; |
|
|
|
exports.isBuffer = Buffer.isBuffer; |
|
|
|
function objectToString(o) { |
|
return Object.prototype.toString.call(o); |
|
} |
|
|
|
}).call(this,{"isBuffer":require("../../is-buffer/index.js")}) |
|
},{"../../is-buffer/index.js":38}],35:[function(require,module,exports){ |
|
// Copyright Joyent, Inc. and other Node contributors. |
|
// |
|
// Permission is hereby granted, free of charge, to any person obtaining a |
|
// copy of this software and associated documentation files (the |
|
// "Software"), to deal in the Software without restriction, including |
|
// without limitation the rights to use, copy, modify, merge, publish, |
|
// distribute, sublicense, and/or sell copies of the Software, and to permit |
|
// persons to whom the Software is furnished to do so, subject to the |
|
// following conditions: |
|
// |
|
// The above copyright notice and this permission notice shall be included |
|
// in all copies or substantial portions of the Software. |
|
// |
|
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS |
|
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
|
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN |
|
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, |
|
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR |
|
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE |
|
// USE OR OTHER DEALINGS IN THE SOFTWARE. |
|
|
|
function EventEmitter() { |
|
this._events = this._events || {}; |
|
this._maxListeners = this._maxListeners || undefined; |
|
} |
|
module.exports = EventEmitter; |
|
|
|
// Backwards-compat with node 0.10.x |
|
EventEmitter.EventEmitter = EventEmitter; |
|
|
|
EventEmitter.prototype._events = undefined; |
|
EventEmitter.prototype._maxListeners = undefined; |
|
|
|
// By default EventEmitters will print a warning if more than 10 listeners are |
|
// added to it. This is a useful default which helps finding memory leaks. |
|
EventEmitter.defaultMaxListeners = 10; |
|
|
|
// Obviously not all Emitters should be limited to 10. This function allows |
|
// that to be increased. Set to zero for unlimited. |
|
EventEmitter.prototype.setMaxListeners = function(n) { |
|
if (!isNumber(n) || n < 0 || isNaN(n)) |
|
throw TypeError('n must be a positive number'); |
|
this._maxListeners = n; |
|
return this; |
|
}; |
|
|
|
EventEmitter.prototype.emit = function(type) { |
|
var er, handler, len, args, i, listeners; |
|
|
|
if (!this._events) |
|
this._events = {}; |
|
|
|
// If there is no 'error' event listener then throw. |
|
if (type === 'error') { |
|
if (!this._events.error || |
|
(isObject(this._events.error) && !this._events.error.length)) { |
|
er = arguments[1]; |
|
if (er instanceof Error) { |
|
throw er; // Unhandled 'error' event |
|
} else { |
|
// At least give some kind of context to the user |
|
var err = new Error('Uncaught, unspecified "error" event. (' + er + ')'); |
|
err.context = er; |
|
throw err; |
|
} |
|
} |
|
} |
|
|
|
handler = this._events[type]; |
|
|
|
if (isUndefined(handler)) |
|
return false; |
|
|
|
if (isFunction(handler)) { |
|
switch (arguments.length) { |
|
// fast cases |
|
case 1: |
|
handler.call(this); |
|
break; |
|
case 2: |
|
handler.call(this, arguments[1]); |
|
break; |
|
case 3: |
|
handler.call(this, arguments[1], arguments[2]); |
|
break; |
|
// slower |
|
default: |
|
args = Array.prototype.slice.call(arguments, 1); |
|
handler.apply(this, args); |
|
} |
|
} else if (isObject(handler)) { |
|
args = Array.prototype.slice.call(arguments, 1); |
|
listeners = handler.slice(); |
|
len = listeners.length; |
|
for (i = 0; i < len; i++) |
|
listeners[i].apply(this, args); |
|
} |
|
|
|
return true; |
|
}; |
|
|
|
EventEmitter.prototype.addListener = function(type, listener) { |
|
var m; |
|
|
|
if (!isFunction(listener)) |
|
throw TypeError('listener must be a function'); |
|
|
|
if (!this._events) |
|
this._events = {}; |
|
|
|
// To avoid recursion in the case that type === "newListener"! Before |
|
// adding it to the listeners, first emit "newListener". |
|
if (this._events.newListener) |
|
this.emit('newListener', type, |
|
isFunction(listener.listener) ? |
|
listener.listener : listener); |
|
|
|
if (!this._events[type]) |
|
// Optimize the case of one listener. Don't need the extra array object. |
|
this._events[type] = listener; |
|
else if (isObject(this._events[type])) |
|
// If we've already got an array, just append. |
|
this._events[type].push(listener); |
|
else |
|
// Adding the second element, need to change to array. |
|
this._events[type] = [this._events[type], listener]; |
|
|
|
// Check for listener leak |
|
if (isObject(this._events[type]) && !this._events[type].warned) { |
|
if (!isUndefined(this._maxListeners)) { |
|
m = this._maxListeners; |
|
} else { |
|
m = EventEmitter.defaultMaxListeners; |
|
} |
|
|
|
if (m && m > 0 && this._events[type].length > m) { |
|
this._events[type].warned = true; |
|
console.error('(node) warning: possible EventEmitter memory ' + |
|
'leak detected. %d listeners added. ' + |
|
'Use emitter.setMaxListeners() to increase limit.', |
|
this._events[type].length); |
|
if (typeof console.trace === 'function') { |
|
// not supported in IE 10 |
|
console.trace(); |
|
} |
|
} |
|
} |
|
|
|
return this; |
|
}; |
|
|
|
EventEmitter.prototype.on = EventEmitter.prototype.addListener; |
|
|
|
EventEmitter.prototype.once = function(type, listener) { |
|
if (!isFunction(listener)) |
|
throw TypeError('listener must be a function'); |
|
|
|
var fired = false; |
|
|
|
function g() { |
|
this.removeListener(type, g); |
|
|
|
if (!fired) { |
|
fired = true; |
|
listener.apply(this, arguments); |
|
} |
|
} |
|
|
|
g.listener = listener; |
|
this.on(type, g); |
|
|
|
return this; |
|
}; |
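// Usage sketch: once() wraps the listener so it runs at most one time and is
// removed before it fires:
//   var ee = new EventEmitter()
//   ee.once('ping', function (n) { console.log('got', n) })
//   ee.emit('ping', 1)   // logs "got 1"
//   ee.emit('ping', 2)   // no output, listener already removed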
|
|
|
// emits a 'removeListener' event iff the listener was removed |
|
EventEmitter.prototype.removeListener = function(type, listener) { |
|
var list, position, length, i; |
|
|
|
if (!isFunction(listener)) |
|
throw TypeError('listener must be a function'); |
|
|
|
if (!this._events || !this._events[type]) |
|
return this; |
|
|
|
list = this._events[type]; |
|
length = list.length; |
|
position = -1; |
|
|
|
if (list === listener || |
|
(isFunction(list.listener) && list.listener === listener)) { |
|
delete this._events[type]; |
|
if (this._events.removeListener) |
|
this.emit('removeListener', type, listener); |
|
|
|
} else if (isObject(list)) { |
|
for (i = length; i-- > 0;) { |
|
if (list[i] === listener || |
|
(list[i].listener && list[i].listener === listener)) { |
|
position = i; |
|
break; |
|
} |
|
} |
|
|
|
if (position < 0) |
|
return this; |
|
|
|
if (list.length === 1) { |
|
list.length = 0; |
|
delete this._events[type]; |
|
} else { |
|
list.splice(position, 1); |
|
} |
|
|
|
if (this._events.removeListener) |
|
this.emit('removeListener', type, listener); |
|
} |
|
|
|
return this; |
|
}; |
|
|
|
EventEmitter.prototype.removeAllListeners = function(type) { |
|
var key, listeners; |
|
|
|
if (!this._events) |
|
return this; |
|
|
|
// not listening for removeListener, no need to emit |
|
if (!this._events.removeListener) { |
|
if (arguments.length === 0) |
|
this._events = {}; |
|
else if (this._events[type]) |
|
delete this._events[type]; |
|
return this; |
|
} |
|
|
|
// emit removeListener for all listeners on all events |
|
if (arguments.length === 0) { |
|
for (key in this._events) { |
|
if (key === 'removeListener') continue; |
|
this.removeAllListeners(key); |
|
} |
|
this.removeAllListeners('removeListener'); |
|
this._events = {}; |
|
return this; |
|
} |
|
|
|
listeners = this._events[type]; |
|
|
|
if (isFunction(listeners)) { |
|
this.removeListener(type, listeners); |
|
} else if (listeners) { |
|
// LIFO order |
|
while (listeners.length) |
|
this.removeListener(type, listeners[listeners.length - 1]); |
|
} |
|
delete this._events[type]; |
|
|
|
return this; |
|
}; |
|
|
|
EventEmitter.prototype.listeners = function(type) { |
|
var ret; |
|
if (!this._events || !this._events[type]) |
|
ret = []; |
|
else if (isFunction(this._events[type])) |
|
ret = [this._events[type]]; |
|
else |
|
ret = this._events[type].slice(); |
|
return ret; |
|
}; |
|
|
|
EventEmitter.prototype.listenerCount = function(type) { |
|
if (this._events) { |
|
var evlistener = this._events[type]; |
|
|
|
if (isFunction(evlistener)) |
|
return 1; |
|
else if (evlistener) |
|
return evlistener.length; |
|
} |
|
return 0; |
|
}; |
|
|
|
EventEmitter.listenerCount = function(emitter, type) { |
|
return emitter.listenerCount(type); |
|
}; |
|
|
|
function isFunction(arg) { |
|
return typeof arg === 'function'; |
|
} |
|
|
|
function isNumber(arg) { |
|
return typeof arg === 'number'; |
|
} |
|
|
|
function isObject(arg) { |
|
return typeof arg === 'object' && arg !== null; |
|
} |
|
|
|
function isUndefined(arg) { |
|
return arg === void 0; |
|
} |
|
|
|
},{}],36:[function(require,module,exports){ |
|
exports.read = function (buffer, offset, isLE, mLen, nBytes) { |
|
var e, m |
|
var eLen = nBytes * 8 - mLen - 1 |
|
var eMax = (1 << eLen) - 1 |
|
var eBias = eMax >> 1 |
|
var nBits = -7 |
|
var i = isLE ? (nBytes - 1) : 0 |
|
var d = isLE ? -1 : 1 |
|
var s = buffer[offset + i] |
|
|
|
i += d |
|
|
|
e = s & ((1 << (-nBits)) - 1) |
|
s >>= (-nBits) |
|
nBits += eLen |
|
for (; nBits > 0; e = e * 256 + buffer[offset + i], i += d, nBits -= 8) {} |
|
|
|
m = e & ((1 << (-nBits)) - 1) |
|
e >>= (-nBits) |
|
nBits += mLen |
|
for (; nBits > 0; m = m * 256 + buffer[offset + i], i += d, nBits -= 8) {} |
|
|
|
if (e === 0) { |
|
e = 1 - eBias |
|
} else if (e === eMax) { |
|
return m ? NaN : ((s ? -1 : 1) * Infinity) |
|
} else { |
|
m = m + Math.pow(2, mLen) |
|
e = e - eBias |
|
} |
|
return (s ? -1 : 1) * m * Math.pow(2, e - mLen) |
|
} |
|
|
|
exports.write = function (buffer, value, offset, isLE, mLen, nBytes) { |
|
var e, m, c |
|
var eLen = nBytes * 8 - mLen - 1 |
|
var eMax = (1 << eLen) - 1 |
|
var eBias = eMax >> 1 |
|
var rt = (mLen === 23 ? Math.pow(2, -24) - Math.pow(2, -77) : 0) |
|
var i = isLE ? 0 : (nBytes - 1) |
|
var d = isLE ? 1 : -1 |
|
var s = value < 0 || (value === 0 && 1 / value < 0) ? 1 : 0 |
|
|
|
value = Math.abs(value) |
|
|
|
if (isNaN(value) || value === Infinity) { |
|
m = isNaN(value) ? 1 : 0 |
|
e = eMax |
|
} else { |
|
e = Math.floor(Math.log(value) / Math.LN2) |
|
if (value * (c = Math.pow(2, -e)) < 1) { |
|
e-- |
|
c *= 2 |
|
} |
|
if (e + eBias >= 1) { |
|
value += rt / c |
|
} else { |
|
value += rt * Math.pow(2, 1 - eBias) |
|
} |
|
if (value * c >= 2) { |
|
e++ |
|
c /= 2 |
|
} |
|
|
|
if (e + eBias >= eMax) { |
|
m = 0 |
|
e = eMax |
|
} else if (e + eBias >= 1) { |
|
m = (value * c - 1) * Math.pow(2, mLen) |
|
e = e + eBias |
|
} else { |
|
m = value * Math.pow(2, eBias - 1) * Math.pow(2, mLen) |
|
e = 0 |
|
} |
|
} |
|
|
|
for (; mLen >= 8; buffer[offset + i] = m & 0xff, i += d, m /= 256, mLen -= 8) {} |
|
|
|
e = (e << mLen) | m |
|
eLen += mLen |
|
for (; eLen > 0; buffer[offset + i] = e & 0xff, i += d, e /= 256, eLen -= 8) {} |
|
|
|
buffer[offset + i - d] |= s * 128 |
|
} |
|
|
|
},{}],37:[function(require,module,exports){ |
|
arguments[4][11][0].apply(exports,arguments) |
|
},{"dup":11}],38:[function(require,module,exports){ |
|
/*! |
|
* Determine if an object is a Buffer |
|
* |
|
* @author Feross Aboukhadijeh <feross@feross.org> <http://feross.org> |
|
* @license MIT |
|
*/ |
|
|
|
// The _isBuffer check is for Safari 5-7 support, because it's missing |
|
// Object.prototype.constructor. Remove this eventually |
|
module.exports = function (obj) { |
|
return obj != null && (isBuffer(obj) || isSlowBuffer(obj) || !!obj._isBuffer) |
|
} |
|
|
|
function isBuffer (obj) { |
|
return !!obj.constructor && typeof obj.constructor.isBuffer === 'function' && obj.constructor.isBuffer(obj) |
|
} |
|
|
|
// For Node v0.10 support. Remove this eventually. |
|
function isSlowBuffer (obj) { |
|
return typeof obj.readFloatLE === 'function' && typeof obj.slice === 'function' && isBuffer(obj.slice(0, 0)) |
|
} |
|
|
|
},{}],39:[function(require,module,exports){ |
|
arguments[4][12][0].apply(exports,arguments) |
|
},{"dup":12}],40:[function(require,module,exports){ |
|
arguments[4][14][0].apply(exports,arguments) |
|
},{"_process":41,"dup":14}],41:[function(require,module,exports){ |
|
// shim for using process in browser |
|
var process = module.exports = {}; |
|
|
|
// cached from whatever global is present so that test runners that stub it |
|
// don't break things. But we need to wrap it in a try catch in case it is |
|
// wrapped in strict mode code which doesn't define any globals. It's inside a |
|
// function because try/catches deoptimize in certain engines. |
|
|
|
var cachedSetTimeout; |
|
var cachedClearTimeout; |
|
|
|
function defaultSetTimout() { |
|
throw new Error('setTimeout has not been defined'); |
|
} |
|
function defaultClearTimeout () { |
|
throw new Error('clearTimeout has not been defined'); |
|
} |
|
(function () { |
|
try { |
|
if (typeof setTimeout === 'function') { |
|
cachedSetTimeout = setTimeout; |
|
} else { |
|
cachedSetTimeout = defaultSetTimout; |
|
} |
|
} catch (e) { |
|
cachedSetTimeout = defaultSetTimout; |
|
} |
|
try { |
|
if (typeof clearTimeout === 'function') { |
|
cachedClearTimeout = clearTimeout; |
|
} else { |
|
cachedClearTimeout = defaultClearTimeout; |
|
} |
|
} catch (e) { |
|
cachedClearTimeout = defaultClearTimeout; |
|
} |
|
} ()) |
|
function runTimeout(fun) { |
|
if (cachedSetTimeout === setTimeout) { |
|
        //normal environments in sane situations
|
return setTimeout(fun, 0); |
|
} |
|
    // if setTimeout wasn't available but was defined later
|
if ((cachedSetTimeout === defaultSetTimout || !cachedSetTimeout) && setTimeout) { |
|
cachedSetTimeout = setTimeout; |
|
return setTimeout(fun, 0); |
|
} |
|
try { |
|
        // when somebody has screwed with setTimeout but there is no IE madness
|
return cachedSetTimeout(fun, 0); |
|
} catch(e){ |
|
try { |
|
// When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally |
|
return cachedSetTimeout.call(null, fun, 0); |
|
} catch(e){ |
|
            // same as above, but for a version of IE that requires the global object as 'this'; hopefully our context is correct, otherwise it will throw a global error
|
return cachedSetTimeout.call(this, fun, 0); |
|
} |
|
} |
|
|
|
|
|
} |
|
function runClearTimeout(marker) { |
|
if (cachedClearTimeout === clearTimeout) { |
|
        //normal environments in sane situations
|
return clearTimeout(marker); |
|
} |
|
    // if clearTimeout wasn't available but was defined later
|
if ((cachedClearTimeout === defaultClearTimeout || !cachedClearTimeout) && clearTimeout) { |
|
cachedClearTimeout = clearTimeout; |
|
return clearTimeout(marker); |
|
} |
|
try { |
|
        // when somebody has screwed with setTimeout but there is no IE madness
|
return cachedClearTimeout(marker); |
|
} catch (e){ |
|
try { |
|
// When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally |
|
return cachedClearTimeout.call(null, marker); |
|
} catch (e){ |
|
            // same as above, but for a version of IE that requires the global object as 'this'; hopefully our context is correct, otherwise it will throw a global error.
|
// Some versions of I.E. have different rules for clearTimeout vs setTimeout |
|
return cachedClearTimeout.call(this, marker); |
|
} |
|
} |
|
|
|
|
|
|
|
} |
|
var queue = []; |
|
var draining = false; |
|
var currentQueue; |
|
var queueIndex = -1; |
|
|
|
function cleanUpNextTick() { |
|
if (!draining || !currentQueue) { |
|
return; |
|
} |
|
draining = false; |
|
if (currentQueue.length) { |
|
queue = currentQueue.concat(queue); |
|
} else { |
|
queueIndex = -1; |
|
} |
|
if (queue.length) { |
|
drainQueue(); |
|
} |
|
} |
|
|
|
function drainQueue() { |
|
if (draining) { |
|
return; |
|
} |
|
var timeout = runTimeout(cleanUpNextTick); |
|
draining = true; |
|
|
|
var len = queue.length; |
|
while(len) { |
|
currentQueue = queue; |
|
queue = []; |
|
while (++queueIndex < len) { |
|
if (currentQueue) { |
|
currentQueue[queueIndex].run(); |
|
} |
|
} |
|
queueIndex = -1; |
|
len = queue.length; |
|
} |
|
currentQueue = null; |
|
draining = false; |
|
runClearTimeout(timeout); |
|
} |
|
|
|
process.nextTick = function (fun) { |
|
var args = new Array(arguments.length - 1); |
|
if (arguments.length > 1) { |
|
for (var i = 1; i < arguments.length; i++) { |
|
args[i - 1] = arguments[i]; |
|
} |
|
} |
|
queue.push(new Item(fun, args)); |
|
if (queue.length === 1 && !draining) { |
|
runTimeout(drainQueue); |
|
} |
|
}; |
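// Usage sketch: in this browser shim nextTick is backed by setTimeout(fn, 0),
// so callbacks run asynchronously, but as macrotasks rather than the true
// microtasks Node would use:
//   process.nextTick(function () { console.log('later') })
//   console.log('first')   // "first" is printed before "later"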
|
|
|
// v8 likes predictable objects
|
function Item(fun, array) { |
|
this.fun = fun; |
|
this.array = array; |
|
} |
|
Item.prototype.run = function () { |
|
this.fun.apply(null, this.array); |
|
}; |
|
process.title = 'browser'; |
|
process.browser = true; |
|
process.env = {}; |
|
process.argv = []; |
|
process.version = ''; // empty string to avoid regexp issues |
|
process.versions = {}; |
|
|
|
function noop() {} |
|
|
|
process.on = noop; |
|
process.addListener = noop; |
|
process.once = noop; |
|
process.off = noop; |
|
process.removeListener = noop; |
|
process.removeAllListeners = noop; |
|
process.emit = noop; |
|
process.prependListener = noop; |
|
process.prependOnceListener = noop; |
|
|
|
process.listeners = function (name) { return [] } |
|
|
|
process.binding = function (name) { |
|
throw new Error('process.binding is not supported'); |
|
}; |
|
|
|
process.cwd = function () { return '/' }; |
|
process.chdir = function (dir) { |
|
throw new Error('process.chdir is not supported'); |
|
}; |
|
process.umask = function() { return 0; }; |
|
|
|
},{}],42:[function(require,module,exports){ |
|
module.exports = require('./lib/_stream_duplex.js'); |
|
|
|
},{"./lib/_stream_duplex.js":43}],43:[function(require,module,exports){ |
|
arguments[4][16][0].apply(exports,arguments) |
|
},{"./_stream_readable":45,"./_stream_writable":47,"core-util-is":34,"dup":16,"inherits":37,"process-nextick-args":40}],44:[function(require,module,exports){ |
|
arguments[4][17][0].apply(exports,arguments) |
|
},{"./_stream_transform":46,"core-util-is":34,"dup":17,"inherits":37}],45:[function(require,module,exports){ |
|
arguments[4][18][0].apply(exports,arguments) |
|
},{"./_stream_duplex":43,"./internal/streams/BufferList":48,"./internal/streams/destroy":49,"./internal/streams/stream":50,"_process":41,"core-util-is":34,"dup":18,"events":35,"inherits":37,"isarray":39,"process-nextick-args":40,"safe-buffer":55,"string_decoder/":57,"util":32}],46:[function(require,module,exports){ |
|
arguments[4][19][0].apply(exports,arguments) |
|
},{"./_stream_duplex":43,"core-util-is":34,"dup":19,"inherits":37}],47:[function(require,module,exports){ |
|
arguments[4][20][0].apply(exports,arguments) |
|
},{"./_stream_duplex":43,"./internal/streams/destroy":49,"./internal/streams/stream":50,"_process":41,"core-util-is":34,"dup":20,"inherits":37,"process-nextick-args":40,"safe-buffer":55,"util-deprecate":58}],48:[function(require,module,exports){ |
|
arguments[4][21][0].apply(exports,arguments) |
|
},{"dup":21,"safe-buffer":55}],49:[function(require,module,exports){ |
|
arguments[4][22][0].apply(exports,arguments) |
|
},{"dup":22,"process-nextick-args":40}],50:[function(require,module,exports){ |
|
arguments[4][23][0].apply(exports,arguments) |
|
},{"dup":23,"events":35}],51:[function(require,module,exports){ |
|
module.exports = require('./readable').PassThrough |
|
|
|
},{"./readable":52}],52:[function(require,module,exports){ |
|
arguments[4][24][0].apply(exports,arguments) |
|
},{"./lib/_stream_duplex.js":43,"./lib/_stream_passthrough.js":44,"./lib/_stream_readable.js":45,"./lib/_stream_transform.js":46,"./lib/_stream_writable.js":47,"dup":24}],53:[function(require,module,exports){ |
|
module.exports = require('./readable').Transform |
|
|
|
},{"./readable":52}],54:[function(require,module,exports){ |
|
module.exports = require('./lib/_stream_writable.js'); |
|
|
|
},{"./lib/_stream_writable.js":47}],55:[function(require,module,exports){ |
|
arguments[4][25][0].apply(exports,arguments) |
|
},{"buffer":33,"dup":25}],56:[function(require,module,exports){ |
|
// Copyright Joyent, Inc. and other Node contributors. |
|
// |
|
// Permission is hereby granted, free of charge, to any person obtaining a |
|
// copy of this software and associated documentation files (the |
|
// "Software"), to deal in the Software without restriction, including |
|
// without limitation the rights to use, copy, modify, merge, publish, |
|
// distribute, sublicense, and/or sell copies of the Software, and to permit |
|
// persons to whom the Software is furnished to do so, subject to the |
|
// following conditions: |
|
// |
|
// The above copyright notice and this permission notice shall be included |
|
// in all copies or substantial portions of the Software. |
|
// |
|
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS |
|
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
|
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN |
|
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, |
|
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR |
|
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE |
|
// USE OR OTHER DEALINGS IN THE SOFTWARE. |
|
|
|
module.exports = Stream; |
|
|
|
var EE = require('events').EventEmitter; |
|
var inherits = require('inherits'); |
|
|
|
inherits(Stream, EE); |
|
Stream.Readable = require('readable-stream/readable.js'); |
|
Stream.Writable = require('readable-stream/writable.js'); |
|
Stream.Duplex = require('readable-stream/duplex.js'); |
|
Stream.Transform = require('readable-stream/transform.js'); |
|
Stream.PassThrough = require('readable-stream/passthrough.js'); |
|
|
|
// Backwards-compat with node 0.4.x |
|
Stream.Stream = Stream; |
|
|
|
|
|
|
|
// old-style streams. Note that the pipe method (the only relevant |
|
// part of this class) is overridden in the Readable class. |
|
|
|
function Stream() { |
|
EE.call(this); |
|
} |
|
|
|
Stream.prototype.pipe = function(dest, options) { |
|
var source = this; |
|
|
|
function ondata(chunk) { |
|
if (dest.writable) { |
|
if (false === dest.write(chunk) && source.pause) { |
|
source.pause(); |
|
} |
|
} |
|
} |
|
|
|
source.on('data', ondata); |
|
|
|
function ondrain() { |
|
if (source.readable && source.resume) { |
|
source.resume(); |
|
} |
|
} |
|
|
|
dest.on('drain', ondrain); |
|
|
|
// If the 'end' option is not supplied, dest.end() will be called when |
|
// source gets the 'end' or 'close' events. Only dest.end() once. |
|
if (!dest._isStdio && (!options || options.end !== false)) { |
|
source.on('end', onend); |
|
source.on('close', onclose); |
|
} |
|
|
|
var didOnEnd = false; |
|
function onend() { |
|
if (didOnEnd) return; |
|
didOnEnd = true; |
|
|
|
dest.end(); |
|
} |
|
|
|
|
|
function onclose() { |
|
if (didOnEnd) return; |
|
didOnEnd = true; |
|
|
|
if (typeof dest.destroy === 'function') dest.destroy(); |
|
} |
|
|
|
// don't leave dangling pipes when there are errors. |
|
function onerror(er) { |
|
cleanup(); |
|
if (EE.listenerCount(this, 'error') === 0) { |
|
throw er; // Unhandled stream error in pipe. |
|
} |
|
} |
|
|
|
source.on('error', onerror); |
|
dest.on('error', onerror); |
|
|
|
// remove all the event listeners that were added. |
|
function cleanup() { |
|
source.removeListener('data', ondata); |
|
dest.removeListener('drain', ondrain); |
|
|
|
source.removeListener('end', onend); |
|
source.removeListener('close', onclose); |
|
|
|
source.removeListener('error', onerror); |
|
dest.removeListener('error', onerror); |
|
|
|
source.removeListener('end', cleanup); |
|
source.removeListener('close', cleanup); |
|
|
|
dest.removeListener('close', cleanup); |
|
} |
|
|
|
source.on('end', cleanup); |
|
source.on('close', cleanup); |
|
|
|
dest.on('close', cleanup); |
|
|
|
dest.emit('pipe', source); |
|
|
|
// Allow for unix-like usage: A.pipe(B).pipe(C) |
|
return dest; |
|
}; |
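// Usage sketch: pipe() wires data/drain/end/close/error handlers between two
// old-style streams and returns the destination so calls can be chained, e.g.
//   source.pipe(transform).pipe(destination)
// Backpressure is handled by pausing the source when dest.write() returns
// false and resuming it on the destination's 'drain' event.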
|
|
|
},{"events":35,"inherits":37,"readable-stream/duplex.js":42,"readable-stream/passthrough.js":51,"readable-stream/readable.js":52,"readable-stream/transform.js":53,"readable-stream/writable.js":54}],57:[function(require,module,exports){ |
|
arguments[4][27][0].apply(exports,arguments) |
|
},{"dup":27,"safe-buffer":55}],58:[function(require,module,exports){ |
|
arguments[4][29][0].apply(exports,arguments) |
|
},{"dup":29}]},{},[2,1]);
|
|
|