| require=(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);throw new Error("Cannot find module '"+o+"'")}var f=n[o]={exports:{}};t[o][0].call(f.exports,function(e){var n=t[o][1][e];return s(n?n:e)},f,f.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){ |
| /*! |
| * The buffer module from node.js, for the browser. |
| * |
| * @author Feross Aboukhadijeh <feross@feross.org> <http://feross.org> |
| * @license MIT |
| */ |
| |
| var base64 = require('base64-js') |
| var ieee754 = require('ieee754') |
| |
// Public API. `SlowBuffer` is aliased to `Buffer`: in the browser there is no
// pooled/unpooled distinction, so both names share one implementation.
exports.Buffer = Buffer
exports.SlowBuffer = Buffer
exports.INSPECT_MAX_BYTES = 50
// Kept for node API compatibility; no pooling actually happens in this shim.
Buffer.poolSize = 8192
| |
| /** |
| * If `Buffer._useTypedArrays`: |
| * === true Use Uint8Array implementation (fastest) |
| * === false Use Object implementation (compatible down to IE6) |
| */ |
| Buffer._useTypedArrays = (function () { |
| // Detect if browser supports Typed Arrays. Supported browsers are IE 10+, Firefox 4+, |
| // Chrome 7+, Safari 5.1+, Opera 11.6+, iOS 4.2+. If the browser does not support adding |
| // properties to `Uint8Array` instances, then that's the same as no `Uint8Array` support |
| // because we need to be able to add all the node Buffer API methods. This is an issue |
| // in Firefox 4-29. Now fixed: https://bugzilla.mozilla.org/show_bug.cgi?id=695438 |
| try { |
| var buf = new ArrayBuffer(0) |
| var arr = new Uint8Array(buf) |
| arr.foo = function () { return 42 } |
| return 42 === arr.foo() && |
| typeof arr.subarray === 'function' // Chrome 9-10 lack `subarray` |
| } catch (e) { |
| return false |
| } |
| })() |
| |
| /** |
| * Class: Buffer |
| * ============= |
| * |
| * The Buffer constructor returns instances of `Uint8Array` that are augmented |
| * with function properties for all the node `Buffer` API functions. We use |
| * `Uint8Array` so that square bracket notation works as expected -- it returns |
| * a single octet. |
| * |
| * By augmenting the instances, we can avoid modifying the `Uint8Array` |
| * prototype. |
| */ |
| function Buffer (subject, encoding, noZero) { |
| if (!(this instanceof Buffer)) |
| return new Buffer(subject, encoding, noZero) |
| |
| var type = typeof subject |
| |
| // Workaround: node's base64 implementation allows for non-padded strings |
| // while base64-js does not. |
| if (encoding === 'base64' && type === 'string') { |
| subject = stringtrim(subject) |
| while (subject.length % 4 !== 0) { |
| subject = subject + '=' |
| } |
| } |
| |
| // Find the length |
| var length |
| if (type === 'number') |
| length = coerce(subject) |
| else if (type === 'string') |
| length = Buffer.byteLength(subject, encoding) |
| else if (type === 'object') |
| length = coerce(subject.length) // assume that object is array-like |
| else |
| throw new Error('First argument needs to be a number, array or string.') |
| |
| var buf |
| if (Buffer._useTypedArrays) { |
| // Preferred: Return an augmented `Uint8Array` instance for best performance |
| buf = Buffer._augment(new Uint8Array(length)) |
| } else { |
| // Fallback: Return THIS instance of Buffer (created by `new`) |
| buf = this |
| buf.length = length |
| buf._isBuffer = true |
| } |
| |
| var i |
| if (Buffer._useTypedArrays && typeof subject.byteLength === 'number') { |
| // Speed optimization -- use set if we're copying from a typed array |
| buf._set(subject) |
| } else if (isArrayish(subject)) { |
| // Treat array-ish objects as a byte array |
| if (Buffer.isBuffer(subject)) { |
| for (i = 0; i < length; i++) |
| buf[i] = subject.readUInt8(i) |
| } else { |
| for (i = 0; i < length; i++) |
| buf[i] = ((subject[i] % 256) + 256) % 256 |
| } |
| } else if (type === 'string') { |
| buf.write(subject, 0, encoding) |
| } else if (type === 'number' && !Buffer._useTypedArrays && !noZero) { |
| for (i = 0; i < length; i++) { |
| buf[i] = 0 |
| } |
| } |
| |
| return buf |
| } |
| |
| // STATIC METHODS |
| // ============== |
| |
// True when `encoding` (case-insensitive) names an encoding this module
// understands. Written as list membership instead of a switch.
Buffer.isEncoding = function (encoding) {
  var known = [
    'hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64',
    'raw', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le'
  ]
  var name = String(encoding).toLowerCase()
  for (var i = 0; i < known.length; i++) {
    if (known[i] === name) return true
  }
  return false
}
| |
// A value is a Buffer when it is non-null and carries the `_isBuffer` brand
// that the constructor / `_augment` stamp onto every instance.
Buffer.isBuffer = function (b) {
  if (b === null || b === undefined) return false
  return !!b._isBuffer
}
| |
// Number of bytes `str` occupies once encoded as `encoding` (default 'utf8').
// Note the encoding here is matched case-sensitively, mirroring write().
Buffer.byteLength = function (str, encoding) {
  var s = str.toString()
  var enc = encoding || 'utf8'
  if (enc === 'hex')
    return s.length / 2
  if (enc === 'utf8' || enc === 'utf-8')
    return utf8ToBytes(s).length
  if (enc === 'ascii' || enc === 'binary' || enc === 'raw')
    return s.length
  if (enc === 'base64')
    return base64ToBytes(s).length
  if (enc === 'ucs2' || enc === 'ucs-2' || enc === 'utf16le' || enc === 'utf-16le')
    return s.length * 2
  throw new Error('Unknown encoding')
}
| |
// Join `list` (an array of Buffers) into one Buffer. When `totalLength` is
// omitted it is computed; when given it is trusted (node semantics). A
// single-element list is returned as-is without copying.
Buffer.concat = function (list, totalLength) {
  assert(isArray(list), 'Usage: Buffer.concat(list[, length])')

  if (list.length === 0) return new Buffer(0)
  if (list.length === 1) return list[0]

  var i
  if (totalLength === undefined) {
    totalLength = 0
    for (i = 0; i < list.length; i++) {
      totalLength += list[i].length
    }
  }

  var result = new Buffer(totalLength)
  var offset = 0
  for (i = 0; i < list.length; i++) {
    list[i].copy(result, offset)
    offset += list[i].length
  }
  return result
}
| |
// Lexicographic byte comparison: -1, 0 or 1, suitable for Array#sort.
// The first differing byte decides; otherwise the shorter buffer sorts first.
Buffer.compare = function (a, b) {
  assert(Buffer.isBuffer(a) && Buffer.isBuffer(b), 'Arguments must be Buffers')
  var len = Math.min(a.length, b.length)
  var i = 0
  while (i < len && a[i] === b[i]) {
    i++
  }
  var x = (i < len) ? a[i] : a.length
  var y = (i < len) ? b[i] : b.length
  if (x < y) return -1
  if (x > y) return 1
  return 0
}
| |
| // BUFFER INSTANCE METHODS |
| // ======================= |
| |
// Decode a hex string into `buf` starting at `offset`; returns the number of
// bytes written. `length` is clamped to the space left in the buffer and to
// the number of byte pairs available in the string.
function hexWrite (buf, string, offset, length) {
  offset = Number(offset) || 0
  var remaining = buf.length - offset
  length = !length ? remaining : Math.min(Number(length), remaining)

  // A hex string must contain an even number of digits.
  var strLen = string.length
  assert(strLen % 2 === 0, 'Invalid hex string')

  if (length > strLen / 2) {
    length = strLen / 2
  }

  var written = 0
  while (written < length) {
    var byte = parseInt(string.substr(written * 2, 2), 16)
    assert(!isNaN(byte), 'Invalid hex string')
    buf[offset + written] = byte
    written++
  }
  return written
}
| |
// Encode `string` as UTF-8 and copy the bytes into `buf`; returns bytes written.
function utf8Write (buf, string, offset, length) {
  return blitBuffer(utf8ToBytes(string), buf, offset, length)
}
| |
// Encode `string` one byte per character (& 0xFF truncation); returns bytes written.
function asciiWrite (buf, string, offset, length) {
  return blitBuffer(asciiToBytes(string), buf, offset, length)
}
| |
// 'binary' encoding is handled exactly like 'ascii' in this implementation.
function binaryWrite (buf, string, offset, length) {
  return asciiWrite(buf, string, offset, length)
}
| |
// Decode a base64 string into `buf`; returns the number of bytes written.
function base64Write (buf, string, offset, length) {
  return blitBuffer(base64ToBytes(string), buf, offset, length)
}
| |
// Encode `string` as little-endian UTF-16 bytes; returns bytes written.
function utf16leWrite (buf, string, offset, length) {
  return blitBuffer(utf16leToBytes(string), buf, offset, length)
}
| |
// Write `string` into this buffer at `offset` using `encoding`; returns the
// number of bytes written. Accepts both the modern and the legacy node
// argument orders (detected by whether `offset` is numeric).
Buffer.prototype.write = function (string, offset, length, encoding) {
  // Support both (string, offset, length, encoding)
  // and the legacy (string, encoding, offset, length)
  if (isFinite(offset)) {
    if (!isFinite(length)) {
      // (string, offset, encoding) form: third arg was really the encoding.
      encoding = length
      length = undefined
    }
  } else { // legacy
    var swap = encoding
    encoding = offset
    offset = length
    length = swap
  }

  offset = Number(offset) || 0
  // Clamp `length` so the write can never run past the end of the buffer.
  var remaining = this.length - offset
  if (!length) {
    length = remaining
  } else {
    length = Number(length)
    if (length > remaining) {
      length = remaining
    }
  }
  encoding = String(encoding || 'utf8').toLowerCase()

  // Dispatch to the per-encoding writer.
  var ret
  switch (encoding) {
    case 'hex':
      ret = hexWrite(this, string, offset, length)
      break
    case 'utf8':
    case 'utf-8':
      ret = utf8Write(this, string, offset, length)
      break
    case 'ascii':
      ret = asciiWrite(this, string, offset, length)
      break
    case 'binary':
      ret = binaryWrite(this, string, offset, length)
      break
    case 'base64':
      ret = base64Write(this, string, offset, length)
      break
    case 'ucs2':
    case 'ucs-2':
    case 'utf16le':
    case 'utf-16le':
      ret = utf16leWrite(this, string, offset, length)
      break
    default:
      throw new Error('Unknown encoding')
  }
  return ret
}
| |
// Decode bytes [start, end) of this buffer to a string in `encoding`
// (default 'utf8'; matched case-insensitively).
Buffer.prototype.toString = function (encoding, start, end) {
  var enc = String(encoding || 'utf8').toLowerCase()
  var from = Number(start) || 0
  var to = (end === undefined) ? this.length : Number(end)

  // Fastpath empty strings
  if (to === from) return ''

  if (enc === 'hex')
    return hexSlice(this, from, to)
  if (enc === 'utf8' || enc === 'utf-8')
    return utf8Slice(this, from, to)
  if (enc === 'ascii')
    return asciiSlice(this, from, to)
  if (enc === 'binary')
    return binarySlice(this, from, to)
  if (enc === 'base64')
    return base64Slice(this, from, to)
  if (enc === 'ucs2' || enc === 'ucs-2' || enc === 'utf16le' || enc === 'utf-16le')
    return utf16leSlice(this, from, to)
  throw new Error('Unknown encoding')
}
| |
// Node-compatible JSON form: { type: 'Buffer', data: [bytes...] }.
Buffer.prototype.toJSON = function () {
  var bytes = Array.prototype.slice.call(this._arr || this, 0)
  return { type: 'Buffer', data: bytes }
}
| |
// True when `b` contains exactly the same bytes as this buffer.
Buffer.prototype.equals = function (b) {
  assert(Buffer.isBuffer(b), 'Argument must be a Buffer')
  return Buffer.compare(this, b) === 0
}
| |
// Lexicographic comparison against `b`; see Buffer.compare for the ordering.
Buffer.prototype.compare = function (b) {
  assert(Buffer.isBuffer(b), 'Argument must be a Buffer')
  return Buffer.compare(this, b)
}
| |
// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length)
// Copies bytes [start, end) of this buffer into `target` at `target_start`.
// Note the bounds asserts run on the raw arguments, before the clamping below.
Buffer.prototype.copy = function (target, target_start, start, end) {
  var source = this

  if (!start) start = 0
  if (!end && end !== 0) end = this.length
  if (!target_start) target_start = 0

  // Copy 0 bytes; we're done
  if (end === start) return
  if (target.length === 0 || source.length === 0) return

  // Fatal error conditions
  assert(end >= start, 'sourceEnd < sourceStart')
  assert(target_start >= 0 && target_start < target.length,
      'targetStart out of bounds')
  assert(start >= 0 && start < source.length, 'sourceStart out of bounds')
  assert(end >= 0 && end <= source.length, 'sourceEnd out of bounds')

  // Are we oob? Clamp `end` so the copy fits in both source and target.
  if (end > this.length)
    end = this.length
  if (target.length - target_start < end - start)
    end = target.length - target_start + start

  var len = end - start

  // Byte-by-byte loop wins for short copies; otherwise delegate to the
  // native typed-array `set` on a subarray view.
  if (len < 100 || !Buffer._useTypedArrays) {
    for (var i = 0; i < len; i++) {
      target[i + target_start] = this[i + start]
    }
  } else {
    target._set(this.subarray(start, start + len), target_start)
  }
}
| |
// Base64-encode bytes [start, end); skips the intermediate copy when the
// whole buffer is requested.
function base64Slice (buf, start, end) {
  var whole = (start === 0 && end === buf.length)
  return base64.fromByteArray(whole ? buf : buf.slice(start, end))
}
| |
// Decode bytes [start, end) as UTF-8. Non-ASCII bytes are accumulated as
// percent-escapes and decoded in one shot via decodeUtf8Char, which maps
// malformed sequences to U+FFFD.
function utf8Slice (buf, start, end) {
  end = Math.min(buf.length, end)

  var out = ''
  var pending = '' // percent-escaped bytes of an unfinished multi-byte char

  for (var i = start; i < end; i++) {
    var byte = buf[i]
    if (byte > 0x7F) {
      pending += '%' + byte.toString(16)
    } else {
      out += decodeUtf8Char(pending) + String.fromCharCode(byte)
      pending = ''
    }
  }
  return out + decodeUtf8Char(pending)
}
| |
// Decode bytes [start, end) one byte per character, with no masking
// (bytes >= 0x80 map to the corresponding U+0080..U+00FF code points).
function asciiSlice (buf, start, end) {
  var last = Math.min(buf.length, end)
  var out = ''
  for (var i = start; i < last; i++) {
    out += String.fromCharCode(buf[i])
  }
  return out
}
| |
// 'binary' decoding is identical to 'ascii' here: one byte per character.
function binarySlice (buf, start, end) {
  return asciiSlice(buf, start, end)
}
| |
// Hex-encode bytes [start, end); out-of-range or missing bounds are clamped.
function hexSlice (buf, start, end) {
  var len = buf.length
  if (!start || start < 0) start = 0
  if (!end || end < 0 || end > len) end = len

  var pieces = []
  for (var i = start; i < end; i++) {
    pieces.push(toHex(buf[i]))
  }
  return pieces.join('')
}
| |
// Decode bytes [start, end) as little-endian UTF-16 code units
// (low byte first, high byte second).
function utf16leSlice (buf, start, end) {
  var units = buf.slice(start, end)
  var out = ''
  for (var i = 0; i < units.length; i += 2) {
    out += String.fromCharCode(units[i] + units[i + 1] * 256)
  }
  return out
}
| |
// Return [start, end) of this buffer: a zero-copy view in the typed-array
// path, a byte-by-byte copy in the object fallback. Negative indexes count
// from the end, like Array.prototype.slice.
Buffer.prototype.slice = function (start, end) {
  var len = this.length
  var from = clamp(start, len, 0)
  var to = clamp(end, len, len)

  if (Buffer._useTypedArrays) {
    return Buffer._augment(this.subarray(from, to))
  }

  // Fallback: copy into a fresh Buffer; noZero=true because every byte is
  // overwritten immediately below.
  var out = new Buffer(to - from, undefined, true)
  for (var i = from; i < to; i++) {
    out[i - from] = this[i]
  }
  return out
}
| |
// `get` will be removed in Node 0.13+; kept (with a console warning) for
// backward compatibility. Use `buf[offset]` instead.
Buffer.prototype.get = function (offset) {
  console.log('.get() is deprecated. Access using array indexes instead.')
  return this.readUInt8(offset)
}
| |
// `set` will be removed in Node 0.13+; kept (with a console warning) for
// backward compatibility. Use `buf[offset] = v` instead.
Buffer.prototype.set = function (v, offset) {
  console.log('.set() is deprecated. Access using array indexes instead.')
  return this.writeUInt8(v, offset)
}
| |
// Read one unsigned byte. With `noAssert`, an out-of-range read yields
// undefined instead of throwing.
Buffer.prototype.readUInt8 = function (offset, noAssert) {
  if (!noAssert) {
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset < this.length, 'Trying to read beyond buffer length')
  }
  return (offset >= this.length) ? undefined : this[offset]
}
| |
// Read two bytes as an unsigned 16-bit int. With `noAssert`, reads that run
// off the end of the buffer are truncated: the missing byte contributes zero
// (old node partial-read behavior).
function readUInt16 (buf, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 1 < buf.length, 'Trying to read beyond buffer length')
  }

  var len = buf.length
  if (offset >= len) return undefined

  var hasSecond = offset + 1 < len
  if (littleEndian) {
    var lo = buf[offset]
    return hasSecond ? (lo | (buf[offset + 1] << 8)) : lo
  }
  var hi = buf[offset] << 8
  return hasSecond ? (hi | buf[offset + 1]) : hi
}
| |
// Byte-order convenience wrappers around readUInt16.
Buffer.prototype.readUInt16LE = function (offset, noAssert) {
  return readUInt16(this, offset, true, noAssert)
}

Buffer.prototype.readUInt16BE = function (offset, noAssert) {
  return readUInt16(this, offset, false, noAssert)
}
| |
// Read four bytes as an unsigned 32-bit int. With `noAssert`, reads near the
// end of the buffer are truncated: missing bytes are simply not folded in
// (old node partial-read behavior).
function readUInt32 (buf, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 3 < buf.length, 'Trying to read beyond buffer length')
  }

  var len = buf.length
  if (offset >= len)
    return

  var val
  if (littleEndian) {
    if (offset + 2 < len)
      val = buf[offset + 2] << 16
    if (offset + 1 < len)
      val |= buf[offset + 1] << 8
    val |= buf[offset]
    // The top byte is *added* after `>>> 0` (not or-ed) so the result stays
    // a positive number even when bit 31 is set.
    if (offset + 3 < len)
      val = val + (buf[offset + 3] << 24 >>> 0)
  } else {
    if (offset + 1 < len)
      val = buf[offset + 1] << 16
    if (offset + 2 < len)
      val |= buf[offset + 2] << 8
    if (offset + 3 < len)
      val |= buf[offset + 3]
    val = val + (buf[offset] << 24 >>> 0)
  }
  return val
}
| |
// Byte-order convenience wrappers around readUInt32.
Buffer.prototype.readUInt32LE = function (offset, noAssert) {
  return readUInt32(this, offset, true, noAssert)
}

Buffer.prototype.readUInt32BE = function (offset, noAssert) {
  return readUInt32(this, offset, false, noAssert)
}
| |
// Read one byte as a signed int (two's complement).
Buffer.prototype.readInt8 = function (offset, noAssert) {
  if (!noAssert) {
    assert(offset !== undefined && offset !== null,
        'missing offset')
    assert(offset < this.length, 'Trying to read beyond buffer length')
  }

  if (offset >= this.length) return undefined

  var byte = this[offset]
  // Sign bit set -> map 0x80..0xff onto -0x80..-0x01.
  return (byte & 0x80) ? -((0xff - byte) + 1) : byte
}
| |
// Read two bytes as a signed 16-bit int, built on the unsigned read.
function readInt16 (buf, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 1 < buf.length, 'Trying to read beyond buffer length')
  }

  if (offset >= buf.length) return undefined

  var val = readUInt16(buf, offset, littleEndian, true)
  // Two's complement: high bit set means negative.
  return (val & 0x8000) ? -((0xffff - val) + 1) : val
}
| |
// Byte-order convenience wrappers around readInt16.
Buffer.prototype.readInt16LE = function (offset, noAssert) {
  return readInt16(this, offset, true, noAssert)
}

Buffer.prototype.readInt16BE = function (offset, noAssert) {
  return readInt16(this, offset, false, noAssert)
}
| |
// Read four bytes as a signed 32-bit int, built on the unsigned read.
function readInt32 (buf, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 3 < buf.length, 'Trying to read beyond buffer length')
  }

  if (offset >= buf.length) return undefined

  var val = readUInt32(buf, offset, littleEndian, true)
  // Two's complement: bit 31 set means negative.
  return (val & 0x80000000) ? -((0xffffffff - val) + 1) : val
}
| |
// Byte-order convenience wrappers around readInt32.
Buffer.prototype.readInt32LE = function (offset, noAssert) {
  return readInt32(this, offset, true, noAssert)
}

Buffer.prototype.readInt32BE = function (offset, noAssert) {
  return readInt32(this, offset, false, noAssert)
}
| |
// Read an IEEE-754 single-precision float (23 mantissa bits, 4 bytes).
// The decode itself is delegated to the ieee754 helper.
function readFloat (buf, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset + 3 < buf.length, 'Trying to read beyond buffer length')
  }

  return ieee754.read(buf, offset, littleEndian, 23, 4)
}
| |
// Byte-order convenience wrappers around readFloat.
Buffer.prototype.readFloatLE = function (offset, noAssert) {
  return readFloat(this, offset, true, noAssert)
}

Buffer.prototype.readFloatBE = function (offset, noAssert) {
  return readFloat(this, offset, false, noAssert)
}
| |
// Read an IEEE-754 double-precision float (52 mantissa bits, 8 bytes).
// The decode itself is delegated to the ieee754 helper.
function readDouble (buf, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset + 7 < buf.length, 'Trying to read beyond buffer length')
  }

  return ieee754.read(buf, offset, littleEndian, 52, 8)
}
| |
// Byte-order convenience wrappers around readDouble.
Buffer.prototype.readDoubleLE = function (offset, noAssert) {
  return readDouble(this, offset, true, noAssert)
}

Buffer.prototype.readDoubleBE = function (offset, noAssert) {
  return readDouble(this, offset, false, noAssert)
}
| |
// Write one unsigned byte; returns the offset just past the write. With
// `noAssert`, an out-of-range write is silently dropped.
Buffer.prototype.writeUInt8 = function (value, offset, noAssert) {
  if (!noAssert) {
    assert(value !== undefined && value !== null, 'missing value')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset < this.length, 'trying to write beyond buffer length')
    verifuint(value, 0xff)
  }

  if (offset >= this.length) return undefined

  this[offset] = value
  return offset + 1
}
| |
// Write an unsigned 16-bit int. With `noAssert`, bytes that would land past
// the end of the buffer are silently dropped. Returns the offset just past
// the full 2-byte field.
function writeUInt16 (buf, value, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(value !== undefined && value !== null, 'missing value')
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 1 < buf.length, 'trying to write beyond buffer length')
    verifuint(value, 0xffff)
  }

  var len = buf.length
  if (offset >= len) return undefined

  var writable = Math.min(len - offset, 2)
  for (var i = 0; i < writable; i++) {
    // Byte i holds bits 8*i of the value (LE) or 8*(1-i) (BE).
    var shift = (littleEndian ? i : 1 - i) * 8
    buf[offset + i] = (value >>> shift) & 0xff
  }
  return offset + 2
}
| |
// Byte-order convenience wrappers around writeUInt16.
Buffer.prototype.writeUInt16LE = function (value, offset, noAssert) {
  return writeUInt16(this, value, offset, true, noAssert)
}

Buffer.prototype.writeUInt16BE = function (value, offset, noAssert) {
  return writeUInt16(this, value, offset, false, noAssert)
}
| |
// Write an unsigned 32-bit int. With `noAssert`, bytes that would land past
// the end of the buffer are silently dropped. Returns the offset just past
// the full 4-byte field.
function writeUInt32 (buf, value, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(value !== undefined && value !== null, 'missing value')
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 3 < buf.length, 'trying to write beyond buffer length')
    verifuint(value, 0xffffffff)
  }

  var len = buf.length
  if (offset >= len) return undefined

  var writable = Math.min(len - offset, 4)
  for (var i = 0; i < writable; i++) {
    // Byte i holds bits 8*i of the value (LE) or 8*(3-i) (BE).
    buf[offset + i] = (value >>> ((littleEndian ? i : 3 - i) * 8)) & 0xff
  }
  return offset + 4
}
| |
// Byte-order convenience wrappers around writeUInt32.
Buffer.prototype.writeUInt32LE = function (value, offset, noAssert) {
  return writeUInt32(this, value, offset, true, noAssert)
}

Buffer.prototype.writeUInt32BE = function (value, offset, noAssert) {
  return writeUInt32(this, value, offset, false, noAssert)
}
| |
// Write a signed byte by converting to its two's-complement unsigned form.
// Returns the offset just past the write.
Buffer.prototype.writeInt8 = function (value, offset, noAssert) {
  if (!noAssert) {
    assert(value !== undefined && value !== null, 'missing value')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset < this.length, 'Trying to write beyond buffer length')
    verifsint(value, 0x7f, -0x80)
  }

  if (offset >= this.length) return undefined

  var unsigned = (value >= 0) ? value : 0xff + value + 1
  this.writeUInt8(unsigned, offset, noAssert)
  return offset + 1
}
| |
// Write a signed 16-bit int via its two's-complement unsigned form.
function writeInt16 (buf, value, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(value !== undefined && value !== null, 'missing value')
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 1 < buf.length, 'Trying to write beyond buffer length')
    verifsint(value, 0x7fff, -0x8000)
  }

  if (offset >= buf.length) return undefined

  var unsigned = (value >= 0) ? value : 0xffff + value + 1
  writeUInt16(buf, unsigned, offset, littleEndian, noAssert)
  return offset + 2
}
| |
// Byte-order convenience wrappers around writeInt16.
Buffer.prototype.writeInt16LE = function (value, offset, noAssert) {
  return writeInt16(this, value, offset, true, noAssert)
}

Buffer.prototype.writeInt16BE = function (value, offset, noAssert) {
  return writeInt16(this, value, offset, false, noAssert)
}
| |
// Write a signed 32-bit int via its two's-complement unsigned form.
function writeInt32 (buf, value, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(value !== undefined && value !== null, 'missing value')
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 3 < buf.length, 'Trying to write beyond buffer length')
    verifsint(value, 0x7fffffff, -0x80000000)
  }

  if (offset >= buf.length) return undefined

  var unsigned = (value >= 0) ? value : 0xffffffff + value + 1
  writeUInt32(buf, unsigned, offset, littleEndian, noAssert)
  return offset + 4
}
| |
// Byte-order convenience wrappers around writeInt32.
Buffer.prototype.writeInt32LE = function (value, offset, noAssert) {
  return writeInt32(this, value, offset, true, noAssert)
}

Buffer.prototype.writeInt32BE = function (value, offset, noAssert) {
  return writeInt32(this, value, offset, false, noAssert)
}
| |
// Write an IEEE-754 single-precision float; returns offset + 4.
// Encoding is delegated to the ieee754 helper.
function writeFloat (buf, value, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(value !== undefined && value !== null, 'missing value')
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 3 < buf.length, 'Trying to write beyond buffer length')
    // Bounds are the largest finite single-precision magnitudes.
    verifIEEE754(value, 3.4028234663852886e+38, -3.4028234663852886e+38)
  }

  var len = buf.length
  if (offset >= len)
    return

  ieee754.write(buf, value, offset, littleEndian, 23, 4)
  return offset + 4
}
| |
// Byte-order convenience wrappers around writeFloat.
Buffer.prototype.writeFloatLE = function (value, offset, noAssert) {
  return writeFloat(this, value, offset, true, noAssert)
}

Buffer.prototype.writeFloatBE = function (value, offset, noAssert) {
  return writeFloat(this, value, offset, false, noAssert)
}
| |
// Write an IEEE-754 double-precision float; returns offset + 8.
// Encoding is delegated to the ieee754 helper.
function writeDouble (buf, value, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(value !== undefined && value !== null, 'missing value')
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 7 < buf.length,
        'Trying to write beyond buffer length')
    // Bounds are the largest finite double-precision magnitudes.
    verifIEEE754(value, 1.7976931348623157E+308, -1.7976931348623157E+308)
  }

  var len = buf.length
  if (offset >= len)
    return

  ieee754.write(buf, value, offset, littleEndian, 52, 8)
  return offset + 8
}
| |
// Byte-order convenience wrappers around writeDouble.
Buffer.prototype.writeDoubleLE = function (value, offset, noAssert) {
  return writeDouble(this, value, offset, true, noAssert)
}

Buffer.prototype.writeDoubleBE = function (value, offset, noAssert) {
  return writeDouble(this, value, offset, false, noAssert)
}
| |
// fill(value, start=0, end=buffer.length)
// Fill [start, end) with a byte value, or with the UTF-8 bytes of a string
// repeated as needed. Returns `this` for chaining.
Buffer.prototype.fill = function (value, start, end) {
  if (!value) value = 0
  if (!start) start = 0
  if (!end) end = this.length

  assert(end >= start, 'end < start')

  // Fill 0 bytes; we're done
  if (end === start) return
  if (this.length === 0) return

  assert(start >= 0 && start < this.length, 'start out of bounds')
  assert(end >= 0 && end <= this.length, 'end out of bounds')

  var i
  if (typeof value === 'number') {
    for (i = start; i < end; i++) {
      this[i] = value
    }
  } else {
    var bytes = utf8ToBytes(value.toString())
    var len = bytes.length
    // NOTE(review): the pattern is indexed with `i % len`, not
    // `(i - start) % len`, so with a non-zero `start` the repeated string is
    // phase-shifted relative to `start`. Looks unintended -- confirm against
    // node's Buffer#fill semantics before changing.
    for (i = start; i < end; i++) {
      this[i] = bytes[i % len]
    }
  }

  return this
}
| |
// Debug representation, e.g. '<Buffer de ad be ef>'. Output is cut off with
// '...' once INSPECT_MAX_BYTES entries have been emitted.
Buffer.prototype.inspect = function () {
  var parts = []
  var total = this.length
  for (var i = 0; i < total; i++) {
    parts[i] = toHex(this[i])
    if (i === exports.INSPECT_MAX_BYTES) {
      parts[i + 1] = '...'
      break
    }
  }
  return '<Buffer ' + parts.join(' ') + '>'
}
| |
| /** |
| * Creates a new `ArrayBuffer` with the *copied* memory of the buffer instance. |
| * Added in Node 0.12. Only available in browsers that support ArrayBuffer. |
| */ |
| Buffer.prototype.toArrayBuffer = function () { |
| if (typeof Uint8Array !== 'undefined') { |
| if (Buffer._useTypedArrays) { |
| return (new Buffer(this)).buffer |
| } else { |
| var buf = new Uint8Array(this.length) |
| for (var i = 0, len = buf.length; i < len; i += 1) { |
| buf[i] = this[i] |
| } |
| return buf.buffer |
| } |
| } else { |
| throw new Error('Buffer.toArrayBuffer not supported in this browser') |
| } |
| } |
| |
| // HELPER FUNCTIONS |
| // ================ |
| |
var BP = Buffer.prototype // shorthand used by Buffer._augment below
| |
| /** |
| * Augment a Uint8Array *instance* (not the Uint8Array class!) with Buffer methods |
| */ |
| Buffer._augment = function (arr) { |
| arr._isBuffer = true |
| |
| // save reference to original Uint8Array get/set methods before overwriting |
| arr._get = arr.get |
| arr._set = arr.set |
| |
| // deprecated, will be removed in node 0.13+ |
| arr.get = BP.get |
| arr.set = BP.set |
| |
| arr.write = BP.write |
| arr.toString = BP.toString |
| arr.toLocaleString = BP.toString |
| arr.toJSON = BP.toJSON |
| arr.equals = BP.equals |
| arr.compare = BP.compare |
| arr.copy = BP.copy |
| arr.slice = BP.slice |
| arr.readUInt8 = BP.readUInt8 |
| arr.readUInt16LE = BP.readUInt16LE |
| arr.readUInt16BE = BP.readUInt16BE |
| arr.readUInt32LE = BP.readUInt32LE |
| arr.readUInt32BE = BP.readUInt32BE |
| arr.readInt8 = BP.readInt8 |
| arr.readInt16LE = BP.readInt16LE |
| arr.readInt16BE = BP.readInt16BE |
| arr.readInt32LE = BP.readInt32LE |
| arr.readInt32BE = BP.readInt32BE |
| arr.readFloatLE = BP.readFloatLE |
| arr.readFloatBE = BP.readFloatBE |
| arr.readDoubleLE = BP.readDoubleLE |
| arr.readDoubleBE = BP.readDoubleBE |
| arr.writeUInt8 = BP.writeUInt8 |
| arr.writeUInt16LE = BP.writeUInt16LE |
| arr.writeUInt16BE = BP.writeUInt16BE |
| arr.writeUInt32LE = BP.writeUInt32LE |
| arr.writeUInt32BE = BP.writeUInt32BE |
| arr.writeInt8 = BP.writeInt8 |
| arr.writeInt16LE = BP.writeInt16LE |
| arr.writeInt16BE = BP.writeInt16BE |
| arr.writeInt32LE = BP.writeInt32LE |
| arr.writeInt32BE = BP.writeInt32BE |
| arr.writeFloatLE = BP.writeFloatLE |
| arr.writeFloatBE = BP.writeFloatBE |
| arr.writeDoubleLE = BP.writeDoubleLE |
| arr.writeDoubleBE = BP.writeDoubleBE |
| arr.fill = BP.fill |
| arr.inspect = BP.inspect |
| arr.toArrayBuffer = BP.toArrayBuffer |
| |
| return arr |
| } |
| |
// String#trim with a regex fallback for pre-ES5 engines.
function stringtrim (str) {
  return str.trim ? str.trim() : str.replace(/^\s+|\s+$/g, '')
}
| |
// Normalize a slice() index: non-numbers become `defaultValue`, negatives
// count from the end, fractions truncate toward zero, and the result is
// clamped into [0, len].
function clamp (index, len, defaultValue) {
  if (typeof index !== 'number') return defaultValue
  var i = ~~index // coerce to integer (truncate toward zero)
  if (i >= len) return len
  if (i >= 0) return i
  i += len
  return i >= 0 ? i : 0
}
| |
// Coerce a length to a non-negative integer: numify, round up fractions,
// then `~~` to turn NaN into 0; negatives clamp to 0.
function coerce (length) {
  var n = ~~Math.ceil(+length)
  return n < 0 ? 0 : n
}
| |
// Array.isArray, with the classic toString fallback for old engines.
function isArray (subject) {
  if (Array.isArray) return Array.isArray(subject)
  return Object.prototype.toString.call(subject) === '[object Array]'
}
| |
// Loosely "byte-array-like": a real array, a Buffer, or any object with a
// numeric `length`. Note the raw ||/&& chain can return a falsy non-boolean
// (e.g. null for a null subject) -- callers only use it in boolean context.
function isArrayish (subject) {
  return isArray(subject) || Buffer.isBuffer(subject) ||
      subject && typeof subject === 'object' &&
      typeof subject.length === 'number'
}
| |
// Two-digit lowercase hex for a byte value (zero-padded below 0x10).
function toHex (n) {
  return (n < 16 ? '0' : '') + n.toString(16)
}
| |
// Encode a JS string as an array of UTF-8 bytes. ASCII is emitted directly;
// everything else goes through encodeURIComponent, whose %XX escapes are the
// UTF-8 bytes we want. Surrogate pairs are sliced out as a unit so the pair
// is encoded together.
//
// Fix: encodeURIComponent throws URIError on *lone* surrogates, and a lone
// low surrogate also swallowed the following code unit. Lone surrogates now
// emit the UTF-8 bytes of U+FFFD (EF BF BD) and consume exactly one code
// unit, matching node's Buffer behavior; valid input is encoded unchanged.
function utf8ToBytes (str) {
  var byteArray = []
  for (var i = 0; i < str.length; i++) {
    var b = str.charCodeAt(i)
    if (b <= 0x7F) {
      byteArray.push(b)
    } else {
      var start = i
      if (b >= 0xD800 && b <= 0xDFFF) i++ // include the pair's second unit
      var h
      try {
        h = encodeURIComponent(str.slice(start, i + 1)).substr(1).split('%')
      } catch (e) {
        // Lone surrogate: emit U+FFFD and re-process from the next unit.
        byteArray.push(0xEF, 0xBF, 0xBD)
        i = start
        continue
      }
      for (var j = 0; j < h.length; j++) {
        byteArray.push(parseInt(h[j], 16))
      }
    }
  }
  return byteArray
}
| |
// One byte per UTF-16 code unit, truncated with & 0xFF.
// (Node truncates with & 0xFF here rather than masking to 7 bits.)
function asciiToBytes (str) {
  var bytes = new Array(str.length)
  for (var i = 0; i < str.length; i++) {
    bytes[i] = str.charCodeAt(i) & 0xFF
  }
  return bytes
}
| |
// Little-endian UTF-16: each code unit becomes [low byte, high byte].
function utf16leToBytes (str) {
  var bytes = []
  for (var i = 0; i < str.length; i++) {
    var code = str.charCodeAt(i)
    bytes.push(code & 0xFF, code >> 8)
  }
  return bytes
}
| |
// Decode a (padded) base64 string to a byte array via base64-js.
function base64ToBytes (str) {
  return base64.toByteArray(str)
}
| |
// Copy up to `length` bytes of `src` into `dst` starting at `offset`,
// stopping early at the end of either array. Returns bytes copied.
function blitBuffer (src, dst, offset, length) {
  var copied = 0
  while (copied < length) {
    if (copied + offset >= dst.length || copied >= src.length) break
    dst[copied + offset] = src[copied]
    copied++
  }
  return copied
}
| |
// Decode one percent-encoded UTF-8 sequence; invalid sequences map to
// the Unicode replacement character instead of throwing.
function decodeUtf8Char (str) {
  try {
    return decodeURIComponent(str)
  } catch (err) {
    return '\uFFFD' // UTF 8 invalid char
  }
}
| |
| /* |
| * We have to make sure that the value is a valid integer. This means that it |
| * is non-negative. It has no fractional component and that it does not |
| * exceed the maximum allowed value. |
| */ |
| function verifuint (value, max) { |
| assert(typeof value === 'number', 'cannot write a non-number as a number') |
| assert(value >= 0, 'specified a negative value for writing an unsigned value') |
| assert(value <= max, 'value is larger than maximum value for type') |
| assert(Math.floor(value) === value, 'value has a fractional component') |
| } |
| |
// Validate a signed integer before writing: numeric, within [min, max],
// and integral. Throws via assert() with a descriptive message otherwise.
function verifsint (value, max, min) {
  assert(typeof value === 'number', 'cannot write a non-number as a number')
  assert(value <= max, 'value larger than maximum allowed value')
  assert(value >= min, 'value smaller than minimum allowed value')
  assert(Math.floor(value) === value, 'value has a fractional component')
}
| |
// Validate a float before writing: numeric and within [min, max].
// (No integrality check -- fractional components are expected here.)
function verifIEEE754 (value, max, min) {
  assert(typeof value === 'number', 'cannot write a non-number as a number')
  assert(value <= max, 'value larger than maximum allowed value')
  assert(value >= min, 'value smaller than minimum allowed value')
}
| |
// Minimal assertion helper: throw an Error with `message` (or a default)
// when `test` is falsy.
function assert (test, message) {
  if (!test) throw new Error(message || 'Failed assertion')
}
| |
| },{"base64-js":2,"ieee754":3}],2:[function(require,module,exports){ |
| var lookup = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'; |
| |
| ;(function (exports) { |
| 'use strict'; |
| |
| var Arr = (typeof Uint8Array !== 'undefined') |
| ? Uint8Array |
| : Array |
| |
| var PLUS = '+'.charCodeAt(0) |
| var SLASH = '/'.charCodeAt(0) |
| var NUMBER = '0'.charCodeAt(0) |
| var LOWER = 'a'.charCodeAt(0) |
| var UPPER = 'A'.charCodeAt(0) |
| |
// Map one base64 character to its 6-bit value. Character codes are used
// directly: '+' 43, '/' 47, '0' 48, 'A' 65, 'a' 97.
// Returns -1 for codes below '0'; codes past 'z' fall through (undefined),
// matching the original table-free decoder.
function decode (elt) {
  var code = elt.charCodeAt(0)
  if (code === 43) return 62 // '+'
  if (code === 47) return 63 // '/'
  if (code < 48) return -1 // no match
  if (code < 58) return code - 48 + 52 // '0'-'9' -> 52..61
  if (code < 91) return code - 65 // 'A'-'Z' -> 0..25
  if (code < 123) return code - 97 + 26 // 'a'-'z' -> 26..51
}
| |
// Decode a base64 string into an Arr (Uint8Array when available).
// Throws when the length is not a multiple of 4.
function b64ToByteArray (b64) {
  var len = b64.length

  if (len % 4 > 0) {
    throw new Error('Invalid string. Length must be a multiple of 4')
  }

  // Trailing '=' padding: two means the final quantum holds 1 byte,
  // one means it holds 2 bytes.
  var placeHolders = '=' === b64.charAt(len - 2) ? 2 : '=' === b64.charAt(len - 1) ? 1 : 0

  // Each 4-char group encodes 3 bytes, minus the padded-out bytes.
  var arr = new Arr(len * 3 / 4 - placeHolders)

  // The main loop handles only complete (unpadded) groups.
  var l = placeHolders > 0 ? len - 4 : len

  var L = 0
  var i, tmp

  for (i = 0; i < l; i += 4) {
    tmp = (decode(b64.charAt(i)) << 18) |
      (decode(b64.charAt(i + 1)) << 12) |
      (decode(b64.charAt(i + 2)) << 6) |
      decode(b64.charAt(i + 3))
    arr[L++] = (tmp >> 16) & 0xFF
    arr[L++] = (tmp >> 8) & 0xFF
    arr[L++] = tmp & 0xFF
  }

  // Handle the final, padded group.
  if (placeHolders === 2) {
    tmp = (decode(b64.charAt(i)) << 2) | (decode(b64.charAt(i + 1)) >> 4)
    arr[L++] = tmp & 0xFF
  } else if (placeHolders === 1) {
    tmp = (decode(b64.charAt(i)) << 10) | (decode(b64.charAt(i + 1)) << 4) | (decode(b64.charAt(i + 2)) >> 2)
    arr[L++] = (tmp >> 8) & 0xFF
    arr[L++] = tmp & 0xFF
  }

  return arr
}
| |
// Encode a byte array as a base64 string with '=' padding.
function uint8ToBase64 (uint8) {
  // Base64 alphabet (same table as the module-level `lookup` string).
  var ALPHABET = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
  var extraBytes = uint8.length % 3 // bytes left over after whole triplets
  var output = ''
  var i, temp

  function encode (num) {
    return ALPHABET.charAt(num)
  }

  // Encode complete 3-byte groups as 4 characters each.
  var mainLength = uint8.length - extraBytes
  for (i = 0; i < mainLength; i += 3) {
    temp = (uint8[i] << 16) + (uint8[i + 1] << 8) + uint8[i + 2]
    output += encode((temp >> 18) & 0x3F) + encode((temp >> 12) & 0x3F) +
      encode((temp >> 6) & 0x3F) + encode(temp & 0x3F)
  }

  // Encode the trailing 1 or 2 bytes and pad with '='.
  if (extraBytes === 1) {
    temp = uint8[uint8.length - 1]
    output += encode(temp >> 2)
    output += encode((temp << 4) & 0x3F)
    output += '=='
  } else if (extraBytes === 2) {
    temp = (uint8[uint8.length - 2] << 8) + uint8[uint8.length - 1]
    output += encode(temp >> 10)
    output += encode((temp >> 4) & 0x3F)
    output += encode((temp << 2) & 0x3F)
    output += '='
  }

  return output
}
| |
| exports.toByteArray = b64ToByteArray |
| exports.fromByteArray = uint8ToBase64 |
| }(typeof exports === 'undefined' ? (this.base64js = {}) : exports)) |
| |
| },{}],3:[function(require,module,exports){ |
// Read an IEEE 754 float from `buffer` starting at `offset`.
//   isLE   - byte order (true = little-endian)
//   mLen   - mantissa length in bits (23 for float32, 52 for float64)
//   nBytes - total byte width of the value (4 or 8)
exports.read = function(buffer, offset, isLE, mLen, nBytes) {
  var e, m,
      eLen = nBytes * 8 - mLen - 1,  // exponent width in bits
      eMax = (1 << eLen) - 1,        // all-ones exponent (Inf/NaN marker)
      eBias = eMax >> 1,             // exponent bias
      nBits = -7,
      i = isLE ? (nBytes - 1) : 0,   // start at the sign/exponent end
      d = isLE ? -1 : 1,             // traversal direction
      s = buffer[offset + i];        // first byte carries the sign bit

  i += d;

  // Extract the exponent bits from the leading byte(s).
  e = s & ((1 << (-nBits)) - 1);
  s >>= (-nBits);
  nBits += eLen;
  for (; nBits > 0; e = e * 256 + buffer[offset + i], i += d, nBits -= 8);

  // Accumulate the mantissa from the remaining bytes.
  m = e & ((1 << (-nBits)) - 1);
  e >>= (-nBits);
  nBits += mLen;
  for (; nBits > 0; m = m * 256 + buffer[offset + i], i += d, nBits -= 8);

  if (e === 0) {
    // Subnormal: implicit exponent of (1 - bias), no hidden leading 1 bit.
    e = 1 - eBias;
  } else if (e === eMax) {
    // All-ones exponent encodes NaN (mantissa != 0) or signed Infinity.
    return m ? NaN : ((s ? -1 : 1) * Infinity);
  } else {
    // Normal number: restore the hidden leading 1 and unbias the exponent.
    m = m + Math.pow(2, mLen);
    e = e - eBias;
  }
  return (s ? -1 : 1) * m * Math.pow(2, e - mLen);
};
| |
// Write the IEEE 754 encoding of `value` into `buffer` at `offset`.
//   isLE   - byte order; mLen - mantissa bits; nBytes - total byte width.
exports.write = function(buffer, value, offset, isLE, mLen, nBytes) {
  var e, m, c,
      eLen = nBytes * 8 - mLen - 1,  // exponent width in bits
      eMax = (1 << eLen) - 1,        // all-ones exponent (Inf/NaN)
      eBias = eMax >> 1,             // exponent bias
      // rounding compensation applied only for float32 (mLen === 23)
      rt = (mLen === 23 ? Math.pow(2, -24) - Math.pow(2, -77) : 0),
      i = isLE ? 0 : (nBytes - 1),   // start at the mantissa end
      d = isLE ? 1 : -1,             // traversal direction
      // (value === 0 && 1 / value < 0) detects negative zero
      s = value < 0 || (value === 0 && 1 / value < 0) ? 1 : 0;

  value = Math.abs(value);

  if (isNaN(value) || value === Infinity) {
    // NaN/Infinity: all-ones exponent; NaN keeps a nonzero mantissa.
    m = isNaN(value) ? 1 : 0;
    e = eMax;
  } else {
    // Split into exponent e and a significand normalized into [1, 2).
    e = Math.floor(Math.log(value) / Math.LN2);
    if (value * (c = Math.pow(2, -e)) < 1) {
      e--;
      c *= 2;
    }
    // Apply rounding compensation before extracting mantissa bits.
    if (e + eBias >= 1) {
      value += rt / c;
    } else {
      value += rt * Math.pow(2, 1 - eBias);
    }
    // Compensation may have pushed the significand to 2; renormalize.
    if (value * c >= 2) {
      e++;
      c /= 2;
    }

    if (e + eBias >= eMax) {
      // Overflow: encode Infinity.
      m = 0;
      e = eMax;
    } else if (e + eBias >= 1) {
      // Normal number: drop the implicit leading 1 bit.
      m = (value * c - 1) * Math.pow(2, mLen);
      e = e + eBias;
    } else {
      // Subnormal: exponent field is 0, mantissa alone carries the value.
      m = value * Math.pow(2, eBias - 1) * Math.pow(2, mLen);
      e = 0;
    }
  }

  // Emit the mantissa bytes...
  for (; mLen >= 8; buffer[offset + i] = m & 0xff, i += d, m /= 256, mLen -= 8);

  // ...then pack the exponent alongside the remaining mantissa bits.
  e = (e << mLen) | m;
  eLen += mLen;
  for (; eLen > 0; buffer[offset + i] = e & 0xff, i += d, e /= 256, eLen -= 8);

  // Finally OR the sign bit into the last byte written.
  buffer[offset + i - d] |= s * 128;
};
| |
| },{}],4:[function(require,module,exports){ |
| // Copyright Joyent, Inc. and other Node contributors. |
| // |
| // Permission is hereby granted, free of charge, to any person obtaining a |
| // copy of this software and associated documentation files (the |
| // "Software"), to deal in the Software without restriction, including |
| // without limitation the rights to use, copy, modify, merge, publish, |
| // distribute, sublicense, and/or sell copies of the Software, and to permit |
| // persons to whom the Software is furnished to do so, subject to the |
| // following conditions: |
| // |
| // The above copyright notice and this permission notice shall be included |
| // in all copies or substantial portions of the Software. |
| // |
| // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS |
| // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
| // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN |
| // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, |
| // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR |
| // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE |
| // USE OR OTHER DEALINGS IN THE SOFTWARE. |
| |
// EventEmitter constructor. Reuses any listener table already present on
// `this`, so invoking it on an existing object (e.g. Stream.call(this))
// does not wipe registered listeners.
function EventEmitter() {
  this._events = this._events || {};
  this._maxListeners = this._maxListeners || undefined;
}
| module.exports = EventEmitter; |
| |
// Backwards-compat with node 0.10.x
EventEmitter.EventEmitter = EventEmitter;

// Prototype defaults; real per-instance values are created lazily.
EventEmitter.prototype._events = undefined;
EventEmitter.prototype._maxListeners = undefined;

// By default EventEmitters will print a warning if more than 10 listeners are
// added to it. This is a useful default which helps finding memory leaks.
EventEmitter.defaultMaxListeners = 10;
| |
// Obviously not all Emitters should be limited to 10. This function allows
// that to be increased. Set to zero for unlimited.
EventEmitter.prototype.setMaxListeners = function(n) {
  var invalid = !isNumber(n) || n < 0 || isNaN(n);
  if (invalid) {
    throw TypeError('n must be a positive number');
  }
  this._maxListeners = n;
  return this;
};
| |
// Synchronously invoke every listener registered for `type`, passing any
// extra arguments through. Returns true if the event had listeners.
// An 'error' event with no listener throws instead (node semantics).
EventEmitter.prototype.emit = function(type) {
  var er, handler, len, args, i, listeners;

  if (!this._events)
    this._events = {};

  // If there is no 'error' event listener then throw.
  if (type === 'error') {
    if (!this._events.error ||
        (isObject(this._events.error) && !this._events.error.length)) {
      er = arguments[1];
      if (er instanceof Error) {
        throw er; // Unhandled 'error' event
      } else {
        throw TypeError('Uncaught, unspecified "error" event.');
      }
      // fix: removed an unreachable `return false;` that followed the
      // two throwing branches above.
    }
  }

  handler = this._events[type];

  if (isUndefined(handler))
    return false;

  if (isFunction(handler)) {
    // Single listener: avoid allocating an args array for 0-2 arguments.
    switch (arguments.length) {
      // fast cases
      case 1:
        handler.call(this);
        break;
      case 2:
        handler.call(this, arguments[1]);
        break;
      case 3:
        handler.call(this, arguments[1], arguments[2]);
        break;
      // slower
      default:
        len = arguments.length;
        args = new Array(len - 1);
        for (i = 1; i < len; i++)
          args[i - 1] = arguments[i];
        handler.apply(this, args);
    }
  } else if (isObject(handler)) {
    len = arguments.length;
    args = new Array(len - 1);
    for (i = 1; i < len; i++)
      args[i - 1] = arguments[i];

    // Snapshot the listener array so handlers that add/remove listeners
    // during emit do not affect this dispatch.
    listeners = handler.slice();
    len = listeners.length;
    for (i = 0; i < len; i++)
      listeners[i].apply(this, args);
  }

  return true;
};
| |
// Register `listener` for `type`. Emits 'newListener' first, stores a bare
// function for the common single-listener case, and warns once when the
// listener count exceeds the max-listeners threshold.
// fix: removed a duplicate `var m;` declaration that re-declared the
// hoisted binding below.
EventEmitter.prototype.addListener = function(type, listener) {
  var m;

  if (!isFunction(listener))
    throw TypeError('listener must be a function');

  if (!this._events)
    this._events = {};

  // To avoid recursion in the case that type === "newListener"! Before
  // adding it to the listeners, first emit "newListener".
  if (this._events.newListener)
    this.emit('newListener', type,
              isFunction(listener.listener) ?
              listener.listener : listener);

  if (!this._events[type])
    // Optimize the case of one listener. Don't need the extra array object.
    this._events[type] = listener;
  else if (isObject(this._events[type]))
    // If we've already got an array, just append.
    this._events[type].push(listener);
  else
    // Adding the second element, need to change to array.
    this._events[type] = [this._events[type], listener];

  // Check for listener leak
  if (isObject(this._events[type]) && !this._events[type].warned) {
    if (!isUndefined(this._maxListeners)) {
      m = this._maxListeners;
    } else {
      m = EventEmitter.defaultMaxListeners;
    }

    if (m && m > 0 && this._events[type].length > m) {
      this._events[type].warned = true;
      console.error('(node) warning: possible EventEmitter memory ' +
                    'leak detected. %d listeners added. ' +
                    'Use emitter.setMaxListeners() to increase limit.',
                    this._events[type].length);
      if (typeof console.trace === 'function') {
        // not supported in IE 10
        console.trace();
      }
    }
  }

  return this;
};
| |
// `on` is the conventional alias for `addListener`.
EventEmitter.prototype.on = EventEmitter.prototype.addListener;
| |
// Register a listener that is invoked at most once, then removed.
EventEmitter.prototype.once = function(type, listener) {
  if (!isFunction(listener))
    throw TypeError('listener must be a function');

  var fired = false;

  function onceWrapper() {
    this.removeListener(type, onceWrapper);
    if (fired) return;
    fired = true;
    listener.apply(this, arguments);
  }

  // Expose the original so removeListener(type, listener) can match it.
  onceWrapper.listener = listener;
  this.on(type, onceWrapper);

  return this;
};
| |
// emits a 'removeListener' event iff the listener was removed
EventEmitter.prototype.removeListener = function(type, listener) {
  var list, position, length, i;

  if (!isFunction(listener))
    throw TypeError('listener must be a function');

  // Nothing registered for this type: no-op.
  if (!this._events || !this._events[type])
    return this;

  list = this._events[type];
  length = list.length;
  position = -1;

  // Single-listener case: the entry is the function itself, or a `once`
  // wrapper whose .listener property is the original function.
  if (list === listener ||
      (isFunction(list.listener) && list.listener === listener)) {
    delete this._events[type];
    if (this._events.removeListener)
      this.emit('removeListener', type, listener);

  } else if (isObject(list)) {
    // Array case: scan backwards for the listener (or its `once` wrapper).
    for (i = length; i-- > 0;) {
      if (list[i] === listener ||
          (list[i].listener && list[i].listener === listener)) {
        position = i;
        break;
      }
    }

    if (position < 0)
      return this;

    // Collapse back to "no listeners" when removing the last one.
    if (list.length === 1) {
      list.length = 0;
      delete this._events[type];
    } else {
      list.splice(position, 1);
    }

    if (this._events.removeListener)
      this.emit('removeListener', type, listener);
  }

  return this;
};
| |
// Remove every listener for `type`, or for all types when called with no
// arguments. Emits 'removeListener' per removed listener only when someone
// is listening for that event.
EventEmitter.prototype.removeAllListeners = function(type) {
  var key, listeners;

  if (!this._events)
    return this;

  // not listening for removeListener, no need to emit
  if (!this._events.removeListener) {
    if (arguments.length === 0)
      this._events = {};
    else if (this._events[type])
      delete this._events[type];
    return this;
  }

  // emit removeListener for all listeners on all events
  if (arguments.length === 0) {
    for (key in this._events) {
      if (key === 'removeListener') continue;
      this.removeAllListeners(key);
    }
    // Remove 'removeListener' handlers last so they observe the others.
    this.removeAllListeners('removeListener');
    this._events = {};
    return this;
  }

  listeners = this._events[type];

  if (isFunction(listeners)) {
    this.removeListener(type, listeners);
  } else {
    // LIFO order
    while (listeners.length)
      this.removeListener(type, listeners[listeners.length - 1]);
  }
  delete this._events[type];

  return this;
};
| |
// Return a copy of the listener array for `type` (empty if none).
EventEmitter.prototype.listeners = function(type) {
  var table = this._events;
  if (!table || !table[type]) {
    return [];
  }
  var entry = table[type];
  return isFunction(entry) ? [entry] : entry.slice();
};
| |
// Static helper: number of listeners `emitter` has registered for `type`.
EventEmitter.listenerCount = function(emitter, type) {
  var table = emitter._events;
  if (!table || !table[type]) {
    return 0;
  }
  return isFunction(table[type]) ? 1 : table[type].length;
};
| |
// True when `arg` is callable.
function isFunction(arg) {
  var t = typeof arg;
  return t === 'function';
}
| |
// True for any primitive number (including NaN and Infinity).
function isNumber(arg) {
  var t = typeof arg;
  return t === 'number';
}
| |
// True for non-null objects (arrays included); typeof null is 'object',
// so the null check is explicit.
function isObject(arg) {
  return arg !== null && typeof arg === 'object';
}
| |
// True only for the value `undefined` (not null or other falsy values).
function isUndefined(arg) {
  return arg === undefined;
}
| |
| },{}],5:[function(require,module,exports){ |
// Browser port of node's util.inherits: make ctor.prototype delegate to
// superCtor.prototype and expose the parent as ctor.super_.
if (typeof Object.create === 'function') {
  // implementation from standard node.js 'util' module
  module.exports = function inherits(ctor, superCtor) {
    ctor.super_ = superCtor
    ctor.prototype = Object.create(superCtor.prototype, {
      constructor: {
        value: ctor,
        enumerable: false,
        writable: true,
        configurable: true
      }
    });
  };
} else {
  // old school shim for old browsers
  module.exports = function inherits(ctor, superCtor) {
    ctor.super_ = superCtor
    // Intermediate constructor avoids running superCtor's side effects.
    var TempCtor = function () {}
    TempCtor.prototype = superCtor.prototype
    ctor.prototype = new TempCtor()
    ctor.prototype.constructor = ctor
  }
}
| |
| },{}],6:[function(require,module,exports){ |
| // shim for using process in browser |
| |
| var process = module.exports = {}; |
| |
// Choose the best available "run f asynchronously, ASAP" primitive:
// setImmediate > postMessage trick > setTimeout(fn, 0) fallback.
process.nextTick = (function () {
  var canSetImmediate = typeof window !== 'undefined'
    && window.setImmediate;
  var canPost = typeof window !== 'undefined'
    && window.postMessage && window.addEventListener
    ;

  // IE10+: setImmediate fires right after the current task.
  if (canSetImmediate) {
    return function (f) { return window.setImmediate(f) };
  }

  // postMessage trick: a self-posted message fires a macrotask sooner
  // than the browser-clamped setTimeout(..., 0).
  if (canPost) {
    var queue = [];
    window.addEventListener('message', function (ev) {
      var source = ev.source;
      // Only react to our own 'process-tick' messages.
      if ((source === window || source === null) && ev.data === 'process-tick') {
        ev.stopPropagation();
        if (queue.length > 0) {
          var fn = queue.shift();
          fn();
        }
      }
    }, true);

    return function nextTick(fn) {
      queue.push(fn);
      window.postMessage('process-tick', '*');
    };
  }

  // Last resort: always available, though clamped to >= 4ms in browsers.
  return function nextTick(fn) {
    setTimeout(fn, 0);
  };
})();
| |
// Static stand-ins for the node `process` API in the browser.
process.title = 'browser';
process.browser = true;
process.env = {};
process.argv = [];

// The event-emitter surface of `process` is a no-op in this shim.
function noop() {}

process.on = noop;
process.addListener = noop;
process.once = noop;
process.off = noop;
process.removeListener = noop;
process.removeAllListeners = noop;
process.emit = noop;

// Native bindings do not exist in the browser.
process.binding = function (name) {
    throw new Error('process.binding is not supported');
}

// TODO(shtylman)
process.cwd = function () { return '/' };
process.chdir = function (dir) {
    throw new Error('process.chdir is not supported');
};
| |
| },{}],7:[function(require,module,exports){ |
| module.exports = require("./lib/_stream_duplex.js") |
| |
| },{"./lib/_stream_duplex.js":8}],8:[function(require,module,exports){ |
| (function (process){ |
| // Copyright Joyent, Inc. and other Node contributors. |
| // |
| // Permission is hereby granted, free of charge, to any person obtaining a |
| // copy of this software and associated documentation files (the |
| // "Software"), to deal in the Software without restriction, including |
| // without limitation the rights to use, copy, modify, merge, publish, |
| // distribute, sublicense, and/or sell copies of the Software, and to permit |
| // persons to whom the Software is furnished to do so, subject to the |
| // following conditions: |
| // |
| // The above copyright notice and this permission notice shall be included |
| // in all copies or substantial portions of the Software. |
| // |
| // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS |
| // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
| // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN |
| // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, |
| // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR |
| // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE |
| // USE OR OTHER DEALINGS IN THE SOFTWARE. |
| |
| // a duplex stream is just a stream that is both readable and writable. |
| // Since JS doesn't have multiple prototypal inheritance, this class |
| // prototypally inherits from Readable, and then parasitically from |
| // Writable. |
| |
| module.exports = Duplex; |
| |
| /*<replacement>*/ |
// Object.keys fallback for pre-ES5 environments. Note the for-in loop also
// enumerates inherited keys, which is acceptable for the prototype-method
// copying this module uses it for.
var objectKeys = Object.keys || function (obj) {
  var keys = [];
  for (var key in obj) keys.push(key);
  return keys;
}
| /*</replacement>*/ |
| |
| |
| /*<replacement>*/ |
| var util = require('core-util-is'); |
| util.inherits = require('inherits'); |
| /*</replacement>*/ |
| |
| var Readable = require('./_stream_readable'); |
| var Writable = require('./_stream_writable'); |
| |
| util.inherits(Duplex, Readable); |
| |
// Parasitic inheritance: Duplex prototypally inherits from Readable, so
// graft every Writable.prototype method it doesn't already have.
forEach(objectKeys(Writable.prototype), function(method) {
  if (!Duplex.prototype[method])
    Duplex.prototype[method] = Writable.prototype[method];
});
| |
// Duplex constructor: initializes both the readable and writable halves,
// honoring the readable/writable/allowHalfOpen options. `new` is optional.
function Duplex(options) {
  if (!(this instanceof Duplex))
    return new Duplex(options);

  Readable.call(this, options);
  Writable.call(this, options);

  var opts = options || {};

  if (opts.readable === false)
    this.readable = false;

  if (opts.writable === false)
    this.writable = false;

  // Half-open (readable side may outlive the writable side) by default.
  this.allowHalfOpen = opts.allowHalfOpen !== false;

  this.once('end', onend);
}
| |
// the no-half-open enforcer: when the readable side ends and half-open is
// disallowed, end the writable side as well.
function onend() {
  // Half-open allowed, or the writable side already ended: nothing to do.
  if (this.allowHalfOpen) return;
  if (this._writableState.ended) return;

  // No more data can be written, but let writes queued in this tick finish.
  process.nextTick(this.end.bind(this));
}
| |
// Minimal Array#forEach stand-in: invoke f(element, index) for each entry.
function forEach (xs, f) {
  var i = 0;
  var l = xs.length;
  while (i < l) {
    f(xs[i], i);
    i += 1;
  }
}
| |
| }).call(this,require("4dON6Z")) |
| },{"./_stream_readable":10,"./_stream_writable":12,"4dON6Z":6,"core-util-is":13,"inherits":5}],9:[function(require,module,exports){ |
| // Copyright Joyent, Inc. and other Node contributors. |
| // |
| // Permission is hereby granted, free of charge, to any person obtaining a |
| // copy of this software and associated documentation files (the |
| // "Software"), to deal in the Software without restriction, including |
| // without limitation the rights to use, copy, modify, merge, publish, |
| // distribute, sublicense, and/or sell copies of the Software, and to permit |
| // persons to whom the Software is furnished to do so, subject to the |
| // following conditions: |
| // |
| // The above copyright notice and this permission notice shall be included |
| // in all copies or substantial portions of the Software. |
| // |
| // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS |
| // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
| // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN |
| // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, |
| // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR |
| // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE |
| // USE OR OTHER DEALINGS IN THE SOFTWARE. |
| |
| // a passthrough stream. |
| // basically just the most minimal sort of Transform stream. |
| // Every written chunk gets output as-is. |
| |
| module.exports = PassThrough; |
| |
| var Transform = require('./_stream_transform'); |
| |
| /*<replacement>*/ |
| var util = require('core-util-is'); |
| util.inherits = require('inherits'); |
| /*</replacement>*/ |
| |
| util.inherits(PassThrough, Transform); |
| |
// PassThrough constructor; works with or without `new`.
function PassThrough(options) {
  if (this instanceof PassThrough) {
    Transform.call(this, options);
  } else {
    return new PassThrough(options);
  }
}
| |
// Identity transform: hand every chunk straight through unchanged.
PassThrough.prototype._transform = function(chunk, encoding, cb) {
  cb(null, chunk);
};
| |
| },{"./_stream_transform":11,"core-util-is":13,"inherits":5}],10:[function(require,module,exports){ |
| (function (process){ |
| // Copyright Joyent, Inc. and other Node contributors. |
| // |
| // Permission is hereby granted, free of charge, to any person obtaining a |
| // copy of this software and associated documentation files (the |
| // "Software"), to deal in the Software without restriction, including |
| // without limitation the rights to use, copy, modify, merge, publish, |
| // distribute, sublicense, and/or sell copies of the Software, and to permit |
| // persons to whom the Software is furnished to do so, subject to the |
| // following conditions: |
| // |
| // The above copyright notice and this permission notice shall be included |
| // in all copies or substantial portions of the Software. |
| // |
| // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS |
| // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
| // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN |
| // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, |
| // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR |
| // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE |
| // USE OR OTHER DEALINGS IN THE SOFTWARE. |
| |
| module.exports = Readable; |
| |
| /*<replacement>*/ |
| var isArray = require('isarray'); |
| /*</replacement>*/ |
| |
| |
| /*<replacement>*/ |
| var Buffer = require('buffer').Buffer; |
| /*</replacement>*/ |
| |
| Readable.ReadableState = ReadableState; |
| |
| var EE = require('events').EventEmitter; |
| |
/*<replacement>*/
// Older events shims may lack the static listenerCount; polyfill it.
if (!EE.listenerCount) EE.listenerCount = function(emitter, type) {
  return emitter.listeners(type).length;
};
/*</replacement>*/
| |
| var Stream = require('stream'); |
| |
| /*<replacement>*/ |
| var util = require('core-util-is'); |
| util.inherits = require('inherits'); |
| /*</replacement>*/ |
| |
| var StringDecoder; |
| |
| util.inherits(Readable, Stream); |
| |
// Per-instance state for a Readable stream: the chunk buffer, flow-control
// flags, pipe bookkeeping, and the optional string decoder.
// `stream` is accepted for parity with WritableState; it is unused here.
function ReadableState(options, stream) {
  options = options || {};

  // the point at which it stops calling _read() to fill the buffer
  // Note: 0 is a valid value, means "don't call _read preemptively ever"
  var hwm = options.highWaterMark;
  this.highWaterMark = (hwm || hwm === 0) ? hwm : 16 * 1024;

  // cast to ints.
  this.highWaterMark = ~~this.highWaterMark;

  // buffered chunks and their total size (chunk count in objectMode)
  this.buffer = [];
  this.length = 0;
  this.pipes = null;
  this.pipesCount = 0;
  this.flowing = false;
  this.ended = false;
  this.endEmitted = false;
  this.reading = false;

  // In streams that never have any data, and do push(null) right away,
  // the consumer can miss the 'end' event if they do some I/O before
  // consuming the stream. So, we don't emit('end') until some reading
  // happens.
  this.calledRead = false;

  // a flag to be able to tell if the onwrite cb is called immediately,
  // or on a later tick. We set this to true at first, because any
  // actions that shouldn't happen until "later" should generally also
  // not happen before the first write call.
  this.sync = true;

  // whenever we return null, then we set a flag to say
  // that we're awaiting a 'readable' event emission.
  this.needReadable = false;
  this.emittedReadable = false;
  this.readableListening = false;


  // object stream flag. Used to make read(n) ignore n and to
  // make all the buffer merging and length checks go away
  this.objectMode = !!options.objectMode;

  // Crypto is kind of old and crusty. Historically, its default string
  // encoding is 'binary' so we have to make this configurable.
  // Everything else in the universe uses 'utf8', though.
  this.defaultEncoding = options.defaultEncoding || 'utf8';

  // when piping, we only care about 'readable' events that happen
  // after read()ing all the bytes and not getting any pushback.
  this.ranOut = false;

  // the number of writers that are awaiting a drain event in .pipe()s
  this.awaitDrain = 0;

  // if true, a maybeReadMore has been scheduled
  this.readingMore = false;

  // when an encoding is requested, buffered Buffers are decoded to strings
  this.decoder = null;
  this.encoding = null;
  if (options.encoding) {
    if (!StringDecoder)
      StringDecoder = require('string_decoder/').StringDecoder;
    this.decoder = new StringDecoder(options.encoding);
    this.encoding = options.encoding;
  }
}
| |
// Readable constructor; usable with or without `new`.
function Readable(options) {
  if (!(this instanceof Readable)) {
    return new Readable(options);
  }

  this._readableState = new ReadableState(options, this);

  // legacy flag kept for pre-streams2 consumers
  this.readable = true;

  Stream.call(this);
}
| |
// Manually shove something into the read() buffer. Returns true while the
// highWaterMark has not been hit, mirroring Writable.write()'s "keep
// writing" signal.
Readable.prototype.push = function(chunk, encoding) {
  var state = this._readableState;

  // In non-object mode, strings are converted to Buffers up front unless
  // they already match the stream's decoder encoding.
  if (!state.objectMode && typeof chunk === 'string') {
    encoding = encoding || state.defaultEncoding;
    if (encoding !== state.encoding) {
      chunk = new Buffer(chunk, encoding);
      encoding = '';
    }
  }

  return readableAddChunk(this, state, chunk, encoding, false);
};
| |
// Unshift should *always* be something directly out of read().
// Re-inserts `chunk` at the front of the buffer without re-decoding it
// (addToFront = true).
Readable.prototype.unshift = function(chunk) {
  var state = this._readableState;
  return readableAddChunk(this, state, chunk, '', true);
};
| |
// Shared implementation behind push() (addToFront=false) and unshift()
// (addToFront=true): validates the chunk, treats null/undefined as EOF,
// buffers data, and wakes up waiting readers. Returns true while the
// caller may keep pushing (see needMoreData).
function readableAddChunk(stream, state, chunk, encoding, addToFront) {
  var er = chunkInvalid(state, chunk);
  if (er) {
    stream.emit('error', er);
  } else if (chunk === null || chunk === undefined) {
    // push(null) signals EOF.
    state.reading = false;
    if (!state.ended)
      onEofChunk(stream, state);
  } else if (state.objectMode || chunk && chunk.length > 0) {
    if (state.ended && !addToFront) {
      var e = new Error('stream.push() after EOF');
      stream.emit('error', e);
    } else if (state.endEmitted && addToFront) {
      var e = new Error('stream.unshift() after end event');
      stream.emit('error', e);
    } else {
      // Decode only pushed (not unshifted) raw chunks; unshifted data
      // already came out of read() in decoded form.
      if (state.decoder && !addToFront && !encoding)
        chunk = state.decoder.write(chunk);

      // update the buffer info.
      state.length += state.objectMode ? 1 : chunk.length;
      if (addToFront) {
        state.buffer.unshift(chunk);
      } else {
        state.reading = false;
        state.buffer.push(chunk);
      }

      if (state.needReadable)
        emitReadable(stream);

      maybeReadMore(stream, state);
    }
  } else if (!addToFront) {
    // Zero-length chunk in non-object mode: just mark the read finished.
    state.reading = false;
  }

  return needMoreData(state);
}
| |
| |
| |
// if it's past the high water mark, we can push in some more.
// Also, if we have no data yet, we can stand some
// more bytes. This is to work around cases where hwm=0,
// such as the repl. Also, if the push() triggered a
// readable event, and the user called read(largeNumber) such that
// needReadable was set, then we ought to push more, so that another
// 'readable' event will be triggered.
function needMoreData(state) {
  if (state.ended) return false;
  return state.needReadable ||
         state.length < state.highWaterMark ||
         state.length === 0;
}
| |
// backwards compatibility.
// Install a StringDecoder so read() returns strings in `enc` instead of
// raw Buffers. StringDecoder is lazy-loaded on first use.
Readable.prototype.setEncoding = function(enc) {
  if (!StringDecoder)
    StringDecoder = require('string_decoder/').StringDecoder;
  this._readableState.decoder = new StringDecoder(enc);
  this._readableState.encoding = enc;
};
| |
// Cap the high water mark at 0x800000 bytes (8 MiB).
// (fix: the previous comment claimed "128MB", but 0x800000 is 8 MiB.)
var MAX_HWM = 0x800000;
// Round n up to the next power of 2 (identity for exact powers of 2),
// clamped to MAX_HWM.
function roundUpToNextPowerOf2(n) {
  if (n >= MAX_HWM) {
    return MAX_HWM;
  }
  // Smear the highest set bit of (n - 1) into every lower position,
  // then add one to land on the next power of 2.
  n--;
  n |= n >> 1;
  n |= n >> 2;
  n |= n >> 4;
  n |= n >> 8;
  n |= n >> 16;
  n++;
  return n;
}
| |
// Decide how many bytes (or objects) read(n) should return given the
// current buffered state. Side effects: may raise state.highWaterMark for
// large n, and sets state.needReadable when data is short of the request.
// Returns 0 to signal "nothing available yet".
function howMuchToRead(n, state) {
  if (state.length === 0 && state.ended)
    return 0;

  // objectMode reads are always exactly one object (or zero).
  if (state.objectMode)
    return n === 0 ? 0 : 1;

  if (isNaN(n) || n === null) {
    // n unspecified: only flow one buffer at a time
    if (state.flowing && state.buffer.length)
      return state.buffer[0].length;
    else
      return state.length;
  }

  if (n <= 0)
    return 0;

  // If we're asking for more than the target buffer level,
  // then raise the water mark. Bump up to the next highest
  // power of 2, to prevent increasing it excessively in tiny
  // amounts.
  if (n > state.highWaterMark)
    state.highWaterMark = roundUpToNextPowerOf2(n);

  // don't have that much. return null, unless we've ended.
  if (n > state.length) {
    if (!state.ended) {
      state.needReadable = true;
      return 0;
    } else
      return state.length;
  }

  return n;
}
| |
// you can override either this method, or the async _read(n) below.
// Pull up to `n` bytes (or objects) out of the internal buffer, calling
// the underlying _read() when the buffer is running low. Returns the
// data, or null when nothing is available (or needed) right now.
Readable.prototype.read = function(n) {
  var state = this._readableState;
  state.calledRead = true;
  var nOrig = n;

  // any real read request re-arms the 'readable' event.
  if (typeof n !== 'number' || n > 0)
    state.emittedReadable = false;

  // if we're doing read(0) to trigger a readable event, but we
  // already have a bunch of data in the buffer, then just trigger
  // the 'readable' event and move on.
  if (n === 0 &&
      state.needReadable &&
      (state.length >= state.highWaterMark || state.ended)) {
    emitReadable(this);
    return null;
  }

  n = howMuchToRead(n, state);

  // if we've ended, and we're now clear, then finish it up.
  if (n === 0 && state.ended) {
    if (state.length === 0)
      endReadable(this);
    return null;
  }

  // All the actual chunk generation logic needs to be
  // *below* the call to _read.  The reason is that in certain
  // synthetic stream cases, such as passthrough streams, _read
  // may be a completely synchronous operation which may change
  // the state of the read buffer, providing enough data when
  // before there was *not* enough.
  //
  // So, the steps are:
  // 1. Figure out what the state of things will be after we do
  // a read from the buffer.
  //
  // 2. If that resulting state will trigger a _read, then call _read.
  // Note that this may be asynchronous, or synchronous.  Yes, it is
  // deeply ugly to write APIs this way, but that still doesn't mean
  // that the Readable class should behave improperly, as streams are
  // designed to be sync/async agnostic.
  // Take note if the _read call is sync or async (ie, if the read call
  // has returned yet), so that we know whether or not it's safe to emit
  // 'readable' etc.
  //
  // 3. Actually pull the requested chunks out of the buffer and return.

  // if we need a readable event, then we need to do some reading.
  var doRead = state.needReadable;

  // if we currently have less than the highWaterMark, then also read some
  if (state.length - n <= state.highWaterMark)
    doRead = true;

  // however, if we've ended, then there's no point, and if we're already
  // reading, then it's unnecessary.
  if (state.ended || state.reading)
    doRead = false;

  if (doRead) {
    state.reading = true;
    // sync guards against 'readable' being emitted re-entrantly
    // while _read is still on the stack (see emitReadable).
    state.sync = true;
    // if the length is currently zero, then we *need* a readable event.
    if (state.length === 0)
      state.needReadable = true;
    // call internal read method
    this._read(state.highWaterMark);
    state.sync = false;
  }

  // If _read called its callback synchronously, then `reading`
  // will be false, and we need to re-evaluate how much data we
  // can return to the user.
  if (doRead && !state.reading)
    n = howMuchToRead(nOrig, state);

  var ret;
  if (n > 0)
    ret = fromList(n, state);
  else
    ret = null;

  if (ret === null) {
    state.needReadable = true;
    n = 0;
  }

  state.length -= n;

  // If we have nothing in the buffer, then we want to know
  // as soon as we *do* get something into the buffer.
  if (state.length === 0 && !state.ended)
    state.needReadable = true;

  // If we happened to read() exactly the remaining amount in the
  // buffer, and the EOF has been seen at this point, then make sure
  // that we emit 'end' on the very next tick.
  if (state.ended && !state.endEmitted && state.length === 0)
    endReadable(this);

  return ret;
};
| |
// Returns a TypeError when `chunk` is not acceptable for a
// non-object-mode stream (buffers, strings, null and undefined are
// always valid), or null when the chunk is fine. Object-mode streams
// accept anything.
// Note: the original condition also tested `!er`, which was always
// true at that point (er had just been initialized to null); the dead
// clause has been removed.
function chunkInvalid(state, chunk) {
  var er = null;
  if (!state.objectMode &&
      !Buffer.isBuffer(chunk) &&
      'string' !== typeof chunk &&
      chunk !== null &&
      chunk !== undefined) {
    er = new TypeError('Invalid non-string/buffer chunk');
  }
  return er;
}
| |
| |
| function onEofChunk(stream, state) { |
| if (state.decoder && !state.ended) { |
| var chunk = state.decoder.end(); |
| if (chunk && chunk.length) { |
| state.buffer.push(chunk); |
| state.length += state.objectMode ? 1 : chunk.length; |
| } |
| } |
| state.ended = true; |
| |
| // if we've ended and we have some data left, then emit |
| // 'readable' now to make sure it gets picked up. |
| if (state.length > 0) |
| emitReadable(stream); |
| else |
| endReadable(stream); |
| } |
| |
// Emit 'readable' at most once per arming cycle. In sync mode (i.e.
// while _read is still on the stack) the emission is deferred to the
// next tick: emitting immediately could trigger another read() call
// and recurse into a stack overflow. Deferring may instead trip a
// nextTick recursion warning, which is the lesser evil.
function emitReadable(stream) {
  var state = stream._readableState;
  state.needReadable = false;
  if (state.emittedReadable)
    return;

  state.emittedReadable = true;
  if (!state.sync) {
    emitReadable_(stream);
    return;
  }
  process.nextTick(function() {
    emitReadable_(stream);
  });
}
| |
// Actual emission, split out so emitReadable can schedule it either
// synchronously or via process.nextTick.
function emitReadable_(stream) {
  stream.emit('readable');
}
| |
| |
| // at this point, the user has presumably seen the 'readable' event, |
| // and called read() to consume some data. that may have triggered |
| // in turn another _read(n) call, in which case reading = true if |
| // it's in progress. |
| // However, if we're not ended, or reading, and the length < hwm, |
| // then go ahead and try to read some more preemptively. |
| function maybeReadMore(stream, state) { |
| if (!state.readingMore) { |
| state.readingMore = true; |
| process.nextTick(function() { |
| maybeReadMore_(stream, state); |
| }); |
| } |
| } |
| |
// Keep issuing read(0) probes until the buffer reaches the high-water
// mark, a read is already in flight, the stream is flowing/ended, or a
// probe fails to produce any new data (no progress => stop spinning).
function maybeReadMore_(stream, state) {
  var prevLength = state.length;
  while (!state.reading && !state.flowing && !state.ended &&
         state.length < state.highWaterMark) {
    stream.read(0);
    if (prevLength === state.length)
      break;
    prevLength = state.length;
  }
  state.readingMore = false;
}
| |
// abstract method.  to be overridden in specific implementation classes.
// call cb(er, data) where data is <= n in length.
// for virtual (non-string, non-buffer) streams, "length" is somewhat
// arbitrary, and perhaps not very meaningful.
// This default implementation just reports an error: any concrete
// Readable must provide its own _read.
Readable.prototype._read = function(n) {
  this.emit('error', new Error('not implemented'));
};
| |
// Connect this readable to a writable `dest`. Supports multiple
// destinations; returns `dest` so calls can be chained. By default the
// destination is end()ed when the source ends (pipeOpts.end === false
// opts out; stdout/stderr are never auto-ended).
Readable.prototype.pipe = function(dest, pipeOpts) {
  var src = this;
  var state = this._readableState;

  // pipes holds a single dest bare, or an array for two or more
  // (unpipe performs the reverse bookkeeping).
  switch (state.pipesCount) {
    case 0:
      state.pipes = dest;
      break;
    case 1:
      state.pipes = [state.pipes, dest];
      break;
    default:
      state.pipes.push(dest);
      break;
  }
  state.pipesCount += 1;

  var doEnd = (!pipeOpts || pipeOpts.end !== false) &&
              dest !== process.stdout &&
              dest !== process.stderr;

  var endFn = doEnd ? onend : cleanup;
  if (state.endEmitted)
    process.nextTick(endFn);
  else
    src.once('end', endFn);

  dest.on('unpipe', onunpipe);
  function onunpipe(readable) {
    // only react to unpipe events aimed at *this* source.
    if (readable !== src) return;
    cleanup();
  }

  function onend() {
    dest.end();
  }

  // when the dest drains, it reduces the awaitDrain counter
  // on the source.  This would be more elegant with a .once()
  // handler in flow(), but adding and removing repeatedly is
  // too slow.
  var ondrain = pipeOnDrain(src);
  dest.on('drain', ondrain);

  function cleanup() {
    // cleanup event handlers once the pipe is broken
    dest.removeListener('close', onclose);
    dest.removeListener('finish', onfinish);
    dest.removeListener('drain', ondrain);
    dest.removeListener('error', onerror);
    dest.removeListener('unpipe', onunpipe);
    src.removeListener('end', onend);
    src.removeListener('end', cleanup);

    // if the reader is waiting for a drain event from this
    // specific writer, then it would cause it to never start
    // flowing again.
    // So, if this is awaiting a drain, then we just call it now.
    // If we don't know, then assume that we are waiting for one.
    if (!dest._writableState || dest._writableState.needDrain)
      ondrain();
  }

  // if the dest has an error, then stop piping into it.
  // however, don't suppress the throwing behavior for this.
  function onerror(er) {
    unpipe();
    dest.removeListener('error', onerror);
    // re-emit only if no other handler remains to observe it.
    if (EE.listenerCount(dest, 'error') === 0)
      dest.emit('error', er);
  }
  // This is a brutally ugly hack to make sure that our error handler
  // is attached before any userland ones.  NEVER DO THIS.
  if (!dest._events || !dest._events.error)
    dest.on('error', onerror);
  else if (isArray(dest._events.error))
    dest._events.error.unshift(onerror);
  else
    dest._events.error = [onerror, dest._events.error];



  // Both close and finish should trigger unpipe, but only once.
  function onclose() {
    dest.removeListener('finish', onfinish);
    unpipe();
  }
  dest.once('close', onclose);
  function onfinish() {
    dest.removeListener('close', onclose);
    unpipe();
  }
  dest.once('finish', onfinish);

  function unpipe() {
    src.unpipe(dest);
  }

  // tell the dest that it's being piped to
  dest.emit('pipe', src);

  // start the flow if it hasn't been started already.
  if (!state.flowing) {
    // the handler that waits for readable events after all
    // the data gets sucked out in flow.
    // This would be easier to follow with a .once() handler
    // in flow(), but that is too slow.
    this.on('readable', pipeOnReadable);

    state.flowing = true;
    process.nextTick(function() {
      flow(src);
    });
  }

  return dest;
};
| |
// Build the 'drain' handler for one pipe destination. Each drain
// decrements the source's awaitDrain counter; once every blocked
// destination has drained, flowing resumes.
function pipeOnDrain(src) {
  return function() {
    var state = src._readableState;
    state.awaitDrain -= 1;
    if (state.awaitDrain === 0)
      flow(src);
  };
}
| |
// Pump data from `src` into every piped destination until either the
// source runs dry, some destination asks for backpressure (write()
// returned false), or all destinations were unpiped.
function flow(src) {
  var state = src._readableState;
  var chunk;
  state.awaitDrain = 0;

  // counts destinations whose write() returned false; `chunk` is
  // captured from the enclosing loop iteration.
  function write(dest, i, list) {
    var written = dest.write(chunk);
    if (false === written) {
      state.awaitDrain++;
    }
  }

  while (state.pipesCount && null !== (chunk = src.read())) {

    if (state.pipesCount === 1)
      write(state.pipes, 0, null);
    else
      forEach(state.pipes, write);

    src.emit('data', chunk);

    // if anyone needs a drain, then we have to wait for that.
    if (state.awaitDrain > 0)
      return;
  }

  // if every destination was unpiped, either before entering this
  // function, or in the while loop, then stop flowing.
  //
  // NB: This is a pretty rare edge case.
  if (state.pipesCount === 0) {
    state.flowing = false;

    // if there were data event listeners added, then switch to old mode.
    if (EE.listenerCount(src, 'data') > 0)
      emitDataEvents(src);
    return;
  }

  // at this point, no one needed a drain, so we just ran out of data
  // on the next readable event, start it over again.
  state.ranOut = true;
}
| |
// 'readable' handler installed by pipe(): restarts the flow loop if it
// had previously stopped because the source ran out of data.
function pipeOnReadable() {
  var state = this._readableState;
  if (!state.ranOut)
    return;
  state.ranOut = false;
  flow(this);
}
| |
| |
// Detach `dest` from this readable (or every destination when dest is
// omitted). Emits 'unpipe' on each removed destination and returns
// `this` for chaining.
Readable.prototype.unpipe = function(dest) {
  var state = this._readableState;

  // if we're not piping anywhere, then do nothing.
  if (state.pipesCount === 0)
    return this;

  // just one destination.  most common case.
  if (state.pipesCount === 1) {
    // passed in one, but it's not the right one.
    if (dest && dest !== state.pipes)
      return this;

    if (!dest)
      dest = state.pipes;

    // got a match.
    state.pipes = null;
    state.pipesCount = 0;
    this.removeListener('readable', pipeOnReadable);
    state.flowing = false;
    if (dest)
      dest.emit('unpipe', this);
    return this;
  }

  // slow case. multiple pipe destinations.

  if (!dest) {
    // remove all.
    var dests = state.pipes;
    var len = state.pipesCount;
    state.pipes = null;
    state.pipesCount = 0;
    this.removeListener('readable', pipeOnReadable);
    state.flowing = false;

    for (var i = 0; i < len; i++)
      dests[i].emit('unpipe', this);
    return this;
  }

  // try to find the right one.
  var i = indexOf(state.pipes, dest);
  if (i === -1)
    return this;

  state.pipes.splice(i, 1);
  state.pipesCount -= 1;
  // collapse back to the bare (non-array) representation.
  if (state.pipesCount === 1)
    state.pipes = state.pipes[0];

  dest.emit('unpipe', this);

  return this;
};
| |
// set up data events if they are asked for
// Ensure readable listeners eventually get something
// Overrides EventEmitter#on: a 'data' listener flips the stream into
// old (flowing) mode; the first 'readable' listener primes the pump
// with a read(0) so an event is guaranteed to fire eventually.
Readable.prototype.on = function(ev, fn) {
  var res = Stream.prototype.on.call(this, ev, fn);

  if (ev === 'data' && !this._readableState.flowing)
    emitDataEvents(this);

  if (ev === 'readable' && this.readable) {
    var state = this._readableState;
    if (!state.readableListening) {
      state.readableListening = true;
      state.emittedReadable = false;
      state.needReadable = true;
      if (!state.reading) {
        this.read(0);
      } else if (state.length) {
        emitReadable(this, state);
      }
    }
  }

  return res;
};
Readable.prototype.addListener = Readable.prototype.on;
| |
// pause() and resume() are remnants of the legacy readable stream API
// If the user uses them, then switch into old mode.
// resume: convert to old mode (emitDataEvents), kick off a read, and
// announce the transition.
Readable.prototype.resume = function() {
  emitDataEvents(this);
  this.read(0);
  this.emit('resume');
};
| |
// pause: convert to old mode starting out paused, then announce it.
Readable.prototype.pause = function() {
  emitDataEvents(this, true);
  this.emit('pause');
};
| |
// Convert `stream` to the old 'data'-event style, optionally starting
// out paused. Installs legacy pipe/on/pause/resume and drains the
// buffer into 'data' events whenever 'readable' fires.
function emitDataEvents(stream, startPaused) {
  var state = stream._readableState;

  if (state.flowing) {
    // https://github.com/isaacs/readable-stream/issues/16
    throw new Error('Cannot switch to old mode now.');
  }

  var paused = startPaused || false;
  var readable = false;

  // convert to an old-style stream.
  stream.readable = true;
  stream.pipe = Stream.prototype.pipe;
  stream.on = stream.addListener = Stream.prototype.on;

  stream.on('readable', function() {
    readable = true;

    // drain the buffer one chunk at a time until paused or empty.
    var c;
    while (!paused && (null !== (c = stream.read())))
      stream.emit('data', c);

    if (c === null) {
      readable = false;
      stream._readableState.needReadable = true;
    }
  });

  stream.pause = function() {
    paused = true;
    this.emit('pause');
  };

  stream.resume = function() {
    paused = false;
    // if data was left buffered when we paused, re-run the drain loop
    // on the next tick; otherwise probe the source with read(0).
    if (readable)
      process.nextTick(function() {
        stream.emit('readable');
      });
    else
      this.read(0);
    this.emit('resume');
  };

  // now make it start, just in case it hadn't already.
  stream.emit('readable');
}
| |
// wrap an old-style stream as the async data source.
// This is *not* part of the readable stream interface.
// It is an ugly unfortunate mess of history.
// Forwards 'data'/'end' from the wrapped stream into push(), proxies
// its other methods and key events, and pauses/resumes it to respect
// this stream's backpressure. Returns `this`.
Readable.prototype.wrap = function(stream) {
  var state = this._readableState;
  var paused = false;

  var self = this;
  stream.on('end', function() {
    // flush any partial character sequence the decoder still holds.
    if (state.decoder && !state.ended) {
      var chunk = state.decoder.end();
      if (chunk && chunk.length)
        self.push(chunk);
    }

    self.push(null);
  });

  stream.on('data', function(chunk) {
    if (state.decoder)
      chunk = state.decoder.write(chunk);
    // skip empty chunks, except in object mode where they count.
    if (!chunk || !state.objectMode && !chunk.length)
      return;

    // push() returning false means our buffer is full: pause the
    // underlying source until _read() asks for more.
    var ret = self.push(chunk);
    if (!ret) {
      paused = true;
      stream.pause();
    }
  });

  // proxy all the other methods.
  // important when wrapping filters and duplexes.
  for (var i in stream) {
    if (typeof stream[i] === 'function' &&
        typeof this[i] === 'undefined') {
      // immediately-invoked wrapper captures `i` per iteration
      // (var is function-scoped, so a plain closure would not).
      this[i] = function(method) { return function() {
        return stream[method].apply(stream, arguments);
      }}(i);
    }
  }

  // proxy certain important events.
  var events = ['error', 'close', 'destroy', 'pause', 'resume'];
  forEach(events, function(ev) {
    stream.on(ev, self.emit.bind(self, ev));
  });

  // when we try to consume some more bytes, simply unpause the
  // underlying stream.
  self._read = function(n) {
    if (paused) {
      paused = false;
      stream.resume();
    }
  };

  return self;
};
| |
| |
| |
// exposed for testing purposes only.
// (tests reach the internal buffer-plucking helper via
// Readable._fromList; this is not public API.)
Readable._fromList = fromList;
| |
// Pluck off n bytes from an array of buffers.
// `state.length` is the combined length of everything in state.buffer;
// the caller is responsible for decrementing it afterwards. Returns a
// string when a decoder is attached, a Buffer otherwise, one object in
// object mode, or null when nothing is available. n falsy (or >= the
// total) means "take everything".
function fromList(n, state) {
  var list = state.buffer;
  var length = state.length;
  var stringMode = !!state.decoder;
  var objectMode = !!state.objectMode;

  // nothing buffered, definitely empty.
  if (list.length === 0)
    return null;

  if (length === 0)
    return null;

  // object mode always hands out exactly one object.
  if (objectMode)
    return list.shift();

  if (!n || n >= length) {
    // consume everything buffered and truncate the list.
    var all = stringMode ? list.join('') : Buffer.concat(list, length);
    list.length = 0;
    return all;
  }

  var first = list[0];
  if (n < first.length) {
    // only part of the first chunk is needed; slice behaves the same
    // for buffers and strings.
    list[0] = first.slice(n);
    return first.slice(0, n);
  }

  if (n === first.length) {
    // first chunk is a perfect match.
    return list.shift();
  }

  // the request spans multiple chunks: accumulate exactly n units.
  var ret = stringMode ? '' : new Buffer(n);
  var copied = 0;
  while (copied < n && list.length) {
    var buf = list[0];
    var take = Math.min(n - copied, buf.length);

    if (stringMode)
      ret += buf.slice(0, take);
    else
      buf.copy(ret, copied, 0, take);

    if (take < buf.length)
      list[0] = buf.slice(take);
    else
      list.shift();

    copied += take;
  }
  return ret;
}
| |
// Finish the readable side: schedules the 'end' event on the next tick
// once the buffer has been fully drained. Throws if data is still
// buffered (that would be a bug in the stream machinery itself).
// 'end' is only delivered after the consumer has actually called
// read() at least once (calledRead), and never twice (endEmitted).
function endReadable(stream) {
  var state = stream._readableState;

  if (state.length > 0)
    throw new Error('endReadable called on non-empty stream');

  if (state.endEmitted || !state.calledRead)
    return;

  state.ended = true;
  process.nextTick(function() {
    // a last-minute unshift may have refilled the buffer; re-check.
    if (state.endEmitted || state.length !== 0)
      return;
    state.endEmitted = true;
    stream.readable = false;
    stream.emit('end');
  });
}
| |
// Minimal Array#forEach substitute: invokes f(value, index) for each
// element of the array-like xs.
function forEach (xs, f) {
  var len = xs.length;
  for (var idx = 0; idx < len; idx += 1)
    f(xs[idx], idx);
}
| |
// Minimal Array#indexOf substitute: first index whose element is
// strictly equal to x, or -1 if absent.
function indexOf (xs, x) {
  var i = 0;
  var l = xs.length;
  while (i < l) {
    if (xs[i] === x)
      return i;
    i += 1;
  }
  return -1;
}
| |
| }).call(this,require("4dON6Z")) |
| },{"4dON6Z":6,"buffer":1,"core-util-is":13,"events":4,"inherits":5,"isarray":14,"stream":20,"string_decoder/":15}],11:[function(require,module,exports){ |
| // Copyright Joyent, Inc. and other Node contributors. |
| // |
| // Permission is hereby granted, free of charge, to any person obtaining a |
| // copy of this software and associated documentation files (the |
| // "Software"), to deal in the Software without restriction, including |
| // without limitation the rights to use, copy, modify, merge, publish, |
| // distribute, sublicense, and/or sell copies of the Software, and to permit |
| // persons to whom the Software is furnished to do so, subject to the |
| // following conditions: |
| // |
| // The above copyright notice and this permission notice shall be included |
| // in all copies or substantial portions of the Software. |
| // |
| // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS |
| // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
| // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN |
| // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, |
| // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR |
| // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE |
| // USE OR OTHER DEALINGS IN THE SOFTWARE. |
| |
| |
| // a transform stream is a readable/writable stream where you do |
| // something with the data. Sometimes it's called a "filter", |
| // but that's not a great name for it, since that implies a thing where |
| // some bits pass through, and others are simply ignored. (That would |
| // be a valid example of a transform, of course.) |
| // |
| // While the output is causally related to the input, it's not a |
| // necessarily symmetric or synchronous transformation. For example, |
| // a zlib stream might take multiple plain-text writes(), and then |
| // emit a single compressed chunk some time in the future. |
| // |
| // Here's how this works: |
| // |
| // The Transform stream has all the aspects of the readable and writable |
| // stream classes. When you write(chunk), that calls _write(chunk,cb) |
| // internally, and returns false if there's a lot of pending writes |
| // buffered up. When you call read(), that calls _read(n) until |
| // there's enough pending readable data buffered up. |
| // |
| // In a transform stream, the written data is placed in a buffer. When |
| // _read(n) is called, it transforms the queued up data, calling the |
| // buffered _write cb's as it consumes chunks. If consuming a single |
| // written chunk would result in multiple output chunks, then the first |
| // outputted bit calls the readcb, and subsequent chunks just go into |
| // the read buffer, and will cause it to emit 'readable' if necessary. |
| // |
| // This way, back-pressure is actually determined by the reading side, |
| // since _read has to be called to start processing a new chunk. However, |
| // a pathological inflate type of transform can cause excessive buffering |
| // here. For example, imagine a stream where every byte of input is |
| // interpreted as an integer from 0-255, and then results in that many |
| // bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in |
| // 1kb of data being output. In this case, you could write a very small |
| // amount of input, and end up with a very large amount of output. In |
| // such a pathological inflating mechanism, there'd be no way to tell |
| // the system to stop doing the transform. A single 4MB write could |
| // cause the system to run out of memory. |
| // |
| // However, even in such a pathological case, only a single written chunk |
| // would be consumed, and then the rest would wait (un-transformed) until |
| // the results of the previous transformed chunk were consumed. |
| |
| module.exports = Transform; |
| |
| var Duplex = require('./_stream_duplex'); |
| |
| /*<replacement>*/ |
| var util = require('core-util-is'); |
| util.inherits = require('inherits'); |
| /*</replacement>*/ |
| |
| util.inherits(Transform, Duplex); |
| |
| |
// Per-stream transform bookkeeping.
function TransformState(options, stream) {
  // Bound trampoline handed to _transform() as its completion callback.
  this.afterTransform = function(err, out) {
    return afterTransform(stream, err, out);
  };

  this.needTransform = false;   // readable side asked but no chunk pending
  this.transforming = false;    // a _transform() call is in flight
  this.writecb = null;          // pending _write callback
  this.writechunk = null;       // pending chunk awaiting transformation
}
| |
// Completion callback for _transform(): forwards output to the
// readable side, acknowledges the pending _write, and pulls more data
// if the readable side is still under its high-water mark.
function afterTransform(stream, er, data) {
  var ts = stream._transformState;
  ts.transforming = false;

  var cb = ts.writecb;
  if (!cb)
    return stream.emit('error', new Error('no writecb in Transform class'));

  ts.writechunk = null;
  ts.writecb = null;

  // null/undefined output means "no data produced for this chunk".
  if (data !== null && data !== undefined)
    stream.push(data);

  cb(er);

  var rs = stream._readableState;
  rs.reading = false;
  if (rs.needReadable || rs.length < rs.highWaterMark) {
    stream._read(rs.highWaterMark);
  }
}
| |
| |
// Construct a Transform stream: a Duplex whose writable side feeds
// _transform() and whose readable side receives its output.
function Transform(options) {
  if (!(this instanceof Transform))
    return new Transform(options);

  Duplex.call(this, options);

  var ts = this._transformState = new TransformState(options, this);

  // when the writable side finishes, then flush out anything remaining.
  var stream = this;

  // start out asking for a readable event once data is transformed.
  this._readableState.needReadable = true;

  // we have implemented the _read method, and done the other things
  // that Readable wants before the first _read call, so unset the
  // sync guard flag.
  this._readableState.sync = false;

  this.once('finish', function() {
    // give subclasses a chance to flush trailing output before EOF.
    if ('function' === typeof this._flush)
      this._flush(function(er) {
        done(stream, er);
      });
    else
      done(stream);
  });
}
| |
// Override push so that producing readable data clears the
// needTransform flag (the readable side's request is being satisfied).
Transform.prototype.push = function(chunk, encoding) {
  this._transformState.needTransform = false;
  return Duplex.prototype.push.call(this, chunk, encoding);
};
| |
// This is the part where you do stuff!
// override this function in implementation classes.
// 'chunk' is an input chunk.
//
// Call `push(newChunk)` to pass along transformed output
// to the readable side.  You may call 'push' zero or more times.
//
// Call `cb(err)` when you are done with this chunk.  If you pass
// an error, then that'll put the hurt on the whole operation.  If you
// never call cb(), then you'll never get another chunk.
//
// Abstract: the default implementation throws; subclasses must override.
Transform.prototype._transform = function(chunk, encoding, cb) {
  throw new Error('not implemented');
};
| |
// Writable-side hook: stash the incoming chunk and its callback, then
// kick the readable side if it is waiting on data. The chunk is only
// transformed when _read() asks for it (backpressure by pull).
Transform.prototype._write = function(chunk, encoding, cb) {
  var ts = this._transformState;
  ts.writecb = cb;
  ts.writechunk = chunk;
  ts.writeencoding = encoding;

  if (ts.transforming)
    return;

  var rs = this._readableState;
  var wantsData = ts.needTransform ||
                  rs.needReadable ||
                  rs.length < rs.highWaterMark;
  if (wantsData)
    this._read(rs.highWaterMark);
};
| |
// Doesn't matter what the args are here.
// _transform does all the work.
// That we got here means that the readable side wants more data.
Transform.prototype._read = function(n) {
  var ts = this._transformState;

  if (ts.writechunk === null || !ts.writecb || ts.transforming) {
    // nothing buffered (or already busy): remember that the readable
    // side asked, so the next _write() triggers a transform right away.
    ts.needTransform = true;
    return;
  }

  ts.transforming = true;
  this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
};
| |
| |
// Invoked once the writable side has finished (and _flush, if any, has
// run). Verifies that nothing is still buffered or in flight, then
// signals EOF on the readable side. Errors are re-emitted instead.
function done(stream, er) {
  if (er)
    return stream.emit('error', er);

  // by the time 'finish' fires, both the write buffer and any
  // in-flight transform must be fully drained.
  var ws = stream._writableState;
  var ts = stream._transformState;

  if (ws.length)
    throw new Error('calling transform done when ws.length != 0');

  if (ts.transforming)
    throw new Error('calling transform done when still transforming');

  return stream.push(null);
}
| |
| },{"./_stream_duplex":8,"core-util-is":13,"inherits":5}],12:[function(require,module,exports){ |
| (function (process){ |
| // Copyright Joyent, Inc. and other Node contributors. |
| // |
| // Permission is hereby granted, free of charge, to any person obtaining a |
| // copy of this software and associated documentation files (the |
| // "Software"), to deal in the Software without restriction, including |
| // without limitation the rights to use, copy, modify, merge, publish, |
| // distribute, sublicense, and/or sell copies of the Software, and to permit |
| // persons to whom the Software is furnished to do so, subject to the |
| // following conditions: |
| // |
| // The above copyright notice and this permission notice shall be included |
| // in all copies or substantial portions of the Software. |
| // |
| // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS |
| // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
| // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN |
| // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, |
| // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR |
| // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE |
| // USE OR OTHER DEALINGS IN THE SOFTWARE. |
| |
| // A bit simpler than readable streams. |
| // Implement an async ._write(chunk, cb), and it'll handle all |
| // the drain event emission and buffering. |
| |
| module.exports = Writable; |
| |
| /*<replacement>*/ |
| var Buffer = require('buffer').Buffer; |
| /*</replacement>*/ |
| |
| Writable.WritableState = WritableState; |
| |
| |
| /*<replacement>*/ |
| var util = require('core-util-is'); |
| util.inherits = require('inherits'); |
| /*</replacement>*/ |
| |
| |
| var Stream = require('stream'); |
| |
| util.inherits(Writable, Stream); |
| |
// A single queued write request: the chunk, how it was encoded, and
// the callback to fire once the chunk has been flushed.
function WriteReq(data, enc, callback) {
  this.chunk = data;
  this.encoding = enc;
  this.callback = callback;
}
| |
// Per-stream writable-side bookkeeping.
function WritableState(options, stream) {
  options = options || {};

  // buffered-byte threshold past which write() returns false. Zero is
  // a legal value and means "always ask the caller to wait for
  // 'drain'".
  var hwm = options.highWaterMark;
  this.highWaterMark = (hwm || hwm === 0) ? hwm : 16 * 1024;

  // in object mode each chunk counts as 1 regardless of its byte size.
  this.objectMode = !!options.objectMode;

  // cast to an integer.
  this.highWaterMark = ~~this.highWaterMark;

  this.needDrain = false;
  // end() has been entered.
  this.ending = false;
  // end() has been called and returned.
  this.ended = false;
  // the 'finish' event has fired.
  this.finished = false;

  // decode strings into buffers before handing them to _write, unless
  // explicitly disabled (some node-core streams optimize raw-string
  // handling at a lower level).
  var noDecode = options.decodeStrings === false;
  this.decodeStrings = !noDecode;

  // crypto historically defaults to 'binary'; everything else in the
  // universe uses 'utf8'.
  this.defaultEncoding = options.defaultEncoding || 'utf8';

  // not an actual buffer, just a count of how much is waiting to be
  // pushed to the underlying socket or file.
  this.length = 0;

  // true while a _write() call is outstanding.
  this.writing = false;

  // tells whether the onwrite cb ran in the same tick as _write.
  // Starts true so "later" actions cannot fire before the first
  // write call.
  this.sync = true;

  // true while draining previously buffered writes, so the _write()
  // callback firing in the same tick cannot overlap another onwrite.
  this.bufferProcessing = false;

  // the callback that's handed to _write(chunk, cb).
  this.onwrite = function(er) {
    onwrite(stream, er);
  };

  // the callback the user supplied to write(chunk, encoding, cb).
  this.writecb = null;

  // size of the chunk currently being written.
  this.writelen = 0;

  this.buffer = [];

  // set once an error has been emitted, so it is not thrown twice.
  this.errorEmitted = false;
}
| |
function Writable(options) {
  // Required lazily to avoid the circular dependency with Duplex.
  var Duplex = require('./_stream_duplex');

  // Allow construction without `new`. The Writable ctor is also applied
  // to Duplex instances, which are instanceof Readable rather than
  // Writable, so those must be accepted as-is too.
  var calledOnInstance = this instanceof Writable || this instanceof Duplex;
  if (!calledOnInstance)
    return new Writable(options);

  this._writableState = new WritableState(options, this);

  // legacy flag consulted by old-style stream code.
  this.writable = true;

  Stream.call(this);
}
| |
// Writable streams are sinks; piping *from* one is a programmer error.
Writable.prototype.pipe = function() {
  var er = new Error('Cannot pipe. Not readable.');
  this.emit('error', er);
};
| |
| |
// Reject a write() issued after end(): emit 'error' on the stream now,
// and invoke the write callback with the same error on the next tick.
function writeAfterEnd(stream, state, cb) {
  var error = new Error('write after end');
  // TODO: defer error events consistently everywhere, not just the cb
  stream.emit('error', error);
  process.nextTick(cb.bind(null, error));
}
| |
// If we get something that is not a buffer, string, null, or undefined,
// and we're not in objectMode, then that's an error. In object mode any
// value is acceptable; chunks then count as length=1 each, so the
// watermarks measure object counts rather than bytes.
function validChunk(stream, state, chunk, cb) {
  var acceptable =
      state.objectMode ||
      chunk === null ||
      chunk === undefined ||
      typeof chunk === 'string' ||
      Buffer.isBuffer(chunk);

  if (acceptable)
    return true;

  var er = new TypeError('Invalid non-string/buffer chunk');
  stream.emit('error', er);
  process.nextTick(function() {
    cb(er);
  });
  return false;
}
| |
// write(chunk[, encoding][, cb]) — queue or dispatch one chunk.
// Returns false once the buffered length reaches the high-water mark,
// signalling the caller to wait for 'drain'.
Writable.prototype.write = function(chunk, encoding, cb) {
  var state = this._writableState;

  // write(chunk, cb) form: the encoding slot actually holds the callback.
  if (typeof encoding === 'function') {
    cb = encoding;
    encoding = null;
  }

  if (Buffer.isBuffer(chunk)) {
    encoding = 'buffer';
  } else if (!encoding) {
    encoding = state.defaultEncoding;
  }

  if (typeof cb !== 'function')
    cb = function() {};

  if (state.ended) {
    writeAfterEnd(this, state, cb);
    return false;
  }

  if (!validChunk(this, state, chunk, cb))
    return false;

  return writeOrBuffer(this, state, chunk, encoding, cb);
};
| |
// Convert a string chunk to a Buffer unless the stream is in object
// mode or string decoding was explicitly disabled; any non-string chunk
// passes through untouched.
function decodeChunk(state, chunk, encoding) {
  var shouldDecode =
      typeof chunk === 'string' &&
      !state.objectMode &&
      state.decodeStrings !== false;
  return shouldDecode ? new Buffer(chunk, encoding) : chunk;
}
| |
// Hand the chunk straight to _write, or queue it if a write is already
// in flight. Returns false once state.length crosses the high-water
// mark, in which case the caller should wait for 'drain'.
function writeOrBuffer(stream, state, chunk, encoding, cb) {
  chunk = decodeChunk(state, chunk, encoding);
  if (Buffer.isBuffer(chunk))
    encoding = 'buffer';

  // In object mode every chunk counts as one unit regardless of size.
  var len = state.objectMode ? 1 : chunk.length;
  state.length += len;

  var belowHighWater = state.length < state.highWaterMark;
  // we must ensure that previous needDrain will not be reset to false.
  if (!belowHighWater)
    state.needDrain = true;

  if (state.writing)
    state.buffer.push(new WriteReq(chunk, encoding, cb));
  else
    doWrite(stream, state, len, chunk, encoding, cb);

  return belowHighWater;
}
| |
// Record what is in flight and dispatch the chunk to _write.
function doWrite(stream, state, len, chunk, encoding, cb) {
  state.writing = true;
  state.writecb = cb;
  state.writelen = len;

  // sync stays true only for the synchronous span of the _write call;
  // onwrite() reads it to decide whether its callback must be deferred.
  state.sync = true;
  stream._write(chunk, encoding, state.onwrite);
  state.sync = false;
}
| |
// Deliver a write error: the user callback first (deferred when the
// failure happened synchronously inside _write, so the caller's stack
// can unwind), then a single 'error' event on the stream.
function onwriteError(stream, state, sync, er, cb) {
  if (sync)
    process.nextTick(cb.bind(null, er));
  else
    cb(er);

  stream._writableState.errorEmitted = true;
  stream.emit('error', er);
}
| |
// Settle the per-write bookkeeping once _write's callback has fired:
// release the buffered-length accounting and clear the in-flight slot.
function onwriteStateUpdate(state) {
  state.length -= state.writelen;
  state.writelen = 0;
  state.writing = false;
  state.writecb = null;
}
| |
// Callback handed to _write (via state.onwrite). Settles state,
// surfaces errors, flushes buffered writes, and fires the user
// callback — deferring it when _write completed synchronously.
function onwrite(stream, er) {
  var state = stream._writableState;
  var sync = state.sync;
  // Grab the user callback before onwriteStateUpdate clears it.
  var cb = state.writecb;

  onwriteStateUpdate(state);

  if (er) {
    onwriteError(stream, state, sync, er, cb);
    return;
  }

  // Check if we're actually ready to finish, but don't emit yet.
  var finished = needFinish(stream, state);

  if (!finished && !state.bufferProcessing && state.buffer.length)
    clearBuffer(stream, state);

  if (!sync) {
    afterWrite(stream, state, finished, cb);
    return;
  }

  process.nextTick(function() {
    afterWrite(stream, state, finished, cb);
  });
}
| |
// Post-write step: run the user callback, plus either the drain check
// (still writing) or the finish check (stream is done).
function afterWrite(stream, state, finished, cb) {
  if (finished) {
    cb();
    finishMaybe(stream, state);
  } else {
    onwriteDrain(stream, state);
    cb();
  }
}
| |
// Emit 'drain' once the buffered length hits zero — but only when a
// prior write() returned false (needDrain). This runs after the
// nextTick deferral in onwrite, so the consumer that saw the false
// return value has had a chance to attach a 'drain' listener.
function onwriteDrain(stream, state) {
  var shouldDrain = state.length === 0 && state.needDrain;
  if (!shouldDrain)
    return;
  state.needDrain = false;
  stream.emit('drain');
}
| |
| |
// if there's something in the buffer waiting, then process it
function clearBuffer(stream, state) {
  // Re-entrancy guard: _write callbacks fired synchronously during this
  // loop must not start a nested clearBuffer pass (checked in onwrite).
  state.bufferProcessing = true;

  for (var c = 0; c < state.buffer.length; c++) {
    var entry = state.buffer[c];
    var chunk = entry.chunk;
    var encoding = entry.encoding;
    var cb = entry.callback;
    // In object mode each queued entry counts as a single unit.
    var len = state.objectMode ? 1 : chunk.length;

    doWrite(stream, state, len, chunk, encoding, cb);

    // if we didn't call the onwrite immediately, then
    // it means that we need to wait until it does.
    // also, that means that the chunk and cb are currently
    // being processed, so move the buffer counter past them.
    if (state.writing) {
      c++;
      break;
    }
  }

  state.bufferProcessing = false;
  // `c` is function-scoped (var), so it is still visible after the loop:
  // keep the unprocessed tail of the queue, or reset it when drained.
  if (c < state.buffer.length)
    state.buffer = state.buffer.slice(c);
  else
    state.buffer.length = 0;
}
| |
// Abstract hook: concrete writable streams must override _write.
Writable.prototype._write = function(chunk, encoding, cb) {
  var er = new Error('not implemented');
  cb(er);
};
| |
// end([chunk][, encoding][, cb]) — optionally write one final chunk,
// then move the stream into its ending state. Redundant calls after
// ending/finishing are ignored.
Writable.prototype.end = function(chunk, encoding, cb) {
  var state = this._writableState;

  // Shuffle optional arguments into place.
  if (typeof chunk === 'function') {
    cb = chunk;
    chunk = null;
    encoding = null;
  } else if (typeof encoding === 'function') {
    cb = encoding;
    encoding = null;
  }

  var hasChunk = chunk !== undefined && chunk !== null;
  if (hasChunk)
    this.write(chunk, encoding);

  // ignore unnecessary end() calls.
  if (!state.ending && !state.finished)
    endWritable(this, state, cb);
};
| |
| |
// True when end() has been requested, everything has been flushed, no
// write is in flight, and 'finish' has not fired yet.
function needFinish(stream, state) {
  if (!state.ending) return false;
  if (state.finished) return false;
  if (state.writing) return false;
  return state.length === 0;
}
| |
// Emit 'finish' exactly once, as soon as the stream qualifies.
// Returns whether the stream was ready to finish.
function finishMaybe(stream, state) {
  var ready = needFinish(stream, state);
  if (ready) {
    state.finished = true;
    stream.emit('finish');
  }
  return ready;
}
| |
// Flip the stream into its ending state and arrange for `cb` to run
// when 'finish' fires — immediately (next tick) if it already has.
function endWritable(stream, state, cb) {
  state.ending = true;
  finishMaybe(stream, state);
  if (cb) {
    if (state.finished)
      process.nextTick(cb);
    else
      stream.once('finish', cb);
  }
  state.ended = true;
}
| |
| }).call(this,require("4dON6Z")) |
| },{"./_stream_duplex":8,"4dON6Z":6,"buffer":1,"core-util-is":13,"inherits":5,"stream":20}],13:[function(require,module,exports){ |
| (function (Buffer){ |
| // Copyright Joyent, Inc. and other Node contributors. |
| // |
| // Permission is hereby granted, free of charge, to any person obtaining a |
| // copy of this software and associated documentation files (the |
| // "Software"), to deal in the Software without restriction, including |
| // without limitation the rights to use, copy, modify, merge, publish, |
| // distribute, sublicense, and/or sell copies of the Software, and to permit |
| // persons to whom the Software is furnished to do so, subject to the |
| // following conditions: |
| // |
| // The above copyright notice and this permission notice shall be included |
| // in all copies or substantial portions of the Software. |
| // |
| // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS |
| // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
| // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN |
| // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, |
| // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR |
| // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE |
| // USE OR OTHER DEALINGS IN THE SOFTWARE. |
| |
// NOTE: These type checking functions intentionally avoid `instanceof`
// because it is fragile and easily faked with `Object.create()`.
function isArray(value) {
  return Array.isArray(value);
}
exports.isArray = isArray;

function isBoolean(value) {
  return typeof value === 'boolean';
}
exports.isBoolean = isBoolean;

function isNull(value) {
  return value === null;
}
exports.isNull = isNull;

function isNullOrUndefined(value) {
  // Loose == null matches both null and undefined.
  return value == null;
}
exports.isNullOrUndefined = isNullOrUndefined;

function isNumber(value) {
  return typeof value === 'number';
}
exports.isNumber = isNumber;

function isString(value) {
  return typeof value === 'string';
}
exports.isString = isString;

function isSymbol(value) {
  return typeof value === 'symbol';
}
exports.isSymbol = isSymbol;

function isUndefined(value) {
  return value === void 0;
}
exports.isUndefined = isUndefined;
| |
// Tag-based checks: use Object.prototype.toString so values from other
// realms (iframes, vm contexts) are still recognized.
function isRegExp(value) {
  return isObject(value) && objectToString(value) === '[object RegExp]';
}
exports.isRegExp = isRegExp;

function isObject(value) {
  return value !== null && typeof value === 'object';
}
exports.isObject = isObject;

function isDate(value) {
  return isObject(value) && objectToString(value) === '[object Date]';
}
exports.isDate = isDate;

function isError(value) {
  // Accept cross-realm errors (via the toString tag) and Error subclasses.
  return isObject(value) &&
      (objectToString(value) === '[object Error]' || value instanceof Error);
}
exports.isError = isError;

function isFunction(value) {
  return typeof value === 'function';
}
exports.isFunction = isFunction;
| |
// A primitive is anything that is not a non-null object or a function.
function isPrimitive(value) {
  if (value === null) return true;
  switch (typeof value) {
    case 'boolean':
    case 'number':
    case 'string':
    case 'symbol': // ES6 symbol
    case 'undefined':
      return true;
    default:
      return false;
  }
}
exports.isPrimitive = isPrimitive;

function isBuffer(value) {
  return Buffer.isBuffer(value);
}
exports.isBuffer = isBuffer;

// Stable cross-realm type tag, e.g. '[object RegExp]'.
function objectToString(o) {
  return Object.prototype.toString.call(o);
}
| }).call(this,require("buffer").Buffer) |
| },{"buffer":1}],14:[function(require,module,exports){ |
| module.exports = Array.isArray || function (arr) { |
| return Object.prototype.toString.call(arr) == '[object Array]'; |
| }; |
| |
| },{}],15:[function(require,module,exports){ |
| // Copyright Joyent, Inc. and other Node contributors. |
| // |
| // Permission is hereby granted, free of charge, to any person obtaining a |
| // copy of this software and associated documentation files (the |
| // "Software"), to deal in the Software without restriction, including |
| // without limitation the rights to use, copy, modify, merge, publish, |
| // distribute, sublicense, and/or sell copies of the Software, and to permit |
| // persons to whom the Software is furnished to do so, subject to the |
| // following conditions: |
| // |
| // The above copyright notice and this permission notice shall be included |
| // in all copies or substantial portions of the Software. |
| // |
| // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS |
| // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
| // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN |
| // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, |
| // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR |
| // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE |
| // USE OR OTHER DEALINGS IN THE SOFTWARE. |
| |
| var Buffer = require('buffer').Buffer; |
| |
// Buffer.isEncoding when available; otherwise a whitelist fallback for
// Buffer shims that predate it.
var isBufferEncoding = Buffer.isEncoding ||
  function(encoding) {
    var known = ['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64',
                 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'];
    return !!encoding && known.indexOf(encoding.toLowerCase()) !== -1;
  }
| |
| |
// Throw early on encodings the Buffer implementation cannot handle.
// A falsy encoding is allowed (callers default it to utf8).
function assertEncoding(encoding) {
  if (!encoding) return;
  if (!isBufferEncoding(encoding)) {
    throw new Error('Unknown encoding: ' + encoding);
  }
}
| |
// StringDecoder(encoding='utf8'): incrementally decodes Buffers into
// strings without splitting a multibyte character across write() calls.
var StringDecoder = exports.StringDecoder = function(encoding) {
  // Normalize: lowercase and strip one '-' or '_' ('utf-8' -> 'utf8').
  this.encoding = (encoding || 'utf8').toLowerCase().replace(/[-_]/, '');
  // NOTE(review): validates the raw argument, not the normalized form.
  assertEncoding(encoding);
  switch (this.encoding) {
    case 'utf8':
      // CESU-8 represents each of Surrogate Pair by 3-bytes
      this.surrogateSize = 3;
      break;
    case 'ucs2':
    case 'utf16le':
      // UTF-16 represents each of Surrogate Pair by 2-bytes
      this.surrogateSize = 2;
      this.detectIncompleteChar = utf16DetectIncompleteChar;
      break;
    case 'base64':
      // Base-64 stores 3 bytes in 4 chars, and pads the remainder.
      this.surrogateSize = 3;
      this.detectIncompleteChar = base64DetectIncompleteChar;
      break;
    default:
      // Remaining encodings never split characters: plain conversion.
      this.write = passThroughWrite;
      return;
  }

  // Scratch space for the bytes of one partially received character.
  this.charBuffer = new Buffer(6);
  this.charReceived = 0;
  this.charLength = 0;
};
| |
| |
// Decode `buffer` into a string, returning all complete characters and
// retaining the bytes of any trailing partial character (or lone lead
// surrogate) until the next write() call.
StringDecoder.prototype.write = function(buffer) {
  var charStr = '';
  var offset = 0;

  // if our last write ended with an incomplete multibyte character
  while (this.charLength) {
    // determine how many remaining bytes this buffer has to offer for this char
    var i = (buffer.length >= this.charLength - this.charReceived) ?
                this.charLength - this.charReceived :
                buffer.length;

    // add the new bytes to the char buffer
    buffer.copy(this.charBuffer, this.charReceived, offset, i);
    this.charReceived += (i - offset);
    offset = i;

    if (this.charReceived < this.charLength) {
      // still not enough chars in this buffer? wait for more ...
      return '';
    }

    // get the character that was split
    charStr = this.charBuffer.slice(0, this.charLength).toString(this.encoding);

    // lead surrogate (D800-DBFF) is also the incomplete character
    var charCode = charStr.charCodeAt(charStr.length - 1);
    if (charCode >= 0xD800 && charCode <= 0xDBFF) {
      // Keep waiting for the trail surrogate before emitting anything.
      this.charLength += this.surrogateSize;
      charStr = '';
      continue;
    }
    this.charReceived = this.charLength = 0;

    // if there are no more bytes in this buffer, just emit our char
    if (i == buffer.length) return charStr;

    // otherwise cut off the characters end from the beginning of this buffer
    buffer = buffer.slice(i, buffer.length);
    break;
  }

  // How many trailing bytes belong to a not-yet-complete character?
  // (Also sets this.charLength as a side effect.)
  var lenIncomplete = this.detectIncompleteChar(buffer);

  var end = buffer.length;
  if (this.charLength) {
    // buffer the incomplete character bytes we got
    buffer.copy(this.charBuffer, 0, buffer.length - lenIncomplete, end);
    this.charReceived = lenIncomplete;
    end -= lenIncomplete;
  }

  charStr += buffer.toString(this.encoding, 0, end);

  // NOTE: `end` is re-declared and re-purposed here — it now indexes
  // the last character of the decoded string, not a buffer offset.
  var end = charStr.length - 1;
  var charCode = charStr.charCodeAt(end);
  // lead surrogate (D800-DBFF) is also the incomplete character
  if (charCode >= 0xD800 && charCode <= 0xDBFF) {
    var size = this.surrogateSize;
    this.charLength += size;
    this.charReceived += size;
    // Shift already-buffered bytes up, then store the lead surrogate's
    // bytes at the front of charBuffer.
    // NOTE(review): overlapping copy within the same buffer looks
    // fragile — verify against upstream string_decoder before changing.
    this.charBuffer.copy(this.charBuffer, size, 0, size);
    this.charBuffer.write(charStr.charAt(charStr.length - 1), this.encoding);
    return charStr.substring(0, end);
  }

  // or just emit the charStr
  return charStr;
};
| |
// UTF-8 variant: scan the last (up to) 3 bytes for a multibyte lead
// byte whose sequence cannot end inside this buffer. Sets
// this.charLength to the announced sequence length and returns how many
// trailing bytes belong to that incomplete sequence (0 if none).
StringDecoder.prototype.detectIncompleteChar = function(buffer) {
  // determine how many bytes we have to check at the end of this buffer
  var i = (buffer.length >= 3) ? 3 : buffer.length;

  // Figure out if one of the last i bytes of our buffer announces an
  // incomplete char.
  for (; i > 0; i--) {
    var c = buffer[buffer.length - i];

    // See http://en.wikipedia.org/wiki/UTF-8#Description

    // 110XXXXX — 2-byte lead in the final position
    if (i == 1 && c >> 5 == 0x06) {
      this.charLength = 2;
      break;
    }

    // 1110XXXX — 3-byte lead within the last 2 bytes
    if (i <= 2 && c >> 4 == 0x0E) {
      this.charLength = 3;
      break;
    }

    // 11110XXX — 4-byte lead within the last 3 bytes
    if (i <= 3 && c >> 3 == 0x1E) {
      this.charLength = 4;
      break;
    }
  }

  // i is 0 when no incomplete sequence was found.
  return i;
};
| |
// Flush: decode any final buffer, then append whatever partial
// character bytes are still pending, converted as-is.
StringDecoder.prototype.end = function(buffer) {
  var res = '';
  if (buffer && buffer.length)
    res = this.write(buffer);

  if (this.charReceived) {
    var pending = this.charBuffer.slice(0, this.charReceived);
    res += pending.toString(this.encoding);
  }

  return res;
};
| |
// write() used for encodings that never split characters:
// stateless, direct conversion.
function passThroughWrite(buffer) {
  var encoding = this.encoding;
  return buffer.toString(encoding);
}
| |
// An odd byte count means the final UTF-16 code unit is half-received.
function utf16DetectIncompleteChar(buffer) {
  var leftover = buffer.length % 2;
  this.charReceived = leftover;
  this.charLength = leftover === 0 ? 0 : 2;
  return leftover;
}
| |
// Base64 emits in 3-byte groups; a remainder means an incomplete group.
function base64DetectIncompleteChar(buffer) {
  var leftover = buffer.length % 3;
  this.charReceived = leftover;
  this.charLength = leftover === 0 ? 0 : 3;
  return leftover;
}
| |
| },{"buffer":1}],16:[function(require,module,exports){ |
| module.exports = require("./lib/_stream_passthrough.js") |
| |
| },{"./lib/_stream_passthrough.js":9}],17:[function(require,module,exports){ |
| exports = module.exports = require('./lib/_stream_readable.js'); |
| exports.Readable = exports; |
| exports.Writable = require('./lib/_stream_writable.js'); |
| exports.Duplex = require('./lib/_stream_duplex.js'); |
| exports.Transform = require('./lib/_stream_transform.js'); |
| exports.PassThrough = require('./lib/_stream_passthrough.js'); |
| |
| },{"./lib/_stream_duplex.js":8,"./lib/_stream_passthrough.js":9,"./lib/_stream_readable.js":10,"./lib/_stream_transform.js":11,"./lib/_stream_writable.js":12}],18:[function(require,module,exports){ |
| module.exports = require("./lib/_stream_transform.js") |
| |
| },{"./lib/_stream_transform.js":11}],19:[function(require,module,exports){ |
| module.exports = require("./lib/_stream_writable.js") |
| |
| },{"./lib/_stream_writable.js":12}],20:[function(require,module,exports){ |
| // Copyright Joyent, Inc. and other Node contributors. |
| // |
| // Permission is hereby granted, free of charge, to any person obtaining a |
| // copy of this software and associated documentation files (the |
| // "Software"), to deal in the Software without restriction, including |
| // without limitation the rights to use, copy, modify, merge, publish, |
| // distribute, sublicense, and/or sell copies of the Software, and to permit |
| // persons to whom the Software is furnished to do so, subject to the |
| // following conditions: |
| // |
| // The above copyright notice and this permission notice shall be included |
| // in all copies or substantial portions of the Software. |
| // |
| // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS |
| // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
| // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN |
| // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, |
| // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR |
| // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE |
| // USE OR OTHER DEALINGS IN THE SOFTWARE. |
| |
| module.exports = Stream; |
| |
| var EE = require('events').EventEmitter; |
| var inherits = require('inherits'); |
| |
| inherits(Stream, EE); |
| Stream.Readable = require('readable-stream/readable.js'); |
| Stream.Writable = require('readable-stream/writable.js'); |
| Stream.Duplex = require('readable-stream/duplex.js'); |
| Stream.Transform = require('readable-stream/transform.js'); |
| Stream.PassThrough = require('readable-stream/passthrough.js'); |
| |
| // Backwards-compat with node 0.4.x |
| Stream.Stream = Stream; |
| |
| |
| |
| // old-style streams. Note that the pipe method (the only relevant |
| // part of this class) is overridden in the Readable class. |
| |
// Minimal streams1 base class: just an EventEmitter. The pipe()
// method defined on the prototype below is its only stream behaviour.
function Stream() {
  EE.call(this);
}
| |
// Classic streams1 pipe: forward 'data' from source to dest with
// pause/resume backpressure, propagate end/close to dest, clean up
// every listener added here, and return dest for chaining.
Stream.prototype.pipe = function(dest, options) {
  var source = this;

  function ondata(chunk) {
    if (dest.writable) {
      // dest signalled backpressure; pause the source if it supports it.
      if (false === dest.write(chunk) && source.pause) {
        source.pause();
      }
    }
  }

  source.on('data', ondata);

  function ondrain() {
    if (source.readable && source.resume) {
      source.resume();
    }
  }

  dest.on('drain', ondrain);

  // If the 'end' option is not supplied, dest.end() will be called when
  // source gets the 'end' or 'close' events. Only dest.end() once.
  if (!dest._isStdio && (!options || options.end !== false)) {
    source.on('end', onend);
    source.on('close', onclose);
  }

  // didOnEnd guards against ending/destroying dest more than once when
  // both 'end' and 'close' fire.
  var didOnEnd = false;
  function onend() {
    if (didOnEnd) return;
    didOnEnd = true;

    dest.end();
  }


  function onclose() {
    if (didOnEnd) return;
    didOnEnd = true;

    if (typeof dest.destroy === 'function') dest.destroy();
  }

  // don't leave dangling pipes when there are errors.
  function onerror(er) {
    cleanup();
    // Rethrow only if nobody else is listening for 'error' here.
    if (EE.listenerCount(this, 'error') === 0) {
      throw er; // Unhandled stream error in pipe.
    }
  }

  source.on('error', onerror);
  dest.on('error', onerror);

  // remove all the event listeners that were added.
  function cleanup() {
    source.removeListener('data', ondata);
    dest.removeListener('drain', ondrain);

    source.removeListener('end', onend);
    source.removeListener('close', onclose);

    source.removeListener('error', onerror);
    dest.removeListener('error', onerror);

    source.removeListener('end', cleanup);
    source.removeListener('close', cleanup);

    dest.removeListener('close', cleanup);
  }

  source.on('end', cleanup);
  source.on('close', cleanup);

  dest.on('close', cleanup);

  dest.emit('pipe', source);

  // Allow for unix-like usage: A.pipe(B).pipe(C)
  return dest;
};
| |
| },{"events":4,"inherits":5,"readable-stream/duplex.js":7,"readable-stream/passthrough.js":16,"readable-stream/readable.js":17,"readable-stream/transform.js":18,"readable-stream/writable.js":19}],21:[function(require,module,exports){ |
| // Copyright Joyent, Inc. and other Node contributors. |
| // |
| // Permission is hereby granted, free of charge, to any person obtaining a |
| // copy of this software and associated documentation files (the |
| // "Software"), to deal in the Software without restriction, including |
| // without limitation the rights to use, copy, modify, merge, publish, |
| // distribute, sublicense, and/or sell copies of the Software, and to permit |
| // persons to whom the Software is furnished to do so, subject to the |
| // following conditions: |
| // |
| // The above copyright notice and this permission notice shall be included |
| // in all copies or substantial portions of the Software. |
| // |
| // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS |
| // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
| // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN |
| // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, |
| // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR |
| // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE |
| // USE OR OTHER DEALINGS IN THE SOFTWARE. |
| |
| var Buffer = require('buffer').Buffer; |
| |
// Throw early on encodings Buffer does not support; a falsy encoding
// is allowed (callers default it to utf8).
function assertEncoding(encoding) {
  if (!encoding) return;
  if (Buffer.isEncoding(encoding)) return;
  throw new Error('Unknown encoding: ' + encoding);
}
| |
// StringDecoder(encoding='utf8'): incrementally decodes Buffers into
// strings without splitting a multibyte character across write() calls.
var StringDecoder = exports.StringDecoder = function(encoding) {
  // Normalize: lowercase and strip one '-' or '_' ('utf-8' -> 'utf8').
  this.encoding = (encoding || 'utf8').toLowerCase().replace(/[-_]/, '');
  // NOTE(review): validates the raw argument, not the normalized form.
  assertEncoding(encoding);
  switch (this.encoding) {
    case 'utf8':
      // CESU-8 represents each of Surrogate Pair by 3-bytes
      this.surrogateSize = 3;
      break;
    case 'ucs2':
    case 'utf16le':
      // UTF-16 represents each of Surrogate Pair by 2-bytes
      this.surrogateSize = 2;
      this.detectIncompleteChar = utf16DetectIncompleteChar;
      break;
    case 'base64':
      // Base-64 stores 3 bytes in 4 chars, and pads the remainder.
      this.surrogateSize = 3;
      this.detectIncompleteChar = base64DetectIncompleteChar;
      break;
    default:
      // Remaining encodings never split characters: plain conversion.
      this.write = passThroughWrite;
      return;
  }

  // Scratch space for the bytes of one partially received character.
  this.charBuffer = new Buffer(6);
  this.charReceived = 0;
  this.charLength = 0;
};
| |
| |
// Decode `buffer` into a string, returning all complete characters and
// retaining the bytes of any trailing partial character (or lone lead
// surrogate) until the next write() call.
StringDecoder.prototype.write = function(buffer) {
  var charStr = '';
  var offset = 0;

  // if our last write ended with an incomplete multibyte character
  while (this.charLength) {
    // determine how many remaining bytes this buffer has to offer for this char
    var i = (buffer.length >= this.charLength - this.charReceived) ?
                this.charLength - this.charReceived :
                buffer.length;

    // add the new bytes to the char buffer
    buffer.copy(this.charBuffer, this.charReceived, offset, i);
    this.charReceived += (i - offset);
    offset = i;

    if (this.charReceived < this.charLength) {
      // still not enough chars in this buffer? wait for more ...
      return '';
    }

    // get the character that was split
    charStr = this.charBuffer.slice(0, this.charLength).toString(this.encoding);

    // lead surrogate (D800-DBFF) is also the incomplete character
    var charCode = charStr.charCodeAt(charStr.length - 1);
    if (charCode >= 0xD800 && charCode <= 0xDBFF) {
      // Keep waiting for the trail surrogate before emitting anything.
      this.charLength += this.surrogateSize;
      charStr = '';
      continue;
    }
    this.charReceived = this.charLength = 0;

    // if there are no more bytes in this buffer, just emit our char
    if (i == buffer.length) return charStr;

    // otherwise cut off the characters end from the beginning of this buffer
    buffer = buffer.slice(i, buffer.length);
    break;
  }

  // How many trailing bytes belong to a not-yet-complete character?
  // (Also sets this.charLength as a side effect.)
  var lenIncomplete = this.detectIncompleteChar(buffer);

  var end = buffer.length;
  if (this.charLength) {
    // buffer the incomplete character bytes we got
    buffer.copy(this.charBuffer, 0, buffer.length - lenIncomplete, end);
    this.charReceived = lenIncomplete;
    end -= lenIncomplete;
  }

  charStr += buffer.toString(this.encoding, 0, end);

  // NOTE: `end` is re-declared and re-purposed here — it now indexes
  // the last character of the decoded string, not a buffer offset.
  var end = charStr.length - 1;
  var charCode = charStr.charCodeAt(end);
  // lead surrogate (D800-DBFF) is also the incomplete character
  if (charCode >= 0xD800 && charCode <= 0xDBFF) {
    var size = this.surrogateSize;
    this.charLength += size;
    this.charReceived += size;
    // Shift already-buffered bytes up, then store the lead surrogate's
    // bytes at the front of charBuffer.
    // NOTE(review): overlapping copy within the same buffer looks
    // fragile — verify against upstream string_decoder before changing.
    this.charBuffer.copy(this.charBuffer, size, 0, size);
    this.charBuffer.write(charStr.charAt(charStr.length - 1), this.encoding);
    return charStr.substring(0, end);
  }

  // or just emit the charStr
  return charStr;
};
| |
// UTF-8 variant: scan the last (up to) 3 bytes for a multibyte lead
// byte whose sequence cannot end inside this buffer. Sets
// this.charLength to the announced sequence length and returns how many
// trailing bytes belong to that incomplete sequence (0 if none).
StringDecoder.prototype.detectIncompleteChar = function(buffer) {
  // determine how many bytes we have to check at the end of this buffer
  var i = (buffer.length >= 3) ? 3 : buffer.length;

  // Figure out if one of the last i bytes of our buffer announces an
  // incomplete char.
  for (; i > 0; i--) {
    var c = buffer[buffer.length - i];

    // See http://en.wikipedia.org/wiki/UTF-8#Description

    // 110XXXXX — 2-byte lead in the final position
    if (i == 1 && c >> 5 == 0x06) {
      this.charLength = 2;
      break;
    }

    // 1110XXXX — 3-byte lead within the last 2 bytes
    if (i <= 2 && c >> 4 == 0x0E) {
      this.charLength = 3;
      break;
    }

    // 11110XXX — 4-byte lead within the last 3 bytes
    if (i <= 3 && c >> 3 == 0x1E) {
      this.charLength = 4;
      break;
    }
  }

  // i is 0 when no incomplete sequence was found.
  return i;
};
| |
// Flush the decoder: decode a final buffer if given, then emit any
// still-pending partial-character bytes converted as-is.
StringDecoder.prototype.end = function(buffer) {
  var res = buffer && buffer.length ? this.write(buffer) : '';

  if (this.charReceived) {
    res += this.charBuffer
      .slice(0, this.charReceived)
      .toString(this.encoding);
  }

  return res;
};
| |
// write() for encodings that never split characters: direct conversion.
function passThroughWrite(buffer) {
  var encoding = this.encoding;
  return buffer.toString(encoding);
}
| |
// An odd byte count means the final UTF-16 code unit is half-received.
function utf16DetectIncompleteChar(buffer) {
  var leftover = buffer.length % 2;
  this.charReceived = leftover;
  this.charLength = leftover === 0 ? 0 : 2;
  return leftover;
}
| |
// Base64 emits in 3-byte groups; a remainder means an incomplete group.
function base64DetectIncompleteChar(buffer) {
  var leftover = buffer.length % 3;
  this.charReceived = leftover;
  this.charLength = leftover === 0 ? 0 : 3;
  return leftover;
}
| |
| },{"buffer":1}],"event-stream":[function(require,module,exports){ |
| module.exports=require('ut3LTG'); |
| },{}],"ut3LTG":[function(require,module,exports){ |
| (function (process,global){ |
| //filter will reemit the data if cb(err,pass) pass is truthy |
| |
| // reduce is more tricky |
| // maybe we want to group the reductions or emit progress updates occasionally |
| // the most basic reduce just emits one 'data' event after it has recieved 'end' |
| |
| var Stream = require('stream').Stream |
| , es = exports |
| , through = require('through') |
| , from = require('from') |
| , duplex = require('duplexer') |
| , map = require('map-stream') |
| , pause = require('pause-stream') |
| , split = require('split') |
| , pipeline = require('stream-combiner') |
| , immediately = global.setImmediate || process.nextTick; |
| |
| es.Stream = Stream //re-export Stream from core |
| es.through = through |
| es.from = from |
| es.duplex = duplex |
| es.map = map |
| es.pause = pause |
| es.split = split |
| es.pipeline = es.connect = es.pipe = pipeline |
| // merge / concat |
| // |
| // combine multiple streams into a single stream. |
| // will emit end only once |
| |
// Funnel the 'data' events of several source streams into one
// readable+writable stream; 'end' is emitted only after every source
// has ended (each source counted at most once).
es.concat = //actually this should be called concat
es.merge = function (/*streams...*/) {
  var sources = [].slice.call(arguments)
  var merged = new Stream()
  merged.setMaxListeners(0) // allow adding more than 11 streams
  var endedCount = 0
  merged.writable = merged.readable = true

  sources.forEach(function (source) {
    source.pipe(merged, {end: false})
    var sawEnd = false
    source.on('end', function () {
      if(sawEnd) return
      sawEnd = true
      endedCount ++
      if(endedCount == sources.length)
        merged.emit('end')
    })
  })
  merged.write = function (data) {
    this.emit('data', data)
  }
  merged.destroy = function () {
    sources.forEach(function (source) {
      if(source.destroy) source.destroy()
    })
  }
  return merged
}
| |
| |
| // writable stream, collects all events into an array |
| // and calls back when 'end' occurs |
| // mainly I'm using this to test the other functions |
| |
// writeArray(done): a writable sink that collects every chunk into an
// array and calls done(null, array) on end. destroy() before end
// reports an error instead.
es.writeArray = function (done) {
  if ('function' !== typeof done)
    throw new Error('function writeArray (done): done must be function')

  var sink = new Stream ()
  var collected = []
  var finished = false

  sink.write = function (chunk) {
    collected.push(chunk)
  }
  sink.end = function () {
    finished = true
    done(null, collected)
  }
  sink.writable = true
  sink.readable = false
  sink.destroy = function () {
    sink.writable = sink.readable = false
    if(finished) return
    done(new Error('destroyed before end'), collected)
  }
  return sink
}
| |
| //return a Stream that reads the properties of an object |
| //respecting pause() and resume() |
| |
// readArray(array): an old-style readable stream that emits each array
// element as a 'data' event (respecting pause()/resume()), then 'end'.
es.readArray = function (array) {
  var stream = new Stream()
    , i = 0
    , paused = false
    , ended = false

  stream.readable = true
  stream.writable = false

  if(!Array.isArray(array))
    throw new Error('event-stream.read expects an array')

  // Emit elements until exhausted, or until pause()/destroy() is
  // called synchronously from a 'data' listener mid-loop.
  stream.resume = function () {
    if(ended) return
    paused = false
    var l = array.length
    while(i < l && !paused && !ended) {
      stream.emit('data', array[i++])
    }
    // Comma operator: mark ended, flip readable off, then emit 'end'.
    if(i == l && !ended)
      ended = true, stream.readable = false, stream.emit('end')
  }
  // Start emitting on the next tick so callers can attach listeners first.
  process.nextTick(stream.resume)
  stream.pause = function () {
    paused = true
  }
  stream.destroy = function () {
    ended = true
    stream.emit('close')
  }
  return stream
}
| |
//
// readable (asyncFunction)
// return a stream that calls an async function while the stream is not paused.
//
// the function must take: (count, callback)
//

es.readable =
function (func, continueOnError) {
  var stream = new Stream()
    , i = 0            // monotonically increasing call count passed to func
    , paused = false
    , ended = false
    , reading = false  // true while a call into func is outstanding

  stream.readable = true
  stream.writable = false

  if('function' !== typeof func)
    throw new Error('event-stream.readable expects async function')

  stream.on('end', function () { ended = true })

  // `get` doubles as the read-loop driver and the completion handler:
  // func calls back with (err, data); get emits the result, then schedules
  // the next call to func unless the stream is ended, paused, or mid-read.
  function get (err, data) {

    if(err) {
      stream.emit('error', err)
      // by default an error terminates the stream; pass continueOnError
      // to keep reading after an error
      if(!continueOnError) stream.emit('end')
    } else if (arguments.length > 1)
      // `get()` with no args (from resume/startup) must not emit data
      stream.emit('data', data)

    // `immediately` is a scheduler defined earlier in this module
    // (setImmediate-style) — NOTE(review): not visible in this chunk, confirm.
    immediately(function () {
      if(ended || paused || reading) return
      try {
        reading = true
        func.call(stream, i++, function () {
          reading = false
          get.apply(null, arguments)
        })
      } catch (err) {
        // sync throw from func becomes an 'error' event
        stream.emit('error', err)
      }
    })
  }
  stream.resume = function () {
    paused = false
    get()
  }
  // kick off the read loop after the caller has had a chance to pause/pipe
  process.nextTick(get)
  stream.pause = function () {
    paused = true
  }
  stream.destroy = function () {
    stream.emit('end')
    stream.emit('close')
    ended = true
  }
  return stream
}
| |
| |
//
// map sync
//

es.mapSync = function (sync) {
  // Synchronous map: apply `sync` to each chunk and re-emit the result.
  // A return value of undefined drops the chunk (acts as a filter).
  return es.through(function write (data) {
    var mapped = sync(data)
    if (mapped !== undefined)
      this.emit('data', mapped)
  })
}
| |
//
// log just print out what is coming through the stream, for debugging
//

// Pass-through stream that logs every chunk to stderr — prefixed with `name`
// when one is given — then re-emits it unchanged.
// Fix: removed the unused local `args` ([].slice.call(arguments)) that the
// original computed on every chunk and never read.
es.log = function (name) {
  return es.through(function (data) {
    if(name) console.error(name, data)
    else console.error(data)
    this.emit('data', data)
  })
}
| |
| |
//
// child -- pipe through a child process
//

es.child = function (child) {
  // Duplex facade over a spawned process: writes go to the child's stdin,
  // reads come from its stdout.
  return es.duplex(child.stdin, child.stdout)
}
| |
//
// parse
//
// must be used after es.split() to ensure that each chunk represents a line
// source.pipe(es.split()).pipe(es.parse())

// Parse each incoming line as JSON and emit the resulting value.
// Unparseable lines are logged to stderr and dropped (best-effort, does not
// error the stream). Fix: corrected the typo in the logged message
// ('attemping' -> 'attempting').
es.parse = function () {
  return es.through(function (data) {
    var obj
    try {
      if(data) //ignore empty lines
        obj = JSON.parse(data.toString())
    } catch (err) {
      return console.error(err, 'attempting to parse:', data)
    }
    //ignore lines that were only whitespace.
    if(obj !== undefined)
      this.emit('data', obj)
  })
}
//
// stringify
//

es.stringify = function () {
  // Inverse of es.parse: JSON-encode each chunk (Buffers are stringified
  // first) and terminate each record with a newline.
  var Buffer = require('buffer').Buffer
  return es.mapSync(function (record) {
    var value = Buffer.isBuffer(record) ? record.toString() : record
    return JSON.stringify(value) + '\n'
  })
}
| |
//
// replace a string within a stream.
//
// warn: just concatenates the string and then does str.split().join().
// probably not optimal.
// for smallish responses, who cares?
// I need this for shadow-npm so it's only relatively small json files.

es.replace = function (from, to) {
  // implemented as split-on-`from` piped into join-with-`to`
  return es.pipeline(es.split(from), es.join(to))
}
| |
//
// join chunks with a joiner. just like Array#join
// also accepts a callback that is passed the chunks appended together
// this is still supported for legacy reasons.
//

es.join = function (str) {

  // legacy api: es.join(callback) behaves like es.wait(callback)
  if('function' === typeof str)
    return es.wait(str)

  // emit the separator before every chunk except the first
  var seenFirst = false
  return es.through(function (data) {
    if(seenFirst)
      this.emit('data', str)
    seenFirst = true
    this.emit('data', data)
    return true
  })
}
| |
| |
//
// wait. callback when 'end' is emitted, with all chunks appended as string.
//

es.wait = function (callback) {
  // Accumulate the whole stream into one string; on end, emit it as a
  // single 'data' event and (optionally) call back with it.
  var body = ''
  return es.through(
    function (data) { body += data },
    function () {
      this.emit('data', body)
      this.emit('end')
      if(callback) callback(null, body)
    })
}
| |
es.pipeable = function () {
  // removed long ago; kept only so old callers fail loudly instead of silently
  throw new Error('[EVENT-STREAM] es.pipeable is deprecated')
}
| |
| }).call(this,require("4dON6Z"),typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) |
| },{"4dON6Z":6,"buffer":1,"duplexer":24,"from":25,"map-stream":26,"pause-stream":27,"split":28,"stream":20,"stream-combiner":29,"through":30}],24:[function(require,module,exports){ |
// duplexer: combine a writable stream and a readable stream into a single
// duplex stream — writes are forwarded to `writer`, reads come from `reader`.
var Stream = require("stream")
// method and event names proxied from the duplex facade to the wrapped streams
var writeMethods = ["write", "end", "destroy"]
var readMethods = ["resume", "pause"]
var readEvents = ["data", "close"]
var slice = Array.prototype.slice

module.exports = duplex
| |
// iterate `list` with `iterator(value, index)`; uses the native forEach when
// available, otherwise falls back to a manual index walk (array-likes).
function forEach (list, iterator) {
  if (list.forEach) {
    return list.forEach(iterator)
  }

  var index = 0
  while (index < list.length) {
    iterator(list[index], index)
    index++
  }
}
| |
// Build the duplex facade: write-side methods proxy to `writer`, read-side
// methods and events proxy to/from `reader`. 'end' is forwarded at most once.
function duplex(writer, reader) {
  var stream = new Stream()
  var ended = false  // guards against forwarding 'end' twice

  // forward write/end/destroy calls to the writer
  forEach(writeMethods, proxyWriter)

  // forward resume/pause calls to the reader
  forEach(readMethods, proxyReader)

  // re-emit data/close events from the reader on the facade
  forEach(readEvents, proxyStream)

  reader.on("end", handleEnd)

  writer.on("drain", function() {
    stream.emit("drain")
  })

  // errors from either side surface on the facade
  writer.on("error", reemit)
  reader.on("error", reemit)

  stream.writable = writer.writable
  stream.readable = reader.readable

  return stream

  function proxyWriter(methodName) {
    stream[methodName] = method

    function method() {
      return writer[methodName].apply(writer, arguments)
    }
  }

  function proxyReader(methodName) {
    stream[methodName] = method

    function method() {
      // announce the call on the facade, then delegate to the reader;
      // if the reader lacks the method, emit the event on it instead
      stream.emit(methodName)
      var func = reader[methodName]
      if (func) {
        return func.apply(reader, arguments)
      }
      reader.emit(methodName)
    }
  }

  function proxyStream(methodName) {
    reader.on(methodName, reemit)

    function reemit() {
      var args = slice.call(arguments)
      args.unshift(methodName)
      stream.emit.apply(stream, args)
    }
  }

  // forward the reader's 'end' (with its arguments) exactly once
  function handleEnd() {
    if (ended) {
      return
    }
    ended = true
    var args = slice.call(arguments)
    args.unshift("end")
    stream.emit.apply(stream, args)
  }

  function reemit(err) {
    stream.emit("error", err)
  }
}
| |
| },{"stream":20}],25:[function(require,module,exports){ |
| (function (process){ |
| |
| 'use strict'; |
| |
| var Stream = require('stream') |
| |
| // from |
| // |
// a stream that reads from a source.
| // source may be an array, or a function. |
| // from handles pause behaviour for you. |
| |
// Create a readable stream driven by `source`.
// - Array source: a copy is drained item by item (shift per read).
// - Function source: called as source.call(stream, index, callback); a truthy
//   return means "call me again synchronously", the callback drives async.
// Fix: removed the duplicated `s.ended = false` initialisation (it was
// assigned twice in the original).
module.exports =
function from (source) {
  if(Array.isArray(source)) {
    // copy so draining does not mutate the caller's array
    source = source.slice()
    return from (function (i) {
      if(source.length)
        this.emit('data', source.shift())
      else
        this.emit('end')
      return true
    })
  }
  var s = new Stream(), i = 0
  s.ended = false
  s.started = false
  s.readable = true
  s.writable = false
  s.paused = false
  s.pause = function () {
    s.started = true
    s.paused = true
  }
  // pull from `source` until it returns falsy, or we end/pause
  function next () {
    s.started = true
    if(s.ended) return
    while(!s.ended && !s.paused && source.call(s, i++, function () {
      if(!s.ended && !s.paused)
        next()
    }))
      ;
  }
  s.resume = function () {
    s.started = true
    s.paused = false
    next()
  }
  s.on('end', function () {
    s.ended = true
    s.readable = false
    process.nextTick(s.destroy)
  })
  s.destroy = function () {
    s.ended = true
    s.emit('close')
  }
  /*
    by default, the stream will start emitting at nextTick
    if you want, you can pause it, after pipeing.
    you can also resume before next tick, and that will also
    work.
  */
  process.nextTick(function () {
    if(!s.started) s.resume()
  })
  return s
}
| |
| }).call(this,require("4dON6Z")) |
| },{"4dON6Z":6,"stream":20}],26:[function(require,module,exports){ |
| (function (process){ |
| //filter will reemit the data if cb(err,pass) pass is truthy |
| |
| // reduce is more tricky |
| // maybe we want to group the reductions or emit progress updates occasionally |
// the most basic reduce just emits one 'data' event after it has received 'end'
| |
| |
| var Stream = require('stream').Stream |
| |
| |
//create an event stream and apply function to each .write
//emitting each response as data
//unless it's an empty callback

// map-stream: async map over a stream. `mapper(data, callback)` may complete
// out of order; results are re-sequenced via `writeQueue` so output order
// matches input order. Returning false from the mapper pauses the stream.
module.exports = function (mapper, opts) {

  var stream = new Stream()
    , self = this          // NOTE(review): `this` is the require-time caller's
                           // context; opts are stored on it, not on the stream
    , inputs = 0           // chunks accepted by write()
    , outputs = 0          // chunks fully processed (emitted or queued)
    , ended = false
    , paused = false
    , destroyed = false
    , lastWritten = 0      // sequence number of the last chunk emitted
    , inNext = false       // true while next() runs synchronously (see write)

  this.opts = opts || {};
  // opts.failures routes mapper errors to 'failure' events instead of 'error'
  var errorEventName = this.opts.failures ? 'failure' : 'error';

  // Items that are not ready to be written yet (because they would come out of
  // order) get stuck in a queue for later.
  var writeQueue = {}

  stream.writable = true
  stream.readable = true

  // emit `data` if it is the next chunk in sequence, otherwise park it in
  // writeQueue; then drain any queued chunks that have become emittable.
  function queueData (data, number) {
    var nextToWrite = lastWritten + 1

    if (number === nextToWrite) {
      // If it's next, and it's not undefined, write it
      if (data !== undefined) {
        stream.emit.apply(stream, ['data', data])
      }
      lastWritten ++
      nextToWrite ++
    } else {
      // Otherwise queue it for later.
      writeQueue[number] = data
    }

    // If the next value is in the queue, write it
    if (writeQueue.hasOwnProperty(nextToWrite)) {
      var dataToWrite = writeQueue[nextToWrite]
      delete writeQueue[nextToWrite]
      return queueData(dataToWrite, nextToWrite)
    }

    outputs ++
    if(inputs === outputs) {
      if(paused) paused = false, stream.emit('drain') //written all the incoming events
      if(ended) end()
    }
  }

  // completion callback for one mapped chunk
  function next (err, data, number) {
    if(destroyed) return
    inNext = true

    // on error the chunk is dropped unless opts.failures is set
    if (!err || self.opts.failures) {
      queueData(data, number)
    }

    if (err) {
      stream.emit.apply(stream, [ errorEventName, err ]);
    }

    inNext = false;
  }

  // Wrap the mapper function by calling its callback with the order number of
  // the item in the stream.
  function wrappedMapper (input, number, callback) {
    return mapper.call(null, input, function(err, data){
      callback(err, data, number)
    })
  }

  stream.write = function (data) {
    if(ended) throw new Error('map stream is not writable')
    inNext = false
    inputs ++

    try {
      //catch sync errors and handle them like async errors
      var written = wrappedMapper(data, inputs, next)
      paused = (written === false)  // mapper returning false requests pause
      return !paused
    } catch (err) {
      //if the callback has been called synchronously, and the error
      //has occurred in a listener, throw it again.
      if(inNext)
        throw err
      next(err)
      return !paused
    }
  }

  function end (data) {
    //if end was called with args, write it,
    ended = true //write will emit 'end' if ended is true
    stream.writable = false
    if(data !== undefined) {
      return queueData(data, inputs)
    } else if (inputs == outputs) { //wait for processing
      stream.readable = false, stream.emit('end'), stream.destroy()
    }
  }

  stream.end = function (data) {
    if(ended) return
    end()
  }

  stream.destroy = function () {
    ended = destroyed = true
    stream.writable = stream.readable = paused = false
    process.nextTick(function () {
      stream.emit('close')
    })
  }
  stream.pause = function () {
    paused = true
  }

  stream.resume = function () {
    paused = false
  }

  return stream
}
| |
| |
| |
| |
| |
| }).call(this,require("4dON6Z")) |
| },{"4dON6Z":6,"stream":20}],27:[function(require,module,exports){ |
//through@2 handles this by default!
// pause-stream is now just an alias for through, which already buffers
// queued data while the stream is paused.
module.exports = require('through')
| |
| |
| },{"through":30}],28:[function(require,module,exports){ |
| //filter will reemit the data if cb(err,pass) pass is truthy |
| |
| // reduce is more tricky |
| // maybe we want to group the reductions or emit progress updates occasionally |
// the most basic reduce just emits one 'data' event after it has received 'end'
| |
| |
// split: break a stream of text chunks into a stream of lines (or pieces
// delimited by an arbitrary matcher). StringDecoder keeps multi-byte
// characters intact across chunk boundaries.
var through = require('through')
var Decoder = require('string_decoder').StringDecoder

module.exports = split
| |
| //TODO pass in a function to map across the lines. |
| |
// split(matcher?, mapper?) — through-stream that buffers text, splits it on
// `matcher` (default: /\r?\n/), and queues each complete piece downstream.
// The trailing partial piece is held back until more data (or end) arrives.
function split (matcher, mapper) {
  var decoder = new Decoder()
  var remainder = ''

  // split(fn): only a mapper was given, fall through to the default matcher
  if('function' === typeof matcher) {
    mapper = matcher
    matcher = null
  }
  if(!matcher)
    matcher = /\r?\n/

  // push one piece downstream, mapping it first when a mapper was given;
  // a mapper returning undefined drops the piece, a mapper throw becomes
  // an 'error' event.
  function emit (stream, piece) {
    if(!mapper)
      return stream.queue(piece)
    try {
      piece = mapper(piece)
    } catch (err) {
      return stream.emit('error', err)
    }
    if('undefined' !== typeof piece)
      stream.queue(piece)
  }

  // split the buffered text; the final (possibly incomplete) piece is kept
  // back in `remainder` for the next chunk.
  function next (stream, chunk) {
    var pieces = (remainder + chunk).split(matcher)
    remainder = pieces.pop()
    for (var i = 0; i < pieces.length; i++)
      emit(stream, pieces[i])
  }

  return through(function (b) {
    next(this, decoder.write(b))
  },
  function () {
    // flush any bytes the decoder buffered, then the trailing piece
    if(decoder.end)
      next(this, decoder.end())
    if(remainder != null)
      emit(this, remainder)
    this.queue(null)
  })
}
| |
| |
| },{"string_decoder":21,"through":30}],29:[function(require,module,exports){ |
| var duplexer = require('duplexer') |
| |
| module.exports = function () { |
| |
| var streams = [].slice.call(arguments) |
| , first = streams[0] |
| , last = streams[streams.length - 1] |
| , thepipe = duplexer(first, last) |
| |
| if(streams.length == 1) |
| return streams[0] |
| else if (!streams.length) |
| throw new Error('connect called with empty args') |
| |
| //pipe all the streams together |
| |
| function recurse (streams) { |
| if(streams.length < 2) |
| return |
| streams[0].pipe(streams[1]) |
| recurse(streams.slice(1)) |
| } |
| |
| recurse(streams) |
| |
| function onerror () { |
| var args = [].slice.call(arguments) |
| args.unshift('error') |
| thepipe.emit.apply(thepipe, args) |
| } |
| |
| //es.duplex already reemits the error from the first and last stream. |
| //add a listener for the inner streams in the pipeline. |
| for(var i = 1; i < streams.length - 1; i ++) |
| streams[i].on('error', onerror) |
| |
| return thepipe |
| } |
| |
| |
| },{"duplexer":24}],30:[function(require,module,exports){ |
| (function (process){ |
var Stream = require('stream')

// through
//
// a stream that does nothing but re-emit the input.
// useful for aggregating a series of changing (but not ending) streams
// into one stream.

exports = module.exports = through
through.through = through
| |
| //create a readable writable stream. |
| |
//create a readable writable stream.
// through(write?, end?, opts?) — `write` is called per chunk (default:
// pass-through via queue), `end` on end (default: queue null = EOF marker).
// Queued data buffers while paused and drains on resume. Unless
// opts.autoDestroy === false the stream destroys itself once both sides
// are finished.
function through (write, end, opts) {
  write = write || function (data) { this.queue(data) }
  end = end || function () { this.queue(null) }

  // ended:    end() has been called (write side closed)
  // _ended:   the EOF marker (null) has been queued (no more queuing allowed)
  // buffer:   chunks queued while paused; null terminates it
  var ended = false, destroyed = false, buffer = [], _ended = false
  var stream = new Stream()
  stream.readable = stream.writable = true
  stream.paused = false

  // stream.autoPause = !(opts && opts.autoPause === false)
  stream.autoDestroy = !(opts && opts.autoDestroy === false)

  stream.write = function (data) {
    write.call(this, data)
    // false = "back off" to the piping stream while we're paused
    return !stream.paused
  }

  // flush queued chunks until paused again; a queued null becomes 'end'
  function drain() {
    while(buffer.length && !stream.paused) {
      var data = buffer.shift()
      if(null === data)
        return stream.emit('end')
      else
        stream.emit('data', data)
    }
  }

  stream.queue = stream.push = function (data) {
    // ignore anything queued after the EOF marker
    if(_ended) return stream
    if(data == null) _ended = true
    buffer.push(data)
    drain()
    return stream
  }

  //this will be registered as the first 'end' listener
  //must call destroy next tick, to make sure we're after any
  //stream piped from here.
  //this is only a problem if end is not emitted synchronously.
  //a nicer way to do this is to make sure this is the last listener for 'end'

  stream.on('end', function () {
    stream.readable = false
    if(!stream.writable && stream.autoDestroy)
      process.nextTick(function () {
        stream.destroy()
      })
  })

  function _end () {
    stream.writable = false
    end.call(stream)
    if(!stream.readable && stream.autoDestroy)
      stream.destroy()
  }

  stream.end = function (data) {
    if(ended) return
    ended = true
    // end(data) is shorthand for write(data) then end()
    if(arguments.length) stream.write(data)
    _end() // will emit or queue
    return stream
  }

  stream.destroy = function () {
    if(destroyed) return
    destroyed = true
    ended = true
    buffer.length = 0  // drop anything still buffered
    stream.writable = stream.readable = false
    stream.emit('close')
    return stream
  }

  stream.pause = function () {
    if(stream.paused) return
    stream.paused = true
    return stream
  }

  stream.resume = function () {
    if(stream.paused) {
      stream.paused = false
      stream.emit('resume')
    }
    drain()
    //may have become paused again,
    //as drain emits 'data'.
    if(!stream.paused)
      stream.emit('drain')
    return stream
  }
  return stream
}
| |
| |
| }).call(this,require("4dON6Z")) |
| },{"4dON6Z":6,"stream":20}]},{},[]) |