aka-worker/cool-dawn-3d3b/node_modules/rollup-plugin-node-polyfills/polyfills/browserify-fs.js

19038 lines
530 KiB
JavaScript

import util$2 from 'util';
import buffer from 'buffer';
import events from 'events';
import stream from 'stream';
import path from 'path';
var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
// Runs a CommonJS module body against a fresh module object so it can be
// embedded in an ES-module bundle, and returns whatever it exported.
function createCommonjsModule(fn, module) {
  module = { exports: {} };
  fn(module, module.exports);
  return module.exports;
}
// Unwraps a module namespace object: prefer its truthy `default` export
// when present, otherwise hand back the namespace itself (this also
// passes through null/undefined unchanged).
function getCjsExportFromNamespace(n) {
  if (n && n['default']) {
    return n['default'];
  }
  return n;
}
var idbstore = createCommonjsModule(function (module) {
/*global window:false, self:false, define:false, module:false */
/**
* @license IDBWrapper - A cross-browser wrapper for IndexedDB
* Version 1.7.2
* Copyright (c) 2011 - 2017 Jens Arps
* http://jensarps.de/
*
* Licensed under the MIT license
*/
(function (name, definition, global) {
if (module.exports) {
module.exports = definition();
} else {
global[name] = definition();
}
})('IDBStore', function () {
// Error handler used when the caller supplies none: re-throw so the
// failure surfaces instead of being silently swallowed.
var defaultErrorHandler = function (err) {
  throw err;
};
// Success handler used when the caller supplies none: an intentional no-op.
var defaultSuccessHandler = function () {
};
// Baseline configuration. The IDBStore constructor copies every key below
// onto the new instance unless the caller's kwArgs defines it.
var defaults = {
storeName: 'Store',
storePrefix: 'IDBWrapper-',
dbVersion: 1,
keyPath: 'id',
autoIncrement: true,
onStoreReady: function () {
},
onError: defaultErrorHandler,
indexes: [],
// IndexedDB entry points probed on window/self, in order of preference.
implementationPreference: [
'indexedDB',
'webkitIndexedDB',
'mozIndexedDB',
'shimIndexedDB'
]
};
/**
*
* The IDBStore constructor
*
* @constructor
* @name IDBStore
* @version 1.7.2
*
* @param {Object} [kwArgs] An options object used to configure the store and
* set callbacks
* @param {String} [kwArgs.storeName='Store'] The name of the store
* @param {String} [kwArgs.storePrefix='IDBWrapper-'] A prefix that is
* internally used to construct the name of the database, which will be
* kwArgs.storePrefix + kwArgs.storeName
* @param {Number} [kwArgs.dbVersion=1] The version of the store
* @param {String} [kwArgs.keyPath='id'] The key path to use. If you want to
* setup IDBWrapper to work with out-of-line keys, you need to set this to
* `null`
* @param {Boolean} [kwArgs.autoIncrement=true] If set to true, IDBStore will
* automatically make sure a unique keyPath value is present on each object
* that is stored.
* @param {Function} [kwArgs.onStoreReady] A callback to be called when the
* store is ready to be used.
* @param {Function} [kwArgs.onError=throw] A callback to be called when an
* error occurred during instantiation of the store.
* @param {Array} [kwArgs.indexes=[]] An array of indexData objects
* defining the indexes to use with the store. For every index to be used
* one indexData object needs to be passed in the array.
* An indexData object is defined as follows:
* @param {Object} [kwArgs.indexes.indexData] An object defining the index to
* use
* @param {String} kwArgs.indexes.indexData.name The name of the index
* @param {String} [kwArgs.indexes.indexData.keyPath] The key path of the index
* @param {Boolean} [kwArgs.indexes.indexData.unique] Whether the index is unique
* @param {Boolean} [kwArgs.indexes.indexData.multiEntry] Whether the index is multi entry
* @param {Array} [kwArgs.implementationPreference=['indexedDB','webkitIndexedDB','mozIndexedDB','shimIndexedDB']] An array of strings naming implementations to be used, in order or preference
* @param {Function} [onStoreReady] A callback to be called when the store
* is ready to be used.
* @example
// create a store for customers with an additional index over the
// `lastname` property.
var myCustomerStore = new IDBStore({
dbVersion: 1,
storeName: 'customer-index',
keyPath: 'customerid',
autoIncrement: true,
onStoreReady: populateTable,
indexes: [
{ name: 'lastname', keyPath: 'lastname', unique: false, multiEntry: false }
]
});
* @example
// create a generic store
var myCustomerStore = new IDBStore({
storeName: 'my-data-store',
onStoreReady: function(){
// start working with the store.
}
});
*/
var IDBStore = function (kwArgs, onStoreReady) {
  // Support the `new IDBStore(callback)` shorthand where the only
  // argument is the ready callback.
  if (typeof onStoreReady == 'undefined' && typeof kwArgs == 'function') {
    onStoreReady = kwArgs;
  }
  if (Object.prototype.toString.call(kwArgs) != '[object Object]') {
    kwArgs = {};
  }
  // Apply defaults; any key explicitly defined in kwArgs wins.
  for (var option in defaults) {
    this[option] = typeof kwArgs[option] != 'undefined' ? kwArgs[option] : defaults[option];
  }
  this.dbName = this.storePrefix + this.storeName;
  this.dbVersion = parseInt(this.dbVersion, 10) || 1;
  if (onStoreReady) {
    this.onStoreReady = onStoreReady;
  }
  // Pick the first IndexedDB implementation that exists in this
  // environment, honouring the configured preference order.
  var env = typeof window == 'object' ? window : self;
  var candidates = this.implementationPreference.filter(function (implName) {
    return implName in env;
  });
  this.implementation = candidates[0];
  this.idb = env[this.implementation];
  this.keyRange = env.IDBKeyRange || env.webkitIDBKeyRange || env.mozIDBKeyRange;
  this.consts = {
    'READ_ONLY': 'readonly',
    'READ_WRITE': 'readwrite',
    'VERSION_CHANGE': 'versionchange',
    'NEXT': 'next',
    'NEXT_NO_DUPLICATE': 'nextunique',
    'PREV': 'prev',
    'PREV_NO_DUPLICATE': 'prevunique'
  };
  this.openDB();
};
/** @lends IDBStore.prototype */
var proto = {
/**
* A pointer to the IDBStore ctor
*
* @private
* @type {Function}
* @constructs
*/
constructor: IDBStore,
/**
* The version of IDBStore
*
* @type {String}
*/
version: '1.7.2',
/**
* A reference to the IndexedDB object
*
* @type {IDBDatabase}
*/
db: null,
/**
* The full name of the IndexedDB used by IDBStore, composed of
* this.storePrefix + this.storeName
*
* @type {String}
*/
dbName: null,
/**
* The version of the IndexedDB used by IDBStore
*
* @type {Number}
*/
dbVersion: null,
/**
* A reference to the objectStore used by IDBStore
*
* @type {IDBObjectStore}
*/
store: null,
/**
* The store name
*
* @type {String}
*/
storeName: null,
/**
* The prefix to prepend to the store name
*
* @type {String}
*/
storePrefix: null,
/**
* The key path
*
* @type {String}
*/
keyPath: null,
/**
* Whether IDBStore uses autoIncrement
*
* @type {Boolean}
*/
autoIncrement: null,
/**
* The indexes used by IDBStore
*
* @type {Array}
*/
indexes: null,
/**
* The implementations to try to use, in order of preference
*
* @type {Array}
*/
implementationPreference: null,
/**
* The actual implementation being used
*
* @type {String}
*/
implementation: '',
/**
* The callback to be called when the store is ready to be used
*
* @type {Function}
*/
onStoreReady: null,
/**
* The callback to be called if an error occurred during instantiation
* of the store
*
* @type {Function}
*/
onError: null,
/**
* The internal insertID counter, combined with Date.now() by
* _addIdPropertyIfNeeded to generate keys for inline-key puts
*
* @type {Number}
* @private
*/
_insertIdCount: 0,
/**
* Opens an IndexedDB; called by the constructor.
*
* Will check if versions match and compare provided index configuration
* with existing ones, and update indexes if necessary.
*
* Will call this.onStoreReady() if everything went well and the store
* is ready to use, and this.onError() if something went wrong.
*
* @private
*
*/
openDB: function () {
var openRequest = this.idb.open(this.dbName, this.dbVersion);
// Set to true when index validation fails so that the success handler
// below does not fire this.onStoreReady().
var preventSuccessCallback = false;
openRequest.onerror = function (errorEvent) {
// hasVersionError is a helper defined elsewhere in this module.
if (hasVersionError(errorEvent)) {
this.onError(new Error('The version number provided is lower than the existing one.'));
} else {
var error;
if (errorEvent.target.error) {
error = errorEvent.target.error;
} else {
// Older implementations expose only a numeric errorCode; synthesize
// a descriptive Error from it.
var errorMessage = 'IndexedDB unknown error occurred when opening DB ' + this.dbName + ' version ' + this.dbVersion;
if ('errorCode' in errorEvent.target) {
errorMessage += ' with error code ' + errorEvent.target.errorCode;
}
error = new Error(errorMessage);
}
this.onError(error);
}
}.bind(this);
// Fires after any needed upgrade has run; validates the store and its
// indexes against the requested configuration.
openRequest.onsuccess = function (event) {
if (preventSuccessCallback) {
return;
}
// A db reference already set means a previous open completed.
if (this.db) {
this.onStoreReady();
return;
}
this.db = event.target.result;
// Ancient setVersion-era implementations report version as a string.
if (typeof this.db.version == 'string') {
this.onError(new Error('The IndexedDB implementation in this browser is outdated. Please upgrade your browser.'));
return;
}
if (!this.db.objectStoreNames.contains(this.storeName)) {
// We should never ever get here.
// Lets notify the user anyway.
this.onError(new Error('Object store couldn\'t be created.'));
return;
}
// Grab a store reference from a throwaway read transaction; it is used
// afterwards for index-name lookups (getIndexList / hasIndex).
var emptyTransaction = this.db.transaction([this.storeName], this.consts.READ_ONLY);
this.store = emptyTransaction.objectStore(this.storeName);
// check indexes
var existingIndexes = Array.prototype.slice.call(this.getIndexList());
this.indexes.forEach(function (indexData) {
var indexName = indexData.name;
if (!indexName) {
preventSuccessCallback = true;
this.onError(new Error('Cannot create index: No index name given.'));
return;
}
this.normalizeIndexData(indexData);
if (this.hasIndex(indexName)) {
// check if it complies
var actualIndex = this.store.index(indexName);
var complies = this.indexComplies(actualIndex, indexData);
if (!complies) {
// Index shape cannot change without a version bump (no upgrade ran).
preventSuccessCallback = true;
this.onError(new Error('Cannot modify index "' + indexName + '" for current version. Please bump version number to ' + ( this.dbVersion + 1 ) + '.'));
}
existingIndexes.splice(existingIndexes.indexOf(indexName), 1);
} else {
preventSuccessCallback = true;
this.onError(new Error('Cannot create new index "' + indexName + '" for current version. Please bump version number to ' + ( this.dbVersion + 1 ) + '.'));
}
}, this);
// Any names left over exist in the db but not in the configuration.
if (existingIndexes.length) {
preventSuccessCallback = true;
this.onError(new Error('Cannot delete index(es) "' + existingIndexes.toString() + '" for current version. Please bump version number to ' + ( this.dbVersion + 1 ) + '.'));
}
preventSuccessCallback || this.onStoreReady();
}.bind(this);
// Fires (before onsuccess) when the requested version is higher than the
// stored one; creates the store and reconciles indexes inside the
// implicit versionchange transaction.
openRequest.onupgradeneeded = function (/* IDBVersionChangeEvent */ event) {
this.db = event.target.result;
if (this.db.objectStoreNames.contains(this.storeName)) {
this.store = event.target.transaction.objectStore(this.storeName);
} else {
var optionalParameters = {autoIncrement: this.autoIncrement};
// keyPath === null means out-of-line keys; omit the property entirely.
if (this.keyPath !== null) {
optionalParameters.keyPath = this.keyPath;
}
this.store = this.db.createObjectStore(this.storeName, optionalParameters);
}
var existingIndexes = Array.prototype.slice.call(this.getIndexList());
this.indexes.forEach(function (indexData) {
var indexName = indexData.name;
if (!indexName) {
preventSuccessCallback = true;
this.onError(new Error('Cannot create index: No index name given.'));
}
this.normalizeIndexData(indexData);
if (this.hasIndex(indexName)) {
// check if it complies
var actualIndex = this.store.index(indexName);
var complies = this.indexComplies(actualIndex, indexData);
if (!complies) {
// index differs, need to delete and re-create
this.store.deleteIndex(indexName);
this.store.createIndex(indexName, indexData.keyPath, {
unique: indexData.unique,
multiEntry: indexData.multiEntry
});
}
existingIndexes.splice(existingIndexes.indexOf(indexName), 1);
} else {
this.store.createIndex(indexName, indexData.keyPath, {
unique: indexData.unique,
multiEntry: indexData.multiEntry
});
}
}, this);
// Drop indexes present in the db but absent from the configuration.
if (existingIndexes.length) {
existingIndexes.forEach(function (_indexName) {
this.store.deleteIndex(_indexName);
}, this);
}
}.bind(this);
},
/**
* Deletes the database used for this store if the IDB implementations
* provides that functionality.
*
* @param {Function} [onSuccess] A callback that is called if deletion
* was successful.
* @param {Function} [onError] A callback that is called if deletion
* failed.
*/
deleteDatabase: function (onSuccess, onError) {
if (this.idb.deleteDatabase) {
this.db.close();
var deleteRequest = this.idb.deleteDatabase(this.dbName);
deleteRequest.onsuccess = onSuccess;
deleteRequest.onerror = onError;
} else {
onError(new Error('Browser does not support IndexedDB deleteDatabase!'));
}
},
/*********************
* data manipulation *
*********************/
/**
* Puts an object into the store. If an entry with the given id exists,
* it will be overwritten. This method has a different signature for inline
* keys and out-of-line keys; please see the examples below.
*
* @param {*} [key] The key to store. This is only needed if IDBWrapper
* is set to use out-of-line keys. For inline keys - the default scenario -
* this can be omitted.
* @param {Object} value The data object to store.
* @param {Function} [onSuccess] A callback that is called if insertion
* was successful.
* @param {Function} [onError] A callback that is called if insertion
* failed.
* @returns {IDBTransaction} The transaction used for this operation.
* @example
// Storing an object, using inline keys (the default scenario):
var myCustomer = {
customerid: 2346223,
lastname: 'Doe',
firstname: 'John'
};
myCustomerStore.put(myCustomer, mySuccessHandler, myErrorHandler);
// Note that passing success- and error-handlers is optional.
* @example
// Storing an object, using out-of-line keys:
var myCustomer = {
lastname: 'Doe',
firstname: 'John'
};
myCustomerStore.put(2346223, myCustomer, mySuccessHandler, myErrorHandler);
// Note that passing success- and error-handlers is optional.
*/
put: function (key, value, onSuccess, onError) {
if (this.keyPath !== null) {
onError = onSuccess;
onSuccess = value;
value = key;
}
onError || (onError = defaultErrorHandler);
onSuccess || (onSuccess = defaultSuccessHandler);
var hasSuccess = false,
result = null,
putRequest;
var putTransaction = this.db.transaction([this.storeName], this.consts.READ_WRITE);
putTransaction.oncomplete = function () {
var callback = hasSuccess ? onSuccess : onError;
callback(result);
};
putTransaction.onabort = onError;
putTransaction.onerror = onError;
if (this.keyPath !== null) { // in-line keys
this._addIdPropertyIfNeeded(value);
putRequest = putTransaction.objectStore(this.storeName).put(value);
} else { // out-of-line keys
putRequest = putTransaction.objectStore(this.storeName).put(value, key);
}
putRequest.onsuccess = function (event) {
hasSuccess = true;
result = event.target.result;
};
putRequest.onerror = onError;
return putTransaction;
},
/**
* Retrieves an object from the store. If no entry exists with the given id,
* the success handler will be called with null as first and only argument.
*
* @param {*} key The id of the object to fetch.
* @param {Function} [onSuccess] A callback that is called if fetching
* was successful. Will receive the object as only argument.
* @param {Function} [onError] A callback that will be called if an error
* occurred during the operation.
* @returns {IDBTransaction} The transaction used for this operation.
*/
get: function (key, onSuccess, onError) {
onError || (onError = defaultErrorHandler);
onSuccess || (onSuccess = defaultSuccessHandler);
var hasSuccess = false,
result = null;
var getTransaction = this.db.transaction([this.storeName], this.consts.READ_ONLY);
getTransaction.oncomplete = function () {
var callback = hasSuccess ? onSuccess : onError;
callback(result);
};
getTransaction.onabort = onError;
getTransaction.onerror = onError;
var getRequest = getTransaction.objectStore(this.storeName).get(key);
getRequest.onsuccess = function (event) {
hasSuccess = true;
result = event.target.result;
};
getRequest.onerror = onError;
return getTransaction;
},
/**
* Removes an object from the store.
*
* @param {*} key The id of the object to remove.
* @param {Function} [onSuccess] A callback that is called if the removal
* was successful.
* @param {Function} [onError] A callback that will be called if an error
* occurred during the operation.
* @returns {IDBTransaction} The transaction used for this operation.
*/
remove: function (key, onSuccess, onError) {
onError || (onError = defaultErrorHandler);
onSuccess || (onSuccess = defaultSuccessHandler);
var hasSuccess = false,
result = null;
var removeTransaction = this.db.transaction([this.storeName], this.consts.READ_WRITE);
removeTransaction.oncomplete = function () {
var callback = hasSuccess ? onSuccess : onError;
callback(result);
};
removeTransaction.onabort = onError;
removeTransaction.onerror = onError;
var deleteRequest = removeTransaction.objectStore(this.storeName)['delete'](key);
deleteRequest.onsuccess = function (event) {
hasSuccess = true;
result = event.target.result;
};
deleteRequest.onerror = onError;
return removeTransaction;
},
/**
* Runs a batch of put and/or remove operations on the store.
*
* @param {Array} dataArray An array of objects containing the operation to run
* and the data object (for put operations).
* @param {Function} [onSuccess] A callback that is called if all operations
* were successful.
* @param {Function} [onError] A callback that is called if an error
* occurred during one of the operations.
* @returns {IDBTransaction} The transaction used for this operation.
*/
batch: function (dataArray, onSuccess, onError) {
onError || (onError = defaultErrorHandler);
onSuccess || (onSuccess = defaultSuccessHandler);
if (Object.prototype.toString.call(dataArray) != '[object Array]') {
onError(new Error('dataArray argument must be of type Array.'));
} else if (dataArray.length === 0) {
return onSuccess(true);
}
var count = dataArray.length;
var called = false;
var hasSuccess = false;
var batchTransaction = this.db.transaction([this.storeName], this.consts.READ_WRITE);
batchTransaction.oncomplete = function () {
var callback = hasSuccess ? onSuccess : onError;
callback(hasSuccess);
};
batchTransaction.onabort = onError;
batchTransaction.onerror = onError;
var onItemSuccess = function () {
count--;
if (count === 0 && !called) {
called = true;
hasSuccess = true;
}
};
dataArray.forEach(function (operation) {
var type = operation.type;
var key = operation.key;
var value = operation.value;
var onItemError = function (err) {
batchTransaction.abort();
if (!called) {
called = true;
onError(err, type, key);
}
};
if (type == 'remove') {
var deleteRequest = batchTransaction.objectStore(this.storeName)['delete'](key);
deleteRequest.onsuccess = onItemSuccess;
deleteRequest.onerror = onItemError;
} else if (type == 'put') {
var putRequest;
if (this.keyPath !== null) { // in-line keys
this._addIdPropertyIfNeeded(value);
putRequest = batchTransaction.objectStore(this.storeName).put(value);
} else { // out-of-line keys
putRequest = batchTransaction.objectStore(this.storeName).put(value, key);
}
putRequest.onsuccess = onItemSuccess;
putRequest.onerror = onItemError;
}
}, this);
return batchTransaction;
},
/**
* Takes an array of objects and stores them in a single transaction.
*
* @param {Array} dataArray An array of objects to store
* @param {Function} [onSuccess] A callback that is called if all operations
* were successful.
* @param {Function} [onError] A callback that is called if an error
* occurred during one of the operations.
* @returns {IDBTransaction} The transaction used for this operation.
*/
putBatch: function (dataArray, onSuccess, onError) {
var batchData = dataArray.map(function (item) {
return {type: 'put', value: item};
});
return this.batch(batchData, onSuccess, onError);
},
/**
* Like putBatch, takes an array of objects and stores them in a single
* transaction, but allows processing of the result values. Returns the
* processed records containing the key for newly created records to the
* onSuccess callback instead of only returning true or false for success.
* In addition, added the option for the caller to specify a key field that
* should be set to the newly created key.
*
* @param {Array} dataArray An array of objects to store
* @param {Object} [options] An object containing optional options
* @param {String} [options.keyField=this.keyPath] Specifies a field in the record to update
* with the auto-incrementing key. Defaults to the store's keyPath.
* @param {Function} [onSuccess] A callback that is called if all operations
* were successful.
* @param {Function} [onError] A callback that is called if an error
* occurred during one of the operations.
* @returns {IDBTransaction} The transaction used for this operation.
*
*/
upsertBatch: function (dataArray, options, onSuccess, onError) {
// handle `dataArray, onSuccess, onError` signature
if (typeof options == 'function') {
onSuccess = options;
onError = onSuccess;
options = {};
}
onError || (onError = defaultErrorHandler);
onSuccess || (onSuccess = defaultSuccessHandler);
options || (options = {});
if (Object.prototype.toString.call(dataArray) != '[object Array]') {
onError(new Error('dataArray argument must be of type Array.'));
}
var keyField = options.keyField || this.keyPath;
var count = dataArray.length;
var called = false;
var hasSuccess = false;
var index = 0; // assume success callbacks are executed in order
var batchTransaction = this.db.transaction([this.storeName], this.consts.READ_WRITE);
batchTransaction.oncomplete = function () {
if (hasSuccess) {
onSuccess(dataArray);
} else {
onError(false);
}
};
batchTransaction.onabort = onError;
batchTransaction.onerror = onError;
var onItemSuccess = function (event) {
var record = dataArray[index++];
record[keyField] = event.target.result;
count--;
if (count === 0 && !called) {
called = true;
hasSuccess = true;
}
};
dataArray.forEach(function (record) {
var key = record.key;
var onItemError = function (err) {
batchTransaction.abort();
if (!called) {
called = true;
onError(err);
}
};
var putRequest;
if (this.keyPath !== null) { // in-line keys
this._addIdPropertyIfNeeded(record);
putRequest = batchTransaction.objectStore(this.storeName).put(record);
} else { // out-of-line keys
putRequest = batchTransaction.objectStore(this.storeName).put(record, key);
}
putRequest.onsuccess = onItemSuccess;
putRequest.onerror = onItemError;
}, this);
return batchTransaction;
},
/**
* Takes an array of keys and removes matching objects in a single
* transaction.
*
* @param {Array} keyArray An array of keys to remove
* @param {Function} [onSuccess] A callback that is called if all operations
* were successful.
* @param {Function} [onError] A callback that is called if an error
* occurred during one of the operations.
* @returns {IDBTransaction} The transaction used for this operation.
*/
removeBatch: function (keyArray, onSuccess, onError) {
var batchData = keyArray.map(function (key) {
return {type: 'remove', key: key};
});
return this.batch(batchData, onSuccess, onError);
},
/**
* Takes an array of keys and fetches matching objects
*
* @param {Array} keyArray An array of keys identifying the objects to fetch
* @param {Function} [onSuccess] A callback that is called if all operations
* were successful.
* @param {Function} [onError] A callback that is called if an error
* occurred during one of the operations.
* @param {String} [arrayType='sparse'] The type of array to pass to the
* success handler. May be one of 'sparse', 'dense' or 'skip'. Defaults to
* 'sparse'. This parameter specifies how to handle the situation if a get
* operation did not throw an error, but there was no matching object in
* the database. In most cases, 'sparse' provides the most desired
* behavior. See the examples for details.
* @returns {IDBTransaction} The transaction used for this operation.
* @example
// given that there are two objects in the database with the keypath
// values 1 and 2, and the call looks like this:
myStore.getBatch([1, 5, 2], onError, function (data) { … }, arrayType);
// this is what the `data` array will be like:
// arrayType == 'sparse':
// data is a sparse array containing two entries and having a length of 3:
[Object, 2: Object]
0: Object
2: Object
length: 3
// calling forEach on data will result in the callback being called two
// times, with the index parameter matching the index of the key in the
// keyArray.
// arrayType == 'dense':
// data is a dense array containing three entries and having a length of 3,
// where data[1] is of type undefined:
[Object, undefined, Object]
0: Object
1: undefined
2: Object
length: 3
// calling forEach on data will result in the callback being called three
// times, with the index parameter matching the index of the key in the
// keyArray, but the second call will have undefined as first argument.
// arrayType == 'skip':
// data is a dense array containing two entries and having a length of 2:
[Object, Object]
0: Object
1: Object
length: 2
// calling forEach on data will result in the callback being called two
// times, with the index parameter not matching the index of the key in the
// keyArray.
*/
getBatch: function (keyArray, onSuccess, onError, arrayType) {
onError || (onError = defaultErrorHandler);
onSuccess || (onSuccess = defaultSuccessHandler);
arrayType || (arrayType = 'sparse');
if (Object.prototype.toString.call(keyArray) != '[object Array]') {
onError(new Error('keyArray argument must be of type Array.'));
} else if (keyArray.length === 0) {
return onSuccess([]);
}
var data = [];
var count = keyArray.length;
var hasSuccess = false;
var result = null;
var batchTransaction = this.db.transaction([this.storeName], this.consts.READ_ONLY);
batchTransaction.oncomplete = function () {
var callback = hasSuccess ? onSuccess : onError;
callback(result);
};
batchTransaction.onabort = onError;
batchTransaction.onerror = onError;
var onItemSuccess = function (event) {
if (event.target.result || arrayType == 'dense') {
data.push(event.target.result);
} else if (arrayType == 'sparse') {
data.length++;
}
count--;
if (count === 0) {
hasSuccess = true;
result = data;
}
};
keyArray.forEach(function (key) {
var onItemError = function (err) {
result = err;
onError(err);
batchTransaction.abort();
};
var getRequest = batchTransaction.objectStore(this.storeName).get(key);
getRequest.onsuccess = onItemSuccess;
getRequest.onerror = onItemError;
}, this);
return batchTransaction;
},
/**
* Fetches all entries in the store.
*
* @param {Function} [onSuccess] A callback that is called if the operation
* was successful. Will receive an array of objects.
* @param {Function} [onError] A callback that will be called if an error
* occurred during the operation.
* @returns {IDBTransaction} The transaction used for this operation.
*/
getAll: function (onSuccess, onError) {
onError || (onError = defaultErrorHandler);
onSuccess || (onSuccess = defaultSuccessHandler);
var getAllTransaction = this.db.transaction([this.storeName], this.consts.READ_ONLY);
var store = getAllTransaction.objectStore(this.storeName);
if (store.getAll) {
this._getAllNative(getAllTransaction, store, onSuccess, onError);
} else {
this._getAllCursor(getAllTransaction, store, onSuccess, onError);
}
return getAllTransaction;
},
/**
* Implements getAll for IDB implementations that have a non-standard
* getAll() method.
*
* @param {IDBTransaction} getAllTransaction An open READ transaction.
* @param {IDBObjectStore} store A reference to the store.
* @param {Function} onSuccess A callback that will be called if the
* operation was successful.
* @param {Function} onError A callback that will be called if an
* error occurred during the operation.
* @private
*/
_getAllNative: function (getAllTransaction, store, onSuccess, onError) {
var hasSuccess = false,
result = null;
getAllTransaction.oncomplete = function () {
var callback = hasSuccess ? onSuccess : onError;
callback(result);
};
getAllTransaction.onabort = onError;
getAllTransaction.onerror = onError;
var getAllRequest = store.getAll();
getAllRequest.onsuccess = function (event) {
hasSuccess = true;
result = event.target.result;
};
getAllRequest.onerror = onError;
},
/**
* Implements getAll for IDB implementations that do not have a getAll()
* method.
*
* @param {IDBTransaction} getAllTransaction An open READ transaction.
* @param {IDBObjectStore} store A reference to the store.
* @param {Function} onSuccess A callback that will be called if the
* operation was successful.
* @param {Function} onError A callback that will be called if an
* error occurred during the operation.
* @private
*/
_getAllCursor: function (getAllTransaction, store, onSuccess, onError) {
var all = [],
hasSuccess = false,
result = null;
getAllTransaction.oncomplete = function () {
var callback = hasSuccess ? onSuccess : onError;
callback(result);
};
getAllTransaction.onabort = onError;
getAllTransaction.onerror = onError;
var cursorRequest = store.openCursor();
cursorRequest.onsuccess = function (event) {
var cursor = event.target.result;
if (cursor) {
all.push(cursor.value);
cursor['continue']();
}
else {
hasSuccess = true;
result = all;
}
};
cursorRequest.onError = onError;
},
/**
* Clears the store, i.e. deletes all entries in the store.
*
* @param {Function} [onSuccess] A callback that will be called if the
* operation was successful.
* @param {Function} [onError] A callback that will be called if an
* error occurred during the operation.
* @returns {IDBTransaction} The transaction used for this operation.
*/
clear: function (onSuccess, onError) {
onError || (onError = defaultErrorHandler);
onSuccess || (onSuccess = defaultSuccessHandler);
var hasSuccess = false,
result = null;
var clearTransaction = this.db.transaction([this.storeName], this.consts.READ_WRITE);
clearTransaction.oncomplete = function () {
var callback = hasSuccess ? onSuccess : onError;
callback(result);
};
clearTransaction.onabort = onError;
clearTransaction.onerror = onError;
var clearRequest = clearTransaction.objectStore(this.storeName).clear();
clearRequest.onsuccess = function (event) {
hasSuccess = true;
result = event.target.result;
};
clearRequest.onerror = onError;
return clearTransaction;
},
/**
* Checks if an id property needs to be present on an object and adds one if
* necessary.
*
* @param {Object} dataObj The data object that is about to be stored
* @private
*/
_addIdPropertyIfNeeded: function (dataObj) {
if (typeof dataObj[this.keyPath] == 'undefined') {
dataObj[this.keyPath] = this._insertIdCount++ + Date.now();
}
},
/************
* indexing *
************/
/**
* Returns a DOMStringList of index names of the store.
*
* @return {DOMStringList} The list of index names
*/
getIndexList: function () {
return this.store.indexNames;
},
/**
* Checks if an index with the given name exists in the store.
*
* @param {String} indexName The name of the index to look for
* @return {Boolean} Whether the store contains an index with the given name
*/
hasIndex: function (indexName) {
return this.store.indexNames.contains(indexName);
},
/**
* Normalizes an object containing index data and assures that all
* properties are set.
*
* @param {Object} indexData The index data object to normalize
* @param {String} indexData.name The name of the index
* @param {String} [indexData.keyPath] The key path of the index
* @param {Boolean} [indexData.unique] Whether the index is unique
* @param {Boolean} [indexData.multiEntry] Whether the index is multi entry
*/
normalizeIndexData: function (indexData) {
indexData.keyPath = indexData.keyPath || indexData.name;
indexData.unique = !!indexData.unique;
indexData.multiEntry = !!indexData.multiEntry;
},
/**
* Checks if an actual index complies with an expected index.
*
* @param {IDBIndex} actual The actual index found in the store
* @param {Object} expected An Object describing an expected index
* @return {Boolean} Whether both index definitions are identical
*/
indexComplies: function (actual, expected) {
var complies = ['keyPath', 'unique', 'multiEntry'].every(function (key) {
// IE10 returns undefined for no multiEntry
if (key == 'multiEntry' && actual[key] === undefined && expected[key] === false) {
return true;
}
// Compound keys
if (key == 'keyPath' && Object.prototype.toString.call(expected[key]) == '[object Array]') {
var exp = expected.keyPath;
var act = actual.keyPath;
// IE10 can't handle keyPath sequences and stores them as a string.
// The index will be unusable there, but let's still return true if
// the keyPath sequence matches.
if (typeof act == 'string') {
return exp.toString() == act;
}
// Chrome/Opera stores keyPath squences as DOMStringList, Firefox
// as Array
if (!(typeof act.contains == 'function' || typeof act.indexOf == 'function')) {
return false;
}
if (act.length !== exp.length) {
return false;
}
for (var i = 0, m = exp.length; i < m; i++) {
if (!( (act.contains && act.contains(exp[i])) || act.indexOf(exp[i] !== -1) )) {
return false;
}
}
return true;
}
return expected[key] == actual[key];
});
return complies;
},
/**********
* cursor *
**********/
/**
* Iterates over the store using the given options and calling onItem
* for each entry matching the options.
*
* @param {Function} onItem A callback to be called for each match
* @param {Object} [options] An object defining specific options
* @param {String} [options.index=null] A name of an IDBIndex to operate on
* @param {String} [options.order=ASC] The order in which to provide the
* results, can be 'DESC' or 'ASC'
* @param {Boolean} [options.autoContinue=true] Whether to automatically
* iterate the cursor to the next result
* @param {Boolean} [options.filterDuplicates=false] Whether to exclude
* duplicate matches
* @param {IDBKeyRange} [options.keyRange=null] An IDBKeyRange to use
* @param {Boolean} [options.writeAccess=false] Whether grant write access
* to the store in the onItem callback
* @param {Function} [options.onEnd=null] A callback to be called after
* iteration has ended
* @param {Function} [options.onError=throw] A callback to be called
* if an error occurred during the operation.
* @param {Number} [options.limit=Infinity] Limit the number of returned
* results to this number
* @param {Number} [options.offset=0] Skip the provided number of results
* in the resultset
* @param {Boolean} [options.allowItemRejection=false] Allows the onItem
* function to return a Boolean to accept or reject the current item
* @returns {IDBTransaction} The transaction used for this operation.
*/
iterate: function (onItem, options) {
  // Fill in defaults for any iteration options the caller omitted.
  options = mixin({
    index: null,
    order: 'ASC',
    autoContinue: true,
    filterDuplicates: false,
    keyRange: null,
    writeAccess: false,
    onEnd: null,
    onError: defaultErrorHandler,
    limit: Infinity,
    offset: 0,
    allowItemRejection: false
  }, options || {});
  // Map order/duplicate options onto an IDBCursor direction constant name.
  var directionType = options.order.toLowerCase() == 'desc' ? 'PREV' : 'NEXT';
  if (options.filterDuplicates) {
    directionType += '_NO_DUPLICATE';
  }
  // Flipped once iteration finished cleanly; oncomplete uses it to tell
  // success apart from an aborted/failed transaction.
  var hasSuccess = false;
  var cursorTransaction = this.db.transaction([this.storeName], this.consts[options.writeAccess ? 'READ_WRITE' : 'READ_ONLY']);
  var cursorTarget = cursorTransaction.objectStore(this.storeName);
  if (options.index) {
    cursorTarget = cursorTarget.index(options.index);
  }
  var recordCount = 0;
  cursorTransaction.oncomplete = function () {
    if (!hasSuccess) {
      options.onError(null);
      return;
    }
    if (options.onEnd) {
      options.onEnd();
    } else {
      // Legacy convention: signal end-of-iteration with a null item.
      onItem(null);
    }
  };
  cursorTransaction.onabort = options.onError;
  cursorTransaction.onerror = options.onError;
  var cursorRequest = cursorTarget.openCursor(options.keyRange, this.consts[directionType]);
  cursorRequest.onerror = options.onError;
  cursorRequest.onsuccess = function (event) {
    var cursor = event.target.result;
    if (cursor) {
      if (options.offset) {
        // Skip `offset` records in one jump, then zero it so the skip
        // only happens once.
        cursor.advance(options.offset);
        options.offset = 0;
      } else {
        var onItemReturn = onItem(cursor.value, cursor, cursorTransaction);
        // Rejected items (onItem returned false) do not count towards limit.
        if (!options.allowItemRejection || onItemReturn !== false) {
          recordCount++;
        }
        if (options.autoContinue) {
          if (recordCount + options.offset < options.limit) {
            cursor['continue']();
          } else {
            // Limit reached: stop advancing, let the transaction complete.
            hasSuccess = true;
          }
        }
      }
    } else {
      // Cursor exhausted: iteration ran to the end.
      hasSuccess = true;
    }
  };
  return cursorTransaction;
},
/**
* Runs a query against the store and passes an array containing matched
* objects to the success handler.
*
* @param {Function} onSuccess A callback to be called when the operation
* was successful.
* @param {Object} [options] An object defining specific options
* @param {String} [options.index=null] A name of an IDBIndex to operate on
* @param {String} [options.order=ASC] The order in which to provide the
* results, can be 'DESC' or 'ASC'
* @param {Boolean} [options.filterDuplicates=false] Whether to exclude
* duplicate matches
* @param {IDBKeyRange} [options.keyRange=null] An IDBKeyRange to use
* @param {Function} [options.onError=throw] A callback to be called
* if an error occurred during the operation.
* @param {Number} [options.limit=Infinity] Limit the number of returned
* results to this number
* @param {Number} [options.offset=0] Skip the provided number of results
* in the resultset
* @param {Function} [options.filter=null] A custom filter function to
* apply to query results before returning. Must return `false` to reject
* an item. Can be combined with keyRanges.
* @returns {IDBTransaction} The transaction used for this operation.
*/
query: function (onSuccess, options) {
var result = [],
processedItems = 0;
options = options || {};
options.autoContinue = true;
options.writeAccess = false;
options.allowItemRejection = !!options.filter;
options.onEnd = function () {
onSuccess(result, processedItems);
};
return this.iterate(function (item) {
processedItems++;
var accept = options.filter ? options.filter(item) : true;
if (accept !== false) {
result.push(item);
}
return accept;
}, options);
},
/**
*
* Runs a query against the store, but only returns the number of matches
* instead of the matches itself.
*
* @param {Function} onSuccess A callback to be called if the operation
* was successful.
* @param {Object} [options] An object defining specific options
* @param {String} [options.index=null] A name of an IDBIndex to operate on
* @param {IDBKeyRange} [options.keyRange=null] An IDBKeyRange to use
* @param {Function} [options.onError=throw] A callback to be called if an error
* occurred during the operation.
* @returns {IDBTransaction} The transaction used for this operation.
*/
count: function (onSuccess, options) {
options = mixin({
index: null,
keyRange: null
}, options || {});
var onError = options.onError || defaultErrorHandler;
var hasSuccess = false,
result = null;
var cursorTransaction = this.db.transaction([this.storeName], this.consts.READ_ONLY);
cursorTransaction.oncomplete = function () {
var callback = hasSuccess ? onSuccess : onError;
callback(result);
};
cursorTransaction.onabort = onError;
cursorTransaction.onerror = onError;
var cursorTarget = cursorTransaction.objectStore(this.storeName);
if (options.index) {
cursorTarget = cursorTarget.index(options.index);
}
var countRequest = cursorTarget.count(options.keyRange);
countRequest.onsuccess = function (evt) {
hasSuccess = true;
result = evt.target.result;
};
countRequest.onError = onError;
return cursorTransaction;
},
/**************/
/* key ranges */
/**************/
/**
* Creates a key range using specified options. This key range can be
* handed over to the count() and iterate() methods.
*
* Note: You must provide at least one or both of "lower" or "upper" value.
*
* @param {Object} options The options for the key range to create
* @param {*} [options.lower] The lower bound
* @param {Boolean} [options.excludeLower] Whether to exclude the lower
* bound passed in options.lower from the key range
* @param {*} [options.upper] The upper bound
* @param {Boolean} [options.excludeUpper] Whether to exclude the upper
* bound passed in options.upper from the key range
* @param {*} [options.only] A single key value. Use this if you need a key
* range that only includes one value for a key. Providing this
* property invalidates all other properties.
* @return {IDBKeyRange} The IDBKeyRange representing the specified options
*/
makeKeyRange: function (options) {
/*jshint onecase:true */
var keyRange,
hasLower = typeof options.lower != 'undefined',
hasUpper = typeof options.upper != 'undefined',
isOnly = typeof options.only != 'undefined';
switch (true) {
case isOnly:
keyRange = this.keyRange.only(options.only);
break;
case hasLower && hasUpper:
keyRange = this.keyRange.bound(options.lower, options.upper, options.excludeLower, options.excludeUpper);
break;
case hasLower:
keyRange = this.keyRange.lowerBound(options.lower, options.excludeLower);
break;
case hasUpper:
keyRange = this.keyRange.upperBound(options.upper, options.excludeUpper);
break;
default:
throw new Error('Cannot create KeyRange. Provide one or both of "lower" or "upper" value, or an "only" value.');
}
return keyRange;
}
};
/** helpers **/
var empty = {};
function mixin (target, source) {
  // Shallow-copy enumerable properties from source onto target, skipping
  // values inherited from Object.prototype and values already present.
  for (var key in source) {
    var value = source[key];
    if (value === empty[key] || value === target[key]) {
      continue;
    }
    target[key] = value;
  }
  return target;
}
function hasVersionError(errorEvent) {
  // Newer implementations expose a DOMError on the event target…
  var target = errorEvent.target;
  if ('error' in target) {
    return target.error.name == 'VersionError';
  }
  // …older ones expose a numeric errorCode (12 == VersionError).
  if ('errorCode' in target) {
    return target.errorCode == 12;
  }
  return false;
}
IDBStore.prototype = proto;
IDBStore.version = proto.version;
return IDBStore;
}, commonjsGlobal);
});
var xtend = extend;
// Shallow-merge the own enumerable keys of every argument into a fresh
// object; later arguments win on key collisions.
function extend() {
  var merged = {};
  for (var i = 0; i < arguments.length; i++) {
    var src = arguments[i];
    for (var prop in src) {
      if (src.hasOwnProperty(prop)) {
        merged[prop] = src[prop];
      }
    }
  }
  return merged;
}
/* Copyright (c) 2013 Rod Vagg, MIT License */
// Base class for leveldown-style iterators: serializes next() calls and
// guards against use after end().
function AbstractIterator (db) {
  this.db = db;
  this._ended = false;
  this._nexting = false;
}
AbstractIterator.prototype.next = function (callback) {
  var self = this;
  if (typeof callback != 'function') {
    throw new Error('next() requires a callback argument');
  }
  if (self._ended) {
    return callback(new Error('cannot call next() after end()'));
  }
  if (self._nexting) {
    return callback(new Error('cannot call next() before previous next() has completed'));
  }
  self._nexting = true;
  if (typeof self._next == 'function') {
    // Delegate to the subclass hook, clearing the in-flight flag first.
    return self._next(function () {
      self._nexting = false;
      callback.apply(null, arguments);
    });
  }
  // Default implementation: an empty iterator that completes asynchronously.
  process.nextTick(function () {
    self._nexting = false;
    callback();
  });
};
AbstractIterator.prototype.end = function (callback) {
  if (typeof callback != 'function') {
    throw new Error('end() requires a callback argument');
  }
  if (this._ended) {
    return callback(new Error('end() already called on iterator'));
  }
  this._ended = true;
  if (typeof this._end == 'function') {
    return this._end(callback);
  }
  process.nextTick(callback);
};
var abstractIterator = AbstractIterator;
/* Copyright (c) 2013 Rod Vagg, MIT License */
// Collects put/del operations until write() hands them to the backend.
function AbstractChainedBatch (db) {
  this._db = db;
  this._operations = [];
  this._written = false;
}
AbstractChainedBatch.prototype._checkWritten = function () {
  if (this._written) {
    throw new Error('write() already called on this batch');
  }
};
AbstractChainedBatch.prototype.put = function (key, value) {
  this._checkWritten();
  var err = this._db._checkKeyValue(key, 'key', this._db._isBuffer);
  if (err) {
    throw err;
  }
  err = this._db._checkKeyValue(value, 'value', this._db._isBuffer);
  if (err) {
    throw err;
  }
  // Non-buffer keys/values are stored as strings, mirroring leveldown.
  if (!this._db._isBuffer(key)) {
    key = String(key);
  }
  if (!this._db._isBuffer(value)) {
    value = String(value);
  }
  if (typeof this._put == 'function') {
    this._put(key, value);
  } else {
    this._operations.push({ type: 'put', key: key, value: value });
  }
  return this;
};
AbstractChainedBatch.prototype.del = function (key) {
  this._checkWritten();
  var err = this._db._checkKeyValue(key, 'key', this._db._isBuffer);
  if (err) {
    throw err;
  }
  if (!this._db._isBuffer(key)) {
    key = String(key);
  }
  if (typeof this._del == 'function') {
    this._del(key);
  } else {
    this._operations.push({ type: 'del', key: key });
  }
  return this;
};
AbstractChainedBatch.prototype.clear = function () {
  this._checkWritten();
  this._operations = [];
  if (typeof this._clear == 'function') {
    this._clear();
  }
  return this;
};
AbstractChainedBatch.prototype.write = function (options, callback) {
  this._checkWritten();
  if (typeof options == 'function') {
    callback = options;
  }
  if (typeof callback != 'function') {
    throw new Error('write() requires a callback argument');
  }
  if (typeof options != 'object') {
    options = {};
  }
  this._written = true;
  if (typeof this._write == 'function') {
    return this._write(callback);
  }
  if (typeof this._db._batch == 'function') {
    return this._db._batch(this._operations, options, callback);
  }
  process.nextTick(callback);
};
var abstractChainedBatch = AbstractChainedBatch;
/* Copyright (c) 2013 Rod Vagg, MIT License */
// Base class for leveldown-compatible stores. Validates arguments and
// delegates to subclass hooks (_open, _get, _put, ...) when present.
function AbstractLevelDOWN (location) {
  if (!arguments.length || location === undefined) {
    throw new Error('constructor requires at least a location argument');
  }
  if (typeof location != 'string') {
    throw new Error('constructor requires a location string argument');
  }
  this.location = location;
}
AbstractLevelDOWN.prototype.open = function (options, callback) {
  if (typeof options == 'function') {
    callback = options;
  }
  if (typeof callback != 'function') {
    throw new Error('open() requires a callback argument');
  }
  if (typeof options != 'object') {
    options = {};
  }
  if (typeof this._open == 'function') {
    return this._open(options, callback);
  }
  // No backend hook: succeed asynchronously.
  process.nextTick(callback);
};
AbstractLevelDOWN.prototype.close = function (callback) {
  if (typeof callback != 'function') {
    throw new Error('close() requires a callback argument');
  }
  if (typeof this._close == 'function') {
    return this._close(callback);
  }
  process.nextTick(callback);
};
AbstractLevelDOWN.prototype.get = function (key, options, callback) {
  if (typeof options == 'function') {
    callback = options;
  }
  if (typeof callback != 'function') {
    throw new Error('get() requires a callback argument');
  }
  var err = this._checkKeyValue(key, 'key', this._isBuffer);
  if (err) {
    return callback(err);
  }
  if (!this._isBuffer(key)) {
    key = String(key);
  }
  if (typeof options != 'object') {
    options = {};
  }
  if (typeof this._get == 'function') {
    return this._get(key, options, callback);
  }
  process.nextTick(function () { callback(new Error('NotFound')); });
};
AbstractLevelDOWN.prototype.put = function (key, value, options, callback) {
  if (typeof options == 'function') {
    callback = options;
  }
  if (typeof callback != 'function') {
    throw new Error('put() requires a callback argument');
  }
  var err = this._checkKeyValue(key, 'key', this._isBuffer);
  if (err) {
    return callback(err);
  }
  err = this._checkKeyValue(value, 'value', this._isBuffer);
  if (err) {
    return callback(err);
  }
  if (!this._isBuffer(key)) {
    key = String(key);
  }
  // coerce value to string in node, don't touch it in browser
  // (indexeddb can store any JS type)
  if (!this._isBuffer(value) && !process.browser) {
    value = String(value);
  }
  if (typeof options != 'object') {
    options = {};
  }
  if (typeof this._put == 'function') {
    return this._put(key, value, options, callback);
  }
  process.nextTick(callback);
};
AbstractLevelDOWN.prototype.del = function (key, options, callback) {
  if (typeof options == 'function') {
    callback = options;
  }
  if (typeof callback != 'function') {
    throw new Error('del() requires a callback argument');
  }
  var err = this._checkKeyValue(key, 'key', this._isBuffer);
  if (err) {
    return callback(err);
  }
  if (!this._isBuffer(key)) {
    key = String(key);
  }
  if (typeof options != 'object') {
    options = {};
  }
  if (typeof this._del == 'function') {
    return this._del(key, options, callback);
  }
  process.nextTick(callback);
};
AbstractLevelDOWN.prototype.batch = function (array, options, callback) {
  // batch() with no arguments returns a chained-batch builder instead.
  if (!arguments.length) {
    return this._chainedBatch();
  }
  if (typeof options == 'function') {
    callback = options;
  }
  if (typeof callback != 'function') {
    throw new Error('batch(array) requires a callback argument');
  }
  if (!Array.isArray(array)) {
    return callback(new Error('batch(array) requires an array argument'));
  }
  if (typeof options != 'object') {
    options = {};
  }
  // Validate every operation up front so the callback sees the first error.
  for (var i = 0, l = array.length; i < l; i++) {
    var op = array[i];
    if (typeof op != 'object') {
      continue;
    }
    var err = this._checkKeyValue(op.type, 'type', this._isBuffer);
    if (err) {
      return callback(err);
    }
    err = this._checkKeyValue(op.key, 'key', this._isBuffer);
    if (err) {
      return callback(err);
    }
    if (op.type == 'put') {
      err = this._checkKeyValue(op.value, 'value', this._isBuffer);
      if (err) {
        return callback(err);
      }
    }
  }
  if (typeof this._batch == 'function') {
    return this._batch(array, options, callback);
  }
  process.nextTick(callback);
};
//TODO: remove from here, not a necessary primitive
AbstractLevelDOWN.prototype.approximateSize = function (start, end, callback) {
  if (start == null || end == null || typeof start == 'function' || typeof end == 'function') {
    throw new Error('approximateSize() requires valid `start`, `end` and `callback` arguments');
  }
  if (typeof callback != 'function') {
    throw new Error('approximateSize() requires a callback argument');
  }
  if (!this._isBuffer(start)) {
    start = String(start);
  }
  if (!this._isBuffer(end)) {
    end = String(end);
  }
  if (typeof this._approximateSize == 'function') {
    return this._approximateSize(start, end, callback);
  }
  process.nextTick(function () {
    callback(null, 0);
  });
};
AbstractLevelDOWN.prototype._setupIteratorOptions = function (options) {
  var self = this;
  options = xtend(options);
  // Empty-buffer bounds are meaningless; drop them before range resolution.
  [ 'start', 'end', 'gt', 'gte', 'lt', 'lte' ].forEach(function (o) {
    if (options[o] && self._isBuffer(options[o]) && options[o].length === 0) {
      delete options[o];
    }
  });
  options.reverse = !!options.reverse;
  // fix `start` so it takes into account gt, gte, lt, lte as appropriate
  // (later assignments win: lte/gte take precedence over lt/gt).
  if (options.reverse && options.lt) {
    options.start = options.lt;
  }
  if (options.reverse && options.lte) {
    options.start = options.lte;
  }
  if (!options.reverse && options.gt) {
    options.start = options.gt;
  }
  if (!options.reverse && options.gte) {
    options.start = options.gte;
  }
  if ((options.reverse && options.lt && !options.lte) ||
      (!options.reverse && options.gt && !options.gte)) {
    options.exclusiveStart = true; // start should *not* include matching key
  }
  return options;
};
AbstractLevelDOWN.prototype.iterator = function (options) {
  if (typeof options != 'object') {
    options = {};
  }
  options = this._setupIteratorOptions(options);
  if (typeof this._iterator == 'function') {
    return this._iterator(options);
  }
  return new abstractIterator(this);
};
AbstractLevelDOWN.prototype._chainedBatch = function () {
  return new abstractChainedBatch(this);
};
AbstractLevelDOWN.prototype._isBuffer = function (obj) {
  return Buffer.isBuffer(obj);
};
AbstractLevelDOWN.prototype._checkKeyValue = function (obj, type) {
  if (obj === null || obj === undefined) {
    return new Error(type + ' cannot be `null` or `undefined`');
  }
  if (this._isBuffer(obj)) {
    if (obj.length === 0) {
      return new Error(type + ' cannot be an empty Buffer');
    }
    return;
  }
  if (String(obj) === '') {
    return new Error(type + ' cannot be an empty String');
  }
};
// Rollup-generated named exports of the inlined abstract-leveldown module.
var AbstractLevelDOWN_1 = AbstractLevelDOWN;
var AbstractIterator_1 = abstractIterator;
var AbstractChainedBatch_1 = abstractChainedBatch;
var abstractLeveldown = {
  AbstractLevelDOWN: AbstractLevelDOWN_1,
  AbstractIterator: AbstractIterator_1,
  AbstractChainedBatch: AbstractChainedBatch_1
};
var ltgt = createCommonjsModule(function (module, exports) {
  // Helpers for interpreting leveldown-style range options
  // (gt/gte/lt/lte/min/max/start/end, plus `reverse`).
  exports.compare = function (a, b) {
    // Buffers compare bytewise, then by length; everything else uses < / >.
    if(Buffer.isBuffer(a)) {
      var l = Math.min(a.length, b.length);
      for(var i = 0; i < l; i++) {
        var cmp = a[i] - b[i];
        if(cmp) return cmp
      }
      return a.length - b.length
    }
    return a < b ? -1 : a > b ? 1 : 0
  };
  // to be compatible with the current abstract-leveldown tests, a bound is
  // treated as unset when it is undefined or the empty string.
  // I could use !!val but I want to permit numbers and booleans,
  // if possible.
  function isDef (val) {
    return val !== undefined && val !== ''
  }
  function has (range, name) {
    return Object.hasOwnProperty.call(range, name)
  }
  // Like `has`, but yields the property name itself (or false) so callers
  // can chain lookups with ||.
  function hasKey(range, name) {
    return Object.hasOwnProperty.call(range, name) && name
  }
  // Name of the option that supplies the lower bound, in precedence order.
  // With `reverse`, `end` is the lower bound instead of `start`.
  var lowerBoundKey = exports.lowerBoundKey = function (range) {
    return (
      hasKey(range, 'gt')
      || hasKey(range, 'gte')
      || hasKey(range, 'min')
      || (range.reverse ? hasKey(range, 'end') : hasKey(range, 'start'))
      || undefined
    )
  };
  var lowerBound = exports.lowerBound = function (range, def) {
    var k = lowerBoundKey(range);
    return k ? range[k] : def
  };
  // Only a `gt` bound is exclusive; gte/min/start/end are inclusive.
  var lowerBoundInclusive = exports.lowerBoundInclusive = function (range) {
    return has(range, 'gt') ? false : true
  };
  // Only a `lt` bound is exclusive; lte/max/start/end are inclusive.
  var upperBoundInclusive = exports.upperBoundInclusive =
    function (range) {
      return (has(range, 'lt') /*&& !range.maxEx*/) ? false : true
    };
  var lowerBoundExclusive = exports.lowerBoundExclusive =
    function (range) {
      return !lowerBoundInclusive(range)
    };
  var upperBoundExclusive = exports.upperBoundExclusive =
    function (range) {
      return !upperBoundInclusive(range)
    };
  // Name of the option that supplies the upper bound, in precedence order.
  var upperBoundKey = exports.upperBoundKey = function (range) {
    return (
      hasKey(range, 'lt')
      || hasKey(range, 'lte')
      || hasKey(range, 'max')
      || (range.reverse ? hasKey(range, 'start') : hasKey(range, 'end'))
      || undefined
    )
  };
  var upperBound = exports.upperBound = function (range, def) {
    var k = upperBoundKey(range);
    return k ? range[k] : def
  };
  // `start`/`end` are direction-aware aliases for the bounds.
  exports.start = function (range, def) {
    return range.reverse ? upperBound(range, def) : lowerBound(range, def)
  };
  exports.end = function (range, def) {
    return range.reverse ? lowerBound(range, def) : upperBound(range, def)
  };
  exports.startInclusive = function (range) {
    return (
      range.reverse
      ? upperBoundInclusive(range)
      : lowerBoundInclusive(range)
    )
  };
  exports.endInclusive = function (range) {
    return (
      range.reverse
      ? lowerBoundInclusive(range)
      : upperBoundInclusive(range)
    )
  };
  function id (e) { return e }
  // Normalize any range spelling into plain gt/gte/lt/lte (+reverse),
  // optionally mapping bound values and applying caller-supplied defaults.
  exports.toLtgt = function (range, _range, map, lower, upper) {
    _range = _range || {};
    map = map || id;
    var defaults = arguments.length > 3;
    var lb = exports.lowerBoundKey(range);
    var ub = exports.upperBoundKey(range);
    if(lb) {
      if(lb === 'gt') _range.gt = map(range.gt, false);
      else _range.gte = map(range[lb], false);
    }
    else if(defaults)
      _range.gte = map(lower, false);
    if(ub) {
      if(ub === 'lt') _range.lt = map(range.lt, true);
      else _range.lte = map(range[ub], true);
    }
    else if(defaults)
      _range.lte = map(upper, true);
    if(range.reverse != null)
      _range.reverse = !!range.reverse;
    //if range was used mutably
    //(in level-sublevel it's part of an options object
    //that has more properties on it.)
    if(has(_range, 'max')) delete _range.max;
    if(has(_range, 'min')) delete _range.min;
    if(has(_range, 'start')) delete _range.start;
    if(has(_range, 'end')) delete _range.end;
    return _range
  };
  // True if `key` falls inside `range` under the given comparator.
  exports.contains = function (range, key, compare) {
    compare = compare || exports.compare;
    var lb = lowerBound(range);
    if(isDef(lb)) {
      var cmp = compare(key, lb);
      if(cmp < 0 || (cmp === 0 && lowerBoundExclusive(range)))
        return false
    }
    var ub = upperBound(range);
    if(isDef(ub)) {
      var cmp = compare(key, ub);
      if(cmp > 0 || (cmp === 0) && upperBoundExclusive(range))
        return false
    }
    return true
  };
  exports.filter = function (range, compare) {
    return function (key) {
      return exports.contains(range, key, compare)
    }
  };
});
// Rollup-generated named re-exports of the ltgt module's functions.
var ltgt_1 = ltgt.compare;
var ltgt_2 = ltgt.lowerBoundKey;
var ltgt_3 = ltgt.lowerBound;
var ltgt_4 = ltgt.lowerBoundInclusive;
var ltgt_5 = ltgt.upperBoundInclusive;
var ltgt_6 = ltgt.lowerBoundExclusive;
var ltgt_7 = ltgt.upperBoundExclusive;
var ltgt_8 = ltgt.upperBoundKey;
var ltgt_9 = ltgt.upperBound;
var ltgt_10 = ltgt.start;
var ltgt_11 = ltgt.end;
var ltgt_12 = ltgt.startInclusive;
var ltgt_13 = ltgt.endInclusive;
var ltgt_14 = ltgt.toLtgt;
var ltgt_15 = ltgt.contains;
var ltgt_16 = ltgt.filter;
var AbstractIterator$1 = abstractLeveldown.AbstractIterator;
var iterator = Iterator;
// leveldown-style iterator backed by an IDBWrapper cursor.
function Iterator (db, options) {
  if (!options) options = {};
  this.options = options;
  AbstractIterator$1.call(this, db);
  this._order = options.reverse ? 'DESC': 'ASC';
  this._limit = options.limit;
  this._count = 0;
  this._done = false;
  // Translate gt/gte/lt/lte/start/end options into an IDBKeyRange.
  var lower = ltgt.lowerBound(options);
  var upper = ltgt.upperBound(options);
  try {
    this._keyRange = lower || upper ? this.db.makeKeyRange({
      lower: lower,
      upper: upper,
      excludeLower: ltgt.lowerBoundExclusive(options),
      excludeUpper: ltgt.upperBoundExclusive(options)
    }) : null;
  } catch (e) {
    // The lower key is greater than the upper key.
    // IndexedDB throws an error, but we'll just return 0 results.
    this._keyRangeError = true;
  }
  // Holds the callback of the currently pending next() call, if any.
  this.callback = null;
}
util$2.inherits(Iterator, AbstractIterator$1);
// Open the IDBWrapper cursor; every cursor item is forwarded to onItem.
// autoContinue is off so iteration advances one record per next() call.
Iterator.prototype.createIterator = function() {
  var self = this;
  self.iterator = self.db.iterate(function () {
    self.onItem.apply(self, arguments);
  }, {
    keyRange: self._keyRange,
    autoContinue: false,
    order: self._order,
    // NOTE(review): errors are only logged here, never propagated to the
    // pending next() callback — looks like an upstream gap; confirm.
    onError: function(err) { console.log('horrible error', err); },
  });
};
// TODO the limit implementation here just ignores all reads after limit has been reached
// it should cancel the iterator instead but I don't know how
Iterator.prototype.onItem = function (value, cursor, cursorTransaction) {
  // A null cursor signals end-of-iteration: flush the pending callback
  // with no arguments so the consumer knows the stream is done.
  if (!cursor && this.callback) {
    this.callback();
    this.callback = false;
    return
  }
  var shouldCall = true;
  // NOTE(review): `_count` is incremented even for reads past the limit,
  // and `false` (not null) is passed as the error argument below — both
  // preserved as-is from the original upstream behavior.
  if (!!this._limit && this._limit > 0 && this._count++ >= this._limit)
    shouldCall = false;
  if (shouldCall) this.callback(false, cursor.key, cursor.value);
  if (cursor) cursor['continue']();
};
Iterator.prototype._next = function (callback) {
  // Lazily open the underlying cursor on the first read, then park the
  // callback for onItem to consume.
  if (!callback) {
    return new Error('next() requires a callback argument');
  }
  if (this._keyRangeError) {
    // Invalid range (lower > upper): behave like an empty result set.
    return callback();
  }
  if (!this._started) {
    this.createIterator();
    this._started = true;
  }
  this.callback = callback;
};
var Buffer$1 = buffer.Buffer;
var isbuffer = isBuffer;
// True for Node Buffers and for anything whose toString tag mentions
// "Array" (typed arrays, ArrayBuffer) — but not plain Arrays, whose tag
// is exactly "[object Array]" and matches neither alternative.
function isBuffer (o) {
  if (Buffer$1.isBuffer(o)) {
    return true;
  }
  return /\[object (.+Array|Array.+)\]/.test(Object.prototype.toString.call(o));
}
var hasOwn = Object.prototype.hasOwnProperty;
var toString = Object.prototype.toString;
var isFunction = function (fn) {
  // typeof check plus a toString-tag check to catch host functions that
  // some engines misreport; RegExps are explicitly excluded because old
  // engines gave them typeof 'function'.
  var func = (typeof fn === 'function' && !(fn instanceof RegExp)) || toString.call(fn) === '[object Function]';
  if (!func && typeof window !== 'undefined') {
    // Old IE reports some host methods with a non-function typeof.
    func = fn === window.setTimeout || fn === window.alert || fn === window.confirm || fn === window.prompt;
  }
  return func;
};
var foreach = function forEach(obj, fn) {
  // Iterate arrays/strings by index and plain objects by own key,
  // invoking fn(value, indexOrKey, obj), optionally with a context.
  if (!isFunction(fn)) {
    throw new TypeError('iterator must be a function');
  }
  var context = arguments.length > 2 ? arguments[2] : null;
  var isString = typeof obj === 'string';
  var len = obj.length;
  // A numeric length marks the value as array-like (NaN fails this test).
  if (len === +len) {
    for (var i = 0; i < len; i++) {
      var item = isString ? obj.charAt(i) : obj[i];
      if (context === null) {
        fn(item, i, obj);
      } else {
        fn.call(context, item, i, obj);
      }
    }
    return;
  }
  for (var k in obj) {
    if (hasOwn.call(obj, k)) {
      if (context === null) {
        fn(obj[k], k, obj);
      } else {
        fn.call(context, obj[k], k, obj);
      }
    }
  }
};
var toString$1 = Object.prototype.toString;
var isArguments = function isArguments(value) {
  // Fast path: real Arguments objects carry the [object Arguments] tag.
  var tag = toString$1.call(value);
  if (tag === '[object Arguments]') {
    return true;
  }
  // Fallback for engines that don't tag arguments: duck-type on shape
  // (non-array object with a numeric length and a callable `callee`).
  return tag !== '[object Array]'
    && value !== null
    && typeof value === 'object'
    && typeof value.length === 'number'
    && value.length >= 0
    && toString$1.call(value.callee) === '[object Function]';
};
var shim = createCommonjsModule(function (module) {
  // Object.keys polyfill for pre-ES5 engines.
  (function () {
    // modified from https://github.com/kriskowal/es5-shim
    var has = Object.prototype.hasOwnProperty,
      toString = Object.prototype.toString,
      forEach = foreach,
      isArgs = isArguments,
      // IE<9 does not enumerate shadowed "DontEnum" properties (toString etc.).
      hasDontEnumBug = !({'toString': null}).propertyIsEnumerable('toString'),
      // Some engines wrongly enumerate a function's `prototype` property.
      hasProtoEnumBug = (function () {}).propertyIsEnumerable('prototype'),
      dontEnums = [
        "toString",
        "toLocaleString",
        "valueOf",
        "hasOwnProperty",
        "isPrototypeOf",
        "propertyIsEnumerable",
        "constructor"
      ],
      keysShim;
    keysShim = function keys(object) {
      var isObject = object !== null && typeof object === 'object',
        isFunction = toString.call(object) === '[object Function]',
        isArguments = isArgs(object),
        theKeys = [];
      if (!isObject && !isFunction && !isArguments) {
        throw new TypeError("Object.keys called on a non-object");
      }
      if (isArguments) {
        // Arguments objects are iterated positionally; note this pushes
        // the VALUES, matching the original es5-shim-derived behavior.
        forEach(object, function (value) {
          theKeys.push(value);
        });
      } else {
        var name,
          skipProto = hasProtoEnumBug && isFunction;
        for (name in object) {
          if (!(skipProto && name === 'prototype') && has.call(object, name)) {
            theKeys.push(name);
          }
        }
      }
      if (hasDontEnumBug) {
        // Manually re-check properties IE refuses to enumerate.
        var ctor = object.constructor,
          skipConstructor = ctor && ctor.prototype === object;
        forEach(dontEnums, function (dontEnum) {
          if (!(skipConstructor && dontEnum === 'constructor') && has.call(object, dontEnum)) {
            theKeys.push(dontEnum);
          }
        });
      }
      return theKeys;
    };
    module.exports = keysShim;
  }());
});
var objectKeys = Object.keys || shim;
var hasKeys_1 = hasKeys;
// Only objects and functions can carry own keys; null and primitives cannot.
function hasKeys(source) {
  if (source === null) {
    return false;
  }
  var kind = typeof source;
  return kind === "object" || kind === "function";
}
var xtend$1 = extend$1;
// Merge the own keys of every object/function argument into a fresh
// target; non-key-bearing arguments (null, primitives) are skipped.
function extend$1() {
  var target = {};
  for (var i = 0; i < arguments.length; i++) {
    var source = arguments[i];
    if (!hasKeys_1(source)) {
      continue;
    }
    var keys = objectKeys(source);
    for (var j = 0; j < keys.length; j++) {
      target[keys[j]] = source[keys[j]];
    }
  }
  return target;
}
/**
 * Convert a typed array to a Buffer without a copy
 *
 * Author: Feross Aboukhadijeh <feross@feross.org> <http://feross.org>
 * License: MIT
 *
 * `npm install typedarray-to-buffer`
 */
var typedarrayToBuffer = function (arr) {
  if (typeof Buffer._augment === 'function' && Buffer.TYPED_ARRAY_SUPPORT) {
    // If `Buffer` is from the `buffer` module and this browser supports typed arrays,
    // then augment it with all the `Buffer` methods.
    return Buffer._augment(arr)
  } else {
    // Otherwise, fall back to creating a `Buffer` with a copy.
    // BUGFIX/modernization: `new Buffer(arr)` is deprecated; `Buffer.from`
    // behaves identically for typed-array input (copies the contents).
    return Buffer.from(arr)
  }
};
var levelJs = Level;
var AbstractLevelDOWN$1 = abstractLeveldown.AbstractLevelDOWN;
// leveldown-compatible store backed by IndexedDB (via IDBWrapper).
function Level(location) {
  // Support calling without `new`.
  if (!(this instanceof Level)) {
    return new Level(location);
  }
  if (!location) {
    throw new Error("constructor requires at least a location argument");
  }
  this.IDBOptions = {};
  this.location = location;
}
util$2.inherits(Level, AbstractLevelDOWN$1);
// Open (or create) the backing IndexedDB store via IDBWrapper; the store
// name is the leveldown location.
Level.prototype._open = function(options, callback) {
  var self = this;
  var idbOpts = {
    storeName: this.location,
    autoIncrement: false,
    keyPath: null,
    onStoreReady: function () {
      callback && callback(null, self.idb);
    },
    onError: function(err) {
      callback && callback(err);
    }
  };
  // NOTE(review): xtend$1 returns a NEW merged object and never mutates
  // idbOpts, so this result is discarded and caller options appear to be
  // ignored — looks like an upstream bug; confirm before changing.
  xtend$1(idbOpts, options);
  this.IDBOptions = idbOpts;
  this.idb = new idbstore(idbOpts);
};
// Read one value from IndexedDB and normalize it for levelup consumers.
Level.prototype._get = function (key, options, callback) {
  this.idb.get(key, function (value) {
    if (value === undefined) {
      // 'NotFound' error, consistent with LevelDOWN API
      return callback(new Error('NotFound'))
    }
    // by default return buffers, unless explicitly told not to
    var asBuffer = true;
    if (options.asBuffer === false) asBuffer = false;
    if (options.raw) asBuffer = false;
    if (asBuffer) {
      if (value instanceof Uint8Array) value = typedarrayToBuffer(value);
      // BUGFIX/modernization: `new Buffer(...)` is deprecated; Buffer.from
      // is the supported equivalent and is identical for string input.
      else value = Buffer.from(String(value));
    }
    return callback(null, value, key)
  }, callback);
};
// Delete a record by key. `options` is unused; IDBWrapper invokes the same
// callback for both the success and error paths here.
Level.prototype._del = function(id, options, callback) {
  this.idb.remove(id, callback, callback);
};
// Store a key/value pair. ArrayBuffer values are wrapped as buffers first,
// and Buffer values are stored as Uint8Array views (IndexedDB cannot store
// Node Buffers directly).
Level.prototype._put = function (key, value, options, callback) {
  if (value instanceof ArrayBuffer) {
    value = typedarrayToBuffer(new Uint8Array(value));
  }
  var obj = this.convertEncoding(key, value, options);
  if (Buffer.isBuffer(obj.value)) {
    // NOTE(review): these branches read the original `value`, not
    // `obj.value` — preserved as-is from upstream; confirm intent.
    if (typeof value.toArrayBuffer === 'function') {
      obj.value = new Uint8Array(value.toArrayBuffer());
    } else {
      obj.value = new Uint8Array(value);
    }
  }
  this.idb.put(obj.key, obj.value, function() { callback(); }, callback);
};
// Mirror levelup's encoding rules: unless raw or 'binary' encoding is in
// effect, non-object values are coerced to their string form for storage.
Level.prototype.convertEncoding = function(key, value, options) {
  if (options.raw) return {key: key, value: value}
  if (value) {
    // Capture the stringified form; NaN stringifies to the literal 'NaN'.
    var stringed = value.toString();
    if (stringed === 'NaN') value = 'NaN';
  }
  var valEnc = options.valueEncoding;
  var obj = {key: key, value: value};
  if (value && (!valEnc || valEnc !== 'binary')) {
    if (typeof obj.value !== 'object') {
      // `stringed` is always assigned here because this branch requires a
      // truthy `value`, which also triggered the assignment above.
      obj.value = stringed;
    }
  }
  return obj
};
Level.prototype.iterator = function (options) {
  // Non-object options are replaced with {}, matching leveldown's leniency.
  var opts = typeof options === 'object' ? options : {};
  return new iterator(this.idb, opts);
};
// Apply a batch of put/del operations via IDBWrapper's batch(), translating
// leveldown's 'del' type into IDBWrapper's 'remove'.
Level.prototype._batch = function (array, options, callback) {
  var i;
  var k;
  var copiedOp;
  var currentOp;
  var modified = [];
  // An empty batch succeeds asynchronously without touching the store.
  if (array.length === 0) return setTimeout(callback, 0)
  for (i = 0; i < array.length; i++) {
    copiedOp = {};
    currentOp = array[i];
    modified[i] = copiedOp;
    // Apply the same value coercion used for single put()s.
    // NOTE(review): this mutates the caller's operation objects in place.
    var converted = this.convertEncoding(currentOp.key, currentOp.value, options);
    currentOp.key = converted.key;
    currentOp.value = converted.value;
    for (k in currentOp) {
      if (k === 'type' && currentOp[k] == 'del') {
        copiedOp[k] = 'remove';
      } else {
        copiedOp[k] = currentOp[k];
      }
    }
  }
  return this.idb.batch(modified, function(){ callback(); }, callback)
};
// Close the underlying IndexedDB database handle and report success
// synchronously (IDBDatabase.close() does not take a callback).
Level.prototype._close = function (callback) {
  this.idb.db.close();
  callback();
};
Level.prototype._approximateSize = function (start, end, callback) {
  // IndexedDB offers no way to estimate on-disk size; always report failure.
  var err = new Error('Not implemented');
  if (!callback) {
    throw err;
  }
  return callback(err);
};
// leveldown hook: only Node Buffers are treated as binary keys/values.
Level.prototype._isBuffer = function (obj) {
  return Buffer.isBuffer(obj)
};
Level.destroy = function (db, callback) {
  // Accept either a Level instance or a plain location string; the full
  // database name is the IDBWrapper prefix plus the location.
  var prefix = 'IDBWrapper-';
  var dbname;
  if (typeof db === 'object') {
    prefix = db.IDBOptions.storePrefix || 'IDBWrapper-';
    dbname = db.location;
  } else {
    dbname = db;
  }
  var request = indexedDB.deleteDatabase(prefix + dbname);
  request.onsuccess = function() {
    callback();
  };
  request.onerror = function(err) {
    callback(err);
  };
};
// Validate a key or value before use. Returns an Error describing the
// problem, or undefined when the input is acceptable.
// Fix: the original repeated the null/undefined guard twice verbatim;
// the second copy was unreachable dead code and has been removed.
var checkKeyValue = Level.prototype._checkKeyValue = function (obj, type) {
  if (obj === null || obj === undefined)
    return new Error(type + ' cannot be `null` or `undefined`')
  if (isbuffer(obj) && obj.byteLength === 0)
    return new Error(type + ' cannot be an empty ArrayBuffer')
  if (String(obj) === '')
    return new Error(type + ' cannot be an empty String')
  if (obj.length === 0)
    return new Error(type + ' cannot be an empty Array')
};
var xtend$2 = extend$2;
// Shallow-merge the own enumerable properties of every argument into a
// fresh object; later arguments win on key collisions (vendored xtend).
function extend$2() {
  var merged = {};
  for (var a = 0; a < arguments.length; a++) {
    var src = arguments[a];
    for (var prop in src) {
      if (src.hasOwnProperty(prop)) {
        merged[prop] = src[prop];
      }
    }
  }
  return merged;
}
// Vendored `prr`: tiny helper for defining properties via
// Object.defineProperty, with a plain-assignment fallback for pre-ES5
// engines. The options argument may be a flags object or a shorthand
// string matched by first letter (e.g. 'ewr' = enumerable+writable).
var prr = createCommonjsModule(function (module) {
/*!
  * prr
  * (c) 2013 Rod Vagg <rod@vagg.org>
  * https://github.com/rvagg/prr
  * License: MIT
  */
(function (name, context, definition) {
if (module.exports)
module.exports = definition();
else
context[name] = definition();
})('prr', commonjsGlobal, function() {
var setProperty = typeof Object.defineProperty == 'function'
? function (obj, key, options) {
Object.defineProperty(obj, key, options);
return obj
}
: function (obj, key, options) { // < es5
obj[key] = options.value;
return obj
}
// Build a property descriptor from either a flags object or a
// shorthand string; `options.indexOf(p[0])` checks the flag's first
// letter ('e'numerable, 'c'onfigurable, 'w'ritable).
, makeOptions = function (value, options) {
var oo = typeof options == 'object'
, os = !oo && typeof options == 'string'
, op = function (p) {
return oo
? !!options[p]
: os
? options.indexOf(p[0]) > -1
: false
};
return {
enumerable : op('enumerable')
, configurable : op('configurable')
, writable : op('writable')
, value : value
}
}
// prr(obj, key, value[, options]) or prr(obj, {key: value, ...}, options)
, prr = function (obj, key, value, options) {
var k;
options = makeOptions(value, options);
if (typeof key == 'object') {
for (k in key) {
if (Object.hasOwnProperty.call(key, k)) {
options.value = key[k];
setProperty(obj, k, options);
}
}
return obj
}
return setProperty(obj, key, options)
};
return prr
});
});
var AbstractLevelDOWN$2 = abstractLeveldown.AbstractLevelDOWN;
// A LevelDOWN stand-in that queues operations until a real backing
// store is attached via setDb(), then replays them in order.
function DeferredLevelDOWN (location) {
  // optional location, who cares?
  AbstractLevelDOWN$2.call(this, typeof location == 'string' ? location : '');
  this._db = undefined;
  this._operations = [];
}
util$2.inherits(DeferredLevelDOWN, AbstractLevelDOWN$2);
// called by LevelUP when we have a real DB to take its place
DeferredLevelDOWN.prototype.setDb = function (db) {
  this._db = db;
  var queued = this._operations;
  for (var i = 0; i < queued.length; i++) {
    db[queued[i].method].apply(db, queued[i].args);
  }
};
DeferredLevelDOWN.prototype._open = function (options, callback) {
  return process.nextTick(callback)
};
// Queue a deferred operation, or run it directly once a db exists.
DeferredLevelDOWN.prototype._operation = function (method, args) {
  if (this._db)
    return this._db[method].apply(this._db, args)
  this._operations.push({ method: method, args: args });
};
// deferrables
['put', 'get', 'del', 'batch', 'approximateSize'].forEach(function (m) {
  DeferredLevelDOWN.prototype['_' + m] = function () {
    this._operation(m, arguments);
  };
});
DeferredLevelDOWN.prototype._isBuffer = function (obj) {
  return Buffer.isBuffer(obj)
};
// don't need to implement this as LevelUP's ReadStream checks for 'ready' state
DeferredLevelDOWN.prototype._iterator = function () {
  throw new TypeError('not implemented')
};
var deferredLeveldown = DeferredLevelDOWN;
// NOTE(review): byte-for-byte duplicate of the `prr` module bundled
// above — the bundler inlined the dependency twice. See that copy for
// semantics (defineProperty helper with flags-object or shorthand-
// string options, e.g. 'ewr' = enumerable+writable).
var prr$1 = createCommonjsModule(function (module) {
/*!
  * prr
  * (c) 2013 Rod Vagg <rod@vagg.org>
  * https://github.com/rvagg/prr
  * License: MIT
  */
(function (name, context, definition) {
if (module.exports)
module.exports = definition();
else
context[name] = definition();
})('prr', commonjsGlobal, function() {
var setProperty = typeof Object.defineProperty == 'function'
? function (obj, key, options) {
Object.defineProperty(obj, key, options);
return obj
}
: function (obj, key, options) { // < es5
obj[key] = options.value;
return obj
}
// Descriptor builder: flags object, or shorthand string matched by
// each flag's first letter.
, makeOptions = function (value, options) {
var oo = typeof options == 'object'
, os = !oo && typeof options == 'string'
, op = function (p) {
return oo
? !!options[p]
: os
? options.indexOf(p[0]) > -1
: false
};
return {
enumerable : op('enumerable')
, configurable : op('configurable')
, writable : op('writable')
, value : value
}
}
// prr(obj, key, value[, options]) or prr(obj, {key: value, ...}, options)
, prr = function (obj, key, value, options) {
var k;
options = makeOptions(value, options);
if (typeof key == 'object') {
for (k in key) {
if (Object.hasOwnProperty.call(key, k)) {
options.value = key[k];
setProperty(obj, k, options);
}
}
return obj
}
return setProperty(obj, key, options)
};
return prr
});
});
// Populate a custom-error instance with enumerable+writable ('ewr')
// type/name/cause/message properties. `message` may itself be an
// error-like object, in which case its message/name is used as the
// message text and the object itself becomes the cause.
function init (type, message, cause) {
  var msg = message;
  if (msg && typeof msg != 'string') {
    msg = msg.message || msg.name;
  }
  prr$1(this, {
    type : type
    , name : type
    // can be passed just a 'cause'
    , cause : typeof msg != 'string' ? msg : cause
    , message : msg
  }, 'ewr');
}
// Generic error root; not meant to be constructed directly, but useful
// as an `instanceof` base for every error type created below.
function CustomError (message, cause) {
  Error.call(this);
  if (Error.captureStackTrace) {
    Error.captureStackTrace(this, this.constructor);
  }
  init.call(this, 'CustomError', message, cause);
}
CustomError.prototype = new Error();
// Build a named error constructor. When `proto` is supplied the new
// errors inherit from an instance of it; otherwise from CustomError.
// FilesystemError instances additionally copy code/path/errno from
// their cause and derive a readable message from the errno table.
function createError (errno, type, proto) {
  var err = function (message, cause) {
    init.call(this, type, message, cause);
    //TODO: the specificity here is stupid, errno should be available everywhere
    if (type == 'FilesystemError') {
      this.code = this.cause.code;
      this.path = this.cause.path;
      this.errno = this.cause.errno;
      var entry = errno.errno[this.cause.errno];
      this.message =
        (entry ? entry.description : this.cause.message)
        + (this.cause.path ? ' [' + this.cause.path + ']' : '');
    }
    Error.call(this);
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, err);
    }
  };
  err.prototype = proto ? new proto() : new CustomError();
  return err
}
// Expose the error factories bound to a particular errno table.
var custom = function (errno) {
  function ce (type, proto) {
    return createError(errno, type, proto)
  }
  return {
    CustomError : CustomError
    , FilesystemError : ce('FilesystemError')
    , createError : ce
  }
};
// Vendored `errno` module: a table of libuv error descriptors, indexed
// maps by number and by code, plus typed-error factories (see `custom`).
var errno = createCommonjsModule(function (module) {
var all = module.exports.all = [
{
errno: -2,
code: 'ENOENT',
description: 'no such file or directory'
},
{
errno: -1,
code: 'UNKNOWN',
description: 'unknown error'
},
{
errno: 0,
code: 'OK',
description: 'success'
},
{
errno: 1,
code: 'EOF',
description: 'end of file'
},
{
errno: 2,
code: 'EADDRINFO',
description: 'getaddrinfo error'
},
{
errno: 3,
code: 'EACCES',
description: 'permission denied'
},
{
errno: 4,
code: 'EAGAIN',
description: 'resource temporarily unavailable'
},
{
errno: 5,
code: 'EADDRINUSE',
description: 'address already in use'
},
{
errno: 6,
code: 'EADDRNOTAVAIL',
description: 'address not available'
},
{
errno: 7,
code: 'EAFNOSUPPORT',
description: 'address family not supported'
},
{
errno: 8,
code: 'EALREADY',
description: 'connection already in progress'
},
{
errno: 9,
code: 'EBADF',
description: 'bad file descriptor'
},
{
errno: 10,
code: 'EBUSY',
description: 'resource busy or locked'
},
{
errno: 11,
code: 'ECONNABORTED',
description: 'software caused connection abort'
},
{
errno: 12,
code: 'ECONNREFUSED',
description: 'connection refused'
},
{
errno: 13,
code: 'ECONNRESET',
description: 'connection reset by peer'
},
{
errno: 14,
code: 'EDESTADDRREQ',
description: 'destination address required'
},
{
errno: 15,
code: 'EFAULT',
description: 'bad address in system call argument'
},
{
errno: 16,
code: 'EHOSTUNREACH',
description: 'host is unreachable'
},
{
errno: 17,
code: 'EINTR',
description: 'interrupted system call'
},
{
errno: 18,
code: 'EINVAL',
description: 'invalid argument'
},
{
errno: 19,
code: 'EISCONN',
description: 'socket is already connected'
},
{
errno: 20,
code: 'EMFILE',
description: 'too many open files'
},
{
errno: 21,
code: 'EMSGSIZE',
description: 'message too long'
},
{
errno: 22,
code: 'ENETDOWN',
description: 'network is down'
},
{
errno: 23,
code: 'ENETUNREACH',
description: 'network is unreachable'
},
{
errno: 24,
code: 'ENFILE',
description: 'file table overflow'
},
{
errno: 25,
code: 'ENOBUFS',
description: 'no buffer space available'
},
{
errno: 26,
code: 'ENOMEM',
description: 'not enough memory'
},
{
errno: 27,
code: 'ENOTDIR',
description: 'not a directory'
},
{
errno: 28,
code: 'EISDIR',
description: 'illegal operation on a directory'
},
{
errno: 29,
code: 'ENONET',
description: 'machine is not on the network'
},
{
errno: 31,
code: 'ENOTCONN',
description: 'socket is not connected'
},
{
errno: 32,
code: 'ENOTSOCK',
description: 'socket operation on non-socket'
},
{
errno: 33,
code: 'ENOTSUP',
description: 'operation not supported on socket'
},
{
errno: 34,
code: 'ENOENT',
description: 'no such file or directory'
},
{
errno: 35,
code: 'ENOSYS',
description: 'function not implemented'
},
{
errno: 36,
code: 'EPIPE',
description: 'broken pipe'
},
{
errno: 37,
code: 'EPROTO',
description: 'protocol error'
},
{
errno: 38,
code: 'EPROTONOSUPPORT',
description: 'protocol not supported'
},
{
errno: 39,
code: 'EPROTOTYPE',
description: 'protocol wrong type for socket'
},
{
errno: 40,
code: 'ETIMEDOUT',
description: 'connection timed out'
},
{
errno: 41,
code: 'ECHARSET',
description: 'invalid Unicode character'
},
{
errno: 42,
code: 'EAIFAMNOSUPPORT',
description: 'address family for hostname not supported'
},
{
errno: 44,
code: 'EAISERVICE',
description: 'servname not supported for ai_socktype'
},
{
errno: 45,
code: 'EAISOCKTYPE',
description: 'ai_socktype not supported'
},
{
errno: 46,
code: 'ESHUTDOWN',
description: 'cannot send after transport endpoint shutdown'
},
{
errno: 47,
code: 'EEXIST',
description: 'file already exists'
},
{
errno: 48,
code: 'ESRCH',
description: 'no such process'
},
{
errno: 49,
code: 'ENAMETOOLONG',
description: 'name too long'
},
{
errno: 50,
code: 'EPERM',
description: 'operation not permitted'
},
{
errno: 51,
code: 'ELOOP',
description: 'too many symbolic links encountered'
},
{
errno: 52,
code: 'EXDEV',
description: 'cross-device link not permitted'
},
{
errno: 53,
code: 'ENOTEMPTY',
description: 'directory not empty'
},
{
errno: 54,
code: 'ENOSPC',
description: 'no space left on device'
},
{
errno: 55,
code: 'EIO',
description: 'i/o error'
},
{
errno: 56,
code: 'EROFS',
description: 'read-only file system'
},
{
errno: 57,
code: 'ENODEV',
description: 'no such device'
},
{
errno: 58,
code: 'ESPIPE',
description: 'invalid seek'
},
{
errno: 59,
code: 'ECANCELED',
description: 'operation canceled'
}
];
// Index the table by numeric errno and by string code for O(1) lookup.
module.exports.errno = {};
module.exports.code = {};
all.forEach(function (error) {
module.exports.errno[error.errno] = error;
module.exports.code[error.code] = error;
});
// Attach the error-constructor factories bound to this table.
module.exports.custom = custom(module.exports);
module.exports.create = module.exports.custom.createError;
});
// Flattened re-exports of the errno module's named members.
var errno_1 = errno.all;
var errno_2 = errno.errno;
var errno_3 = errno.code;
var errno_4 = errno.custom;
var errno_5 = errno.create;
/* Copyright (c) 2012-2014 LevelUP contributors
 * See list at <https://github.com/rvagg/node-levelup#contributing>
 * MIT License
 * <https://github.com/rvagg/node-levelup/blob/master/LICENSE.md>
 */
// LevelUP's error hierarchy: every error type derives from
// LevelUPError; NotFoundError additionally carries levelup's
// conventional notFound/status markers on its prototype.
var createError$1 = errno.create;
var LevelUPError = createError$1('LevelUPError');
var NotFoundError = createError$1('NotFoundError', LevelUPError);
NotFoundError.prototype.notFound = true;
NotFoundError.prototype.status = 404;
var errors = {
  LevelUPError : LevelUPError
  , InitializationError : createError$1('InitializationError', LevelUPError)
  , OpenError : createError$1('OpenError', LevelUPError)
  , ReadError : createError$1('ReadError', LevelUPError)
  , WriteError : createError$1('WriteError', LevelUPError)
  , NotFoundError : NotFoundError
  , EncodingError : createError$1('EncodingError', LevelUPError)
};
// Array.isArray where available, with a toString-based fallback for
// very old engines.
var isarray = Array.isArray || function (candidate) {
  return Object.prototype.toString.call(candidate) == '[object Array]';
};
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// NOTE: These type checking functions intentionally don't use `instanceof`
// because it is fragile and can be easily faked with `Object.create()`.
// Type predicates ported from Node core's util module. toString-based
// checks are preferred over `instanceof`, which can be faked with
// Object.create() or break across realms.
function objectToString(o) {
  return Object.prototype.toString.call(o);
}
function isArray(arg) {
  if (Array.isArray) {
    return Array.isArray(arg);
  }
  return objectToString(arg) === '[object Array]';
}
var isArray_1 = isArray;
function isBoolean(arg) {
  return typeof arg === 'boolean';
}
var isBoolean_1 = isBoolean;
function isNull(arg) {
  return arg === null;
}
var isNull_1 = isNull;
function isNullOrUndefined(arg) {
  return arg == null; // loose equality matches both null and undefined
}
var isNullOrUndefined_1 = isNullOrUndefined;
function isNumber(arg) {
  return typeof arg === 'number';
}
var isNumber_1 = isNumber;
function isString(arg) {
  return typeof arg === 'string';
}
var isString_1 = isString;
function isSymbol(arg) {
  return typeof arg === 'symbol';
}
var isSymbol_1 = isSymbol;
function isUndefined(arg) {
  return arg === void 0;
}
var isUndefined_1 = isUndefined;
function isRegExp(re) {
  return objectToString(re) === '[object RegExp]';
}
var isRegExp_1 = isRegExp;
function isObject(arg) {
  return arg !== null && typeof arg === 'object';
}
var isObject_1 = isObject;
function isDate(d) {
  return objectToString(d) === '[object Date]';
}
var isDate_1 = isDate;
function isError(e) {
  return objectToString(e) === '[object Error]' || e instanceof Error;
}
var isError_1 = isError;
function isFunction$1(arg) {
  return typeof arg === 'function';
}
var isFunction_1 = isFunction$1;
function isPrimitive(arg) {
  return arg === null ||
         typeof arg === 'boolean' ||
         typeof arg === 'number' ||
         typeof arg === 'string' ||
         typeof arg === 'symbol' || // ES6 symbol
         typeof arg === 'undefined';
}
var isPrimitive_1 = isPrimitive;
var isBuffer$1 = Buffer.isBuffer;
// Aggregate export mirroring Node's util type-check surface.
var util = {
  isArray: isArray_1,
  isBoolean: isBoolean_1,
  isNull: isNull_1,
  isNullOrUndefined: isNullOrUndefined_1,
  isNumber: isNumber_1,
  isString: isString_1,
  isSymbol: isSymbol_1,
  isUndefined: isUndefined_1,
  isRegExp: isRegExp_1,
  isObject: isObject_1,
  isDate: isDate_1,
  isError: isError_1,
  isFunction: isFunction_1,
  isPrimitive: isPrimitive_1,
  isBuffer: isBuffer$1
};
var inherits_browser = createCommonjsModule(function (module) {
if (typeof Object.create === 'function') {
// implementation from standard node.js 'util' module
module.exports = function inherits(ctor, superCtor) {
ctor.super_ = superCtor;
ctor.prototype = Object.create(superCtor.prototype, {
constructor: {
value: ctor,
enumerable: false,
writable: true,
configurable: true
}
});
};
} else {
// old school shim for old browsers
module.exports = function inherits(ctor, superCtor) {
ctor.super_ = superCtor;
var TempCtor = function () {};
TempCtor.prototype = superCtor.prototype;
ctor.prototype = new TempCtor();
ctor.prototype.constructor = ctor;
};
}
});
// Vendored browser port of Node's legacy string_decoder: splits a
// stream of Buffers into strings without breaking multi-byte
// characters (UTF-8/CESU-8, UTF-16LE, base64).
var string_decoder = createCommonjsModule(function (module, exports) {
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
var Buffer = buffer.Buffer;
var isBufferEncoding = Buffer.isEncoding
|| function(encoding) {
switch (encoding && encoding.toLowerCase()) {
case 'hex': case 'utf8': case 'utf-8': case 'ascii': case 'binary': case 'base64': case 'ucs2': case 'ucs-2': case 'utf16le': case 'utf-16le': case 'raw': return true;
default: return false;
}
};
// Throw early on encodings Buffer cannot handle.
function assertEncoding(encoding) {
if (encoding && !isBufferEncoding(encoding)) {
throw new Error('Unknown encoding: ' + encoding);
}
}
// StringDecoder provides an interface for efficiently splitting a series of
// buffers into a series of JS strings without breaking apart multi-byte
// characters. CESU-8 is handled as part of the UTF-8 encoding.
//
// @TODO Handling all encodings inside a single object makes it very difficult
// to reason about this code, so it should be split up in the future.
// @TODO There should be a utf8-strict encoding that rejects invalid UTF-8 code
// points as used by CESU-8.
var StringDecoder = exports.StringDecoder = function(encoding) {
// normalise e.g. 'utf-8'/'UTF_8' to 'utf8' (first - or _ only)
this.encoding = (encoding || 'utf8').toLowerCase().replace(/[-_]/, '');
assertEncoding(encoding);
switch (this.encoding) {
case 'utf8':
// CESU-8 represents each of Surrogate Pair by 3-bytes
this.surrogateSize = 3;
break;
case 'ucs2':
case 'utf16le':
// UTF-16 represents each of Surrogate Pair by 2-bytes
this.surrogateSize = 2;
this.detectIncompleteChar = utf16DetectIncompleteChar;
break;
case 'base64':
// Base-64 stores 3 bytes in 4 chars, and pads the remainder.
this.surrogateSize = 3;
this.detectIncompleteChar = base64DetectIncompleteChar;
break;
default:
// single-byte encodings never split characters; no buffering needed
this.write = passThroughWrite;
return;
}
// Enough space to store all bytes of a single character. UTF-8 needs 4
// bytes, but CESU-8 may require up to 6 (3 bytes per surrogate).
this.charBuffer = new Buffer(6);
// Number of bytes received for the current incomplete multi-byte character.
this.charReceived = 0;
// Number of bytes expected for the current incomplete multi-byte character.
this.charLength = 0;
};
// write decodes the given buffer and returns it as JS string that is
// guaranteed to not contain any partial multi-byte characters. Any partial
// character found at the end of the buffer is buffered up, and will be
// returned when calling write again with the remaining bytes.
//
// Note: Converting a Buffer containing an orphan surrogate to a String
// currently works, but converting a String to a Buffer (via `new Buffer`, or
// Buffer#write) will replace incomplete surrogates with the unicode
// replacement character. See https://codereview.chromium.org/121173009/ .
StringDecoder.prototype.write = function(buffer) {
var charStr = '';
// if our last write ended with an incomplete multibyte character
while (this.charLength) {
// determine how many remaining bytes this buffer has to offer for this char
var available = (buffer.length >= this.charLength - this.charReceived) ?
this.charLength - this.charReceived :
buffer.length;
// add the new bytes to the char buffer
buffer.copy(this.charBuffer, this.charReceived, 0, available);
this.charReceived += available;
if (this.charReceived < this.charLength) {
// still not enough chars in this buffer? wait for more ...
return '';
}
// remove bytes belonging to the current character from the buffer
buffer = buffer.slice(available, buffer.length);
// get the character that was split
charStr = this.charBuffer.slice(0, this.charLength).toString(this.encoding);
// CESU-8: lead surrogate (D800-DBFF) is also the incomplete character
var charCode = charStr.charCodeAt(charStr.length - 1);
if (charCode >= 0xD800 && charCode <= 0xDBFF) {
this.charLength += this.surrogateSize;
charStr = '';
continue;
}
this.charReceived = this.charLength = 0;
// if there are no more bytes in this buffer, just emit our char
if (buffer.length === 0) {
return charStr;
}
break;
}
// determine and set charLength / charReceived
this.detectIncompleteChar(buffer);
var end = buffer.length;
if (this.charLength) {
// buffer the incomplete character bytes we got
buffer.copy(this.charBuffer, 0, buffer.length - this.charReceived, end);
end -= this.charReceived;
}
charStr += buffer.toString(this.encoding, 0, end);
// (redeclares `end`/`charCode` from above — legal under var hoisting)
var end = charStr.length - 1;
var charCode = charStr.charCodeAt(end);
// CESU-8: lead surrogate (D800-DBFF) is also the incomplete character
if (charCode >= 0xD800 && charCode <= 0xDBFF) {
var size = this.surrogateSize;
this.charLength += size;
this.charReceived += size;
this.charBuffer.copy(this.charBuffer, size, 0, size);
buffer.copy(this.charBuffer, 0, 0, size);
return charStr.substring(0, end);
}
// or just emit the charStr
return charStr;
};
// detectIncompleteChar determines if there is an incomplete UTF-8 character at
// the end of the given buffer. If so, it sets this.charLength to the byte
// length that character, and sets this.charReceived to the number of bytes
// that are available for this character.
StringDecoder.prototype.detectIncompleteChar = function(buffer) {
// determine how many bytes we have to check at the end of this buffer
var i = (buffer.length >= 3) ? 3 : buffer.length;
// Figure out if one of the last i bytes of our buffer announces an
// incomplete char.
for (; i > 0; i--) {
var c = buffer[buffer.length - i];
// See http://en.wikipedia.org/wiki/UTF-8#Description
// 110XXXXX
if (i == 1 && c >> 5 == 0x06) {
this.charLength = 2;
break;
}
// 1110XXXX
if (i <= 2 && c >> 4 == 0x0E) {
this.charLength = 3;
break;
}
// 11110XXX
if (i <= 3 && c >> 3 == 0x1E) {
this.charLength = 4;
break;
}
}
this.charReceived = i;
};
// Flush: decode any trailing bytes, then emit whatever partial
// character is still buffered (possibly producing a replacement char).
StringDecoder.prototype.end = function(buffer) {
var res = '';
if (buffer && buffer.length)
res = this.write(buffer);
if (this.charReceived) {
var cr = this.charReceived;
var buf = this.charBuffer;
var enc = this.encoding;
res += buf.slice(0, cr).toString(enc);
}
return res;
};
// Used for single-byte encodings: no character can be split, so decode
// the whole buffer directly.
function passThroughWrite(buffer) {
return buffer.toString(this.encoding);
}
// UTF-16LE code units are 2 bytes; an odd byte count means one byte of
// an incomplete unit is pending.
function utf16DetectIncompleteChar(buffer) {
this.charReceived = buffer.length % 2;
this.charLength = this.charReceived ? 2 : 0;
}
// Base64 encodes 3-byte groups; a remainder means an incomplete group.
function base64DetectIncompleteChar(buffer) {
this.charReceived = buffer.length % 3;
this.charLength = this.charReceived ? 3 : 0;
}
});
// Convenience alias for the module's single export.
var string_decoder_1 = string_decoder.StringDecoder;
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// Browser port of Node's streams2 Readable. The alias below is legal
// before the declaration because `function Readable` is hoisted.
var _stream_readable = Readable;
/*<replacement>*/
/*</replacement>*/
/*<replacement>*/
var Buffer$2 = buffer.Buffer;
/*</replacement>*/
Readable.ReadableState = ReadableState;
var EE = events.EventEmitter;
/*<replacement>*/
// Older EventEmitter implementations lack listenerCount; polyfill it.
if (!EE.listenerCount) EE.listenerCount = function(emitter, type) {
return emitter.listeners(type).length;
};
/*</replacement>*/
/*<replacement>*/
util.inherits = inherits_browser;
/*</replacement>*/
// Lazily assigned the first time an encoding is actually requested.
var StringDecoder;
util.inherits(Readable, stream);
// Per-stream bookkeeping for a Readable: buffered chunks, high-water
// mark, flowing/ended flags and the optional string decoder.
function ReadableState(options, stream) {
  options = options || {};
  // the point at which it stops calling _read() to fill the buffer
  // Note: 0 is a valid value, means "don't call _read preemptively ever"
  var hwm = options.highWaterMark;
  this.highWaterMark = (hwm || hwm === 0) ? hwm : 16 * 1024;
  this.highWaterMark = ~~this.highWaterMark; // cast to int
  this.buffer = [];
  this.length = 0;
  this.pipes = null;
  this.pipesCount = 0;
  this.flowing = false;
  this.ended = false;
  this.endEmitted = false;
  this.reading = false;
  // Don't emit 'end' until a read() has actually been attempted, so a
  // consumer doing I/O before reading cannot miss the event.
  this.calledRead = false;
  // True while the onwrite callback could still fire synchronously;
  // "later" actions must not run before the first write call either.
  this.sync = true;
  // Set after a null read while we await a 'readable' emission.
  this.needReadable = false;
  this.emittedReadable = false;
  this.readableListening = false;
  // In object mode read(n) ignores n and `length` counts items.
  this.objectMode = !!options.objectMode;
  // Crypto historically defaults to 'binary'; everything else in the
  // universe uses 'utf8', so that is the configurable default here.
  this.defaultEncoding = options.defaultEncoding || 'utf8';
  // when piping, we only care about 'readable' events that happen
  // after read()ing all the bytes and not getting any pushback.
  this.ranOut = false;
  // number of piped writers currently awaiting a 'drain' event
  this.awaitDrain = 0;
  // true once a maybeReadMore has been scheduled
  this.readingMore = false;
  this.decoder = null;
  this.encoding = null;
  if (options.encoding) {
    if (!StringDecoder)
      StringDecoder = string_decoder.StringDecoder;
    this.decoder = new StringDecoder(options.encoding);
    this.encoding = options.encoding;
  }
}
// Readable constructor; usable with or without `new`. All option
// handling lives in ReadableState.
function Readable(options) {
  if (!(this instanceof Readable))
    return new Readable(options);
  this._readableState = new ReadableState(options, this);
  this.readable = true; // legacy
  stream.call(this);
}
// Manually shove something into the read() buffer.
// This returns true if the highWaterMark has not been hit yet,
// similar to how Writable.write() returns true if you should
// write() some more. Strings are converted to Buffers unless they
// already match the stream's decoder encoding.
Readable.prototype.push = function(chunk, encoding) {
  var state = this._readableState;
  if (typeof chunk === 'string' && !state.objectMode) {
    var enc = encoding || state.defaultEncoding;
    if (enc !== state.encoding) {
      chunk = new Buffer$2(chunk, enc);
      enc = '';
    }
    return readableAddChunk(this, state, chunk, enc, false);
  }
  return readableAddChunk(this, state, chunk, encoding, false);
};
// Unshift should *always* be something directly out of read()
Readable.prototype.unshift = function(chunk) {
  return readableAddChunk(this, this._readableState, chunk, '', true);
};
// Core push/unshift implementation: validates the chunk, treats
// null/undefined as EOF, optionally decodes via the string decoder,
// updates buffer accounting, and emits 'readable' when a consumer is
// waiting. Returns true while the stream wants more data.
function readableAddChunk(stream, state, chunk, encoding, addToFront) {
  var er = chunkInvalid(state, chunk);
  if (er) {
    stream.emit('error', er);
  } else if (chunk === null || chunk === undefined) {
    // null/undefined chunk signals end-of-stream
    state.reading = false;
    if (!state.ended)
      onEofChunk(stream, state);
  } else if (state.objectMode || chunk && chunk.length > 0) {
    if (state.ended && !addToFront) {
      var e = new Error('stream.push() after EOF');
      stream.emit('error', e);
    } else if (state.endEmitted && addToFront) {
      var e = new Error('stream.unshift() after end event');
      stream.emit('error', e);
    } else {
      // only decode pushed (not unshifted) chunks without an encoding,
      // since unshifted chunks came straight out of read()
      if (state.decoder && !addToFront && !encoding)
        chunk = state.decoder.write(chunk);
      // update the buffer info.
      state.length += state.objectMode ? 1 : chunk.length;
      if (addToFront) {
        state.buffer.unshift(chunk);
      } else {
        state.reading = false;
        state.buffer.push(chunk);
      }
      if (state.needReadable)
        emitReadable(stream);
      maybeReadMore(stream, state);
    }
  } else if (!addToFront) {
    // zero-length push in non-object mode: just clear the reading flag
    state.reading = false;
  }
  return needMoreData(state);
}
// if it's past the high water mark, we can push in some more.
// Also, if we have no data yet, we can stand some
// more bytes. This is to work around cases where hwm=0,
// such as the repl. Also, if the push() triggered a
// readable event, and the user called read(largeNumber) such that
// needReadable was set, then we ought to push more, so that another
// 'readable' event will be triggered.
function needMoreData(state) {
  if (state.ended) return false;
  return state.needReadable ||
    state.length < state.highWaterMark ||
    state.length === 0;
}
// backwards compatibility: install a string decoder so read() returns
// strings in `enc` instead of Buffers.
Readable.prototype.setEncoding = function(enc) {
  if (!StringDecoder)
    StringDecoder = string_decoder.StringDecoder;
  var state = this._readableState;
  state.decoder = new StringDecoder(enc);
  state.encoding = enc;
};
// Don't raise the hwm > 128MB
var MAX_HWM = 0x800000;
// Round n up to the next power of two (bit-smearing trick), capped at
// MAX_HWM. Exact powers of two are returned unchanged.
function roundUpToNextPowerOf2(n) {
  if (n >= MAX_HWM) return MAX_HWM;
  var v = n - 1;
  v |= v >> 1;
  v |= v >> 2;
  v |= v >> 4;
  v |= v >> 8;
  v |= v >> 16;
  return v + 1;
}
// Decide how many bytes/objects read(n) should actually return given
// the current buffer state. Returns 0 when nothing can be read yet
// (and flags needReadable when more data must arrive first). May raise
// the high-water mark when the caller asks for more than it.
function howMuchToRead(n, state) {
  if (state.length === 0 && state.ended)
    return 0;
  if (state.objectMode)
    return n === 0 ? 0 : 1;
  if (n === null || isNaN(n)) {
    // only flow one buffer at a time
    return (state.flowing && state.buffer.length)
      ? state.buffer[0].length
      : state.length;
  }
  if (n <= 0)
    return 0;
  // Reading past the target buffer level raises the water mark to the
  // next power of 2, so it doesn't creep up in tiny increments.
  if (n > state.highWaterMark)
    state.highWaterMark = roundUpToNextPowerOf2(n);
  if (n <= state.length)
    return n;
  // don't have that much: return everything at EOF, otherwise nothing.
  if (state.ended)
    return state.length;
  state.needReadable = true;
  return 0;
}
// you can override either this method, or the async _read(n) below.
//
// Main entry point for pulling data out of the stream. `n` is the number
// of bytes requested (one object in objectMode); null/NaN means "whatever
// is buffered". read(0) re-checks readability without consuming data.
// Returns a Buffer/string/object, or null when nothing can be returned
// right now. May call _read() to refill, and may emit 'readable'/'end'.
Readable.prototype.read = function(n) {
  var state = this._readableState;
  // record that read() has been called at least once; endReadable relies
  // on this before it will ever emit 'end'.
  state.calledRead = true;
  var nOrig = n;
  var ret;
  if (typeof n !== 'number' || n > 0)
    state.emittedReadable = false;
  // if we're doing read(0) to trigger a readable event, but we
  // already have a bunch of data in the buffer, then just trigger
  // the 'readable' event and move on.
  if (n === 0 &&
      state.needReadable &&
      (state.length >= state.highWaterMark || state.ended)) {
    emitReadable(this);
    return null;
  }
  n = howMuchToRead(n, state);
  // if we've ended, and we're now clear, then finish it up.
  if (n === 0 && state.ended) {
    ret = null;
    // In cases where the decoder did not receive enough data
    // to produce a full chunk, then immediately received an
    // EOF, state.buffer will contain [<Buffer >, <Buffer 00 ...>].
    // howMuchToRead will see this and coerce the amount to
    // read to zero (because it's looking at the length of the
    // first <Buffer > in state.buffer), and we'll end up here.
    //
    // This can only happen via state.decoder -- no other venue
    // exists for pushing a zero-length chunk into state.buffer
    // and triggering this behavior. In this case, we return our
    // remaining data and end the stream, if appropriate.
    if (state.length > 0 && state.decoder) {
      ret = fromList(n, state);
      state.length -= ret.length;
    }
    if (state.length === 0)
      endReadable(this);
    return ret;
  }
  // All the actual chunk generation logic needs to be
  // *below* the call to _read. The reason is that in certain
  // synthetic stream cases, such as passthrough streams, _read
  // may be a completely synchronous operation which may change
  // the state of the read buffer, providing enough data when
  // before there was *not* enough.
  //
  // So, the steps are:
  // 1. Figure out what the state of things will be after we do
  // a read from the buffer.
  //
  // 2. If that resulting state will trigger a _read, then call _read.
  // Note that this may be asynchronous, or synchronous. Yes, it is
  // deeply ugly to write APIs this way, but that still doesn't mean
  // that the Readable class should behave improperly, as streams are
  // designed to be sync/async agnostic.
  // Take note if the _read call is sync or async (ie, if the read call
  // has returned yet), so that we know whether or not it's safe to emit
  // 'readable' etc.
  //
  // 3. Actually pull the requested chunks out of the buffer and return.
  // if we need a readable event, then we need to do some reading.
  var doRead = state.needReadable;
  // if we currently have less than the highWaterMark, then also read some
  if (state.length - n <= state.highWaterMark)
    doRead = true;
  // however, if we've ended, then there's no point, and if we're already
  // reading, then it's unnecessary.
  if (state.ended || state.reading)
    doRead = false;
  if (doRead) {
    state.reading = true;
    // sync guard: emitReadable defers to nextTick while this is set, so a
    // synchronous _read can't recurse back into read().
    state.sync = true;
    // if the length is currently zero, then we *need* a readable event.
    if (state.length === 0)
      state.needReadable = true;
    // call internal read method
    // (always asks for highWaterMark bytes, not n; the argument to _read
    // is advisory only)
    this._read(state.highWaterMark);
    state.sync = false;
  }
  // If _read called its callback synchronously, then `reading`
  // will be false, and we need to re-evaluate how much data we
  // can return to the user.
  if (doRead && !state.reading)
    n = howMuchToRead(nOrig, state);
  if (n > 0)
    ret = fromList(n, state);
  else
    ret = null;
  if (ret === null) {
    state.needReadable = true;
    n = 0;
  }
  state.length -= n;
  // If we have nothing in the buffer, then we want to know
  // as soon as we *do* get something into the buffer.
  if (state.length === 0 && !state.ended)
    state.needReadable = true;
  // If we happened to read() exactly the remaining amount in the
  // buffer, and the EOF has been seen at this point, then make sure
  // that we emit 'end' on the very next tick.
  if (state.ended && !state.endEmitted && state.length === 0)
    endReadable(this);
  return ret;
};
// Validate a chunk handed to the readable machinery: outside objectMode
// only Buffers, strings, null, and undefined are acceptable. Returns a
// TypeError describing the problem, or null when the chunk is fine.
function chunkInvalid(state, chunk) {
  if (state.objectMode ||
      chunk === null ||
      chunk === undefined ||
      typeof chunk === 'string' ||
      Buffer$2.isBuffer(chunk)) {
    return null;
  }
  return new TypeError('Invalid non-string/buffer chunk');
}
// Handle EOF: flush any partial character data still held by the string
// decoder into the buffer, mark the stream ended, then either announce
// 'readable' (data remains) or finish the stream outright.
function onEofChunk(stream, state) {
  if (state.decoder && !state.ended) {
    var tail = state.decoder.end();
    if (tail && tail.length) {
      state.buffer.push(tail);
      state.length += state.objectMode ? 1 : tail.length;
    }
  }
  state.ended = true;
  // With leftover data, make sure it gets picked up; otherwise we're done.
  if (state.length > 0) {
    emitReadable(stream);
  } else {
    endReadable(stream);
  }
}
// Emit 'readable', deduplicated via state.emittedReadable. While a
// synchronous _read is on the stack (state.sync), defer to the next tick:
// emitting inline could recurse back into read() and overflow the stack
// (at worst this trades that for a nextTick recursion warning).
function emitReadable(stream) {
  var state = stream._readableState;
  state.needReadable = false;
  if (state.emittedReadable) {
    return;
  }
  state.emittedReadable = true;
  if (state.sync) {
    process.nextTick(function() {
      emitReadable_(stream);
    });
  } else {
    emitReadable_(stream);
  }
}
// Actually fire the 'readable' event; all dedup/scheduling happens in
// emitReadable above.
function emitReadable_(stream) {
  stream.emit('readable');
}
// At this point the user has presumably seen 'readable' and consumed some
// data, possibly triggering another in-progress _read. If we're not ended
// or reading and the buffer is below the high water mark, schedule a
// background top-up. Guarded by state.readingMore so that at most one
// top-up loop is pending at a time.
function maybeReadMore(stream, state) {
  if (state.readingMore) {
    return;
  }
  state.readingMore = true;
  process.nextTick(function() {
    maybeReadMore_(stream, state);
  });
}
// Background top-up loop: pull data with read(0) until the buffer reaches
// the high water mark, the stream ends/flows/reads, or a read produces no
// new data (to avoid spinning forever on a dry source).
function maybeReadMore_(stream, state) {
  var previousLength = state.length;
  while (!state.reading && !state.flowing && !state.ended &&
         state.length < state.highWaterMark) {
    stream.read(0);
    if (previousLength === state.length) {
      // didn't get any data, stop spinning.
      break;
    }
    previousLength = state.length;
  }
  state.readingMore = false;
}
// abstract method. to be overridden in specific implementation classes.
// call cb(er, data) where data is <= n in length.
// for virtual (non-string, non-buffer) streams, "length" is somewhat
// arbitrary, and perhaps not very meaningful.
Readable.prototype._read = function(n) {
  // default implementation: a Readable without a _read override is unusable.
  this.emit('error', new Error('not implemented'));
};
// Connect this readable to a writable `dest`. Handles backpressure via
// 'drain'/awaitDrain, ends `dest` when this stream ends (unless
// pipeOpts.end === false, or dest is process.stdout/stderr), supports
// multiple simultaneous destinations, and returns `dest` so pipes chain.
Readable.prototype.pipe = function(dest, pipeOpts) {
  var src = this;
  var state = this._readableState;
  // state.pipes is null, a single dest, or an array of dests -- an
  // optimization that avoids allocating an array in the 0/1-dest case.
  switch (state.pipesCount) {
    case 0:
      state.pipes = dest;
      break;
    case 1:
      state.pipes = [state.pipes, dest];
      break;
    default:
      state.pipes.push(dest);
      break;
  }
  state.pipesCount += 1;
  // never end the shared stdout/stderr streams from a pipe.
  var doEnd = (!pipeOpts || pipeOpts.end !== false) &&
              dest !== process.stdout &&
              dest !== process.stderr;
  var endFn = doEnd ? onend : cleanup;
  if (state.endEmitted)
    process.nextTick(endFn);
  else
    src.once('end', endFn);
  dest.on('unpipe', onunpipe);
  function onunpipe(readable) {
    // only tear down if it was *this* source that got unpiped.
    if (readable !== src) return;
    cleanup();
  }
  function onend() {
    dest.end();
  }
  // when the dest drains, it reduces the awaitDrain counter
  // on the source. This would be more elegant with a .once()
  // handler in flow(), but adding and removing repeatedly is
  // too slow.
  var ondrain = pipeOnDrain(src);
  dest.on('drain', ondrain);
  function cleanup() {
    // cleanup event handlers once the pipe is broken
    dest.removeListener('close', onclose);
    dest.removeListener('finish', onfinish);
    dest.removeListener('drain', ondrain);
    dest.removeListener('error', onerror);
    dest.removeListener('unpipe', onunpipe);
    src.removeListener('end', onend);
    src.removeListener('end', cleanup);
    // if the reader is waiting for a drain event from this
    // specific writer, then it would cause it to never start
    // flowing again.
    // So, if this is awaiting a drain, then we just call it now.
    // If we don't know, then assume that we are waiting for one.
    if (!dest._writableState || dest._writableState.needDrain)
      ondrain();
  }
  // if the dest has an error, then stop piping into it.
  // however, don't suppress the throwing behavior for this.
  function onerror(er) {
    unpipe();
    dest.removeListener('error', onerror);
    // re-emit (and therefore possibly throw) only if nobody else listens.
    if (EE.listenerCount(dest, 'error') === 0)
      dest.emit('error', er);
  }
  // This is a brutally ugly hack to make sure that our error handler
  // is attached before any userland ones. NEVER DO THIS.
  if (!dest._events || !dest._events.error)
    dest.on('error', onerror);
  else if (isarray(dest._events.error))
    dest._events.error.unshift(onerror);
  else
    dest._events.error = [onerror, dest._events.error];
  // Both close and finish should trigger unpipe, but only once.
  function onclose() {
    dest.removeListener('finish', onfinish);
    unpipe();
  }
  dest.once('close', onclose);
  function onfinish() {
    dest.removeListener('close', onclose);
    unpipe();
  }
  dest.once('finish', onfinish);
  function unpipe() {
    src.unpipe(dest);
  }
  // tell the dest that it's being piped to
  dest.emit('pipe', src);
  // start the flow if it hasn't been started already.
  if (!state.flowing) {
    // the handler that waits for readable events after all
    // the data gets sucked out in flow.
    // This would be easier to follow with a .once() handler
    // in flow(), but that is too slow.
    this.on('readable', pipeOnReadable);
    state.flowing = true;
    process.nextTick(function() {
      flow(src);
    });
  }
  return dest;
};
// Build the 'drain' handler installed on each pipe destination: decrement
// the source's awaitDrain counter, and restart the flow loop once every
// backed-up destination has drained.
function pipeOnDrain(src) {
  return function() {
    var state = src._readableState;
    state.awaitDrain--;
    if (state.awaitDrain === 0) {
      flow(src);
    }
  };
}
// Pump loop for piped streams: repeatedly read a chunk and write it to
// every destination, counting in state.awaitDrain how many destinations
// reported backpressure. Stops when a drain is pending, when the buffer
// runs dry (sets state.ranOut for pipeOnReadable), or when every
// destination has been unpiped.
function flow(src) {
  var state = src._readableState;
  var chunk;
  state.awaitDrain = 0;
  // write the current chunk to one destination; the (dest, i, list)
  // signature matches what forEach supplies.
  function write(dest, i, list) {
    var written = dest.write(chunk);
    if (false === written) {
      state.awaitDrain++;
    }
  }
  while (state.pipesCount && null !== (chunk = src.read())) {
    if (state.pipesCount === 1)
      write(state.pipes);
    else
      forEach(state.pipes, write);
    src.emit('data', chunk);
    // if anyone needs a drain, then we have to wait for that.
    if (state.awaitDrain > 0)
      return;
  }
  // if every destination was unpiped, either before entering this
  // function, or in the while loop, then stop flowing.
  //
  // NB: This is a pretty rare edge case.
  if (state.pipesCount === 0) {
    state.flowing = false;
    // if there were data event listeners added, then switch to old mode.
    if (EE.listenerCount(src, 'data') > 0)
      emitDataEvents(src);
    return;
  }
  // at this point, no one needed a drain, so we just ran out of data
  // on the next readable event, start it over again.
  state.ranOut = true;
}
// 'readable' handler installed while piping: if a previous flow() run
// drained the buffer dry (ranOut), kick the pump off again now that more
// data has arrived.
function pipeOnReadable() {
  var state = this._readableState;
  if (state.ranOut) {
    state.ranOut = false;
    flow(this);
  }
}
// Detach `dest` from this stream's pipe set; with no argument, detach all
// destinations. Emits 'unpipe' on each removed destination, and tears down
// the flowing machinery when no pipes remain. Returns `this` for chaining.
// Note state.pipes is a single dest when pipesCount === 1 and an array
// otherwise -- each branch below handles one representation.
Readable.prototype.unpipe = function(dest) {
  var state = this._readableState;
  // if we're not piping anywhere, then do nothing.
  if (state.pipesCount === 0)
    return this;
  // just one destination. most common case.
  if (state.pipesCount === 1) {
    // passed in one, but it's not the right one.
    if (dest && dest !== state.pipes)
      return this;
    if (!dest)
      dest = state.pipes;
    // got a match.
    state.pipes = null;
    state.pipesCount = 0;
    this.removeListener('readable', pipeOnReadable);
    state.flowing = false;
    if (dest)
      dest.emit('unpipe', this);
    return this;
  }
  // slow case. multiple pipe destinations.
  if (!dest) {
    // remove all.
    var dests = state.pipes;
    var len = state.pipesCount;
    state.pipes = null;
    state.pipesCount = 0;
    this.removeListener('readable', pipeOnReadable);
    state.flowing = false;
    for (var i = 0; i < len; i++)
      dests[i].emit('unpipe', this);
    return this;
  }
  // try to find the right one.
  var i = indexOf(state.pipes, dest);
  if (i === -1)
    return this;
  state.pipes.splice(i, 1);
  state.pipesCount -= 1;
  // collapse back to the single-destination representation.
  if (state.pipesCount === 1)
    state.pipes = state.pipes[0];
  dest.emit('unpipe', this);
  return this;
};
// set up data events if they are asked for
// Ensure readable listeners eventually get something
//
// Attaching a 'data' listener switches the stream into old-style flowing
// mode; attaching the first 'readable' listener primes the machinery so
// the event will eventually fire even if no read() has happened yet.
Readable.prototype.on = function(ev, fn) {
  var res = stream.prototype.on.call(this, ev, fn);
  if (ev === 'data' && !this._readableState.flowing)
    emitDataEvents(this);
  if (ev === 'readable' && this.readable) {
    var state = this._readableState;
    if (!state.readableListening) {
      // first 'readable' listener: force a readability check.
      state.readableListening = true;
      state.emittedReadable = false;
      state.needReadable = true;
      if (!state.reading) {
        this.read(0);
      } else if (state.length) {
        emitReadable(this);
      }
    }
  }
  return res;
};
// addListener is an exact alias of on.
Readable.prototype.addListener = Readable.prototype.on;
// pause() and resume() are remnants of the legacy readable stream API
// If the user uses them, then switch into old mode.
Readable.prototype.resume = function() {
  // convert to old-mode flowing, then kick off a read so data starts moving.
  emitDataEvents(this);
  this.read(0);
  this.emit('resume');
};
Readable.prototype.pause = function() {
  // convert to old mode, starting in the paused position (startPaused=true).
  emitDataEvents(this, true);
  this.emit('pause');
};
// Convert this stream into an old-style (streams1) push stream: data is
// emitted as soon as it is available, gated by pause()/resume(). Invoked
// when a 'data' listener is attached, when pause()/resume() are used, or
// by flow(). One-way: once flowing, the stream cannot switch back (hence
// the throw below).
function emitDataEvents(stream$1, startPaused) {
  var state = stream$1._readableState;
  if (state.flowing) {
    // https://github.com/isaacs/readable-stream/issues/16
    throw new Error('Cannot switch to old mode now.');
  }
  // `paused` gates emission; `readable` tracks whether data is pending.
  var paused = startPaused || false;
  var readable = false;
  // convert to an old-style stream.
  stream$1.readable = true;
  stream$1.pipe = stream.prototype.pipe;
  stream$1.on = stream$1.addListener = stream.prototype.on;
  stream$1.on('readable', function() {
    readable = true;
    var c;
    // drain the buffer, emitting 'data' until empty or paused.
    while (!paused && (null !== (c = stream$1.read())))
      stream$1.emit('data', c);
    if (c === null) {
      readable = false;
      stream$1._readableState.needReadable = true;
    }
  });
  stream$1.pause = function() {
    paused = true;
    this.emit('pause');
  };
  stream$1.resume = function() {
    paused = false;
    // if data was pending while paused, replay 'readable' asynchronously;
    // otherwise poke the machinery with a read(0).
    if (readable)
      process.nextTick(function() {
        stream$1.emit('readable');
      });
    else
      this.read(0);
    this.emit('resume');
  };
  // now make it start, just in case it hadn't already.
  stream$1.emit('readable');
}
// wrap an old-style stream as the async data source.
// This is *not* part of the readable stream interface.
// It is an ugly unfortunate mess of history.
//
// Consumes `stream` (a streams1 emitter) and re-emits its data through
// this Readable, translating push() backpressure into pause()/resume() on
// the wrapped stream. Also proxies unknown methods and key events.
Readable.prototype.wrap = function(stream) {
  var state = this._readableState;
  var paused = false;
  var self = this;
  stream.on('end', function() {
    // flush any partial character still held by the decoder before EOF.
    if (state.decoder && !state.ended) {
      var chunk = state.decoder.end();
      if (chunk && chunk.length)
        self.push(chunk);
    }
    self.push(null);
  });
  stream.on('data', function(chunk) {
    if (state.decoder)
      chunk = state.decoder.write(chunk);
    // don't skip over falsy values in objectMode
    //if (state.objectMode && util.isNullOrUndefined(chunk))
    if (state.objectMode && (chunk === null || chunk === undefined))
      return;
    else if (!state.objectMode && (!chunk || !chunk.length))
      return;
    var ret = self.push(chunk);
    // push() returning false means our buffer is full: pause the source
    // until _read below asks for more.
    if (!ret) {
      paused = true;
      stream.pause();
    }
  });
  // proxy all the other methods.
  // important when wrapping filters and duplexes.
  for (var i in stream) {
    if (typeof stream[i] === 'function' &&
        typeof this[i] === 'undefined') {
      this[i] = function(method) { return function() {
        return stream[method].apply(stream, arguments);
      }}(i);
    }
  }
  // proxy certain important events.
  var events = ['error', 'close', 'destroy', 'pause', 'resume'];
  forEach(events, function(ev) {
    stream.on(ev, self.emit.bind(self, ev));
  });
  // when we try to consume some more bytes, simply unpause the
  // underlying stream.
  self._read = function(n) {
    if (paused) {
      paused = false;
      stream.resume();
    }
  };
  return self;
};
// exposed for testing purposes only.
Readable._fromList = fromList;
// Pluck off n bytes from an array of buffers.
// Length is the combined lengths of all the buffers in the list.
//
// Consumes data from state.buffer: one object in objectMode, string slices
// in string mode (decoder set), Buffer slices otherwise. A falsy n, or
// n >= total length, takes everything. Mutates state.buffer but NOT
// state.length -- callers adjust the length themselves.
function fromList(n, state) {
  var list = state.buffer;
  var length = state.length;
  var stringMode = !!state.decoder;
  var objectMode = !!state.objectMode;
  var ret;
  // nothing in the list, definitely empty.
  if (list.length === 0)
    return null;
  if (length === 0)
    ret = null;
  else if (objectMode)
    ret = list.shift();
  else if (!n || n >= length) {
    // read it all, truncate the array.
    if (stringMode)
      ret = list.join('');
    else
      ret = Buffer$2.concat(list, length);
    list.length = 0;
  } else {
    // read just some of it.
    if (n < list[0].length) {
      // just take a part of the first list item.
      // slice is the same for buffers and strings.
      var buf = list[0];
      ret = buf.slice(0, n);
      list[0] = buf.slice(n);
    } else if (n === list[0].length) {
      // first list is a perfect match
      ret = list.shift();
    } else {
      // complex case.
      // we have enough to cover it, but it spans past the first buffer.
      if (stringMode)
        ret = '';
      else
        ret = new Buffer$2(n);
      var c = 0;
      // NB: the loop always reads list[0], not list[i] -- consumed entries
      // are shifted off (or trimmed in place) at the bottom of each pass.
      for (var i = 0, l = list.length; i < l && c < n; i++) {
        var buf = list[0];
        var cpy = Math.min(n - c, buf.length);
        if (stringMode)
          ret += buf.slice(0, cpy);
        else
          buf.copy(ret, c, 0, cpy);
        if (cpy < buf.length)
          list[0] = buf.slice(cpy);
        else
          list.shift();
        c += cpy;
      }
    }
  }
  return ret;
}
// Finish the readable side: once the buffer is fully drained (and read()
// has been called at least once), emit 'end' on the next tick. The
// next-tick re-check guards against a last-moment unshift.
function endReadable(stream) {
  var state = stream._readableState;
  // reaching here with buffered bytes would be an internal bug in node.
  if (state.length > 0) {
    throw new Error('endReadable called on non-empty stream');
  }
  if (state.endEmitted || !state.calledRead) {
    return;
  }
  state.ended = true;
  process.nextTick(function() {
    // Check that we didn't get one last unshift.
    if (!state.endEmitted && state.length === 0) {
      state.endEmitted = true;
      stream.readable = false;
      stream.emit('end');
    }
  });
}
// Minimal Array#forEach stand-in: calls f(value, index) for each element.
function forEach (xs, f) {
  var i = 0;
  var len = xs.length;
  while (i < len) {
    f(xs[i], i);
    i++;
  }
}
// Minimal Array#indexOf stand-in using strict equality; -1 when absent.
function indexOf (xs, x) {
  for (var i = 0; i < xs.length; i++) {
    if (xs[i] === x) {
      return i;
    }
  }
  return -1;
}
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a duplex stream is just a stream that is both readable and writable.
// Since JS doesn't have multiple prototypal inheritance, this class
// prototypally inherits from Readable, and then parasitically from
// Writable.
// module export: the Duplex constructor (function declaration hoisted
// from below).
var _stream_duplex = Duplex;
/*<replacement>*/
// Object.keys fallback for very old environments.
var objectKeys$1 = Object.keys || function (obj) {
  var keys = [];
  for (var key in obj) keys.push(key);
  return keys;
};
/*</replacement>*/
/*<replacement>*/
util.inherits = inherits_browser;
/*</replacement>*/
// Duplex inherits prototypally from Readable...
util.inherits(Duplex, _stream_readable);
// ...and borrows every Writable prototype method it doesn't already have
// (JS has no multiple inheritance, so Writable is mixed in manually).
forEach$1(objectKeys$1(_stream_writable.prototype), function(method) {
  if (!Duplex.prototype[method])
    Duplex.prototype[method] = _stream_writable.prototype[method];
});
// Duplex: a stream that is both readable and writable. Runs both parent
// constructors; options.readable/writable === false disable one side, and
// options.allowHalfOpen === false makes 'end' on the readable side also
// end the writable side (see onend below).
function Duplex(options) {
  // support calling without `new`.
  if (!(this instanceof Duplex)) {
    return new Duplex(options);
  }
  _stream_readable.call(this, options);
  _stream_writable.call(this, options);
  if (options && options.readable === false) {
    this.readable = false;
  }
  if (options && options.writable === false) {
    this.writable = false;
  }
  // default: keep the readable side open after the writable side ends.
  this.allowHalfOpen = !(options && options.allowHalfOpen === false);
  this.once('end', onend);
}
// the no-half-open enforcer
// 'end' listener installed by the Duplex constructor: when half-open mode
// is disabled and the writable side hasn't ended yet, end it on the next
// tick (so writes issued during the current tick still go through).
function onend() {
  if (this.allowHalfOpen || this._writableState.ended) {
    return;
  }
  // no more data can be written.
  // But allow more writes to happen in this tick.
  process.nextTick(this.end.bind(this));
}
// Local forEach helper for the Duplex setup code: f(value, index).
function forEach$1 (xs, f) {
  var len = xs.length;
  for (var idx = 0; idx < len; idx++) {
    f(xs[idx], idx);
  }
}
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// A bit simpler than readable streams.
// Implement an async ._write(chunk, cb), and it'll handle all
// the drain event emission and buffering.
// module export: the Writable constructor (function declaration hoisted
// from below).
var _stream_writable = Writable;
/*<replacement>*/
var Buffer$3 = buffer.Buffer;
/*</replacement>*/
Writable.WritableState = WritableState;
/*<replacement>*/
util.inherits = inherits_browser;
/*</replacement>*/
// Writable inherits directly from the base Stream class.
util.inherits(Writable, stream);
// A queued write request: the chunk, its encoding, and the user callback,
// held in state.buffer while a previous _write is still in flight.
function WriteReq(chunk, encoding, cb) {
  this.callback = cb;
  this.chunk = chunk;
  this.encoding = encoding;
}
// Per-stream bookkeeping for the writable side. `options` configures the
// watermark/decoding behavior; `stream` is the owning Writable, captured
// by the onwrite closure below.
function WritableState(options, stream) {
  options = options || {};
  // the point at which write() starts returning false
  // Note: 0 is a valid value, means that we always return false if
  // the entire buffer is not flushed immediately on write()
  var hwmOption = options.highWaterMark;
  this.highWaterMark = (hwmOption || hwmOption === 0) ? hwmOption : 16 * 1024;
  // object stream flag: chunks count as 1 apiece rather than by byte length.
  this.objectMode = !!options.objectMode;
  // cast to ints.
  this.highWaterMark = ~~this.highWaterMark;
  // true once a write() has returned false and a 'drain' event is owed.
  this.needDrain = false;
  // at the start of calling end()
  this.ending = false;
  // when end() has been called, and returned
  this.ended = false;
  // when 'finish' is emitted
  this.finished = false;
  // should we decode strings into buffers before passing to _write?
  // this is here so that some node-core streams can optimize string
  // handling at a lower level.
  var skipDecode = options.decodeStrings === false;
  this.decodeStrings = !skipDecode;
  // Crypto is kind of old and crusty. Historically, its default string
  // encoding is 'binary' so we have to make this configurable.
  // Everything else in the universe uses 'utf8', though.
  this.defaultEncoding = options.defaultEncoding || 'utf8';
  // not an actual buffer we keep track of, but a measurement
  // of how much we're waiting to get pushed to some underlying
  // socket or file.
  this.length = 0;
  // a flag to see when we're in the middle of a write.
  this.writing = false;
  // a flag to be able to tell if the onwrite cb is called immediately,
  // or on a later tick. Starts out true, because any actions that
  // shouldn't happen until "later" should generally also not happen
  // before the first write call.
  this.sync = true;
  // a flag to know if we're processing previously buffered items, which
  // may call the _write() callback in the same tick, so that we don't
  // end up in an overlapped onwrite situation.
  this.bufferProcessing = false;
  // the callback that's passed to _write(chunk,cb)
  this.onwrite = function(er) {
    onwrite(stream, er);
  };
  // the callback that the user supplies to write(chunk,encoding,cb)
  this.writecb = null;
  // the amount that is being written when _write is called.
  this.writelen = 0;
  // queue of WriteReq entries waiting behind the in-flight write.
  this.buffer = [];
  // True if the error was already emitted and should not be thrown again
  this.errorEmitted = false;
}
// Writable stream constructor. Note this ctor is also applied to Duplex
// instances, which are instanceof Readable rather than Writable.
function Writable(options) {
  var Duplex = _stream_duplex;
  // Writable ctor is applied to Duplexes, though they're not
  // instanceof Writable, they're instanceof Readable.
  if (!(this instanceof Writable) && !(this instanceof Duplex)) {
    return new Writable(options);
  }
  this._writableState = new WritableState(options, this);
  // legacy.
  this.writable = true;
  stream.call(this);
}
// Otherwise people can pipe Writable streams, which is just wrong.
Writable.prototype.pipe = function() {
  // Writables are sinks; piping *from* one is always an error.
  this.emit('error', new Error('Cannot pipe. Not readable.'));
};
// Reject a write() issued after end(): emit the error immediately and
// invoke the user callback with it on the next tick.
function writeAfterEnd(stream, state, cb) {
  var error = new Error('write after end');
  // TODO: defer error events consistently everywhere, not just the cb
  stream.emit('error', error);
  process.nextTick(function() {
    cb(error);
  });
}
// If we get something that is not a buffer, string, null, or undefined,
// and we're not in objectMode, then that's an error. Otherwise stream
// chunks are all considered to be of length=1, and the watermarks
// determine how many objects to keep in the buffer, rather than how many
// bytes or characters. Returns true when the chunk is acceptable; on
// failure emits a TypeError and schedules cb(er) for the next tick.
function validChunk(stream, state, chunk, cb) {
  if (!Buffer$3.isBuffer(chunk) &&
      typeof chunk !== 'string' &&
      chunk !== null &&
      chunk !== undefined &&
      !state.objectMode) {
    var er = new TypeError('Invalid non-string/buffer chunk');
    stream.emit('error', er);
    process.nextTick(function() {
      cb(er);
    });
    return false;
  }
  return true;
}
// Public write(): normalize the (chunk, encoding, cb) arguments, reject
// writes after end(), validate the chunk, then hand off to writeOrBuffer.
// Returns false when the caller should wait for 'drain' before writing
// more.
Writable.prototype.write = function(chunk, encoding, cb) {
  var state = this._writableState;
  // write(chunk, cb) form.
  if (typeof encoding === 'function') {
    cb = encoding;
    encoding = null;
  }
  if (Buffer$3.isBuffer(chunk)) {
    encoding = 'buffer';
  } else if (!encoding) {
    encoding = state.defaultEncoding;
  }
  if (typeof cb !== 'function') {
    cb = function() {};
  }
  if (state.ended) {
    writeAfterEnd(this, state, cb);
    return false;
  }
  if (validChunk(this, state, chunk, cb)) {
    return writeOrBuffer(this, state, chunk, encoding, cb);
  }
  return false;
};
// Convert a string chunk into a Buffer when the stream is configured to
// decode strings (the default outside objectMode); otherwise pass the
// chunk through untouched.
function decodeChunk(state, chunk, encoding) {
  var shouldDecode = !state.objectMode &&
      state.decodeStrings !== false &&
      typeof chunk === 'string';
  return shouldDecode ? new Buffer$3(chunk, encoding) : chunk;
}
// If we're already writing something, queue this chunk behind it and wait
// our turn; otherwise dispatch it to _write immediately. Returns true
// while the pending length stays under the high water mark (false means
// the caller needs a 'drain' event, and needDrain is flagged).
function writeOrBuffer(stream, state, chunk, encoding, cb) {
  chunk = decodeChunk(state, chunk, encoding);
  if (Buffer$3.isBuffer(chunk)) {
    encoding = 'buffer';
  }
  var len = state.objectMode ? 1 : chunk.length;
  state.length += len;
  var belowMark = state.length < state.highWaterMark;
  // we must ensure that previous needDrain will not be reset to false.
  if (!belowMark) {
    state.needDrain = true;
  }
  if (state.writing) {
    state.buffer.push(new WriteReq(chunk, encoding, cb));
  } else {
    doWrite(stream, state, len, chunk, encoding, cb);
  }
  return belowMark;
}
// Dispatch a single chunk to the stream's _write implementation, recording
// the in-flight length/callback and bracketing the call with the sync
// flag (true while _write is on the stack).
function doWrite(stream, state, len, chunk, encoding, cb) {
  state.writecb = cb;
  state.writelen = len;
  state.writing = true;
  state.sync = true;
  stream._write(chunk, encoding, state.onwrite);
  state.sync = false;
}
// Deliver a write error: invoke the callback (deferred to the next tick
// when the failing _write completed synchronously, to preserve callback
// ordering), then mark errorEmitted and emit 'error'.
function onwriteError(stream, state, sync, er, cb) {
  if (sync) {
    process.nextTick(function() {
      cb(er);
    });
  } else {
    cb(er);
  }
  stream._writableState.errorEmitted = true;
  stream.emit('error', er);
}
// Clear the in-flight write bookkeeping and deduct the flushed bytes from
// the pending length.
function onwriteStateUpdate(state) {
  state.length -= state.writelen;
  state.writing = false;
  state.writecb = null;
  state.writelen = 0;
}
// Completion callback for _write: update the state, surface errors, drain
// the pending write queue, and run afterWrite (deferred to the next tick
// when _write completed synchronously, to keep callback ordering sane).
function onwrite(stream, er) {
  var state = stream._writableState;
  var sync = state.sync;
  var cb = state.writecb;
  onwriteStateUpdate(state);
  if (er) {
    onwriteError(stream, state, sync, er, cb);
    return;
  }
  // Check if we're actually ready to finish, but don't emit yet
  var finished = needFinish(stream, state);
  if (!finished && !state.bufferProcessing && state.buffer.length) {
    clearBuffer(stream, state);
  }
  if (sync) {
    process.nextTick(function() {
      afterWrite(stream, state, finished, cb);
    });
  } else {
    afterWrite(stream, state, finished, cb);
  }
}
// Post-write bookkeeping: emit 'drain' when needed, run the user callback,
// then emit 'finish' if this write completed the stream.
function afterWrite(stream, state, finished, cb) {
  if (!finished) {
    onwriteDrain(stream, state);
  }
  cb();
  if (finished) {
    finishMaybe(stream, state);
  }
}
// Emit 'drain' once the buffer is empty and a prior write() returned false
// (needDrain). Runs from afterWrite, which onwrite defers to the next tick
// for synchronous writes -- so the write() caller that saw `false` gets a
// chance to attach its 'drain' listener before the event fires.
function onwriteDrain(stream, state) {
  if (state.length === 0 && state.needDrain) {
    state.needDrain = false;
    stream.emit('drain');
  }
}
// if there's something in the buffer waiting, then process it
//
// Flushes queued WriteReq entries to _write. If a doWrite completes
// asynchronously (state.writing still true after the call), the loop stops
// and the remaining entries stay queued for the next onwrite. Note `c` is
// declared with `var` deliberately: the slice below the loop reads it to
// know how far we got.
function clearBuffer(stream, state) {
  // guard against overlapping onwrite while we drain the queue in-tick.
  state.bufferProcessing = true;
  for (var c = 0; c < state.buffer.length; c++) {
    var entry = state.buffer[c];
    var chunk = entry.chunk;
    var encoding = entry.encoding;
    var cb = entry.callback;
    var len = state.objectMode ? 1 : chunk.length;
    doWrite(stream, state, len, chunk, encoding, cb);
    // if we didn't call the onwrite immediately, then
    // it means that we need to wait until it does.
    // also, that means that the chunk and cb are currently
    // being processed, so move the buffer counter past them.
    if (state.writing) {
      c++;
      break;
    }
  }
  state.bufferProcessing = false;
  if (c < state.buffer.length)
    state.buffer = state.buffer.slice(c);
  else
    state.buffer.length = 0;
}
// abstract: concrete writable streams must implement _write(chunk, encoding, cb).
Writable.prototype._write = function(chunk, encoding, cb) {
  cb(new Error('not implemented'));
};
// Public end(): optionally write one final chunk, then transition the
// stream into the ending state so 'finish' fires once all pending writes
// flush. Supports end(), end(cb), end(chunk[, encoding][, cb]).
Writable.prototype.end = function(chunk, encoding, cb) {
  var state = this._writableState;
  if (typeof chunk === 'function') {
    // end(cb) form.
    cb = chunk;
    chunk = null;
    encoding = null;
  } else if (typeof encoding === 'function') {
    // end(chunk, cb) form.
    cb = encoding;
    encoding = null;
  }
  if (chunk !== undefined && chunk !== null) {
    this.write(chunk, encoding);
  }
  // ignore unnecessary end() calls.
  if (!state.ending && !state.finished) {
    endWritable(this, state, cb);
  }
};
// True once end() has been called, the buffer is fully flushed, nothing is
// in flight, and 'finish' has not fired yet.
function needFinish(stream, state) {
  return state.ending &&
      state.length === 0 &&
      !state.finished &&
      !state.writing;
}
// Emit 'finish' exactly once, when the stream is completely done writing.
// Returns whether the stream was (just now or already about to be) done.
function finishMaybe(stream, state) {
  var need = needFinish(stream, state);
  if (need) {
    state.finished = true;
    stream.emit('finish');
  }
  return need;
}
// Transition into the ending state and arrange for cb to run on 'finish'
// (immediately on the next tick when the stream already finished).
function endWritable(stream, state, cb) {
  state.ending = true;
  finishMaybe(stream, state);
  if (cb) {
    if (state.finished) {
      process.nextTick(cb);
    } else {
      stream.once('finish', cb);
    }
  }
  state.ended = true;
}
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a transform stream is a readable/writable stream where you do
// something with the data. Sometimes it's called a "filter",
// but that's not a great name for it, since that implies a thing where
// some bits pass through, and others are simply ignored. (That would
// be a valid example of a transform, of course.)
//
// While the output is causally related to the input, it's not a
// necessarily symmetric or synchronous transformation. For example,
// a zlib stream might take multiple plain-text writes(), and then
// emit a single compressed chunk some time in the future.
//
// Here's how this works:
//
// The Transform stream has all the aspects of the readable and writable
// stream classes. When you write(chunk), that calls _write(chunk,cb)
// internally, and returns false if there's a lot of pending writes
// buffered up. When you call read(), that calls _read(n) until
// there's enough pending readable data buffered up.
//
// In a transform stream, the written data is placed in a buffer. When
// _read(n) is called, it transforms the queued up data, calling the
// buffered _write cb's as it consumes chunks. If consuming a single
// written chunk would result in multiple output chunks, then the first
// outputted bit calls the readcb, and subsequent chunks just go into
// the read buffer, and will cause it to emit 'readable' if necessary.
//
// This way, back-pressure is actually determined by the reading side,
// since _read has to be called to start processing a new chunk. However,
// a pathological inflate type of transform can cause excessive buffering
// here. For example, imagine a stream where every byte of input is
// interpreted as an integer from 0-255, and then results in that many
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
// 1kb of data being output. In this case, you could write a very small
// amount of input, and end up with a very large amount of output. In
// such a pathological inflating mechanism, there'd be no way to tell
// the system to stop doing the transform. A single 4MB write could
// cause the system to run out of memory.
//
// However, even in such a pathological case, only a single written chunk
// would be consumed, and then the rest would wait (un-transformed) until
// the results of the previous transformed chunk were consumed.
// module export: the Transform constructor (function declaration hoisted
// from below).
var _stream_transform = Transform;
/*<replacement>*/
util.inherits = inherits_browser;
/*</replacement>*/
// Transform is a Duplex whose write/read sides are wired through _transform.
util.inherits(Transform, _stream_duplex);
/**
 * Per-stream bookkeeping for a Transform: tracks the chunk currently being
 * transformed and the pending _write callback for it.
 *
 * @param {Object} options - stream options (unused here, kept for signature parity)
 * @param {Transform} stream - the owning Transform instance
 */
function TransformState(options, stream) {
  var owner = stream;
  // Bound completion callback handed to _transform() for every chunk.
  this.afterTransform = function(er, data) {
    return afterTransform(owner, er, data);
  };
  this.needTransform = false; // readable side wants data but none is queued
  this.transforming = false;  // a _transform() call is currently in flight
  this.writecb = null;        // callback of the buffered _write, if any
  this.writechunk = null;     // chunk waiting to be transformed, if any
}
/**
 * Completion callback for a single _transform() call.
 *
 * Pushes the produced data (if any) to the readable side, invokes the
 * buffered _write callback, and kicks _read() again when the readable side
 * is still below its high-water mark.
 *
 * @param {Transform} stream - owning stream
 * @param {Error|null} er - error from the user's _transform, passed to the write cb
 * @param {*} data - transformed output chunk, or null/undefined for "no output"
 */
function afterTransform(stream, er, data) {
  var state = stream._transformState;
  state.transforming = false;

  var writeCallback = state.writecb;
  if (!writeCallback)
    return stream.emit('error', new Error('no writecb in Transform class'));

  state.writechunk = null;
  state.writecb = null;

  // Push output *before* acking the write, so downstream sees the data
  // before the next chunk can arrive.
  if (data !== null && data !== undefined)
    stream.push(data);

  if (writeCallback)
    writeCallback(er);

  var readState = stream._readableState;
  readState.reading = false;
  if (readState.needReadable || readState.length < readState.highWaterMark) {
    stream._read(readState.highWaterMark);
  }
}
/**
 * Transform stream constructor. A Transform is a Duplex where the output is
 * computed from the input via the user-supplied _transform() method.
 *
 * @param {Object} [options] - standard Duplex/stream options
 */
function Transform(options) {
  if (!(this instanceof Transform)) {
    return new Transform(options);
  }

  _stream_duplex.call(this, options);

  var state = this._transformState = new TransformState(options, this);

  // Keep a reference for the 'finish' closure below.
  var self = this;

  // Ask for a 'readable' event once data has been transformed.
  this._readableState.needReadable = true;

  // _read is implemented and everything Readable wants before the first
  // _read call is done, so the sync guard flag can be cleared.
  this._readableState.sync = false;

  // When the writable side finishes, flush out anything remaining.
  this.once('finish', function() {
    if (typeof this._flush === 'function') {
      this._flush(function(er) {
        done(self, er);
      });
    } else {
      done(self);
    }
  });
}
// Pushing output means the readable side's demand has been satisfied for
// now, so clear the needTransform flag before delegating to Duplex#push.
Transform.prototype.push = function(chunk, encoding) {
  var state = this._transformState;
  state.needTransform = false;
  return _stream_duplex.prototype.push.call(this, chunk, encoding);
};
// This is the part where you do stuff!
// override this function in implementation classes.
// 'chunk' is an input chunk.
//
// Call `push(newChunk)` to pass along transformed output
// to the readable side. You may call 'push' zero or more times.
//
// Call `cb(err)` when you are done with this chunk. If you pass
// an error, then that'll put the hurt on the whole operation. If you
// never call cb(), then you'll never get another chunk.
//
// Abstract method: the base class always throws; subclasses must override.
Transform.prototype._transform = function(chunk, encoding, cb) {
  throw new Error('not implemented');
};
// Writable-side hook: stash the chunk and its callback; the actual work
// happens in _read(), driven by the readable side's demand.
Transform.prototype._write = function(chunk, encoding, cb) {
  var state = this._transformState;
  state.writecb = cb;
  state.writechunk = chunk;
  state.writeencoding = encoding;

  // If a transform is already in flight, afterTransform() will pick this
  // chunk up when it finishes.
  if (state.transforming)
    return;

  var readState = this._readableState;
  var wantsData = state.needTransform ||
      readState.needReadable ||
      readState.length < readState.highWaterMark;
  if (wantsData)
    this._read(readState.highWaterMark);
};
// Readable-side hook. The argument is ignored: reaching here just means the
// readable side wants more data, so transform the buffered chunk if there
// is one, otherwise remember the demand for the next _write().
Transform.prototype._read = function(n) {
  var state = this._transformState;
  var hasPendingChunk = state.writechunk !== null &&
      state.writecb &&
      !state.transforming;

  if (hasPendingChunk) {
    state.transforming = true;
    this._transform(state.writechunk, state.writeencoding, state.afterTransform);
  } else {
    // Mark that we need a transform, so that any data that comes in
    // will get processed, now that we've asked for it.
    state.needTransform = true;
  }
};
/**
 * Finalize a Transform once its writable side has finished and any _flush()
 * has run.
 *
 * @param {Transform} stream - the stream being finished
 * @param {Error} [er] - error from _flush; re-emitted as 'error' if present
 * @returns {*} the result of push(null) (EOF signalled downstream)
 * @throws {Error} if writes are still buffered or a transform is still in
 *   flight — both indicate a broken Transform implementation.
 */
function done(stream, er) {
  if (er)
    return stream.emit('error', er);
  // if there's nothing in the write buffer, then that means
  // that nothing more will ever be provided
  // (note: the unused `rs = stream._readableState` local of the original
  // has been dropped)
  var ws = stream._writableState;
  var ts = stream._transformState;
  if (ws.length)
    throw new Error('calling transform done when ws.length != 0');
  if (ts.transforming)
    throw new Error('calling transform done when still transforming');
  return stream.push(null);
}
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a passthrough stream.
// basically just the most minimal sort of Transform stream.
// Every written chunk gets output as-is.
// Module binding: this bundled module resolves to the PassThrough constructor.
var _stream_passthrough = PassThrough;
/*<replacement>*/
// Browser shim: route util.inherits through the bundled inherits implementation.
util.inherits = inherits_browser;
/*</replacement>*/
// PassThrough is the minimal Transform: identity _transform.
util.inherits(PassThrough, _stream_transform);
/**
 * PassThrough: a Transform whose output is its input, unchanged.
 *
 * @param {Object} [options] - standard stream options
 */
function PassThrough(options) {
  if (!(this instanceof PassThrough)) {
    return new PassThrough(options);
  }
  _stream_transform.call(this, options);
}

// Identity transform: hand every chunk straight back.
PassThrough.prototype._transform = function(chunk, encoding, callback) {
  callback(null, chunk);
};
// Aggregate module mirroring the readable-stream package entry point:
// the main export is Readable, with the other stream classes attached.
var readable = createCommonjsModule(function (module, exports) {
// hack to fix a circular dependency issue when used with browserify
exports = module.exports = _stream_readable;
exports.Stream = stream;
exports.Readable = exports;
exports.Writable = _stream_writable;
exports.Duplex = _stream_duplex;
exports.Transform = _stream_transform;
exports.PassThrough = _stream_passthrough;
// Escape hatch: on Node (not in the browser) READABLE_STREAM=disable swaps
// the whole module for the built-in `stream`.
if (!process.browser && process.env.READABLE_STREAM === 'disable') {
  module.exports = stream;
}
});
// Named bindings re-exporting the readable-stream facade's properties.
var readable_1 = readable.Stream;
var readable_2 = readable.Readable;
var readable_3 = readable.Writable;
var readable_4 = readable.Duplex;
var readable_5 = readable.Transform;
var readable_6 = readable.PassThrough;
// --- Inlined package.json of levelup@0.18.6 ---
// Rollup inlines the manifest as individual bindings; getLevelDOWN() below
// reads devDependencies.leveldown from it for its version check.
var name = "levelup";
var description = "Fast & simple storage - a Node.js-style LevelDB wrapper";
var version = "0.18.6";
var contributors = [
	"Rod Vagg <r@va.gg> (https://github.com/rvagg)",
	"John Chesley <john@chesl.es> (https://github.com/chesles/)",
	"Jake Verbaten <raynos2@gmail.com> (https://github.com/raynos)",
	"Dominic Tarr <dominic.tarr@gmail.com> (https://github.com/dominictarr)",
	"Max Ogden <max@maxogden.com> (https://github.com/maxogden)",
	"Lars-Magnus Skog <lars.magnus.skog@gmail.com> (https://github.com/ralphtheninja)",
	"David Björklund <david.bjorklund@gmail.com> (https://github.com/kesla)",
	"Julian Gruber <julian@juliangruber.com> (https://github.com/juliangruber)",
	"Paolo Fragomeni <paolo@async.ly> (https://github.com/hij1nx)",
	"Anton Whalley <anton.whalley@nearform.com> (https://github.com/No9)",
	"Matteo Collina <matteo.collina@gmail.com> (https://github.com/mcollina)",
	"Pedro Teixeira <pedro.teixeira@gmail.com> (https://github.com/pgte)",
	"James Halliday <mail@substack.net> (https://github.com/substack)"
];
var repository = {
	type: "git",
	url: "https://github.com/rvagg/node-levelup.git"
};
var homepage = "https://github.com/rvagg/node-levelup";
var keywords = [
	"leveldb",
	"stream",
	"database",
	"db",
	"store",
	"storage",
	"json"
];
var main = "lib/levelup.js";
var dependencies = {
	bl: "~0.8.1",
	"deferred-leveldown": "~0.2.0",
	errno: "~0.1.1",
	prr: "~0.0.0",
	"readable-stream": "~1.0.26",
	semver: "~2.3.1",
	xtend: "~3.0.0"
};
var devDependencies = {
	leveldown: "~0.10.0",
	bustermove: "*",
	tap: "*",
	referee: "*",
	rimraf: "*",
	async: "*",
	fstream: "*",
	tar: "*",
	mkfiletree: "*",
	readfiletree: "*",
	"slow-stream": ">=0.0.4",
	delayed: "*",
	boganipsum: "*",
	du: "*",
	memdown: "*",
	"msgpack-js": "*"
};
// Browser-field exclusions: native/Node-only modules are stubbed out.
var browser = {
	leveldown: false,
	"leveldown/package": false,
	semver: false
};
var scripts = {
	test: "tap test/*-test.js --stderr",
	functionaltests: "node ./test/functional/fstream-test.js && node ./test/functional/binary-data-test.js && node ./test/functional/compat-test.js",
	alltests: "npm test && npm run-script functionaltests"
};
var license = "MIT";
var _resolved = "https://registry.npmjs.org/levelup/-/levelup-0.18.6.tgz";
var _integrity = "sha1-5qAcsIlhbI7MApHCqb0/DETj5es=";
var _from = "levelup@0.18.6";
// Plain-object view of the manifest (the 'default' export below).
var _package = {
	name: name,
	description: description,
	version: version,
	contributors: contributors,
	repository: repository,
	homepage: homepage,
	keywords: keywords,
	main: main,
	dependencies: dependencies,
	devDependencies: devDependencies,
	browser: browser,
	scripts: scripts,
	license: license,
	_resolved: _resolved,
	_integrity: _integrity,
	_from: _from
};
// Frozen namespace-style object, as rollup emits for a JSON module.
var _package$1 = /*#__PURE__*/Object.freeze({
	name: name,
	description: description,
	version: version,
	contributors: contributors,
	repository: repository,
	homepage: homepage,
	keywords: keywords,
	main: main,
	dependencies: dependencies,
	devDependencies: devDependencies,
	browser: browser,
	scripts: scripts,
	license: license,
	_resolved: _resolved,
	_integrity: _integrity,
	_from: _from,
	'default': _package
});
// Stub for an excluded module (see `browser` field above). getLevelDOWN()
// probes `.version` and `.satisfies` on this — presumably the semver/leveldown
// modules in the Node build — so calling getLevelDOWN() in this browser build
// will fail at runtime. NOTE(review): confirm this path is never taken in the
// browser bundle.
var require$$4 = {};
var require$$1 = getCjsExportFromNamespace(_package$1);
/* Copyright (c) 2012-2014 LevelUP contributors
* See list at <https://github.com/rvagg/node-levelup#contributing>
* MIT License
* <https://github.com/rvagg/node-levelup/blob/master/LICENSE.md>
*/
// Module-level singletons for levelup's util: error class, the list of
// Buffer encodings levelup understands, store defaults, the lazily-assigned
// leveldown handle, and lookup tables for encoders.
var LevelUPError$1 = errors.LevelUPError
  , encodingNames = [
        'hex'
      , 'utf8'
      , 'utf-8'
      , 'ascii'
      , 'binary'
      , 'base64'
      , 'ucs2'
      , 'ucs-2'
      , 'utf16le'
      , 'utf-16le'
    ]
    // Store-wide option defaults applied by getOptions().
  , defaultOptions = {
        createIfMissing : true
      , errorIfExists   : false
      , keyEncoding     : 'utf8'
      , valueEncoding   : 'utf8'
      , compression     : true
    }
    // cached LevelDOWN constructor, filled in by getLevelDOWN()
  , leveldown
    // Map of encoding name -> { encode, decode, buffer, type }.
  , encodings = (function () {
      // null/undefined/Buffer values pass through encoders untouched
      function isBinary (data) {
        return data === undefined || data === null || Buffer.isBuffer(data)
      }

      var encodings = {};

      // utf8 is string passthrough: encode coerces to String, decode is identity
      encodings.utf8 = encodings['utf-8'] = {
          encode : function (data) {
            return isBinary(data) ? data : String(data)
          }
        , decode : function (data) {
            return data
          }
        , buffer : false
        , type   : 'utf8'
      };

      encodings.json = {
          encode : JSON.stringify
        , decode : JSON.parse
        , buffer : false
        , type   : 'json'
      };

      // Every remaining Buffer encoding round-trips through Buffer.
      encodingNames.forEach(function (type) {
        if (encodings[type])
          return

        encodings[type] = {
            encode : function (data) {
              return isBinary(data) ? data : new Buffer(data, type)
            }
          , decode : function (buffer) {
              // in the browser build, decode back to a string of this encoding
              return process.browser ? buffer.toString(type) : buffer;
            }
          , buffer : true
          , type   : type // useful for debugging purposes
        };
      });

      return encodings
    })()
    // Map of encoding name -> { valueEncoding: name }, used when the caller
    // passes a bare encoding string instead of an options object.
  , encodingOpts = (function () {
      var eo = {};
      encodingNames.forEach(function (e) {
        eo[e] = { valueEncoding : e };
      });
      return eo
    }());
/**
 * Pipe the full contents of one LevelUP database into another.
 *
 * @param {Object} srcdb - source database (must expose readStream())
 * @param {Object} dstdb - destination database (must expose writeStream())
 * @param {Function} [callback] - invoked on 'close' or 'error'; without it,
 *   errors are thrown.
 */
function copy (srcdb, dstdb, callback) {
  var onClose = callback || function () {};
  var onError = callback || function (err) { throw err };
  srcdb.readStream()
    .pipe(dstdb.writeStream())
    .on('close', onClose)
    .on('error', onError);
}
/**
 * Normalize an options argument into a full options object, layered on top
 * of the store's own options.
 *
 * @param {Object} levelup - the LevelUP instance (its .options form the base)
 * @param {Object|String} options - an options object, or a bare encoding name
 * @returns {Object} merged options
 */
function getOptions (levelup, options) {
  var isEncodingName = typeof options == 'string'; // just an encoding
  // legacy alias: `encoding` means `valueEncoding` (mutates caller's object,
  // matching the original behavior)
  if (!isEncodingName && options && options.encoding && !options.valueEncoding)
    options.valueEncoding = options.encoding;

  var base = (levelup && levelup.options) || {};
  var overlay = isEncodingName
    ? (encodingOpts[options] || encodingOpts[defaultOptions.valueEncoding])
    : options;
  return xtend$2(base, overlay)
}
/**
 * Locate and cache the LevelDOWN backend, verifying that its version
 * satisfies the range pinned in this package's devDependencies.
 *
 * @returns {Object} the LevelDOWN module (cached in module-level `leveldown`)
 * @throws {LevelUPError} when LevelDOWN is missing or its version mismatches
 */
function getLevelDOWN () {
  if (leveldown)
    return leveldown

  var requiredVersion = require$$1.devDependencies.leveldown;
  var missingLevelDOWNError = 'Could not locate LevelDOWN, try `npm install leveldown`';
  var leveldownVersion;

  try {
    leveldownVersion = require$$4.version;
  } catch (e) {
    throw new LevelUPError$1(missingLevelDOWNError)
  }

  if (!require$$4.satisfies(leveldownVersion, requiredVersion)) {
    throw new LevelUPError$1(
        'Installed version of LevelDOWN ('
      + leveldownVersion
      + ') does not match required version ('
      + requiredVersion
      + ')'
    )
  }

  try {
    return leveldown = require$$4
  } catch (e) {
    throw new LevelUPError$1(missingLevelDOWNError)
  }
}
/**
 * Deliver an error either to a callback (if one was supplied) or as an
 * 'error' event on the LevelUP instance.
 *
 * @param {Object} levelup - emitter used when no callback is given
 * @param {Error} error - the error to deliver
 * @param {Function} [callback]
 * @returns {*} the callback's or emit()'s return value
 */
function dispatchError (levelup, error, callback) {
  if (typeof callback == 'function')
    return callback(error)
  return levelup.emit('error', error)
}
// --- encoding lookup helpers ---
// Per-operation settings win over store-wide options; an unknown encoding
// name falls through unresolved (returned as-is).

/** Resolve the key encoder object for an operation. */
function getKeyEncoder (options, op) {
  var type = (op && op.keyEncoding) || options.keyEncoding || 'utf8';
  return encodings[type] || type
}

/** Resolve the value encoder; `encoding` is a legacy alias for `valueEncoding`. */
function getValueEncoder (options, op) {
  var type = (op && (op.valueEncoding || op.encoding))
    || options.valueEncoding
    || options.encoding
    || 'utf8';
  return encodings[type] || type
}

/** Encode a key with the resolved key encoding. */
function encodeKey (key, options, op) {
  return getKeyEncoder(options, op).encode(key)
}

/** Encode a value with the resolved value encoding. */
function encodeValue (value, options, op) {
  return getValueEncoder(options, op).encode(value)
}

/** Decode a key with the store-wide key encoding. */
function decodeKey (key, options) {
  return getKeyEncoder(options).decode(key)
}

/** Decode a value with the store-wide value encoding. */
function decodeValue (value, options) {
  return getValueEncoder(options).decode(value)
}

/** Whether the backend should hand values back as Buffers. */
function isValueAsBuffer (options, op) {
  return getValueEncoder(options, op).buffer
}

/** Whether the backend should hand keys back as Buffers. */
function isKeyAsBuffer (options, op) {
  return getKeyEncoder(options, op).buffer
}
// Public helper surface of levelup's util module, consumed by the levelup
// core and by ReadStream below.
var util$1 = {
    defaultOptions  : defaultOptions
  , copy            : copy
  , getOptions      : getOptions
  , getLevelDOWN    : getLevelDOWN
  , dispatchError   : dispatchError
  , encodeKey       : encodeKey
  , encodeValue     : encodeValue
  , isValueAsBuffer : isValueAsBuffer
  , isKeyAsBuffer   : isKeyAsBuffer
  , decodeValue     : decodeValue
  , decodeKey       : decodeKey
};
/* Copyright (c) 2012-2014 LevelUP contributors
* See list at <https://github.com/rvagg/node-levelup#contributing>
* MIT License <https://github.com/rvagg/node-levelup/blob/master/LICENSE.md>
*/
// NOTE: we are fixed to readable-stream@1.0.x for now
// for pure Streams2 across Node versions
// Local bindings for ReadStream.
var Readable$1 = readable.Readable;
var inherits = util$2.inherits;
var EncodingError = errors.EncodingError;

// By default a ReadStream emits both keys and values.
var defaultOptions$1 = { keys: true, values: true };

// Data shapers: each is invoked with `this` bound to the ReadStream and
// turns a raw (key, value) pair into the emitted item.
var makeKeyValueData = function (key, value) {
  return {
    key: util$1.decodeKey(key, this._options),
    value: util$1.decodeValue(value, this._options)
  }
};
var makeKeyData = function (key) {
  return util$1.decodeKey(key, this._options)
};
var makeValueData = function (_, value) {
  return util$1.decodeValue(value, this._options)
};
var makeNoData = function () { return null };
/**
 * Object-mode Readable stream over a range of a LevelUP database.
 *
 * @param {Object} options - range/encoding options (start, end, limit,
 *   keys, values, keyEncoding, valueEncoding, encoding, highWaterMark)
 * @param {Object} db - the LevelUP instance (must expose isOpen()/once('ready'))
 * @param {Function} iteratorFactory - called with the normalized options to
 *   produce the backend iterator
 */
function ReadStream (options, db, iteratorFactory) {
  if (!(this instanceof ReadStream))
    return new ReadStream(options, db, iteratorFactory)

  Readable$1.call(this, { objectMode: true, highWaterMark: options.highWaterMark });

  // purely to keep `db` around until we're done so it's not GCed if the user doesn't keep a ref
  this._db = db;

  options = this._options = xtend$2(defaultOptions$1, options);

  this._keyEncoding = options.keyEncoding || options.encoding;
  this._valueEncoding = options.valueEncoding || options.encoding;

  // Encode range endpoints with the key encoding before they reach the iterator.
  if (typeof this._options.start != 'undefined')
    this._options.start = util$1.encodeKey(this._options.start, this._options);
  if (typeof this._options.end != 'undefined')
    this._options.end = util$1.encodeKey(this._options.end, this._options);
  if (typeof this._options.limit != 'number')
    this._options.limit = -1; // -1 means "no limit"

  this._options.keyAsBuffer = util$1.isKeyAsBuffer(this._options);
  this._options.valueAsBuffer = util$1.isValueAsBuffer(this._options);

  // Pick the shaper for emitted items: {key,value} pairs, keys only,
  // values only, or null when neither was requested.
  this._makeData = this._options.keys && this._options.values
    ? makeKeyValueData : this._options.keys
      ? makeKeyData : this._options.values
        ? makeValueData : makeNoData;

  var self = this;
  if (!this._db.isOpen()) {
    // db still opening: defer iterator creation until it signals 'ready'
    this._db.once('ready', function () {
      if (!self._destroyed) {
        self._iterator = iteratorFactory(self._options);
      }
    });
  } else
    this._iterator = iteratorFactory(this._options);
}

inherits(ReadStream, Readable$1);
// Readable _read implementation: pull the next entry from the backend
// iterator and push it downstream. Named so it can re-invoke itself once
// the db opens.
ReadStream.prototype._read = function read () {
  var self = this;
  // wait for the db to open, then retry this same read
  if (!self._db.isOpen()) {
    return self._db.once('ready', function () { read.call(self); })
  }
  if (self._destroyed)
    return

  self._iterator.next(function(err, key, value) {
    // key and value both undefined signals iterator exhaustion
    if (err || (key === undefined && value === undefined)) {
      if (!err && !self._destroyed)
        self.push(null); // clean EOF
      return self._cleanup(err)
    }

    try {
      value = self._makeData(key, value);
    } catch (e) {
      // decoding failure: wrap and tear the stream down
      return self._cleanup(new EncodingError(e))
    }

    if (!self._destroyed)
      self.push(value);
  });
};
// Tear down the stream exactly once: mark destroyed, surface any error,
// close the backend iterator if one exists, then emit 'close'.
ReadStream.prototype._cleanup = function (err) {
  if (this._destroyed)
    return

  this._destroyed = true;
  var stream = this;

  if (err)
    stream.emit('error', err);

  var iterator = stream._iterator;
  if (!iterator) {
    stream.emit('close');
    return;
  }

  iterator.end(function () {
    stream._iterator = null;
    stream.emit('close');
  });
};
// destroy() is idempotent: _cleanup() no-ops once _destroyed is set.
ReadStream.prototype.destroy = function () {
  this._cleanup();
};
ReadStream.prototype.toString = function () {
  return 'LevelUP.ReadStream'
};
// module export
var readStream = ReadStream;
// Array test: prefer native Array.isArray, fall back to the classic
// Object#toString tag check for very old engines.
var isarray$1 = Array.isArray || function (candidate) {
  return Object.prototype.toString.call(candidate) == '[object Array]';
};
// Bundled copy of Node's legacy string_decoder module. StringDecoder buffers
// the trailing bytes of an incomplete multi-byte character between write()
// calls so that chunk boundaries never split a character in the decoded
// output.
var string_decoder$1 = createCommonjsModule(function (module, exports) {
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
var Buffer = buffer.Buffer;
// Feature-detect Buffer.isEncoding; fall back to a hard-coded list of the
// encodings this decoder knows about.
var isBufferEncoding = Buffer.isEncoding
  || function(encoding) {
       switch (encoding && encoding.toLowerCase()) {
         case 'hex': case 'utf8': case 'utf-8': case 'ascii': case 'binary': case 'base64': case 'ucs2': case 'ucs-2': case 'utf16le': case 'utf-16le': case 'raw': return true;
         default: return false;
       }
     };
// Throws for a non-empty encoding name the Buffer implementation rejects.
function assertEncoding(encoding) {
  if (encoding && !isBufferEncoding(encoding)) {
    throw new Error('Unknown encoding: ' + encoding);
  }
}
// StringDecoder provides an interface for efficiently splitting a series of
// buffers into a series of JS strings without breaking apart multi-byte
// characters. CESU-8 is handled as part of the UTF-8 encoding.
//
// @TODO Handling all encodings inside a single object makes it very difficult
// to reason about this code, so it should be split up in the future.
// @TODO There should be a utf8-strict encoding that rejects invalid UTF-8 code
// points as used by CESU-8.
var StringDecoder = exports.StringDecoder = function(encoding) {
  // note: the original name is validated, but the normalized (dash/underscore
  // stripped) form is what gets stored and used
  this.encoding = (encoding || 'utf8').toLowerCase().replace(/[-_]/, '');
  assertEncoding(encoding);
  switch (this.encoding) {
    case 'utf8':
      // CESU-8 represents each of Surrogate Pair by 3-bytes
      this.surrogateSize = 3;
      break;
    case 'ucs2':
    case 'utf16le':
      // UTF-16 represents each of Surrogate Pair by 2-bytes
      this.surrogateSize = 2;
      this.detectIncompleteChar = utf16DetectIncompleteChar;
      break;
    case 'base64':
      // Base-64 stores 3 bytes in 4 chars, and pads the remainder.
      this.surrogateSize = 3;
      this.detectIncompleteChar = base64DetectIncompleteChar;
      break;
    default:
      // single-byte / direct encodings need no buffering at all
      this.write = passThroughWrite;
      return;
  }
  // Enough space to store all bytes of a single character. UTF-8 needs 4
  // bytes, but CESU-8 may require up to 6 (3 bytes per surrogate).
  this.charBuffer = new Buffer(6);
  // Number of bytes received for the current incomplete multi-byte character.
  this.charReceived = 0;
  // Number of bytes expected for the current incomplete multi-byte character.
  this.charLength = 0;
};
// write decodes the given buffer and returns it as JS string that is
// guaranteed to not contain any partial multi-byte characters. Any partial
// character found at the end of the buffer is buffered up, and will be
// returned when calling write again with the remaining bytes.
//
// Note: Converting a Buffer containing an orphan surrogate to a String
// currently works, but converting a String to a Buffer (via `new Buffer`, or
// Buffer#write) will replace incomplete surrogates with the unicode
// replacement character. See https://codereview.chromium.org/121173009/ .
StringDecoder.prototype.write = function(buffer) {
  var charStr = '';
  // if our last write ended with an incomplete multibyte character
  while (this.charLength) {
    // determine how many remaining bytes this buffer has to offer for this char
    var available = (buffer.length >= this.charLength - this.charReceived) ?
        this.charLength - this.charReceived :
        buffer.length;
    // add the new bytes to the char buffer
    buffer.copy(this.charBuffer, this.charReceived, 0, available);
    this.charReceived += available;
    if (this.charReceived < this.charLength) {
      // still not enough chars in this buffer? wait for more ...
      return '';
    }
    // remove bytes belonging to the current character from the buffer
    buffer = buffer.slice(available, buffer.length);
    // get the character that was split
    charStr = this.charBuffer.slice(0, this.charLength).toString(this.encoding);
    // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character
    var charCode = charStr.charCodeAt(charStr.length - 1);
    if (charCode >= 0xD800 && charCode <= 0xDBFF) {
      // only the lead half decoded so far: keep accumulating the trail half
      this.charLength += this.surrogateSize;
      charStr = '';
      continue;
    }
    this.charReceived = this.charLength = 0;
    // if there are no more bytes in this buffer, just emit our char
    if (buffer.length === 0) {
      return charStr;
    }
    break;
  }
  // determine and set charLength / charReceived
  this.detectIncompleteChar(buffer);
  var end = buffer.length;
  if (this.charLength) {
    // buffer the incomplete character bytes we got
    buffer.copy(this.charBuffer, 0, buffer.length - this.charReceived, end);
    end -= this.charReceived;
  }
  charStr += buffer.toString(this.encoding, 0, end);
  // (re-declaration of `end` below is an original quirk; `var` hoists so it
  // is the same binding, now repurposed as an index into charStr)
  var end = charStr.length - 1;
  var charCode = charStr.charCodeAt(end);
  // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character
  if (charCode >= 0xD800 && charCode <= 0xDBFF) {
    var size = this.surrogateSize;
    this.charLength += size;
    this.charReceived += size;
    this.charBuffer.copy(this.charBuffer, size, 0, size);
    buffer.copy(this.charBuffer, 0, 0, size);
    return charStr.substring(0, end);
  }
  // or just emit the charStr
  return charStr;
};
// detectIncompleteChar determines if there is an incomplete UTF-8 character at
// the end of the given buffer. If so, it sets this.charLength to the byte
// length of that character, and sets this.charReceived to the number of bytes
// that are available for this character.
StringDecoder.prototype.detectIncompleteChar = function(buffer) {
  // determine how many bytes we have to check at the end of this buffer
  var i = (buffer.length >= 3) ? 3 : buffer.length;
  // Figure out if one of the last i bytes of our buffer announces an
  // incomplete char.
  for (; i > 0; i--) {
    var c = buffer[buffer.length - i];
    // See http://en.wikipedia.org/wiki/UTF-8#Description
    // 110XXXXX
    if (i == 1 && c >> 5 == 0x06) {
      this.charLength = 2;
      break;
    }
    // 1110XXXX
    if (i <= 2 && c >> 4 == 0x0E) {
      this.charLength = 3;
      break;
    }
    // 11110XXX
    if (i <= 3 && c >> 3 == 0x1E) {
      this.charLength = 4;
      break;
    }
  }
  this.charReceived = i;
};
// Flush: decode any trailing input, then emit whatever partial character
// bytes are still buffered (decoded as best as the encoding allows).
StringDecoder.prototype.end = function(buffer) {
  var res = '';
  if (buffer && buffer.length)
    res = this.write(buffer);
  if (this.charReceived) {
    var cr = this.charReceived;
    var buf = this.charBuffer;
    var enc = this.encoding;
    res += buf.slice(0, cr).toString(enc);
  }
  return res;
};
// write() replacement for encodings that never split characters.
function passThroughWrite(buffer) {
  return buffer.toString(this.encoding);
}
// UTF-16LE code units are 2 bytes: an odd byte count means one byte pending.
function utf16DetectIncompleteChar(buffer) {
  this.charReceived = buffer.length % 2;
  this.charLength = this.charReceived ? 2 : 0;
}
// Base64 consumes input in 3-byte groups; a partial group is buffered.
function base64DetectIncompleteChar(buffer) {
  this.charReceived = buffer.length % 3;
  this.charLength = this.charReceived ? 3 : 0;
}
});
// re-export of the module's StringDecoder binding
var string_decoder_1$1 = string_decoder$1.StringDecoder;
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// Module binding: this bundled module resolves to the Readable constructor.
var _stream_readable$1 = Readable$2;
/*<replacement>*/
/*</replacement>*/
/*<replacement>*/
var Buffer$4 = buffer.Buffer;
/*</replacement>*/
Readable$2.ReadableState = ReadableState$1;
var EE$1 = events.EventEmitter;
/*<replacement>*/
// Shim for very old EventEmitter implementations lacking listenerCount.
if (!EE$1.listenerCount) EE$1.listenerCount = function(emitter, type) {
  return emitter.listeners(type).length;
};
/*</replacement>*/
/*<replacement>*/
// Browser shim: route util.inherits through the bundled inherits implementation.
util.inherits = inherits_browser;
/*</replacement>*/
// Lazily assigned in setEncoding()/push() to avoid loading the decoder
// until an encoding is actually requested.
var StringDecoder$1;
util.inherits(Readable$2, stream);
/**
 * Internal per-stream state for Readable$2 (readable-stream 1.0.x layout).
 *
 * @param {Object} [options] - highWaterMark, objectMode, defaultEncoding, encoding
 * @param {Object} stream - the owning stream (unused here, kept for signature parity)
 */
function ReadableState$1(options, stream) {
  options = options || {};

  // The point at which _read() stops being called to fill the buffer.
  // 0 is a valid value and means "don't call _read preemptively ever".
  // ~~ truncates to an int.
  var hwm = options.highWaterMark;
  this.highWaterMark = ~~((hwm || hwm === 0) ? hwm : 16 * 1024);

  this.buffer = [];       // queued chunks
  this.length = 0;        // total buffered bytes (or items, in object mode)
  this.pipes = null;
  this.pipesCount = 0;
  this.flowing = false;
  this.ended = false;     // push(null) seen
  this.endEmitted = false;
  this.reading = false;   // a _read() call is outstanding

  // Don't emit 'end' until a read() has actually happened, so consumers
  // that do some I/O before reading can't miss the event.
  this.calledRead = false;

  // True while the first (possibly synchronous) _read is in flight, so we
  // can tell whether the onwrite cb fired immediately or on a later tick.
  this.sync = true;

  // whenever we return null, we set a flag to say that we're awaiting a
  // 'readable' event emission
  this.needReadable = false;
  this.emittedReadable = false;
  this.readableListening = false;

  // Object mode: read(n) ignores n, and every buffered item counts as 1.
  this.objectMode = !!options.objectMode;

  // Crypto historically defaults to 'binary'; everything else uses 'utf8',
  // so the default string encoding is configurable.
  this.defaultEncoding = options.defaultEncoding || 'utf8';

  // when piping, only 'readable' events after reading all bytes matter
  this.ranOut = false;

  // number of writers awaiting a 'drain' event in .pipe()s
  this.awaitDrain = 0;

  // true when a maybeReadMore has been scheduled
  this.readingMore = false;

  this.decoder = null;
  this.encoding = null;
  if (options.encoding) {
    if (!StringDecoder$1)
      StringDecoder$1 = string_decoder$1.StringDecoder;
    this.decoder = new StringDecoder$1(options.encoding);
    this.encoding = options.encoding;
  }
}
/**
 * Readable stream constructor (readable-stream 1.0.x).
 *
 * @param {Object} [options] - see ReadableState$1
 */
function Readable$2(options) {
  if (!(this instanceof Readable$2)) {
    return new Readable$2(options);
  }
  this._readableState = new ReadableState$1(options, this);
  this.readable = true; // legacy flag
  stream.call(this);
}
// Manually shove something into the read() buffer. Returns true while the
// highWaterMark has not been hit, mirroring Writable.write()'s contract.
Readable$2.prototype.push = function(chunk, encoding) {
  var state = this._readableState;

  // In byte mode, strings are converted to Buffers up front unless their
  // encoding already matches the stream's configured encoding.
  if (!state.objectMode && typeof chunk === 'string') {
    encoding = encoding || state.defaultEncoding;
    if (encoding !== state.encoding) {
      chunk = new Buffer$4(chunk, encoding);
      encoding = '';
    }
  }

  return readableAddChunk$1(this, state, chunk, encoding, false);
};
// Unshift should *always* be something directly out of read(): the chunk is
// prepended to the buffer with no encoding conversion.
Readable$2.prototype.unshift = function(chunk) {
  return readableAddChunk$1(this, this._readableState, chunk, '', true);
};
// Core ingestion path shared by push() (append) and unshift() (prepend).
// Validates the chunk, treats null/undefined as EOF, decodes via the
// StringDecoder when one is configured, updates buffer/length bookkeeping,
// and fires 'readable' when a reader is waiting.
function readableAddChunk$1(stream, state, chunk, encoding, addToFront) {
  var er = chunkInvalid$1(state, chunk);
  if (er) {
    stream.emit('error', er);
  } else if (chunk === null || chunk === undefined) {
    // null/undefined chunk is the EOF marker
    state.reading = false;
    if (!state.ended)
      onEofChunk$1(stream, state);
  } else if (state.objectMode || chunk && chunk.length > 0) {
    if (state.ended && !addToFront) {
      var e = new Error('stream.push() after EOF');
      stream.emit('error', e);
    } else if (state.endEmitted && addToFront) {
      var e = new Error('stream.unshift() after end event');
      stream.emit('error', e);
    } else {
      // only decode appended chunks with no explicit encoding; unshifted
      // chunks came straight out of read() and are already decoded
      if (state.decoder && !addToFront && !encoding)
        chunk = state.decoder.write(chunk);
      // update the buffer info.
      state.length += state.objectMode ? 1 : chunk.length;
      if (addToFront) {
        state.buffer.unshift(chunk);
      } else {
        state.reading = false;
        state.buffer.push(chunk);
      }
      if (state.needReadable)
        emitReadable$1(stream);
      maybeReadMore$1(stream, state);
    }
  } else if (!addToFront) {
    // zero-length chunk in byte mode: ignored, but clears `reading`
    state.reading = false;
  }
  return needMoreData$1(state);
}
// Whether push() should report "give me more": true while the stream has
// not ended and is below the high-water mark, is completely empty (works
// around hwm=0 cases like the repl), or a reader is explicitly waiting
// (needReadable set by a large read()).
function needMoreData$1(state) {
  if (state.ended)
    return false;
  return state.needReadable ||
         state.length < state.highWaterMark ||
         state.length === 0;
}
// backwards compatibility.
// Set the string encoding for read() output; chunks are run through a
// StringDecoder so multi-byte characters are never split across reads.
Readable$2.prototype.setEncoding = function(enc) {
  if (!StringDecoder$1)
    StringDecoder$1 = string_decoder$1.StringDecoder; // lazy module-level cache
  this._readableState.decoder = new StringDecoder$1(enc);
  this._readableState.encoding = enc;
};
// Don't ever raise the high-water mark above 8MB.
var MAX_HWM$1 = 0x800000;

// Round n up to the next power of two (capped at MAX_HWM$1), so repeated
// large read(n) calls don't grow the hwm in tiny increments.
function roundUpToNextPowerOf2$1(n) {
  if (n >= MAX_HWM$1)
    return MAX_HWM$1;
  // classic bit-smearing trick: propagate the highest set bit downward,
  // then add one
  n--;
  n |= n >> 1;
  n |= n >> 2;
  n |= n >> 4;
  n |= n >> 8;
  n |= n >> 16;
  return n + 1;
}
/**
 * Decide how many bytes (or objects) read(n) should actually return given
 * the current buffered state. May mutate state: raises highWaterMark for
 * oversized requests, and sets needReadable when the data isn't there yet.
 *
 * @param {number|null} n - requested amount (null/NaN means "whatever's there")
 * @param {Object} state - the stream's ReadableState
 * @returns {number} amount to hand out now (0 means "nothing yet")
 */
function howMuchToRead$1(n, state) {
  if (state.length === 0 && state.ended)
    return 0;

  // object mode ignores n: hand out one item unless read(0) was asked
  if (state.objectMode)
    return n === 0 ? 0 : 1;

  if (n === null || isNaN(n)) {
    // flowing mode hands out exactly one buffered chunk at a time
    return (state.flowing && state.buffer.length)
      ? state.buffer[0].length
      : state.length;
  }

  if (n <= 0)
    return 0;

  // Requests above the target buffer level raise the water mark, rounded
  // to a power of two to avoid excessive tiny increases.
  if (n > state.highWaterMark)
    state.highWaterMark = roundUpToNextPowerOf2$1(n);

  if (n <= state.length)
    return n;

  // not enough buffered: unless we've ended, ask for a readable event
  // and return nothing for now
  if (!state.ended) {
    state.needReadable = true;
    return 0;
  }
  return state.length;
}
// you can override either this method, or the async _read(n) below.
//
// Readable#read([n]): pull up to n bytes (one object in objectMode)
// out of the internal buffer. Returns null when nothing can be
// returned yet. read(0) is a probe that may just re-emit 'readable';
// n === undefined/NaN means "give me whatever is buffered". Side
// effects: may call _read() to refill the buffer, and may emit
// 'readable' or 'end'.
Readable$2.prototype.read = function(n) {
  var state = this._readableState;
  // Mark that the consumer is actively reading; endReadable$1 refuses
  // to emit 'end' until this has happened at least once.
  state.calledRead = true;
  var nOrig = n;
  var ret;
  if (typeof n !== 'number' || n > 0)
    state.emittedReadable = false;
  // if we're doing read(0) to trigger a readable event, but we
  // already have a bunch of data in the buffer, then just trigger
  // the 'readable' event and move on.
  if (n === 0 &&
      state.needReadable &&
      (state.length >= state.highWaterMark || state.ended)) {
    emitReadable$1(this);
    return null;
  }
  n = howMuchToRead$1(n, state);
  // if we've ended, and we're now clear, then finish it up.
  if (n === 0 && state.ended) {
    ret = null;
    // In cases where the decoder did not receive enough data
    // to produce a full chunk, then immediately received an
    // EOF, state.buffer will contain [<Buffer >, <Buffer 00 ...>].
    // howMuchToRead will see this and coerce the amount to
    // read to zero (because it's looking at the length of the
    // first <Buffer > in state.buffer), and we'll end up here.
    //
    // This can only happen via state.decoder -- no other venue
    // exists for pushing a zero-length chunk into state.buffer
    // and triggering this behavior. In this case, we return our
    // remaining data and end the stream, if appropriate.
    if (state.length > 0 && state.decoder) {
      ret = fromList$1(n, state);
      state.length -= ret.length;
    }
    if (state.length === 0)
      endReadable$1(this);
    return ret;
  }
  // All the actual chunk generation logic needs to be
  // *below* the call to _read. The reason is that in certain
  // synthetic stream cases, such as passthrough streams, _read
  // may be a completely synchronous operation which may change
  // the state of the read buffer, providing enough data when
  // before there was *not* enough.
  //
  // So, the steps are:
  // 1. Figure out what the state of things will be after we do
  // a read from the buffer.
  //
  // 2. If that resulting state will trigger a _read, then call _read.
  // Note that this may be asynchronous, or synchronous.  Yes, it is
  // deeply ugly to write APIs this way, but that still doesn't mean
  // that the Readable class should behave improperly, as streams are
  // designed to be sync/async agnostic.
  // Take note if the _read call is sync or async (ie, if the read call
  // has returned yet), so that we know whether or not it's safe to emit
  // 'readable' etc.
  //
  // 3. Actually pull the requested chunks out of the buffer and return.
  // if we need a readable event, then we need to do some reading.
  var doRead = state.needReadable;
  // if we currently have less than the highWaterMark, then also read some
  if (state.length - n <= state.highWaterMark)
    doRead = true;
  // however, if we've ended, then there's no point, and if we're already
  // reading, then it's unnecessary.
  if (state.ended || state.reading)
    doRead = false;
  if (doRead) {
    state.reading = true;
    // sync=true while _read runs: if it calls back synchronously,
    // emitReadable$1 must defer to the next tick to avoid recursion.
    state.sync = true;
    // if the length is currently zero, then we *need* a readable event.
    if (state.length === 0)
      state.needReadable = true;
    // call internal read method
    this._read(state.highWaterMark);
    state.sync = false;
  }
  // If _read called its callback synchronously, then `reading`
  // will be false, and we need to re-evaluate how much data we
  // can return to the user.
  if (doRead && !state.reading)
    n = howMuchToRead$1(nOrig, state);
  if (n > 0)
    ret = fromList$1(n, state);
  else
    ret = null;
  if (ret === null) {
    state.needReadable = true;
    n = 0;
  }
  state.length -= n;
  // If we have nothing in the buffer, then we want to know
  // as soon as we *do* get something into the buffer.
  if (state.length === 0 && !state.ended)
    state.needReadable = true;
  // If we happened to read() exactly the remaining amount in the
  // buffer, and the EOF has been seen at this point, then make sure
  // that we emit 'end' on the very next tick.
  if (state.ended && !state.endEmitted && state.length === 0)
    endReadable$1(this);
  return ret;
};
// Validate a chunk entering a readable stream's buffer. In byte mode
// only Buffers, strings, null, and undefined are allowed; objectMode
// accepts anything. Returns a TypeError for bad chunks, else null.
function chunkInvalid$1(state, chunk) {
  if (state.objectMode) return null;
  if (chunk === null || chunk === undefined) return null;
  if (typeof chunk === 'string' || Buffer$4.isBuffer(chunk)) return null;
  return new TypeError('Invalid non-string/buffer chunk');
}
// Mark the read side as ended. Any bytes still held inside the string
// decoder are flushed into the buffer first, then either 'readable'
// (data remains) or the end sequence is triggered.
function onEofChunk$1(stream, state) {
  if (state.decoder && !state.ended) {
    var tail = state.decoder.end();
    if (tail && tail.length) {
      state.buffer.push(tail);
      state.length += state.objectMode ? 1 : tail.length;
    }
  }
  state.ended = true;
  // Leftover data must be announced so the consumer drains it;
  // otherwise the stream can finish immediately.
  if (state.length > 0) {
    emitReadable$1(stream);
  } else {
    endReadable$1(stream);
  }
}
// Emit 'readable' at most once per buffer refill (guarded by
// emittedReadable). While a synchronous _read is in progress
// (state.sync), the emit is deferred to the next tick: firing inline
// could recurse back into read() and blow the stack. The deferred
// path may still trip a nextTick recursion warning, which is the
// lesser evil.
function emitReadable$1(stream) {
  var state = stream._readableState;
  state.needReadable = false;
  if (state.emittedReadable) return;
  state.emittedReadable = true;
  if (state.sync) {
    process.nextTick(function() {
      emitReadable_$1(stream);
    });
  } else {
    emitReadable_$1(stream);
  }
}
// The actual event dispatch, split out so it can be scheduled.
function emitReadable_$1(stream) {
  stream.emit('readable');
}
// After the consumer drains some data, opportunistically refill the
// buffer up to highWaterMark. The refill loop runs on the next tick
// and is guarded by `readingMore` so only one loop is scheduled at a
// time. It stops as soon as a read(0) fails to produce new data, or
// when reading/flowing/ended makes further probing pointless.
function maybeReadMore$1(stream, state) {
  if (state.readingMore) return;
  state.readingMore = true;
  process.nextTick(function() {
    maybeReadMore_$1(stream, state);
  });
}
function maybeReadMore_$1(stream, state) {
  var prevLen = state.length;
  while (!state.reading && !state.flowing && !state.ended &&
         state.length < state.highWaterMark) {
    stream.read(0);
    // No new data arrived: stop spinning.
    if (prevLen === state.length) break;
    prevLen = state.length;
  }
  state.readingMore = false;
}
// Abstract data source hook, overridden by concrete streams: call
// cb(er, data) with data of length <= n (length is arbitrary for
// virtual, non-string/non-buffer streams). The base implementation
// only reports the missing override.
Readable$2.prototype._read = function(n) {
  var err = new Error('not implemented');
  this.emit('error', err);
};
// Readable#pipe(dest[, pipeOpts]): connect this stream's output to a
// writable destination. Registers drain/error/close/finish/unpipe
// handlers, starts flow mode, and returns `dest` so pipes can chain.
// pipeOpts.end === false keeps dest open after this stream ends
// (stdout/stderr are never auto-ended).
Readable$2.prototype.pipe = function(dest, pipeOpts) {
  var src = this;
  var state = this._readableState;
  // state.pipes holds: null (0 dests), the dest itself (1), or an
  // array (2+) — unpipe/flow mirror this representation.
  switch (state.pipesCount) {
    case 0:
      state.pipes = dest;
      break;
    case 1:
      state.pipes = [state.pipes, dest];
      break;
    default:
      state.pipes.push(dest);
      break;
  }
  state.pipesCount += 1;
  var doEnd = (!pipeOpts || pipeOpts.end !== false) &&
              dest !== process.stdout &&
              dest !== process.stderr;
  var endFn = doEnd ? onend : cleanup;
  if (state.endEmitted)
    process.nextTick(endFn);
  else
    src.once('end', endFn);
  dest.on('unpipe', onunpipe);
  function onunpipe(readable) {
    if (readable !== src) return;
    cleanup();
  }
  function onend() {
    dest.end();
  }
  // when the dest drains, it reduces the awaitDrain counter
  // on the source.  This would be more elegant with a .once()
  // handler in flow(), but adding and removing repeatedly is
  // too slow.
  var ondrain = pipeOnDrain$1(src);
  dest.on('drain', ondrain);
  function cleanup() {
    // cleanup event handlers once the pipe is broken
    dest.removeListener('close', onclose);
    dest.removeListener('finish', onfinish);
    dest.removeListener('drain', ondrain);
    dest.removeListener('error', onerror);
    dest.removeListener('unpipe', onunpipe);
    src.removeListener('end', onend);
    src.removeListener('end', cleanup);
    // if the reader is waiting for a drain event from this
    // specific writer, then it would cause it to never start
    // flowing again.
    // So, if this is awaiting a drain, then we just call it now.
    // If we don't know, then assume that we are waiting for one.
    if (!dest._writableState || dest._writableState.needDrain)
      ondrain();
  }
  // if the dest has an error, then stop piping into it.
  // however, don't suppress the throwing behavior for this.
  function onerror(er) {
    unpipe();
    dest.removeListener('error', onerror);
    if (EE$1.listenerCount(dest, 'error') === 0)
      dest.emit('error', er);
  }
  // This is a brutally ugly hack to make sure that our error handler
  // is attached before any userland ones.  NEVER DO THIS.
  if (!dest._events || !dest._events.error)
    dest.on('error', onerror);
  else if (isarray$1(dest._events.error))
    dest._events.error.unshift(onerror);
  else
    dest._events.error = [onerror, dest._events.error];
  // Both close and finish should trigger unpipe, but only once.
  function onclose() {
    dest.removeListener('finish', onfinish);
    unpipe();
  }
  dest.once('close', onclose);
  function onfinish() {
    dest.removeListener('close', onclose);
    unpipe();
  }
  dest.once('finish', onfinish);
  function unpipe() {
    src.unpipe(dest);
  }
  // tell the dest that it's being piped to
  dest.emit('pipe', src);
  // start the flow if it hasn't been started already.
  if (!state.flowing) {
    // the handler that waits for readable events after all
    // the data gets sucked out in flow.
    // This would be easier to follow with a .once() handler
    // in flow(), but that is too slow.
    this.on('readable', pipeOnReadable$1);
    state.flowing = true;
    process.nextTick(function() {
      flow$1(src);
    });
  }
  return dest;
};
// Build the shared 'drain' handler for a pipe destination: each drain
// decrements the source's awaitDrain counter, and once every waiting
// destination has drained, the flow loop is restarted.
function pipeOnDrain$1(src) {
  return function() {
    var rs = src._readableState;
    rs.awaitDrain -= 1;
    if (rs.awaitDrain === 0) flow$1(src);
  };
}
// The pipe pump: repeatedly read() chunks and write them to every
// destination, emitting 'data' for each chunk. Stops when any
// destination signals backpressure (write returned false →
// awaitDrain), when all destinations have been unpiped, or when the
// source runs dry (ranOut → resumed by pipeOnReadable$1).
function flow$1(src) {
  var state = src._readableState;
  var chunk;
  state.awaitDrain = 0;
  // Closure over `chunk`; the i/list parameters exist only so it can
  // double as a forEach$2 callback — the single-destination call
  // below passes just the dest.
  function write(dest, i, list) {
    var written = dest.write(chunk);
    if (false === written) {
      state.awaitDrain++;
    }
  }
  while (state.pipesCount && null !== (chunk = src.read())) {
    if (state.pipesCount === 1)
      write(state.pipes);
    else
      forEach$2(state.pipes, write);
    src.emit('data', chunk);
    // if anyone needs a drain, then we have to wait for that.
    if (state.awaitDrain > 0)
      return;
  }
  // if every destination was unpiped, either before entering this
  // function, or in the while loop, then stop flowing.
  //
  // NB: This is a pretty rare edge case.
  if (state.pipesCount === 0) {
    state.flowing = false;
    // if there were data event listeners added, then switch to old mode.
    if (EE$1.listenerCount(src, 'data') > 0)
      emitDataEvents$1(src);
    return;
  }
  // at this point, no one needed a drain, so we just ran out of data
  // on the next readable event, start it over again.
  state.ranOut = true;
}
// 'readable' handler installed while piping: if the flow loop
// previously drained the source dry (ranOut), restart it now that new
// data has arrived. Bound with the source stream as `this`.
function pipeOnReadable$1() {
  var rs = this._readableState;
  if (!rs.ranOut) return;
  rs.ranOut = false;
  flow$1(this);
}
// Readable#unpipe([dest]): detach one destination (or all when dest
// is omitted). Emits 'unpipe' on each removed destination and keeps
// state.pipes in its 0/1/many representation (null / dest / array).
// Always returns `this` for chaining.
Readable$2.prototype.unpipe = function(dest) {
  var state = this._readableState;
  // if we're not piping anywhere, then do nothing.
  if (state.pipesCount === 0)
    return this;
  // just one destination.  most common case.
  if (state.pipesCount === 1) {
    // passed in one, but it's not the right one.
    if (dest && dest !== state.pipes)
      return this;
    if (!dest)
      dest = state.pipes;
    // got a match.
    state.pipes = null;
    state.pipesCount = 0;
    this.removeListener('readable', pipeOnReadable$1);
    state.flowing = false;
    if (dest)
      dest.emit('unpipe', this);
    return this;
  }
  // slow case. multiple pipe destinations.
  if (!dest) {
    // remove all.
    var dests = state.pipes;
    var len = state.pipesCount;
    state.pipes = null;
    state.pipesCount = 0;
    this.removeListener('readable', pipeOnReadable$1);
    state.flowing = false;
    for (var i = 0; i < len; i++)
      dests[i].emit('unpipe', this);
    return this;
  }
  // try to find the right one.
  var i = indexOf$1(state.pipes, dest);
  if (i === -1)
    return this;
  state.pipes.splice(i, 1);
  state.pipesCount -= 1;
  // Collapse a one-element array back to the bare-destination form.
  if (state.pipesCount === 1)
    state.pipes = state.pipes[0];
  dest.emit('unpipe', this);
  return this;
};
// set up data events if they are asked for
// Ensure readable listeners eventually get something
//
// Overrides EventEmitter#on: attaching a 'data' listener switches the
// stream into old-style flowing mode; attaching a 'readable' listener
// primes a read(0) (or re-emits 'readable' if data is already
// buffered) so the listener is guaranteed to fire eventually.
Readable$2.prototype.on = function(ev, fn) {
  var res = stream.prototype.on.call(this, ev, fn);
  if (ev === 'data' && !this._readableState.flowing)
    emitDataEvents$1(this);
  if (ev === 'readable' && this.readable) {
    var state = this._readableState;
    // Only on the first 'readable' listener.
    if (!state.readableListening) {
      state.readableListening = true;
      state.emittedReadable = false;
      state.needReadable = true;
      if (!state.reading) {
        this.read(0);
      } else if (state.length) {
        emitReadable$1(this);
      }
    }
  }
  return res;
};
// addListener is a straight alias, as on EventEmitter.
Readable$2.prototype.addListener = Readable$2.prototype.on;
// pause()/resume() are remnants of the legacy readable stream API.
// Calling either converts the stream into old-style data-event mode
// via emitDataEvents$1 (which also replaces these methods with
// closure-based versions) and emits the corresponding event.
Readable$2.prototype.resume = function() {
  var self = this;
  emitDataEvents$1(self);
  self.read(0);
  self.emit('resume');
};
Readable$2.prototype.pause = function() {
  var self = this;
  // true → start out paused after the conversion.
  emitDataEvents$1(self, true);
  self.emit('pause');
};
// Convert a streams2 Readable into an old-style (streams1) stream:
// 'readable' events are drained into 'data' events, and pause/resume
// become simple gates on that drain loop. One-way: throws if the
// stream is already flowing. `startPaused` starts the converted
// stream in the paused state.
function emitDataEvents$1(stream$1, startPaused) {
  var state = stream$1._readableState;
  if (state.flowing) {
    // https://github.com/isaacs/readable-stream/issues/16
    throw new Error('Cannot switch to old mode now.');
  }
  var paused = startPaused || false;
  var readable = false;
  // convert to an old-style stream.
  stream$1.readable = true;
  // Restore the base-class pipe/on so the streams2 overrides no
  // longer interfere.
  stream$1.pipe = stream.prototype.pipe;
  stream$1.on = stream$1.addListener = stream.prototype.on;
  stream$1.on('readable', function() {
    readable = true;
    var c;
    // Drain everything available unless paused.
    while (!paused && (null !== (c = stream$1.read())))
      stream$1.emit('data', c);
    if (c === null) {
      readable = false;
      stream$1._readableState.needReadable = true;
    }
  });
  stream$1.pause = function() {
    paused = true;
    this.emit('pause');
  };
  stream$1.resume = function() {
    paused = false;
    // If data was left undrained, re-fire 'readable' on the next tick
    // to restart the drain loop; otherwise probe with read(0).
    if (readable)
      process.nextTick(function() {
        stream$1.emit('readable');
      });
    else
      this.read(0);
    this.emit('resume');
  };
  // now make it start, just in case it hadn't already.
  stream$1.emit('readable');
}
// wrap an old-style stream as the async data source.
// This is *not* part of the readable stream interface.
// It is an ugly unfortunate mess of history.
//
// Readable#wrap(stream): adapt an old-style (streams1) stream so this
// streams2 Readable serves its data. 'data' chunks are pushed into
// our buffer (pausing the source on backpressure), 'end' finishes the
// stream, other methods and key events are proxied through.
Readable$2.prototype.wrap = function(stream) {
  var state = this._readableState;
  var paused = false;
  var self = this;
  stream.on('end', function() {
    // Flush any partial characters held by the string decoder.
    if (state.decoder && !state.ended) {
      var chunk = state.decoder.end();
      if (chunk && chunk.length)
        self.push(chunk);
    }
    self.push(null);
  });
  stream.on('data', function(chunk) {
    if (state.decoder)
      chunk = state.decoder.write(chunk);
    // don't skip over falsy values in objectMode
    //if (state.objectMode && util.isNullOrUndefined(chunk))
    if (state.objectMode && (chunk === null || chunk === undefined))
      return;
    else if (!state.objectMode && (!chunk || !chunk.length))
      return;
    var ret = self.push(chunk);
    // push returned false: our buffer is full, so pause the source
    // until _read asks for more.
    if (!ret) {
      paused = true;
      stream.pause();
    }
  });
  // proxy all the other methods.
  // important when wrapping filters and duplexes.
  for (var i in stream) {
    if (typeof stream[i] === 'function' &&
        typeof this[i] === 'undefined') {
      // IIFE captures the current method name for the proxy closure.
      this[i] = function(method) { return function() {
        return stream[method].apply(stream, arguments);
      }}(i);
    }
  }
  // proxy certain important events.
  var events = ['error', 'close', 'destroy', 'pause', 'resume'];
  forEach$2(events, function(ev) {
    stream.on(ev, self.emit.bind(self, ev));
  });
  // when we try to consume some more bytes, simply unpause the
  // underlying stream.
  self._read = function(n) {
    if (paused) {
      paused = false;
      stream.resume();
    }
  };
  return self;
};
// exposed for testing purposes only.
Readable$2._fromList = fromList$1;
// Pluck off n bytes from an array of buffers.
// Length is the combined lengths of all the buffers in the list.
//
// `state.length` is NOT updated here — callers adjust it themselves.
// In string mode (state.decoder set) the list holds strings and the
// result is a string; otherwise Buffers in, Buffer out. n falsy or
// >= total length means "take everything".
function fromList$1(n, state) {
  var list = state.buffer;
  var length = state.length;
  var stringMode = !!state.decoder;
  var objectMode = !!state.objectMode;
  var ret;
  // nothing in the list, definitely empty.
  if (list.length === 0)
    return null;
  if (length === 0)
    ret = null;
  else if (objectMode)
    ret = list.shift();
  else if (!n || n >= length) {
    // read it all, truncate the array.
    if (stringMode)
      ret = list.join('');
    else
      ret = Buffer$4.concat(list, length);
    list.length = 0;
  } else {
    // read just some of it.
    if (n < list[0].length) {
      // just take a part of the first list item.
      // slice is the same for buffers and strings.
      var buf = list[0];
      ret = buf.slice(0, n);
      list[0] = buf.slice(n);
    } else if (n === list[0].length) {
      // first list is a perfect match
      ret = list.shift();
    } else {
      // complex case.
      // we have enough to cover it, but it spans past the first buffer.
      if (stringMode)
        ret = '';
      else
        ret = new Buffer$4(n);
      var c = 0;
      // Always consume from the head (list[0]); fully-used chunks are
      // shifted off, a partially-used head is trimmed in place.
      for (var i = 0, l = list.length; i < l && c < n; i++) {
        var buf = list[0];
        var cpy = Math.min(n - c, buf.length);
        if (stringMode)
          ret += buf.slice(0, cpy);
        else
          buf.copy(ret, c, 0, cpy);
        if (cpy < buf.length)
          list[0] = buf.slice(cpy);
        else
          list.shift();
        c += cpy;
      }
    }
  }
  return ret;
}
// Finish the read side. Once the consumer has drained everything and
// has called read() at least once, 'end' is scheduled for the next
// tick — re-checked there, since a last-moment unshift may have put
// data back into the buffer.
function endReadable$1(stream) {
  var state = stream._readableState;
  // Reaching here with data still buffered is a library bug.
  if (state.length > 0)
    throw new Error('endReadable called on non-empty stream');
  if (state.endEmitted || !state.calledRead) return;
  state.ended = true;
  process.nextTick(function() {
    if (state.endEmitted || state.length !== 0) return;
    state.endEmitted = true;
    stream.readable = false;
    stream.emit('end');
  });
}
// Minimal Array#forEach stand-in: invokes f(value, index) in order
// over the array-like xs.
function forEach$2(xs, f) {
  var len = xs.length;
  for (var idx = 0; idx < len; idx++) {
    f(xs[idx], idx);
  }
}
// Linear scan for x in xs using strict equality; returns the first
// matching index or -1 (Array#indexOf equivalent, kept for very old
// environments).
function indexOf$1(xs, x) {
  for (var idx = 0, count = xs.length; idx < count; idx++) {
    if (xs[idx] === x) return idx;
  }
  return -1;
}
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a duplex stream is just a stream that is both readable and writable.
// Since JS doesn't have multiple prototypal inheritance, this class
// prototypally inherits from Readable, and then parasitically from
// Writable.
// Duplex module wiring: export alias, an ES3-safe Object.keys
// fallback, and the "parasitic" inheritance — Duplex inherits
// Readable's prototype and copies Writable's methods over, since JS
// has no multiple prototypal inheritance.
var _stream_duplex$1 = Duplex$1;
/*<replacement>*/
var objectKeys$2 = Object.keys || function (obj) {
  var keys = [];
  for (var key in obj) keys.push(key);
  return keys;
};
/*</replacement>*/
/*<replacement>*/
util.inherits = inherits_browser;
/*</replacement>*/
util.inherits(Duplex$1, _stream_readable$1);
// Mix in every Writable prototype method Duplex doesn't already have.
forEach$3(objectKeys$2(_stream_writable$1.prototype), function(method) {
  if (!Duplex$1.prototype[method])
    Duplex$1.prototype[method] = _stream_writable$1.prototype[method];
});
// Duplex: a stream that is both readable and writable. Runs both the
// Readable and Writable constructors on `this`. Options:
// readable:false / writable:false disable one side, and
// allowHalfOpen:false auto-ends the write side once reading ends
// (enforced by onend$1).
function Duplex$1(options) {
  if (!(this instanceof Duplex$1)) return new Duplex$1(options);
  _stream_readable$1.call(this, options);
  _stream_writable$1.call(this, options);
  var opts = options || {};
  if (opts.readable === false) this.readable = false;
  if (opts.writable === false) this.writable = false;
  // Half-open is the default; only an explicit false turns it off.
  this.allowHalfOpen = opts.allowHalfOpen !== false;
  this.once('end', onend$1);
}
// The no-half-open enforcer: runs as the Duplex's 'end' handler. When
// half-open mode is off and the write side hasn't already ended, end
// it too — but only on the next tick, so writes still queued in this
// tick go through first.
function onend$1() {
  if (this.allowHalfOpen || this._writableState.ended) return;
  process.nextTick(this.end.bind(this));
}
// Tiny forEach helper (local duplicate of the readable-side one):
// calls f(element, index) over xs in order.
function forEach$3(xs, f) {
  var total = xs.length;
  for (var k = 0; k < total; k += 1) {
    f(xs[k], k);
  }
}
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// A bit simpler than readable streams.
// Implement an async ._write(chunk, cb), and it'll handle all
// the drain event emission and buffering.
// Writable module wiring: export alias, the browser Buffer shim, the
// WritableState class exposed as a static, and prototype inheritance
// from the base Stream class.
var _stream_writable$1 = Writable$1;
/*<replacement>*/
var Buffer$5 = buffer.Buffer;
/*</replacement>*/
Writable$1.WritableState = WritableState$1;
/*<replacement>*/
util.inherits = inherits_browser;
/*</replacement>*/
util.inherits(Writable$1, stream);
// One buffered write request: the chunk waiting to be written, its
// encoding, and the user callback to run once the write completes.
function WriteReq$1(chunk, encoding, cb) {
  this.chunk = chunk;
  this.encoding = encoding;
  this.callback = cb;
}
// Per-stream bookkeeping for the writable side.
function WritableState$1(options, stream) {
  options = options || {};

  // write() starts returning false once `length` reaches this mark.
  // 0 is a valid setting and means "always signal backpressure".
  var hwm = options.highWaterMark;
  this.highWaterMark = (hwm || hwm === 0) ? hwm : 16 * 1024;

  // Object streams count chunks instead of bytes.
  this.objectMode = !!options.objectMode;

  // Coerce the water mark to an integer.
  this.highWaterMark = ~~this.highWaterMark;

  this.needDrain = false;
  // Lifecycle flags: end() entered / end() returned / 'finish' fired.
  this.ending = false;
  this.ended = false;
  this.finished = false;

  // Strings are normally converted to Buffers before _write; some
  // node-core streams opt out (decodeStrings:false) to handle strings
  // at a lower level.
  var noDecode = options.decodeStrings === false;
  this.decodeStrings = !noDecode;

  // Historically crypto streams defaulted to 'binary', so the default
  // encoding is configurable; everything else uses 'utf8'.
  this.defaultEncoding = options.defaultEncoding || 'utf8';

  // Bytes (or objects) accepted by write() but not yet flushed to the
  // underlying sink.
  this.length = 0;

  // A _write call is currently in flight.
  this.writing = false;

  // True while _write might still complete synchronously; starts true
  // so that nothing meant for "later" runs before the first write.
  this.sync = true;

  // Guard against re-entrant draining of the buffered queue.
  this.bufferProcessing = false;

  // Completion callback handed to _write(chunk, encoding, cb).
  this.onwrite = function(er) {
    onwrite$1(stream, er);
  };

  // User callback and byte count of the write currently in flight.
  this.writecb = null;
  this.writelen = 0;

  // Queue of WriteReq$1 entries waiting their turn.
  this.buffer = [];

  // Set once 'error' was emitted so it is never thrown twice.
  this.errorEmitted = false;
}
// Writable stream constructor. The guard also admits Duplex instances
// because the Writable ctor is applied to Duplexes even though they
// are instanceof Readable, not Writable.
function Writable$1(options) {
  var Duplex = _stream_duplex$1;
  if (!(this instanceof Writable$1) && !(this instanceof Duplex))
    return new Writable$1(options);
  this._writableState = new WritableState$1(options, this);
  // legacy flag consulted by old-style stream consumers.
  this.writable = true;
  stream.call(this);
}
// Writable streams are sinks: piping *from* one is always a mistake,
// so the inherited pipe is replaced with an error emitter.
Writable$1.prototype.pipe = function() {
  var err = new Error('Cannot pipe. Not readable.');
  this.emit('error', err);
};
// Reject a write() that arrives after end(): 'error' is emitted
// synchronously, while the same error reaches the callback only on
// the next tick.
// TODO(upstream): defer error events consistently everywhere, not
// just the cb.
function writeAfterEnd$1(stream, state, cb) {
  var er = new Error('write after end');
  stream.emit('error', er);
  process.nextTick(function() {
    cb(er);
  });
}
// Validate a chunk handed to write(). In byte mode only Buffers,
// strings, null, and undefined are acceptable; objectMode admits
// anything (chunks then count as length 1 against the water mark).
// Invalid chunks emit 'error' immediately and fail the callback on
// the next tick. Returns whether the chunk was acceptable.
function validChunk$1(stream, state, chunk, cb) {
  var acceptable =
    state.objectMode ||
    chunk === null ||
    chunk === undefined ||
    typeof chunk === 'string' ||
    Buffer$5.isBuffer(chunk);
  if (!acceptable) {
    var er = new TypeError('Invalid non-string/buffer chunk');
    stream.emit('error', er);
    process.nextTick(function() {
      cb(er);
    });
  }
  return acceptable;
}
// Writable#write(chunk[, encoding][, cb]): accept a chunk, either
// writing it immediately or queueing it behind an in-flight write.
// Returns false once the internal buffer reaches highWaterMark (the
// caller should then wait for 'drain'). Writing after end() emits
// 'error'.
Writable$1.prototype.write = function(chunk, encoding, cb) {
  var state = this._writableState;
  var ret = false;
  // write(chunk, cb) form: the encoding slot holds the callback.
  if (typeof encoding === 'function') {
    cb = encoding;
    encoding = null;
  }
  if (Buffer$5.isBuffer(chunk))
    encoding = 'buffer';
  else if (!encoding)
    encoding = state.defaultEncoding;
  if (typeof cb !== 'function')
    cb = function() {};
  if (state.ended)
    writeAfterEnd$1(this, state, cb);
  else if (validChunk$1(this, state, chunk, cb))
    ret = writeOrBuffer$1(this, state, chunk, encoding, cb);
  return ret;
};
// Convert a string chunk into a Buffer when the stream is in byte
// mode and string decoding has not been disabled; every other chunk
// passes through untouched.
function decodeChunk$1(state, chunk, encoding) {
  var shouldDecode =
    !state.objectMode &&
    state.decodeStrings !== false &&
    typeof chunk === 'string';
  return shouldDecode ? new Buffer$5(chunk, encoding) : chunk;
}
// if we're already writing something, then just put this
// in the queue, and wait our turn.  Otherwise, call _write
// If we return false, then we need a drain event, so set that flag.
function writeOrBuffer$1(stream, state, chunk, encoding, cb) {
  chunk = decodeChunk$1(state, chunk, encoding);
  // decodeChunk may have converted a string into a Buffer.
  if (Buffer$5.isBuffer(chunk))
    encoding = 'buffer';
  // objectMode counts chunks; byte mode counts bytes.
  var len = state.objectMode ? 1 : chunk.length;
  state.length += len;
  var ret = state.length < state.highWaterMark;
  // we must ensure that previous needDrain will not be reset to false.
  if (!ret)
    state.needDrain = true;
  if (state.writing)
    state.buffer.push(new WriteReq$1(chunk, encoding, cb));
  else
    doWrite$1(stream, state, len, chunk, encoding, cb);
  return ret;
}
// Hand one chunk to the underlying _write, recording the in-flight
// bookkeeping first. `sync` is true only for the duration of the
// _write call — if onwrite fires while it is set, the write completed
// synchronously and its callbacks must be deferred.
function doWrite$1(stream, state, len, chunk, encoding, cb) {
  state.writelen = len;
  state.writecb = cb;
  state.writing = true;
  state.sync = true;
  stream._write(chunk, encoding, state.onwrite);
  state.sync = false;
}
// Report a failed write. The user callback is deferred to the next
// tick when the failure was synchronous (so it never runs before
// write() has returned); 'error' is then emitted once, with
// errorEmitted set so it is not thrown again elsewhere.
function onwriteError$1(stream, state, sync, er, cb) {
  if (sync) {
    process.nextTick(function() {
      cb(er);
    });
  } else {
    cb(er);
  }
  stream._writableState.errorEmitted = true;
  stream.emit('error', er);
}
// Clear the in-flight write bookkeeping once _write has called back,
// releasing the bytes that write had accounted for.
function onwriteStateUpdate$1(state) {
  state.writing = false;
  state.writecb = null;
  state.length -= state.writelen;
  state.writelen = 0;
}
// Common completion path for every _write (invoked via
// state.onwrite). Updates bookkeeping, then either reports the error
// or drains the queued writes and runs the user callback. When the
// _write completed synchronously (state.sync still true), the
// afterWrite step is deferred to the next tick so user callbacks
// never fire before write() has returned.
function onwrite$1(stream, er) {
  var state = stream._writableState;
  var sync = state.sync;
  var cb = state.writecb;
  onwriteStateUpdate$1(state);
  if (er)
    onwriteError$1(stream, state, sync, er, cb);
  else {
    // Check if we're actually ready to finish, but don't emit yet
    var finished = needFinish$1(stream, state);
    if (!finished && !state.bufferProcessing && state.buffer.length)
      clearBuffer$1(stream, state);
    if (sync) {
      process.nextTick(function() {
        afterWrite$1(stream, state, finished, cb);
      });
    } else {
      afterWrite$1(stream, state, finished, cb);
    }
  }
}
// Post-write step (always on a clean tick): maybe emit 'drain', run
// the user's write callback, then fire 'finish' when this write was
// the last one outstanding.
function afterWrite$1(stream, state, finished, cb) {
  if (!finished) onwriteDrain$1(stream, state);
  cb();
  if (finished) finishMaybe$1(stream, state);
}
// Emit 'drain' once the buffer has emptied, but only if some write()
// call previously signalled backpressure (needDrain). Callers invoke
// this from a nextTick context so 'drain' cannot fire before the
// write() that returned false has actually returned.
function onwriteDrain$1(stream, state) {
  if (state.length !== 0 || !state.needDrain) return;
  state.needDrain = false;
  stream.emit('drain');
}
// if there's something in the buffer waiting, then process it
//
// Drains queued WriteReq entries in order. If a doWrite goes
// asynchronous (state.writing still true afterwards), the loop stops
// early — c is incremented past the in-flight entry before breaking
// so it is not retained in the buffer. Note `var c` is
// function-scoped and deliberately read after the loop.
function clearBuffer$1(stream, state) {
  state.bufferProcessing = true;
  for (var c = 0; c < state.buffer.length; c++) {
    var entry = state.buffer[c];
    var chunk = entry.chunk;
    var encoding = entry.encoding;
    var cb = entry.callback;
    var len = state.objectMode ? 1 : chunk.length;
    doWrite$1(stream, state, len, chunk, encoding, cb);
    // if we didn't call the onwrite immediately, then
    // it means that we need to wait until it does.
    // also, that means that the chunk and cb are currently
    // being processed, so move the buffer counter past them.
    if (state.writing) {
      c++;
      break;
    }
  }
  state.bufferProcessing = false;
  // Keep whatever was not processed for the next drain pass.
  if (c < state.buffer.length)
    state.buffer = state.buffer.slice(c);
  else
    state.buffer.length = 0;
}
// Abstract sink hook: concrete writables must override _write(chunk,
// encoding, cb); the base implementation just fails the callback.
Writable$1.prototype._write = function(chunk, encoding, cb) {
  var err = new Error('not implemented');
  cb(err);
};
// Writable#end([chunk][, encoding][, cb]): optionally write one final
// chunk, then move the stream into the ending state. `cb` is invoked
// once 'finish' fires.
Writable$1.prototype.end = function(chunk, encoding, cb) {
  var state = this._writableState;
  // Argument shuffles for the end(cb) and end(chunk, cb) forms.
  if (typeof chunk === 'function') {
    cb = chunk;
    chunk = null;
    encoding = null;
  } else if (typeof encoding === 'function') {
    cb = encoding;
    encoding = null;
  }
  if (typeof chunk !== 'undefined' && chunk !== null)
    this.write(chunk, encoding);
  // ignore unnecessary end() calls.
  if (!state.ending && !state.finished)
    endWritable$1(this, state, cb);
};
// A writable is ready to emit 'finish' once end() has been called,
// the buffer has fully drained, no write is in flight, and 'finish'
// hasn't already fired.
function needFinish$1(stream, state) {
  if (!state.ending) return false;
  if (state.length !== 0) return false;
  return !state.finished && !state.writing;
}
// Emit 'finish' if the stream has fully drained after end(). Returns
// whether the finished condition held (and thus whether the flag was
// just set).
function finishMaybe$1(stream, state) {
  var ready = needFinish$1(stream, state);
  if (!ready) return ready;
  state.finished = true;
  stream.emit('finish');
  return ready;
}
// Transition the stream into the ending state: try to finish
// immediately, and arrange for `cb` to run once 'finish' happens —
// on the next tick if it already has, otherwise when the event fires.
function endWritable$1(stream, state, cb) {
  state.ending = true;
  finishMaybe$1(stream, state);
  if (cb) {
    if (state.finished) {
      process.nextTick(cb);
    } else {
      stream.once('finish', cb);
    }
  }
  state.ended = true;
}
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a transform stream is a readable/writable stream where you do
// something with the data. Sometimes it's called a "filter",
// but that's not a great name for it, since that implies a thing where
// some bits pass through, and others are simply ignored. (That would
// be a valid example of a transform, of course.)
//
// While the output is causally related to the input, it's not a
// necessarily symmetric or synchronous transformation. For example,
// a zlib stream might take multiple plain-text writes(), and then
// emit a single compressed chunk some time in the future.
//
// Here's how this works:
//
// The Transform stream has all the aspects of the readable and writable
// stream classes. When you write(chunk), that calls _write(chunk,cb)
// internally, and returns false if there's a lot of pending writes
// buffered up. When you call read(), that calls _read(n) until
// there's enough pending readable data buffered up.
//
// In a transform stream, the written data is placed in a buffer. When
// _read(n) is called, it transforms the queued up data, calling the
// buffered _write cb's as it consumes chunks. If consuming a single
// written chunk would result in multiple output chunks, then the first
// outputted bit calls the readcb, and subsequent chunks just go into
// the read buffer, and will cause it to emit 'readable' if necessary.
//
// This way, back-pressure is actually determined by the reading side,
// since _read has to be called to start processing a new chunk. However,
// a pathological inflate type of transform can cause excessive buffering
// here. For example, imagine a stream where every byte of input is
// interpreted as an integer from 0-255, and then results in that many
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
// 1kb of data being output. In this case, you could write a very small
// amount of input, and end up with a very large amount of output. In
// such a pathological inflating mechanism, there'd be no way to tell
// the system to stop doing the transform. A single 4MB write could
// cause the system to run out of memory.
//
// However, even in such a pathological case, only a single written chunk
// would be consumed, and then the rest would wait (un-transformed) until
// the results of the previous transformed chunk were consumed.
// Module export for the bundled readable-stream Transform class.
var _stream_transform$1 = Transform$1;
/*<replacement>*/
// Swap in the browser-safe inherits shim before wiring the prototype chain.
util.inherits = inherits_browser;
/*</replacement>*/
// Transform extends Duplex: writable input side, readable output side.
util.inherits(Transform$1, _stream_duplex$1);
// Per-instance bookkeeping for the transform machinery: holds the pending
// written chunk and its callback plus the in-flight/needed flags.
function TransformState$1(options, stream) {
    // Pending write callback and chunk, consumed by _read().
    this.writecb = null;
    this.writechunk = null;
    // Set when the readable side asked for data before any was written.
    this.needTransform = false;
    // True while a _transform() call is outstanding.
    this.transforming = false;
    // Completion trampoline handed to _transform() by _read().
    this.afterTransform = function(er, data) {
        return afterTransform$1(stream, er, data);
    };
}
// Completion callback for a single _transform() call: clears the pending
// write, pushes any produced data, acks the original write() via its cb,
// and re-triggers _read() if the readable side still wants more data.
function afterTransform$1(stream, er, data) {
    var ts = stream._transformState;
    ts.transforming = false;
    var cb = ts.writecb;
    // _transform() completing twice (or with no pending write) is a
    // programming error in the subclass.
    if (!cb)
        return stream.emit('error', new Error('no writecb in Transform class'));
    ts.writechunk = null;
    ts.writecb = null;
    // push() before cb() so output is visible when the writer resumes.
    if (data !== null && data !== undefined)
        stream.push(data);
    if (cb)
        cb(er);
    var rs = stream._readableState;
    rs.reading = false;
    // Pull the next chunk through if a reader is waiting or the read
    // buffer is below the high-water mark.
    if (rs.needReadable || rs.length < rs.highWaterMark) {
        stream._read(rs.highWaterMark);
    }
}
// Transform constructor. Wires up the transform state, primes the readable
// side, and arranges for _flush() (when implemented) to run after the
// writable side finishes, ending the stream via done$1().
// Fix: removed the unused local alias `ts` of this._transformState.
function Transform$1(options) {
    if (!(this instanceof Transform$1))
        return new Transform$1(options);
    _stream_duplex$1.call(this, options);
    this._transformState = new TransformState$1(options, this);
    // when the writable side finishes, then flush out anything remaining.
    var stream = this;
    // start out asking for a readable event once data is transformed.
    this._readableState.needReadable = true;
    // we have implemented the _read method, and done the other things
    // that Readable wants before the first _read call, so unset the
    // sync guard flag.
    this._readableState.sync = false;
    this.once('finish', function() {
        if ('function' === typeof this._flush)
            this._flush(function(er) {
                done$1(stream, er);
            });
        else
            done$1(stream);
    });
}
// Pushing output clears needTransform: the readable side got what it
// asked for. Delegates the actual buffering to the Duplex push().
Transform$1.prototype.push = function(chunk, encoding) {
    var state = this._transformState;
    state.needTransform = false;
    return _stream_duplex$1.prototype.push.call(this, chunk, encoding);
};
// This is the part where you do stuff!
// override this function in implementation classes.
// 'chunk' is an input chunk.
//
// Call `push(newChunk)` to pass along transformed output
// to the readable side. You may call 'push' zero or more times.
//
// Call `cb(err)` when you are done with this chunk. If you pass
// an error, then that'll put the hurt on the whole operation. If you
// never call cb(), then you'll never get another chunk.
Transform$1.prototype._transform = function(chunk, encoding, cb) {
    var err = new Error('not implemented');
    throw err;
};
Transform$1.prototype._write = function(chunk, encoding, cb) {
var ts = this._transformState;
ts.writecb = cb;
ts.writechunk = chunk;
ts.writeencoding = encoding;
if (!ts.transforming) {
var rs = this._readableState;
if (ts.needTransform ||
rs.needReadable ||
rs.length < rs.highWaterMark)
this._read(rs.highWaterMark);
}
};
// Doesn't matter what the args are here.
// _transform does all the work.
// That we got here means that the readable side wants more data.
Transform$1.prototype._read = function(n) {
    var ts = this._transformState;
    // Only start a transform when a chunk is pending and none in flight.
    if (ts.writechunk !== null && ts.writecb && !ts.transforming) {
        ts.transforming = true;
        this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
    } else {
        // mark that we need a transform, so that any data that comes in
        // will get processed, now that we've asked for it.
        ts.needTransform = true;
    }
};
// Finalize the stream once the writable side has finished and _flush (if
// any) has completed: surface a flush error, sanity-check that no writes
// or transforms are still pending, then end the readable side.
// Fix: removed the unused local `rs` (stream._readableState was read but
// never used).
function done$1(stream, er) {
    if (er)
        return stream.emit('error', er);
    // if there's nothing in the write buffer, then that means
    // that nothing more will ever be provided
    var ws = stream._writableState;
    var ts = stream._transformState;
    if (ws.length)
        throw new Error('calling transform done when ws.length != 0');
    if (ts.transforming)
        throw new Error('calling transform done when still transforming');
    return stream.push(null);
}
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a passthrough stream.
// basically just the most minimal sort of Transform stream.
// Every written chunk gets output as-is.
var _stream_passthrough$1 = PassThrough$1;
/*<replacement>*/
// Re-apply the browser-safe inherits shim before wiring the chain.
util.inherits = inherits_browser;
/*</replacement>*/
// PassThrough extends Transform with the identity _transform below.
util.inherits(PassThrough$1, _stream_transform$1);
// Identity Transform: constructed exactly like Transform, with the same
// options; _transform (below) simply echoes every chunk.
function PassThrough$1(options) {
    if (!(this instanceof PassThrough$1)) {
        return new PassThrough$1(options);
    }
    _stream_transform$1.call(this, options);
}
// Identity transform: immediately hand the chunk back, unchanged.
PassThrough$1.prototype._transform = function(chunk, encoding, cb) {
    cb(null, chunk);
};
// Aggregate export for the bundled readable-stream implementation.
var readable$1 = createCommonjsModule(function (module, exports) {
// hack to fix a circular dependency issue when used with browserify
exports = module.exports = _stream_readable$1;
exports.Stream = stream;
exports.Readable = exports;
exports.Writable = _stream_writable$1;
exports.Duplex = _stream_duplex$1;
exports.Transform = _stream_transform$1;
exports.PassThrough = _stream_passthrough$1;
// Outside the browser, READABLE_STREAM=disable opts back into core streams.
if (!process.browser && process.env.READABLE_STREAM === 'disable') {
  module.exports = stream;
}
});
// Individual re-exports of the bundled readable-stream classes.
var readable_1$1 = readable$1.Stream;
var readable_2$1 = readable$1.Readable;
var readable_3$1 = readable$1.Writable;
var readable_4$1 = readable$1.Duplex;
var readable_5$1 = readable$1.Transform;
var readable_6$1 = readable$1.PassThrough;
// bl's BufferList (below) is built on top of the Duplex class.
var DuplexStream = readable$1.Duplex;
// BufferList (from the `bl` package): a Duplex stream that collects
// Buffers and exposes them as one contiguous, sliceable byte sequence.
// `callback` may be a function (called with (err, data) on end/error),
// a Buffer, or an array of Buffers to preload.
function BufferList (callback) {
  if (!(this instanceof BufferList))
    return new BufferList(callback)
  this._bufs = [];
  this.length = 0;
  if (typeof callback == 'function') {
    this._callback = callback;
    // Forward the first error from any piped-in source, then disarm so
    // the callback fires at most once.
    var piper = function (err) {
      if (this._callback) {
        this._callback(err);
        this._callback = null;
      }
    }.bind(this);
    this.on('pipe', function (src) {
      src.on('error', piper);
    });
    this.on('unpipe', function (src) {
      src.removeListener('error', piper);
    });
  }
  else if (Buffer.isBuffer(callback))
    this.append(callback);
  else if (Array.isArray(callback)) {
    callback.forEach(function (b) {
      Buffer.isBuffer(b) && this.append(b);
    }.bind(this));
  }
  // Initialize the Duplex machinery last, after our own fields exist.
  DuplexStream.call(this);
}
util$2.inherits(BufferList, DuplexStream);
// Map an absolute byte offset to [bufferIndex, offsetWithinBuffer].
// Returns undefined when the offset lies past the end of the list.
BufferList.prototype._offset = function (offset) {
  var running = 0;
  for (var idx = 0; idx < this._bufs.length; idx++) {
    var next = running + this._bufs[idx].length;
    if (offset < next)
      return [ idx, offset - running ]
    running = next;
  }
};
// Append a chunk to the list; non-Buffer input is converted first.
// Fix: Buffer.from replaces the deprecated, memory-unsafe `new Buffer()`
// constructor (Buffer.from also rejects bare numbers instead of
// allocating uninitialized memory).
// NOTE(review): `this.length += buf.length` counts characters for string
// input rather than encoded bytes — preserved from the original; confirm
// no caller appends multi-byte strings.
BufferList.prototype.append = function (buf) {
  this._bufs.push(Buffer.isBuffer(buf) ? buf : Buffer.from(buf));
  this.length += buf.length;
  return this
};
// Writable-side hook: stash the chunk, then ack synchronously.
BufferList.prototype._write = function (buf, encoding, callback) {
  this.append(buf);
  if (callback) {
    callback();
  }
};
// Readable-side hook: emit up to `size` bytes from the front of the
// list, or EOF when nothing is buffered.
BufferList.prototype._read = function (size) {
  if (!this.length)
    return this.push(null)
  var take = Math.min(size, this.length);
  this.push(this.slice(0, take));
  this.consume(take);
};
// End the writable side, then deliver the fully accumulated contents to
// the constructor callback (at most once).
BufferList.prototype.end = function (chunk) {
  DuplexStream.prototype.end.call(this, chunk);
  if (this._callback) {
    this._callback(null, this.slice());
    this._callback = null;
  }
};
// Read the single byte at `index`.
BufferList.prototype.get = function (index) {
  var one = this.slice(index, index + 1);
  return one[0]
};
// Return a new Buffer holding bytes [start, end); copy() with a null
// destination allocates the target itself.
BufferList.prototype.slice = function (start, end) {
  return this.copy(null, 0, start, end)
};
// Copy bytes [srcStart, srcEnd) into `dst` at `dstStart`; when `dst` is
// null, allocate and return a new Buffer instead (this backs slice()).
// Out-of-range bounds are clamped to the list length.
// Fix: Buffer.alloc replaces the deprecated `new Buffer(n)`, which
// returned uninitialized memory.
BufferList.prototype.copy = function (dst, dstStart, srcStart, srcEnd) {
  if (typeof srcStart != 'number' || srcStart < 0)
    srcStart = 0;
  if (typeof srcEnd != 'number' || srcEnd > this.length)
    srcEnd = this.length;
  // Empty range: hand back the destination or a fresh empty buffer.
  if (srcStart >= this.length)
    return dst || Buffer.alloc(0)
  if (srcEnd <= 0)
    return dst || Buffer.alloc(0)
  var copy = !!dst
    , off = this._offset(srcStart)
    , len = srcEnd - srcStart
    , bytes = len
    , bufoff = (copy && dstStart) || 0
    , start = off[1]
    , l
    , i;
  // copy/slice everything
  if (srcStart === 0 && srcEnd == this.length) {
    if (!copy) // slice, just return a full concat
      return Buffer.concat(this._bufs)
    // copy, need to copy individual buffers
    for (i = 0; i < this._bufs.length; i++) {
      this._bufs[i].copy(dst, bufoff);
      bufoff += this._bufs[i].length;
    }
    return dst
  }
  // easy, cheap case where it's a subset of one of the buffers
  if (bytes <= this._bufs[off[0]].length - start) {
    return copy
      ? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes)
      : this._bufs[off[0]].slice(start, start + bytes)
  }
  if (!copy) // a slice, we need something to copy in to
    dst = Buffer.alloc(len); // zero-filled; fully overwritten below
  // Walk the buffers from the starting one, copying until `bytes` runs out.
  for (i = off[0]; i < this._bufs.length; i++) {
    l = this._bufs[i].length - start;
    if (bytes > l) {
      this._bufs[i].copy(dst, bufoff, start);
    } else {
      this._bufs[i].copy(dst, bufoff, start, start + bytes);
      break
    }
    bufoff += l;
    bytes -= l;
    if (start)
      start = 0;
  }
  return dst
};
// Decode bytes [start, end) with the given encoding.
BufferList.prototype.toString = function (encoding, start, end) {
  var window = this.slice(start, end);
  return window.toString(encoding)
};
// Drop `bytes` bytes from the front of the list, discarding whole
// buffers where possible and trimming the first surviving one.
BufferList.prototype.consume = function (bytes) {
  while (this._bufs.length) {
    if (bytes > this._bufs[0].length) {
      bytes -= this._bufs[0].length;
      this.length -= this._bufs[0].length;
      this._bufs.shift();
    } else {
      // Remaining count fits inside the head buffer: trim it and stop.
      this._bufs[0] = this._bufs[0].slice(bytes);
      this.length -= bytes;
      break
    }
  }
  return this
};
// Shallow clone: a new BufferList sharing the same underlying Buffers.
BufferList.prototype.duplicate = function () {
  var clone = new BufferList();
  for (var i = 0; i < this._bufs.length; i++) {
    clone.append(this._bufs[i]);
  }
  return clone
};
// Throw away all buffered bytes and signal EOF on the readable side.
BufferList.prototype.destroy = function () {
  this._bufs = [];
  this.length = 0;
  this.push(null);
};
// Generate the fixed-width read* accessors (readUInt32BE, readInt8, ...).
// Each slices the needed bytes — possibly spanning buffer boundaries —
// and delegates to the matching Buffer method at offset 0.
;(function () {
  var methods = {
      'readDoubleBE' : 8
    , 'readDoubleLE' : 8
    , 'readFloatBE' : 4
    , 'readFloatLE' : 4
    , 'readInt32BE' : 4
    , 'readInt32LE' : 4
    , 'readUInt32BE' : 4
    , 'readUInt32LE' : 4
    , 'readInt16BE' : 2
    , 'readInt16LE' : 2
    , 'readUInt16BE' : 2
    , 'readUInt16LE' : 2
    , 'readInt8' : 1
    , 'readUInt8' : 1
  };
  for (var m in methods) {
    // IIFE captures `m` per iteration (pre-let closure idiom).
    (function (m) {
      BufferList.prototype[m] = function (offset) {
        return this.slice(offset, offset + methods[m])[m](0)
      };
    }(m));
  }
}());
var bl = BufferList;
/* Copyright (c) 2012-2014 LevelUP contributors
* See list at <https://github.com/rvagg/node-levelup#contributing>
* MIT License
* <https://github.com/rvagg/node-levelup/blob/master/LICENSE.md>
*/
// levelup write-stream dependencies; setImmediate falls back to
// process.nextTick where the global is unavailable.
var Stream = stream.Stream
  , inherits$1 = util$2.inherits
  , setImmediate$1 = commonjsGlobal.setImmediate || process.nextTick
  , getOptions$1 = util$1.getOptions
  , defaultOptions$2 = { type: 'put' };
// WriteStream: classic (streams1) writable stream that buffers incoming
// {key, value} entries and flushes them to the database in batches.
function WriteStream (options, db) {
  if (!(this instanceof WriteStream))
    return new WriteStream(options, db)
  Stream.call(this);
  this._options = xtend$2(defaultOptions$2, getOptions$1(db, options));
  this._db = db;
  this._buffer = [];
  this._status = 'init';
  this._end = false;
  this.writable = true;
  this.readable = false;
  var self = this
    , ready = function () {
        if (!self.writable)
          return
        self._status = 'ready';
        self.emit('ready');
        self._process();
      };
  // Defer processing until the database reports it is open.
  if (db.isOpen())
    setImmediate$1(ready);
  else
    db.once('ready', ready);
}
inherits$1(WriteStream, Stream);
// Queue a datum for writing. Returns false for back-pressure once the
// buffer exceeds maxBufferLength, or when no longer writable.
WriteStream.prototype.write = function (data) {
  if (!this.writable)
    return false
  this._buffer.push(data);
  // During 'init' the ready handler will kick off processing instead.
  if (this._status != 'init')
    this._processDelayed();
  if (this._options.maxBufferLength &&
      this._buffer.length > this._options.maxBufferLength) {
    // Remember to emit 'drain' after the next flush.
    this._writeBlock = true;
    return false
  }
  return true
};
// Optionally accept a final datum, then mark the stream ended on the
// next tick so anything still queued gets flushed first.
WriteStream.prototype.end = function (data) {
  var stream = this;
  if (data)
    this.write(data);
  setImmediate$1(function () {
    stream._end = true;
    stream._process();
  });
};
// Hard stop: refuse further writes, then run the normal end sequence.
WriteStream.prototype.destroy = function () {
  this.writable = false;
  this.end();
};
// Soft stop: equivalent to end() with no final datum.
WriteStream.prototype.destroySoon = function () {
  this.end();
};
// fstream integration: accept an fstream entry. Directories are piped
// into a fresh write-stream; files are buffered and written by _write().
// NOTE(review): returns undefined for prop-less entries but true for any
// entry with props (even ones neither branch handles) — preserved as-is.
WriteStream.prototype.add = function (entry) {
  if (!entry.props)
    return
  if (entry.props.Directory)
    entry.pipe(this._db.writeStream(this._options));
  else if (entry.props.File || entry.File || entry.type == 'File')
    this._write(entry);
  return true
};
// Schedule a _process() pass on the next tick.
WriteStream.prototype._processDelayed = function () {
  var stream = this;
  setImmediate$1(function () {
    stream._process();
  });
};
// Core flush loop / state machine. States: init -> ready <-> writing ->
// closed. Swaps the buffer out, writes it as a single db.batch(), and
// either reschedules itself or closes once ended and drained.
WriteStream.prototype._process = function () {
  var buffer
    , self = this
    // Batch-completion callback: restore 'ready' (unless closed),
    // surface errors, and loop to flush anything queued meanwhile.
    , cb = function (err) {
        if (!self.writable)
          return
        if (self._status != 'closed')
          self._status = 'ready';
        if (err) {
          self.writable = false;
          return self.emit('error', err)
        }
        self._process();
      };
  // Not ready yet (init/writing): retry later if work is pending.
  if (self._status != 'ready' && self.writable) {
    if (self._buffer.length && self._status != 'closed')
      self._processDelayed();
    return
  }
  if (self._buffer.length && self.writable) {
    self._status = 'writing';
    buffer = self._buffer;
    self._buffer = [];
    // Fill per-entry defaults from the stream options before batching.
    self._db.batch(buffer.map(function (d) {
      return {
          type : d.type || self._options.type
        , key : d.key
        , value : d.value
        , keyEncoding : d.keyEncoding || self._options.keyEncoding
        , valueEncoding : d.valueEncoding
            || d.encoding
            || self._options.valueEncoding
      }
    }), cb);
    // Back-pressure released: tell writers they may resume.
    if (self._writeBlock) {
      self._writeBlock = false;
      self.emit('drain');
    }
    // don't allow close until callback has returned
    return
  }
  if (self._end && self._status != 'closed') {
    self._status = 'closed';
    self.writable = false;
    self.emit('close');
  }
};
// fstream file-entry writer: buffer the entry's contents via bl, strip
// the configured fstream root prefix from its path, then queue the
// result as a normal {key, value} write.
WriteStream.prototype._write = function (entry) {
  var key = entry.path || entry.props.path
    , self = this;
  if (!key)
    return
  entry.pipe(bl(function (err, data) {
    if (err) {
      self.writable = false;
      return self.emit('error', err)
    }
    if (self._options.fstreamRoot &&
        key.indexOf(self._options.fstreamRoot) > -1)
      key = key.substr(self._options.fstreamRoot.length + 1);
    // slice(0) snapshots the accumulated bytes into one Buffer.
    self.write({ key: key, value: data.slice(0) });
  }));
};
// Identify this stream type in debugging output.
WriteStream.prototype.toString = function () {
  var label = 'LevelUP.WriteStream';
  return label
};
var writeStream = WriteStream;
/* Copyright (c) 2012-2014 LevelUP contributors
* See list at <https://github.com/rvagg/node-levelup#contributing>
* MIT License
* <https://github.com/rvagg/node-levelup/blob/master/LICENSE.md>
*/
// Chained-batch dependencies: error class plus shared option helpers.
var WriteError = errors.WriteError
  , getOptions$2 = util$1.getOptions
  , dispatchError$1 = util$1.dispatchError;
// Chained-batch wrapper over the underlying store's batch object; every
// queued op is mirrored into `ops` for the 'batch' event on write().
function Batch (levelup) {
  this._levelup = levelup;
  this.ops = [];
  this.batch = levelup.db.batch();
}
// Queue a put of key_/value_ (encoded per the merged options). Chainable;
// synchronous store failures are wrapped in WriteError.
Batch.prototype.put = function (key_, value_, options) {
  options = getOptions$2(this._levelup, options);
  var encodedKey = util$1.encodeKey(key_, options);
  var encodedValue = util$1.encodeValue(value_, options);
  try {
    this.batch.put(encodedKey, encodedValue);
  } catch (e) {
    throw new WriteError(e)
  }
  this.ops.push({ type : 'put', key : encodedKey, value : encodedValue });
  return this
};
// Queue a delete of key_ (encoded per the merged options). Chainable.
Batch.prototype.del = function (key_, options) {
  options = getOptions$2(this._levelup, options);
  var encodedKey = util$1.encodeKey(key_, options);
  try {
    this.batch.del(encodedKey);
  } catch (err) {
    throw new WriteError(err)
  }
  this.ops.push({ type : 'del', key : encodedKey });
  return this
};
// Discard every queued operation, in the store and in the mirror list.
// Chainable.
Batch.prototype.clear = function () {
  try {
    this.batch.clear();
  } catch (err) {
    throw new WriteError(err)
  }
  this.ops = [];
  return this
};
// Commit the queued operations. Synchronous failures throw WriteError;
// asynchronous ones are routed through dispatchError. Emits 'batch' with
// the mirrored op list on success.
Batch.prototype.write = function (callback) {
  var levelup = this._levelup
    , ops = this.ops;
  try {
    this.batch.write(function (err) {
      if (err)
        return dispatchError$1(levelup, new WriteError(err), callback)
      levelup.emit('batch', ops);
      if (callback)
        callback();
    });
  } catch (err) {
    throw new WriteError(err)
  }
};
var batch = Batch;
/* Copyright (c) 2012-2014 LevelUP contributors
* See list at <https://github.com/rvagg/node-levelup#contributing>
* MIT License
* <https://github.com/rvagg/node-levelup/blob/master/LICENSE.md>
*/
// levelup core dependencies: EventEmitter base, inherits, the error
// class hierarchy, and shared option/error helpers.
var EventEmitter = events.EventEmitter
  , inherits$2 = util$2.inherits
  , WriteError$1 = errors.WriteError
  , ReadError = errors.ReadError
  , NotFoundError$1 = errors.NotFoundError
  , OpenError = errors.OpenError
  , EncodingError$1 = errors.EncodingError
  , InitializationError = errors.InitializationError
  , getOptions$3 = util$1.getOptions
  , defaultOptions$3 = util$1.defaultOptions
  , getLevelDOWN$1 = util$1.getLevelDOWN
  , dispatchError$2 = util$1.dispatchError;
// Support (options, callback) signatures where options may be omitted:
// if the first argument is already a function, it is the callback.
function getCallback (options, callback) {
  if (typeof options == 'function')
    return options
  return callback
}
// Possible LevelUP#_status values:
// - 'new' - newly created, not opened or closed
// - 'opening' - waiting for the database to be opened, post open()
// - 'open' - successfully opened the database, available for use
// - 'closing' - waiting for the database to be closed, post close()
// - 'closed' - database has been successfully closed, should not be
// used except for another open() operation
// LevelUP constructor/factory. `location` may instead be a db factory
// function, or an options object carrying a `db` factory; the database
// is opened immediately.
function LevelUP (location, options, callback) {
  if (!(this instanceof LevelUP))
    return new LevelUP(location, options, callback)
  var error;
  EventEmitter.call(this);
  this.setMaxListeners(Infinity);
  // Argument shuffling: location may actually be the db factory...
  if (typeof location == 'function') {
    options = typeof options == 'object' ? options : {};
    options.db = location;
    location = null;
  } else if (typeof location == 'object' && typeof location.db == 'function') {
    // ...or an options object that contains one.
    options = location;
    location = null;
  }
  if (typeof options == 'function') {
    callback = options;
    options = {};
  }
  // Without a db factory a string location is mandatory.
  if ((!options || typeof options.db != 'function') && typeof location != 'string') {
    error = new InitializationError(
      'Must provide a location for the database');
    if (callback) {
      return process.nextTick(function () {
        callback(error);
      })
    }
    throw error
  }
  options = getOptions$3(this, options);
  this.options = xtend$2(defaultOptions$3, options);
  this._status = 'new';
  // set this.location as enumerable but not configurable or writable
  prr(this, 'location', location, 'e');
  this.open(callback);
}
inherits$2(LevelUP, EventEmitter);
// Open (or re-open) the underlying store. Operations issued while
// opening are queued through a deferred-leveldown shim and replayed once
// the real store is ready. Emits 'opening', then 'open' and 'ready'.
LevelUP.prototype.open = function (callback) {
  var self = this
    , dbFactory
    , db;
  if (this.isOpen()) {
    if (callback)
      process.nextTick(function () { callback(null, self); });
    return this
  }
  if (this._isOpening()) {
    return callback && this.once(
        'open'
      , function () { callback(null, self); }
    )
  }
  this.emit('opening');
  this._status = 'opening';
  // Queue operations against the deferred store until open completes.
  this.db = new deferredLeveldown(this.location);
  dbFactory = this.options.db || getLevelDOWN$1();
  db = dbFactory(this.location);
  db.open(this.options, function (err) {
    if (err) {
      return dispatchError$2(self, new OpenError(err), callback)
    } else {
      // Replay the queued operations, then swap in the real store.
      self.db.setDb(db);
      self.db = db;
      self._status = 'open';
      if (callback)
        callback(null, self);
      self.emit('open');
      self.emit('ready');
    }
  });
};
// Close the underlying store. Safe in any state: while opening it waits
// for 'open' then closes; while closing it waits for 'closed'; when
// already closed the callback fires on the next tick.
LevelUP.prototype.close = function (callback) {
  var self = this;
  if (this.isOpen()) {
    this._status = 'closing';
    this.db.close(function () {
      self._status = 'closed';
      self.emit('closed');
      if (callback)
        callback.apply(null, arguments);
    });
    this.emit('closing');
    this.db = null;
  } else if (this._status == 'closed' && callback) {
    return process.nextTick(callback)
  } else if (this._status == 'closing' && callback) {
    this.once('closed', callback);
  } else if (this._isOpening()) {
    this.once('open', function () {
      self.close(callback);
    });
  }
};
// True once open() has completed successfully.
LevelUP.prototype.isOpen = function () {
  var status = this._status;
  return status == 'open'
};
// True while an open() call is still in flight.
LevelUP.prototype._isOpening = function () {
  var status = this._status;
  return status == 'opening'
};
// True when the database is closing or fully closed.
LevelUP.prototype.isClosed = function () {
  var closingOrClosed = (/^clos/).test(this._status);
  return closingOrClosed
};
// Fetch the value stored under key_. Store errors matching /notfound/
// become NotFoundError; decode failures surface as EncodingError.
LevelUP.prototype.get = function (key_, options, callback) {
  var self = this
    , key;
  callback = getCallback(options, callback);
  if (typeof callback != 'function') {
    return dispatchError$2(
        this
      , new ReadError('get() requires key and callback arguments')
    )
  }
  if (!this._isOpening() && !this.isOpen()) {
    return dispatchError$2(
        this
      , new ReadError('Database is not open')
      , callback
    )
  }
  options = util$1.getOptions(this, options);
  key = util$1.encodeKey(key_, options);
  options.asBuffer = util$1.isValueAsBuffer(options);
  this.db.get(key, options, function (err, value) {
    if (err) {
      if ((/notfound/i).test(err)) {
        err = new NotFoundError$1(
          'Key not found in database [' + key_ + ']', err);
      } else {
        err = new ReadError(err);
      }
      return dispatchError$2(self, err, callback)
    }
    if (callback) {
      try {
        value = util$1.decodeValue(value, options);
      } catch (e) {
        return callback(new EncodingError$1(e))
      }
      callback(null, value);
    }
  });
};
// Store value_ under key_. Both must be non-null/undefined; encodings
// come from the merged options. Emits 'put' with the original
// (un-encoded) key and value on success.
LevelUP.prototype.put = function (key_, value_, options, callback) {
  var self = this
    , key
    , value;
  callback = getCallback(options, callback);
  if (key_ === null || key_ === undefined
      || value_ === null || value_ === undefined) {
    return dispatchError$2(
        this
      , new WriteError$1('put() requires key and value arguments')
      , callback
    )
  }
  if (!this._isOpening() && !this.isOpen()) {
    return dispatchError$2(
        this
      , new WriteError$1('Database is not open')
      , callback
    )
  }
  options = getOptions$3(this, options);
  key = util$1.encodeKey(key_, options);
  value = util$1.encodeValue(value_, options);
  this.db.put(key, value, options, function (err) {
    if (err) {
      return dispatchError$2(self, new WriteError$1(err), callback)
    } else {
      self.emit('put', key_, value_);
      if (callback)
        callback();
    }
  });
};
// Delete key_ from the store. Emits 'del' with the original key on
// success.
LevelUP.prototype.del = function (key_, options, callback) {
  var self = this
    , key;
  callback = getCallback(options, callback);
  if (key_ === null || key_ === undefined) {
    return dispatchError$2(
        this
      , new WriteError$1('del() requires a key argument')
      , callback
    )
  }
  if (!this._isOpening() && !this.isOpen()) {
    return dispatchError$2(
        this
      , new WriteError$1('Database is not open')
      , callback
    )
  }
  options = getOptions$3(this, options);
  key = util$1.encodeKey(key_, options);
  this.db.del(key, options, function (err) {
    if (err) {
      return dispatchError$2(self, new WriteError$1(err), callback)
    } else {
      self.emit('del', key_);
      if (callback)
        callback();
    }
  });
};
// Apply an array of {type, key, value} operations atomically. Called
// with no arguments it returns a chained Batch object instead. Emits
// 'batch' with the original array on success.
LevelUP.prototype.batch = function (arr_, options, callback) {
  var self = this
    , keyEnc
    , valueEnc
    , arr;
  if (!arguments.length)
    return new batch(this)
  callback = getCallback(options, callback);
  if (!Array.isArray(arr_)) {
    return dispatchError$2(
        this
      , new WriteError$1('batch() requires an array argument')
      , callback
    )
  }
  if (!this._isOpening() && !this.isOpen()) {
    return dispatchError$2(
        this
      , new WriteError$1('Database is not open')
      , callback
    )
  }
  options = getOptions$3(this, options);
  keyEnc = options.keyEncoding;
  valueEnc = options.valueEncoding;
  arr = arr_.map(function (e) {
    // Entries missing type or key are passed through as empty ops.
    if (e.type === undefined || e.key === undefined)
      return {}
    // inherit encoding
    var kEnc = e.keyEncoding || keyEnc
      , vEnc = e.valueEncoding || e.encoding || valueEnc
      , o;
    // If we're not dealing with plain utf8 strings or plain
    // Buffers then we have to do some work on the array to
    // encode the keys and/or values. This includes JSON types.
    if (kEnc != 'utf8' && kEnc != 'binary'
        || vEnc != 'utf8' && vEnc != 'binary') {
      o = {
          type: e.type
        , key: util$1.encodeKey(e.key, options, e)
      };
      if (e.value !== undefined)
        o.value = util$1.encodeValue(e.value, options, e);
      return o
    } else {
      return e
    }
  });
  this.db.batch(arr, options, function (err) {
    if (err) {
      return dispatchError$2(self, new WriteError$1(err), callback)
    } else {
      self.emit('batch', arr_);
      if (callback)
        callback();
    }
  });
};
// DEPRECATED: prefer accessing LevelDOWN for this: db.db.approximateSize()
// Estimate the stored size of the key range [start_, end_]. All three
// arguments are required.
LevelUP.prototype.approximateSize = function (start_, end_, callback) {
  var self = this
    , start
    , end;
  if (start_ === null || start_ === undefined
      || end_ === null || end_ === undefined
      || typeof callback != 'function') {
    return dispatchError$2(
        this
      , new ReadError('approximateSize() requires start, end and callback arguments')
      , callback
    )
  }
  start = util$1.encodeKey(start_, this.options);
  end = util$1.encodeKey(end_, this.options);
  if (!this._isOpening() && !this.isOpen()) {
    return dispatchError$2(
        this
      , new WriteError$1('Database is not open')
      , callback
    )
  }
  this.db.approximateSize(start, end, function (err, size) {
    if (err) {
      // NOTE(review): failures here are wrapped as OpenError — this looks
      // inherited from upstream levelup; confirm before changing.
      return dispatchError$2(self, new OpenError(err), callback)
    } else if (callback) {
      callback(null, size);
    }
  });
};
// Create a readable stream over the database, driven by a store iterator
// created from the instance options merged with `options`.
LevelUP.prototype.readStream =
LevelUP.prototype.createReadStream = function (options) {
  var self = this;
  options = xtend$2(this.options, options);
  return new readStream(
      options
    , this
    , function (options) {
        return self.db.iterator(options)
      }
  )
};
// Read stream emitting keys only.
LevelUP.prototype.keyStream =
LevelUP.prototype.createKeyStream = function (options) {
  var opts = xtend$2(options, { keys: true, values: false });
  return this.createReadStream(opts)
};
// Read stream emitting values only.
LevelUP.prototype.valueStream =
LevelUP.prototype.createValueStream = function (options) {
  var opts = xtend$2(options, { keys: false, values: true });
  return this.createReadStream(opts)
};
// Writable stream of {key, value} entries backed by batched writes.
LevelUP.prototype.writeStream =
LevelUP.prototype.createWriteStream = function (options) {
  var opts = xtend$2(options);
  return new writeStream(opts, this)
};
// Identify this object type in debugging output.
LevelUP.prototype.toString = function () {
  var label = 'LevelUP';
  return label
};
// Build a static helper (destroy/repair) that proxies the named
// operation to the LevelDOWN backend, defaulting the callback to a noop.
function utilStatic (name) {
  return function (location, callback) {
    var cb = callback || function () {};
    getLevelDOWN$1()[name](location, cb);
  }
}
// Public export plus the deprecated static helpers attached to it.
var levelup = LevelUP;
var copy$1 = util$1.copy;
// DEPRECATED: prefer accessing LevelDOWN for this: require('leveldown').destroy()
var destroy = utilStatic('destroy');
// DEPRECATED: prefer accessing LevelDOWN for this: require('leveldown').repair()
var repair = utilStatic('repair');
levelup.copy = copy$1;
levelup.destroy = destroy;
levelup.repair = repair;
// Native Array.isArray when available, else the classic toString probe.
var isarray$2 = Array.isArray || function (candidate) {
  return Object.prototype.toString.call(candidate) == '[object Array]';
};
var string_decoder$2 = createCommonjsModule(function (module, exports) {
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
var Buffer = buffer.Buffer;
var isBufferEncoding = Buffer.isEncoding
|| function(encoding) {
switch (encoding && encoding.toLowerCase()) {
case 'hex': case 'utf8': case 'utf-8': case 'ascii': case 'binary': case 'base64': case 'ucs2': case 'ucs-2': case 'utf16le': case 'utf-16le': case 'raw': return true;
default: return false;
}
};
function assertEncoding(encoding) {
if (encoding && !isBufferEncoding(encoding)) {
throw new Error('Unknown encoding: ' + encoding);
}
}
// StringDecoder provides an interface for efficiently splitting a series of
// buffers into a series of JS strings without breaking apart multi-byte
// characters. CESU-8 is handled as part of the UTF-8 encoding.
//
// @TODO Handling all encodings inside a single object makes it very difficult
// to reason about this code, so it should be split up in the future.
// @TODO There should be a utf8-strict encoding that rejects invalid UTF-8 code
// points as used by CESU-8.
var StringDecoder = exports.StringDecoder = function(encoding) {
this.encoding = (encoding || 'utf8').toLowerCase().replace(/[-_]/, '');
assertEncoding(encoding);
switch (this.encoding) {
case 'utf8':
// CESU-8 represents each of Surrogate Pair by 3-bytes
this.surrogateSize = 3;
break;
case 'ucs2':
case 'utf16le':
// UTF-16 represents each of Surrogate Pair by 2-bytes
this.surrogateSize = 2;
this.detectIncompleteChar = utf16DetectIncompleteChar;
break;
case 'base64':
// Base-64 stores 3 bytes in 4 chars, and pads the remainder.
this.surrogateSize = 3;
this.detectIncompleteChar = base64DetectIncompleteChar;
break;
default:
this.write = passThroughWrite;
return;
}
// Enough space to store all bytes of a single character. UTF-8 needs 4
// bytes, but CESU-8 may require up to 6 (3 bytes per surrogate).
this.charBuffer = new Buffer(6);
// Number of bytes received for the current incomplete multi-byte character.
this.charReceived = 0;
// Number of bytes expected for the current incomplete multi-byte character.
this.charLength = 0;
};
// write decodes the given buffer and returns it as JS string that is
// guaranteed to not contain any partial multi-byte characters. Any partial
// character found at the end of the buffer is buffered up, and will be
// returned when calling write again with the remaining bytes.
//
// Note: Converting a Buffer containing an orphan surrogate to a String
// currently works, but converting a String to a Buffer (via `new Buffer`, or
// Buffer#write) will replace incomplete surrogates with the unicode
// replacement character. See https://codereview.chromium.org/121173009/ .
StringDecoder.prototype.write = function(buffer) {
var charStr = '';
// if our last write ended with an incomplete multibyte character
while (this.charLength) {
// determine how many remaining bytes this buffer has to offer for this char
var available = (buffer.length >= this.charLength - this.charReceived) ?
this.charLength - this.charReceived :
buffer.length;
// add the new bytes to the char buffer
buffer.copy(this.charBuffer, this.charReceived, 0, available);
this.charReceived += available;
if (this.charReceived < this.charLength) {
// still not enough chars in this buffer? wait for more ...
return '';
}
// remove bytes belonging to the current character from the buffer
buffer = buffer.slice(available, buffer.length);
// get the character that was split
charStr = this.charBuffer.slice(0, this.charLength).toString(this.encoding);
// CESU-8: lead surrogate (D800-DBFF) is also the incomplete character
var charCode = charStr.charCodeAt(charStr.length - 1);
if (charCode >= 0xD800 && charCode <= 0xDBFF) {
this.charLength += this.surrogateSize;
charStr = '';
continue;
}
this.charReceived = this.charLength = 0;
// if there are no more bytes in this buffer, just emit our char
if (buffer.length === 0) {
return charStr;
}
break;
}
// determine and set charLength / charReceived
this.detectIncompleteChar(buffer);
var end = buffer.length;
if (this.charLength) {
// buffer the incomplete character bytes we got
buffer.copy(this.charBuffer, 0, buffer.length - this.charReceived, end);
end -= this.charReceived;
}
charStr += buffer.toString(this.encoding, 0, end);
var end = charStr.length - 1;
var charCode = charStr.charCodeAt(end);
// CESU-8: lead surrogate (D800-DBFF) is also the incomplete character
if (charCode >= 0xD800 && charCode <= 0xDBFF) {
var size = this.surrogateSize;
this.charLength += size;
this.charReceived += size;
this.charBuffer.copy(this.charBuffer, size, 0, size);
buffer.copy(this.charBuffer, 0, 0, size);
return charStr.substring(0, end);
}
// or just emit the charStr
return charStr;
};
// detectIncompleteChar determines if there is an incomplete UTF-8 character at
// the end of the given buffer. If so, it sets this.charLength to the byte
// length that character, and sets this.charReceived to the number of bytes
// that are available for this character.
// detectIncompleteChar determines if there is an incomplete UTF-8
// character at the end of the given buffer. If so, it sets
// this.charLength to the byte length of that character and
// this.charReceived to the number of bytes already available for it.
StringDecoder.prototype.detectIncompleteChar = function(buffer) {
  // A multi-byte UTF-8 lead byte can sit at most 3 bytes from the end,
  // so only that many trailing bytes need to be inspected.
  var i = buffer.length >= 3 ? 3 : buffer.length;

  // Scan toward the end of the buffer for a lead byte announcing a
  // sequence longer than the bytes that follow it.
  // See http://en.wikipedia.org/wiki/UTF-8#Description
  while (i > 0) {
    var byte = buffer[buffer.length - i];

    // 110XXXXX => start of a 2-byte character
    if (i == 1 && byte >> 5 == 0x06) {
      this.charLength = 2;
      break;
    }

    // 1110XXXX => start of a 3-byte character
    if (i <= 2 && byte >> 4 == 0x0E) {
      this.charLength = 3;
      break;
    }

    // 11110XXX => start of a 4-byte character
    if (i <= 3 && byte >> 3 == 0x1E) {
      this.charLength = 4;
      break;
    }

    i--;
  }

  this.charReceived = i;
};
// Flush the decoder: decode any final chunk, then append whatever
// partial-character bytes are still held in the internal buffer
// (decoded with the target encoding, possibly yielding a replacement
// character).
StringDecoder.prototype.end = function(buffer) {
  var res = '';

  if (buffer && buffer.length)
    res = this.write(buffer);

  if (this.charReceived) {
    res += this.charBuffer.slice(0, this.charReceived).toString(this.encoding);
  }

  return res;
};
// Fast-path write used for encodings that can never split a character
// across chunk boundaries: simply stringify the whole buffer.
function passThroughWrite(buffer) {
  var encoding = this.encoding;
  return buffer.toString(encoding);
}
// UTF-16LE code units are two bytes wide: an odd trailing byte means we
// are holding the first half of the next code unit.
function utf16DetectIncompleteChar(buffer) {
  var leftover = buffer.length % 2;
  this.charReceived = leftover;
  this.charLength = leftover === 0 ? 0 : 2;
}
// Base64 encodes in 3-byte groups: any remainder modulo 3 is an
// incomplete group that must wait for more input.
function base64DetectIncompleteChar(buffer) {
  var leftover = buffer.length % 3;
  this.charReceived = leftover;
  this.charLength = leftover === 0 ? 0 : 3;
}
});
// Named re-export of the StringDecoder constructor from the CommonJS
// string_decoder shim above.
var string_decoder_1$2 = string_decoder$2.StringDecoder;
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// --- readable-stream@1.0 Readable: module-level wiring ---
var _stream_readable$2 = Readable$3;
/*<replacement>*/
/*</replacement>*/
/*<replacement>*/
var Buffer$6 = buffer.Buffer;
/*</replacement>*/
Readable$3.ReadableState = ReadableState$2;
var EE$2 = events.EventEmitter;
/*<replacement>*/
// Very old EventEmitter implementations lack listenerCount; shim it.
if (!EE$2.listenerCount) EE$2.listenerCount = function(emitter, type) {
  return emitter.listeners(type).length;
};
/*</replacement>*/
/*<replacement>*/
util.inherits = inherits_browser;
/*</replacement>*/
// Lazily assigned on first use (options.encoding / setEncoding).
var StringDecoder$2;
util.inherits(Readable$3, stream);
// Per-stream readable-side state bag.
function ReadableState$2(options, stream) {
  options = options || {};

  // High water mark: the point at which read() stops pre-emptively
  // calling _read() to fill the buffer. Zero is a valid value (meaning
  // "never call _read pre-emptively"); the default is 16KB.
  var hwm = options.highWaterMark;
  this.highWaterMark = (hwm || hwm === 0) ? hwm : 16 * 1024;
  this.highWaterMark = ~~this.highWaterMark; // force to an integer

  this.buffer = [];
  this.length = 0;
  this.pipes = null;
  this.pipesCount = 0;
  this.flowing = false;
  this.ended = false;
  this.endEmitted = false;
  this.reading = false;

  // 'end' is withheld until a read() actually happens, so a consumer
  // that does some I/O before touching the stream cannot miss it.
  this.calledRead = false;

  // True while the onwrite cb may still be invoked synchronously; set
  // up-front so nothing "later" happens before the first call.
  this.sync = true;

  // Set whenever read() returned null: we are awaiting a 'readable'.
  this.needReadable = false;
  this.emittedReadable = false;
  this.readableListening = false;

  // Object mode: read(n) ignores n, and buffer merging / length checks
  // are bypassed.
  this.objectMode = !!options.objectMode;

  // Default string encoding: 'binary' historically for crypto, 'utf8'
  // for everything else — hence configurable.
  this.defaultEncoding = options.defaultEncoding || 'utf8';

  // Pipe bookkeeping.
  this.ranOut = false;       // 'readable' fired after pipes drained us
  this.awaitDrain = 0;       // writers awaiting 'drain' in .pipe()s
  this.readingMore = false;  // a maybeReadMore tick is scheduled

  this.decoder = null;
  this.encoding = null;
  if (options.encoding) {
    if (!StringDecoder$2)
      StringDecoder$2 = string_decoder$2.StringDecoder;
    this.decoder = new StringDecoder$2(options.encoding);
    this.encoding = options.encoding;
  }
}
// Readable stream constructor (streams2, readable-stream@1.0 vintage).
function Readable$3(options) {
  // Support construction without `new`.
  if (!(this instanceof Readable$3))
    return new Readable$3(options);

  this._readableState = new ReadableState$2(options, this);

  this.readable = true; // legacy flag

  stream.call(this);
}
// Manually shove something into the read() buffer. Returns true while
// the highWaterMark has not been hit yet — analogous to how
// Writable.write() returns true when you may write() some more.
Readable$3.prototype.push = function(chunk, encoding) {
  var state = this._readableState;

  // Outside object mode, strings are converted to Buffers up-front
  // unless they already match the state's encoding.
  if (!state.objectMode && typeof chunk === 'string') {
    encoding = encoding || state.defaultEncoding;
    if (encoding !== state.encoding) {
      chunk = new Buffer$6(chunk, encoding);
      encoding = '';
    }
  }

  return readableAddChunk$2(this, state, chunk, encoding, false);
};
// Put a chunk back at the *front* of the read buffer. Should only ever
// be fed data that came directly out of read().
Readable$3.prototype.unshift = function(chunk) {
  return readableAddChunk$2(this, this._readableState, chunk, '', true);
};
// Common implementation behind push() and unshift(): validate the
// chunk, handle EOF (null/undefined), buffer the data, and fire the
// relevant events. Returns whether more data may be pushed.
function readableAddChunk$2(stream, state, chunk, encoding, addToFront) {
  var invalid = chunkInvalid$2(state, chunk);

  if (invalid) {
    stream.emit('error', invalid);
  } else if (chunk === null || chunk === undefined) {
    // null/undefined signals end-of-stream.
    state.reading = false;
    if (!state.ended)
      onEofChunk$2(stream, state);
  } else if (state.objectMode || (chunk && chunk.length > 0)) {
    if (state.ended && !addToFront) {
      stream.emit('error', new Error('stream.push() after EOF'));
    } else if (state.endEmitted && addToFront) {
      stream.emit('error', new Error('stream.unshift() after end event'));
    } else {
      // Decode pushed (never unshifted) raw chunks when a decoder is
      // attached and the caller claimed no explicit encoding.
      if (state.decoder && !addToFront && !encoding)
        chunk = state.decoder.write(chunk);

      // Account for the new data, then queue it.
      state.length += state.objectMode ? 1 : chunk.length;
      if (addToFront) {
        state.buffer.unshift(chunk);
      } else {
        state.reading = false;
        state.buffer.push(chunk);
      }

      if (state.needReadable)
        emitReadable$2(stream);

      maybeReadMore$2(stream, state);
    }
  } else if (!addToFront) {
    // Zero-length chunk in non-object mode: just clear the flag.
    state.reading = false;
  }

  return needMoreData$2(state);
}
// May more data be pushed? Yes while below the high water mark, while
// the buffer is empty (covers hwm=0, e.g. the repl), or while a
// 'readable' consumer is explicitly waiting (needReadable was set by a
// read(largeNumber) triggered from a readable event). Never after EOF.
function needMoreData$2(state) {
  if (state.ended)
    return false;
  return state.needReadable ||
         state.length < state.highWaterMark ||
         state.length === 0;
}
// Backwards compatibility: attach a string decoder after construction
// so read() yields strings in the given encoding instead of Buffers.
Readable$3.prototype.setEncoding = function(enc) {
  if (!StringDecoder$2)
    StringDecoder$2 = string_decoder$2.StringDecoder;
  var state = this._readableState;
  state.decoder = new StringDecoder$2(enc);
  state.encoding = enc;
};
// Cap for high-water-mark growth: 0x800000 bytes (8MB).
// (The upstream comment said "128MB", but 0x800000 is 8MB.)
var MAX_HWM$2 = 0x800000;

// Round n up to the next power of two, clamped at MAX_HWM$2, so that
// repeated hwm bumps don't grow the mark in tiny increments.
function roundUpToNextPowerOf2$2(n) {
  if (n >= MAX_HWM$2)
    return MAX_HWM$2;

  // Bit-smearing trick: copy the top set bit into every lower position,
  // then add one to land exactly on a power of two.
  n--;
  for (var shift = 1; shift < 32; shift <<= 1)
    n |= n >> shift;
  return n + 1;
}
// Decide how many bytes (or objects) a read(n) call should return,
// possibly raising the high water mark or flagging needReadable as a
// side effect.
function howMuchToRead$2(n, state) {
  // Ended and drained: nothing left.
  if (state.length === 0 && state.ended)
    return 0;

  // Object mode: read(0) reads nothing, anything else reads one object.
  if (state.objectMode)
    return n === 0 ? 0 : 1;

  if (n === null || isNaN(n)) {
    // No size requested. Flowing mode hands out one buffered chunk at a
    // time; otherwise return everything we have.
    if (state.flowing && state.buffer.length)
      return state.buffer[0].length;
    return state.length;
  }

  if (n <= 0)
    return 0;

  // Asking for more than the buffer target: raise the water mark to the
  // next power of two so it doesn't creep up in tiny amounts.
  if (n > state.highWaterMark)
    state.highWaterMark = roundUpToNextPowerOf2$2(n);

  // Not enough buffered yet: wait for more unless we've ended, in which
  // case hand back whatever remains.
  if (n > state.length) {
    if (state.ended)
      return state.length;
    state.needReadable = true;
    return 0;
  }

  return n;
}
// you can override either this method, or the async _read(n) below.
//
// Pull up to n bytes (or objects) out of the buffer, triggering _read
// as needed to refill it, and handling the end-of-stream bookkeeping.
// Returns the data, or null when nothing can be returned right now.
Readable$3.prototype.read = function(n) {
  var state = this._readableState;
  // A read happened, so 'end' may now legitimately be emitted later
  // (see calledRead / endReadable$2).
  state.calledRead = true;
  var nOrig = n;
  var ret;

  if (typeof n !== 'number' || n > 0)
    state.emittedReadable = false;

  // if we're doing read(0) to trigger a readable event, but we
  // already have a bunch of data in the buffer, then just trigger
  // the 'readable' event and move on.
  if (n === 0 &&
      state.needReadable &&
      (state.length >= state.highWaterMark || state.ended)) {
    emitReadable$2(this);
    return null;
  }

  n = howMuchToRead$2(n, state);

  // if we've ended, and we're now clear, then finish it up.
  if (n === 0 && state.ended) {
    ret = null;

    // In cases where the decoder did not receive enough data
    // to produce a full chunk, then immediately received an
    // EOF, state.buffer will contain [<Buffer >, <Buffer 00 ...>].
    // howMuchToRead will see this and coerce the amount to
    // read to zero (because it's looking at the length of the
    // first <Buffer > in state.buffer), and we'll end up here.
    //
    // This can only happen via state.decoder -- no other venue
    // exists for pushing a zero-length chunk into state.buffer
    // and triggering this behavior. In this case, we return our
    // remaining data and end the stream, if appropriate.
    if (state.length > 0 && state.decoder) {
      ret = fromList$2(n, state);
      state.length -= ret.length;
    }

    if (state.length === 0)
      endReadable$2(this);

    return ret;
  }

  // All the actual chunk generation logic needs to be
  // *below* the call to _read.  The reason is that in certain
  // synthetic stream cases, such as passthrough streams, _read
  // may be a completely synchronous operation which may change
  // the state of the read buffer, providing enough data when
  // before there was *not* enough.
  //
  // So, the steps are:
  // 1. Figure out what the state of things will be after we do
  // a read from the buffer.
  //
  // 2. If that resulting state will trigger a _read, then call _read.
  // Note that this may be asynchronous, or synchronous.  Yes, it is
  // deeply ugly to write APIs this way, but that still doesn't mean
  // that the Readable class should behave improperly, as streams are
  // designed to be sync/async agnostic.
  // Take note if the _read call is sync or async (ie, if the read call
  // has returned yet), so that we know whether or not it's safe to emit
  // 'readable' etc.
  //
  // 3. Actually pull the requested chunks out of the buffer and return.

  // if we need a readable event, then we need to do some reading.
  var doRead = state.needReadable;

  // if we currently have less than the highWaterMark, then also read some
  if (state.length - n <= state.highWaterMark)
    doRead = true;

  // however, if we've ended, then there's no point, and if we're already
  // reading, then it's unnecessary.
  if (state.ended || state.reading)
    doRead = false;

  if (doRead) {
    state.reading = true;
    state.sync = true;
    // if the length is currently zero, then we *need* a readable event.
    if (state.length === 0)
      state.needReadable = true;
    // call internal read method
    this._read(state.highWaterMark);
    state.sync = false;
  }

  // If _read called its callback synchronously, then `reading`
  // will be false, and we need to re-evaluate how much data we
  // can return to the user.
  if (doRead && !state.reading)
    n = howMuchToRead$2(nOrig, state);

  if (n > 0)
    ret = fromList$2(n, state);
  else
    ret = null;

  if (ret === null) {
    state.needReadable = true;
    n = 0;
  }

  state.length -= n;

  // If we have nothing in the buffer, then we want to know
  // as soon as we *do* get something into the buffer.
  if (state.length === 0 && !state.ended)
    state.needReadable = true;

  // If we happened to read() exactly the remaining amount in the
  // buffer, and the EOF has been seen at this point, then make sure
  // that we emit 'end' on the very next tick.
  if (state.ended && !state.endEmitted && state.length === 0)
    endReadable$2(this);

  return ret;
};
// Validate a chunk for a non-object-mode stream. Returns a TypeError to
// be emitted for invalid chunks, or null when the chunk is acceptable
// (Buffer, string, null, undefined, or anything in object mode).
function chunkInvalid$2(state, chunk) {
  var er = null;
  var acceptable = Buffer$6.isBuffer(chunk) ||
                   typeof chunk === 'string' ||
                   chunk === null ||
                   chunk === undefined ||
                   state.objectMode;
  if (!acceptable)
    er = new TypeError('Invalid non-string/buffer chunk');
  return er;
}
// Handle the EOF signal (push(null)): flush any partial character held
// by the decoder, mark the stream ended, and announce remaining data or
// finish up immediately.
function onEofChunk$2(stream, state) {
  if (state.decoder && !state.ended) {
    var tail = state.decoder.end();
    if (tail && tail.length) {
      state.buffer.push(tail);
      state.length += state.objectMode ? 1 : tail.length;
    }
  }

  state.ended = true;

  // Leftover data: emit 'readable' so it gets drained; otherwise go
  // straight to the 'end' bookkeeping.
  if (state.length > 0)
    emitReadable$2(stream);
  else
    endReadable$2(stream);
}
// Emit 'readable' at most once per drain cycle. In sync mode the
// emission is deferred a tick — emitting inline could recurse into
// read() and overflow the stack (a nextTick recursion warning is the
// lesser evil).
function emitReadable$2(stream) {
  var state = stream._readableState;
  state.needReadable = false;

  if (state.emittedReadable)
    return;
  state.emittedReadable = true;

  if (state.sync) {
    process.nextTick(function() {
      emitReadable_$2(stream);
    });
  } else {
    emitReadable_$2(stream);
  }
}
// Actually fire the 'readable' event (deferred or immediate, see
// emitReadable$2 above).
function emitReadable_$2(stream) {
  stream.emit('readable');
}
// After data was consumed, schedule a speculative refill pass (at most
// one per tick) that keeps calling read(0) until the buffer reaches the
// high water mark — unless we're ended, reading, or flowing.
function maybeReadMore$2(stream, state) {
  if (state.readingMore)
    return;
  state.readingMore = true;
  process.nextTick(function() {
    maybeReadMore_$2(stream, state);
  });
}
// The deferred half of maybeReadMore$2: issue read(0) calls until the
// buffer reaches the high water mark or a read produces no new data
// (so we never spin).
function maybeReadMore_$2(stream, state) {
  var before = state.length;
  while (!state.reading && !state.flowing && !state.ended &&
         state.length < state.highWaterMark) {
    stream.read(0);
    if (state.length === before)
      break; // didn't get any data, stop spinning.
    before = state.length;
  }
  state.readingMore = false;
}
// abstract method. to be overridden in specific implementation classes.
// call cb(er, data) where data is <= n in length.
// for virtual (non-string, non-buffer) streams, "length" is somewhat
// arbitrary, and perhaps not very meaningful.
Readable$3.prototype._read = function(n) {
  // Base-class stub: reading without overriding _read is an error.
  this.emit('error', new Error('not implemented'));
};
// Wire this readable up to a writable destination: register the dest,
// install end/error/drain/unpipe/close/finish handlers, and start the
// flow loop. Returns dest so pipes can be chained.
Readable$3.prototype.pipe = function(dest, pipeOpts) {
  var src = this;
  var state = this._readableState;

  // pipes goes null -> single dest -> array of dests as pipes are added.
  switch (state.pipesCount) {
    case 0:
      state.pipes = dest;
      break;
    case 1:
      state.pipes = [state.pipes, dest];
      break;
    default:
      state.pipes.push(dest);
      break;
  }
  state.pipesCount += 1;

  // end() the destination when the source ends, unless opted out via
  // pipeOpts.end or the dest is process stdout/stderr (never end those).
  var doEnd = (!pipeOpts || pipeOpts.end !== false) &&
              dest !== process.stdout &&
              dest !== process.stderr;

  var endFn = doEnd ? onend : cleanup;
  if (state.endEmitted)
    process.nextTick(endFn);
  else
    src.once('end', endFn);

  dest.on('unpipe', onunpipe);
  function onunpipe(readable) {
    if (readable !== src) return;
    cleanup();
  }

  function onend() {
    dest.end();
  }

  // when the dest drains, it reduces the awaitDrain counter
  // on the source.  This would be more elegant with a .once()
  // handler in flow(), but adding and removing repeatedly is
  // too slow.
  var ondrain = pipeOnDrain$2(src);
  dest.on('drain', ondrain);

  function cleanup() {
    // cleanup event handlers once the pipe is broken
    dest.removeListener('close', onclose);
    dest.removeListener('finish', onfinish);
    dest.removeListener('drain', ondrain);
    dest.removeListener('error', onerror);
    dest.removeListener('unpipe', onunpipe);
    src.removeListener('end', onend);
    src.removeListener('end', cleanup);

    // if the reader is waiting for a drain event from this
    // specific writer, then it would cause it to never start
    // flowing again.
    // So, if this is awaiting a drain, then we just call it now.
    // If we don't know, then assume that we are waiting for one.
    if (!dest._writableState || dest._writableState.needDrain)
      ondrain();
  }

  // if the dest has an error, then stop piping into it.
  // however, don't suppress the throwing behavior for this.
  function onerror(er) {
    unpipe();
    dest.removeListener('error', onerror);
    if (EE$2.listenerCount(dest, 'error') === 0)
      dest.emit('error', er);
  }
  // This is a brutally ugly hack to make sure that our error handler
  // is attached before any userland ones.  NEVER DO THIS.
  if (!dest._events || !dest._events.error)
    dest.on('error', onerror);
  else if (isarray$2(dest._events.error))
    dest._events.error.unshift(onerror);
  else
    dest._events.error = [onerror, dest._events.error];

  // Both close and finish should trigger unpipe, but only once.
  function onclose() {
    dest.removeListener('finish', onfinish);
    unpipe();
  }
  dest.once('close', onclose);
  function onfinish() {
    dest.removeListener('close', onclose);
    unpipe();
  }
  dest.once('finish', onfinish);

  function unpipe() {
    src.unpipe(dest);
  }

  // tell the dest that it's being piped to
  dest.emit('pipe', src);

  // start the flow if it hasn't been started already.
  if (!state.flowing) {
    // the handler that waits for readable events after all
    // the data gets sucked out in flow.
    // This would be easier to follow with a .once() handler
    // in flow(), but that is too slow.
    this.on('readable', pipeOnReadable$2);

    state.flowing = true;
    process.nextTick(function() {
      flow$2(src);
    });
  }

  return dest;
};
// Build the 'drain' handler installed by pipe(): each destination drain
// decrements the source's awaitDrain counter, and flow resumes once
// every blocked destination has drained.
function pipeOnDrain$2(src) {
  return function() {
    var state = src._readableState;
    state.awaitDrain -= 1;
    if (state.awaitDrain === 0)
      flow$2(src);
  };
}
// Pump data from src into every piped destination until a destination
// needs a drain, the buffer runs dry, or all pipes are removed.
function flow$2(src) {
  var state = src._readableState;
  var chunk;
  state.awaitDrain = 0;

  // Write the current chunk to one destination, counting destinations
  // whose write() returned false (they owe us a 'drain' event).
  function write(dest, i, list) {
    if (dest.write(chunk) === false) {
      state.awaitDrain++;
    }
  }

  while (state.pipesCount && null !== (chunk = src.read())) {
    if (state.pipesCount === 1)
      write(state.pipes);
    else
      forEach$4(state.pipes, write);

    src.emit('data', chunk);

    // Someone needs a drain: stop until pipeOnDrain restarts us.
    if (state.awaitDrain > 0)
      return;
  }

  // Every destination was unpiped (before or during the loop): stop
  // flowing, and fall back to old-mode 'data' events if anyone listens.
  // NB: this is a pretty rare edge case.
  if (state.pipesCount === 0) {
    state.flowing = false;
    if (EE$2.listenerCount(src, 'data') > 0)
      emitDataEvents$2(src);
    return;
  }

  // No drain needed — we simply ran out of data. Restart on the next
  // 'readable' event (see pipeOnReadable$2).
  state.ranOut = true;
}
// 'readable' handler installed by pipe(): restart the flow loop if the
// previous pass stopped because the buffer ran dry.
function pipeOnReadable$2() {
  var state = this._readableState;
  if (!state.ranOut)
    return;
  state.ranOut = false;
  flow$2(this);
}
// Detach one destination (or, with no argument, all destinations),
// emitting 'unpipe' on each removed dest. Returns `this` for chaining.
Readable$3.prototype.unpipe = function(dest) {
  var state = this._readableState;

  // Not piping anywhere: nothing to do.
  if (state.pipesCount === 0)
    return this;

  // Single destination — the common case.
  if (state.pipesCount === 1) {
    // A destination was named, but it isn't ours.
    if (dest && dest !== state.pipes)
      return this;
    if (!dest)
      dest = state.pipes;

    state.pipes = null;
    state.pipesCount = 0;
    this.removeListener('readable', pipeOnReadable$2);
    state.flowing = false;
    if (dest)
      dest.emit('unpipe', this);
    return this;
  }

  // Multiple destinations, none named: detach them all.
  if (!dest) {
    var dests = state.pipes;
    var len = state.pipesCount;
    state.pipes = null;
    state.pipesCount = 0;
    this.removeListener('readable', pipeOnReadable$2);
    state.flowing = false;

    for (var i = 0; i < len; i++)
      dests[i].emit('unpipe', this);
    return this;
  }

  // Multiple destinations, one named: find and remove just that one.
  var idx = indexOf$2(state.pipes, dest);
  if (idx === -1)
    return this;

  state.pipes.splice(idx, 1);
  state.pipesCount -= 1;
  // Collapse back to the single-destination representation.
  if (state.pipesCount === 1)
    state.pipes = state.pipes[0];

  dest.emit('unpipe', this);
  return this;
};
// Listener installation hook: attaching 'data' or 'readable' listeners
// kicks the stream into the right mode so they eventually get fed.
Readable$3.prototype.on = function(ev, fn) {
  var res = stream.prototype.on.call(this, ev, fn);

  // A 'data' handler on a non-flowing stream switches it to old-style
  // flowing ('data' event) mode.
  if (ev === 'data' && !this._readableState.flowing)
    emitDataEvents$2(this);

  if (ev === 'readable' && this.readable) {
    var state = this._readableState;
    if (!state.readableListening) {
      state.readableListening = true;
      state.emittedReadable = false;
      state.needReadable = true;
      if (!state.reading) {
        this.read(0); // prime the pump
      } else if (state.length) {
        emitReadable$2(this);
      }
    }
  }

  return res;
};
Readable$3.prototype.addListener = Readable$3.prototype.on;
// pause() and resume() are remnants of the legacy readable stream API
// If the user uses them, then switch into old mode.
Readable$3.prototype.resume = function() {
  // Converting to old mode also starts the flow of 'data' events.
  emitDataEvents$2(this);
  this.read(0);
  this.emit('resume');
};
Readable$3.prototype.pause = function() {
  // startPaused=true: convert to old mode but begin in a paused state.
  emitDataEvents$2(this, true);
  this.emit('pause');
};
// Convert this stream to old (pre-streams2) push style: 'data' events
// fire as data becomes available and pause()/resume() gate them. The
// conversion is one-way — once flowing, we cannot go back.
function emitDataEvents$2(stream$1, startPaused) {
  var state = stream$1._readableState;

  if (state.flowing) {
    // https://github.com/isaacs/readable-stream/issues/16
    throw new Error('Cannot switch to old mode now.');
  }

  // Closure state shared by the handlers installed below.
  var paused = startPaused || false;
  var readable = false;

  // convert to an old-style stream.
  stream$1.readable = true;
  stream$1.pipe = stream.prototype.pipe;
  stream$1.on = stream$1.addListener = stream.prototype.on;

  stream$1.on('readable', function() {
    readable = true;

    // Drain the buffer into 'data' events until paused or empty.
    var c;
    while (!paused && (null !== (c = stream$1.read())))
      stream$1.emit('data', c);

    if (c === null) {
      readable = false;
      stream$1._readableState.needReadable = true;
    }
  });

  stream$1.pause = function() {
    paused = true;
    this.emit('pause');
  };

  stream$1.resume = function() {
    paused = false;
    // Re-run the drain loop (deferred) or prime the pump with read(0).
    if (readable)
      process.nextTick(function() {
        stream$1.emit('readable');
      });
    else
      this.read(0);
    this.emit('resume');
  };

  // now make it start, just in case it hadn't already.
  stream$1.emit('readable');
}
// wrap an old-style stream as the async data source.
// This is *not* part of the readable stream interface.
// It is an ugly unfortunate mess of history.
Readable$3.prototype.wrap = function(stream) {
  var state = this._readableState;
  var paused = false;

  var self = this;

  // Forward EOF, flushing any partial character held by the decoder.
  stream.on('end', function() {
    if (state.decoder && !state.ended) {
      var chunk = state.decoder.end();
      if (chunk && chunk.length)
        self.push(chunk);
    }

    self.push(null);
  });

  stream.on('data', function(chunk) {
    if (state.decoder)
      chunk = state.decoder.write(chunk);

    // don't skip over falsy values in objectMode
    //if (state.objectMode && util.isNullOrUndefined(chunk))
    if (state.objectMode && (chunk === null || chunk === undefined))
      return;
    else if (!state.objectMode && (!chunk || !chunk.length))
      return;

    // Back-pressure: pause the wrapped stream when push() says stop.
    var ret = self.push(chunk);
    if (!ret) {
      paused = true;
      stream.pause();
    }
  });

  // proxy all the other methods.
  // important when wrapping filters and duplexes.
  for (var i in stream) {
    if (typeof stream[i] === 'function' &&
        typeof this[i] === 'undefined') {
      // IIFE captures the current method name for the proxy closure.
      this[i] = function(method) { return function() {
        return stream[method].apply(stream, arguments);
      }}(i);
    }
  }

  // proxy certain important events.
  var events = ['error', 'close', 'destroy', 'pause', 'resume'];
  forEach$4(events, function(ev) {
    stream.on(ev, self.emit.bind(self, ev));
  });

  // when we try to consume some more bytes, simply unpause the
  // underlying stream.
  self._read = function(n) {
    if (paused) {
      paused = false;
      stream.resume();
    }
  };

  return self;
};
// exposed for testing purposes only.
// (fromList$2 is a hoisted function declaration, so this forward
// reference is safe.)
Readable$3._fromList = fromList$2;
// Pluck n bytes (or one object) off the buffered list. `state.length`
// is the combined length of everything in the list; the caller is
// responsible for decrementing it afterwards.
function fromList$2(n, state) {
  var list = state.buffer;
  var length = state.length;
  var stringMode = !!state.decoder;   // buffered items are strings
  var objectMode = !!state.objectMode;
  var ret;

  // Nothing in the list — definitely empty.
  if (list.length === 0)
    return null;

  if (length === 0) {
    ret = null;
  } else if (objectMode) {
    // Object mode hands out exactly one buffered item.
    ret = list.shift();
  } else if (!n || n >= length) {
    // Take everything and truncate the list.
    ret = stringMode ? list.join('') : Buffer$6.concat(list, length);
    list.length = 0;
  } else if (n < list[0].length) {
    // The first item alone covers the request; slice works identically
    // for buffers and strings.
    var head = list[0];
    ret = head.slice(0, n);
    list[0] = head.slice(n);
  } else if (n === list[0].length) {
    // First item is an exact match.
    ret = list.shift();
  } else {
    // The request spans multiple buffered items: accumulate pieces.
    ret = stringMode ? '' : new Buffer$6(n);
    var copied = 0;
    for (var i = 0, l = list.length; i < l && copied < n; i++) {
      var item = list[0];
      var take = Math.min(n - copied, item.length);

      if (stringMode)
        ret += item.slice(0, take);
      else
        item.copy(ret, copied, 0, take);

      if (take < item.length)
        list[0] = item.slice(take);
      else
        list.shift();

      copied += take;
    }
  }

  return ret;
}
// Finish the stream: once the consumer has actually read (calledRead)
// and the buffer is empty, emit 'end' on the next tick — unless a
// last-moment unshift() put data back, which cancels the emission.
function endReadable$2(stream) {
  var state = stream._readableState;

  // Reaching here with bytes still buffered is a library bug.
  if (state.length > 0)
    throw new Error('endReadable called on non-empty stream');

  if (!state.endEmitted && state.calledRead) {
    state.ended = true;
    process.nextTick(function() {
      // Check that we didn't get one last unshift.
      if (!state.endEmitted && state.length === 0) {
        state.endEmitted = true;
        stream.readable = false;
        stream.emit('end');
      }
    });
  }
}
// Minimal Array#forEach substitute: calls f(value, index) for each
// element. The length is captured up-front, matching the original.
function forEach$4 (xs, f) {
  var len = xs.length;
  for (var idx = 0; idx < len; idx++) {
    f(xs[idx], idx);
  }
}
// Minimal Array#indexOf substitute using strict equality; returns -1
// when x is not found.
function indexOf$2 (xs, x) {
  var len = xs.length;
  for (var idx = 0; idx < len; idx++) {
    if (xs[idx] === x) return idx;
  }
  return -1;
}
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a duplex stream is just a stream that is both readable and writable.
// Since JS doesn't have multiple prototypal inheritance, this class
// prototypally inherits from Readable, and then parasitically from
// Writable.
var _stream_duplex$2 = Duplex$2;
/*<replacement>*/
// Object.keys fallback for very old engines.
var objectKeys$3 = Object.keys || function (obj) {
  var keys = [];
  for (var key in obj) keys.push(key);
  return keys;
};
/*</replacement>*/
/*<replacement>*/
util.inherits = inherits_browser;
/*</replacement>*/
util.inherits(Duplex$2, _stream_readable$2);
// Copy every Writable prototype method Readable didn't already provide
// (the "parasitic" half of the inheritance).
forEach$5(objectKeys$3(_stream_writable$2.prototype), function(method) {
  if (!Duplex$2.prototype[method])
    Duplex$2.prototype[method] = _stream_writable$2.prototype[method];
});
// Duplex stream constructor: initializes both the readable and the
// writable halves, honoring options to disable either side.
function Duplex$2(options) {
  if (!(this instanceof Duplex$2))
    return new Duplex$2(options);

  _stream_readable$2.call(this, options);
  _stream_writable$2.call(this, options);

  var opts = options || {};
  if (opts.readable === false)
    this.readable = false;
  if (opts.writable === false)
    this.writable = false;

  // Half-open (readable ended, writable still going) is allowed unless
  // explicitly disabled.
  this.allowHalfOpen = opts.allowHalfOpen !== false;

  this.once('end', onend$2);
}
// The no-half-open enforcer: when half-open is disallowed and the
// writable side hasn't ended yet, end it on the next tick — so writes
// issued in the current tick still succeed.
function onend$2() {
  if (this.allowHalfOpen || this._writableState.ended)
    return;
  process.nextTick(this.end.bind(this));
}
// Minimal Array#forEach substitute (duplicate of forEach$4, kept by the
// bundler because each source module carried its own copy).
function forEach$5 (xs, f) {
  var len = xs.length;
  for (var idx = 0; idx < len; idx++) {
    f(xs[idx], idx);
  }
}
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// A bit simpler than readable streams.
// Implement an async ._write(chunk, cb), and it'll handle all
// the drain event emission and buffering.
var _stream_writable$2 = Writable$2;
/*<replacement>*/
var Buffer$7 = buffer.Buffer;
/*</replacement>*/
Writable$2.WritableState = WritableState$2;
/*<replacement>*/
util.inherits = inherits_browser;
/*</replacement>*/
util.inherits(Writable$2, stream);
// A single queued write request: the chunk, its encoding, and the user
// callback to fire once the write completes.
function WriteReq$2(data, enc, done) {
  this.chunk = data;
  this.encoding = enc;
  this.callback = done;
}
// Per-stream writable-side state bag.
function WritableState$2(options, stream) {
  options = options || {};

  // write() starts returning false at this buffered size. Zero is valid
  // ("always return false unless flushed immediately"); default 16KB.
  var hwm = options.highWaterMark;
  this.highWaterMark = (hwm || hwm === 0) ? hwm : 16 * 1024;

  // Object mode: the buffer holds objects; watermarks count objects,
  // not bytes.
  this.objectMode = !!options.objectMode;

  this.highWaterMark = ~~this.highWaterMark; // force to an integer

  this.needDrain = false;
  this.ending = false;    // end() has been called
  this.ended = false;     // end() has returned
  this.finished = false;  // 'finish' has been emitted

  // Decode strings to Buffers before _write unless explicitly disabled
  // (lets some node-core streams optimize string handling).
  this.decodeStrings = options.decodeStrings !== false;

  // Default string encoding: 'binary' historically for crypto, 'utf8'
  // for everything else — hence configurable.
  this.defaultEncoding = options.defaultEncoding || 'utf8';

  // Not an actual buffer, but a measurement of how much is waiting to
  // reach the underlying socket or file.
  this.length = 0;

  // A _write is currently in flight.
  this.writing = false;

  // True while the onwrite cb may still be invoked synchronously; set
  // up-front so nothing "later" happens before the first write call.
  this.sync = true;

  // True while draining previously buffered writes, to avoid an
  // overlapped onwrite in the same tick.
  this.bufferProcessing = false;

  // The callback handed to _write(chunk, cb).
  this.onwrite = function(er) {
    onwrite$2(stream, er);
  };

  this.writecb = null;  // the user's write(chunk, encoding, cb) callback
  this.writelen = 0;    // size of the write currently in _write

  this.buffer = [];

  // Set once an error has been emitted, so it is not thrown again.
  this.errorEmitted = false;
}
// Writable stream constructor. May be invoked without `new`; Duplex
// instances also run through here even though they are not instanceof
// Writable$2 (they inherit from Readable instead).
function Writable$2(options) {
  var DuplexCtor = _stream_duplex$2;
  if (!(this instanceof Writable$2) && !(this instanceof DuplexCtor)) {
    return new Writable$2(options);
  }
  this._writableState = new WritableState$2(options, this);
  this.writable = true; // legacy flag
  stream.call(this);
}
// A pure Writable is a sink: piping FROM it makes no sense, so any
// attempt raises an error on the stream instead of silently misbehaving.
Writable$2.prototype.pipe = function() {
  var err = new Error('Cannot pipe. Not readable.');
  this.emit('error', err);
};
// Reject a write() that arrives after end(): emit 'error' immediately and
// fail the user's write callback on the next tick.
function writeAfterEnd$2(stream, state, cb) {
  var err = new Error('write after end');
  // TODO: defer error events consistently everywhere, not just the cb
  stream.emit('error', err);
  process.nextTick(function () {
    cb(err);
  });
}
// A chunk is valid when it is a Buffer, a string, null/undefined, or when
// the stream is in objectMode (where chunks all count as length 1 and may
// be anything). Otherwise emit a TypeError and fail the callback on the
// next tick, returning false so the caller skips the write.
function validChunk$2(stream, state, chunk, cb) {
  if (state.objectMode ||
      chunk === null ||
      chunk === undefined ||
      typeof chunk === 'string' ||
      Buffer$7.isBuffer(chunk)) {
    return true;
  }
  var er = new TypeError('Invalid non-string/buffer chunk');
  stream.emit('error', er);
  process.nextTick(function () {
    cb(er);
  });
  return false;
}
// Writable#write(chunk[, encoding][, cb]) — validate, then queue or
// dispatch a chunk. Returns false once the buffered length reaches the
// high-water mark, signalling the caller to wait for 'drain'.
Writable$2.prototype.write = function(chunk, encoding, cb) {
var state = this._writableState;
var ret = false;
// write(chunk, cb) form: `encoding` was actually the callback.
if (typeof encoding === 'function') {
cb = encoding;
encoding = null;
}
// Buffers carry the sentinel encoding 'buffer'; strings fall back to the
// stream's configured default encoding.
if (Buffer$7.isBuffer(chunk))
encoding = 'buffer';
else if (!encoding)
encoding = state.defaultEncoding;
if (typeof cb !== 'function')
cb = function() {};
if (state.ended)
writeAfterEnd$2(this, state, cb); // writing after end() is an error
else if (validChunk$2(this, state, chunk, cb))
ret = writeOrBuffer$2(this, state, chunk, encoding, cb);
return ret;
};
// Convert a string chunk to a Buffer in the requested encoding, unless the
// stream is in objectMode or string decoding was explicitly disabled via
// decodeStrings === false. Non-string chunks pass through untouched.
function decodeChunk$2(state, chunk, encoding) {
  var shouldDecode = !state.objectMode &&
    state.decodeStrings !== false &&
    typeof chunk === 'string';
  return shouldDecode ? new Buffer$7(chunk, encoding) : chunk;
}
// if we're already writing something, then just put this
// in the queue, and wait our turn. Otherwise, call _write
// If we return false, then we need a drain event, so set that flag.
function writeOrBuffer$2(stream, state, chunk, encoding, cb) {
chunk = decodeChunk$2(state, chunk, encoding);
// decodeChunk may have turned a string into a Buffer.
if (Buffer$7.isBuffer(chunk))
encoding = 'buffer';
// In objectMode every chunk counts as 1 against the high-water mark.
var len = state.objectMode ? 1 : chunk.length;
state.length += len;
var ret = state.length < state.highWaterMark;
// we must ensure that previous needDrain will not be reset to false.
if (!ret)
state.needDrain = true;
if (state.writing)
state.buffer.push(new WriteReq$2(chunk, encoding, cb));
else
doWrite$2(stream, state, len, chunk, encoding, cb);
return ret;
}
// Dispatch one chunk to the underlying _write implementation, recording
// the in-flight write so onwrite() can account for it on completion.
function doWrite$2(stream, state, len, chunk, encoding, cb) {
  state.writing = true;
  state.writecb = cb;
  state.writelen = len;
  // `sync` is true only for the duration of a synchronous _write call;
  // onwrite() reads it to decide whether to defer the user callback.
  state.sync = true;
  stream._write(chunk, encoding, state.onwrite);
  state.sync = false;
}
// Deliver a write failure: invoke the user callback (deferred to the next
// tick when the failing _write completed synchronously, keeping callback
// ordering consistent), then flag and emit the error on the stream.
function onwriteError$2(stream, state, sync, er, cb) {
  if (sync) {
    process.nextTick(function () {
      cb(er);
    });
  } else {
    cb(er);
  }
  stream._writableState.errorEmitted = true;
  stream.emit('error', er);
}
// Clear the in-flight write book-keeping and release its bytes from the
// buffered-length accounting.
function onwriteStateUpdate$2(state) {
  state.length -= state.writelen;
  state.writelen = 0;
  state.writing = false;
  state.writecb = null;
}
// Completion handler for _write (reached via state.onwrite): updates the
// buffered-length accounting, drains queued writes, and defers the user
// callback to the next tick when _write completed synchronously.
function onwrite$2(stream, er) {
var state = stream._writableState;
// Capture sync/cb before onwriteStateUpdate clears the write slot.
var sync = state.sync;
var cb = state.writecb;
onwriteStateUpdate$2(state);
if (er)
onwriteError$2(stream, state, sync, er, cb);
else {
// Check if we're actually ready to finish, but don't emit yet
var finished = needFinish$2(stream, state);
// Flush anything that was queued while this write was in flight.
if (!finished && !state.bufferProcessing && state.buffer.length)
clearBuffer$2(stream, state);
if (sync) {
// Synchronous completion: defer so the caller sees write()'s return
// value before 'drain'/callbacks fire.
process.nextTick(function() {
afterWrite$2(stream, state, finished, cb);
});
} else {
afterWrite$2(stream, state, finished, cb);
}
}
}
// Runs after a successful _write: emit 'drain' if consumers are waiting,
// fire the user's write callback, and finish the stream when this was the
// last pending write.
function afterWrite$2(stream, state, finished, cb) {
  if (!finished) {
    onwriteDrain$2(stream, state);
  }
  cb();
  if (finished) {
    finishMaybe$2(stream, state);
  }
}
// Emit 'drain' once the buffered length has reached zero, but only when a
// prior write() returned false (needDrain) — consumers wait on this event
// to resume writing. (Callers invoke this from nextTick when needed so the
// 'false' return value is observed before 'drain' fires.)
function onwriteDrain$2(stream, state) {
  if (state.length !== 0 || !state.needDrain) {
    return;
  }
  state.needDrain = false;
  stream.emit('drain');
}
// if there's something in the buffer waiting, then process it
// Feeds queued WriteReq entries to doWrite one at a time; stops early if a
// write goes asynchronous (state.writing stays true after doWrite).
function clearBuffer$2(stream, state) {
state.bufferProcessing = true;
// NOTE: `c` is declared with `var` on purpose — it is read AFTER the loop
// to slice off the entries that were consumed.
for (var c = 0; c < state.buffer.length; c++) {
var entry = state.buffer[c];
var chunk = entry.chunk;
var encoding = entry.encoding;
var cb = entry.callback;
var len = state.objectMode ? 1 : chunk.length;
doWrite$2(stream, state, len, chunk, encoding, cb);
// if we didn't call the onwrite immediately, then
// it means that we need to wait until it does.
// also, that means that the chunk and cb are currently
// being processed, so move the buffer counter past them.
if (state.writing) {
c++;
break;
}
}
state.bufferProcessing = false;
if (c < state.buffer.length)
state.buffer = state.buffer.slice(c); // keep the unprocessed tail
else
state.buffer.length = 0; // everything was written
}
// Default _write: concrete writable implementations must override this;
// otherwise every write fails asynchronously via the callback.
Writable$2.prototype._write = function(chunk, encoding, cb) {
cb(new Error('not implemented'));
};
// Writable#end([chunk[, encoding]][, cb]) — optionally write one final
// chunk, then begin the shutdown sequence. Duplicate calls are ignored.
Writable$2.prototype.end = function(chunk, encoding, cb) {
  var state = this._writableState;
  // Normalise the optional-argument call forms.
  if (typeof chunk === 'function') {
    cb = chunk;
    chunk = null;
    encoding = null;
  } else if (typeof encoding === 'function') {
    cb = encoding;
    encoding = null;
  }
  if (chunk !== undefined && chunk !== null) {
    this.write(chunk, encoding);
  }
  // ignore unnecessary end() calls.
  if (!state.ending && !state.finished) {
    endWritable$2(this, state, cb);
  }
};
// The stream may emit 'finish' only once end() has been called, all
// buffered bytes have drained, nothing is in flight, and it has not
// already finished.
function needFinish$2(stream, state) {
  if (!state.ending) return false;
  if (state.length !== 0) return false;
  if (state.finished) return false;
  return !state.writing;
}
// Emit 'finish' exactly once, as soon as the stream qualifies.
// Returns whether the stream finished on this check.
function finishMaybe$2(stream, state) {
  var ready = needFinish$2(stream, state);
  if (ready) {
    state.finished = true;
    stream.emit('finish');
  }
  return ready;
}
// Transition into the ending state: attempt an immediate finish, and
// arrange for `cb` to run when 'finish' happens (now or later). Marks the
// stream fully ended so further writes are rejected.
function endWritable$2(stream, state, cb) {
  state.ending = true;
  finishMaybe$2(stream, state);
  if (cb) {
    if (state.finished) {
      process.nextTick(cb); // already done — still call back asynchronously
    } else {
      stream.once('finish', cb);
    }
  }
  state.ended = true;
}
// Module-level export alias for the standalone Writable implementation.
var writable = _stream_writable$2;
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a transform stream is a readable/writable stream where you do
// something with the data. Sometimes it's called a "filter",
// but that's not a great name for it, since that implies a thing where
// some bits pass through, and others are simply ignored. (That would
// be a valid example of a transform, of course.)
//
// While the output is causally related to the input, it's not a
// necessarily symmetric or synchronous transformation. For example,
// a zlib stream might take multiple plain-text writes(), and then
// emit a single compressed chunk some time in the future.
//
// Here's how this works:
//
// The Transform stream has all the aspects of the readable and writable
// stream classes. When you write(chunk), that calls _write(chunk,cb)
// internally, and returns false if there's a lot of pending writes
// buffered up. When you call read(), that calls _read(n) until
// there's enough pending readable data buffered up.
//
// In a transform stream, the written data is placed in a buffer. When
// _read(n) is called, it transforms the queued up data, calling the
// buffered _write cb's as it consumes chunks. If consuming a single
// written chunk would result in multiple output chunks, then the first
// outputted bit calls the readcb, and subsequent chunks just go into
// the read buffer, and will cause it to emit 'readable' if necessary.
//
// This way, back-pressure is actually determined by the reading side,
// since _read has to be called to start processing a new chunk. However,
// a pathological inflate type of transform can cause excessive buffering
// here. For example, imagine a stream where every byte of input is
// interpreted as an integer from 0-255, and then results in that many
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
// 1kb of data being output. In this case, you could write a very small
// amount of input, and end up with a very large amount of output. In
// such a pathological inflating mechanism, there'd be no way to tell
// the system to stop doing the transform. A single 4MB write could
// cause the system to run out of memory.
//
// However, even in such a pathological case, only a single written chunk
// would be consumed, and then the rest would wait (un-transformed) until
// the results of the previous transformed chunk were consumed.
// Module-level export for the Transform implementation defined below.
var _stream_transform$2 = Transform$2;
/*<replacement>*/
util.inherits = inherits_browser;
/*</replacement>*/
// Transform is a Duplex whose _write/_read are wired through _transform.
util.inherits(Transform$2, _stream_duplex$2);
// Per-instance transform book-keeping shared between _write and _read.
function TransformState$2(options, stream) {
  var self = stream;
  // Bound trampoline handed to _transform as its completion callback.
  this.afterTransform = function (er, data) {
    return afterTransform$2(self, er, data);
  };
  this.needTransform = false; // readable side asked for data we don't have
  this.transforming = false;  // a _transform call is currently in flight
  this.writecb = null;        // pending _write callback
  this.writechunk = null;     // chunk waiting to be transformed
}
// Completion callback for _transform: push any produced data to the
// readable side, settle the pending write callback, and pull more input if
// the readable side still wants data.
function afterTransform$2(stream, er, data) {
  var tState = stream._transformState;
  tState.transforming = false;
  var writeCb = tState.writecb;
  if (!writeCb) {
    return stream.emit('error', new Error('no writecb in Transform class'));
  }
  tState.writechunk = null;
  tState.writecb = null;
  if (data != null) { // skip only null/undefined; '' and 0 are pushed
    stream.push(data);
  }
  writeCb(er);
  var rState = stream._readableState;
  rState.reading = false;
  if (rState.needReadable || rState.length < rState.highWaterMark) {
    stream._read(rState.highWaterMark);
  }
}
// Transform stream constructor: a Duplex whose writable side feeds chunks
// through _transform into the readable side.
function Transform$2(options) {
  if (!(this instanceof Transform$2)) {
    return new Transform$2(options);
  }
  _stream_duplex$2.call(this, options);
  this._transformState = new TransformState$2(options, this);
  var self = this;
  // Ask for a 'readable' event as soon as data has been transformed, and
  // drop the sync guard since _read is implemented by this class.
  this._readableState.needReadable = true;
  this._readableState.sync = false;
  // When the writable side finishes, flush out anything remaining.
  this.once('finish', function () {
    if (typeof this._flush === 'function') {
      this._flush(function (er) {
        done$2(self, er);
      });
    } else {
      done$2(self);
    }
  });
}
// Pushing data means the readable side is satisfied for now, so clear the
// pending-transform flag before delegating to the Duplex push.
Transform$2.prototype.push = function(chunk, encoding) {
this._transformState.needTransform = false;
return _stream_duplex$2.prototype.push.call(this, chunk, encoding);
};
// This is the part where you do stuff!
// override this function in implementation classes.
// 'chunk' is an input chunk.
//
// Call `push(newChunk)` to pass along transformed output
// to the readable side. You may call 'push' zero or more times.
//
// Call `cb(err)` when you are done with this chunk. If you pass
// an error, then that'll put the hurt on the whole operation. If you
// never call cb(), then you'll never get another chunk.
Transform$2.prototype._transform = function(chunk, encoding, cb) {
// Abstract method: subclasses must override this.
throw new Error('not implemented');
};
// Park exactly one written chunk (plus its callback and encoding) and kick
// the readable side if it is waiting; _read() performs the actual
// transform.
Transform$2.prototype._write = function (chunk, encoding, cb) {
  var ts = this._transformState;
  ts.writecb = cb;
  ts.writechunk = chunk;
  ts.writeencoding = encoding;
  if (ts.transforming) {
    return; // a transform is in flight; _read will pick this chunk up
  }
  var rs = this._readableState;
  if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) {
    this._read(rs.highWaterMark);
  }
};
// Doesn't matter what the args are here: _transform does all the work.
// Reaching this means the readable side wants more data — run the pending
// chunk through _transform, or remember that a transform is wanted so the
// next written chunk is processed immediately.
Transform$2.prototype._read = function (n) {
  var ts = this._transformState;
  var hasPending = ts.writechunk !== null && ts.writecb && !ts.transforming;
  if (!hasPending) {
    ts.needTransform = true;
    return;
  }
  ts.transforming = true;
  this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
};
// Finalize a Transform once the writable side has finished flushing:
// propagate any flush error, sanity-check that every written chunk was
// consumed, then signal EOF on the readable side.
function done$2(stream, er) {
  if (er) {
    return stream.emit('error', er);
  }
  // if there's nothing in the write buffer, then that means
  // that nothing more will ever be provided
  if (stream._writableState.length) {
    throw new Error('calling transform done when ws.length != 0');
  }
  if (stream._transformState.transforming) {
    throw new Error('calling transform done when still transforming');
  }
  return stream.push(null);
}
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a passthrough stream.
// basically just the most minimal sort of Transform stream.
// Every written chunk gets output as-is.
// Module-level export for the PassThrough implementation defined below.
var _stream_passthrough$2 = PassThrough$2;
/*<replacement>*/
util.inherits = inherits_browser;
/*</replacement>*/
// PassThrough is simply a Transform whose _transform is the identity.
util.inherits(PassThrough$2, _stream_transform$2);
// Minimal identity Transform: every written chunk is emitted unchanged.
function PassThrough$2(options) {
  if (!(this instanceof PassThrough$2)) {
    return new PassThrough$2(options);
  }
  _stream_transform$2.call(this, options);
}
// Identity transform: hand each chunk straight back to the readable side.
PassThrough$2.prototype._transform = function(chunk, encoding, cb) {
cb(null, chunk);
};
// Aggregate entry point mirroring the readable-stream package: the module
// itself is the Readable constructor, with the other stream classes
// attached as properties.
var readable$2 = createCommonjsModule(function (module, exports) {
// hack to fix a circular dependency issue when used with browserify
exports = module.exports = _stream_readable$2;
exports.Stream = stream;
exports.Readable = exports;
exports.Writable = _stream_writable$2;
exports.Duplex = _stream_duplex$2;
exports.Transform = _stream_transform$2;
exports.PassThrough = _stream_passthrough$2;
// Outside browsers, READABLE_STREAM=disable swaps back to core 'stream'.
if (!process.browser && process.env.READABLE_STREAM === 'disable') {
module.exports = stream;
}
});
// Flattened named exports of the readable-stream bundle above.
var readable_1$2 = readable$2.Stream;
var readable_2$2 = readable$2.Readable;
var readable_3$2 = readable$2.Writable;
var readable_4$2 = readable$2.Duplex;
var readable_5$2 = readable$2.Transform;
var readable_6$2 = readable$2.PassThrough;
// Alias used by fwd-stream's duplex() below.
var duplex = _stream_duplex$2;
// fwd-stream: streams that lazily "forward" to a backing stream supplied
// asynchronously via an init(callback) function. Reads/writes issued
// before the backing stream exists are parked and replayed once it shows
// up.
var fwdStream = createCommonjsModule(function (module, exports) {
// Zero-length sentinel written to open the writable before its source
// exists; it is filtered out in _write below.
// NOTE(review): `new Buffer(0)` is the deprecated Buffer constructor —
// kept as-is for this bundled environment; Buffer.alloc(0) is the modern
// equivalent.
var DUMMY = new Buffer(0);
var noop = function() {};
// Accept either an init(cb) function or a ready-made stream value.
var toFunction = function(fn) {
if (typeof fn === 'function') return fn;
return function(cb) {
cb(null, fn);
};
};
// Wire a readable shell `rs` up to the backing source once init resolves.
var onreadable = function(rs, init) {
var reading = false;   // a _read was requested before the source arrived
var destroyed = false; // destroy() was called before the source arrived
rs._read = function() {
reading = true;
};
rs.destroy = function() {
destroyed = true;
};
init(function(err, source) {
if (err) return rs.emit('error', err);
// Drain everything currently buffered in the source into rs.
var fwd = function() {
var data;
while ((data = source.read()) !== null) {
reading = false;
rs.push(data);
}
};
source.on('readable', function() {
if (reading) fwd();
});
source.on('end', function() {
fwd();
rs.push(null);
});
source.on('error', function(err) {
rs.emit('error', err);
});
source.on('close', function() {
fwd();
process.nextTick(function() {
rs.emit('close');
});
});
// Replace the parking stubs with real forwarding implementations.
rs._read = function() {
reading = true;
fwd();
};
rs.destroy = function() {
if (destroyed) return;
destroyed = true;
if (source.destroy) source.destroy();
};
// Replay a destroy() that happened while waiting for the source.
if (destroyed) {
destroyed = false;
rs.destroy();
return;
}
if (reading) fwd();
});
return rs;
};
// Wire a writable shell `ws` up to the backing sink once init resolves.
var onwritable = function(ws, init) {
var ready = noop;      // callback of the write parked before init resolved
var destroyed = false;
ws._write = function(data, enc, cb) {
ready = cb;
};
ws.destroy = function() {
destroyed = true;
};
// Prime the stream with the sentinel so a pending write slot exists.
ws.write(DUMMY);
init(function(err, source) {
if (err) return ws.emit('error', err);
source.on('close', function() {
ws.emit('close');
});
source.on('error', function(err) {
ws.emit('error', err);
});
ws._write = function(data, enc, cb) {
if (data === DUMMY) return cb(); // drop the priming sentinel
source.write(data, enc, cb);
};
var emit = ws.emit;
source.on('finish', function() {
emit.call(ws, 'finish');
});
ws.destroy = function() {
if (destroyed) return;
destroyed = true;
if (source.destroy) source.destroy();
};
// Intercept 'finish' so ending ws first ends the backing source; the
// real 'finish' is re-emitted from the source's own finish above.
ws.emit = function(name) {
if (name !== 'finish') return emit.apply(ws, arguments);
source.end();
};
if (destroyed) {
destroyed = false;
ws.destroy();
return;
}
ready();
});
return ws;
};
// Public factories; opts is optional in each.
exports.readable = function(opts, init) {
if (arguments.length === 1) return exports.readable(null, opts);
if (!opts) opts = {};
return onreadable(new readable$2(opts), toFunction(init));
};
exports.writable = function(opts, init) {
if (arguments.length === 1) return exports.writable(null, opts);
if (!opts) opts = {};
return onwritable(new writable(opts), toFunction(init));
};
exports.duplex = function(opts, initWritable, initReadable) {
if (arguments.length === 2) return exports.duplex(null, opts, initWritable);
if (!opts) opts = {};
var dupl = new duplex(opts);
onwritable(dupl, toFunction(initWritable));
onreadable(dupl, toFunction(initReadable));
return dupl;
};
});
// Flattened named exports of fwd-stream.
var fwdStream_1 = fwdStream.readable;
var fwdStream_2 = fwdStream.writable;
var fwdStream_3 = fwdStream.duplex;
// string-range helpers: build and test lexicographic key ranges.
var stringRange = createCommonjsModule(function (module, exports) {
// Force `obj` into a valid range descriptor: a string `s` becomes the
// prefix range {min: s, max: s + '\xff'}, null/undefined becomes the
// unbounded range {}, anything else passes through unchanged.
// BUGFIX: the original tested `typeof range` — the function being defined,
// which is always 'function' — instead of `typeof obj`, so string
// arguments were never expanded into a {min, max} pair and prefix() below
// produced an empty sub-range for string inputs.
var range = exports.range = function (obj) {
  return null == obj ? {} : 'string' === typeof obj ? {
    min: obj, max: obj + '\xff'
  } : obj;
};
// Turn `range` into a sub-range scoped under the `within` prefix.
// `term` is the terminator appended for the upper bound (default '\xff').
// RegExp/function ranges become an `inner` predicate applied to the key
// with the prefix stripped off.
var prefix = exports.prefix = function (range, within, term) {
  range = exports.range(range);
  var _range = {};
  term = term || '\xff';
  if (range instanceof RegExp || 'function' == typeof range) {
    _range.min = within;
    _range.max = within + term;
    _range.inner = function (k) {
      var j = k.substring(within.length);
      if (range.test)
        return range.test(j);
      return range(j);
    };
  }
  else if ('object' === typeof range) {
    _range.min = within + (range.min || range.start || '');
    // `term` is always truthy here, so the legacy '~' fallback is inert;
    // kept for fidelity with the original expression.
    _range.max = within + (range.max || range.end || (term || '~'));
    _range.reverse = !!range.reverse;
  }
  return _range;
};
// Return a predicate that checks whether a key falls inside `range`
// (string prefix, RegExp, {min,max}/{start,end} object, or function).
var checker = exports.checker = function (range) {
  if (!range) range = {};
  if ('string' === typeof range)
    return function (key) {
      return key.indexOf(range) == 0;
    };
  else if (range instanceof RegExp)
    return function (key) {
      return range.test(key);
    };
  else if ('object' === typeof range)
    return function (key) {
      var min = range.min || range.start;
      var max = range.max || range.end;
      // fixes keys passed as ints from sublevels
      key = String(key);
      return (
        !min || key >= min
      ) && (
        !max || key <= max
      ) && (
        !range.inner || (
          range.inner.test
            ? range.inner.test(key)
            : range.inner(key)
        )
      );
    };
  else if ('function' === typeof range)
    return range;
};
// Check whether a single key is within a range. (Local name fixes the
// original's `satifies` typo; the exported name was always `satisfies`.)
var satisfies = exports.satisfies = function (key, range) {
  return checker(range)(key);
};
});
// Flattened named exports of string-range.
var stringRange_1 = stringRange.range;
var stringRange_2 = stringRange.prefix;
var stringRange_3 = stringRange.checker;
var stringRange_4 = stringRange.satisfies;
// Bundled `clone` package: deep copy with circular-reference support.
var clone_1 = createCommonjsModule(function (module) {
function objectToString(o) {
return Object.prototype.toString.call(o);
}
// shim for Node's 'util' package
// DO NOT REMOVE THIS! It is required for compatibility with EnderJS (http://enderjs.com/).
var util = {
isArray: function (ar) {
return Array.isArray(ar) || (typeof ar === 'object' && objectToString(ar) === '[object Array]');
},
isDate: function (d) {
return typeof d === 'object' && objectToString(d) === '[object Date]';
},
isRegExp: function (re) {
return typeof re === 'object' && objectToString(re) === '[object RegExp]';
},
// Rebuild a regex's flag string from its boolean flag properties.
getRegExpFlags: function (re) {
var flags = '';
re.global && (flags += 'g');
re.ignoreCase && (flags += 'i');
re.multiline && (flags += 'm');
return flags;
}
};
module.exports = clone;
/**
* Clones (copies) an Object using deep copying.
*
* This function supports circular references by default, but if you are certain
* there are no circular references in your object, you can save some CPU time
* by calling clone(obj, false).
*
* Caution: if `circular` is false and `parent` contains circular references,
* your program may enter an infinite loop and crash.
*
* @param `parent` - the object to be cloned
* @param `circular` - set to true if the object to be cloned may contain
*    circular references. (optional - true by default)
* @param `depth` - set to a number if the object is only to be cloned to
*    a particular depth. (optional - defaults to Infinity)
* @param `prototype` - sets the prototype to be used when cloning an object.
*    (optional - defaults to parent prototype).
*/
function clone(parent, circular, depth, prototype) {
// maintain two arrays for circular references, where corresponding parents
// and children have the same index
var allParents = [];
var allChildren = [];
var useBuffer = typeof Buffer != 'undefined';
if (typeof circular == 'undefined')
circular = true;
if (typeof depth == 'undefined')
depth = Infinity;
// recurse this function so we don't reset allParents and allChildren
function _clone(parent, depth) {
// cloning null always returns null
if (parent === null)
return null;
// depth exhausted: return the value by reference, not a copy.
if (depth == 0)
return parent;
var child;
var proto;
// primitives (and functions) are returned as-is.
if (typeof parent != 'object') {
return parent;
}
if (util.isArray(parent)) {
child = [];
} else if (util.isRegExp(parent)) {
child = new RegExp(parent.source, util.getRegExpFlags(parent));
if (parent.lastIndex) child.lastIndex = parent.lastIndex;
} else if (util.isDate(parent)) {
child = new Date(parent.getTime());
} else if (useBuffer && Buffer.isBuffer(parent)) {
// NOTE(review): `new Buffer(n)` is the deprecated, zero-fill-free Buffer
// constructor — kept for this bundled environment; the copy below fills
// it completely, so no uninitialised bytes leak.
child = new Buffer(parent.length);
parent.copy(child);
// Buffers are copied wholesale; no per-property walk needed.
return child;
} else {
if (typeof prototype == 'undefined') {
proto = Object.getPrototypeOf(parent);
child = Object.create(proto);
}
else {
child = Object.create(prototype);
proto = prototype;
}
}
if (circular) {
// Already cloned this object on the current path: reuse the copy.
var index = allParents.indexOf(parent);
if (index != -1) {
return allChildren[index];
}
allParents.push(parent);
allChildren.push(child);
}
for (var i in parent) {
var attrs;
if (proto) {
attrs = Object.getOwnPropertyDescriptor(proto, i);
}
// Skip properties whose descriptor on the prototype lacks a setter
// (data properties or getter-only accessors): the child already
// inherits them and assigning could throw.
if (attrs && attrs.set == null) {
continue;
}
child[i] = _clone(parent[i], depth - 1);
}
return child;
}
return _clone(parent, depth);
}
/**
* Simple flat clone using prototype, accepts only objects, usefull for property
* override on FLAT configuration object (no nested props).
*
* USE WITH CAUTION! This may not behave as you wish if you do not know how this
* works.
*/
clone.clonePrototype = function(parent) {
if (parent === null)
return null;
var c = function () {};
c.prototype = parent;
return new c();
};
});
var levelFixRange =
  // Normalise a level-style options object: map {min,max} onto
  // {start,end}, ordering the two bounds lexicographically and honouring
  // the `reverse` flag. Works on a copy; never mutates the caller's
  // options.
  function fixRange(opts) {
    opts = clone_1(opts);
    var lo = opts.min || opts.start;
    var hi = opts.max || opts.end;
    var bounds = [lo, hi];
    if (lo != null && hi != null) {
      bounds.sort(); // default (string) sort — level keys are strings
    }
    if (opts.reverse) {
      bounds.reverse();
    }
    opts.start = bounds[0];
    opts.end = bounds[1];
    delete opts.min;
    delete opts.max;
    return opts;
  };
var is_1 = createCommonjsModule(function (module) {
/**!
* is
* the definitive JavaScript type testing library
*
* @copyright 2013 Enrico Marino
* @license MIT
*/
var objProto = Object.prototype;
var owns = objProto.hasOwnProperty;
var toString = objProto.toString;
var isActualNaN = function (value) {
return value !== value;
};
var NON_HOST_TYPES = {
"boolean": 1,
"number": 1,
"string": 1,
"undefined": 1
};
/**
* Expose `is`
*/
var is = module.exports = {};
/**
* Test general.
*/
/**
* is.type
* Test if `value` is a type of `type`.
*
* @param {Mixed} value value to test
* @param {String} type type
* @return {Boolean} true if `value` is a type of `type`, false otherwise
* @api public
*/
is.a =
is.type = function (value, type) {
return typeof value === type;
};
/**
* is.defined
* Test if `value` is defined.
*
* @param {Mixed} value value to test
* @return {Boolean} true if 'value' is defined, false otherwise
* @api public
*/
is.defined = function (value) {
return value !== undefined;
};
/**
* is.empty
* Test if `value` is empty.
*
* @param {Mixed} value value to test
* @return {Boolean} true if `value` is empty, false otherwise
* @api public
*/
is.empty = function (value) {
var type = toString.call(value);
var key;
if ('[object Array]' === type || '[object Arguments]' === type) {
return value.length === 0;
}
if ('[object Object]' === type) {
for (key in value) if (owns.call(value, key)) return false;
return true;
}
if ('[object String]' === type) {
return '' === value;
}
return false;
};
/**
* is.equal
* Test if `value` is equal to `other`.
*
* @param {Mixed} value value to test
* @param {Mixed} other value to compare with
* @return {Boolean} true if `value` is equal to `other`, false otherwise
*/
is.equal = function (value, other) {
var type = toString.call(value);
var key;
if (type !== toString.call(other)) {
return false;
}
if ('[object Object]' === type) {
for (key in value) {
if (!is.equal(value[key], other[key])) {
return false;
}
}
return true;
}
if ('[object Array]' === type) {
key = value.length;
if (key !== other.length) {
return false;
}
while (--key) {
if (!is.equal(value[key], other[key])) {
return false;
}
}
return true;
}
if ('[object Function]' === type) {
return value.prototype === other.prototype;
}
if ('[object Date]' === type) {
return value.getTime() === other.getTime();
}
return value === other;
};
/**
* is.hosted
* Test if `value` is hosted by `host`.
*
* @param {Mixed} value to test
* @param {Mixed} host host to test with
* @return {Boolean} true if `value` is hosted by `host`, false otherwise
* @api public
*/
is.hosted = function (value, host) {
var type = typeof host[value];
return type === 'object' ? !!host[value] : !NON_HOST_TYPES[type];
};
/**
* is.instance
* Test if `value` is an instance of `constructor`.
*
* @param {Mixed} value value to test
* @return {Boolean} true if `value` is an instance of `constructor`
* @api public
*/
is.instance = is['instanceof'] = function (value, constructor) {
return value instanceof constructor;
};
/**
* is.null
* Test if `value` is null.
*
* @param {Mixed} value value to test
* @return {Boolean} true if `value` is null, false otherwise
* @api public
*/
is['null'] = function (value) {
return value === null;
};
/**
* is.undefined
* Test if `value` is undefined.
*
* @param {Mixed} value value to test
* @return {Boolean} true if `value` is undefined, false otherwise
* @api public
*/
is.undefined = function (value) {
return value === undefined;
};
/**
* Test arguments.
*/
/**
* is.arguments
* Test if `value` is an arguments object.
*
* @param {Mixed} value value to test
* @return {Boolean} true if `value` is an arguments object, false otherwise
* @api public
*/
is.arguments = function (value) {
var isStandardArguments = '[object Arguments]' === toString.call(value);
var isOldArguments = !is.array(value) && is.arraylike(value) && is.object(value) && is.fn(value.callee);
return isStandardArguments || isOldArguments;
};
/**
* Test array.
*/
/**
* is.array
* Test if 'value' is an array.
*
* @param {Mixed} value value to test
* @return {Boolean} true if `value` is an array, false otherwise
* @api public
*/
is.array = function (value) {
return '[object Array]' === toString.call(value);
};
/**
* is.arguments.empty
* Test if `value` is an empty arguments object.
*
* @param {Mixed} value value to test
* @return {Boolean} true if `value` is an empty arguments object, false otherwise
* @api public
*/
is.arguments.empty = function (value) {
return is.arguments(value) && value.length === 0;
};
/**
* is.array.empty
* Test if `value` is an empty array.
*
* @param {Mixed} value value to test
* @return {Boolean} true if `value` is an empty array, false otherwise
* @api public
*/
is.array.empty = function (value) {
return is.array(value) && value.length === 0;
};
/**
* is.arraylike
* Test if `value` is an arraylike object.
*
* @param {Mixed} value value to test
* @return {Boolean} true if `value` is an arguments object, false otherwise
* @api public
*/
is.arraylike = function (value) {
return !!value && !is.boolean(value)
&& owns.call(value, 'length')
&& isFinite(value.length)
&& is.number(value.length)
&& value.length >= 0;
};
/**
* Test boolean.
*/
/**
* is.boolean
* Test if `value` is a boolean.
*
* @param {Mixed} value value to test
* @return {Boolean} true if `value` is a boolean, false otherwise
* @api public
*/
is.boolean = function (value) {
return '[object Boolean]' === toString.call(value);
};
/**
* is.false
* Test if `value` is false.
*
* @param {Mixed} value value to test
* @return {Boolean} true if `value` is false, false otherwise
* @api public
*/
is['false'] = function (value) {
return is.boolean(value) && (value === false || value.valueOf() === false);
};
/**
* is.true
* Test if `value` is true.
*
* @param {Mixed} value value to test
* @return {Boolean} true if `value` is true, false otherwise
* @api public
*/
is['true'] = function (value) {
return is.boolean(value) && (value === true || value.valueOf() === true);
};
/**
* Test date.
*/
/**
* is.date
* Test if `value` is a date.
*
* @param {Mixed} value value to test
* @return {Boolean} true if `value` is a date, false otherwise
* @api public
*/
is.date = function (value) {
return '[object Date]' === toString.call(value);
};
/**
* Test element.
*/
/**
* is.element
* Test if `value` is an html element.
*
* @param {Mixed} value value to test
* @return {Boolean} true if `value` is an HTML Element, false otherwise
* @api public
*/
is.element = function (value) {
return value !== undefined
&& typeof HTMLElement !== 'undefined'
&& value instanceof HTMLElement
&& value.nodeType === 1;
};
/**
* Test error.
*/
/**
* is.error
* Test if `value` is an error object.
*
* @param {Mixed} value value to test
* @return {Boolean} true if `value` is an error object, false otherwise
* @api public
*/
is.error = function (value) {
return '[object Error]' === toString.call(value);
};
/**
 * Test function.
 */
/**
 * is.fn / is.function (deprecated)
 * Test if `value` is a function.
 *
 * @param {Mixed} value value to test
 * @return {Boolean} true if `value` is a function, false otherwise
 * @api public
 */
is.fn = is['function'] = function (value) {
  // Special-case window.alert, whose [[Class]] is misreported in some
  // legacy browsers.
  if (typeof window !== 'undefined' && value === window.alert) return true;
  return toString.call(value) === '[object Function]';
};
/**
 * Test number.
 */
/**
 * is.number
 * Test if `value` is a number.
 *
 * @param {Mixed} value value to test
 * @return {Boolean} true if `value` is a number, false otherwise
 * @api public
 */
is.number = function (value) {
  return toString.call(value) === '[object Number]';
};
/**
 * is.infinite
 * Test if `value` is positive or negative infinity.
 *
 * @param {Mixed} value value to test
 * @return {Boolean} true if `value` is positive or negative Infinity, false otherwise
 * @api public
 */
is.infinite = function (value) {
  return value === -Infinity || value === Infinity;
};
/**
 * is.decimal
 * Test if `value` is a decimal number (finite, non-NaN, non-integral).
 *
 * @param {Mixed} value value to test
 * @return {Boolean} true if `value` is a decimal number, false otherwise
 * @api public
 */
is.decimal = function (value) {
  if (!is.number(value) || isActualNaN(value) || is.infinite(value)) return false;
  return value % 1 !== 0;
};
/**
 * is.divisibleBy
 * Test if `value` is divisible by `n`. Either operand being infinite
 * counts as divisible.
 *
 * @param {Number} value value to test
 * @param {Number} n dividend
 * @return {Boolean} true if `value` is divisible by `n`, false otherwise
 * @api public
 */
is.divisibleBy = function (value, n) {
  if (is.infinite(value) || is.infinite(n)) return true;
  var bothUsable = is.number(value) && !isActualNaN(value)
    && is.number(n) && !isActualNaN(n) && n !== 0;
  return bothUsable && value % n === 0;
};
/**
 * is.int
 * Test if `value` is an integer.
 *
 * @param value to test
 * @return {Boolean} true if `value` is an integer, false otherwise
 * @api public
 */
is.int = function (value) {
  if (!is.number(value) || isActualNaN(value)) return false;
  return value % 1 === 0;
};
/**
 * is.maximum
 * Test if `value` is greater than or equal to every element of `others`.
 *
 * @param {Number} value value to test
 * @param {Array} others values to compare with
 * @return {Boolean} true if `value` is greater than `others` values
 * @api public
 */
is.maximum = function (value, others) {
  if (isActualNaN(value)) {
    throw new TypeError('NaN is not a valid value');
  }
  if (!is.arraylike(others)) {
    throw new TypeError('second argument must be array-like');
  }
  for (var i = others.length - 1; i >= 0; i--) {
    if (value < others[i]) {
      return false;
    }
  }
  return true;
};
/**
 * is.minimum
 * Test if `value` is less than or equal to every element of `others`.
 *
 * @param {Number} value value to test
 * @param {Array} others values to compare with
 * @return {Boolean} true if `value` is less than `others` values
 * @api public
 */
is.minimum = function (value, others) {
  if (isActualNaN(value)) {
    throw new TypeError('NaN is not a valid value');
  }
  if (!is.arraylike(others)) {
    throw new TypeError('second argument must be array-like');
  }
  for (var i = others.length - 1; i >= 0; i--) {
    if (value > others[i]) {
      return false;
    }
  }
  return true;
};
/**
 * is.nan
 * Test if `value` is not a number (either a non-number, or NaN itself).
 *
 * @param {Mixed} value value to test
 * @return {Boolean} true if `value` is not a number, false otherwise
 * @api public
 */
is.nan = function (value) {
  if (!is.number(value)) return true;
  // NaN is the only value that is not equal to itself.
  return value !== value;
};
/**
 * is.even
 * Test if `value` is an even number (infinity counts as even).
 *
 * @param {Number} value value to test
 * @return {Boolean} true if `value` is an even number, false otherwise
 * @api public
 */
is.even = function (value) {
  if (is.infinite(value)) return true;
  return is.number(value) && value === value && value % 2 === 0;
};
/**
 * is.odd
 * Test if `value` is an odd number (infinity counts as odd).
 *
 * @param {Number} value value to test
 * @return {Boolean} true if `value` is an odd number, false otherwise
 * @api public
 */
is.odd = function (value) {
  if (is.infinite(value)) return true;
  return is.number(value) && value === value && value % 2 !== 0;
};
/**
 * is.ge
 * Test if `value` is greater than or equal to `other`. Throws on NaN;
 * infinite operands always yield false.
 *
 * @param {Number} value value to test
 * @param {Number} other value to compare with
 * @return {Boolean}
 * @api public
 */
is.ge = function (value, other) {
  if (isActualNaN(value) || isActualNaN(other)) {
    throw new TypeError('NaN is not a valid value');
  }
  var bothFinite = !is.infinite(value) && !is.infinite(other);
  return bothFinite && value >= other;
};
/**
 * is.gt
 * Test if `value` is greater than `other`. Throws on NaN; infinite
 * operands always yield false.
 *
 * @param {Number} value value to test
 * @param {Number} other value to compare with
 * @return {Boolean}
 * @api public
 */
is.gt = function (value, other) {
  if (isActualNaN(value) || isActualNaN(other)) {
    throw new TypeError('NaN is not a valid value');
  }
  var bothFinite = !is.infinite(value) && !is.infinite(other);
  return bothFinite && value > other;
};
/**
 * is.le
 * Test if `value` is less than or equal to `other`. Throws on NaN;
 * infinite operands always yield false.
 *
 * @param {Number} value value to test
 * @param {Number} other value to compare with
 * @return {Boolean} if 'value' is less than or equal to 'other'
 * @api public
 */
is.le = function (value, other) {
  if (isActualNaN(value) || isActualNaN(other)) {
    throw new TypeError('NaN is not a valid value');
  }
  var bothFinite = !is.infinite(value) && !is.infinite(other);
  return bothFinite && value <= other;
};
/**
 * is.lt
 * Test if `value` is less than `other`. Throws on NaN; infinite operands
 * always yield false.
 *
 * @param {Number} value value to test
 * @param {Number} other value to compare with
 * @return {Boolean} if `value` is less than `other`
 * @api public
 */
is.lt = function (value, other) {
  if (isActualNaN(value) || isActualNaN(other)) {
    throw new TypeError('NaN is not a valid value');
  }
  var bothFinite = !is.infinite(value) && !is.infinite(other);
  return bothFinite && value < other;
};
/**
 * is.within
 * Test if `value` is within `start` and `finish` (inclusive). Throws on
 * NaN or non-number arguments; any infinite argument yields true.
 *
 * @param {Number} value value to test
 * @param {Number} start lower bound
 * @param {Number} finish upper bound
 * @return {Boolean} true if 'value' is is within 'start' and 'finish'
 * @api public
 */
is.within = function (value, start, finish) {
  if (isActualNaN(value) || isActualNaN(start) || isActualNaN(finish)) {
    throw new TypeError('NaN is not a valid value');
  }
  if (!is.number(value) || !is.number(start) || !is.number(finish)) {
    throw new TypeError('all arguments must be numbers');
  }
  if (is.infinite(value) || is.infinite(start) || is.infinite(finish)) {
    return true;
  }
  return value >= start && value <= finish;
};
/**
 * Test object.
 */
/**
 * is.object
 * Test if `value` is an object.
 *
 * @param {Mixed} value value to test
 * @return {Boolean} true if `value` is an object, false otherwise
 * @api public
 */
is.object = function (value) {
  // Falsy values short-circuit (returning the falsy value itself, as the
  // original implementation did).
  return value && toString.call(value) === '[object Object]';
};
/**
 * is.hash
 * Test if `value` is a hash - a plain object literal.
 *
 * @param {Mixed} value value to test
 * @return {Boolean} true if `value` is a hash, false otherwise
 * @api public
 */
is.hash = function (value) {
  return is.object(value)
    && value.constructor === Object
    && !value.nodeType
    && !value.setInterval;
};
/**
 * Test regexp.
 */
/**
 * is.regexp
 * Test if `value` is a regular expression.
 *
 * @param {Mixed} value value to test
 * @return {Boolean} true if `value` is a regexp, false otherwise
 * @api public
 */
is.regexp = function (value) {
  var tag = toString.call(value);
  return tag === '[object RegExp]';
};
/**
 * Test string.
 */
/**
 * is.string
 * Test if `value` is a string.
 *
 * @param {Mixed} value value to test
 * @return {Boolean} true if 'value' is a string, false otherwise
 * @api public
 */
is.string = function (value) {
  var tag = toString.call(value);
  return tag === '[object String]';
};
});
var hasOwn$1 = Object.prototype.hasOwnProperty;
var toString$2 = Object.prototype.toString;
/**
 * foreach$1
 * Iterate array-likes (numeric `length`) by index, anything else by its
 * own enumerable keys, calling `fn(value, keyOrIndex, obj)` with optional
 * context `ctx`. Throws TypeError when `fn` is not a function.
 */
var foreach$1 = function forEach (obj, fn, ctx) {
  if ('[object Function]' !== toString$2.call(fn)) {
    throw new TypeError('iterator must be a function');
  }
  var len = obj.length;
  if (len !== +len) {
    // Not array-like: walk own enumerable properties.
    for (var key in obj) {
      if (hasOwn$1.call(obj, key)) {
        fn.call(ctx, obj[key], key, obj);
      }
    }
  } else {
    for (var idx = 0; idx < len; idx++) {
      fn.call(ctx, obj[idx], idx, obj);
    }
  }
};
var shim$1 = createCommonjsModule(function (module) {
(function () {
  // modified from https://github.com/kriskowal/es5-shim
  var has = Object.prototype.hasOwnProperty;
  var is = is_1;
  var forEach = foreach$1;
  // Old IE hides shadowed built-ins (toString etc.) from for..in.
  var hasDontEnumBug = !({'toString': null}).propertyIsEnumerable('toString');
  var dontEnums = [
    "toString",
    "toLocaleString",
    "valueOf",
    "hasOwnProperty",
    "isPrototypeOf",
    "propertyIsEnumerable",
    "constructor"
  ];
  /**
   * Object.keys fallback: collect the own enumerable property names of
   * `object`, compensating for the DontEnum bug where present.
   */
  var keysShim = function keys(object) {
    if (!is.object(object) && !is.array(object)) {
      throw new TypeError("Object.keys called on a non-object");
    }
    var collected = [];
    for (var prop in object) {
      if (has.call(object, prop)) {
        collected.push(prop);
      }
    }
    if (hasDontEnumBug) {
      forEach(dontEnums, function (dontEnum) {
        if (has.call(object, dontEnum)) {
          collected.push(dontEnum);
        }
      });
    }
    return collected;
  };
  module.exports = keysShim;
}());
});
var objectKeys$4 = Object.keys || shim$1;
var hasKeys_1$1 = hasKeys$1;
/**
 * hasKeys$1
 * True when `source` can carry own properties: a non-null object or a
 * function.
 */
function hasKeys$1(source) {
  if (source === null) return false;
  var kind = typeof source;
  return kind === "object" || kind === "function"
}
var xtend$3 = extend$3;
/**
 * extend$3 (xtend)
 * Shallow-merge the own keys of every argument into a fresh object;
 * later arguments win on conflicts. Arguments that cannot hold keys
 * (null, primitives) are skipped.
 */
function extend$3() {
  var merged = {};
  for (var a = 0; a < arguments.length; a++) {
    var source = arguments[a];
    if (!hasKeys_1$1(source)) {
      continue
    }
    var names = objectKeys$4(source);
    for (var k = 0; k < names.length; k++) {
      merged[names[k]] = source[names[k]];
    }
  }
  return merged
}
/**
 * addOperation
 * Queue one batch operation of `type` ('put' or 'del') on
 * `this._operations` and return `this` for chaining. A `prefix` found in
 * `options` is hoisted onto the operation itself and removed from
 * `options` (note: this mutates the caller's options object).
 */
function addOperation (type, key, value, options) {
  var op = {
    type: type,
    key: key,
    value: value,
    options: options
  };
  var hoistPrefix = options && options.prefix;
  if (hoistPrefix) {
    op.prefix = options.prefix;
    delete options.prefix;
  }
  this._operations.push(op);
  return this
}
/**
 * Batch$1
 * Chained-batch shim for sublevels: queue operations with put()/del(),
 * discard them with clear(), and flush them with write().
 */
function Batch$1(sdb) {
  this._sdb = sdb;
  this._operations = [];
  // Pre-bound so put/del can be handed around detached from the batch.
  this.put = addOperation.bind(this, 'put');
  this.del = addOperation.bind(this, 'del');
}
var B = Batch$1.prototype;
/** Drop every queued operation. */
B.clear = function () {
  this._operations = [];
};
/** Flush the queued operations to the backing sublevel's batch(). */
B.write = function (callback) {
  this._sdb.batch(this._operations, callback);
};
var batch$1 = Batch$1;
var sub = createCommonjsModule(function (module) {
var EventEmitter = events.EventEmitter;
var inherits = util$2.inherits;
inherits(SubDB, EventEmitter);
/**
 * SubDB - a namespaced view ("sublevel") of a levelup-style database.
 * Every key is transparently wrapped as `sep + prefix + sep + key`, so
 * each sublevel owns a disjoint key range inside the root database.
 *
 * @param {Object} db       parent database (the root db or another SubDB)
 * @param {String} prefix   namespace for this sublevel
 * @param {Object} [options] may carry `sep` (separator, default '\xff')
 */
function SubDB (db, prefix, options) {
  // Legacy calling convention: the third argument used to be the
  // separator string itself.
  if('string' === typeof options) {
    console.error('db.sublevel(name, seperator<string>) is depreciated');
    console.error('use db.sublevel(name, {sep: separator})) if you must');
    options = {sep: options};
  }
  if(!(this instanceof SubDB)) return new SubDB(db, prefix, options)
  if(!db) throw new Error('must provide db')
  if(!prefix) throw new Error('must provide prefix')
  options = options || {};
  options.sep = options.sep || '\xff';
  this._parent = db;
  this._options = options;
  this.options = options;
  this._prefix = prefix;
  this._root = root(this);
  db.sublevels[prefix] = this;
  this.sublevels = {};
  this.methods = {};
  var self = this;
  // Hook registration delegates to this sublevel so ranges get prefixed.
  this.hooks = {
    pre: function () {
      return self.pre.apply(self, arguments)
    },
    post: function () {
      return self.post.apply(self, arguments)
    }
  };
}
var SDB = SubDB.prototype;
// Wrap a plain key with this sublevel's separator and prefix.
SDB._key = function (key) {
  var sep = this._options.sep;
  return sep
    + this._prefix
    + sep
    + key
};
// Normalize the (opts, cb) pair: opts may be omitted entirely.
SDB._getOptsAndCb = function (opts, cb) {
  if (typeof opts == 'function') {
    cb = opts;
    opts = {};
  }
  return { opts: xtend$3(opts, this._options), cb: cb }
};
// Create (or return the cached) child sublevel named `prefix`.
SDB.sublevel = function (prefix, options) {
  if(this.sublevels[prefix])
    return this.sublevels[prefix]
  return new SubDB(this, prefix, options || this._options)
};
SDB.put = function (key, value, opts, cb) {
  var res = this._getOptsAndCb(opts, cb);
  this._root.put(this.prefix(key), value, res.opts, res.cb);
};
SDB.get = function (key, opts, cb) {
  var res = this._getOptsAndCb(opts, cb);
  this._root.get(this.prefix(key), res.opts, res.cb);
};
SDB.del = function (key, opts, cb) {
  var res = this._getOptsAndCb(opts, cb);
  this._root.del(this.prefix(key), res.opts, res.cb);
};
// Apply an array of changes against the root db, prefixing each key.
// With no array argument, return a chained-batch object instead.
SDB.batch = function (changes, opts, cb) {
  if(!Array.isArray(changes))
    return new batch$1(this)
  var self = this,
      res = this._getOptsAndCb(opts, cb);
  changes.forEach(function (ch) {
    //OH YEAH, WE NEED TO VALIDATE THAT UPDATING THIS KEY/PREFIX IS ALLOWED
    if('string' === typeof ch.prefix)
      ch.key = ch.prefix + ch.key;
    else
      ch.key = (ch.prefix || self).prefix(ch.key);
    if(ch.prefix) ch.prefix = null;
  });
  this._root.batch(changes, res.opts, res.cb);
};
// Key/value encodings are inherited from the nearest ancestor that sets
// them.
SDB._getKeyEncoding = function () {
  if(this.options.keyEncoding)
    return this.options.keyEncoding
  if(this._parent && this._parent._getKeyEncoding)
    return this._parent._getKeyEncoding()
};
SDB._getValueEncoding = function () {
  if(this.options.valueEncoding)
    return this.options.valueEncoding
  if(this._parent && this._parent._getValueEncoding)
    return this._parent._getValueEncoding()
};
// Full key prefix for this sublevel, including every ancestor's prefix.
SDB.prefix = function (key) {
  var sep = this._options.sep;
  return this._parent.prefix() + sep + this._prefix + sep + (key || '')
};
SDB.keyStream =
SDB.createKeyStream = function (opts) {
  opts = opts || {};
  opts.keys = true;
  opts.values = false;
  return this.createReadStream(opts)
};
SDB.valueStream =
SDB.createValueStream = function (opts) {
  opts = opts || {};
  // (a duplicate `opts.keys = false;` assignment was removed here)
  opts.keys = false;
  opts.values = true;
  return this.createReadStream(opts)
};
// Copy only the whitelisted read-stream options from `opts` onto `_opts`.
function selectivelyMerge(_opts, opts) {
  [ 'valueEncoding'
  , 'encoding'
  , 'keyEncoding'
  , 'reverse'
  , 'values'
  , 'keys'
  , 'limit'
  , 'fillCache'
  ]
  .forEach(function (k) {
    if (opts.hasOwnProperty(k)) _opts[k] = opts[k];
  });
}
// Read stream over this sublevel's range. The sublevel prefix is stripped
// from emitted keys (or from emitted values when streaming keys only).
SDB.readStream =
SDB.createReadStream = function (opts) {
  opts = opts || {};
  var r = root(this);
  var p = this.prefix();
  var _opts = stringRange.prefix(opts, p);
  selectivelyMerge(_opts, xtend$3(opts, this._options));
  var s = r.createReadStream(_opts);
  if(_opts.values === false) {
    // Key-only stream: the emitted datum IS the key; strip the prefix.
    var read = s.read;
    if (read) {
      s.read = function (size) {
        var val = read.call(this, size);
        if (val) val = val.substring(p.length);
        return val
      };
    } else {
      var emit = s.emit;
      s.emit = function (event, val) {
        if(event === 'data') {
          emit.call(this, 'data', val.substring(p.length));
        } else
          emit.call(this, event, val);
      };
    }
    return s
  } else if(_opts.keys === false)
    return s
  else {
    var read = s.read;
    if (read) {
      s.read = function (size) {
        var d = read.call(this, size);
        if (d) d.key = d.key.substring(p.length);
        return d
      };
    } else {
      s.on('data', function (d) {
        //mutate the prefix!
        //this doesn't work for createKeyStream admittedly.
        d.key = d.key.substring(p.length);
      });
    }
    return s
  }
};
// Write stream that prefixes keys and propagates this sublevel's
// encodings onto each written datum.
SDB.writeStream =
SDB.createWriteStream = function () {
  var r = root(this);
  var p = this.prefix();
  var ws = r.createWriteStream.apply(r, arguments);
  var write = ws.write;
  var encoding = this._options.encoding;
  var valueEncoding = this._options.valueEncoding;
  var keyEncoding = this._options.keyEncoding;
  // slight optimization, if no encoding was specified at all,
  // which will be the case most times, make write not check at all
  var nocheck = !encoding && !valueEncoding && !keyEncoding;
  ws.write = nocheck
    ? function (data) {
        data.key = p + data.key;
        return write.call(ws, data)
      }
    : function (data) {
        data.key = p + data.key;
        // not merging all options here since this happens on every write and things could get slowed down
        // at this point we only consider encoding important to propagate
        if (encoding && typeof data.encoding === 'undefined')
          data.encoding = encoding;
        if (valueEncoding && typeof data.valueEncoding === 'undefined')
          data.valueEncoding = valueEncoding;
        if (keyEncoding && typeof data.keyEncoding === 'undefined')
          data.keyEncoding = keyEncoding;
        return write.call(ws, data)
      };
  return ws
};
SDB.approximateSize = function () {
  // BUGFIX: this previously read `root(db)`, but `db` is not defined in
  // this scope, so calling approximateSize always threw a
  // ReferenceError. Resolve the root from `this` instead.
  var r = root(this);
  return r.approximateSize.apply(r, arguments)
};
// Walk up the parent chain to the root (non-sublevel) database.
function root(db) {
  if(!db._parent) return db
  return root(db._parent)
}
// Register a pre-hook scoped to this sublevel's range. Keys handed to the
// hook are un-prefixed, and operations added by the hook are re-prefixed.
SDB.pre = function (range, hook) {
  if(!hook) hook = range, range = null;
  range = stringRange.prefix(range, this.prefix(), this._options.sep);
  var r = root(this._parent);
  var p = this.prefix();
  return r.hooks.pre(levelFixRange(range), function (ch, add, batch) {
    hook({
      key: ch.key.substring(p.length),
      value: ch.value,
      type: ch.type
    }, function (ch, _p) {
      //maybe remove the second add arg now
      //that op can have prefix?
      add(ch, ch.prefix ? _p : (_p || p));
    }, batch);
  })
};
// Register a post-hook scoped to this sublevel's range.
SDB.post = function (range, hook) {
  if(!hook) hook = range, range = null;
  var r = root(this._parent);
  var p = this.prefix();
  range = stringRange.prefix(range, p, this._options.sep);
  return r.hooks.post(levelFixRange(range), function (data) {
    hook({key: data.key.substring(p.length), value: data.value, type: data.type});
  })
};
var exports = module.exports = SubDB;
});
/**
 * levelHooks
 * Install pre/post hook support onto a levelup-style `db`:
 *  - post-hooks observe completed put/del/batch events;
 *  - pre-hooks can rewrite, add to, or veto pending operations before
 *    they reach the underlying put/del/batch.
 * Idempotent: returns immediately if `db.hooks` already exists.
 */
var levelHooks = function (db) {
  if(db.hooks) {
    return
  }
  var posthooks = [];
  var prehooks = [];
  // Resolve a prefix from a string, a {prefix: string} object, or a
  // sublevel-like object whose .prefix is a function.
  function getPrefix (p) {
    return p && (
      'string' === typeof p ? p
    : 'string' === typeof p.prefix ? p.prefix
    : 'function' === typeof p.prefix ? p.prefix()
    : ''
    )
  }
  function getKeyEncoding (db) {
    if(db && db._getKeyEncoding)
      return db._getKeyEncoding(db)
  }
  function getValueEncoding (db) {
    if(db && db._getValueEncoding)
      return db._getValueEncoding(db)
  }
  // Build the un-register function returned to hook installers; yields
  // true if `item` was still registered, false otherwise.
  function remover (array, item) {
    return function () {
      var i = array.indexOf(item);
      if(!~i) return false
      array.splice(i, 1);
      return true
    }
  }
  db.hooks = {
    post: function (prefix, hook) {
      if(!hook) hook = prefix, prefix = '';
      var h = {test: stringRange.checker(prefix), hook: hook};
      posthooks.push(h);
      return remover(posthooks, h)
    },
    pre: function (prefix, hook) {
      if(!hook) hook = prefix, prefix = '';
      // `safe` defaults to true: a safe pre-hook may not insert keys back
      // into its own range (guards against infinite hook recursion).
      var h = {
        test: stringRange.checker(prefix),
        hook: hook,
        safe: false !== prefix.safe
      };
      prehooks.push(h);
      return remover(prehooks, h)
    },
    posthooks: posthooks,
    prehooks: prehooks
  };
  //POST HOOKS
  // Fan one completed change out to every post-hook whose range matches.
  function each (e) {
    if(e && e.type) {
      posthooks.forEach(function (h) {
        if(h.test(e.key)) h.hook(e);
      });
    }
  }
  db.on('put', function (key, val) {
    each({type: 'put', key: key, value: val});
  });
  db.on('del', function (key, val) {
    each({type: 'del', key: key, value: val});
  });
  db.on('batch', function onBatch (ary) {
    ary.forEach(each);
  });
  //PRE HOOKS
  // Capture the raw operations before we wrap them below.
  var put = db.put;
  var del = db.del;
  var batch = db.batch;
  // Run every matching pre-hook over the pending operations `b`. Hooks
  // may add operations (which are themselves re-hooked via the named
  // `hook` recursion), delete entries, or veto. A single surviving op
  // from a non-batch call falls back to a plain put/del.
  function callHooks (isBatch, b, opts, cb) {
    try {
      b.forEach(function hook(e, i) {
        prehooks.forEach(function (h) {
          if(h.test(String(e.key))) {
            //optimize this?
            //maybe faster to not create a new object each time?
            //have one object and expose scope to it?
            var context = {
              add: function (ch, db) {
                if(typeof ch === 'undefined') {
                  return this
                }
                // `false` means: drop the current operation entirely.
                if(ch === false)
                  return delete b[i]
                var prefix = (
                  getPrefix(ch.prefix) ||
                  getPrefix(db) ||
                  h.prefix || ''
                );
                //don't leave a circular json object there incase using multilevel.
                if(prefix) ch.prefix = prefix;
                ch.key = prefix + ch.key;
                if(h.safe && h.test(String(ch.key))) {
                  //this usually means a stack overflow.
                  throw new Error('prehook cannot insert into own range')
                }
                var ke = ch.keyEncoding || getKeyEncoding(ch.prefix);
                var ve = ch.valueEncoding || getValueEncoding(ch.prefix);
                if(ke) ch.keyEncoding = ke;
                if(ve) ch.valueEncoding = ve;
                b.push(ch);
                // Newly added ops run through the hooks too.
                hook(ch, b.length - 1);
                return this
              },
              put: function (ch, db) {
                if('object' === typeof ch) ch.type = 'put';
                return this.add(ch, db)
              },
              del: function (ch, db) {
                if('object' === typeof ch) ch.type = 'del';
                return this.add(ch, db)
              },
              veto: function () {
                return this.add(false)
              }
            };
            h.hook.call(context, e, context.add, b);
          }
        });
      });
    } catch (err) {
      // A throwing hook aborts the whole operation; report via callback.
      return (cb || opts)(err)
    }
    b = b.filter(function (e) {
      return e && e.type //filter out empty items
    });
    if(b.length == 1 && !isBatch) {
      var change = b[0];
      return change.type == 'put'
        ? put.call(db, change.key, change.value, opts, cb)
        : del.call(db, change.key, opts, cb)
    }
    return batch.call(db, b, opts, cb)
  }
  // Wrapped operations: each becomes a one-element pending batch so the
  // pre-hooks see a uniform shape.
  db.put = function (key, value, opts, cb ) {
    var batch = [{key: key, value: value, type: 'put'}];
    return callHooks(false, batch, opts, cb)
  };
  db.del = function (key, opts, cb) {
    var batch = [{key: key, type: 'del'}];
    return callHooks(false, batch, opts, cb)
  };
  db.batch = function (batch, opts, cb) {
    return callHooks(true, batch, opts, cb)
  };
};
var EventEmitter$1 = events.EventEmitter;
var next = process.nextTick;
/**
 * levelSublevel
 * Wrap a levelup-style `_db` with sublevel support (hooks,
 * `db.sublevel()`, key prefixing, range-clamped read streams). The
 * original `_db` is left untouched: the wrapper is a new object that
 * delegates to it prototypally.
 */
var levelSublevel = function (_db, options) {
  function DB () {}
  DB.prototype = _db;
  var db = new DB();
  // Already wrapped - return as-is.
  if (db.sublevel) return db
  options = options || {};
  //use \xff (255) as the seperator,
  //so that sections of the database will sort after the regular keys
  var sep = options.sep = options.sep || '\xff';
  db._options = options;
  levelHooks(db);
  db.sublevels = {};
  // Create (or return the cached) sublevel named `prefix`.
  db.sublevel = function (prefix, options) {
    if(db.sublevels[prefix])
      return db.sublevels[prefix]
    return new sub(db, prefix, options || this._options)
  };
  db.methods = {};
  // The root database has an empty prefix.
  db.prefix = function (key) {
    return '' + (key || '')
  };
  // pre/post default their range to everything below the first separator,
  // i.e. the root's own (non-sublevel) keys.
  db.pre = function (range, hook) {
    if(!hook)
      hook = range, range = {
        max : sep
      };
    return db.hooks.pre(range, hook)
  };
  db.post = function (range, hook) {
    if(!hook)
      hook = range, range = {
        max : sep
      };
    return db.hooks.post(range, hook)
  };
  // Clamp default range ends so streams over the root stop before the
  // sublevel sections (which sort after `sep`).
  function safeRange(fun) {
    return function (opts) {
      opts = opts || {};
      opts = levelFixRange(opts);
      if(opts.reverse) opts.start = opts.start || sep;
      else opts.end = opts.end || sep;
      return fun.call(db, opts)
    }
  }
  db.readStream =
  db.createReadStream = safeRange(db.createReadStream);
  db.keyStream =
  db.createKeyStream = safeRange(db.createKeyStream);
  // NOTE(review): the alias here is `valuesStream` (plural) while SubDB
  // exposes `valueStream` - confirm whether this asymmetry is intended.
  db.valuesStream =
  db.createValueStream = safeRange(db.createValueStream);
  var batch = db.batch;
  // Root batch: resolve per-change `prefix` (sublevel object or string)
  // into the final key before delegating to the hooked batch.
  db.batch = function (changes, opts, cb) {
    if(!Array.isArray(changes))
      return new batch$1(db)
    changes.forEach(function (e) {
      if(e.prefix) {
        if('function' === typeof e.prefix.prefix)
          e.key = e.prefix.prefix(e.key);
        else if('string' === typeof e.prefix)
          e.key = e.prefix + e.key;
      }
    });
    batch.call(db, changes, opts, cb);
  };
  return db
};
/**
 * isarray$3
 * Array test: native Array.isArray when available, otherwise a
 * [[Class]]-tag fallback.
 */
var isarray$3 = Array.isArray || function (arr) {
  var tag = Object.prototype.toString.call(arr);
  return tag == '[object Array]';
};
// Vendored Node.js string_decoder: incrementally decodes Buffers into
// strings without splitting multi-byte characters across chunk
// boundaries.
var string_decoder$3 = createCommonjsModule(function (module, exports) {
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
var Buffer = buffer.Buffer;
// Fallback for environments whose Buffer lacks isEncoding.
var isBufferEncoding = Buffer.isEncoding
  || function(encoding) {
       switch (encoding && encoding.toLowerCase()) {
         case 'hex': case 'utf8': case 'utf-8': case 'ascii': case 'binary': case 'base64': case 'ucs2': case 'ucs-2': case 'utf16le': case 'utf-16le': case 'raw': return true;
         default: return false;
       }
     };
// Throws for encodings that Buffer cannot handle.
function assertEncoding(encoding) {
  if (encoding && !isBufferEncoding(encoding)) {
    throw new Error('Unknown encoding: ' + encoding);
  }
}
// StringDecoder provides an interface for efficiently splitting a series of
// buffers into a series of JS strings without breaking apart multi-byte
// characters. CESU-8 is handled as part of the UTF-8 encoding.
//
// @TODO Handling all encodings inside a single object makes it very difficult
// to reason about this code, so it should be split up in the future.
// @TODO There should be a utf8-strict encoding that rejects invalid UTF-8 code
// points as used by CESU-8.
var StringDecoder = exports.StringDecoder = function(encoding) {
  // Normalize e.g. 'utf-8' -> 'utf8', 'ucs-2' -> 'ucs2'.
  this.encoding = (encoding || 'utf8').toLowerCase().replace(/[-_]/, '');
  assertEncoding(encoding);
  switch (this.encoding) {
    case 'utf8':
      // CESU-8 represents each of Surrogate Pair by 3-bytes
      this.surrogateSize = 3;
      break;
    case 'ucs2':
    case 'utf16le':
      // UTF-16 represents each of Surrogate Pair by 2-bytes
      this.surrogateSize = 2;
      this.detectIncompleteChar = utf16DetectIncompleteChar;
      break;
    case 'base64':
      // Base-64 stores 3 bytes in 4 chars, and pads the remainder.
      this.surrogateSize = 3;
      this.detectIncompleteChar = base64DetectIncompleteChar;
      break;
    default:
      // Single-byte encodings never split characters: pass through.
      this.write = passThroughWrite;
      return;
  }
  // Enough space to store all bytes of a single character. UTF-8 needs 4
  // bytes, but CESU-8 may require up to 6 (3 bytes per surrogate).
  // NOTE(review): `new Buffer` is deprecated in modern Node;
  // Buffer.alloc(6) is the modern equivalent.
  this.charBuffer = new Buffer(6);
  // Number of bytes received for the current incomplete multi-byte character.
  this.charReceived = 0;
  // Number of bytes expected for the current incomplete multi-byte character.
  this.charLength = 0;
};
// write decodes the given buffer and returns it as JS string that is
// guaranteed to not contain any partial multi-byte characters. Any partial
// character found at the end of the buffer is buffered up, and will be
// returned when calling write again with the remaining bytes.
//
// Note: Converting a Buffer containing an orphan surrogate to a String
// currently works, but converting a String to a Buffer (via `new Buffer`, or
// Buffer#write) will replace incomplete surrogates with the unicode
// replacement character. See https://codereview.chromium.org/121173009/ .
StringDecoder.prototype.write = function(buffer) {
  var charStr = '';
  // if our last write ended with an incomplete multibyte character
  while (this.charLength) {
    // determine how many remaining bytes this buffer has to offer for this char
    var available = (buffer.length >= this.charLength - this.charReceived) ?
        this.charLength - this.charReceived :
        buffer.length;
    // add the new bytes to the char buffer
    buffer.copy(this.charBuffer, this.charReceived, 0, available);
    this.charReceived += available;
    if (this.charReceived < this.charLength) {
      // still not enough chars in this buffer? wait for more ...
      return '';
    }
    // remove bytes belonging to the current character from the buffer
    buffer = buffer.slice(available, buffer.length);
    // get the character that was split
    charStr = this.charBuffer.slice(0, this.charLength).toString(this.encoding);
    // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character
    var charCode = charStr.charCodeAt(charStr.length - 1);
    if (charCode >= 0xD800 && charCode <= 0xDBFF) {
      this.charLength += this.surrogateSize;
      charStr = '';
      continue;
    }
    this.charReceived = this.charLength = 0;
    // if there are no more bytes in this buffer, just emit our char
    if (buffer.length === 0) {
      return charStr;
    }
    break;
  }
  // determine and set charLength / charReceived
  this.detectIncompleteChar(buffer);
  var end = buffer.length;
  if (this.charLength) {
    // buffer the incomplete character bytes we got
    buffer.copy(this.charBuffer, 0, buffer.length - this.charReceived, end);
    end -= this.charReceived;
  }
  charStr += buffer.toString(this.encoding, 0, end);
  // NOTE: `end` and `charCode` are intentionally re-declared with `var`
  // here (same bindings as above due to hoisting); kept as-is.
  var end = charStr.length - 1;
  var charCode = charStr.charCodeAt(end);
  // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character
  if (charCode >= 0xD800 && charCode <= 0xDBFF) {
    var size = this.surrogateSize;
    this.charLength += size;
    this.charReceived += size;
    this.charBuffer.copy(this.charBuffer, size, 0, size);
    buffer.copy(this.charBuffer, 0, 0, size);
    return charStr.substring(0, end);
  }
  // or just emit the charStr
  return charStr;
};
// detectIncompleteChar determines if there is an incomplete UTF-8 character at
// the end of the given buffer. If so, it sets this.charLength to the byte
// length that character, and sets this.charReceived to the number of bytes
// that are available for this character.
StringDecoder.prototype.detectIncompleteChar = function(buffer) {
  // determine how many bytes we have to check at the end of this buffer
  var i = (buffer.length >= 3) ? 3 : buffer.length;
  // Figure out if one of the last i bytes of our buffer announces an
  // incomplete char.
  for (; i > 0; i--) {
    var c = buffer[buffer.length - i];
    // See http://en.wikipedia.org/wiki/UTF-8#Description
    // 110XXXXX
    if (i == 1 && c >> 5 == 0x06) {
      this.charLength = 2;
      break;
    }
    // 1110XXXX
    if (i <= 2 && c >> 4 == 0x0E) {
      this.charLength = 3;
      break;
    }
    // 11110XXX
    if (i <= 3 && c >> 3 == 0x1E) {
      this.charLength = 4;
      break;
    }
  }
  this.charReceived = i;
};
// Flush: decode any trailing bytes, including a final incomplete
// character decoded as-is.
StringDecoder.prototype.end = function(buffer) {
  var res = '';
  if (buffer && buffer.length)
    res = this.write(buffer);
  if (this.charReceived) {
    var cr = this.charReceived;
    var buf = this.charBuffer;
    var enc = this.encoding;
    res += buf.slice(0, cr).toString(enc);
  }
  return res;
};
// Used for single-byte encodings: no character can be split.
function passThroughWrite(buffer) {
  return buffer.toString(this.encoding);
}
// UTF-16LE code units are 2 bytes; an odd byte count means a split unit.
function utf16DetectIncompleteChar(buffer) {
  this.charReceived = buffer.length % 2;
  this.charLength = this.charReceived ? 2 : 0;
}
// Base64 consumes input in 3-byte groups.
function base64DetectIncompleteChar(buffer) {
  this.charReceived = buffer.length % 3;
  this.charLength = this.charReceived ? 3 : 0;
}
});
// Bundle-local alias for the vendored StringDecoder constructor.
var string_decoder_1$3 = string_decoder$3.StringDecoder;
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// Module-level wiring for the vendored readable-stream Readable
// implementation: export, polyfills, inheritance, and debug logging.
// Statement order matters; kept exactly as generated by the bundler.
var _stream_readable$3 = Readable$4;
/*<replacement>*/
/*</replacement>*/
/*<replacement>*/
var Buffer$8 = buffer.Buffer;
/*</replacement>*/
Readable$4.ReadableState = ReadableState$3;
var EE$3 = events.EventEmitter;
/*<replacement>*/
// Polyfill listenerCount for EventEmitter implementations that lack it.
if (!EE$3.listenerCount) EE$3.listenerCount = function(emitter, type) {
  return emitter.listeners(type).length;
};
/*</replacement>*/
/*<replacement>*/
util.inherits = inherits_browser;
/*</replacement>*/
// Lazily assigned in ReadableState$3 when an encoding is requested.
var StringDecoder$3;
/*<replacement>*/
// util.debuglog('stream') when available (Node), otherwise a no-op.
var debug = util$2;
if (debug && debug.debuglog) {
  debug = debug.debuglog('stream');
} else {
  debug = function () {};
}
/*</replacement>*/
util.inherits(Readable$4, stream);
/**
 * ReadableState$3
 * Internal state record for a Readable$4 stream: buffering, flow mode,
 * EOF bookkeeping, and optional string decoding.
 *
 * @param {Object} options highWaterMark, objectMode, readableObjectMode,
 *                         encoding, defaultEncoding
 * @param {Object} stream  the owning stream instance
 */
function ReadableState$3(options, stream) {
  var Duplex = _stream_duplex$3;
  options = options || {};
  // the point at which it stops calling _read() to fill the buffer
  // Note: 0 is a valid value, means "don't call _read preemptively ever"
  var hwm = options.highWaterMark;
  var defaultHwm = options.objectMode ? 16 : 16 * 1024;
  this.highWaterMark = (hwm || hwm === 0) ? hwm : defaultHwm;
  // cast to ints.
  this.highWaterMark = ~~this.highWaterMark;
  this.buffer = [];
  this.length = 0;
  this.pipes = null;
  this.pipesCount = 0;
  this.flowing = null;
  this.ended = false;
  this.endEmitted = false;
  this.reading = false;
  // a flag to be able to tell if the onwrite cb is called immediately,
  // or on a later tick. We set this to true at first, because any
  // actions that shouldn't happen until "later" should generally also
  // not happen before the first write call.
  this.sync = true;
  // whenever we return null, then we set a flag to say
  // that we're awaiting a 'readable' event emission.
  this.needReadable = false;
  this.emittedReadable = false;
  this.readableListening = false;
  // object stream flag. Used to make read(n) ignore n and to
  // make all the buffer merging and length checks go away
  this.objectMode = !!options.objectMode;
  if (stream instanceof Duplex)
    this.objectMode = this.objectMode || !!options.readableObjectMode;
  // Crypto is kind of old and crusty. Historically, its default string
  // encoding is 'binary' so we have to make this configurable.
  // Everything else in the universe uses 'utf8', though.
  this.defaultEncoding = options.defaultEncoding || 'utf8';
  // when piping, we only care about 'readable' events that happen
  // after read()ing all the bytes and not getting any pushback.
  this.ranOut = false;
  // the number of writers that are awaiting a drain event in .pipe()s
  this.awaitDrain = 0;
  // if true, a maybeReadMore has been scheduled
  this.readingMore = false;
  this.decoder = null;
  this.encoding = null;
  // Only instantiate a StringDecoder when an encoding was requested.
  if (options.encoding) {
    if (!StringDecoder$3)
      StringDecoder$3 = string_decoder$3.StringDecoder;
    this.decoder = new StringDecoder$3(options.encoding);
    this.encoding = options.encoding;
  }
}
// Readable stream constructor; usable with or without `new`.
function Readable$4(options) {
  // Guard: support plain-function invocation.
  if (!(this instanceof Readable$4)) {
    return new Readable$4(options);
  }

  // Per-instance readable-side bookkeeping.
  this._readableState = new ReadableState$3(options, this);

  // legacy flag, consulted by old-style stream consumers
  this.readable = true;

  stream.call(this);
}
// Manually shove something into the read() buffer.
// This returns true if the highWaterMark has not been hit yet,
// similar to how Writable.write() returns true if you should
// write() some more.
Readable$4.prototype.push = function(chunk, encoding) {
  var state = this._readableState;
  if (util.isString(chunk) && !state.objectMode) {
    // Convert the string to a Buffer up front unless it is already in
    // the decoder's encoding (then the decoder handles it downstream).
    encoding = encoding || state.defaultEncoding;
    if (encoding !== state.encoding) {
      chunk = new Buffer$8(chunk, encoding);
      encoding = '';
    }
  }
  return readableAddChunk$3(this, state, chunk, encoding, false);
};
// Unshift should *always* be something directly out of read().
// Puts `chunk` back at the front of the internal buffer.
Readable$4.prototype.unshift = function(chunk) {
  var readableState = this._readableState;
  return readableAddChunk$3(this, readableState, chunk, '', true);
};
// Core of push()/unshift(): validate `chunk`, then either emit it
// immediately (flowing mode, empty buffer, async context) or
// append/prepend it to the internal buffer.  Returns whether the
// caller may push more data (see needMoreData$3).
function readableAddChunk$3(stream, state, chunk, encoding, addToFront) {
  var er = chunkInvalid$3(state, chunk);
  if (er) {
    stream.emit('error', er);
  } else if (util.isNullOrUndefined(chunk)) {
    // null/undefined chunk signals EOF.
    state.reading = false;
    if (!state.ended)
      onEofChunk$3(stream, state);
  } else if (state.objectMode || chunk && chunk.length > 0) {
    if (state.ended && !addToFront) {
      var e = new Error('stream.push() after EOF');
      stream.emit('error', e);
    } else if (state.endEmitted && addToFront) {
      var e = new Error('stream.unshift() after end event');
      stream.emit('error', e);
    } else {
      if (state.decoder && !addToFront && !encoding)
        chunk = state.decoder.write(chunk);
      if (!addToFront)
        state.reading = false;
      // if we want the data now, just emit it.
      if (state.flowing && state.length === 0 && !state.sync) {
        stream.emit('data', chunk);
        stream.read(0);
      } else {
        // update the buffer info.
        state.length += state.objectMode ? 1 : chunk.length;
        if (addToFront)
          state.buffer.unshift(chunk);
        else
          state.buffer.push(chunk);
        if (state.needReadable)
          emitReadable$3(stream);
      }
      maybeReadMore$3(stream, state);
    }
  } else if (!addToFront) {
    // Zero-length chunk in non-object mode: nothing buffered, but it
    // still marks the current _read() as settled.
    state.reading = false;
  }
  return needMoreData$3(state);
}
// if it's past the high water mark, we can push in some more.
// Also, if we have no data yet, we can stand some
// more bytes.  This is to work around cases where hwm=0,
// such as the repl.  Also, if the push() triggered a
// readable event, and the user called read(largeNumber) such that
// needReadable was set, then we ought to push more, so that another
// 'readable' event will be triggered.
function needMoreData$3(state) {
  // Nothing more is ever accepted once EOF has been signalled.
  if (state.ended) {
    return false;
  }
  return state.needReadable ||
         state.length === 0 ||
         state.length < state.highWaterMark;
}
// backwards compatibility.
// Switch the stream to emitting decoded strings in `enc`.
Readable$4.prototype.setEncoding = function(enc) {
  // Lazy-load the decoder module on first use.
  if (!StringDecoder$3) {
    StringDecoder$3 = string_decoder$3.StringDecoder;
  }
  var state = this._readableState;
  state.decoder = new StringDecoder$3(enc);
  state.encoding = enc;
  return this;
};
// Don't raise the hwm > 128MB
var MAX_HWM$3 = 0x800000;
// Round `n` up to the next power of two, clamped at MAX_HWM$3.
function roundUpToNextPowerOf2$3(n) {
  if (n >= MAX_HWM$3) {
    return MAX_HWM$3;
  }
  // Smear the highest set bit of (n - 1) into every lower position,
  // then add one: the classic next-power-of-two bit trick.
  n--;
  n |= n >> 1;
  n |= n >> 2;
  n |= n >> 4;
  n |= n >> 8;
  n |= n >> 16;
  return n + 1;
}
// Compute how many bytes (or objects) read(n) should actually return,
// given the requested `n` and the current buffer state.  May bump
// highWaterMark and set needReadable as a side effect.
function howMuchToRead$3(n, state) {
  if (state.length === 0 && state.ended)
    return 0;
  // Object mode ignores byte counts: one object per read.
  if (state.objectMode)
    return n === 0 ? 0 : 1;
  if (isNaN(n) || util.isNull(n)) {
    // only flow one buffer at a time
    if (state.flowing && state.buffer.length)
      return state.buffer[0].length;
    else
      return state.length;
  }
  if (n <= 0)
    return 0;
  // If we're asking for more than the target buffer level,
  // then raise the water mark.  Bump up to the next highest
  // power of 2, to prevent increasing it excessively in tiny
  // amounts.
  if (n > state.highWaterMark)
    state.highWaterMark = roundUpToNextPowerOf2$3(n);
  // don't have that much.  return null, unless we've ended.
  if (n > state.length) {
    if (!state.ended) {
      state.needReadable = true;
      return 0;
    } else
      return state.length;
  }
  return n;
}
// you can override either this method, or the async _read(n) below.
// Pull up to `n` bytes/objects out of the stream.  Returns the data,
// or null if none is available yet (in which case a 'readable' event
// will fire later).  read(0) is used internally to prime the pump.
Readable$4.prototype.read = function(n) {
  debug('read', n);
  var state = this._readableState;
  var nOrig = n;
  if (!util.isNumber(n) || n > 0)
    state.emittedReadable = false;
  // if we're doing read(0) to trigger a readable event, but we
  // already have a bunch of data in the buffer, then just trigger
  // the 'readable' event and move on.
  if (n === 0 &&
      state.needReadable &&
      (state.length >= state.highWaterMark || state.ended)) {
    debug('read: emitReadable', state.length, state.ended);
    if (state.length === 0 && state.ended)
      endReadable$3(this);
    else
      emitReadable$3(this);
    return null;
  }
  n = howMuchToRead$3(n, state);
  // if we've ended, and we're now clear, then finish it up.
  if (n === 0 && state.ended) {
    if (state.length === 0)
      endReadable$3(this);
    return null;
  }
  // All the actual chunk generation logic needs to be
  // *below* the call to _read.  The reason is that in certain
  // synthetic stream cases, such as passthrough streams, _read
  // may be a completely synchronous operation which may change
  // the state of the read buffer, providing enough data when
  // before there was *not* enough.
  //
  // So, the steps are:
  // 1. Figure out what the state of things will be after we do
  // a read from the buffer.
  //
  // 2. If that resulting state will trigger a _read, then call _read.
  // Note that this may be asynchronous, or synchronous.  Yes, it is
  // deeply ugly to write APIs this way, but that still doesn't mean
  // that the Readable class should behave improperly, as streams are
  // designed to be sync/async agnostic.
  // Take note if the _read call is sync or async (ie, if the read call
  // has returned yet), so that we know whether or not it's safe to emit
  // 'readable' etc.
  //
  // 3. Actually pull the requested chunks out of the buffer and return.
  // if we need a readable event, then we need to do some reading.
  var doRead = state.needReadable;
  debug('need readable', doRead);
  // if we currently have less than the highWaterMark, then also read some
  if (state.length === 0 || state.length - n < state.highWaterMark) {
    doRead = true;
    debug('length less than watermark', doRead);
  }
  // however, if we've ended, then there's no point, and if we're already
  // reading, then it's unnecessary.
  if (state.ended || state.reading) {
    doRead = false;
    debug('reading or ended', doRead);
  }
  if (doRead) {
    debug('do read');
    state.reading = true;
    state.sync = true;
    // if the length is currently zero, then we *need* a readable event.
    if (state.length === 0)
      state.needReadable = true;
    // call internal read method
    this._read(state.highWaterMark);
    state.sync = false;
  }
  // If _read pushed data synchronously, then `reading` will be false,
  // and we need to re-evaluate how much data we can return to the user.
  if (doRead && !state.reading)
    n = howMuchToRead$3(nOrig, state);
  var ret;
  if (n > 0)
    ret = fromList$3(n, state);
  else
    ret = null;
  if (util.isNull(ret)) {
    state.needReadable = true;
    n = 0;
  }
  state.length -= n;
  // If we have nothing in the buffer, then we want to know
  // as soon as we *do* get something into the buffer.
  if (state.length === 0 && !state.ended)
    state.needReadable = true;
  // If we tried to read() past the EOF, then emit end on the next tick.
  if (nOrig !== n && state.ended && state.length === 0)
    endReadable$3(this);
  if (!util.isNull(ret))
    this.emit('data', ret);
  return ret;
};
// Returns a TypeError when `chunk` is not acceptable for this stream,
// or null when it is.  Object-mode streams accept anything; otherwise
// only buffers, strings, null and undefined are valid.
function chunkInvalid$3(state, chunk) {
  if (state.objectMode) {
    return null;
  }
  var acceptable = util.isBuffer(chunk) ||
                   util.isString(chunk) ||
                   util.isNullOrUndefined(chunk);
  return acceptable ? null : new TypeError('Invalid non-string/buffer chunk');
}
// Handle the EOF signal (push(null)): flush any bytes the string
// decoder is still holding, mark the stream as ended, and notify
// listeners via 'readable'.
function onEofChunk$3(stream, state) {
  if (state.decoder && !state.ended) {
    var tail = state.decoder.end();
    if (tail && tail.length) {
      state.buffer.push(tail);
      state.length += state.objectMode ? 1 : tail.length;
    }
  }
  state.ended = true;
  // emit 'readable' now to make sure it gets picked up.
  emitReadable$3(stream);
}
// Don't emit readable right away in sync mode, because this can trigger
// another read() call => stack overflow.  This way, it might trigger
// a nextTick recursion warning, but that's not so bad.
function emitReadable$3(stream) {
  var state = stream._readableState;
  state.needReadable = false;
  // Coalesce: only one 'readable' per "became readable" transition
  // (reset by read()).
  if (!state.emittedReadable) {
    debug('emitReadable', state.flowing);
    state.emittedReadable = true;
    if (state.sync)
      process.nextTick(function() {
        emitReadable_$3(stream);
      });
    else
      emitReadable_$3(stream);
  }
}
// Actually emit 'readable', then kick flow() so 'data' consumers in
// flowing mode are drained as well.
function emitReadable_$3(stream) {
  debug('emit readable');
  stream.emit('readable');
  flow$3(stream);
}
// at this point, the user has presumably seen the 'readable' event,
// and called read() to consume some data.  that may have triggered
// in turn another _read(n) call, in which case reading = true if
// it's in progress.
// However, if we're not ended, or reading, and the length < hwm,
// then go ahead and try to read some more preemptively.
function maybeReadMore$3(stream, state) {
  // At most one speculative refill may be scheduled at a time.
  if (state.readingMore) {
    return;
  }
  state.readingMore = true;
  process.nextTick(function() {
    maybeReadMore_$3(stream, state);
  });
}
// The scheduled half of maybeReadMore$3: repeatedly read(0) to pull
// data into the buffer up to highWaterMark, stopping as soon as a
// read yields no new data (to avoid spinning forever).
function maybeReadMore_$3(stream, state) {
  var len = state.length;
  while (!state.reading && !state.flowing && !state.ended &&
         state.length < state.highWaterMark) {
    debug('maybeReadMore read 0');
    stream.read(0);
    if (len === state.length)
      // didn't get any data, stop spinning.
      break;
    else
      len = state.length;
  }
  state.readingMore = false;
}
// abstract method.  to be overridden in specific implementation classes.
// call cb(er, data) where data is <= n in length.
// for virtual (non-string, non-buffer) streams, "length" is somewhat
// arbitrary, and perhaps not very meaningful.
Readable$4.prototype._read = function(n) {
  // Base implementation: subclasses must override this.
  this.emit('error', new Error('not implemented'));
};
// Pipe this readable into `dest`, wiring up backpressure ('drain'),
// teardown ('unpipe'/'close'/'finish'/'error') and optional auto-end.
// pipeOpts.end === false keeps dest open after this stream ends;
// stdout/stderr are never auto-ended.  Returns `dest` for chaining.
Readable$4.prototype.pipe = function(dest, pipeOpts) {
  var src = this;
  var state = this._readableState;
  // pipes is null / single dest / array of dests depending on count.
  switch (state.pipesCount) {
    case 0:
      state.pipes = dest;
      break;
    case 1:
      state.pipes = [state.pipes, dest];
      break;
    default:
      state.pipes.push(dest);
      break;
  }
  state.pipesCount += 1;
  debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
  var doEnd = (!pipeOpts || pipeOpts.end !== false) &&
              dest !== process.stdout &&
              dest !== process.stderr;
  var endFn = doEnd ? onend : cleanup;
  if (state.endEmitted)
    process.nextTick(endFn);
  else
    src.once('end', endFn);
  dest.on('unpipe', onunpipe);
  function onunpipe(readable) {
    debug('onunpipe');
    if (readable === src) {
      cleanup();
    }
  }
  function onend() {
    debug('onend');
    dest.end();
  }
  // when the dest drains, it reduces the awaitDrain counter
  // on the source.  This would be more elegant with a .once()
  // handler in flow(), but adding and removing repeatedly is
  // too slow.
  var ondrain = pipeOnDrain$3(src);
  dest.on('drain', ondrain);
  function cleanup() {
    debug('cleanup');
    // cleanup event handlers once the pipe is broken
    dest.removeListener('close', onclose);
    dest.removeListener('finish', onfinish);
    dest.removeListener('drain', ondrain);
    dest.removeListener('error', onerror);
    dest.removeListener('unpipe', onunpipe);
    src.removeListener('end', onend);
    src.removeListener('end', cleanup);
    src.removeListener('data', ondata);
    // if the reader is waiting for a drain event from this
    // specific writer, then it would cause it to never start
    // flowing again.
    // So, if this is awaiting a drain, then we just call it now.
    // If we don't know, then assume that we are waiting for one.
    if (state.awaitDrain &&
        (!dest._writableState || dest._writableState.needDrain))
      ondrain();
  }
  src.on('data', ondata);
  function ondata(chunk) {
    debug('ondata');
    var ret = dest.write(chunk);
    if (false === ret) {
      // dest is backed up: pause the source until it drains.
      debug('false write response, pause',
            src._readableState.awaitDrain);
      src._readableState.awaitDrain++;
      src.pause();
    }
  }
  // if the dest has an error, then stop piping into it.
  // however, don't suppress the throwing behavior for this.
  function onerror(er) {
    debug('onerror', er);
    unpipe();
    dest.removeListener('error', onerror);
    if (EE$3.listenerCount(dest, 'error') === 0)
      dest.emit('error', er);
  }
  // This is a brutally ugly hack to make sure that our error handler
  // is attached before any userland ones.  NEVER DO THIS.
  if (!dest._events || !dest._events.error)
    dest.on('error', onerror);
  else if (isarray$3(dest._events.error))
    dest._events.error.unshift(onerror);
  else
    dest._events.error = [onerror, dest._events.error];
  // Both close and finish should trigger unpipe, but only once.
  function onclose() {
    dest.removeListener('finish', onfinish);
    unpipe();
  }
  dest.once('close', onclose);
  function onfinish() {
    debug('onfinish');
    dest.removeListener('close', onclose);
    unpipe();
  }
  dest.once('finish', onfinish);
  function unpipe() {
    debug('unpipe');
    src.unpipe(dest);
  }
  // tell the dest that it's being piped to
  dest.emit('pipe', src);
  // start the flow if it hasn't been started already.
  if (!state.flowing) {
    debug('pipe resume');
    src.resume();
  }
  return dest;
};
// Build the 'drain' handler installed on each pipe destination:
// decrements src's awaitDrain counter and restarts the flow once every
// blocked destination has drained (and 'data' listeners remain).
function pipeOnDrain$3(src) {
  return function() {
    var state = src._readableState;
    debug('pipeOnDrain', state.awaitDrain);
    if (state.awaitDrain)
      state.awaitDrain--;
    if (state.awaitDrain === 0 && EE$3.listenerCount(src, 'data')) {
      state.flowing = true;
      flow$3(src);
    }
  };
}
// Detach `dest` from this stream (or all destinations when `dest` is
// omitted).  Emits 'unpipe' on each removed destination; the pipe()
// machinery listens for that event to tear down its handlers.
Readable$4.prototype.unpipe = function(dest) {
  var state = this._readableState;
  // if we're not piping anywhere, then do nothing.
  if (state.pipesCount === 0)
    return this;
  // just one destination.  most common case.
  if (state.pipesCount === 1) {
    // passed in one, but it's not the right one.
    if (dest && dest !== state.pipes)
      return this;
    if (!dest)
      dest = state.pipes;
    // got a match.
    state.pipes = null;
    state.pipesCount = 0;
    state.flowing = false;
    if (dest)
      dest.emit('unpipe', this);
    return this;
  }
  // slow case. multiple pipe destinations.
  if (!dest) {
    // remove all.
    var dests = state.pipes;
    var len = state.pipesCount;
    state.pipes = null;
    state.pipesCount = 0;
    state.flowing = false;
    for (var i = 0; i < len; i++)
      dests[i].emit('unpipe', this);
    return this;
  }
  // try to find the right one.
  var i = indexOf$3(state.pipes, dest);
  if (i === -1)
    return this;
  state.pipes.splice(i, 1);
  state.pipesCount -= 1;
  // Collapse back to the single-dest representation when one remains.
  if (state.pipesCount === 1)
    state.pipes = state.pipes[0];
  dest.emit('unpipe', this);
  return this;
};
// set up data events if they are asked for
// Ensure readable listeners eventually get something
Readable$4.prototype.on = function(ev, fn) {
  var res = stream.prototype.on.call(this, ev, fn);
  // If listening to data, and it has not explicitly been paused,
  // then call resume to start the flow of data on the next tick.
  if (ev === 'data' && false !== this._readableState.flowing) {
    this.resume();
  }
  if (ev === 'readable' && this.readable) {
    var state = this._readableState;
    if (!state.readableListening) {
      // First 'readable' listener: prime the stream so the listener
      // is guaranteed to eventually be called.
      state.readableListening = true;
      state.emittedReadable = false;
      state.needReadable = true;
      if (!state.reading) {
        var self = this;
        process.nextTick(function() {
          debug('readable nexttick read 0');
          self.read(0);
        });
      } else if (state.length) {
        emitReadable$3(this);
      }
    }
  }
  return res;
};
// addListener must behave identically to on.
Readable$4.prototype.addListener = Readable$4.prototype.on;
// pause() and resume() are remnants of the legacy readable stream API
// If the user uses them, then switch into old mode.
Readable$4.prototype.resume = function() {
  var state = this._readableState;
  if (!state.flowing) {
    debug('resume');
    state.flowing = true;
    // Kick off a read immediately only if one isn't already pending;
    // the rest happens on next tick via resume().
    if (!state.reading) {
      debug('resume read 0');
      this.read(0);
    }
    resume(this, state);
  }
  return this;
};
// Schedule the actual resume work on the next tick, coalescing
// multiple resume() calls into a single scheduled run.
function resume(stream, state) {
  if (state.resumeScheduled) {
    return;
  }
  state.resumeScheduled = true;
  process.nextTick(function() {
    resume_(stream, state);
  });
}
// The scheduled half of resume(): emit 'resume', drain via flow(),
// and prime another read if still flowing and not mid-read.
function resume_(stream, state) {
  state.resumeScheduled = false;
  stream.emit('resume');
  flow$3(stream);
  if (state.flowing && !state.reading)
    stream.read(0);
}
// Stop the flow of 'data' events.  Emits 'pause' only on an actual
// transition out of flowing (or undecided) mode.
Readable$4.prototype.pause = function() {
  debug('call pause flowing=%j', this._readableState.flowing);
  var state = this._readableState;
  if (state.flowing !== false) {
    debug('pause');
    state.flowing = false;
    this.emit('pause');
  }
  return this;
};
// While in flowing mode, repeatedly read() so chunks are emitted as
// 'data' events, until the buffer is empty or something pauses us.
function flow$3(stream) {
  var state = stream._readableState;
  debug('flow', state.flowing);
  if (!state.flowing) {
    return;
  }
  var chunk;
  do {
    chunk = stream.read();
  } while (chunk !== null && state.flowing);
}
// wrap an old-style stream as the async data source.
// This is *not* part of the readable stream interface.
// It is an ugly unfortunate mess of history.
Readable$4.prototype.wrap = function(stream) {
  var state = this._readableState;
  var paused = false;
  var self = this;
  stream.on('end', function() {
    debug('wrapped end');
    // Flush any trailing decoded characters before signalling EOF.
    if (state.decoder && !state.ended) {
      var chunk = state.decoder.end();
      if (chunk && chunk.length)
        self.push(chunk);
    }
    self.push(null);
  });
  stream.on('data', function(chunk) {
    debug('wrapped data');
    if (state.decoder)
      chunk = state.decoder.write(chunk);
    if (!chunk || !state.objectMode && !chunk.length)
      return;
    var ret = self.push(chunk);
    if (!ret) {
      // our buffer is full: apply backpressure to the old stream.
      paused = true;
      stream.pause();
    }
  });
  // proxy all the other methods.
  // important when wrapping filters and duplexes.
  for (var i in stream) {
    if (util.isFunction(stream[i]) && util.isUndefined(this[i])) {
      // IIFE captures `i` so each proxy binds its own method name.
      this[i] = function(method) { return function() {
        return stream[method].apply(stream, arguments);
      }}(i);
    }
  }
  // proxy certain important events.
  // NOTE: this local `events` array shadows the module-level `events`
  // import inside this function.
  var events = ['error', 'close', 'destroy', 'pause', 'resume'];
  forEach$6(events, function(ev) {
    stream.on(ev, self.emit.bind(self, ev));
  });
  // when we try to consume some more bytes, simply unpause the
  // underlying stream.
  self._read = function(n) {
    debug('wrapped _read', n);
    if (paused) {
      paused = false;
      stream.resume();
    }
  };
  return self;
};
// exposed for testing purposes only.
Readable$4._fromList = fromList$3;
// Pluck off n bytes from an array of buffers.
// Length is the combined lengths of all the buffers in the list.
// n falsy or >= length means "take everything".  Mutates state.buffer
// but NOT state.length (the caller adjusts length).
function fromList$3(n, state) {
  var list = state.buffer;
  var length = state.length;
  var stringMode = !!state.decoder;
  var objectMode = !!state.objectMode;
  var ret;
  // nothing in the list, definitely empty.
  if (list.length === 0)
    return null;
  if (length === 0)
    ret = null;
  else if (objectMode)
    ret = list.shift();
  else if (!n || n >= length) {
    // read it all, truncate the array.
    if (stringMode)
      ret = list.join('');
    else
      ret = Buffer$8.concat(list, length);
    list.length = 0;
  } else {
    // read just some of it.
    if (n < list[0].length) {
      // just take a part of the first list item.
      // slice is the same for buffers and strings.
      var buf = list[0];
      ret = buf.slice(0, n);
      list[0] = buf.slice(n);
    } else if (n === list[0].length) {
      // first list is a perfect match
      ret = list.shift();
    } else {
      // complex case.
      // we have enough to cover it, but it spans past the first buffer.
      if (stringMode)
        ret = '';
      else
        ret = new Buffer$8(n);
      var c = 0;
      for (var i = 0, l = list.length; i < l && c < n; i++) {
        // Always reads list[0]: fully-consumed entries are shifted off
        // below, so the head advances as we copy.
        var buf = list[0];
        var cpy = Math.min(n - c, buf.length);
        if (stringMode)
          ret += buf.slice(0, cpy);
        else
          buf.copy(ret, c, 0, cpy);
        if (cpy < buf.length)
          list[0] = buf.slice(cpy);
        else
          list.shift();
        c += cpy;
      }
    }
  }
  return ret;
}
// Emit 'end' (once, on the next tick) after the buffer has fully
// drained.  Throws if called while data is still buffered.
function endReadable$3(stream) {
  var state = stream._readableState;
  // If we get here before consuming all the bytes, then that is a
  // bug in node.  Should never happen.
  if (state.length > 0)
    throw new Error('endReadable called on non-empty stream');
  if (!state.endEmitted) {
    state.ended = true;
    process.nextTick(function() {
      // Check that we didn't get one last unshift.
      if (!state.endEmitted && state.length === 0) {
        state.endEmitted = true;
        stream.readable = false;
        stream.emit('end');
      }
    });
  }
}
// Minimal Array#forEach stand-in: calls f(value, index) for each item.
function forEach$6 (xs, f) {
  var len = xs.length;
  for (var idx = 0; idx < len; idx++) {
    f(xs[idx], idx);
  }
}
// Linear scan with strict equality; returns -1 when absent.
function indexOf$3 (xs, x) {
  for (var j = 0, n = xs.length; j < n; j++) {
    if (xs[j] === x) {
      return j;
    }
  }
  return -1;
}
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a duplex stream is just a stream that is both readable and writable.
// Since JS doesn't have multiple prototypal inheritance, this class
// prototypally inherits from Readable, and then parasitically from
// Writable.
var _stream_duplex$3 = Duplex$3;
/*<replacement>*/
// Object.keys fallback for very old engines.
var objectKeys$5 = Object.keys || function (obj) {
  var keys = [];
  for (var key in obj) keys.push(key);
  return keys;
};
/*</replacement>*/
/*<replacement>*/
util.inherits = inherits_browser;
/*</replacement>*/
util.inherits(Duplex$3, _stream_readable$3);
// Copy Writable's prototype methods onto Duplex (the "parasitic" half),
// without clobbering anything inherited from Readable.
forEach$7(objectKeys$5(_stream_writable$3.prototype), function(method) {
  if (!Duplex$3.prototype[method])
    Duplex$3.prototype[method] = _stream_writable$3.prototype[method];
});
// Duplex constructor: initializes both the readable and writable
// halves and wires up the no-half-open enforcer.
function Duplex$3(options) {
  if (!(this instanceof Duplex$3)) {
    return new Duplex$3(options);
  }

  // Readable side first, then writable side.
  _stream_readable$3.call(this, options);
  _stream_writable$3.call(this, options);

  if (options && options.readable === false) this.readable = false;
  if (options && options.writable === false) this.writable = false;

  // By default the writable side stays open after the readable side
  // ends; allowHalfOpen:false closes it automatically.
  this.allowHalfOpen = !(options && options.allowHalfOpen === false);

  this.once('end', onend$3);
}
// the no-half-open enforcer
// 'end' listener on Duplex: ends the writable side when half-open
// streams are disallowed.  `this` is the Duplex instance.
function onend$3() {
  // if we allow half-open state, or if the writable side ended,
  // then we're ok.
  if (this.allowHalfOpen || this._writableState.ended)
    return;
  // no more data can be written.
  // But allow more writes to happen in this tick.
  process.nextTick(this.end.bind(this));
}
// Minimal Array#forEach stand-in: calls f(value, index) for each item.
function forEach$7 (xs, f) {
  var len = xs.length;
  for (var idx = 0; idx < len; idx++) {
    f(xs[idx], idx);
  }
}
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// A bit simpler than readable streams.
// Implement an async ._write(chunk, cb), and it'll handle all
// the drain event emission and buffering.
var _stream_writable$3 = Writable$3;
/*<replacement>*/
var Buffer$9 = buffer.Buffer;
/*</replacement>*/
// Expose the state class the same way upstream node does.
Writable$3.WritableState = WritableState$3;
/*<replacement>*/
util.inherits = inherits_browser;
/*</replacement>*/
util.inherits(Writable$3, stream);
// A single queued write: the chunk, its encoding, and the user
// callback to invoke once the underlying _write() completes.
function WriteReq$3(chunk, encoding, cb) {
  this.chunk = chunk;
  this.encoding = encoding;
  this.callback = cb;
}
// Per-instance state for the writable side of a stream.
// `stream` is the owning Writable (or Duplex) instance.
function WritableState$3(options, stream) {
  // Referenced lazily to avoid circular-dependency issues at load time.
  var Duplex = _stream_duplex$3;
  options = options || {};
  // the point at which write() starts returning false
  // Note: 0 is a valid value, means that we always return false if
  // the entire buffer is not flushed immediately on write()
  var hwm = options.highWaterMark;
  var defaultHwm = options.objectMode ? 16 : 16 * 1024;
  this.highWaterMark = (hwm || hwm === 0) ? hwm : defaultHwm;
  // object stream flag to indicate whether or not this stream
  // contains buffers or objects.
  this.objectMode = !!options.objectMode;
  if (stream instanceof Duplex)
    this.objectMode = this.objectMode || !!options.writableObjectMode;
  // cast to ints.
  this.highWaterMark = ~~this.highWaterMark;
  this.needDrain = false;
  // at the start of calling end()
  this.ending = false;
  // when end() has been called, and returned
  this.ended = false;
  // when 'finish' is emitted
  this.finished = false;
  // should we decode strings into buffers before passing to _write?
  // this is here so that some node-core streams can optimize string
  // handling at a lower level.
  var noDecode = options.decodeStrings === false;
  this.decodeStrings = !noDecode;
  // Crypto is kind of old and crusty.  Historically, its default string
  // encoding is 'binary' so we have to make this configurable.
  // Everything else in the universe uses 'utf8', though.
  this.defaultEncoding = options.defaultEncoding || 'utf8';
  // not an actual buffer we keep track of, but a measurement
  // of how much we're waiting to get pushed to some underlying
  // socket or file.
  this.length = 0;
  // a flag to see when we're in the middle of a write.
  this.writing = false;
  // when true all writes will be buffered until .uncork() call
  this.corked = 0;
  // a flag to be able to tell if the onwrite cb is called immediately,
  // or on a later tick.  We set this to true at first, because any
  // actions that shouldn't happen until "later" should generally also
  // not happen before the first write call.
  this.sync = true;
  // a flag to know if we're processing previously buffered items, which
  // may call the _write() callback in the same tick, so that we don't
  // end up in an overlapped onwrite situation.
  this.bufferProcessing = false;
  // the callback that's passed to _write(chunk,cb)
  this.onwrite = function(er) {
    onwrite$3(stream, er);
  };
  // the callback that the user supplies to write(chunk,encoding,cb)
  this.writecb = null;
  // the amount that is being written when _write is called.
  this.writelen = 0;
  // queue of pending WriteReq$3 entries.
  this.buffer = [];
  // number of pending user-supplied write callbacks
  // this must be 0 before 'finish' can be emitted
  this.pendingcb = 0;
  // emit prefinish if the only thing we're waiting for is _write cbs
  // This is relevant for synchronous Transform streams
  this.prefinished = false;
  // True if the error was already emitted and should not be thrown again
  this.errorEmitted = false;
}
// Writable stream constructor; usable with or without `new`.
function Writable$3(options) {
  var Duplex = _stream_duplex$3;

  // Writable ctor is applied to Duplexes, though they're not
  // instanceof Writable — they're instanceof Readable — so accept
  // either before falling back to `new`.
  if (!(this instanceof Writable$3) && !(this instanceof Duplex)) {
    return new Writable$3(options);
  }

  // Per-instance writable-side bookkeeping.
  this._writableState = new WritableState$3(options, this);

  // legacy.
  this.writable = true;

  stream.call(this);
}
// Otherwise people can pipe Writable streams, which is just wrong.
Writable$3.prototype.pipe = function() {
  this.emit('error', new Error('Cannot pipe. Not readable.'));
};
// Report a write() that arrived after end(): emit the error
// synchronously, but defer the user callback to the next tick.
function writeAfterEnd$3(stream, state, cb) {
  var error = new Error('write after end');
  // TODO: defer error events consistently everywhere, not just the cb
  stream.emit('error', error);
  process.nextTick(function() {
    cb(error);
  });
}
// If we get something that is not a buffer, string, null, or undefined,
// and we're not in objectMode, then that's an error.
// Otherwise stream chunks are all considered to be of length=1, and the
// watermarks determine how many objects to keep in the buffer, rather than
// how many bytes or characters.
function validChunk$3(stream, state, chunk, cb) {
  var acceptable = state.objectMode ||
                   util.isBuffer(chunk) ||
                   util.isString(chunk) ||
                   util.isNullOrUndefined(chunk);
  if (acceptable) {
    return true;
  }
  // Invalid chunk: surface the error both as an event and via the
  // (deferred) callback, and tell write() to bail out.
  var er = new TypeError('Invalid non-string/buffer chunk');
  stream.emit('error', er);
  process.nextTick(function() {
    cb(er);
  });
  return false;
}
// Write `chunk` to the stream.  Returns false once the internal buffer
// reaches highWaterMark (caller should wait for 'drain').
Writable$3.prototype.write = function(chunk, encoding, cb) {
  var state = this._writableState;
  var ret = false;
  // write(chunk, cb) shorthand.
  if (util.isFunction(encoding)) {
    cb = encoding;
    encoding = null;
  }
  if (util.isBuffer(chunk))
    encoding = 'buffer';
  else if (!encoding)
    encoding = state.defaultEncoding;
  if (!util.isFunction(cb))
    cb = function() {};
  if (state.ended)
    writeAfterEnd$3(this, state, cb);
  else if (validChunk$3(this, state, chunk, cb)) {
    state.pendingcb++;
    ret = writeOrBuffer$3(this, state, chunk, encoding, cb);
  }
  return ret;
};
// Buffer all subsequent writes until uncork() balances every cork().
Writable$3.prototype.cork = function() {
  var state = this._writableState;
  state.corked++;
};
// Undo one cork() call; flush the buffered writes once fully uncorked
// and no write is currently in flight.
Writable$3.prototype.uncork = function() {
  var state = this._writableState;
  if (state.corked) {
    state.corked--;
    if (!state.writing &&
        !state.corked &&
        !state.finished &&
        !state.bufferProcessing &&
        state.buffer.length)
      clearBuffer$3(this, state);
  }
};
// Convert a string chunk to a Buffer unless the stream opted out
// (objectMode, or decodeStrings === false); buffers pass through.
function decodeChunk$3(state, chunk, encoding) {
  var shouldDecode = !state.objectMode &&
                     state.decodeStrings !== false &&
                     util.isString(chunk);
  return shouldDecode ? new Buffer$9(chunk, encoding) : chunk;
}
// if we're already writing something, then just put this
// in the queue, and wait our turn.  Otherwise, call _write
// If we return false, then we need a drain event, so set that flag.
function writeOrBuffer$3(stream, state, chunk, encoding, cb) {
  chunk = decodeChunk$3(state, chunk, encoding);
  if (util.isBuffer(chunk))
    encoding = 'buffer';
  // In object mode every chunk counts as 1 against the watermark.
  var len = state.objectMode ? 1 : chunk.length;
  state.length += len;
  var ret = state.length < state.highWaterMark;
  // we must ensure that previous needDrain will not be reset to false.
  if (!ret)
    state.needDrain = true;
  if (state.writing || state.corked)
    state.buffer.push(new WriteReq$3(chunk, encoding, cb));
  else
    doWrite$3(stream, state, false, len, chunk, encoding, cb);
  return ret;
}
// Dispatch one write (or batched writev) to the implementation,
// recording what is in flight so onwrite() can settle the books.
function doWrite$3(stream, state, writev, len, chunk, encoding, cb) {
  state.writelen = len;
  state.writecb = cb;
  state.writing = true;
  state.sync = true;
  if (writev) {
    stream._writev(chunk, state.onwrite);
  } else {
    stream._write(chunk, encoding, state.onwrite);
  }
  // If _write completed synchronously, onwrite already observed
  // sync === true; either way the synchronous window is now closed.
  state.sync = false;
}
// Settle a failed write: release the pending-callback slot, invoke the
// user callback with the error, then emit 'error' on the stream.
function onwriteError$3(stream, state, sync, er, cb) {
  var settle = function() {
    state.pendingcb--;
    cb(er);
  };
  // When the failure surfaced synchronously, defer the callback so the
  // caller of write() gets its return value before the cb fires.
  if (sync) {
    process.nextTick(settle);
  } else {
    settle();
  }
  stream._writableState.errorEmitted = true;
  stream.emit('error', er);
}
// The in-flight write has settled: clear its bookkeeping and release
// the bytes it accounted for against the watermark.
function onwriteStateUpdate$3(state) {
  state.writing = false;
  state.writecb = null;
  state.length -= state.writelen;
  state.writelen = 0;
}
// Callback invoked (via state.onwrite) when _write/_writev completes.
// Settles bookkeeping, routes errors, flushes any queued writes, and
// runs the user callback — deferred to the next tick when the write
// completed synchronously.
function onwrite$3(stream, er) {
  var state = stream._writableState;
  var sync = state.sync;
  var cb = state.writecb;
  onwriteStateUpdate$3(state);
  if (er)
    onwriteError$3(stream, state, sync, er, cb);
  else {
    // Check if we're actually ready to finish, but don't emit yet
    var finished = needFinish$3(stream, state);
    if (!finished &&
        !state.corked &&
        !state.bufferProcessing &&
        state.buffer.length) {
      clearBuffer$3(stream, state);
    }
    if (sync) {
      process.nextTick(function() {
        afterWrite$3(stream, state, finished, cb);
      });
    } else {
      afterWrite$3(stream, state, finished, cb);
    }
  }
}
// Runs after a successful write settles: maybe emit 'drain', invoke
// the user callback, then check whether the stream can now finish.
function afterWrite$3(stream, state, finished, cb) {
  if (!finished)
    onwriteDrain$3(stream, state);
  state.pendingcb--;
  cb();
  finishMaybe$3(stream, state);
}
// Must force callback to be called on nextTick, so that we don't
// emit 'drain' before the write() consumer gets the 'false' return
// value, and has a chance to attach a 'drain' listener.
function onwriteDrain$3(stream, state) {
  // Emit 'drain' exactly once: when the buffer has fully flushed and
  // a prior write() returned false (needDrain was set).
  if (state.length !== 0 || !state.needDrain) {
    return;
  }
  state.needDrain = false;
  stream.emit('drain');
}
// if there's something in the buffer waiting, then process it
// Drains state.buffer either in one _writev batch (fast path) or
// chunk-by-chunk until a write goes asynchronous (slow path).
function clearBuffer$3(stream, state) {
state.bufferProcessing = true;
if (stream._writev && state.buffer.length > 1) {
// Fast case, write everything using _writev()
var cbs = [];
for (var c = 0; c < state.buffer.length; c++)
cbs.push(state.buffer[c].callback);
// count the one we are adding, as well.
// TODO(isaacs) clean this up
state.pendingcb++;
doWrite$3(stream, state, true, state.length, state.buffer, '', function(err) {
// Settle every buffered callback with the batch's single result.
for (var i = 0; i < cbs.length; i++) {
state.pendingcb--;
cbs[i](err);
}
});
// Clear buffer
state.buffer = [];
} else {
// Slow case, write chunks one-by-one
for (var c = 0; c < state.buffer.length; c++) {
var entry = state.buffer[c];
var chunk = entry.chunk;
var encoding = entry.encoding;
var cb = entry.callback;
var len = state.objectMode ? 1 : chunk.length;
doWrite$3(stream, state, false, len, chunk, encoding, cb);
// if we didn't call the onwrite immediately, then
// it means that we need to wait until it does.
// also, that means that the chunk and cb are currently
// being processed, so move the buffer counter past them.
if (state.writing) {
c++;
break;
}
}
// Keep only the entries that were not consumed in this pass.
if (c < state.buffer.length)
state.buffer = state.buffer.slice(c);
else
state.buffer.length = 0;
}
state.bufferProcessing = false;
}
// Abstract sink: concrete writables must override _write. _writev is
// optional and enables the batched fast path in clearBuffer$3.
Writable$3.prototype._write = function(chunk, encoding, cb) {
cb(new Error('not implemented'));
};
Writable$3.prototype._writev = null;
// Finish the writable side: optionally write a final chunk, fully
// uncork, then mark the stream as ending (idempotent).
Writable$3.prototype.end = function(chunk, encoding, cb) {
var state = this._writableState;
// Shuffle optional arguments: end(cb) / end(chunk, cb) / end(chunk, enc, cb).
if (util.isFunction(chunk)) {
cb = chunk;
chunk = null;
encoding = null;
} else if (util.isFunction(encoding)) {
cb = encoding;
encoding = null;
}
if (!util.isNullOrUndefined(chunk))
this.write(chunk, encoding);
// .end() fully uncorks
// (forcing corked to 1 makes the single uncork() below drop it to 0
// regardless of how many times cork() was nested)
if (state.corked) {
state.corked = 1;
this.uncork();
}
// ignore unnecessary end() calls.
if (!state.ending && !state.finished)
endWritable$3(this, state, cb);
};
// True once end() was called, every byte has been flushed, no write is
// in flight, and 'finish' has not fired yet.
function needFinish$3(stream, state) {
  return (state.ending &&
    state.length === 0 &&
    !state.writing &&
    !state.finished);
}
// Emit 'prefinish' exactly once per stream.
function prefinish(stream, state) {
  if (state.prefinished) return;
  state.prefinished = true;
  stream.emit('prefinish');
}
// Emit 'finish' when the stream is fully flushed. 'prefinish' always
// precedes it, and fires even while callbacks are still pending.
function finishMaybe$3(stream, state) {
  var done = needFinish$3(stream, state);
  if (done) {
    prefinish(stream, state);
    if (state.pendingcb === 0) {
      state.finished = true;
      stream.emit('finish');
    }
  }
  return done;
}
// Mark the stream as ending, attempt to finish immediately, and make
// sure `cb` runs after 'finish' (asynchronously if already finished).
function endWritable$3(stream, state, cb) {
  state.ending = true;
  finishMaybe$3(stream, state);
  if (cb) {
    if (state.finished) {
      process.nextTick(cb);
    } else {
      stream.once('finish', cb);
    }
  }
  state.ended = true;
}
// Bundle-local alias for this vendored copy of readable-stream's Writable.
var writable$1 = _stream_writable$3;
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a transform stream is a readable/writable stream where you do
// something with the data. Sometimes it's called a "filter",
// but that's not a great name for it, since that implies a thing where
// some bits pass through, and others are simply ignored. (That would
// be a valid example of a transform, of course.)
//
// While the output is causally related to the input, it's not a
// necessarily symmetric or synchronous transformation. For example,
// a zlib stream might take multiple plain-text writes(), and then
// emit a single compressed chunk some time in the future.
//
// Here's how this works:
//
// The Transform stream has all the aspects of the readable and writable
// stream classes. When you write(chunk), that calls _write(chunk,cb)
// internally, and returns false if there's a lot of pending writes
// buffered up. When you call read(), that calls _read(n) until
// there's enough pending readable data buffered up.
//
// In a transform stream, the written data is placed in a buffer. When
// _read(n) is called, it transforms the queued up data, calling the
// buffered _write cb's as it consumes chunks. If consuming a single
// written chunk would result in multiple output chunks, then the first
// outputted bit calls the readcb, and subsequent chunks just go into
// the read buffer, and will cause it to emit 'readable' if necessary.
//
// This way, back-pressure is actually determined by the reading side,
// since _read has to be called to start processing a new chunk. However,
// a pathological inflate type of transform can cause excessive buffering
// here. For example, imagine a stream where every byte of input is
// interpreted as an integer from 0-255, and then results in that many
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
// 1kb of data being output. In this case, you could write a very small
// amount of input, and end up with a very large amount of output. In
// such a pathological inflating mechanism, there'd be no way to tell
// the system to stop doing the transform. A single 4MB write could
// cause the system to run out of memory.
//
// However, even in such a pathological case, only a single written chunk
// would be consumed, and then the rest would wait (un-transformed) until
// the results of the previous transformed chunk were consumed.
// Transform is a Duplex: writable input, readable transformed output.
var _stream_transform$3 = Transform$3;
/*<replacement>*/
util.inherits = inherits_browser;
/*</replacement>*/
util.inherits(Transform$3, _stream_duplex$3);
// Per-stream transform bookkeeping: the chunk/callback parked by _write
// and the bound trampoline handed to _transform() as its callback.
function TransformState$3(options, stream) {
  var self = stream;
  this.afterTransform = function(er, data) {
    return afterTransform$3(self, er, data);
  };
  this.needTransform = false;
  this.transforming = false;
  this.writecb = null;
  this.writechunk = null;
}
// Completion callback for _transform(): push the produced data, settle
// the parked write callback, and pull more input if there is room.
function afterTransform$3(stream, er, data) {
  var ts = stream._transformState;
  ts.transforming = false;
  var callback = ts.writecb;
  if (!callback) {
    // _transform signalled completion without a pending write.
    return stream.emit('error', new Error('no writecb in Transform class'));
  }
  ts.writechunk = null;
  ts.writecb = null;
  if (!util.isNullOrUndefined(data)) {
    stream.push(data);
  }
  callback(er);
  var rs = stream._readableState;
  rs.reading = false;
  if (rs.needReadable || rs.length < rs.highWaterMark) {
    stream._read(rs.highWaterMark);
  }
}
// Transform constructor: a Duplex whose writable side feeds _transform
// and whose readable side emits the transformed output.
function Transform$3(options) {
if (!(this instanceof Transform$3))
return new Transform$3(options);
_stream_duplex$3.call(this, options);
this._transformState = new TransformState$3(options, this);
// when the writable side finishes, then flush out anything remaining.
var stream = this;
// start out asking for a readable event once data is transformed.
this._readableState.needReadable = true;
// we have implemented the _read method, and done the other things
// that Readable wants before the first _read call, so unset the
// sync guard flag.
this._readableState.sync = false;
// On 'prefinish' run the optional _flush hook, then signal EOF via done$3.
this.once('prefinish', function() {
if (util.isFunction(this._flush))
this._flush(function(er) {
done$3(stream, er);
});
else
done$3(stream);
});
}
// Output is being produced, so no further transform is pending;
// delegate the actual push to the Duplex/Readable implementation.
Transform$3.prototype.push = function(chunk, encoding) {
  var ts = this._transformState;
  ts.needTransform = false;
  return _stream_duplex$3.prototype.push.call(this, chunk, encoding);
};
// This is the part where you do stuff!
// override this function in implementation classes.
// 'chunk' is an input chunk.
//
// Call `push(newChunk)` to pass along transformed output
// to the readable side. You may call 'push' zero or more times.
//
// Call `cb(err)` when you are done with this chunk. If you pass
// an error, then that'll put the hurt on the whole operation. If you
// never call cb(), then you'll never get another chunk.
Transform$3.prototype._transform = function(chunk, encoding, cb) {
// Abstract: subclasses must override; the base class cannot transform.
throw new Error('not implemented');
};
// Park the incoming chunk; _read() hands it to _transform when the
// readable side wants data. If a transform is not already running and
// the readable side has room, kick _read immediately.
Transform$3.prototype._write = function(chunk, encoding, cb) {
  var ts = this._transformState;
  ts.writecb = cb;
  ts.writechunk = chunk;
  ts.writeencoding = encoding;
  if (ts.transforming) return;
  var rs = this._readableState;
  if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) {
    this._read(rs.highWaterMark);
  }
};
// Doesn't matter what the args are here.
// _transform does all the work.
// That we got here means that the readable side wants more data.
Transform$3.prototype._read = function(n) {
  var ts = this._transformState;
  var hasParkedChunk = !util.isNull(ts.writechunk) && ts.writecb && !ts.transforming;
  if (hasParkedChunk) {
    ts.transforming = true;
    this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
  } else {
    // Nothing buffered yet: remember the demand so the next _write
    // triggers a transform immediately.
    ts.needTransform = true;
  }
};
// Final step after _flush: with the writable side drained and no
// transform in flight, signal EOF on the readable side (push(null)).
function done$3(stream, er) {
  if (er) {
    return stream.emit('error', er);
  }
  // if there's nothing in the write buffer, then that means
  // that nothing more will ever be provided
  var ws = stream._writableState;
  var ts = stream._transformState;
  if (ws.length) {
    throw new Error('calling transform done when ws.length != 0');
  }
  if (ts.transforming) {
    throw new Error('calling transform done when still transforming');
  }
  return stream.push(null);
}
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a passthrough stream.
// basically just the most minimal sort of Transform stream.
// Every written chunk gets output as-is.
// PassThrough is a Transform that forwards every chunk unchanged.
var _stream_passthrough$3 = PassThrough$3;
/*<replacement>*/
util.inherits = inherits_browser;
/*</replacement>*/
util.inherits(PassThrough$3, _stream_transform$3);
// Minimal Transform subclass; supports construction without `new`.
function PassThrough$3(options) {
  if (!(this instanceof PassThrough$3)) {
    return new PassThrough$3(options);
  }
  _stream_transform$3.call(this, options);
}
// Identity transform: emit every chunk exactly as written.
PassThrough$3.prototype._transform = function(chunk, encoding, cb) {
cb(null, chunk);
};
// CommonJS facade for this vendored readable-stream copy: the Readable
// constructor is the export, with the other classes attached as props.
var readable$3 = createCommonjsModule(function (module, exports) {
exports = module.exports = _stream_readable$3;
exports.Stream = stream;
exports.Readable = exports;
exports.Writable = _stream_writable$3;
exports.Duplex = _stream_duplex$3;
exports.Transform = _stream_transform$3;
exports.PassThrough = _stream_passthrough$3;
// Outside browsers, allow opting back into core streams via env var.
if (!process.browser && process.env.READABLE_STREAM === 'disable') {
module.exports = stream;
}
});
// Named aliases for the facade's attached classes (rollup interop).
var readable_1$3 = readable$3.Stream;
var readable_2$3 = readable$3.Readable;
var readable_3$3 = readable$3.Writable;
var readable_4$3 = readable$3.Duplex;
var readable_5$3 = readable$3.Transform;
var readable_6$3 = readable$3.PassThrough;
// Normalize a levelup range: order start/end (default lexicographic
// sort), then swap them when iterating in reverse. Mutates and returns
// the same opts object.
var levelFixRange$1 =
function fixRange(opts) {
  var lo = opts.start;
  var hi = opts.end;
  var pair = [lo, hi];
  if (lo != null && hi != null) {
    pair.sort();
  }
  if (opts.reverse) {
    pair.reverse();
  }
  opts.start = pair[0];
  opts.end = pair[1];
  return opts
};
var levelPeek = createCommonjsModule(function (module, exports) {
//get the first/last record in a range
exports = module.exports = peek;
exports.first = first;
exports.last = last;
// Attach `listener` to several events at once; when it returns anything
// other than false, detach all the listeners again (one-shot fan-in).
function once(emitter, events, listener) {
var remove = [];
events.forEach(function (e) {
function onEvent (arg) {
if(listener(e, arg) === false) return
remove.forEach(function (r) {
r();
});
}
emitter.on(e, onEvent);
remove.push(function () {
emitter.removeListener(e, onEvent);
});
});
return emitter
}
// Read at most one record (two when reversed) from the range and hand
// the first usable one to cb(err, key, value).
function peek (db, opts, cb) {
opts.limit = opts.reverse ? 2 : 1;
var stream = once(db.createReadStream(opts),
['data', 'error', 'end'],
function (event, data) {
// In reverse mode skip a record that overshoots opts.start and keep
// listening (returning false keeps the listeners attached).
if(opts.reverse && data && opts.start
&& (data.key.toString() > opts.start))
return false
if(event == 'error') cb(data);
else if(event == 'end') cb(new Error('range not found'), null, null);
else cb(null, data.key, data.value);
});
}
// First record in the (normalized) range.
function first (db, opts, cb) {
if (!cb) {
cb = opts;
opts = {};
}
opts.reverse = false;
return peek(db, levelFixRange$1(opts), cb)
}
//SO, this is pretty horrible,
//but it's works around an issue here
//https://github.com/rvagg/node-levelup/issues/110
// Last record in the range; on failure, retries without a start bound
// and re-checks the key against the original bounds.
function last (db, opts, cb) {
if (!cb) {
cb = opts;
opts = {};
}
var start = opts.start;
opts.reverse = true;
return peek(db, levelFixRange$1(opts), function (err, key, value) {
if(err) {
var _start = opts.start;
opts.start = null;
peek (db, opts, function (_, key, value) {
if(!key) return cb(err, null, null)
var _key = key.toString();
if(_key <= _start && (!opts.end || _key >= opts.end))
cb(_, key, value);
else cb(err, null, null);
});
}
else cb(err, key, value);
})
}
});
// Named aliases for level-peek's first/last helpers (rollup interop).
var levelPeek_1 = levelPeek.first;
var levelPeek_2 = levelPeek.last;
// Returns a wrapper function that returns a wrapped callback
// The wrapper function should do some stuff, and return a
// presumably different callback function.
// This makes sure that own properties are retained, so that
// decorations and such are not lost along the way.
var wrappy_1 = wrappy;
function wrappy (fn, cb) {
  // Two-argument form: wrap and apply in one step.
  if (fn && cb) return wrappy(fn)(cb)
  if (typeof fn !== 'function')
    throw new TypeError('need wrapper function')
  // Carry fn's own decorations over to the wrapper.
  Object.keys(fn).forEach(function (k) {
    wrapper[k] = fn[k];
  });
  return wrapper
  function wrapper() {
    var args = Array.prototype.slice.call(arguments);
    var ret = fn.apply(this, args);
    var lastArg = args[args.length - 1];
    // If fn produced a new callback, copy the original callback's own
    // properties onto it so decorations survive the wrapping.
    if (typeof ret === 'function' && ret !== lastArg) {
      Object.keys(lastArg).forEach(function (k) {
        ret[k] = lastArg[k];
      });
    }
    return ret
  }
}
// Wrap once/onceStrict so properties set on the inner function survive.
var once_1 = wrappy_1(once);
var strict = wrappy_1(onceStrict);
// Opt-in prototype patching: calling once.proto() (itself once-guarded)
// adds .once()/.onceStrict() helpers to Function.prototype.
once.proto = once(function () {
Object.defineProperty(Function.prototype, 'once', {
value: function () {
return once(this)
},
configurable: true
});
Object.defineProperty(Function.prototype, 'onceStrict', {
value: function () {
return onceStrict(this)
},
configurable: true
});
});
// Wrap fn so it runs at most once; later calls return the first result.
// Exposes .called and .value on the returned function.
function once (fn) {
  var wrapped = function () {
    if (wrapped.called) return wrapped.value
    wrapped.called = true;
    return wrapped.value = fn.apply(this, arguments)
  };
  wrapped.called = false;
  return wrapped
}
// Like once(), but a second invocation throws instead of returning the
// cached value. The error message is precomputed from fn's name.
function onceStrict (fn) {
  var wrapped = function () {
    if (wrapped.called)
      throw new Error(wrapped.onceError)
    wrapped.called = true;
    return wrapped.value = fn.apply(this, arguments)
  };
  var label = fn.name || 'Function wrapped with `once`';
  wrapped.onceError = label + " shouldn't be called more than once";
  wrapped.called = false;
  return wrapped
}
// Expose the throwing variant alongside the default silent one.
once_1.strict = strict;
// Shared zero-length buffer used as a sentinel final chunk.
// Buffer.alloc/Buffer.from replace the deprecated, unsafe
// `new Buffer()` constructor with identical results.
var EMPTY = Buffer.alloc(0);
// Raw pass-through value codec handed to the level store so blob blocks
// round-trip as Buffers (strings are converted on the way in).
var ENCODER = {
  encode: function(data) {
    return typeof data === 'string' ? Buffer.from(data) : data;
  },
  decode: function(data) {
    return Buffer.isBuffer(data) ? data : Buffer.from(data);
  },
  buffer: true,
  type: 'raw'
};
// Shared do-nothing callback.
var noop = function() {};
// Left-pad a non-negative integer's hex form to 8 characters (longer
// hex strings pass through unchanged).
var pad = function(n) {
  var hex = n.toString(16);
  return '00000000'.slice(0, -hex.length) + hex;
};
// Grow `buf` to `len` bytes. Buffer.alloc zero-fills, so the bytes past
// the copied region are deterministic zeros — the deprecated
// `new Buffer(len)` could hand back uninitialized memory in old Node.
var expand = function(buf, len) {
  var tmp = Buffer.alloc(len);
  buf.copy(tmp);
  return tmp;
};
// level-blobs: store arbitrarily large binary values in a levelup `db`
// by splitting them into fixed-size blocks keyed `name + '\xff' + hexIndex`.
// Returns a `blobs` API: createReadStream/createWriteStream/read/write/
// remove/size.
var levelBlobs = function(db, opts) {
if (!opts) opts = {};
var blobs = {};
var blockSize = opts.blockSize || 65536;
var maxBatch = opts.batch || 100;
// NOTE(review): new Buffer(n) is zero-filled by the browser Buffer shim,
// but was uninitialized memory in old Node — confirm the target env,
// since `blank` stands in for sparse holes and should read as zeros.
var blank = new Buffer(blockSize);
db.put('\x00', 'ignore', noop); // memdown#12 workaround
var reservations = {};
// Read-modify-write one block under a per-key reservation (lock count)
// so concurrent partial writes to the same block do not clobber each other.
var mutateBlock = function(key, offset, block, append, cb) {
var release = function() {
if (!--reservations[key].locks) delete reservations[key];
};
var onreservation = function(r) {
r.locks++;
// Whole-block write with nothing stored yet: take the block as-is.
if (!r.block && !offset) {
r.block = block;
cb(null, r.block, release);
return;
}
if (!r.block) r.block = new Buffer(blockSize);
if (r.block.length < offset + block.length) r.block = expand(r.block, offset + block.length);
block.copy(r.block, offset);
// Non-append writes truncate the block at the end of the new data.
if (!append && offset + block.length < r.block.length) r.block = r.block.slice(0, offset+block.length);
cb(null, r.block, release);
};
if (reservations[key]) return onreservation(reservations[key]);
db.get(key, {valueEncoding:ENCODER}, function(err, block) {
if (err && !err.notFound) return cb(err);
if (!reservations[key]) reservations[key] = {locks:0, block:block};
onreservation(reservations[key]);
});
};
// Writable stream that buffers data into blockSize pieces and batches
// the resulting puts.
var WriteStream = function(name, opts) {
if (!(this instanceof WriteStream)) return new WriteStream(name, opts);
if (!opts) opts = {};
this.name = name;
this.blocks = [];
this.batch = [];
this.bytesWritten = 0;
this.truncate = !opts.append;
this.append = opts.append;
// Appending without an explicit start: look up the current size first.
this._shouldInitAppend = this.append && opts.start === undefined;
this._destroyed = false;
this._init(opts.start || 0);
writable$1.call(this);
};
util$2.inherits(WriteStream, writable$1);
// Position the cursor: which block we are in and the offset inside it.
WriteStream.prototype._init = function(start) {
this.blockIndex = (start / blockSize) | 0;
this.blockOffset = start - this.blockIndex * blockSize;
this.blockLength = this.blockOffset;
};
// Flush the accumulated batch; the first flush also truncates any old
// trailing blocks of the blob.
WriteStream.prototype._flush = function(cb) {
if (!this.batch.length) return cb();
var key = this.batch[this.batch.length-1].key;
var batch = this.batch;
this.batch = [];
if (!this.truncate) return db.batch(batch, cb);
this.truncate = false;
this._truncate(batch, key, cb);
};
// Delete every block at or after `key`, then apply `batch` atomically.
WriteStream.prototype._truncate = function(batch, key, cb) {
cb = once_1(cb);
var dels = [];
var keys = db.createKeyStream({
start: key,
end: this.name+'\xff\xff'
});
keys.on('error', cb);
keys.on('data', function(key) {
dels.push({type:'del', key:key});
});
keys.on('end', function() {
dels.push.apply(dels, batch);
db.batch(dels, cb);
});
};
// Emit the current block: full aligned blocks are queued directly,
// partial blocks go through mutateBlock (read-modify-write).
WriteStream.prototype._writeBlock = function(cb) {
var block = this.blocks.length === 1 ? this.blocks[0] : Buffer.concat(this.blocks, this.blockLength - this.blockOffset);
var index = this.blockIndex;
var offset = this.blockOffset;
var self = this;
this.blockOffset = 0;
this.blockLength = 0;
this.blockIndex++;
this.blocks = [];
var key = this.name+'\xff'+pad(index);
var append = function(block, force, cb) {
if (block.length) {
self.batch.push({
type: 'put',
key: key,
value: block,
valueEncoding: ENCODER
});
}
if (!force && self.batch.length < maxBatch) return cb();
return self._flush(cb);
};
if (!offset && block.length === blockSize) return append(block, false, cb);
if (!offset && !this.append) return append(block, false, cb);
// partial write
mutateBlock(key, offset, block, this.append, function(err, block, release) {
if (err) return cb(err);
append(block, true, function(err) {
release();
cb(err);
});
});
};
// Lazy append setup: compute the blob's current size, position the
// cursor there, then retry the pending write.
WriteStream.prototype._initAppend = function(data, enc, cb) {
var self = this;
this._shouldInitAppend = false;
blobs.size(this.name, function(err, size) {
if (err) return cb(err);
self._init(size);
self._write(data, enc, cb);
});
};
// Accumulate data; when a block fills, write it out and continue with
// any overflow.
WriteStream.prototype._write = function(data, enc, cb) {
if (!data.length || this._destroyed) return cb();
if (this._shouldInitAppend) return this._initAppend(data, enc, cb);
var self = this;
var overflow;
var free = blockSize - this.blockLength;
var done = function(err) {
if (err) return cb(err);
if (overflow) return self._write(overflow, enc, cb);
cb();
};
if (data.length > free) {
overflow = data.slice(free);
data = data.slice(0, free);
}
this.bytesWritten += data.length;
this.blockLength += data.length;
this.blocks.push(data);
if (data.length < free) return done();
this._writeBlock(done);
};
WriteStream.prototype.destroy = function() {
if (this._destroyed) return;
this._destroyed = true;
process.nextTick(this.emit.bind(this, 'close'));
};
// end(): force out the last partial block and flush the batch before
// delegating to the base-class end().
WriteStream.prototype.end = function(data) {
var self = this;
var args = arguments;
if (data && typeof data !== 'function') {
this.write(data);
data = EMPTY;
}
this.write(EMPTY, function() {
self._writeBlock(function(err) {
if (err) return self.emit('error', err);
self._flush(function(err) {
if (err) return self.emit('error', err);
writable$1.prototype.end.apply(self, args);
});
});
});
};
// Readable stream over a blob's blocks; missing keys are treated as
// blank (sparse) blocks.
var ReadStream = function(name, opts) {
if (!opts) opts = {};
var self = this;
var start = opts.start || 0;
var blockIndex = (start / blockSize) | 0;
var blockOffset = start - blockIndex * blockSize;
var key = name+'\xff'+pad(blockIndex);
this.name = name;
this._missing = (typeof opts.end === 'number' ? opts.end : Infinity) - start + 1;
this._paused = false;
this._destroyed = false;
this._reader = db.createReadStream({
start: key,
end: name+'\xff\xff',
valueEncoding: ENCODER
});
// Emit one block's worth of data, honoring the initial offset and the
// remaining byte budget; returns false once the budget is exhausted.
var onblock = function(val) {
key = name+'\xff'+pad(++blockIndex);
if (!self._missing) return false;
if (blockOffset) {
val = val.slice(blockOffset);
blockOffset = 0;
if (!val.length) return true;
}
if (val.length > self._missing) val = val.slice(0, self._missing);
self._missing -= val.length;
self._pause(!self.push(val));
return !!self._missing;
};
this._reader.on('data', function(data) {
// Fill gaps in the key sequence with blank blocks before emitting.
while (data.key > key) {
if (!onblock(blank)) return;
}
onblock(data.value);
});
this._reader.on('error', function(err) {
self.emit('error', err);
});
this._reader.on('end', function() {
self.push(null);
});
readable$3.call(this);
};
util$2.inherits(ReadStream, readable$3);
ReadStream.prototype.destroy = function() {
if (this._destroyed) return;
this._destroyed = true;
this._reader.destroy();
process.nextTick(this.emit.bind(this, 'close'));
};
// Propagate backpressure to the underlying key/value reader.
ReadStream.prototype._pause = function(paused) {
if (this._paused === paused) return;
this._paused = paused;
if (this._paused) this._reader.pause();
else this._reader.resume();
};
ReadStream.prototype._read = function() {
this._pause(false);
};
// Delete every block belonging to `name`.
blobs.remove = function(name, cb) {
cb = once_1(cb || noop);
var batch = [];
var keys = db.createKeyStream({
start: name+'\xff',
end: name+'\xff\xff'
});
keys.on('error', cb);
keys.on('data', function(key) {
batch.push({type:'del', key:key});
});
keys.on('end', function() {
db.batch(batch, cb);
});
};
// Size = index of the last block (parsed from its hex key suffix)
// times blockSize, plus the last block's length.
blobs.size = function(name, cb) {
levelPeek.last(db, {
start: name+'\xff',
end: name+'\xff\xff',
valueEncoding:ENCODER
}, function(err, latest, val) {
if (err && err.message === 'range not found') return cb(null, 0);
if (err) return cb(err);
if (latest.slice(0, name.length+1) !== name+'\xff') return cb(null, 0);
cb(null, parseInt(latest.toString().slice(name.length+1), 16) * blockSize + val.length);
});
};
// One-shot write of a whole value via a temporary WriteStream.
blobs.write = function(name, data, opts, cb) {
if (typeof opts === 'function') return blobs.write(name, data, null, opts);
if (!opts) opts = {};
if (!cb) cb = noop;
var ws = blobs.createWriteStream(name, opts);
ws.on('error', cb);
ws.on('finish', function() {
cb();
});
ws.write(data);
ws.end();
};
// One-shot read of a whole value via a temporary ReadStream.
blobs.read = function(name, opts, cb) {
if (typeof opts === 'function') return blobs.read(name, null, opts);
if (!opts) opts = {};
var rs = blobs.createReadStream(name, opts);
var list = [];
rs.on('error', cb);
rs.on('data', function(data) {
list.push(data);
});
rs.on('end', function() {
cb(null, list.length === 1 ? list[0] : Buffer.concat(list));
});
};
blobs.createReadStream = function(name, opts) {
return new ReadStream(name, opts);
};
blobs.createWriteStream = function(name, opts) {
return new WriteStream(name, opts);
};
return blobs;
};
// Parse `num` (number or string) in the given base, defaulting to octal
// — used for tar/stat-style permission fields.
var octal = function (num, base) {
  var radix = base || 8;
  return parseInt(num.toString(), radix)
};
// For every known errno code, export a factory that builds an Error with
// node-style errno/code/path properties and a descriptive message.
var errno_1$1 = createCommonjsModule(function (module, exports) {
Object.keys(errno.code).forEach(function(code) {
var e = errno.code[code];
exports[code] = function(path) {
var err = new Error(code+', '+e.description+(path ? ' \''+path+'\'' : ''));
err.errno = e.errno;
err.code = code;
err.path = path;
return err;
};
});
});
// process-nextick-args: on old Node (v0.x / pre-v1.8) process.nextTick
// ignores extra arguments, so wrap it; otherwise use process directly.
var processNextickArgs = createCommonjsModule(function (module) {
if (!process.version ||
process.version.indexOf('v0.') === 0 ||
process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) {
module.exports = { nextTick: nextTick };
} else {
module.exports = process;
}
// Schedule fn on the next tick, forwarding up to three args without
// allocating in the common cases.
function nextTick(fn, arg1, arg2, arg3) {
if (typeof fn !== 'function') {
throw new TypeError('"callback" argument must be a function');
}
var len = arguments.length;
var args, i;
switch (len) {
case 0:
case 1:
return process.nextTick(fn);
case 2:
return process.nextTick(function afterTickOne() {
fn.call(null, arg1);
});
case 3:
return process.nextTick(function afterTickTwo() {
fn.call(null, arg1, arg2);
});
case 4:
return process.nextTick(function afterTickThree() {
fn.call(null, arg1, arg2, arg3);
});
default:
args = new Array(len - 1);
i = 0;
// The LHS index is evaluated before i++ takes effect on the RHS read,
// so this copies arguments[k+1] into args[k] — skipping `fn` itself.
while (i < args.length) {
args[i++] = arguments[i];
}
return process.nextTick(function afterTick() {
fn.apply(null, args);
});
}
}
});
// Named alias for the module's nextTick helper (rollup interop).
var processNextickArgs_1 = processNextickArgs.nextTick;
var toString$3 = {}.toString;
// Prefer the native Array.isArray; fall back to an
// Object.prototype.toString tag check for very old engines.
var isarray$4 = Array.isArray || function (arr) {
  return toString$3.call(arr) === '[object Array]';
};
// Browser stand-in for the core 'stream' base: a bare EventEmitter.
var streamBrowser = events.EventEmitter;
// safe-buffer: re-export the environment's buffer module when it already
// has the safe from/alloc APIs; otherwise shim them on top of the legacy
// Buffer constructor.
var safeBuffer = createCommonjsModule(function (module, exports) {
/* eslint-disable node/no-deprecated-api */
var Buffer = buffer.Buffer;
// alternative to using Object.keys for old browsers
function copyProps (src, dst) {
for (var key in src) {
dst[key] = src[key];
}
}
if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {
module.exports = buffer;
} else {
// Copy properties from require('buffer')
copyProps(buffer, exports);
exports.Buffer = SafeBuffer;
}
function SafeBuffer (arg, encodingOrOffset, length) {
return Buffer(arg, encodingOrOffset, length)
}
// Copy static methods from Buffer
copyProps(Buffer, SafeBuffer);
// from(): like new Buffer(value) but rejects numbers, which would
// allocate uninitialized memory.
SafeBuffer.from = function (arg, encodingOrOffset, length) {
if (typeof arg === 'number') {
throw new TypeError('Argument must not be a number')
}
return Buffer(arg, encodingOrOffset, length)
};
// alloc(): zero-filled (or fill-pattern-filled) allocation.
SafeBuffer.alloc = function (size, fill, encoding) {
if (typeof size !== 'number') {
throw new TypeError('Argument must be a number')
}
var buf = Buffer(size);
if (fill !== undefined) {
if (typeof encoding === 'string') {
buf.fill(fill, encoding);
} else {
buf.fill(fill);
}
} else {
buf.fill(0);
}
return buf
};
// allocUnsafe(): uninitialized allocation, caller must overwrite.
SafeBuffer.allocUnsafe = function (size) {
if (typeof size !== 'number') {
throw new TypeError('Argument must be a number')
}
return Buffer(size)
};
SafeBuffer.allocUnsafeSlow = function (size) {
if (typeof size !== 'number') {
throw new TypeError('Argument must be a number')
}
return buffer.SlowBuffer(size)
};
});
// Named alias for the (possibly shimmed) Buffer class.
var safeBuffer_1 = safeBuffer.Buffer;
// Singly-linked list of buffered chunks used by readable-stream 2.x to
// avoid array shifts; tracks head, tail, and chunk count.
var BufferList$1 = createCommonjsModule(function (module) {
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
var Buffer = safeBuffer.Buffer;
function copyBuffer(src, target, offset) {
src.copy(target, offset);
}
module.exports = function () {
function BufferList() {
_classCallCheck(this, BufferList);
this.head = null;
this.tail = null;
this.length = 0;
}
// Append a chunk at the tail.
BufferList.prototype.push = function push(v) {
var entry = { data: v, next: null };
if (this.length > 0) this.tail.next = entry;else this.head = entry;
this.tail = entry;
++this.length;
};
// Prepend a chunk at the head (used when un-consuming data).
BufferList.prototype.unshift = function unshift(v) {
var entry = { data: v, next: this.head };
if (this.length === 0) this.tail = entry;
this.head = entry;
++this.length;
};
// Remove and return the head chunk (undefined when empty).
BufferList.prototype.shift = function shift() {
if (this.length === 0) return;
var ret = this.head.data;
if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;
--this.length;
return ret;
};
BufferList.prototype.clear = function clear() {
this.head = this.tail = null;
this.length = 0;
};
// String-join every chunk with separator `s`.
BufferList.prototype.join = function join(s) {
if (this.length === 0) return '';
var p = this.head;
var ret = '' + p.data;
while (p = p.next) {
ret += s + p.data;
}return ret;
};
// Concatenate all chunks into one Buffer of byte length `n`.
BufferList.prototype.concat = function concat(n) {
if (this.length === 0) return Buffer.alloc(0);
if (this.length === 1) return this.head.data;
var ret = Buffer.allocUnsafe(n >>> 0);
var p = this.head;
var i = 0;
while (p) {
copyBuffer(p.data, ret, i);
i += p.data.length;
p = p.next;
}
return ret;
};
return BufferList;
}();
// Friendly util.inspect output: show only the length, not the chain.
if (util$2 && util$2.inspect && util$2.inspect.custom) {
module.exports.prototype[util$2.inspect.custom] = function () {
var obj = util$2.inspect({ length: this.length });
return this.constructor.name + ' ' + obj;
};
}
});
/*<replacement>*/
/*</replacement>*/
// undocumented cb() API, needed for core, not for public API
// Shared destroy() for readable/writable/duplex: mark both sides
// destroyed, run _destroy, and route any error to cb or 'error'.
function destroy$1(err, cb) {
var _this = this;
var readableDestroyed = this._readableState && this._readableState.destroyed;
var writableDestroyed = this._writableState && this._writableState.destroyed;
// Already destroyed: just report the error (once) and bail.
if (readableDestroyed || writableDestroyed) {
if (cb) {
cb(err);
} else if (err && (!this._writableState || !this._writableState.errorEmitted)) {
processNextickArgs.nextTick(emitErrorNT, this, err);
}
return this;
}
// we set destroyed to true before firing error callbacks in order
// to make it re-entrance safe in case destroy() is called within callbacks
if (this._readableState) {
this._readableState.destroyed = true;
}
// if this is a duplex stream mark the writable part as destroyed as well
if (this._writableState) {
this._writableState.destroyed = true;
}
this._destroy(err || null, function (err) {
// No cb: emit the error asynchronously and remember we did so.
if (!cb && err) {
processNextickArgs.nextTick(emitErrorNT, _this, err);
if (_this._writableState) {
_this._writableState.errorEmitted = true;
}
} else if (cb) {
cb(err);
}
});
return this;
}
// Reset both stream sides' lifecycle flags so the stream can be reused
// after a destroy() (core-internal helper).
function undestroy() {
  var rs = this._readableState;
  if (rs) {
    rs.destroyed = false;
    rs.reading = false;
    rs.ended = false;
    rs.endEmitted = false;
  }
  var ws = this._writableState;
  if (ws) {
    ws.destroyed = false;
    ws.ended = false;
    ws.ending = false;
    ws.finished = false;
    ws.errorEmitted = false;
  }
}
// Deferred 'error' emitter used with processNextickArgs.nextTick.
function emitErrorNT(self, err) {
self.emit('error', err);
}
// Module surface mixed into the stream prototypes.
var destroy_1 = {
destroy: destroy$1,
undestroy: undestroy
};
/**
* Module exports.
*/
// Browser build of util.deprecate (configured via localStorage flags).
var browser$1 = deprecate;
/**
 * Mark that a method should not be used.
 * Returns a modified function which warns once by default.
 *
 * If `localStorage.noDeprecation = true` is set, then it is a no-op.
 *
 * If `localStorage.throwDeprecation = true` is set, then deprecated functions
 * will throw an Error when invoked.
 *
 * If `localStorage.traceDeprecation = true` is set, then deprecated functions
 * will invoke `console.trace()` instead of `console.error()`.
 *
 * @param {Function} fn - the function to deprecate
 * @param {String} msg - the string to print to the console when `fn` is invoked
 * @returns {Function} a new "deprecated" version of `fn`
 * @api public
 */
function deprecate (fn, msg) {
  if (config('noDeprecation')) {
    return fn;
  }
  var hasWarned = false;
  return deprecated;
  function deprecated() {
    // Warn (or throw/trace) on the first call only; the flag is set
    // after the warning so a throw keeps throwing on every call.
    if (!hasWarned) {
      if (config('throwDeprecation')) {
        throw new Error(msg);
      } else if (config('traceDeprecation')) {
        console.trace(msg);
      } else {
        console.warn(msg);
      }
      hasWarned = true;
    }
    return fn.apply(this, arguments);
  }
}
/**
 * Checks `localStorage` for boolean values for the given `name`.
 *
 * @param {String} name
 * @returns {Boolean}
 * @api private
 */
function config (name) {
  // accessing global.localStorage can trigger a DOMException in sandboxed iframes
  try {
    if (!commonjsGlobal.localStorage) return false;
  } catch (_) {
    return false;
  }
  var raw = commonjsGlobal.localStorage[name];
  if (raw == null) return false;
  return String(raw).toLowerCase() === 'true';
}
/*<replacement>*/
/*</replacement>*/
// Public binding for this inlined copy of readable-stream's Writable.
var _stream_writable$4 = Writable$4;
// It seems a linked list but it is not
// there will be only 2 of these for each stream
function CorkedRequest(state) {
  var self = this;
  this.next = null;
  this.entry = null;
  // bound completion callback handed to _writev for corked batches
  this.finish = function () {
    onCorkedFinish(self, state);
  };
}
/* </replacement> */
/*<replacement>*/
var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : processNextickArgs.nextTick;
/*</replacement>*/
/*<replacement>*/
var Duplex$4;
/*</replacement>*/
Writable$4.WritableState = WritableState$4;
/*<replacement>*/
util.inherits = inherits_browser;
/*</replacement>*/
/*<replacement>*/
var internalUtil = {
deprecate: browser$1
};
/*</replacement>*/
/*<replacement>*/
/*</replacement>*/
/*<replacement>*/
var Buffer$a = safeBuffer.Buffer;
var OurUint8Array = commonjsGlobal.Uint8Array || function () {};
function _uint8ArrayToBuffer(chunk) {
return Buffer$a.from(chunk);
}
function _isUint8Array(obj) {
return Buffer$a.isBuffer(obj) || obj instanceof OurUint8Array;
}
/*</replacement>*/
util.inherits(Writable$4, streamBrowser);
function nop() {}
// Per-stream bookkeeping for the writable side: buffering, high-water
// mark, cork depth, in-flight write tracking and finish/prefinish flags.
function WritableState$4(options, stream) {
  Duplex$4 = Duplex$4 || _stream_duplex$4;
  var opts = options || {};
  // Duplex streams share a single options object, so the writable side can
  // be configured separately through writableXXX options.
  var isDuplex = stream instanceof Duplex$4;
  // object mode: chunks are arbitrary values rather than buffers/strings
  this.objectMode = !!opts.objectMode;
  if (isDuplex) this.objectMode = this.objectMode || !!opts.writableObjectMode;
  // highWaterMark is the point at which write() starts returning false.
  // 0 is a valid value and means "return false unless the buffer is empty".
  var hwm = opts.highWaterMark;
  var writableHwm = opts.writableHighWaterMark;
  if (hwm || hwm === 0) {
    this.highWaterMark = hwm;
  } else if (isDuplex && (writableHwm || writableHwm === 0)) {
    this.highWaterMark = writableHwm;
  } else {
    this.highWaterMark = this.objectMode ? 16 : 16 * 1024;
  }
  // cast to an integer
  this.highWaterMark = Math.floor(this.highWaterMark);
  // whether _final has been called
  this.finalCalled = false;
  // whether a 'drain' event is owed to writers
  this.needDrain = false;
  // end() has started / end() has returned / 'finish' has been emitted
  this.ending = false;
  this.ended = false;
  this.finished = false;
  // whether destroy() has been called
  this.destroyed = false;
  // should strings be decoded into buffers before _write? Kept
  // configurable because crypto historically defaults to 'binary'.
  this.decodeStrings = opts.decodeStrings !== false;
  this.defaultEncoding = opts.defaultEncoding || 'utf8';
  // bytes (or objects) accepted but not yet flushed to the underlying sink
  this.length = 0;
  // whether a _write() is currently in flight
  this.writing = false;
  // cork depth; while > 0 all writes are buffered until uncork()
  this.corked = 0;
  // true while the onwrite cb could still fire synchronously; anything that
  // should happen "later" must not run before the first write call
  this.sync = true;
  // true while draining previously buffered requests, so _write callbacks
  // in the same tick do not overlap onwrite handling
  this.bufferProcessing = false;
  // completion callback handed to _write(chunk, cb)
  this.onwrite = function (er) {
    onwrite$4(stream, er);
  };
  // user callback and length of the write currently in flight
  this.writecb = null;
  this.writelen = 0;
  // linked list of buffered write requests
  this.bufferedRequest = null;
  this.lastBufferedRequest = null;
  // user write callbacks still pending; must reach 0 before 'finish'
  this.pendingcb = 0;
  // whether 'prefinish' was emitted (relevant for synchronous Transforms)
  this.prefinished = false;
  // set once 'error' was emitted so it is not thrown again
  this.errorEmitted = false;
  // number of entries in the buffered-request linked list
  this.bufferedRequestCount = 0;
  // one CorkedRequest is always allocated and free; at most two exist
  this.corkedRequestsFree = new CorkedRequest(this);
}
// Snapshot the buffered-request linked list as an array.
WritableState$4.prototype.getBuffer = function getBuffer() {
  var list = [];
  for (var node = this.bufferedRequest; node; node = node.next) {
    list.push(node);
  }
  return list;
};
// Legacy accessor: `_writableState.buffer` is deprecated in favor of
// getBuffer(). Wrapped in try/catch for engines without defineProperty.
(function () {
  try {
    Object.defineProperty(WritableState$4.prototype, 'buffer', {
      get: internalUtil.deprecate(function () {
        return this.getBuffer();
      }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')
    });
  } catch (_) {}
})();
// Test _writableState for inheritance to account for Duplex streams,
// whose prototype chain only points to Readable.
var realHasInstance;
if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {
  realHasInstance = Function.prototype[Symbol.hasInstance];
  Object.defineProperty(Writable$4, Symbol.hasInstance, {
    value: function (object) {
      if (realHasInstance.call(this, object)) return true;
      if (this !== Writable$4) return false;
      // duck-type: accept any object carrying a WritableState
      return object && object._writableState instanceof WritableState$4;
    }
  });
} else {
  // No Symbol.hasInstance support: fall back to a plain instanceof check.
  realHasInstance = function (object) {
    return object instanceof this;
  };
}
// Writable stream constructor (also applied to Duplex instances).
function Writable$4(options) {
  Duplex$4 = Duplex$4 || _stream_duplex$4;
  // `realHasInstance` is used instead of plain `instanceof` because no
  // `_writableState` property exists yet, and relying on the custom
  // Symbol.hasInstance would break Node's LazyTransform, whose
  // `_writableState` getter is non-trivial (infinite recursion).
  var isInstance = realHasInstance.call(Writable$4, this) || this instanceof Duplex$4;
  if (!isInstance) return new Writable$4(options);
  this._writableState = new WritableState$4(options, this);
  // legacy flag
  this.writable = true;
  if (options) {
    if (typeof options.write === 'function') this._write = options.write;
    if (typeof options.writev === 'function') this._writev = options.writev;
    if (typeof options.destroy === 'function') this._destroy = options.destroy;
    if (typeof options.final === 'function') this._final = options.final;
  }
  streamBrowser.call(this);
}
// Otherwise people can pipe Writable streams, which is just wrong.
Writable$4.prototype.pipe = function () {
  var err = new Error('Cannot pipe, not readable');
  this.emit('error', err);
};
// Report a write() that arrived after end(): emit 'error' now and deliver
// the same error to the write callback on a later tick.
function writeAfterEnd$4(stream, cb) {
  var err = new Error('write after end');
  // TODO: defer error events consistently everywhere, not just the cb
  stream.emit('error', err);
  processNextickArgs.nextTick(cb, err);
}
// Checks that a user-supplied chunk is valid for the stream's current mode:
// `null` is never accepted, and non-string values are only allowed in
// object mode. On a bad chunk, emits 'error', schedules the callback with
// the same error, and returns false.
function validChunk$4(stream, state, chunk, cb) {
  var problem = null;
  if (chunk === null) {
    problem = new TypeError('May not write null values to stream');
  } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
    problem = new TypeError('Invalid non-string/buffer chunk');
  }
  if (!problem) return true;
  stream.emit('error', problem);
  processNextickArgs.nextTick(cb, problem);
  return false;
}
// Accept a chunk: normalize the (chunk, encoding, cb) argument shuffle,
// convert Uint8Arrays to Buffers, reject writes after end(), and hand the
// chunk to writeOrBuffer. Returns false when the caller should wait for
// 'drain' before writing more.
Writable$4.prototype.write = function (chunk, encoding, cb) {
  var state = this._writableState;
  var isBuf = !state.objectMode && _isUint8Array(chunk);
  if (isBuf && !Buffer$a.isBuffer(chunk)) {
    chunk = _uint8ArrayToBuffer(chunk);
  }
  if (typeof encoding === 'function') {
    cb = encoding;
    encoding = null;
  }
  if (isBuf) {
    encoding = 'buffer';
  } else if (!encoding) {
    encoding = state.defaultEncoding;
  }
  if (typeof cb !== 'function') cb = nop;
  var ok = false;
  if (state.ended) {
    writeAfterEnd$4(this, cb);
  } else if (isBuf || validChunk$4(this, state, chunk, cb)) {
    state.pendingcb++;
    ok = writeOrBuffer$4(this, state, isBuf, chunk, encoding, cb);
  }
  return ok;
};
// Raise the cork depth: subsequent writes are buffered until uncork().
Writable$4.prototype.cork = function () {
  this._writableState.corked++;
};
// Lower the cork depth; once fully uncorked, flush the buffered requests
// unless a write or buffer drain is already in progress.
Writable$4.prototype.uncork = function () {
  var state = this._writableState;
  if (!state.corked) return;
  state.corked--;
  var canFlush = !state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest;
  if (canFlush) clearBuffer$4(this, state);
};
// Set the encoding used for string chunks when write() gets no explicit
// encoding. Throws on unrecognized encodings.
Writable$4.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
  // node::ParseEncoding() requires lower case.
  if (typeof encoding === 'string') encoding = encoding.toLowerCase();
  var known = ['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'];
  if (known.indexOf((encoding + '').toLowerCase()) === -1) throw new TypeError('Unknown encoding: ' + encoding);
  this._writableState.defaultEncoding = encoding;
  return this;
};
// Convert string chunks into Buffers (unless disabled or in object mode);
// anything else passes through untouched.
function decodeChunk$4(state, chunk, encoding) {
  var shouldDecode = !state.objectMode && state.decodeStrings !== false && typeof chunk === 'string';
  return shouldDecode ? Buffer$a.from(chunk, encoding) : chunk;
}
// Read-only view of the configured writable high-water mark.
Object.defineProperty(Writable$4.prototype, 'writableHighWaterMark', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function () {
    return this._writableState.highWaterMark;
  }
});
// If a write is already in flight (or the stream is corked), append the
// chunk to the buffered-request linked list; otherwise hand it straight to
// _write. Returns false when the caller should wait for a 'drain' event.
function writeOrBuffer$4(stream, state, isBuf, chunk, encoding, cb) {
  if (!isBuf) {
    var decoded = decodeChunk$4(state, chunk, encoding);
    if (decoded !== chunk) {
      // the string chunk was converted into a Buffer
      isBuf = true;
      encoding = 'buffer';
      chunk = decoded;
    }
  }
  var len = state.objectMode ? 1 : chunk.length;
  state.length += len;
  var ret = state.length < state.highWaterMark;
  // never reset an earlier needDrain back to false
  if (!ret) state.needDrain = true;
  if (state.writing || state.corked) {
    var tail = state.lastBufferedRequest;
    state.lastBufferedRequest = {
      chunk: chunk,
      encoding: encoding,
      isBuf: isBuf,
      callback: cb,
      next: null
    };
    if (tail) {
      tail.next = state.lastBufferedRequest;
    } else {
      state.bufferedRequest = state.lastBufferedRequest;
    }
    state.bufferedRequestCount += 1;
  } else {
    doWrite$4(stream, state, false, len, chunk, encoding, cb);
  }
  return ret;
}
// Dispatch a single chunk (or a writev batch) to the underlying sink and
// record the in-flight write state; `sync` is true only while the sink's
// call is on the stack so onwrite can tell sync from async completion.
function doWrite$4(stream, state, writev, len, chunk, encoding, cb) {
  state.writelen = len;
  state.writecb = cb;
  state.writing = true;
  state.sync = true;
  if (writev) {
    stream._writev(chunk, state.onwrite);
  } else {
    stream._write(chunk, encoding, state.onwrite);
  }
  state.sync = false;
}
// Handle a failed _write(): undo the pendingcb bookkeeping, deliver the
// error to the write callback, emit 'error', and let 'finish' follow if
// due. The sync/async split preserves identical event ordering whether
// the sink failed on the stack or on a later tick.
function onwriteError$4(stream, state, sync, er, cb) {
  --state.pendingcb;
  if (sync) {
    // defer the callback if we are being called synchronously
    // to avoid piling up things on the stack
    processNextickArgs.nextTick(cb, er);
    // this can emit finish, and it will always happen
    // after error
    processNextickArgs.nextTick(finishMaybe$4, stream, state);
    stream._writableState.errorEmitted = true;
    stream.emit('error', er);
  } else {
    // the caller expect this to happen before if
    // it is async
    cb(er);
    stream._writableState.errorEmitted = true;
    stream.emit('error', er);
    // this can emit finish, but finish must
    // always follow error
    finishMaybe$4(stream, state);
  }
}
// Reset per-write bookkeeping once a _write() completes.
function onwriteStateUpdate$4(state) {
  state.writing = false;
  state.writecb = null;
  state.length -= state.writelen;
  state.writelen = 0;
}
// Completion callback for every _write()/_writev(): dispatches to the
// error path, drains the buffered queue when possible, then runs the
// user callback (deferred when completion was synchronous so that
// 'drain'/callback ordering matches the async case).
function onwrite$4(stream, er) {
  var state = stream._writableState;
  var sync = state.sync;
  var cb = state.writecb;
  onwriteStateUpdate$4(state);
  if (er) onwriteError$4(stream, state, sync, er, cb);else {
    // Check if we're actually ready to finish, but don't emit yet
    var finished = needFinish$4(state);
    if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
      clearBuffer$4(stream, state);
    }
    if (sync) {
      /*<replacement>*/
      asyncWrite(afterWrite$4, stream, state, finished, cb);
      /*</replacement>*/
    } else {
      afterWrite$4(stream, state, finished, cb);
    }
  }
}
// Runs after a successful _write(): maybe emit 'drain', settle the user's
// write callback, then check whether 'finish' is now due.
function afterWrite$4(stream, state, finished, cb) {
  if (!finished) onwriteDrain$4(stream, state);
  state.pendingcb--;
  cb();
  finishMaybe$4(stream, state);
}
// Emit 'drain' once the buffered length reaches zero and some writer was
// told to back off. The caller defers this to a later tick so write()'s
// `false` return value is observed before 'drain' fires, giving consumers
// a chance to attach a 'drain' listener.
function onwriteDrain$4(stream, state) {
  if (state.length !== 0 || !state.needDrain) return;
  state.needDrain = false;
  stream.emit('drain');
}
// if there's something in the buffer waiting, then process it
// Flushes buffered write requests: a single _writev() batch when the sink
// supports it (recycling the CorkedRequest holder), otherwise one _write()
// per chunk, stopping as soon as a write goes async.
function clearBuffer$4(stream, state) {
  state.bufferProcessing = true;
  var entry = state.bufferedRequest;
  if (stream._writev && entry && entry.next) {
    // Fast case, write everything using _writev()
    var l = state.bufferedRequestCount;
    var buffer = new Array(l);
    var holder = state.corkedRequestsFree;
    holder.entry = entry;
    var count = 0;
    var allBuffers = true;
    while (entry) {
      buffer[count] = entry;
      if (!entry.isBuf) allBuffers = false;
      entry = entry.next;
      count += 1;
    }
    buffer.allBuffers = allBuffers;
    doWrite$4(stream, state, true, state.length, buffer, '', holder.finish);
    // doWrite is almost always async, defer these to save a bit of time
    // as the hot path ends with doWrite
    state.pendingcb++;
    state.lastBufferedRequest = null;
    // keep exactly one free CorkedRequest holder available for the next batch
    if (holder.next) {
      state.corkedRequestsFree = holder.next;
      holder.next = null;
    } else {
      state.corkedRequestsFree = new CorkedRequest(state);
    }
    state.bufferedRequestCount = 0;
  } else {
    // Slow case, write chunks one-by-one
    while (entry) {
      var chunk = entry.chunk;
      var encoding = entry.encoding;
      var cb = entry.callback;
      var len = state.objectMode ? 1 : chunk.length;
      doWrite$4(stream, state, false, len, chunk, encoding, cb);
      entry = entry.next;
      state.bufferedRequestCount--;
      // if we didn't call the onwrite immediately, then
      // it means that we need to wait until it does.
      // also, that means that the chunk and cb are currently
      // being processed, so move the buffer counter past them.
      if (state.writing) {
        break;
      }
    }
    if (entry === null) state.lastBufferedRequest = null;
  }
  state.bufferedRequest = entry;
  state.bufferProcessing = false;
}
// Abstract sink: subclasses (or the options object) must supply _write.
Writable$4.prototype._write = function (chunk, encoding, cb) {
  cb(new Error('_write() is not implemented'));
};
Writable$4.prototype._writev = null;
// Signal that no more data will be written: optionally write a final
// chunk, fully uncork the stream, and transition toward 'finish'.
Writable$4.prototype.end = function (chunk, encoding, cb) {
  var ws = this._writableState;
  if (typeof chunk === 'function') {
    cb = chunk;
    chunk = null;
    encoding = null;
  } else if (typeof encoding === 'function') {
    cb = encoding;
    encoding = null;
  }
  if (chunk != null) this.write(chunk, encoding);
  // .end() discards any remaining cork levels and flushes
  if (ws.corked) {
    ws.corked = 1;
    this.uncork();
  }
  // ignore repeated end() calls
  if (!ws.ending && !ws.finished) endWritable$4(this, ws, cb);
};
// True once end() was called, nothing is buffered or in flight, and
// 'finish' has not been emitted yet.
function needFinish$4(state) {
  if (!state.ending) return false;
  if (state.length !== 0) return false;
  if (state.bufferedRequest !== null) return false;
  return !state.finished && !state.writing;
}
// Invoke the user's _final hook; when it completes, emit 'prefinish' and
// re-check whether 'finish' can now fire.
function callFinal(stream, state) {
  stream._final(function (err) {
    state.pendingcb--;
    if (err) stream.emit('error', err);
    state.prefinished = true;
    stream.emit('prefinish');
    finishMaybe$4(stream, state);
  });
}
// Emit 'prefinish' exactly once: via _final (scheduled on a later tick)
// when the stream defines one, otherwise immediately.
function prefinish$1(stream, state) {
  if (state.prefinished || state.finalCalled) return;
  if (typeof stream._final !== 'function') {
    state.prefinished = true;
    stream.emit('prefinish');
    return;
  }
  state.pendingcb++;
  state.finalCalled = true;
  processNextickArgs.nextTick(callFinal, stream, state);
}
// Emit 'finish' (after 'prefinish') once the stream has ended and all
// pending write callbacks have settled. Returns whether finishing is due.
function finishMaybe$4(stream, state) {
  var need = needFinish$4(state);
  if (!need) return need;
  prefinish$1(stream, state);
  if (state.pendingcb === 0) {
    state.finished = true;
    stream.emit('finish');
  }
  return need;
}
// Begin the end-of-stream transition: mark `ending`, try to finish, and
// arrange for `cb` to run once 'finish' fires (immediately on a later
// tick when already finished).
function endWritable$4(stream, state, cb) {
  state.ending = true;
  finishMaybe$4(stream, state);
  if (cb) {
    if (state.finished) processNextickArgs.nextTick(cb);
    else stream.once('finish', cb);
  }
  state.ended = true;
  stream.writable = false;
}
// Completion handler for a _writev batch: settle every buffered callback
// in order, then recycle the CorkedRequest holder into the free list.
function onCorkedFinish(corkReq, state, err) {
  var node = corkReq.entry;
  corkReq.entry = null;
  while (node) {
    var cb = node.callback;
    state.pendingcb--;
    cb(err);
    node = node.next;
  }
  if (state.corkedRequestsFree) {
    state.corkedRequestsFree.next = corkReq;
  } else {
    state.corkedRequestsFree = corkReq;
  }
}
// `destroyed` accessor mirroring the writable state's flag.
Object.defineProperty(Writable$4.prototype, 'destroyed', {
  get: function () {
    // uninitialized streams report as not destroyed
    if (this._writableState === undefined) {
      return false;
    }
    return this._writableState.destroyed;
  },
  set: function (value) {
    // we ignore the value if the stream
    // has not been initialized yet
    if (!this._writableState) {
      return;
    }
    // backward compatibility, the user is explicitly
    // managing destroyed
    this._writableState.destroyed = value;
  }
});
Writable$4.prototype.destroy = destroy_1.destroy;
Writable$4.prototype._undestroy = destroy_1.undestroy;
// Default destroy behavior: finish the stream, then report err (if any).
Writable$4.prototype._destroy = function (err, cb) {
  this.end();
  cb(err);
};
/*<replacement>*/
/*</replacement>*/
/*<replacement>*/
// Object.keys with a for-in fallback for very old engines.
var objectKeys$6 = Object.keys || function (obj) {
  var result = [];
  for (var prop in obj) result.push(prop);
  return result;
};
/*</replacement>*/
// Duplex inherits from Readable, then copies over every Writable
// prototype method not already present.
var _stream_duplex$4 = Duplex$5;
/*<replacement>*/
util.inherits = inherits_browser;
/*</replacement>*/
util.inherits(Duplex$5, _stream_readable$4);
{
  // avoid scope creep, the keys array can then be collected
  var keys = objectKeys$6(_stream_writable$4.prototype);
  for (var v = 0; v < keys.length; v++) {
    var method = keys[v];
    if (!Duplex$5.prototype[method]) Duplex$5.prototype[method] = _stream_writable$4.prototype[method];
  }
}
// Duplex stream constructor: initializes both the readable and writable
// sides and wires the no-half-open enforcer on 'end'.
function Duplex$5(options) {
  if (!(this instanceof Duplex$5)) return new Duplex$5(options);
  _stream_readable$4.call(this, options);
  _stream_writable$4.call(this, options);
  if (options && options.readable === false) this.readable = false;
  if (options && options.writable === false) this.writable = false;
  // half-open is allowed unless explicitly disabled
  this.allowHalfOpen = !(options && options.allowHalfOpen === false);
  this.once('end', onend$4);
}
// Read-only view of the writable side's high-water mark.
Object.defineProperty(Duplex$5.prototype, 'writableHighWaterMark', {
  // making it explicit this property is not enumerable
  // because otherwise some prototype manipulation in
  // userland will fail
  enumerable: false,
  get: function () {
    return this._writableState.highWaterMark;
  }
});
// the no-half-open enforcer
function onend$4() {
  // Nothing to do when half-open is allowed or the writable side is done.
  if (this.allowHalfOpen || this._writableState.ended) return;
  // No further data can arrive, but let writes already queued in this
  // tick complete before ending the writable side.
  processNextickArgs.nextTick(onEndNT, this);
}
function onEndNT(stream) {
  stream.end();
}
// `destroyed` accessor: a Duplex counts as destroyed only when BOTH sides
// have been destroyed; the setter mirrors the value into both states.
Object.defineProperty(Duplex$5.prototype, 'destroyed', {
  get: function () {
    if (this._readableState === undefined || this._writableState === undefined) {
      return false;
    }
    return this._readableState.destroyed && this._writableState.destroyed;
  },
  set: function (value) {
    // we ignore the value if the stream
    // has not been initialized yet
    if (this._readableState === undefined || this._writableState === undefined) {
      return;
    }
    // backward compatibility, the user is explicitly
    // managing destroyed
    this._readableState.destroyed = value;
    this._writableState.destroyed = value;
  }
});
// Default destroy: signal EOF, end the writable side, report err later.
Duplex$5.prototype._destroy = function (err, cb) {
  this.push(null);
  this.end();
  processNextickArgs.nextTick(cb, err);
};
/*<replacement>*/
var Buffer$b = safeBuffer.Buffer;
/*</replacement>*/
// Encoding-name check, with a fallback for Buffer implementations that
// lack isEncoding.
var isEncoding = Buffer$b.isEncoding || function (encoding) {
  encoding = '' + encoding;
  switch (encoding && encoding.toLowerCase()) {
    case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw':
      return true;
    default:
      return false;
  }
};
// Map an encoding name to its canonical form ('utf8', 'utf16le', 'latin1',
// 'base64', 'ascii' or 'hex'). The exact name is tried first, then its
// lowercased form; unknown names yield undefined. A falsy name means utf8.
function _normalizeEncoding(enc) {
  if (!enc) return 'utf8';
  var canonical = {
    'utf8': 'utf8',
    'utf-8': 'utf8',
    'ucs2': 'utf16le',
    'ucs-2': 'utf16le',
    'utf16le': 'utf16le',
    'utf-16le': 'utf16le',
    'latin1': 'latin1',
    'binary': 'latin1',
    'base64': 'base64',
    'ascii': 'ascii',
    'hex': 'hex'
  };
  if (Object.prototype.hasOwnProperty.call(canonical, enc)) return canonical[enc];
  // retry exactly once with the stringified, lowercased name
  var lowered = ('' + enc).toLowerCase();
  if (Object.prototype.hasOwnProperty.call(canonical, lowered)) return canonical[lowered];
  return undefined;
}
// Do not cache `Buffer.isEncoding` when checking encoding names as some
// modules monkey-patch it to support additional encodings.
// Returns the canonical encoding, or the original name when a patched
// isEncoding accepts it; throws on truly unknown encodings.
function normalizeEncoding(enc) {
  var nenc = _normalizeEncoding(enc);
  if (typeof nenc === 'string') return nenc;
  if (Buffer$b.isEncoding === isEncoding || !isEncoding(enc)) {
    throw new Error('Unknown encoding: ' + enc);
  }
  return nenc || enc;
}
// StringDecoder splits a series of buffers into a series of JS strings
// without tearing multi-byte characters that straddle chunk boundaries.
var StringDecoder_1 = StringDecoder$4;
function StringDecoder$4(encoding) {
  this.encoding = normalizeEncoding(encoding);
  var stashSize;
  if (this.encoding === 'utf16le') {
    this.text = utf16Text;
    this.end = utf16End;
    stashSize = 4;
  } else if (this.encoding === 'utf8') {
    this.fillLast = utf8FillLast;
    stashSize = 4;
  } else if (this.encoding === 'base64') {
    this.text = base64Text;
    this.end = base64End;
    stashSize = 3;
  } else {
    // single-byte encodings need no partial-character buffering at all
    this.write = simpleWrite;
    this.end = simpleEnd;
    return;
  }
  // lastChar stashes the bytes of an incomplete trailing character
  this.lastNeed = 0;
  this.lastTotal = 0;
  this.lastChar = Buffer$b.allocUnsafe(stashSize);
}
// Decode a buffer: first complete any character stashed from the previous
// chunk, then decode the remainder with the encoding-specific text().
StringDecoder$4.prototype.write = function (buf) {
  if (buf.length === 0) return '';
  var prefix;
  var start = 0;
  if (this.lastNeed) {
    prefix = this.fillLast(buf);
    // fillLast returns undefined while the character is still incomplete
    if (prefix === undefined) return '';
    start = this.lastNeed;
    this.lastNeed = 0;
  }
  if (start < buf.length) {
    return prefix ? prefix + this.text(buf, start) : this.text(buf, start);
  }
  return prefix || '';
};
// Default (utf8) end(): appends U+FFFD when input stops mid-character.
StringDecoder$4.prototype.end = utf8End;
// Returns only complete characters in a Buffer
StringDecoder$4.prototype.text = utf8Text;
// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer
StringDecoder$4.prototype.fillLast = function (buf) {
  if (this.lastNeed <= buf.length) {
    // enough bytes arrived: complete the stashed character and decode it
    buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);
    return this.lastChar.toString(this.encoding, 0, this.lastTotal);
  }
  // still incomplete: stash what arrived and keep waiting
  buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);
  this.lastNeed -= buf.length;
};
// Classify a UTF-8 byte: 0 for ASCII, 2/3/4 for a lead byte of that
// sequence length, -1 for a continuation byte, -2 for an invalid byte.
function utf8CheckByte(byte) {
  if (byte <= 0x7F) return 0;
  if (byte >> 5 === 0x06) return 2;
  if (byte >> 4 === 0x0E) return 3;
  if (byte >> 3 === 0x1E) return 4;
  return byte >> 6 === 0x02 ? -1 : -2;
}
// Checks at most 3 bytes at the end of a Buffer in order to detect an
// incomplete multi-byte UTF-8 character. Returns the total number of bytes
// (2, 3, or 4) the dangling character needs, or 0 when the buffer ends on
// a complete character (or on invalid bytes, which v8 will replace).
function utf8CheckIncomplete(self, buf, i) {
  // walk backwards over up to three trailing bytes looking for a lead byte
  for (var back = 1; back <= 3; back++) {
    var j = buf.length - back;
    if (j < i) return 0;
    var nb = utf8CheckByte(buf[j]);
    if (nb >= 0) {
      if (nb > 0) {
        // a 2-byte lead three positions back is already complete
        if (back === 3 && nb === 2) nb = 0;else self.lastNeed = nb - back;
      }
      return nb;
    }
    // -2 marks an invalid byte; -1 (continuation) keeps scanning backwards
    if (nb === -2) return 0;
  }
  return 0;
}
// Validates as many continuation bytes for a multi-byte UTF-8 character as
// needed or are available. On the first non-continuation byte where one is
// expected, the validated bytes seen so far are "replaced" by a single
// U+FFFD, matching v8's decoding behavior. Returns undefined when all
// available bytes are valid continuations. Unrolled (not a loop) for speed.
function utf8CheckExtraBytes(self, buf, p) {
  if ((buf[0] & 0xC0) !== 0x80) {
    self.lastNeed = 0;
    return '\ufffd';
  }
  if (self.lastNeed <= 1 || buf.length <= 1) return;
  if ((buf[1] & 0xC0) !== 0x80) {
    self.lastNeed = 1;
    return '\ufffd';
  }
  if (self.lastNeed <= 2 || buf.length <= 2) return;
  if ((buf[2] & 0xC0) !== 0x80) {
    self.lastNeed = 2;
    return '\ufffd';
  }
}
// Attempts to complete a multi-byte UTF-8 character using bytes from a
// Buffer; returns the decoded string when complete, U+FFFD on invalid
// continuation bytes, or undefined while still incomplete.
function utf8FillLast(buf) {
  var offset = this.lastTotal - this.lastNeed;
  var invalid = utf8CheckExtraBytes(this, buf, offset);
  if (invalid !== undefined) return invalid;
  if (this.lastNeed <= buf.length) {
    buf.copy(this.lastChar, offset, 0, this.lastNeed);
    return this.lastChar.toString(this.encoding, 0, this.lastTotal);
  }
  // still incomplete: stash the new bytes and keep waiting
  buf.copy(this.lastChar, offset, 0, buf.length);
  this.lastNeed -= buf.length;
}
// Returns all complete UTF-8 characters in a Buffer. When the Buffer ends
// on a partial character, its bytes are stashed in lastChar until the
// required number of bytes arrives.
function utf8Text(buf, i) {
  var total = utf8CheckIncomplete(this, buf, i);
  if (!this.lastNeed) return buf.toString('utf8', i);
  this.lastTotal = total;
  var complete = buf.length - (total - this.lastNeed);
  buf.copy(this.lastChar, 0, complete);
  return buf.toString('utf8', i, complete);
}
// For UTF-8, a replacement character (U+FFFD) is appended when the input
// ends on a partial character.
function utf8End(buf) {
  var out = buf && buf.length ? this.write(buf) : '';
  return this.lastNeed ? out + '\ufffd' : out;
}
// UTF-16LE needs two bytes per code unit, and even with an even byte count
// the input can end on a leading/high surrogate; in both cases the pending
// bytes are stashed until the rest of the character arrives.
function utf16Text(buf, i) {
  if ((buf.length - i) % 2 !== 0) {
    // odd byte count: stash the dangling trailing byte
    this.lastNeed = 1;
    this.lastTotal = 2;
    this.lastChar[0] = buf[buf.length - 1];
    return buf.toString('utf16le', i, buf.length - 1);
  }
  var decoded = buf.toString('utf16le', i);
  if (decoded) {
    var last = decoded.charCodeAt(decoded.length - 1);
    if (last >= 0xD800 && last <= 0xDBFF) {
      // ends on a high surrogate: hold back the final code unit
      this.lastNeed = 2;
      this.lastTotal = 4;
      this.lastChar[0] = buf[buf.length - 2];
      this.lastChar[1] = buf[buf.length - 1];
      return decoded.slice(0, -1);
    }
  }
  return decoded;
}
// For UTF-16LE no explicit replacement character is appended on a partial
// character; the stashed bytes are flushed and v8 handles the rest.
function utf16End(buf) {
  var out = buf && buf.length ? this.write(buf) : '';
  if (!this.lastNeed) return out;
  var end = this.lastTotal - this.lastNeed;
  return out + this.lastChar.toString('utf16le', 0, end);
}
// Decode whole 3-byte base64 groups; any 1 or 2 trailing bytes are stashed
// in lastChar until the group is complete.
function base64Text(buf, i) {
  var extra = (buf.length - i) % 3;
  if (extra === 0) return buf.toString('base64', i);
  this.lastNeed = 3 - extra;
  this.lastTotal = 3;
  if (extra === 1) {
    this.lastChar[0] = buf[buf.length - 1];
  } else {
    this.lastChar[0] = buf[buf.length - 2];
    this.lastChar[1] = buf[buf.length - 1];
  }
  return buf.toString('base64', i, buf.length - extra);
}
// Flush any stashed partial base64 group at end of input.
function base64End(buf) {
  var out = buf && buf.length ? this.write(buf) : '';
  if (!this.lastNeed) return out;
  return out + this.lastChar.toString('base64', 0, 3 - this.lastNeed);
}
// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)
function simpleWrite(buf) {
  return buf.toString(this.encoding);
}
function simpleEnd(buf) {
  if (!buf || !buf.length) return '';
  return this.write(buf);
}
// Public shape mirroring Node's built-in 'string_decoder' module.
var string_decoder$4 = {
  StringDecoder: StringDecoder_1
};
/*<replacement>*/
/*</replacement>*/
// Inlined readable-stream Readable implementation (browser build).
var _stream_readable$4 = Readable$5;
/*<replacement>*/
/*</replacement>*/
/*<replacement>*/
// Assigned lazily inside the constructors to break the circular
// dependency with the Duplex implementation.
var Duplex$6;
/*</replacement>*/
Readable$5.ReadableState = ReadableState$4;
/*<replacement>*/
var EE$4 = events.EventEmitter;
// listener-count helper (EventEmitter.listenerCount is not relied upon)
var EElistenerCount = function (emitter, type) {
  return emitter.listeners(type).length;
};
/*</replacement>*/
/*<replacement>*/
/*</replacement>*/
/*<replacement>*/
var Buffer$c = safeBuffer.Buffer;
// Dummy constructor so `instanceof` stays safe where Uint8Array is missing.
var OurUint8Array$1 = commonjsGlobal.Uint8Array || function () {};
function _uint8ArrayToBuffer$1(chunk) {
  return Buffer$c.from(chunk);
}
function _isUint8Array$1(obj) {
  return Buffer$c.isBuffer(obj) || obj instanceof OurUint8Array$1;
}
/*</replacement>*/
/*<replacement>*/
util.inherits = inherits_browser;
/*</replacement>*/
/*<replacement>*/
// use util.debuglog('stream') when available, otherwise a no-op
var debug$1 = void 0;
if (util$2 && util$2.debuglog) {
  debug$1 = util$2.debuglog('stream');
} else {
  debug$1 = function () {};
}
/*</replacement>*/
// required lazily, only when an encoding is actually configured
var StringDecoder$5;
util.inherits(Readable$5, streamBrowser);
// events forwarded from source to destination by pipe()
var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];
function prependListener(emitter, event, fn) {
  // Sadly this is not cacheable as some libraries bundle their own
  // event emitter implementation with them.
  if (typeof emitter.prependListener === 'function') {
    return emitter.prependListener(event, fn);
  }
  // Hack for older Node.js versions without prependListener(): splice the
  // handler in front of any existing ones so our error handler runs first.
  // NEVER DO THIS elsewhere; the goal is to eventually remove it.
  var existing = emitter._events && emitter._events[event];
  if (!existing) {
    emitter.on(event, fn);
  } else if (isarray$4(existing)) {
    existing.unshift(fn);
  } else {
    emitter._events[event] = [fn, existing];
  }
}
// Per-stream bookkeeping for the readable side: the chunk buffer, flowing
// mode, high-water mark, pipe targets and optional string decoding.
function ReadableState$4(options, stream) {
  Duplex$6 = Duplex$6 || _stream_duplex$4;
  var opts = options || {};
  // Duplex streams share a single options object, so the readable side can
  // be configured separately through readableXXX options.
  var isDuplex = stream instanceof Duplex$6;
  // object mode: read(n) ignores n, no buffer merging or length counting
  this.objectMode = !!opts.objectMode;
  if (isDuplex) this.objectMode = this.objectMode || !!opts.readableObjectMode;
  // highWaterMark is the point at which _read() stops being called to
  // refill the buffer. 0 is valid: "never call _read preemptively".
  var hwm = opts.highWaterMark;
  var readableHwm = opts.readableHighWaterMark;
  if (hwm || hwm === 0) {
    this.highWaterMark = hwm;
  } else if (isDuplex && (readableHwm || readableHwm === 0)) {
    this.highWaterMark = readableHwm;
  } else {
    this.highWaterMark = this.objectMode ? 16 : 16 * 1024;
  }
  // cast to an integer
  this.highWaterMark = Math.floor(this.highWaterMark);
  // chunks live in a linked list: removing from the front beats
  // array.shift()
  this.buffer = new BufferList$1();
  this.length = 0;
  this.pipes = null;
  this.pipesCount = 0;
  this.flowing = null;
  this.ended = false;
  this.endEmitted = false;
  this.reading = false;
  // true while 'readable'/'data' could still fire synchronously; anything
  // that should happen "later" must not run before the first read call
  this.sync = true;
  // set when a null read told us to wait for a 'readable' event
  this.needReadable = false;
  this.emittedReadable = false;
  this.readableListening = false;
  this.resumeScheduled = false;
  // whether destroy() has been called
  this.destroyed = false;
  // 'binary' kept configurable because crypto historically defaults to it
  this.defaultEncoding = opts.defaultEncoding || 'utf8';
  // writers currently awaiting a 'drain' event inside .pipe()
  this.awaitDrain = 0;
  // true when a maybeReadMore is already scheduled
  this.readingMore = false;
  this.decoder = null;
  this.encoding = null;
  if (opts.encoding) {
    if (!StringDecoder$5) StringDecoder$5 = string_decoder$4.StringDecoder;
    this.decoder = new StringDecoder$5(opts.encoding);
    this.encoding = opts.encoding;
  }
}
// Readable constructor: sets up per-instance state and installs optional
// user-supplied read/destroy implementations passed via `options`.
function Readable$5(options) {
// Lazily resolve Duplex here to break the circular dependency between the
// Readable and Duplex modules.
Duplex$6 = Duplex$6 || _stream_duplex$4;
if (!(this instanceof Readable$5)) return new Readable$5(options);
this._readableState = new ReadableState$4(options, this);
// legacy
this.readable = true;
if (options) {
if (typeof options.read === 'function') this._read = options.read;
if (typeof options.destroy === 'function') this._destroy = options.destroy;
}
streamBrowser.call(this);
}
// Expose `destroyed` as an accessor backed by the internal state object.
Object.defineProperty(Readable$5.prototype, 'destroyed', {
get: function () {
if (this._readableState === undefined) {
return false;
}
return this._readableState.destroyed;
},
set: function (value) {
// we ignore the value if the stream
// has not been initialized yet
if (!this._readableState) {
return;
}
// backward compatibility, the user is explicitly
// managing destroyed
this._readableState.destroyed = value;
}
});
Readable$5.prototype.destroy = destroy_1.destroy;
Readable$5.prototype._undestroy = destroy_1.undestroy;
// Default _destroy: signal EOF on the readable side, then report the error.
Readable$5.prototype._destroy = function (err, cb) {
this.push(null);
cb(err);
};
// Manually shove something into the read() buffer.
// This returns true if the highWaterMark has not been hit yet,
// similar to how Writable.write() returns true if you should
// write() some more.
Readable$5.prototype.push = function (chunk, encoding) {
var state = this._readableState;
var skipChunkCheck;
if (!state.objectMode) {
if (typeof chunk === 'string') {
encoding = encoding || state.defaultEncoding;
// Re-encode only when the string's encoding differs from the stream's;
// once it is a Buffer, no further type validation is needed.
if (encoding !== state.encoding) {
chunk = Buffer$c.from(chunk, encoding);
encoding = '';
}
skipChunkCheck = true;
}
} else {
// Object-mode streams accept any value, so skip validation entirely.
skipChunkCheck = true;
}
return readableAddChunk$4(this, chunk, encoding, false, skipChunkCheck);
};
// Unshift should *always* be something directly out of read();
// it goes to the front of the buffer (addToFront = true).
Readable$5.prototype.unshift = function (chunk) {
return readableAddChunk$4(this, chunk, null, true, false);
};
// Core ingestion path shared by push() and unshift(). A `null` chunk signals
// EOF; otherwise the chunk is validated, normalized/decoded, and either
// buffered or emitted straight to a flowing consumer. Returns true while the
// caller may keep pushing (see needMoreData$4).
function readableAddChunk$4(stream, chunk, encoding, addToFront, skipChunkCheck) {
var state = stream._readableState;
if (chunk === null) {
state.reading = false;
onEofChunk$4(stream, state);
} else {
var er;
if (!skipChunkCheck) er = chunkInvalid$4(state, chunk);
if (er) {
stream.emit('error', er);
} else if (state.objectMode || chunk && chunk.length > 0) {
// Normalize plain Uint8Arrays to Buffers in binary mode.
if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer$c.prototype) {
chunk = _uint8ArrayToBuffer$1(chunk);
}
if (addToFront) {
if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true);
} else if (state.ended) {
stream.emit('error', new Error('stream.push() after EOF'));
} else {
state.reading = false;
if (state.decoder && !encoding) {
chunk = state.decoder.write(chunk);
if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore$4(stream, state);
} else {
addChunk(stream, state, chunk, false);
}
}
} else if (!addToFront) {
state.reading = false;
}
}
return needMoreData$4(state);
}
// Either emit the chunk directly to a flowing consumer with an empty buffer
// (fast path), or append/prepend it to the buffer and wake any pending
// 'readable' listener. Always schedules a potential preemptive read.
function addChunk(stream, state, chunk, addToFront) {
if (state.flowing && state.length === 0 && !state.sync) {
stream.emit('data', chunk);
stream.read(0);
} else {
// update the buffer info.
state.length += state.objectMode ? 1 : chunk.length;
if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);
if (state.needReadable) emitReadable$4(stream);
}
maybeReadMore$4(stream, state);
}
// Validate a chunk for a non-object-mode stream: anything that is not a
// Uint8Array/Buffer, a string, or undefined is rejected with a TypeError.
// Object-mode streams accept every value. Returns undefined when valid.
function chunkInvalid$4(state, chunk) {
  var rejected =
    !_isUint8Array$1(chunk) &&
    typeof chunk !== 'string' &&
    chunk !== undefined &&
    !state.objectMode;
  return rejected ? new TypeError('Invalid non-string/buffer chunk') : undefined;
}
// Decide whether push() may be fed more data. True while the stream has not
// ended AND one of: a 'readable' round-trip is pending, the buffer is below
// the high-water mark, or the buffer is empty (covers the hwm=0 case, e.g.
// the repl, and read(largeNumber) setting needReadable).
function needMoreData$4(state) {
  if (state.ended) return false;
  if (state.needReadable) return true;
  return state.length < state.highWaterMark || state.length === 0;
}
// A stream counts as paused only after an explicit pause(); a stream that
// has never flowed (flowing === null) is not "paused".
Readable$5.prototype.isPaused = function () {
return this._readableState.flowing === false;
};
// backwards compatibility.
// Installs a StringDecoder so read() returns strings in `enc` instead of Buffers.
Readable$5.prototype.setEncoding = function (enc) {
if (!StringDecoder$5) StringDecoder$5 = string_decoder$4.StringDecoder;
this._readableState.decoder = new StringDecoder$5(enc);
this._readableState.encoding = enc;
return this;
};
// Don't raise the hwm > 8MB
var MAX_HWM$4 = 0x800000;
// Round `n` up to the next power of two (classic bit-smearing trick) so that
// repeated read(n) calls don't grow the high-water mark in tiny increments.
// Values at or above MAX_HWM$4 are clamped to MAX_HWM$4.
function computeNewHighWaterMark(n) {
  if (n >= MAX_HWM$4) return MAX_HWM$4;
  n -= 1;
  n |= n >>> 1;
  n |= n >>> 2;
  n |= n >>> 4;
  n |= n >>> 8;
  n |= n >>> 16;
  return n + 1;
}
// This function is designed to be inlinable, so please take care when making
// changes to the function body.
// Decide how many bytes (or objects) read(n) may hand out given the buffered
// state. May mutate state.highWaterMark (raise) and state.needReadable.
function howMuchToRead$4(n, state) {
  // Non-positive request, or a drained-and-ended stream: nothing to give.
  if (n <= 0 || (state.length === 0 && state.ended)) return 0;
  // Object mode hands out exactly one object per read.
  if (state.objectMode) return 1;
  // n is NaN: read() / read(undefined) means "give me whatever you have".
  if (n !== n) {
    // Only flow one buffer at a time
    return state.flowing && state.length ? state.buffer.head.data.length : state.length;
  }
  // If we're asking for more than the current hwm, then raise the hwm.
  if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);
  if (n <= state.length) return n;
  // Not enough buffered yet: request a 'readable' round trip first.
  if (!state.ended) {
    state.needReadable = true;
    return 0;
  }
  return state.length;
}
// you can override either this method, or the async _read(n) below.
// Main consumer entry point: returns up to `n` bytes (or one object), or
// null when nothing can be returned right now. Also drives _read() refills
// and the 'readable'/'end' bookkeeping.
Readable$5.prototype.read = function (n) {
debug$1('read', n);
// NOTE: read() with no argument yields NaN here; howMuchToRead$4 treats
// NaN as "everything available".
n = parseInt(n, 10);
var state = this._readableState;
var nOrig = n;
if (n !== 0) state.emittedReadable = false;
// if we're doing read(0) to trigger a readable event, but we
// already have a bunch of data in the buffer, then just trigger
// the 'readable' event and move on.
if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) {
debug$1('read: emitReadable', state.length, state.ended);
if (state.length === 0 && state.ended) endReadable$4(this);else emitReadable$4(this);
return null;
}
n = howMuchToRead$4(n, state);
// if we've ended, and we're now clear, then finish it up.
if (n === 0 && state.ended) {
if (state.length === 0) endReadable$4(this);
return null;
}
// All the actual chunk generation logic needs to be
// *below* the call to _read. The reason is that in certain
// synthetic stream cases, such as passthrough streams, _read
// may be a completely synchronous operation which may change
// the state of the read buffer, providing enough data when
// before there was *not* enough.
//
// So, the steps are:
// 1. Figure out what the state of things will be after we do
// a read from the buffer.
//
// 2. If that resulting state will trigger a _read, then call _read.
// Note that this may be asynchronous, or synchronous. Yes, it is
// deeply ugly to write APIs this way, but that still doesn't mean
// that the Readable class should behave improperly, as streams are
// designed to be sync/async agnostic.
// Take note if the _read call is sync or async (ie, if the read call
// has returned yet), so that we know whether or not it's safe to emit
// 'readable' etc.
//
// 3. Actually pull the requested chunks out of the buffer and return.
// if we need a readable event, then we need to do some reading.
var doRead = state.needReadable;
debug$1('need readable', doRead);
// if we currently have less than the highWaterMark, then also read some
if (state.length === 0 || state.length - n < state.highWaterMark) {
doRead = true;
debug$1('length less than watermark', doRead);
}
// however, if we've ended, then there's no point, and if we're already
// reading, then it's unnecessary.
if (state.ended || state.reading) {
doRead = false;
debug$1('reading or ended', doRead);
} else if (doRead) {
debug$1('do read');
state.reading = true;
state.sync = true;
// if the length is currently zero, then we *need* a readable event.
if (state.length === 0) state.needReadable = true;
// call internal read method
this._read(state.highWaterMark);
state.sync = false;
// If _read pushed data synchronously, then `reading` will be false,
// and we need to re-evaluate how much data we can return to the user.
if (!state.reading) n = howMuchToRead$4(nOrig, state);
}
var ret;
if (n > 0) ret = fromList$4(n, state);else ret = null;
if (ret === null) {
state.needReadable = true;
n = 0;
} else {
state.length -= n;
}
if (state.length === 0) {
// If we have nothing in the buffer, then we want to know
// as soon as we *do* get something into the buffer.
if (!state.ended) state.needReadable = true;
// If we tried to read() past the EOF, then emit end on the next tick.
if (nOrig !== n && state.ended) endReadable$4(this);
}
if (ret !== null) this.emit('data', ret);
return ret;
};
// Handle push(null): flush any bytes still held by the string decoder into
// the buffer, mark the state ended, and announce 'readable' one last time.
function onEofChunk$4(stream, state) {
if (state.ended) return;
if (state.decoder) {
var chunk = state.decoder.end();
if (chunk && chunk.length) {
state.buffer.push(chunk);
state.length += state.objectMode ? 1 : chunk.length;
}
}
state.ended = true;
// emit 'readable' now to make sure it gets picked up.
emitReadable$4(stream);
}
// Don't emit readable right away in sync mode, because this can trigger
// another read() call => stack overflow. This way, it might trigger
// a nextTick recursion warning, but that's not so bad.
function emitReadable$4(stream) {
var state = stream._readableState;
state.needReadable = false;
if (!state.emittedReadable) {
debug$1('emitReadable', state.flowing);
state.emittedReadable = true;
if (state.sync) processNextickArgs.nextTick(emitReadable_$4, stream);else emitReadable_$4(stream);
}
}
// Actual 'readable' emission plus a kick of the flowing loop; may run a
// tick later than emitReadable$4 (see sync guard above).
function emitReadable_$4(stream) {
debug$1('emit readable');
stream.emit('readable');
flow$4(stream);
}
// at this point, the user has presumably seen the 'readable' event,
// and called read() to consume some data. that may have triggered
// in turn another _read(n) call, in which case reading = true if
// it's in progress.
// However, if we're not ended, or reading, and the length < hwm,
// then go ahead and try to read some more preemptively.
function maybeReadMore$4(stream, state) {
if (!state.readingMore) {
state.readingMore = true;
processNextickArgs.nextTick(maybeReadMore_$4, stream, state);
}
}
// Pull read(0) repeatedly until the buffer reaches the high-water mark or
// stops growing (i.e. the underlying source has nothing more right now).
function maybeReadMore_$4(stream, state) {
var len = state.length;
while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) {
debug$1('maybeReadMore read 0');
stream.read(0);
if (len === state.length)
// didn't get any data, stop spinning.
break;else len = state.length;
}
state.readingMore = false;
}
// abstract method. to be overridden in specific implementation classes.
// call cb(er, data) where data is <= n in length.
// for virtual (non-string, non-buffer) streams, "length" is somewhat
// arbitrary, and perhaps not very meaningful.
Readable$5.prototype._read = function (n) {
this.emit('error', new Error('_read() is not implemented'));
};
// Connect this readable to a writable destination. Handles multi-destination
// fan-out (pipes is a single dest or an array), backpressure via
// 'drain'/awaitDrain, teardown on error/close/finish/unpipe, and auto-ending
// the destination unless pipeOpts.end === false (stdout/stderr are never
// auto-ended). Returns `dest` so pipes can be chained.
Readable$5.prototype.pipe = function (dest, pipeOpts) {
var src = this;
var state = this._readableState;
switch (state.pipesCount) {
case 0:
state.pipes = dest;
break;
case 1:
state.pipes = [state.pipes, dest];
break;
default:
state.pipes.push(dest);
break;
}
state.pipesCount += 1;
debug$1('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;
var endFn = doEnd ? onend : unpipe;
if (state.endEmitted) processNextickArgs.nextTick(endFn);else src.once('end', endFn);
dest.on('unpipe', onunpipe);
// Tear down only when *this* source unpipes, and only once per unpipe
// (unpipeInfo.hasUnpiped is the dedup flag).
function onunpipe(readable, unpipeInfo) {
debug$1('onunpipe');
if (readable === src) {
if (unpipeInfo && unpipeInfo.hasUnpiped === false) {
unpipeInfo.hasUnpiped = true;
cleanup();
}
}
}
function onend() {
debug$1('onend');
dest.end();
}
// when the dest drains, it reduces the awaitDrain counter
// on the source. This would be more elegant with a .once()
// handler in flow(), but adding and removing repeatedly is
// too slow.
var ondrain = pipeOnDrain$4(src);
dest.on('drain', ondrain);
var cleanedUp = false;
function cleanup() {
debug$1('cleanup');
// cleanup event handlers once the pipe is broken
dest.removeListener('close', onclose);
dest.removeListener('finish', onfinish);
dest.removeListener('drain', ondrain);
dest.removeListener('error', onerror);
dest.removeListener('unpipe', onunpipe);
src.removeListener('end', onend);
src.removeListener('end', unpipe);
src.removeListener('data', ondata);
cleanedUp = true;
// if the reader is waiting for a drain event from this
// specific writer, then it would cause it to never start
// flowing again.
// So, if this is awaiting a drain, then we just call it now.
// If we don't know, then assume that we are waiting for one.
if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();
}
// If the user pushes more data while we're writing to dest then we'll end up
// in ondata again. However, we only want to increase awaitDrain once because
// dest will only emit one 'drain' event for the multiple writes.
// => Introduce a guard on increasing awaitDrain.
var increasedAwaitDrain = false;
src.on('data', ondata);
function ondata(chunk) {
debug$1('ondata');
increasedAwaitDrain = false;
var ret = dest.write(chunk);
if (false === ret && !increasedAwaitDrain) {
// If the user unpiped during `dest.write()`, it is possible
// to get stuck in a permanently paused state if that write
// also returned false.
// => Check whether `dest` is still a piping destination.
if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf$4(state.pipes, dest) !== -1) && !cleanedUp) {
debug$1('false write response, pause', src._readableState.awaitDrain);
src._readableState.awaitDrain++;
increasedAwaitDrain = true;
}
src.pause();
}
}
// if the dest has an error, then stop piping into it.
// however, don't suppress the throwing behavior for this.
function onerror(er) {
debug$1('onerror', er);
unpipe();
dest.removeListener('error', onerror);
// Re-emit (and potentially throw) only if nobody else is listening.
if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er);
}
// Make sure our error handler is attached before userland ones.
prependListener(dest, 'error', onerror);
// Both close and finish should trigger unpipe, but only once.
function onclose() {
dest.removeListener('finish', onfinish);
unpipe();
}
dest.once('close', onclose);
function onfinish() {
debug$1('onfinish');
dest.removeListener('close', onclose);
unpipe();
}
dest.once('finish', onfinish);
function unpipe() {
debug$1('unpipe');
src.unpipe(dest);
}
// tell the dest that it's being piped to
dest.emit('pipe', src);
// start the flow if it hasn't been started already.
if (!state.flowing) {
debug$1('pipe resume');
src.resume();
}
return dest;
};
// Build the 'drain' handler for one piped destination: decrement the
// source's awaitDrain counter, and restart flowing once every destination
// that reported backpressure has drained (counter back to zero) and there
// is still at least one 'data' listener attached.
function pipeOnDrain$4(src) {
return function () {
var state = src._readableState;
debug$1('pipeOnDrain', state.awaitDrain);
if (state.awaitDrain) state.awaitDrain--;
if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {
state.flowing = true;
flow$4(src);
}
};
}
// Detach one destination (or all of them when `dest` is omitted) and emit
// 'unpipe' on each removed destination. unpipeInfo.hasUnpiped lets the
// pipe() teardown run exactly once per destination.
Readable$5.prototype.unpipe = function (dest) {
var state = this._readableState;
var unpipeInfo = { hasUnpiped: false };
// if we're not piping anywhere, then do nothing.
if (state.pipesCount === 0) return this;
// just one destination. most common case.
if (state.pipesCount === 1) {
// passed in one, but it's not the right one.
if (dest && dest !== state.pipes) return this;
if (!dest) dest = state.pipes;
// got a match.
state.pipes = null;
state.pipesCount = 0;
state.flowing = false;
if (dest) dest.emit('unpipe', this, unpipeInfo);
return this;
}
// slow case. multiple pipe destinations.
if (!dest) {
// remove all.
var dests = state.pipes;
var len = state.pipesCount;
state.pipes = null;
state.pipesCount = 0;
state.flowing = false;
for (var i = 0; i < len; i++) {
dests[i].emit('unpipe', this, unpipeInfo);
}return this;
}
// try to find the right one.
var index = indexOf$4(state.pipes, dest);
if (index === -1) return this;
state.pipes.splice(index, 1);
state.pipesCount -= 1;
// Collapse the array back to a bare destination when only one remains.
if (state.pipesCount === 1) state.pipes = state.pipes[0];
dest.emit('unpipe', this, unpipeInfo);
return this;
};
// set up data events if they are asked for
// Ensure readable listeners eventually get something
Readable$5.prototype.on = function (ev, fn) {
var res = streamBrowser.prototype.on.call(this, ev, fn);
if (ev === 'data') {
// Start flowing on next tick if stream isn't explicitly paused
if (this._readableState.flowing !== false) this.resume();
} else if (ev === 'readable') {
var state = this._readableState;
if (!state.endEmitted && !state.readableListening) {
state.readableListening = state.needReadable = true;
state.emittedReadable = false;
if (!state.reading) {
// Not currently reading: schedule a read(0) so the new listener
// gets a 'readable' once data shows up.
processNextickArgs.nextTick(nReadingNextTick, this);
} else if (state.length) {
// Data is already buffered: announce it immediately.
emitReadable$4(this);
}
}
}
return res;
};
Readable$5.prototype.addListener = Readable$5.prototype.on;
// Deferred read(0) used to prime a brand-new 'readable' listener.
function nReadingNextTick(self) {
debug$1('readable nexttick read 0');
self.read(0);
}
// pause() and resume() are remnants of the legacy readable stream API
// If the user uses them, then switch into old mode.
Readable$5.prototype.resume = function () {
var state = this._readableState;
if (!state.flowing) {
debug$1('resume');
state.flowing = true;
resume$1(this, state);
}
return this;
};
// Schedule the actual resume work for next tick, at most once.
function resume$1(stream, state) {
if (!state.resumeScheduled) {
state.resumeScheduled = true;
processNextickArgs.nextTick(resume_$1, stream, state);
}
}
// Deferred half of resume(): prime the source with read(0), clear any stale
// drain bookkeeping, emit 'resume', and start the flowing loop.
function resume_$1(stream, state) {
if (!state.reading) {
debug$1('resume read 0');
stream.read(0);
}
state.resumeScheduled = false;
state.awaitDrain = 0;
stream.emit('resume');
flow$4(stream);
if (state.flowing && !state.reading) stream.read(0);
}
// Stop emitting 'data' events; flowing becomes false (explicitly paused).
Readable$5.prototype.pause = function () {
debug$1('call pause flowing=%j', this._readableState.flowing);
if (false !== this._readableState.flowing) {
debug$1('pause');
this._readableState.flowing = false;
this.emit('pause');
}
return this;
};
// Flowing loop: keep read()ing (which emits 'data') until the buffer is
// empty or something (pause/backpressure) clears state.flowing.
function flow$4(stream) {
var state = stream._readableState;
debug$1('flow', state.flowing);
while (state.flowing && stream.read() !== null) {}
}
// wrap an old-style stream as the async data source.
// This is *not* part of the readable stream interface.
// It is an ugly unfortunate mess of history.
// Proxies the wrapped stream's data/end into push(), its methods and key
// events onto this stream, and pauses/resumes it for backpressure.
Readable$5.prototype.wrap = function (stream) {
var _this = this;
var state = this._readableState;
var paused = false;
stream.on('end', function () {
debug$1('wrapped end');
if (state.decoder && !state.ended) {
var chunk = state.decoder.end();
if (chunk && chunk.length) _this.push(chunk);
}
_this.push(null);
});
stream.on('data', function (chunk) {
debug$1('wrapped data');
if (state.decoder) chunk = state.decoder.write(chunk);
// don't skip over falsy values in objectMode
if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;
var ret = _this.push(chunk);
if (!ret) {
// push() reported backpressure: pause the source until _read().
paused = true;
stream.pause();
}
});
// proxy all the other methods.
// important when wrapping filters and duplexes.
for (var i in stream) {
if (this[i] === undefined && typeof stream[i] === 'function') {
// IIFE captures the current method name for the forwarding closure.
this[i] = function (method) {
return function () {
return stream[method].apply(stream, arguments);
};
}(i);
}
}
// proxy certain important events.
for (var n = 0; n < kProxyEvents.length; n++) {
stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n]));
}
// when we try to consume some more bytes, simply unpause the
// underlying stream.
this._read = function (n) {
debug$1('wrapped _read', n);
if (paused) {
paused = false;
stream.resume();
}
};
return this;
};
// Read-only view of the configured high-water mark.
Object.defineProperty(Readable$5.prototype, 'readableHighWaterMark', {
// making it explicit this property is not enumerable
// because otherwise some prototype manipulation in
// userland will fail
enumerable: false,
get: function () {
return this._readableState.highWaterMark;
}
});
// exposed for testing purposes only.
Readable$5._fromList = fromList$4;
// Pluck off n bytes from an array of buffers.
// Length is the combined lengths of all the buffers in the list.
// This function is designed to be inlinable, so please take care when making
// changes to the function body.
// Note: the caller (read) is responsible for decrementing state.length.
function fromList$4(n, state) {
// nothing buffered
if (state.length === 0) return null;
var ret;
if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) {
// read it all, truncate the list
if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length);
state.buffer.clear();
} else {
// read part of list
ret = fromListPartial(n, state.buffer, state.decoder);
}
return ret;
}
// Extracts only enough buffered data to satisfy the amount requested.
// This function is designed to be inlinable, so please take care when making
// changes to the function body.
// `hasStrings` is the state's decoder (truthy when chunks are strings).
function fromListPartial(n, list, hasStrings) {
var ret;
if (n < list.head.data.length) {
// slice is the same for buffers and strings
ret = list.head.data.slice(0, n);
list.head.data = list.head.data.slice(n);
} else if (n === list.head.data.length) {
// first chunk is a perfect match
ret = list.shift();
} else {
// result spans more than one buffer
ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list);
}
return ret;
}
// Consume exactly `n` characters from a linked list of string chunks,
// returning them as one string. The list is mutated in place: fully consumed
// nodes are dropped, a partially consumed node keeps its remainder, and
// list.length is reduced by the number of removed nodes. The caller
// guarantees n exceeds the first chunk's length.
function copyFromBufferString(n, list) {
  var node = list.head;
  var removed = 1;
  var out = node.data;
  n -= out.length;
  while ((node = node.next)) {
    var piece = node.data;
    var take = n > piece.length ? piece.length : n;
    out += take === piece.length ? piece : piece.slice(0, n);
    n -= take;
    if (n === 0) {
      if (take === piece.length) {
        // This node was consumed whole: unlink it too.
        ++removed;
        if (node.next) list.head = node.next;
        else list.head = list.tail = null;
      } else {
        // Partial consumption: this node becomes the new head.
        list.head = node;
        node.data = piece.slice(take);
      }
      break;
    }
    ++removed;
  }
  list.length -= removed;
  return out;
}
// Copies a specified amount of bytes from the list of buffered data chunks.
// This function is designed to be inlinable, so please take care when making
// changes to the function body.
// Mirrors copyFromBufferString but builds a Buffer; mutates the list in
// place (drops consumed nodes, keeps the remainder of a partial node, and
// shrinks list.length by the number of removed nodes).
function copyFromBuffer(n, list) {
var ret = Buffer$c.allocUnsafe(n);
var p = list.head;
var c = 1;
p.data.copy(ret);
n -= p.data.length;
while (p = p.next) {
var buf = p.data;
var nb = n > buf.length ? buf.length : n;
buf.copy(ret, ret.length - n, 0, nb);
n -= nb;
if (n === 0) {
if (nb === buf.length) {
++c;
if (p.next) list.head = p.next;else list.head = list.tail = null;
} else {
list.head = p;
p.data = buf.slice(nb);
}
break;
}
++c;
}
list.length -= c;
return ret;
}
// Schedule the 'end' event once the buffer is fully drained. The actual
// emission happens next tick (endReadableNT) to allow a last unshift().
function endReadable$4(stream) {
var state = stream._readableState;
// If we get here before consuming all the bytes, then that is a
// bug in node. Should never happen.
if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream');
if (!state.endEmitted) {
state.ended = true;
processNextickArgs.nextTick(endReadableNT, state, stream);
}
}
// Deferred half of endReadable$4: emit 'end' exactly once, and only if no
// data re-entered the buffer (e.g. via a last-second unshift()) between
// scheduling and this tick.
function endReadableNT(state, stream) {
  if (state.endEmitted || state.length !== 0) return;
  state.endEmitted = true;
  stream.readable = false;
  stream.emit('end');
}
// Linear scan with strict equality; returns the first matching index,
// or -1 when `x` is absent (note: never matches NaN, like Array#indexOf).
function indexOf$4(xs, x) {
  var len = xs.length;
  for (var idx = 0; idx < len; idx++) {
    if (xs[idx] === x) return idx;
  }
  return -1;
}
var _stream_transform$4 = Transform$4;
/*<replacement>*/
util.inherits = inherits_browser;
/*</replacement>*/
util.inherits(Transform$4, _stream_duplex$4);
// Callback handed to the user's _transform(): finalize the pending write,
// forward the transformed data to the readable side, and pull more input if
// the readable side still has appetite. Called with `this` = the stream.
function afterTransform$4(er, data) {
var ts = this._transformState;
ts.transforming = false;
var cb = ts.writecb;
// Guard against the user invoking the callback twice.
if (!cb) {
return this.emit('error', new Error('write callback called multiple times'));
}
ts.writechunk = null;
ts.writecb = null;
if (data != null) // single equals check for both `null` and `undefined`
this.push(data);
cb(er);
var rs = this._readableState;
rs.reading = false;
if (rs.needReadable || rs.length < rs.highWaterMark) {
this._read(rs.highWaterMark);
}
}
// Transform constructor: a Duplex whose writable side feeds the user's
// _transform() and whose readable side receives whatever it push()es.
function Transform$4(options) {
if (!(this instanceof Transform$4)) return new Transform$4(options);
_stream_duplex$4.call(this, options);
this._transformState = {
afterTransform: afterTransform$4.bind(this),
needTransform: false,
transforming: false,
writecb: null,
writechunk: null,
writeencoding: null
};
// start out asking for a readable event once data is transformed.
this._readableState.needReadable = true;
// we have implemented the _read method, and done the other things
// that Readable wants before the first _read call, so unset the
// sync guard flag.
this._readableState.sync = false;
if (options) {
if (typeof options.transform === 'function') this._transform = options.transform;
if (typeof options.flush === 'function') this._flush = options.flush;
}
// When the writable side finishes, then flush out anything remaining.
this.on('prefinish', prefinish$2);
}
// 'prefinish' handler: run the optional user _flush(), then finish via
// done$4 (which pushes EOF to the readable side).
function prefinish$2() {
var _this = this;
if (typeof this._flush === 'function') {
this._flush(function (er, data) {
done$4(_this, er, data);
});
} else {
done$4(this, null, null);
}
}
// push() from inside _transform(): output was produced, so no extra
// transform round is needed for the pending chunk.
Transform$4.prototype.push = function (chunk, encoding) {
this._transformState.needTransform = false;
return _stream_duplex$4.prototype.push.call(this, chunk, encoding);
};
// This is the part where you do stuff!
// override this function in implementation classes.
// 'chunk' is an input chunk.
//
// Call `push(newChunk)` to pass along transformed output
// to the readable side. You may call 'push' zero or more times.
//
// Call `cb(err)` when you are done with this chunk. If you pass
// an error, then that'll put the hurt on the whole operation. If you
// never call cb(), then you'll never get another chunk.
Transform$4.prototype._transform = function (chunk, encoding, cb) {
throw new Error('_transform() is not implemented');
};
// Writable-side hook: stash the chunk/callback and kick _read() if the
// readable side is ready to accept transformed output now.
Transform$4.prototype._write = function (chunk, encoding, cb) {
var ts = this._transformState;
ts.writecb = cb;
ts.writechunk = chunk;
ts.writeencoding = encoding;
if (!ts.transforming) {
var rs = this._readableState;
if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
}
};
// Doesn't matter what the args are here.
// _transform does all the work.
// That we got here means that the readable side wants more data.
Transform$4.prototype._read = function (n) {
var ts = this._transformState;
if (ts.writechunk !== null && ts.writecb && !ts.transforming) {
ts.transforming = true;
this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
} else {
// mark that we need a transform, so that any data that comes in
// will get processed, now that we've asked for it.
ts.needTransform = true;
}
};
// Destroy via the Duplex implementation, then emit 'close' afterwards.
Transform$4.prototype._destroy = function (err, cb) {
var _this2 = this;
_stream_duplex$4.prototype._destroy.call(this, err, function (err2) {
cb(err2);
_this2.emit('close');
});
};
// Finalize a Transform after flush: forward any last data, then assert the
// stream is quiescent before pushing EOF to the readable side.
function done$4(stream, er, data) {
if (er) return stream.emit('error', er);
if (data != null) // single equals check for both `null` and `undefined`
stream.push(data);
// if there's nothing in the write buffer, then that means
// that nothing more will ever be provided
if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0');
if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming');
return stream.push(null);
}
var _stream_passthrough$4 = PassThrough$4;
/*<replacement>*/
util.inherits = inherits_browser;
/*</replacement>*/
util.inherits(PassThrough$4, _stream_transform$4);
// PassThrough: the identity Transform — every chunk is forwarded unchanged.
function PassThrough$4(options) {
if (!(this instanceof PassThrough$4)) return new PassThrough$4(options);
_stream_transform$4.call(this, options);
}
PassThrough$4.prototype._transform = function (chunk, encoding, cb) {
cb(null, chunk);
};
// readable-stream's browser entry point: Readable is the main export, with
// the other stream classes attached as named exports.
var readableBrowser = createCommonjsModule(function (module, exports) {
exports = module.exports = _stream_readable$4;
exports.Stream = exports;
exports.Readable = exports;
exports.Writable = _stream_writable$4;
exports.Duplex = _stream_duplex$4;
exports.Transform = _stream_transform$4;
exports.PassThrough = _stream_passthrough$4;
});
var readableBrowser_1 = readableBrowser.Stream;
var readableBrowser_2 = readableBrowser.Readable;
var readableBrowser_3 = readableBrowser.Writable;
var readableBrowser_4 = readableBrowser.Duplex;
var readableBrowser_5 = readableBrowser.Transform;
var readableBrowser_6 = readableBrowser.PassThrough;
var toString$4 = Object.prototype.toString;
// True when the safe Buffer API (alloc/allocUnsafe/from) is available, i.e.
// we can avoid the deprecated `new Buffer(...)` constructor.
var isModern = (
  typeof Buffer.alloc === 'function' &&
  typeof Buffer.allocUnsafe === 'function' &&
  typeof Buffer.from === 'function'
);
// Cross-realm ArrayBuffer detection via the internal [[Class]] tag.
function isArrayBuffer (input) {
  var tag = toString$4.call(input);
  return tag.slice(8, -1) === 'ArrayBuffer'
}
// Copy `length` bytes starting at `byteOffset` out of an ArrayBuffer into a
// new Buffer. Throws RangeError when the requested window falls outside the
// buffer, matching Buffer.from's bounds behavior.
function fromArrayBuffer (obj, byteOffset, length) {
  byteOffset >>>= 0; // ToUint32, as Buffer.from does
  var maxLength = obj.byteLength - byteOffset;
  if (maxLength < 0) {
    throw new RangeError("'offset' is out of bounds")
  }
  if (length === undefined) {
    length = maxLength;
  } else {
    length >>>= 0;
    if (length > maxLength) {
      throw new RangeError("'length' is out of bounds")
    }
  }
  var section = obj.slice(byteOffset, byteOffset + length);
  if (isModern) {
    return Buffer.from(section)
  }
  return new Buffer(new Uint8Array(section))
}
// Decode a string into a Buffer, defaulting to utf8 when no (or an empty)
// encoding is supplied. Unknown encodings are rejected with a TypeError.
function fromString (string, encoding) {
  if (typeof encoding !== 'string' || encoding === '') {
    encoding = 'utf8';
  }
  if (!Buffer.isEncoding(encoding)) {
    throw new TypeError('"encoding" must be a valid string encoding')
  }
  if (isModern) {
    return Buffer.from(string, encoding)
  }
  return new Buffer(string, encoding)
}
// Polyfill of Buffer.from: dispatches on the value's type (ArrayBuffer,
// string, or array-like). Numbers are rejected to avoid the unsafe
// `new Buffer(size)` allocation semantics.
function bufferFrom (value, encodingOrOffset, length) {
  if (typeof value === 'number') {
    throw new TypeError('"value" argument must not be a number')
  }
  if (isArrayBuffer(value)) {
    return fromArrayBuffer(value, encodingOrOffset, length)
  }
  if (typeof value === 'string') {
    return fromString(value, encodingOrOffset)
  }
  if (isModern) {
    return Buffer.from(value)
  }
  return new Buffer(value)
}
var bufferFrom_1 = bufferFrom;
var typedarray = createCommonjsModule(function (module, exports) {
var undefined$1 = (void 0); // Paranoia: a local `undefined` that cannot be shadowed by pre-ES5 reassignment
// Beyond this value, index getters/setters (i.e. array[0], array[1]) are so slow to
// create, and consume so much memory, that the browser appears frozen.
var MAX_ARRAY_LENGTH = 1e5;
// Approximations of internal ECMAScript conversion functions
var ECMAScript = (function() {
  // Capture the originals so later monkey-patching cannot affect us.
  var toStr = Object.prototype.toString;
  var hasOwn = Object.prototype.hasOwnProperty;
  return {
    // Internal [[Class]] name; robust across frames, unlike instanceof.
    Class: function(v) { return toStr.call(v).replace(/^\[object *|\]$/g, ''); },
    HasProperty: function(o, p) { return p in o; },
    HasOwnProperty: function(o, p) { return hasOwn.call(o, p); },
    IsCallable: function(o) { return typeof o === 'function'; },
    ToInt32: function(v) { return v >> 0; },
    ToUint32: function(v) { return v >>> 0; }
  };
}());
// Snapshot intrinsics
// (taken once so later monkey-patching of Math cannot affect the polyfill)
var LN2 = Math.LN2,
abs = Math.abs,
floor = Math.floor,
log = Math.log,
min = Math.min,
pow = Math.pow,
round = Math.round;
// ES5: lock down object properties
// Makes every own property of `obj` read-only and non-enumerable, when the
// environment supports property descriptors; otherwise a silent no-op.
function configureProperties(obj) {
if (getOwnPropNames && defineProp) {
var props = getOwnPropNames(obj), i;
for (i = 0; i < props.length; i += 1) {
defineProp(obj, props[i], {
value: obj[props[i]],
writable: false,
enumerable: false,
configurable: false
});
}
}
}
// emulate ES5 getter/setter API using legacy APIs
// http://blogs.msdn.com/b/ie/archive/2010/09/07/transitioning-existing-code-to-the-es5-getter-setter-apis.aspx
// (second clause tests for Object.defineProperty() in IE<9 that only supports extending DOM prototypes, but
// note that IE<9 does not support __defineGetter__ or __defineSetter__ so it just renders the method harmless)
// Object.defineProperty when it genuinely works on plain objects, else a
// legacy emulation built on __defineGetter__/__defineSetter__.
var defineProp;
if (Object.defineProperty && (function() {
// Feature test: IE8's Object.defineProperty exists but only supports DOM
// prototypes and throws for plain objects.
try {
Object.defineProperty({}, 'x', {});
return true;
} catch (e) {
return false;
}
})()) {
defineProp = Object.defineProperty;
} else {
defineProp = function(o, p, desc) {
// BUGFIX: was `if (!o === Object(o))`, which compares a boolean against
// an object and can never be true, so the non-object guard was dead.
// Use the same test as the getOwnPropertyNames fallback below.
if (o !== Object(o)) throw new TypeError("Object.defineProperty called on non-object");
if (ECMAScript.HasProperty(desc, 'get') && Object.prototype.__defineGetter__) { Object.prototype.__defineGetter__.call(o, p, desc.get); }
if (ECMAScript.HasProperty(desc, 'set') && Object.prototype.__defineSetter__) { Object.prototype.__defineSetter__.call(o, p, desc.set); }
if (ECMAScript.HasProperty(desc, 'value')) { o[p] = desc.value; }
return o;
};
}
// Object.getOwnPropertyNames, or a for..in approximation that only sees
// enumerable own properties (sufficient for this polyfill's own objects).
var getOwnPropNames = Object.getOwnPropertyNames || function (o) {
  if (o !== Object(o)) throw new TypeError("Object.getOwnPropertyNames called on non-object");
  var names = [];
  for (var key in o) {
    if (ECMAScript.HasOwnProperty(o, key)) names.push(key);
  }
  return names;
};
// ES5: Make obj[index] an alias for obj._getter(index)/obj._setter(index, value)
// for index in 0 ... obj.length
// Install numeric index accessors (obj[i]) that forward to obj._getter /
// obj._setter, emulating typed-array element access on ES5 engines.
function makeArrayAccessors(obj) {
  if (!defineProp) { return; }
  if (obj.length > MAX_ARRAY_LENGTH) throw new RangeError("Array too large for polyfill");
  // One closure per index keeps `index` stable inside the accessors.
  var defineIndex = function(index) {
    defineProp(obj, index, {
      'get': function() { return obj._getter(index); },
      'set': function(v) { obj._setter(index, v); },
      enumerable: true,
      configurable: false
    });
  };
  for (var index = 0; index < obj.length; index += 1) {
    defineIndex(index);
  }
}
// Internal conversion functions:
// pack<Type>() - take a number (interpreted as Type), output a byte array
// unpack<Type>() - take a byte array, output a Type-like number
// Sign-extend / zero-extend the low `bits` bits of a 32-bit value.
function as_signed(value, bits) { var shift = 32 - bits; return (value << shift) >> shift; }
function as_unsigned(value, bits) { var shift = 32 - bits; return (value << shift) >>> shift; }
// Fixed-width integer (de)serialisation; byte arrays are big-endian.
function packI8(v) { return [v & 0xff]; }
function unpackI8(b) { return as_signed(b[0], 8); }
function packU8(v) { return [v & 0xff]; }
function unpackU8(b) { return as_unsigned(b[0], 8); }
// Uint8ClampedArray semantics: round, then clamp into [0, 255].
function packU8Clamped(v) { v = round(Number(v)); return [v < 0 ? 0 : v > 0xff ? 0xff : v & 0xff]; }
function packI16(v) { return [(v >> 8) & 0xff, v & 0xff]; }
function unpackI16(b) { return as_signed(b[0] << 8 | b[1], 16); }
function packU16(v) { return [(v >> 8) & 0xff, v & 0xff]; }
function unpackU16(b) { return as_unsigned(b[0] << 8 | b[1], 16); }
function packI32(v) { return [(v >> 24) & 0xff, (v >> 16) & 0xff, (v >> 8) & 0xff, v & 0xff]; }
function unpackI32(b) { return as_signed(b[0] << 24 | b[1] << 16 | b[2] << 8 | b[3], 32); }
function packU32(v) { return [(v >> 24) & 0xff, (v >> 16) & 0xff, (v >> 8) & 0xff, v & 0xff]; }
function unpackU32(b) { return as_unsigned(b[0] << 24 | b[1] << 16 | b[2] << 8 | b[3], 32); }
// Encode the number v as an IEEE-754 float with `ebits` exponent bits and
// `fbits` fraction bits; returns the big-endian byte array.
function packIEEE754(v, ebits, fbits) {
var bias = (1 << (ebits - 1)) - 1,
s, e, f, i, bits, str, bytes;
// Round half to even ("banker's rounding"), as IEEE-754 requires.
function roundToEven(n) {
var w = floor(n), f = n - w;
if (f < 0.5)
return w;
if (f > 0.5)
return w + 1;
return w % 2 ? w + 1 : w;
}
// Compute sign, exponent, fraction
if (v !== v) {
// NaN
// http://dev.w3.org/2006/webapi/WebIDL/#es-type-mapping
e = (1 << ebits) - 1; f = pow(2, fbits - 1); s = 0;
} else if (v === Infinity || v === -Infinity) {
// Infinities: all-ones exponent, zero fraction.
e = (1 << ebits) - 1; f = 0; s = (v < 0) ? 1 : 0;
} else if (v === 0) {
// Zero: the 1/v trick preserves the sign of negative zero.
e = 0; f = 0; s = (1 / v === -Infinity) ? 1 : 0;
} else {
s = v < 0;
v = abs(v);
if (v >= pow(2, 1 - bias)) {
e = min(floor(log(v) / LN2), 1023);
f = roundToEven(v / pow(2, e) * pow(2, fbits));
// Rounding may have pushed the fraction past 2: renormalize.
if (f / pow(2, fbits) >= 2) {
e = e + 1;
f = 1;
}
if (e > bias) {
// Overflow
e = (1 << ebits) - 1;
f = 0;
} else {
// Normalized
e = e + bias;
f = f - pow(2, fbits);
}
} else {
// Denormalized
e = 0;
f = roundToEven(v / pow(2, 1 - bias - fbits));
}
}
// Pack sign, exponent, fraction
bits = [];
for (i = fbits; i; i -= 1) { bits.push(f % 2 ? 1 : 0); f = floor(f / 2); }
for (i = ebits; i; i -= 1) { bits.push(e % 2 ? 1 : 0); e = floor(e / 2); }
bits.push(s ? 1 : 0);
bits.reverse();
str = bits.join('');
// Bits to bytes
bytes = [];
while (str.length) {
bytes.push(parseInt(str.substring(0, 8), 2));
str = str.substring(8);
}
return bytes;
}
// Decode a big-endian byte array as an IEEE-754 float with `ebits` exponent
// bits and `fbits` fraction bits; the inverse of packIEEE754.
function unpackIEEE754(bytes, ebits, fbits) {
// Bytes to bits
var bits = [], i, j, b, str,
bias, s, e, f;
for (i = bytes.length; i; i -= 1) {
b = bytes[i - 1];
for (j = 8; j; j -= 1) {
bits.push(b % 2 ? 1 : 0); b = b >> 1;
}
}
bits.reverse();
str = bits.join('');
// Unpack sign, exponent, fraction
bias = (1 << (ebits - 1)) - 1;
s = parseInt(str.substring(0, 1), 2) ? -1 : 1;
e = parseInt(str.substring(1, 1 + ebits), 2);
f = parseInt(str.substring(1 + ebits), 2);
// Produce number
if (e === (1 << ebits) - 1) {
// All-ones exponent: NaN when the fraction is non-zero, else +/-Infinity.
return f !== 0 ? NaN : s * Infinity;
} else if (e > 0) {
// Normalized
return s * pow(2, e - bias) * (1 + f / pow(2, fbits));
} else if (f !== 0) {
// Denormalized
return s * pow(2, -(bias - 1)) * (f / pow(2, fbits));
} else {
// Zero, keeping the decoded sign.
return s < 0 ? -0 : 0;
}
}
// Convenience (un)packers: float64 is 11/52 exponent/fraction bits,
// float32 is 8/23.
function unpackF64(b) { return unpackIEEE754(b, 11, 52); }
function packF64(v) { return packIEEE754(v, 11, 52); }
function unpackF32(b) { return unpackIEEE754(b, 8, 23); }
function packF32(v) { return packIEEE754(v, 8, 23); }
//
// 3 The ArrayBuffer Type
//
// ArrayBuffer plus the nine typed-array view types, backed by a plain JS
// array of byte values. Native implementations, when present on `exports`,
// are kept; the polyfill only fills gaps.
(function() {
/** @constructor */
var ArrayBuffer = function ArrayBuffer(length) {
length = ECMAScript.ToInt32(length);
if (length < 0) throw new RangeError('ArrayBuffer size is not a small enough positive integer');
this.byteLength = length;
// Backing store: a plain array of byte values, zero-initialised.
this._bytes = [];
this._bytes.length = length;
var i;
for (i = 0; i < this.byteLength; i += 1) {
this._bytes[i] = 0;
}
configureProperties(this);
};
exports.ArrayBuffer = exports.ArrayBuffer || ArrayBuffer;
//
// 4 The ArrayBufferView Type
//
// NOTE: this constructor is not exported
/** @constructor */
var ArrayBufferView = function ArrayBufferView() {
//this.buffer = null;
//this.byteOffset = 0;
//this.byteLength = 0;
};
//
// 5 The Typed Array View Types
//
function makeConstructor(bytesPerElement, pack, unpack) {
// Each TypedArray type requires a distinct constructor instance with
// identical logic, which this produces.
var ctor;
ctor = function(buffer, byteOffset, length) {
var array, sequence, i, s;
if (!arguments.length || typeof arguments[0] === 'number') {
// Constructor(unsigned long length)
this.length = ECMAScript.ToInt32(arguments[0]);
// NOTE(review): this tests the (unrelated) third parameter `length`
// rather than this.length, so a negative size is only rejected later by
// the ArrayBuffer constructor — confirm against upstream before relying
// on this message.
if (length < 0) throw new RangeError('ArrayBufferView size is not a small enough positive integer');
this.byteLength = this.length * this.BYTES_PER_ELEMENT;
this.buffer = new ArrayBuffer(this.byteLength);
this.byteOffset = 0;
} else if (typeof arguments[0] === 'object' && arguments[0].constructor === ctor) {
// Constructor(TypedArray array) — element-wise copy into a fresh buffer.
array = arguments[0];
this.length = array.length;
this.byteLength = this.length * this.BYTES_PER_ELEMENT;
this.buffer = new ArrayBuffer(this.byteLength);
this.byteOffset = 0;
for (i = 0; i < this.length; i += 1) {
this._setter(i, array._getter(i));
}
} else if (typeof arguments[0] === 'object' &&
!(arguments[0] instanceof ArrayBuffer || ECMAScript.Class(arguments[0]) === 'ArrayBuffer')) {
// Constructor(sequence<type> array)
sequence = arguments[0];
this.length = ECMAScript.ToUint32(sequence.length);
this.byteLength = this.length * this.BYTES_PER_ELEMENT;
this.buffer = new ArrayBuffer(this.byteLength);
this.byteOffset = 0;
for (i = 0; i < this.length; i += 1) {
s = sequence[i];
this._setter(i, Number(s));
}
} else if (typeof arguments[0] === 'object' &&
(arguments[0] instanceof ArrayBuffer || ECMAScript.Class(arguments[0]) === 'ArrayBuffer')) {
// Constructor(ArrayBuffer buffer,
// optional unsigned long byteOffset, optional unsigned long length)
this.buffer = buffer;
this.byteOffset = ECMAScript.ToUint32(byteOffset);
if (this.byteOffset > this.buffer.byteLength) {
throw new RangeError("byteOffset out of range");
}
if (this.byteOffset % this.BYTES_PER_ELEMENT) {
// The given byteOffset must be a multiple of the element
// size of the specific type, otherwise an exception is raised.
throw new RangeError("ArrayBuffer length minus the byteOffset is not a multiple of the element size.");
}
if (arguments.length < 3) {
// No explicit length: the view spans to the end of the buffer.
this.byteLength = this.buffer.byteLength - this.byteOffset;
if (this.byteLength % this.BYTES_PER_ELEMENT) {
throw new RangeError("length of buffer minus byteOffset not a multiple of the element size");
}
this.length = this.byteLength / this.BYTES_PER_ELEMENT;
} else {
this.length = ECMAScript.ToUint32(length);
this.byteLength = this.length * this.BYTES_PER_ELEMENT;
}
if ((this.byteOffset + this.byteLength) > this.buffer.byteLength) {
throw new RangeError("byteOffset and length reference an area beyond the end of the buffer");
}
} else {
throw new TypeError("Unexpected argument type(s)");
}
this.constructor = ctor;
configureProperties(this);
makeArrayAccessors(this);
};
ctor.prototype = new ArrayBufferView();
ctor.prototype.BYTES_PER_ELEMENT = bytesPerElement;
ctor.prototype._pack = pack;
ctor.prototype._unpack = unpack;
ctor.BYTES_PER_ELEMENT = bytesPerElement;
// getter type (unsigned long index);
ctor.prototype._getter = function(index) {
if (arguments.length < 1) throw new SyntaxError("Not enough arguments");
index = ECMAScript.ToUint32(index);
if (index >= this.length) {
return undefined$1;
}
// Collect BYTES_PER_ELEMENT bytes from the backing store and decode them.
var bytes = [], i, o;
for (i = 0, o = this.byteOffset + index * this.BYTES_PER_ELEMENT;
i < this.BYTES_PER_ELEMENT;
i += 1, o += 1) {
bytes.push(this.buffer._bytes[o]);
}
return this._unpack(bytes);
};
// NONSTANDARD: convenience alias for getter: type get(unsigned long index);
ctor.prototype.get = ctor.prototype._getter;
// setter void (unsigned long index, type value);
ctor.prototype._setter = function(index, value) {
if (arguments.length < 2) throw new SyntaxError("Not enough arguments");
index = ECMAScript.ToUint32(index);
if (index >= this.length) {
return undefined$1;
}
// Encode the value and write its bytes into the backing store.
var bytes = this._pack(value), i, o;
for (i = 0, o = this.byteOffset + index * this.BYTES_PER_ELEMENT;
i < this.BYTES_PER_ELEMENT;
i += 1, o += 1) {
this.buffer._bytes[o] = bytes[i];
}
};
// void set(TypedArray array, optional unsigned long offset);
// void set(sequence<type> array, optional unsigned long offset);
ctor.prototype.set = function(index, value) {
if (arguments.length < 1) throw new SyntaxError("Not enough arguments");
var array, sequence, offset, len,
i, s, d,
byteOffset, byteLength, tmp;
if (typeof arguments[0] === 'object' && arguments[0].constructor === this.constructor) {
// void set(TypedArray array, optional unsigned long offset);
array = arguments[0];
offset = ECMAScript.ToUint32(arguments[1]);
if (offset + array.length > this.length) {
throw new RangeError("Offset plus length of array is out of range");
}
byteOffset = this.byteOffset + offset * this.BYTES_PER_ELEMENT;
byteLength = array.length * this.BYTES_PER_ELEMENT;
if (array.buffer === this.buffer) {
// Same backing buffer: stage through a temp array so overlapping
// source/destination ranges behave like memmove.
tmp = [];
for (i = 0, s = array.byteOffset; i < byteLength; i += 1, s += 1) {
tmp[i] = array.buffer._bytes[s];
}
for (i = 0, d = byteOffset; i < byteLength; i += 1, d += 1) {
this.buffer._bytes[d] = tmp[i];
}
} else {
for (i = 0, s = array.byteOffset, d = byteOffset;
i < byteLength; i += 1, s += 1, d += 1) {
this.buffer._bytes[d] = array.buffer._bytes[s];
}
}
} else if (typeof arguments[0] === 'object' && typeof arguments[0].length !== 'undefined') {
// void set(sequence<type> array, optional unsigned long offset);
sequence = arguments[0];
len = ECMAScript.ToUint32(sequence.length);
offset = ECMAScript.ToUint32(arguments[1]);
if (offset + len > this.length) {
throw new RangeError("Offset plus length of array is out of range");
}
for (i = 0; i < len; i += 1) {
s = sequence[i];
this._setter(offset + i, Number(s));
}
} else {
throw new TypeError("Unexpected argument type(s)");
}
};
// TypedArray subarray(long begin, optional long end);
ctor.prototype.subarray = function(start, end) {
function clamp(v, min, max) { return v < min ? min : v > max ? max : v; }
start = ECMAScript.ToInt32(start);
end = ECMAScript.ToInt32(end);
if (arguments.length < 1) { start = 0; }
if (arguments.length < 2) { end = this.length; }
// Negative indices count back from the end, as in Array.prototype.slice.
if (start < 0) { start = this.length + start; }
if (end < 0) { end = this.length + end; }
start = clamp(start, 0, this.length);
end = clamp(end, 0, this.length);
var len = end - start;
if (len < 0) {
len = 0;
}
// The result is a view over the same buffer, not a copy.
return new this.constructor(
this.buffer, this.byteOffset + start * this.BYTES_PER_ELEMENT, len);
};
return ctor;
}
var Int8Array = makeConstructor(1, packI8, unpackI8);
var Uint8Array = makeConstructor(1, packU8, unpackU8);
var Uint8ClampedArray = makeConstructor(1, packU8Clamped, unpackU8);
var Int16Array = makeConstructor(2, packI16, unpackI16);
var Uint16Array = makeConstructor(2, packU16, unpackU16);
var Int32Array = makeConstructor(4, packI32, unpackI32);
var Uint32Array = makeConstructor(4, packU32, unpackU32);
var Float32Array = makeConstructor(4, packF32, unpackF32);
var Float64Array = makeConstructor(8, packF64, unpackF64);
// Keep native constructors when the host already provides them.
exports.Int8Array = exports.Int8Array || Int8Array;
exports.Uint8Array = exports.Uint8Array || Uint8Array;
exports.Uint8ClampedArray = exports.Uint8ClampedArray || Uint8ClampedArray;
exports.Int16Array = exports.Int16Array || Int16Array;
exports.Uint16Array = exports.Uint16Array || Uint16Array;
exports.Int32Array = exports.Int32Array || Int32Array;
exports.Uint32Array = exports.Uint32Array || Uint32Array;
exports.Float32Array = exports.Float32Array || Float32Array;
exports.Float64Array = exports.Float64Array || Float64Array;
}());
//
// 6 The DataView View Type
//
// DataView: endian-aware scalar reads/writes on top of the typed-array
// views defined (or natively provided) above.
(function() {
// Element read that works for both native typed arrays and the polyfill
// (whose get() alias always works, even without index accessors).
function r(array, index) {
return ECMAScript.IsCallable(array.get) ? array.get(index) : array[index];
}
// Detect host byte order once by viewing a Uint16 through its bytes.
var IS_BIG_ENDIAN = (function() {
var u16array = new(exports.Uint16Array)([0x1234]),
u8array = new(exports.Uint8Array)(u16array.buffer);
return r(u8array, 0) === 0x12;
}());
// Constructor(ArrayBuffer buffer,
// optional unsigned long byteOffset,
// optional unsigned long byteLength)
/** @constructor */
var DataView = function DataView(buffer, byteOffset, byteLength) {
if (arguments.length === 0) {
buffer = new exports.ArrayBuffer(0);
} else if (!(buffer instanceof exports.ArrayBuffer || ECMAScript.Class(buffer) === 'ArrayBuffer')) {
throw new TypeError("TypeError");
}
this.buffer = buffer || new exports.ArrayBuffer(0);
this.byteOffset = ECMAScript.ToUint32(byteOffset);
if (this.byteOffset > this.buffer.byteLength) {
throw new RangeError("byteOffset out of range");
}
if (arguments.length < 3) {
// No explicit length: the view spans to the end of the buffer.
this.byteLength = this.buffer.byteLength - this.byteOffset;
} else {
this.byteLength = ECMAScript.ToUint32(byteLength);
}
if ((this.byteOffset + this.byteLength) > this.buffer.byteLength) {
throw new RangeError("byteOffset and length reference an area beyond the end of the buffer");
}
configureProperties(this);
};
// Build a getXxx method: read BYTES_PER_ELEMENT bytes, reverse them when
// the requested endianness differs from the host's, then decode through
// the corresponding typed-array type.
function makeGetter(arrayType) {
return function(byteOffset, littleEndian) {
byteOffset = ECMAScript.ToUint32(byteOffset);
if (byteOffset + arrayType.BYTES_PER_ELEMENT > this.byteLength) {
throw new RangeError("Array index out of range");
}
byteOffset += this.byteOffset;
var uint8Array = new exports.Uint8Array(this.buffer, byteOffset, arrayType.BYTES_PER_ELEMENT),
bytes = [], i;
for (i = 0; i < arrayType.BYTES_PER_ELEMENT; i += 1) {
bytes.push(r(uint8Array, i));
}
if (Boolean(littleEndian) === Boolean(IS_BIG_ENDIAN)) {
bytes.reverse();
}
return r(new arrayType(new exports.Uint8Array(bytes).buffer), 0);
};
}
DataView.prototype.getUint8 = makeGetter(exports.Uint8Array);
DataView.prototype.getInt8 = makeGetter(exports.Int8Array);
DataView.prototype.getUint16 = makeGetter(exports.Uint16Array);
DataView.prototype.getInt16 = makeGetter(exports.Int16Array);
DataView.prototype.getUint32 = makeGetter(exports.Uint32Array);
DataView.prototype.getInt32 = makeGetter(exports.Int32Array);
DataView.prototype.getFloat32 = makeGetter(exports.Float32Array);
DataView.prototype.getFloat64 = makeGetter(exports.Float64Array);
// Build a setXxx method: encode the value through the typed-array type,
// reverse the bytes when the endianness differs, then write them.
function makeSetter(arrayType) {
return function(byteOffset, value, littleEndian) {
byteOffset = ECMAScript.ToUint32(byteOffset);
if (byteOffset + arrayType.BYTES_PER_ELEMENT > this.byteLength) {
throw new RangeError("Array index out of range");
}
// Get bytes
var typeArray = new arrayType([value]),
byteArray = new exports.Uint8Array(typeArray.buffer),
bytes = [], i, byteView;
for (i = 0; i < arrayType.BYTES_PER_ELEMENT; i += 1) {
bytes.push(r(byteArray, i));
}
// Flip if necessary
if (Boolean(littleEndian) === Boolean(IS_BIG_ENDIAN)) {
bytes.reverse();
}
// Write them
// NOTE(review): unlike the getter above, this does not add this.byteOffset
// before writing, so a DataView with a non-zero byteOffset may write to the
// wrong bytes — confirm against the upstream typedarray module.
byteView = new exports.Uint8Array(this.buffer, byteOffset, arrayType.BYTES_PER_ELEMENT);
byteView.set(bytes);
};
}
DataView.prototype.setUint8 = makeSetter(exports.Uint8Array);
DataView.prototype.setInt8 = makeSetter(exports.Int8Array);
DataView.prototype.setUint16 = makeSetter(exports.Uint16Array);
DataView.prototype.setInt16 = makeSetter(exports.Int16Array);
DataView.prototype.setUint32 = makeSetter(exports.Uint32Array);
DataView.prototype.setInt32 = makeSetter(exports.Int32Array);
DataView.prototype.setFloat32 = makeSetter(exports.Float32Array);
DataView.prototype.setFloat64 = makeSetter(exports.Float64Array);
exports.DataView = exports.DataView || DataView;
}());
});
// Named re-exports of the (possibly polyfilled) constructors under the
// interop names rollup generated for this module.
var typedarray_1 = typedarray.ArrayBuffer;
var typedarray_2 = typedarray.Int8Array;
var typedarray_3 = typedarray.Uint8Array;
var typedarray_4 = typedarray.Uint8ClampedArray;
var typedarray_5 = typedarray.Int16Array;
var typedarray_6 = typedarray.Uint16Array;
var typedarray_7 = typedarray.Int32Array;
var typedarray_8 = typedarray.Uint32Array;
var typedarray_9 = typedarray.Float32Array;
var typedarray_10 = typedarray.Float64Array;
var typedarray_11 = typedarray.DataView;
var Writable$5 = readableBrowser.Writable;
// Prefer the host's native Uint8Array; fall back to the polyfill above.
if (typeof Uint8Array === 'undefined') {
var U8 = typedarray.Uint8Array;
} else {
var U8 = Uint8Array;
}
// Writable stream that buffers every chunk and hands the concatenated body
// to `cb` once the stream finishes.
//
// opts.encoding selects the result type ('buffer', 'string', 'array',
// 'uint8array' or 'object'); when omitted it is inferred from the first
// chunk. 'u8'/'uint8' are accepted aliases for 'uint8array'.
function ConcatStream(opts, cb) {
  if (!(this instanceof ConcatStream)) return new ConcatStream(opts, cb)
  if (typeof opts === 'function') {
    // Called as ConcatStream(cb).
    cb = opts;
    opts = {};
  }
  opts = opts || {};
  var encoding = opts.encoding;
  var shouldInferEncoding = !encoding;
  if (encoding) {
    encoding = String(encoding).toLowerCase();
    if (encoding === 'u8' || encoding === 'uint8') {
      encoding = 'uint8array';
    }
  }
  // Object mode so chunks of any type pass through untouched.
  Writable$5.call(this, { objectMode: true });
  this.encoding = encoding;
  this.shouldInferEncoding = shouldInferEncoding;
  if (cb) this.on('finish', function () { cb(this.getBody()); });
  this.body = [];
}
// CommonJS export alias plus classic prototypal inheritance from Writable.
var concatStream = ConcatStream;
inherits_browser(ConcatStream, Writable$5);
// Writable implementation: stash each chunk as-is (object mode).
ConcatStream.prototype._write = function(chunk, encoding, callback) {
  this.body.push(chunk);
  callback();
};
// Guess the output encoding from the first buffered chunk (or from `buff`
// when supplied); unknown chunk types default to 'buffer'.
ConcatStream.prototype.inferEncoding = function (buff) {
  var sample = buff === undefined ? this.body[0] : buff;
  if (Buffer.isBuffer(sample)) return 'buffer'
  if (typeof Uint8Array !== 'undefined' && sample instanceof Uint8Array) return 'uint8array'
  if (Array.isArray(sample)) return 'array'
  if (typeof sample === 'string') return 'string'
  if (Object.prototype.toString.call(sample) === "[object Object]") return 'object'
  return 'buffer'
};
// Produce the concatenated body in the configured (or inferred) encoding;
// an unrecognised encoding returns the raw chunk array.
ConcatStream.prototype.getBody = function () {
  if (!this.encoding && this.body.length === 0) return []
  if (this.shouldInferEncoding) this.encoding = this.inferEncoding();
  switch (this.encoding) {
    case 'array': return arrayConcat(this.body)
    case 'string': return stringConcat(this.body)
    case 'buffer': return bufferConcat(this.body)
    case 'uint8array': return u8Concat(this.body)
    default: return this.body
  }
};
// True for anything whose [[Class]] name ends in 'Array' (Array,
// Uint8Array, Float64Array, ...).
function isArrayish (value) {
  return /Array\]$/.test(Object.prototype.toString.call(value))
}
// True for values bufferFrom() can consume: strings, array(-like)s, and
// objects exposing a typed-array style subarray().
function isBufferish (value) {
  return typeof value === 'string' || isArrayish(value) || (value && typeof value.subarray === 'function')
}
// Join parts into one utf8 string. Buffer-ish parts are converted to
// Buffers first; when the first part is a Buffer the whole result goes
// through Buffer.concat so multi-byte characters split across chunk
// boundaries decode correctly.
function stringConcat (parts) {
  var pieces = [];
  for (var i = 0; i < parts.length; i++) {
    var part = parts[i];
    if (typeof part === 'string' || Buffer.isBuffer(part)) {
      pieces.push(part);
    } else if (isBufferish(part)) {
      pieces.push(bufferFrom_1(part));
    } else {
      pieces.push(bufferFrom_1(String(part)));
    }
  }
  return Buffer.isBuffer(parts[0])
    ? Buffer.concat(pieces).toString('utf8')
    : pieces.join('')
}
// Concatenate parts into one Buffer, converting non-Buffer parts first
// (unrecognised values are stringified, then converted).
function bufferConcat (parts) {
  var buffers = parts.map(function (part) {
    if (Buffer.isBuffer(part)) return part
    if (isBufferish(part)) return bufferFrom_1(part)
    return bufferFrom_1(String(part))
  });
  return Buffer.concat(buffers)
}
// Flatten one level: concatenate the elements of every part into a single
// fresh array.
function arrayConcat (parts) {
  var merged = [];
  parts.forEach(function (part) {
    merged.push.apply(merged, part);
  });
  return merged
}
// Concatenate parts into one Uint8Array. Strings are converted to Buffers
// (mutating `parts` in place, like the original); two passes: size the
// result, then copy bytes.
function u8Concat (parts) {
  var total = 0;
  for (var i = 0; i < parts.length; i++) {
    if (typeof parts[i] === 'string') {
      parts[i] = bufferFrom_1(parts[i]);
    }
    total += parts[i].length;
  }
  var result = new U8(total);
  var offset = 0;
  for (var j = 0; j < parts.length; j++) {
    var part = parts[j];
    for (var k = 0; k < part.length; k++) {
      result[offset++] = part[k];
    }
  }
  return result
}
// Coerce a stored value into a Date: missing → now, string → parsed,
// anything else (assumed Date) → as-is.
var toDate = function(date) {
  if (!date) return new Date();
  return typeof date === 'string' ? new Date(date) : date;
};
// fs.Stats-like record describing one node of the level-backed filesystem.
var Stat = function(opts) {
  this.uid = opts.uid || 0;
  this.gid = opts.gid || 0;
  this.mode = opts.mode || 0;
  this.size = opts.size || 0;
  this.mtime = toDate(opts.mtime);
  this.atime = toDate(opts.atime);
  this.ctime = toDate(opts.ctime);
  this.type = opts.type;      // 'directory' | 'file' | 'symlink'
  this.target = opts.target;  // symlink target path, when type is 'symlink'
  this.link = opts.link;      // hard-link bookkeeping key
  this.blob = opts.blob;      // key of the blob holding file contents
};
Stat.prototype.isDirectory = function() { return this.type === 'directory'; };
Stat.prototype.isFile = function() { return this.type === 'file'; };
// Device nodes, FIFOs and sockets never exist in this filesystem.
Stat.prototype.isBlockDevice = function() { return false; };
Stat.prototype.isCharacterDevice = function() { return false; };
Stat.prototype.isSymbolicLink = function() { return this.type === 'symlink'; };
Stat.prototype.isFIFO = function() { return false; };
Stat.prototype.isSocket = function() { return false; };
// Factory used throughout the module instead of `new Stat`.
var stat = function(opts) {
  return new Stat(opts);
};
// CommonJS alias (hoisting makes the forward reference safe).
var hasKeys_1$2 = hasKeys$2;
// Only objects and functions can carry enumerable keys.
function hasKeys$2(source) {
  if (source === null) return false
  var kind = typeof source;
  return kind === "object" || kind === "function"
}
var xtend$4 = extend$4;
// Shallow-merge the own enumerable properties of every argument, left to
// right, into a fresh object; later sources win, non-objects are skipped.
function extend$4() {
  var merged = {};
  for (var i = 0; i < arguments.length; i++) {
    var src = arguments[i];
    if (!hasKeys_1$2(src)) {
      continue
    }
    for (var prop in src) {
      if (src.hasOwnProperty(prop)) {
        merged[prop] = src[prop];
      }
    }
  }
  return merged
}
// Synthetic stat record for the root directory '/': a 0777 directory that
// always exists and is never stored in the database.
var ROOT = stat({
type: 'directory',
mode: octal(777),
size: 4096
});
// Normalize a user-supplied key into an absolute path with no trailing
// slash; '/' itself is returned unchanged.
var normalize = function(key) {
  if (key[0] !== '/') key = '/' + key;
  key = path.normalize(key);
  if (key === '/') return key;
  return key.charAt(key.length - 1) === '/' ? key.slice(0, -1) : key;
};
// Prepend the path depth (base 36, zero-padded to 10 characters) so that
// lexicographic key order groups paths level by level.
var prefix = function(key) {
  var depth = key.split('/').length.toString(36);
  var pad = '0000000000'.slice(depth.length);
  return pad + depth + key;
};
// Stat-record store keyed by depth-prefixed path. Wraps the 'stats'
// sublevel with path resolution (symlink following), listing, and updates.
var paths = function(db) {
var that = {};
that.normalize = normalize;
// get(key, cb) -> cb(err, Stat, normalizedKey); '/' is the synthetic ROOT.
that.get = function(key, cb) {
key = normalize(key);
if (key === '/') return process.nextTick(cb.bind(null, null, ROOT, '/'));
db.get(prefix(key), {valueEncoding:'json'}, function(err, doc) {
if (err && err.notFound) return cb(errno_1$1.ENOENT(key), null, key);
if (err) return cb(err, null, key);
cb(null, stat(doc), key);
});
};
// writable(key, cb) — succeed only when key's parent resolves to a
// directory; '/' itself can never be (re)written.
that.writable = function(key, cb) {
key = normalize(key);
if (key === '/') return process.nextTick(cb.bind(null, errno_1$1.EPERM(key)));
that.follow(path.dirname(key), function(err, parent) {
if (err) return cb(err);
if (!parent.isDirectory()) return cb(errno_1$1.ENOTDIR(key));
cb(null, key);
});
};
// list(key, cb) — names of the direct children of a directory. The depth
// prefix guarantees the '\xff'-bounded key range spans exactly one level.
that.list = function(key, cb) {
key = normalize(key);
var start = prefix(key === '/' ? key : key + '/');
var keys = db.createKeyStream({start: start, end: start+'\xff'});
cb = once_1(cb);
keys.on('error', cb);
keys.pipe(concatStream({encoding:'object'}, function(files) {
files = files.map(function(file) {
return file.split('/').pop();
});
cb(null, files);
}));
};
// Walk dir segment by segment, hopping through symlink targets of
// intermediate components.
var resolve = function(dir, cb) {
var root = '/';
var parts = dir.split('/').slice(1);
var loop = function() {
that.get(path.join(root, parts.shift()), function(err, doc, key) {
if (err) return cb(err, doc, dir);
root = doc.target || key;
if (!parts.length) return cb(null, doc, key);
loop();
});
};
loop();
};
// follow(key, cb) — like get, but also resolves a chain of symlink
// targets at the final component.
that.follow = function(key, cb) {
resolve(normalize(key), function loop(err, doc, key) {
if (err) return cb(err, null, key);
if (doc.target) return that.get(doc.target, loop);
cb(null, stat(doc), key);
});
};
// update(key, opts, cb) — shallow-merge opts into the existing record.
that.update = function(key, opts, cb) {
that.get(key, function(err, doc, key) {
if (err) return cb(err);
if (key === '/') return cb(errno_1$1.EPERM(key));
that.put(key, xtend$4(doc, opts), cb);
});
};
// put(key, opts, cb) — store a stat record once the parent allows writes.
that.put = function(key, opts, cb) {
that.writable(key, function(err, key) {
if (err) return cb(err);
db.put(prefix(key), stat(opts), {valueEncoding:'json'}, cb);
});
};
// del(key, cb) — remove a stat record ('/' is protected).
that.del = function(key, cb) {
key = normalize(key);
if (key === '/') return process.nextTick(cb.bind(null, errno_1$1.EPERM(key)));
db.del(prefix(key), cb);
};
return that;
};
// Per-key change-notification hub. Returns an EventEmitter augmented with
// helpers to register watchers, broadcast changes, and wrap node-style
// callbacks so completing a write automatically notifies that key.
var watchers = function() {
  var keyEmitters = {};
  var hub = new events.EventEmitter();
  // Lazily create (and share) one emitter per key; attach cb when given.
  hub.watch = function(key, cb) {
    if (!keyEmitters[key]) {
      keyEmitters[key] = new events.EventEmitter();
      keyEmitters[key].setMaxListeners(0); // any number of watchers per key
    }
    if (cb) keyEmitters[key].on('change', cb);
    return keyEmitters[key];
  };
  // fs.watch-style wrapper: an emitter with a close() that detaches it.
  hub.watcher = function(key, cb) {
    var watcher = new events.EventEmitter();
    var onchange = function() {
      watcher.emit('change', 'change', key);
    };
    hub.watch(key, onchange);
    if (cb) watcher.on('change', cb);
    watcher.close = function() {
      hub.unwatch(key, onchange);
    };
    return watcher;
  };
  // Detach one listener (or all of them) and drop the per-key emitter once
  // nobody is listening any more.
  hub.unwatch = function(key, cb) {
    var emitter = keyEmitters[key];
    if (!emitter) return;
    if (cb) emitter.removeListener('change', cb);
    else emitter.removeAllListeners('change');
    if (!emitter.listeners('change').length) delete keyEmitters[key];
  };
  // Broadcast a change on one key and on the hub itself.
  hub.change = function(key) {
    if (keyEmitters[key]) keyEmitters[key].emit('change');
    hub.emit('change', key);
  };
  // Wrap a node-style callback so it fires change(key) before delegating.
  hub.cb = function(key, cb) {
    return function(err, val) {
      if (key) hub.change(key);
      if (cb) cb(err, val);
    };
  };
  return hub;
};
// Invoke cb(err, val) asynchronously on the next tick.
var nextTick = function(cb, error, value) {
  process.nextTick(function() {
    cb(error, value);
  });
};
// Shared do-nothing default for optional callbacks.
var noop$1 = function() {};
var levelFilesystem = function(db, opts) {
var fs = {};
db = levelSublevel(db);
var bl = levelBlobs(db.sublevel('blobs'), opts);
var ps = paths(db.sublevel('stats'));
var links = db.sublevel('links');
var listeners = watchers();
var fds = [];
var now = Date.now();
var inc = function() {
return ++now;
};
// mkdir(path[, mode], cb) — create a directory node; EEXIST when the path
// already resolves to anything.
fs.mkdir = function(key, mode, cb) {
  if (typeof mode === 'function') return fs.mkdir(key, null, mode);
  mode = mode || octal(777);
  cb = cb || noop$1;
  ps.follow(key, function(err, stat, key) {
    if (err && err.code !== 'ENOENT') return cb(err);
    if (stat) return cb(errno_1$1.EEXIST(key));
    var doc = {
      type: 'directory',
      mode: mode,
      size: 4096
    };
    ps.put(key, doc, listeners.cb(key, cb));
  });
};
// rmdir(path, cb) — remove a directory; fails with ENOTEMPTY unless it has
// no entries.
fs.rmdir = function(key, cb) {
  cb = cb || noop$1;
  ps.follow(key, function(err, stat, key) {
    if (err) return cb(err);
    fs.readdir(key, function(err, files) {
      if (err) return cb(err);
      if (files.length > 0) return cb(errno_1$1.ENOTEMPTY(key));
      ps.del(key, listeners.cb(key, cb));
    });
  });
};
// readdir(path, cb) — list the names directly under a directory.
fs.readdir = function(key, cb) {
  ps.follow(key, function(err, entry, resolved) {
    if (err) return cb(err);
    if (!entry) return cb(errno_1$1.ENOENT(resolved));
    if (!entry.isDirectory()) return cb(errno_1$1.ENOTDIR(resolved));
    ps.list(resolved, cb);
  });
};
// Shared stat/lstat worker: resolve via `lookup`, then overlay the actual
// blob size for regular files (directories/symlinks pass straight through).
var stat = function(key, lookup, cb) {
  lookup(key, function(err, doc, resolved) {
    if (err) return cb(err);
    if (!doc.isFile()) return cb(null, doc);
    var blob = doc && doc.blob || resolved;
    bl.size(blob, function(err, size) {
      if (err) return cb(err);
      doc.size = size;
      cb(null, doc);
    });
  });
};
// stat follows symlinks; lstat inspects the link node itself.
fs.stat = function(key, cb) {
  stat(key, ps.follow, cb);
};
fs.lstat = function(key, cb) {
  stat(key, ps.get, cb);
};
// exists(path, cb) — cb(true|false), mirroring node's callback-only API.
fs.exists = function(key, cb) {
  ps.follow(key, function(err) {
    cb(err ? false : true);
  });
};
// Shared chmod/lchmod worker: `lookup` decides whether symlinks are
// followed (ps.follow) or inspected (ps.get).
var chmod = function(key, lookup, mode, cb) {
  cb = cb || noop$1;
  lookup(key, function(err, stat, resolved) {
    if (err) return cb(err);
    ps.update(resolved, {mode: mode}, listeners.cb(resolved, cb));
  });
};
fs.chmod = function(key, mode, cb) {
  chmod(key, ps.follow, mode, cb);
};
fs.lchmod = function(key, mode, cb) {
  chmod(key, ps.get, mode, cb);
};
// Shared chown/lchown worker, symmetrical with chmod above.
var chown = function(key, lookup, uid, gid, cb) {
  cb = cb || noop$1;
  lookup(key, function(err, stat, resolved) {
    if (err) return cb(err);
    ps.update(resolved, {uid: uid, gid: gid}, listeners.cb(resolved, cb));
  });
};
fs.chown = function(key, uid, gid, cb) {
  chown(key, ps.follow, uid, gid, cb);
};
fs.lchown = function(key, uid, gid, cb) {
  chown(key, ps.get, uid, gid, cb);
};
// utimes(path, atime, mtime, cb) — update timestamps; falsy values leave
// the corresponding field untouched.
fs.utimes = function(key, atime, mtime, cb) {
  cb = cb || noop$1;
  ps.follow(key, function(err, stat, resolved) {
    if (err) return cb(err);
    var patch = {};
    if (atime) patch.atime = atime;
    if (mtime) patch.mtime = mtime;
    ps.update(resolved, patch, listeners.cb(resolved, cb));
  });
};
// rename(from, to, cb) — move a node. Files may overwrite files; a
// directory may only replace an empty directory; mixing a file and a
// directory fails with EISDIR.
fs.rename = function(from, to, cb) {
if (!cb) cb = noop$1;
ps.follow(from, function(err, statFrom, from) {
if (err) return cb(err);
var rename = function() {
// Notify watchers of both the old and the new path on completion.
cb = listeners.cb(to, listeners.cb(from, cb));
ps.put(to, statFrom, function(err) {
if (err) return cb(err);
ps.del(from, cb);
});
};
ps.follow(to, function(err, statTo, to) {
if (err && err.code !== 'ENOENT') return cb(err);
if (!statTo) return rename();
if (statFrom.isDirectory() !== statTo.isDirectory()) return cb(errno_1$1.EISDIR(from));
if (statTo.isDirectory()) {
// Directory over directory: only allowed when the target is empty.
fs.readdir(to, function(err, list) {
if (err) return cb(err);
if (list.length) return cb(errno_1$1.ENOTEMPTY(from));
rename();
});
return;
}
rename();
});
});
};
// realpath(path[, cache], cb) — resolve symlinks to the canonical key
// (the cache argument is accepted for API compatibility and ignored).
fs.realpath = function(key, cache, cb) {
  if (typeof cache === 'function') return fs.realpath(key, null, cache);
  ps.follow(key, function(err, stat, resolved) {
    if (err) return cb(err);
    cb(null, resolved);
  });
};
// writeFile(path, data[, opts], cb) — write (or, with an 'a…' flag, append)
// data to the blob behind `path` and upsert its stat record.
// opts may be {encoding, flags, mode} or a bare encoding string.
fs.writeFile = function(key, data, opts, cb) {
  if (typeof opts === 'function') return fs.writeFile(key, data, null, opts);
  if (typeof opts === 'string') opts = {encoding: opts};
  if (!opts) opts = {};
  if (!cb) cb = noop$1;
  // Use the bundle's safe Buffer factory rather than the deprecated (and
  // unsafe) `new Buffer(string, encoding)` constructor.
  if (!Buffer.isBuffer(data)) data = bufferFrom_1(data, opts.encoding || 'utf-8');
  var flags = opts.flags || 'w';
  opts.append = flags[0] !== 'w'; // any non-'w' flag appends
  ps.follow(key, function(err, stat, key) {
    if (err && err.code !== 'ENOENT') return cb(err);
    if (stat && stat.isDirectory()) return cb(errno_1$1.EISDIR(key));
    if (stat && flags[1] === 'x') return cb(errno_1$1.EEXIST(key)); // 'wx'/'ax': fail when it exists
    var blob = stat && stat.blob || key;
    ps.writable(key, function(err) {
      if (err) return cb(err);
      bl.write(blob, data, opts, function(err) {
        if (err) return cb(err);
        ps.put(key, {
          ctime: stat && stat.ctime, // preserve creation time across rewrites
          mtime: new Date(),
          mode: opts.mode || octal(666),
          type: 'file'
        }, listeners.cb(key, cb));
      });
    });
  });
};
// appendFile(path, data[, opts], cb) — writeFile with flags forced to 'a'.
fs.appendFile = function(key, data, opts, cb) {
  if (typeof opts === 'function') return fs.appendFile(key, data, null, opts);
  if (typeof opts === 'string') opts = {encoding: opts};
  opts = opts || {};
  opts.flags = 'a';
  fs.writeFile(key, data, opts, cb);
};
// unlink(path, cb) — delete a file node. Hard links share a blob and are
// indexed under '<blobKey>\xff…' entries in the links sublevel; the blob
// itself is only removed once no link entries remain. Directories are
// rejected with EISDIR.
fs.unlink = function(key, cb) {
if (!cb) cb = noop$1;
ps.get(key, function(err, stat, key) {
if (err) return cb(err);
if (stat.isDirectory()) return cb(errno_1$1.EISDIR(key));
var clean = function(target) {
// Peek for any remaining link entries for this blob; an error from
// levelPeek means the range is empty, so the blob data can go.
levelPeek(links, {start:target+'\xff', end:target+'\xff\xff'}, function(err) {
if (err) return bl.remove(target, cb); // no more links
cb();
});
};
var onlink = function() {
// This node is a hard link: drop its link entry, then maybe clean the
// shared blob (the blob key precedes the '\xff' separator).
var target = stat.link.slice(0, stat.link.indexOf('\xff'));
links.del(stat.link, function(err) {
if (err) return cb(err);
clean(target);
});
};
ps.del(key, listeners.cb(key, function(err) {
if (err) return cb(err);
if (stat.link) return onlink();
links.del(key+'\xff', function(err) {
if (err) return cb(err);
clean(key);
});
}));
});
};
// readFile(path[, opts], cb) — read the whole blob behind `path`; returns
// a string when opts.encoding is set, otherwise the raw Buffer.
// opts may also be a bare encoding string.
// (The former `encoding`/`flag` locals were computed but never used — the
// decode below reads opts.encoding directly — so they have been removed.)
fs.readFile = function(key, opts, cb) {
  if (typeof opts === 'function') return fs.readFile(key, null, opts);
  if (typeof opts === 'string') opts = {encoding: opts};
  if (!opts) opts = {};
  ps.follow(key, function(err, stat, key) {
    if (err) return cb(err);
    if (stat.isDirectory()) return cb(errno_1$1.EISDIR(key));
    var blob = stat && stat.blob || key;
    bl.read(blob, function(err, data) {
      if (err) return cb(err);
      cb(null, opts.encoding ? data.toString(opts.encoding) : data);
    });
  });
};
// createReadStream(path[, opts]) — readable stream over a file's blob.
// The underlying blob stream is created lazily via fwdStream once the stat
// lookup succeeds; 'open' is emitted then, and 'close' shortly after 'end'
// unless the consumer closed the stream first.
fs.createReadStream = function(key, opts) {
if (!opts) opts = {};
var closed = false;
var rs = fwdStream.readable(function(cb) {
ps.follow(key, function(err, stat, key) {
if (err) return cb(err);
if (stat.isDirectory()) return cb(errno_1$1.EISDIR(key));
var blob = stat && stat.blob || key;
var r = bl.createReadStream(blob, opts);
rs.emit('open');
r.on('end', function() {
process.nextTick(function() {
if (!closed) rs.emit('close');
});
});
cb(null, r);
});
});
rs.on('close', function() {
closed = true;
});
return rs;
};
fs.createWriteStream = function(key, opts) {
if (!opts) opts = {};
var flags = opts.flags || 'w';
var closed = false;
var mode = opts.mode || octal(666);
opts.append = flags[0] === 'a';
var ws = fwdStream.writable(function(cb) {
ps.follow(key, function(err, stat, key) {
if (err && err.code !== 'ENOENT') return cb(err);
if (stat && stat.isDirectory()) return cb(errno_1$1.EISDIR(key));
if (stat && flags[1] === 'x') return cb(errno_1$1.EEXIST(key));
var blob = stat && stat.blob || key;
ps.writable(blob, function(err) {
if (err) return cb(err);
var ctime = stat ? stat.ctime : new Date();
var s = {
ctime: ctime,
mtime: new Date(),
mode: mode,
type:'file'
};
ps.put(key, s, function(err) {
if (err) return cb(err);
var w = bl.createWriteStream(blob, opts);
ws.emit('open');
w.on('finish', function() {
s.mtime = new Date();
ps.put(key, s, function() {
listeners.change(key);
if (!closed) ws.emit('close');
});
});
cb(null, w);
});
});
});
});
ws.on('close', function() {
closed = true;
});
return ws;
};
fs.truncate = function(key, len, cb) {
ps.follow(key, function(err, stat, key) {
if (err) return cb(err);
var blob = stat && stat.blob || key;
bl.size(blob, function(err, size) {
if (err) return cb(err);
ps.writable(key, function(err) {
if (err) return cb(err);
cb = once_1(listeners.cb(key, cb));
if (!len) return bl.remove(blob, cb);
var ws = bl.createWriteStream(blob, {
start:size < len ? len-1 : len
});
ws.on('error', cb);
ws.on('finish', cb);
if (size < len) ws.write(new Buffer([0]));
ws.end();
});
});
});
};
fs.watchFile = function(key, opts, cb) {
if (typeof opts === 'function') return fs.watchFile(key, null, opts);
return listeners.watch(ps.normalize(key), cb);
};
fs.unwatchFile = function(key, cb) {
listeners.unwatch(ps.normalize(key), cb);
};
fs.watch = function(key, opts, cb) {
if (typeof opts === 'function') return fs.watch(key, null, opts)
return listeners.watcher(ps.normalize(key), cb);
};
fs.notify = function(cb) {
listeners.on('change', cb);
};
fs.open = function(key, flags, mode, cb) {
if (typeof mode === 'function') return fs.open(key, flags, null, mode);
ps.follow(key, function(err, stat, key) {
if (err && err.code !== 'ENOENT') return cb(err);
var fl = flags[0];
var plus = flags[1] === '+' || flags[2] === '+';
var blob = stat && stat.blob || key;
var f = {
key: key,
blob: blob,
mode: mode || octal(666),
readable: fl === 'r' || ((fl === 'w' || fl === 'a') && plus),
writable: fl === 'w' || fl === 'a' || (fl === 'r' && plus),
append: fl === 'a'
};
if (fl === 'r' && err) return cb(err);
if (flags[1] === 'x' && stat) return cb(errno_1$1.EEXIST(key));
if (stat && stat.isDirectory()) return cb(errno_1$1.EISDIR(key));
bl.size(blob, function(err, size) {
if (err) return cb(err);
if (f.append) f.writePos = size;
ps.writable(key, function(err) {
if (err) return cb(err);
var onready = function(err) {
if (err) return cb(err);
var i = fds.indexOf(null);
if (i === -1) i = 10+fds.push(fds.length+10)-1;
f.fd = i;
fds[i] = f;
listeners.change(key);
cb(null, f.fd);
};
var ontruncate = function(err) {
if (err) return cb(err);
if (stat) return onready();
ps.put(blob, {ctime:stat && stat.ctime, type:'file'}, onready);
};
if (!f.append && f.writable) return bl.remove(blob, ontruncate);
ontruncate();
});
});
});
};
fs.close = function(fd, cb) {
var f = fds[fd];
if (!f) return nextTick(cb, errno_1$1.EBADF());
fds[fd] = null;
nextTick(listeners.cb(f.key, cb));
};
fs.write = function(fd, buf, off, len, pos, cb) {
var f = fds[fd];
if (!cb) cb = noop$1;
if (!f || !f.writable) return nextTick(cb, errno_1$1.EBADF());
if (pos === null) pos = f.writePos || 0;
var slice = buf.slice(off, off+len);
f.writePos = pos + slice.length;
bl.write(f.blob, slice, {start:pos, append:true}, function(err) {
if (err) return cb(err);
cb(null, len, buf);
});
};
fs.read = function(fd, buf, off, len, pos, cb) {
var f = fds[fd];
if (!cb) cb = noop$1;
if (!f || !f.readable) return nextTick(cb, errno_1$1.EBADF());
if (pos === null) pos = fs.readPos || 0;
bl.read(f.blob, {start:pos, end:pos+len-1}, function(err, read) {
if (err) return cb(err);
var slice = read.slice(0, len);
slice.copy(buf, off);
fs.readPos = pos+slice.length;
cb(null, slice.length, buf);
});
};
fs.fsync = function(fd, cb) {
var f = fds[fd];
if (!cb) cb = noop$1;
if (!f || !f.writable) return nextTick(cb, errno_1$1.EBADF());
nextTick(cb);
};
fs.ftruncate = function(fd, len, cb) {
var f = fds[fd];
if (!cb) cb = noop$1;
if (!f) return nextTick(cb, errno_1$1.EBADF());
fs.truncate(f.blob, len, cb);
};
fs.fchown = function(fd, uid, gid, cb) {
var f = fds[fd];
if (!cb) cb = noop$1;
if (!f) return nextTick(cb, errno_1$1.EBADF());
fs.chown(f.key, uid, gid, cb);
};
fs.fchmod = function(fd, mode, cb) {
var f = fds[fd];
if (!cb) cb = noop$1;
if (!f) return nextTick(cb, errno_1$1.EBADF());
fs.chmod(f.key, mode, cb);
};
fs.futimes = function(fd, atime, mtime, cb) {
var f = fds[fd];
if (!cb) cb = noop$1;
if (!f) return nextTick(cb, errno_1$1.EBADF());
fs.utimes(f.key, atime, mtime, cb);
};
fs.fstat = function(fd, cb) {
var f = fds[fd];
if (!f) return nextTick(cb, errno_1$1.EBADF());
fs.stat(f.key, cb);
};
fs.symlink = function(target, name, cb) {
if (!cb) cb = noop$1;
ps.follow(target, function(err, stat, target) {
if (err) return cb(err);
ps.get(name, function(err, stat) {
if (err && err.code !== 'ENOENT') return cb(err);
if (stat) return cb(errno_1$1.EEXIST(name));
ps.put(name, {type:'symlink', target:target, mode:octal(777)}, cb);
});
});
};
fs.readlink = function(key, cb) {
ps.get(key, function(err, stat) {
if (err) return cb(err);
if (!stat.target) return cb(errno_1$1.EINVAL(key));
cb(null, stat.target);
});
};
fs.link = function(target, name, cb) {
if (!cb) cb = noop$1;
ps.follow(target, function(err, stat, target) {
if (err) return cb(err);
if (!stat.isFile()) return cb(errno_1$1.EINVAL(target));
ps.get(name, function(err, st) {
if (err && err.code !== 'ENOENT') return cb(err);
if (st) return cb(errno_1$1.EEXIST(name));
var link = target+'\xff'+inc();
links.put(target+'\xff', target, function(err) {
if (err) return cb(err);
links.put(link, target, function(err) {
if (err) return cb(err);
ps.put(name, {type:'file', link:link, blob:target, mode:stat.mode}, cb);
});
});
});
});
};
return fs;
};
// Wire the filesystem to a LevelUP database named 'level-filesystem',
// backed by the level-js (IndexedDB) adapter bundled earlier in this file.
var db$1 = levelup('level-filesystem', {db:levelJs});
// Default export: a browser-side object mimicking the node `fs` module API.
var browserifyFs = levelFilesystem(db$1);
export default browserifyFs;