Mirror of https://github.com/JonasunderscoreJones/jonas_jones-api.git (synced 2025-10-23 17:19:18 +02:00)

Commit e3c15bd288 ("some progress"), parent aea93a5527
1388 changed files with 306946 additions and 68323 deletions

232 node_modules/mongoose/.eslintrc.js (generated, vendored, Normal file)
@@ -0,0 +1,232 @@
'use strict';

module.exports = {
  extends: [
    'eslint:recommended'
  ],
  ignorePatterns: [
    'tools',
    'dist',
    'website.js',
    'test/files/*',
    'benchmarks',
    '*.min.js',
    'docs/js/native.js'
  ],
  overrides: [
    {
      files: [
        '**/*.{ts,tsx}',
        '**/*.md/*.ts',
        '**/*.md/*.typescript'
      ],
      extends: [
        'plugin:@typescript-eslint/eslint-recommended',
        'plugin:@typescript-eslint/recommended'
      ],
      plugins: [
        '@typescript-eslint'
      ],
      rules: {
        '@typescript-eslint/triple-slash-reference': 'off',
        '@typescript-eslint/no-non-null-assertion': 'off',
        '@typescript-eslint/no-empty-function': 'off',
        'spaced-comment': ['error', 'always', {
          block: { markers: ['!'], balanced: true },
          markers: ['/']
        }],
        '@typescript-eslint/no-explicit-any': 'off',
        '@typescript-eslint/ban-types': 'off',
        '@typescript-eslint/no-unused-vars': 'off',
        '@typescript-eslint/explicit-module-boundary-types': 'off',
        '@typescript-eslint/indent': ['warn', 2, {
          SwitchCase: 1,
          ignoredNodes: ['TSTypeParameterInstantiation']
        }],
        '@typescript-eslint/prefer-optional-chain': 'error',
        '@typescript-eslint/brace-style': 'error',
        '@typescript-eslint/no-dupe-class-members': 'error',
        '@typescript-eslint/no-redeclare': 'error',
        '@typescript-eslint/type-annotation-spacing': 'error',
        '@typescript-eslint/object-curly-spacing': ['error', 'always'],
        '@typescript-eslint/semi': 'error',
        '@typescript-eslint/space-before-function-paren': ['error', 'never'],
        '@typescript-eslint/space-infix-ops': 'off'
      }
    },
    {
      files: [
        'docs/js/**/*.js'
      ],
      env: {
        node: false,
        browser: true
      }
    }
    // // eslint-plugin-markdown has been disabled because of out-standing issues, see https://github.com/eslint/eslint-plugin-markdown/issues/214
    // {
    //   files: ['**/*.md'],
    //   processor: 'markdown/markdown'
    // },
    // {
    //   files: ['**/*.md/*.js', '**/*.md/*.javascript', '**/*.md/*.ts', '**/*.md/*.typescript'],
    //   parserOptions: {
    //     ecmaFeatures: {
    //       impliedStrict: true
    //     },
    //     sourceType: 'module', // required to allow "import" statements
    //     ecmaVersion: 'latest' // required to allow top-level await
    //   },
    //   rules: {
    //     'no-undef': 'off',
    //     'no-unused-expressions': 'off',
    //     'no-unused-vars': 'off',
    //     'no-redeclare': 'off',
    //     '@typescript-eslint/no-redeclare': 'off'
    //   }
    // }
  ],
  plugins: [
    'mocha-no-only'
    // 'markdown'
  ],
  parserOptions: {
    ecmaVersion: 2020
  },
  env: {
    node: true,
    es6: true
  },
  rules: {
    'comma-style': 'error',
    indent: ['error', 2, { SwitchCase: 1, VariableDeclarator: 2 }],
    'keyword-spacing': 'error',
    'no-whitespace-before-property': 'error',
    'no-buffer-constructor': 'warn',
    'no-console': 'off',
    'no-constant-condition': 'off',
    'no-multi-spaces': 'error',
    'func-call-spacing': 'error',
    'no-trailing-spaces': 'error',
    'no-undef': 'error',
    'no-unneeded-ternary': 'error',
    'no-const-assign': 'error',
    'no-useless-rename': 'error',
    'no-dupe-keys': 'error',
    'space-in-parens': ['error', 'never'],
    'spaced-comment': ['error', 'always', {
      block: { markers: ['!'], balanced: true }
    }],
    'key-spacing': ['error', { beforeColon: false, afterColon: true }],
    'comma-spacing': ['error', { before: false, after: true }],
    'array-bracket-spacing': 1,
    'arrow-spacing': ['error', { before: true, after: true }],
    'object-curly-spacing': ['error', 'always'],
    'comma-dangle': ['error', 'never'],
    'no-unreachable': 'error',
    quotes: ['error', 'single'],
    'quote-props': ['error', 'as-needed'],
    semi: 'error',
    'no-extra-semi': 'error',
    'semi-spacing': 'error',
    'no-spaced-func': 'error',
    'no-throw-literal': 'error',
    'space-before-blocks': 'error',
    'space-before-function-paren': ['error', 'never'],
    'space-infix-ops': 'error',
    'space-unary-ops': 'error',
    'no-var': 'warn',
    'prefer-const': 'warn',
    strict: ['error', 'global'],
    'no-restricted-globals': ['error', {
      name: 'context',
      message: 'Don\'t use Mocha\'s global context'
    }],
    'no-prototype-builtins': 'off',
    'mocha-no-only/mocha-no-only': ['error'],
    'no-empty': 'off',
    'eol-last': 'warn',
    'no-multiple-empty-lines': ['warn', { max: 2 }]
  }
};

4 node_modules/mongoose/.mocharc.yml (generated, vendored, Normal file)
@@ -0,0 +1,4 @@
reporter: spec # better to identify failing / slow tests than "dot"
ui: bdd # explicitly setting, even though it is mocha default
require:
  - test/mocha-fixtures.js

22 node_modules/mongoose/LICENSE.md (generated, vendored, Normal file)
@@ -0,0 +1,22 @@
# MIT License

Copyright (c) 2010-2013 LearnBoost <dev@learnboost.com>
Copyright (c) 2013-2021 Automattic

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

387 node_modules/mongoose/README.md (generated, vendored, Normal file)
@@ -0,0 +1,387 @@
# Mongoose

Mongoose is a [MongoDB](https://www.mongodb.org/) object modeling tool designed to work in an asynchronous environment. Mongoose supports [Node.js](https://nodejs.org/en/) and [Deno](https://deno.land/) (alpha).

Badges: [GitHub](https://github.com/Automattic/mongoose), [npm version](http://badge.fury.io/js/mongoose), [Deno](https://deno.land/x/mongoose), [npm downloads](https://www.npmjs.com/package/mongoose)

## Documentation

The official documentation website is [mongoosejs.com](http://mongoosejs.com/).

Mongoose 7.0.0 was released on February 27, 2023. You can find more details on [backwards-breaking changes in 7.0.0 on our docs site](https://mongoosejs.com/docs/migrating_to_7.html).

## Support

- [Stack Overflow](http://stackoverflow.com/questions/tagged/mongoose)
- [Bug Reports](https://github.com/Automattic/mongoose/issues/)
- [Mongoose Slack Channel](http://slack.mongoosejs.io/)
- [Help Forum](http://groups.google.com/group/mongoose-orm)
- [MongoDB Support](https://www.mongodb.com/docs/manual/support/)

## Plugins

Check out the [plugins search site](http://plugins.mongoosejs.io/) to see hundreds of related modules from the community. Next, learn how to write your own plugin from the [docs](http://mongoosejs.com/docs/plugins.html) or [this blog post](http://thecodebarbarian.com/2015/03/06/guide-to-mongoose-plugins).

## Contributors

Pull requests are always welcome! Please base pull requests against the `master` branch and follow the [contributing guide](https://github.com/Automattic/mongoose/blob/master/CONTRIBUTING.md).

If your pull request makes documentation changes, please do **not** modify any `.html` files. The `.html` files are compiled code, so please make your changes in `docs/*.pug`, `lib/*.js`, or `test/docs/*.js`.

View all 400+ [contributors](https://github.com/Automattic/mongoose/graphs/contributors).

## Installation

First install [Node.js](http://nodejs.org/) and [MongoDB](https://www.mongodb.org/downloads). Then:

```sh
$ npm install mongoose
```

Mongoose 6.8.0 also includes alpha support for [Deno](https://deno.land/).

## Importing

```javascript
// Using Node.js `require()`
const mongoose = require('mongoose');

// Using ES6 imports
import mongoose from 'mongoose';
```

Or, using [Deno's `createRequire()` for CommonJS support](https://deno.land/std@0.113.0/node/README.md?source=#commonjs-modules-loading) as follows.

```javascript
import { createRequire } from 'https://deno.land/std/node/module.ts';
const require = createRequire(import.meta.url);

const mongoose = require('mongoose');

mongoose.connect('mongodb://127.0.0.1:27017/test')
  .then(() => console.log('Connected!'));
```

You can then run the above script using the following.

```
deno run --allow-net --allow-read --allow-sys --allow-env mongoose-test.js
```

## Mongoose for Enterprise

Available as part of the Tidelift Subscription

The maintainers of mongoose and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. [Learn more.](https://tidelift.com/subscription/pkg/npm-mongoose?utm_source=npm-mongoose&utm_medium=referral&utm_campaign=enterprise&utm_term=repo)

## Overview

### Connecting to MongoDB

First, we need to define a connection. If your app uses only one database, you should use `mongoose.connect`. If you need to create additional connections, use `mongoose.createConnection`.

Both `connect` and `createConnection` take a `mongodb://` URI, or the parameters `host, database, port, options`.

```js
await mongoose.connect('mongodb://127.0.0.1/my_database');
```

Once connected, the `open` event is fired on the `Connection` instance. If you're using `mongoose.connect`, the `Connection` is `mongoose.connection`. Otherwise, the `mongoose.createConnection` return value is a `Connection`.

**Note:** _If the local connection fails, try using 127.0.0.1 instead of localhost. Sometimes issues may arise when the local hostname has been changed._

**Important!** Mongoose buffers all the commands until it's connected to the database. This means that you don't have to wait until it connects to MongoDB in order to define models, run queries, etc.
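
The following is a minimal sketch (not part of the original README) of the two points above: listening for the connection's `open` event, and issuing a model operation before the connection is established so that Mongoose buffers it. The `Kitten` model and database name are illustrative.

```js
const mongoose = require('mongoose');

// Fires once the underlying connection is established.
mongoose.connection.once('open', () => console.log('connection open'));

// Defined before connecting -- this is fine.
const Kitten = mongoose.model('Kitten', new mongoose.Schema({ name: String }));

// Issued before connecting -- Mongoose buffers it and runs it once connected.
const pending = Kitten.find();

mongoose.connect('mongodb://127.0.0.1:27017/test')
  .then(() => pending)
  .then(docs => console.log(docs.length));
```
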
### Defining a Model

Models are defined through the `Schema` interface.

```js
const Schema = mongoose.Schema;
const ObjectId = Schema.ObjectId;

const BlogPost = new Schema({
  author: ObjectId,
  title: String,
  body: String,
  date: Date
});
```

Aside from defining the structure of your documents and the types of data you're storing, a Schema handles the definition of:

* [Validators](http://mongoosejs.com/docs/validation.html) (async and sync)
* [Defaults](http://mongoosejs.com/docs/api/schematype.html#schematype_SchemaType-default)
* [Getters](http://mongoosejs.com/docs/api/schematype.html#schematype_SchemaType-get)
* [Setters](http://mongoosejs.com/docs/api/schematype.html#schematype_SchemaType-set)
* [Indexes](http://mongoosejs.com/docs/guide.html#indexes)
* [Middleware](http://mongoosejs.com/docs/middleware.html)
* [Methods](http://mongoosejs.com/docs/guide.html#methods) definition
* [Statics](http://mongoosejs.com/docs/guide.html#statics) definition
* [Plugins](http://mongoosejs.com/docs/plugins.html)
* [pseudo-JOINs](http://mongoosejs.com/docs/populate.html)

The following example shows some of these features:

```js
const Comment = new Schema({
  name: { type: String, default: 'hahaha' },
  age: { type: Number, min: 18, index: true },
  bio: { type: String, match: /[a-z]/ },
  date: { type: Date, default: Date.now },
  buff: Buffer
});

// a setter
Comment.path('name').set(function(v) {
  return capitalize(v);
});

// middleware
Comment.pre('save', function(next) {
  notify(this.get('email'));
  next();
});
```

Take a look at the example in [`examples/schema/schema.js`](https://github.com/Automattic/mongoose/blob/master/examples/schema/schema.js) for an end-to-end example of a typical setup.

### Accessing a Model

Once we define a model through `mongoose.model('ModelName', mySchema)`, we can access it through the same function:

```js
const MyModel = mongoose.model('ModelName');
```

Or just do it all at once:

```js
const MyModel = mongoose.model('ModelName', mySchema);
```

The first argument is the _singular_ name of the collection your model is for. **Mongoose automatically looks for the _plural_ version of your model name.** For example, if you use

```js
const MyModel = mongoose.model('Ticket', mySchema);
```

Then `MyModel` will use the __tickets__ collection, not the __ticket__ collection. For more details read the [model docs](https://mongoosejs.com/docs/api/mongoose.html#mongoose_Mongoose-model).

Once we have our model, we can then instantiate it, and save it:

```js
const instance = new MyModel();
instance.my.key = 'hello';
instance.save(function(err) {
  //
});
```

Or we can find documents from the same collection:

```js
MyModel.find({}, function(err, docs) {
  // docs.forEach
});
```

You can also `findOne`, `findById`, `update`, etc.

```js
const instance = await MyModel.findOne({ /* ... */ });
console.log(instance.my.key); // 'hello'
```

For more details check out [the docs](http://mongoosejs.com/docs/queries.html).

**Important!** If you opened a separate connection using `mongoose.createConnection()` but attempt to access the model through `mongoose.model('ModelName')`, it will not work as expected, since it is not hooked up to an active db connection. In this case, access your model through the connection you created:

```js
const conn = mongoose.createConnection('your connection string');
const MyModel = conn.model('ModelName', schema);
const m = new MyModel;
m.save(); // works
```

vs

```js
const conn = mongoose.createConnection('your connection string');
const MyModel = mongoose.model('ModelName', schema);
const m = new MyModel;
m.save(); // does not work b/c the default connection object was never connected
```

### Embedded Documents

In the first example snippet, we defined a key in the Schema that looks like:

```
comments: [Comment]
```

Where `Comment` is a `Schema` we created. This means that creating embedded documents is as simple as:

```js
// retrieve my model
const BlogPost = mongoose.model('BlogPost');

// create a blog post
const post = new BlogPost();

// create a comment
post.comments.push({ title: 'My comment' });

post.save(function(err) {
  if (!err) console.log('Success!');
});
```

The same goes for removing them:

```js
BlogPost.findById(myId, function(err, post) {
  if (!err) {
    post.comments[0].remove();
    post.save(function(err) {
      // do something
    });
  }
});
```

Embedded documents enjoy all the same features as your models: defaults, validators, middleware. Whenever an error occurs, it's bubbled to the `save()` error callback, so error handling is a snap!
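
As a hedged illustration of that last point (the schema and model names here are hypothetical, not from the original README), a validator defined on the embedded schema surfaces in the parent's `save()` callback:

```js
const CommentSchema = new Schema({ title: { type: String, required: true } });
const PostSchema = new Schema({ comments: [CommentSchema] });
const Post = mongoose.model('Post', PostSchema);

const post = new Post({ comments: [{}] }); // embedded doc is missing `title`

post.save(function(err) {
  // The embedded document's validation error bubbles up to the parent save().
  console.log(err.errors['comments.0.title'].message);
});
```
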
### Middleware

See the [docs](http://mongoosejs.com/docs/middleware.html) page.

#### Intercepting and mutating method arguments

You can intercept method arguments via middleware.

For example, this would allow you to broadcast changes about your Documents every time someone `set`s a path in your Document to a new value:

```js
schema.pre('set', function(next, path, val, typel) {
  // `this` is the current Document
  this.emit('set', path, val);

  // Pass control to the next pre
  next();
});
```

Moreover, you can mutate the incoming `method` arguments so that subsequent middleware see different values for those arguments. To do so, just pass the new values to `next`:

```js
schema.pre(method, function firstPre(next, methodArg1, methodArg2) {
  // Mutate methodArg1
  next('altered-' + methodArg1.toString(), methodArg2);
});

// pre declaration is chainable
schema.pre(method, function secondPre(next, methodArg1, methodArg2) {
  console.log(methodArg1);
  // => 'altered-originalValOfMethodArg1'

  console.log(methodArg2);
  // => 'originalValOfMethodArg2'

  // Passing no arguments to `next` automatically passes along the current argument values
  // i.e., the following `next()` is equivalent to `next(methodArg1, methodArg2)`
  // and also equivalent to, with the example method arg
  // values, `next('altered-originalValOfMethodArg1', 'originalValOfMethodArg2')`
  next();
});
```

#### Schema gotcha

`type`, when used in a schema, has special meaning within Mongoose. If your schema requires using `type` as a nested property, you must use object notation:

```js
new Schema({
  broken: { type: Boolean },
  asset: {
    name: String,
    type: String // uh oh, it broke. asset will be interpreted as String
  }
});

new Schema({
  works: { type: Boolean },
  asset: {
    name: String,
    type: { type: String } // works. asset is an object with a type property
  }
});
```

### Driver Access

Mongoose is built on top of the [official MongoDB Node.js driver](https://github.com/mongodb/node-mongodb-native). Each mongoose model keeps a reference to a [native MongoDB driver collection](http://mongodb.github.io/node-mongodb-native/2.1/api/Collection.html). The collection object can be accessed using `YourModel.collection`. However, using the collection object directly bypasses all mongoose features, including hooks, validation, etc. The one notable exception is that `YourModel.collection` still buffers commands. As such, `YourModel.collection.find()` will **not** return a cursor.
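
A short sketch of the difference (the model name and query are illustrative, not from the original README, and an open connection is assumed): the native collection call skips casting, middleware and hydration, while the model call applies them.

```js
const User = mongoose.model('User', new mongoose.Schema({ name: String }));

// Native driver collection: raw documents, no Mongoose casting, middleware or validation.
const raw = await User.collection.findOne({ name: 'Val' });

// Mongoose model: casting, middleware, and a hydrated Document instance.
const doc = await User.findOne({ name: 'Val' });
```
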
## API Docs

Find the API docs [here](http://mongoosejs.com/docs/api/mongoose.html), generated using [dox](https://github.com/tj/dox) and [acquit](https://github.com/vkarpov15/acquit).

## Related Projects

#### MongoDB Runners

- [run-rs](https://www.npmjs.com/package/run-rs)
- [mongodb-memory-server](https://www.npmjs.com/package/mongodb-memory-server)
- [mongodb-topology-manager](https://www.npmjs.com/package/mongodb-topology-manager)

#### Unofficial CLIs

- [mongoosejs-cli](https://www.npmjs.com/package/mongoosejs-cli)

#### Data Seeding

- [dookie](https://www.npmjs.com/package/dookie)
- [seedgoose](https://www.npmjs.com/package/seedgoose)
- [mongoose-data-seed](https://www.npmjs.com/package/mongoose-data-seed)

#### Express Session Stores

- [connect-mongodb-session](https://www.npmjs.com/package/connect-mongodb-session)
- [connect-mongo](https://www.npmjs.com/package/connect-mongo)

## License

Copyright (c) 2010 LearnBoost <dev@learnboost.com>

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

1 node_modules/mongoose/SECURITY.md (generated, vendored, Normal file)
@@ -0,0 +1 @@
Please follow the instructions on [Tidelift's security page](https://tidelift.com/docs/security) to report a security issue.

8 node_modules/mongoose/browser.js (generated, vendored, Normal file)
@@ -0,0 +1,8 @@
/**
 * Export lib/mongoose
 *
 */

'use strict';

module.exports = require('./lib/browser');

2 node_modules/mongoose/dist/browser.umd.js (generated, vendored, Normal file)
File diff suppressed because one or more lines are too long

62 node_modules/mongoose/index.js (generated, vendored, Normal file)
@@ -0,0 +1,62 @@
/**
 * Export lib/mongoose
 *
 */

'use strict';

const mongoose = require('./lib/');

module.exports = mongoose;
module.exports.default = mongoose;
module.exports.mongoose = mongoose;

// Re-export for ESM support
module.exports.cast = mongoose.cast;
module.exports.STATES = mongoose.STATES;
module.exports.setDriver = mongoose.setDriver;
module.exports.set = mongoose.set;
module.exports.get = mongoose.get;
module.exports.createConnection = mongoose.createConnection;
module.exports.connect = mongoose.connect;
module.exports.disconnect = mongoose.disconnect;
module.exports.startSession = mongoose.startSession;
module.exports.pluralize = mongoose.pluralize;
module.exports.model = mongoose.model;
module.exports.deleteModel = mongoose.deleteModel;
module.exports.modelNames = mongoose.modelNames;
module.exports.plugin = mongoose.plugin;
module.exports.connections = mongoose.connections;
module.exports.version = mongoose.version;
module.exports.Mongoose = mongoose.Mongoose;
module.exports.Schema = mongoose.Schema;
module.exports.SchemaType = mongoose.SchemaType;
module.exports.SchemaTypes = mongoose.SchemaTypes;
module.exports.VirtualType = mongoose.VirtualType;
module.exports.Types = mongoose.Types;
module.exports.Query = mongoose.Query;
module.exports.Model = mongoose.Model;
module.exports.Document = mongoose.Document;
module.exports.ObjectId = mongoose.ObjectId;
module.exports.isValidObjectId = mongoose.isValidObjectId;
module.exports.isObjectIdOrHexString = mongoose.isObjectIdOrHexString;
module.exports.syncIndexes = mongoose.syncIndexes;
module.exports.Decimal128 = mongoose.Decimal128;
module.exports.Mixed = mongoose.Mixed;
module.exports.Date = mongoose.Date;
module.exports.Number = mongoose.Number;
module.exports.Error = mongoose.Error;
module.exports.now = mongoose.now;
module.exports.CastError = mongoose.CastError;
module.exports.SchemaTypeOptions = mongoose.SchemaTypeOptions;
module.exports.mongo = mongoose.mongo;
module.exports.mquery = mongoose.mquery;
module.exports.sanitizeFilter = mongoose.sanitizeFilter;
module.exports.trusted = mongoose.trusted;
module.exports.skipMiddlewareFunction = mongoose.skipMiddlewareFunction;
module.exports.overwriteMiddlewareResult = mongoose.overwriteMiddlewareResult;

// The following properties are not exported using ESM because `setDriver()` can mutate these
// module.exports.connection = mongoose.connection;
// module.exports.Collection = mongoose.Collection;
// module.exports.Connection = mongoose.Connection;
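
A small usage sketch of why those named re-exports exist (not part of the vendored file): they let ESM consumers use named imports alongside the default export.

```js
// ESM consumer code, enabled by the explicit re-exports above.
import mongoose, { Schema, model } from 'mongoose';

const Cat = model('Cat', new Schema({ name: String }));
await mongoose.connect('mongodb://127.0.0.1:27017/test');
```
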
12 node_modules/mongoose/lgtm.yml (generated, vendored, Normal file)
@@ -0,0 +1,12 @@
path_classifiers:
  src:
    - lib
  types:
    - types
  test:
    - test
  docs:
    - docs
queries:
  - exclude: "*"
  - include: lib

1179 node_modules/mongoose/lib/aggregate.js (generated, vendored, Normal file)
File diff suppressed because it is too large

132 node_modules/mongoose/lib/browser.js (generated, vendored, Normal file)
@@ -0,0 +1,132 @@
/* eslint-env browser */

'use strict';

require('./driver').set(require('./drivers/browser'));

const DocumentProvider = require('./document_provider.js');

DocumentProvider.setBrowser(true);

/**
 * The [MongooseError](#error_MongooseError) constructor.
 *
 * @method Error
 * @api public
 */

exports.Error = require('./error/index');

/**
 * The Mongoose [Schema](#schema_Schema) constructor
 *
 * #### Example:
 *
 *     const mongoose = require('mongoose');
 *     const Schema = mongoose.Schema;
 *     const CatSchema = new Schema(..);
 *
 * @method Schema
 * @api public
 */

exports.Schema = require('./schema');

/**
 * The various Mongoose Types.
 *
 * #### Example:
 *
 *     const mongoose = require('mongoose');
 *     const array = mongoose.Types.Array;
 *
 * #### Types:
 *
 * - [Array](/docs/schematypes.html#arrays)
 * - [Buffer](/docs/schematypes.html#buffers)
 * - [Embedded](/docs/schematypes.html#schemas)
 * - [DocumentArray](/docs/api/documentarraypath.html)
 * - [Decimal128](/docs/api/mongoose.html#mongoose_Mongoose-Decimal128)
 * - [ObjectId](/docs/schematypes.html#objectids)
 * - [Map](/docs/schematypes.html#maps)
 * - [Subdocument](/docs/schematypes.html#schemas)
 *
 * Using this exposed access to the `ObjectId` type, we can construct ids on demand.
 *
 *     const ObjectId = mongoose.Types.ObjectId;
 *     const id1 = new ObjectId;
 *
 * @property Types
 * @api public
 */
exports.Types = require('./types');

/**
 * The Mongoose [VirtualType](#virtualtype_VirtualType) constructor
 *
 * @method VirtualType
 * @api public
 */
exports.VirtualType = require('./virtualtype');

/**
 * The various Mongoose SchemaTypes.
 *
 * #### Note:
 *
 * _Alias of mongoose.Schema.Types for backwards compatibility._
 *
 * @property SchemaTypes
 * @see Schema.SchemaTypes #schema_Schema-Types
 * @api public
 */

exports.SchemaType = require('./schematype.js');

/**
 * Internal utils
 *
 * @property utils
 * @api private
 */

exports.utils = require('./utils.js');

/**
 * The Mongoose browser [Document](/api/document.html) constructor.
 *
 * @method Document
 * @api public
 */
exports.Document = DocumentProvider();

/**
 * Return a new browser model. In the browser, a model is just
 * a simplified document with a schema - it does **not** have
 * functions like `findOne()`, etc.
 *
 * @method model
 * @api public
 * @param {String} name
 * @param {Schema} schema
 * @return Class
 */
exports.model = function(name, schema) {
  class Model extends exports.Document {
    constructor(obj, fields) {
      super(obj, schema, fields);
    }
  }
  Model.modelName = name;

  return Model;
};

/*!
 * Module exports.
 */

if (typeof window !== 'undefined') {
  window.mongoose = module.exports;
  window.Buffer = Buffer;
}

101 node_modules/mongoose/lib/browserDocument.js (generated, vendored, Normal file)
@@ -0,0 +1,101 @@
/*!
 * Module dependencies.
 */

'use strict';

const NodeJSDocument = require('./document');
const EventEmitter = require('events').EventEmitter;
const MongooseError = require('./error/index');
const Schema = require('./schema');
const ObjectId = require('./types/objectid');
const ValidationError = MongooseError.ValidationError;
const applyHooks = require('./helpers/model/applyHooks');
const isObject = require('./helpers/isObject');

/**
 * Document constructor.
 *
 * @param {Object} obj the values to set
 * @param {Object} schema
 * @param {Object} [fields] optional object containing the fields which were selected in the query returning this document and any populated paths data
 * @param {Boolean} [skipId] bool, should we auto create an ObjectId _id
 * @inherits NodeJS EventEmitter https://nodejs.org/api/events.html#class-eventemitter
 * @event `init`: Emitted on a document after it has been retrieved from the db and fully hydrated by Mongoose.
 * @event `save`: Emitted when the document is successfully saved
 * @api private
 */

function Document(obj, schema, fields, skipId, skipInit) {
  if (!(this instanceof Document)) {
    return new Document(obj, schema, fields, skipId, skipInit);
  }

  if (isObject(schema) && !schema.instanceOfSchema) {
    schema = new Schema(schema);
  }

  // When creating an EmbeddedDocument, it already has the schema and it doesn't need the _id
  schema = this.schema || schema;

  // Generate an ObjectId if it is missing, but that requires a schema
  if (!this.schema && schema.options._id) {
    obj = obj || {};

    if (obj._id === undefined) {
      obj._id = new ObjectId();
    }
  }

  if (!schema) {
    throw new MongooseError.MissingSchemaError();
  }

  this.$__setSchema(schema);

  NodeJSDocument.call(this, obj, fields, skipId, skipInit);

  applyHooks(this, schema, { decorateDoc: true });

  // apply methods
  for (const m in schema.methods) {
    this[m] = schema.methods[m];
  }
  // apply statics
  for (const s in schema.statics) {
    this[s] = schema.statics[s];
  }
}

/*!
 * Inherit from the NodeJS document
 */

Document.prototype = Object.create(NodeJSDocument.prototype);
Document.prototype.constructor = Document;

/*!
 * ignore
 */

Document.events = new EventEmitter();

/*!
 * Browser doc exposes the event emitter API
 */

Document.$emitter = new EventEmitter();

['on', 'once', 'emit', 'listeners', 'removeListener', 'setMaxListeners',
  'removeAllListeners', 'addListener'].forEach(function(emitterFn) {
  Document[emitterFn] = function() {
    return Document.$emitter[emitterFn].apply(Document.$emitter, arguments);
  };
});

/*!
 * Module exports.
 */

Document.ValidationError = ValidationError;
module.exports = exports = Document;

402 node_modules/mongoose/lib/cast.js (generated, vendored, Normal file)
@@ -0,0 +1,402 @@
'use strict';

/*!
 * Module dependencies.
 */

const CastError = require('./error/cast');
const StrictModeError = require('./error/strict');
const Types = require('./schema/index');
const cast$expr = require('./helpers/query/cast$expr');
const castTextSearch = require('./schema/operators/text');
const get = require('./helpers/get');
const getConstructorName = require('./helpers/getConstructorName');
const getSchemaDiscriminatorByValue = require('./helpers/discriminator/getSchemaDiscriminatorByValue');
const isOperator = require('./helpers/query/isOperator');
const util = require('util');
const isObject = require('./helpers/isObject');
const isMongooseObject = require('./helpers/isMongooseObject');

const ALLOWED_GEOWITHIN_GEOJSON_TYPES = ['Polygon', 'MultiPolygon'];

/**
 * Handles internal casting for query filters.
 *
 * @param {Schema} schema
 * @param {Object} obj Object to cast
 * @param {Object} [options] the query options
 * @param {Boolean|"throw"} [options.strict] Whether to enable all strict options
 * @param {Boolean|"throw"} [options.strictQuery] Enable strict queries
 * @param {Boolean} [options.upsert]
 * @param {Query} [context] passed to setters
 * @api private
 */
module.exports = function cast(schema, obj, options, context) {
  if (Array.isArray(obj)) {
    throw new Error('Query filter must be an object, got an array ', util.inspect(obj));
  }

  if (obj == null) {
    return obj;
  }

  if (schema != null && schema.discriminators != null && obj[schema.options.discriminatorKey] != null) {
    schema = getSchemaDiscriminatorByValue(schema, obj[schema.options.discriminatorKey]) || schema;
  }

  const paths = Object.keys(obj);
  let i = paths.length;
  let _keys;
  let any$conditionals;
  let schematype;
  let nested;
  let path;
  let type;
  let val;

  options = options || {};

  while (i--) {
    path = paths[i];
    val = obj[path];

    if (path === '$or' || path === '$nor' || path === '$and') {
      if (!Array.isArray(val)) {
        throw new CastError('Array', val, path);
      }
      for (let k = 0; k < val.length; ++k) {
        if (val[k] == null || typeof val[k] !== 'object') {
          throw new CastError('Object', val[k], path + '.' + k);
        }
        val[k] = cast(schema, val[k], options, context);
      }
    } else if (path === '$where') {
      type = typeof val;

      if (type !== 'string' && type !== 'function') {
        throw new Error('Must have a string or function for $where');
      }

      if (type === 'function') {
        obj[path] = val.toString();
      }

      continue;
    } else if (path === '$expr') {
      val = cast$expr(val, schema);
      continue;
    } else if (path === '$elemMatch') {
      val = cast(schema, val, options, context);
    } else if (path === '$text') {
      val = castTextSearch(val, path);
    } else {
      if (!schema) {
        // no casting for Mixed types
        continue;
      }

      schematype = schema.path(path);

      // Check for embedded discriminator paths
      if (!schematype) {
        const split = path.split('.');
        let j = split.length;
        while (j--) {
          const pathFirstHalf = split.slice(0, j).join('.');
          const pathLastHalf = split.slice(j).join('.');
          const _schematype = schema.path(pathFirstHalf);
          const discriminatorKey = _schematype &&
            _schematype.schema &&
            _schematype.schema.options &&
            _schematype.schema.options.discriminatorKey;

          // gh-6027: if we haven't found the schematype but this path is
          // underneath an embedded discriminator and the embedded discriminator
          // key is in the query, use the embedded discriminator schema
          if (_schematype != null &&
              (_schematype.schema && _schematype.schema.discriminators) != null &&
              discriminatorKey != null &&
              pathLastHalf !== discriminatorKey) {
            const discriminatorVal = get(obj, pathFirstHalf + '.' + discriminatorKey);
            if (discriminatorVal != null) {
              schematype = _schematype.schema.discriminators[discriminatorVal].
                path(pathLastHalf);
            }
          }
        }
      }

      if (!schematype) {
        // Handle potential embedded array queries
        const split = path.split('.');
        let j = split.length;
        let pathFirstHalf;
        let pathLastHalf;
        let remainingConds;

        // Find the part of the var path that is a path of the Schema
        while (j--) {
          pathFirstHalf = split.slice(0, j).join('.');
          schematype = schema.path(pathFirstHalf);
          if (schematype) {
            break;
          }
        }

        // If a substring of the input path resolves to an actual real path...
        if (schematype) {
          // Apply the casting; similar code for $elemMatch in schema/array.js
          if (schematype.caster && schematype.caster.schema) {
            remainingConds = {};
            pathLastHalf = split.slice(j).join('.');
            remainingConds[pathLastHalf] = val;

            const ret = cast(schematype.caster.schema, remainingConds, options, context)[pathLastHalf];
            if (ret === void 0) {
              delete obj[path];
            } else {
              obj[path] = ret;
            }
          } else {
            obj[path] = val;
          }
          continue;
        }

        if (isObject(val)) {
          // handle geo schemas that use object notation
          // { loc: { long: Number, lat: Number }

          let geo = '';
          if (val.$near) {
            geo = '$near';
          } else if (val.$nearSphere) {
            geo = '$nearSphere';
          } else if (val.$within) {
            geo = '$within';
          } else if (val.$geoIntersects) {
            geo = '$geoIntersects';
          } else if (val.$geoWithin) {
            geo = '$geoWithin';
          }

          if (geo) {
            const numbertype = new Types.Number('__QueryCasting__');
            let value = val[geo];

            if (val.$maxDistance != null) {
              val.$maxDistance = numbertype.castForQuery(null, val.$maxDistance, context);
            }
            if (val.$minDistance != null) {
              val.$minDistance = numbertype.castForQuery(null, val.$minDistance, context);
            }

            if (geo === '$within') {
              const withinType = value.$center
                || value.$centerSphere
                || value.$box
                || value.$polygon;

              if (!withinType) {
                throw new Error('Bad $within parameter: ' + JSON.stringify(val));
              }

              value = withinType;
            } else if (geo === '$near' &&
                typeof value.type === 'string' && Array.isArray(value.coordinates)) {
              // geojson; cast the coordinates
              value = value.coordinates;
            } else if ((geo === '$near' || geo === '$nearSphere' || geo === '$geoIntersects') &&
                value.$geometry && typeof value.$geometry.type === 'string' &&
                Array.isArray(value.$geometry.coordinates)) {
              if (value.$maxDistance != null) {
                value.$maxDistance = numbertype.castForQuery(null, value.$maxDistance, context);
              }
              if (value.$minDistance != null) {
                value.$minDistance = numbertype.castForQuery(null, value.$minDistance, context);
              }
              if (isMongooseObject(value.$geometry)) {
                value.$geometry = value.$geometry.toObject({
                  transform: false,
                  virtuals: false
                });
              }
              value = value.$geometry.coordinates;
            } else if (geo === '$geoWithin') {
              if (value.$geometry) {
                if (isMongooseObject(value.$geometry)) {
                  value.$geometry = value.$geometry.toObject({ virtuals: false });
                }
                const geoWithinType = value.$geometry.type;
                if (ALLOWED_GEOWITHIN_GEOJSON_TYPES.indexOf(geoWithinType) === -1) {
                  throw new Error('Invalid geoJSON type for $geoWithin "' +
                    geoWithinType + '", must be "Polygon" or "MultiPolygon"');
                }
                value = value.$geometry.coordinates;
              } else {
                value = value.$box || value.$polygon || value.$center ||
                  value.$centerSphere;
                if (isMongooseObject(value)) {
                  value = value.toObject({ virtuals: false });
                }
              }
            }

            _cast(value, numbertype, context);
            continue;
          }
        }

        if (schema.nested[path]) {
          continue;
        }

        const strict = 'strict' in options ? options.strict : schema.options.strict;
        const strictQuery = getStrictQuery(options, schema._userProvidedOptions, schema.options, context);
        if (options.upsert && strict) {
          if (strict === 'throw') {
            throw new StrictModeError(path);
          }
          throw new StrictModeError(path, 'Path "' + path + '" is not in ' +
            'schema, strict mode is `true`, and upsert is `true`.');
        } if (strictQuery === 'throw') {
          throw new StrictModeError(path, 'Path "' + path + '" is not in ' +
            'schema and strictQuery is \'throw\'.');
        } else if (strictQuery) {
          delete obj[path];
        }
      } else if (val == null) {
        continue;
      } else if (getConstructorName(val) === 'Object') {
        any$conditionals = Object.keys(val).some(isOperator);

        if (!any$conditionals) {
          obj[path] = schematype.castForQuery(null, val, context);
        } else {
          const ks = Object.keys(val);
          let $cond;

          let k = ks.length;

          while (k--) {
            $cond = ks[k];
            nested = val[$cond];

            if ($cond === '$not') {
              if (nested && schematype) {
                _keys = Object.keys(nested);
                if (_keys.length && isOperator(_keys[0])) {
                  for (const key in nested) {
                    nested[key] = schematype.castForQuery(key, nested[key], context);
                  }
                } else {
                  val[$cond] = schematype.castForQuery($cond, nested, context);
                }
                continue;
              }
            } else {
              val[$cond] = schematype.castForQuery($cond, nested, context);
            }
          }
        }
      } else if (Array.isArray(val) && ['Buffer', 'Array'].indexOf(schematype.instance) === -1) {
        const casted = [];
        const valuesArray = val;

        for (const _val of valuesArray) {
          casted.push(schematype.castForQuery(null, _val, context));
        }

        obj[path] = { $in: casted };
      } else {
        obj[path] = schematype.castForQuery(null, val, context);
      }
    }
  }

  return obj;
};

function _cast(val, numbertype, context) {
  if (Array.isArray(val)) {
    val.forEach(function(item, i) {
      if (Array.isArray(item) || isObject(item)) {
        return _cast(item, numbertype, context);
      }
      val[i] = numbertype.castForQuery(null, item, context);
    });
  } else {
    const nearKeys = Object.keys(val);
    let nearLen = nearKeys.length;
    while (nearLen--) {
      const nkey = nearKeys[nearLen];
      const item = val[nkey];
      if (Array.isArray(item) || isObject(item)) {
        _cast(item, numbertype, context);
        val[nkey] = item;
      } else {
        val[nkey] = numbertype.castForQuery({ val: item, context: context });
      }
    }
  }
}

function getStrictQuery(queryOptions, schemaUserProvidedOptions, schemaOptions, context) {
  if ('strictQuery' in queryOptions) {
    return queryOptions.strictQuery;
  }
  if ('strictQuery' in schemaUserProvidedOptions) {
    return schemaUserProvidedOptions.strictQuery;
  }
  const mongooseOptions = context &&
    context.mongooseCollection &&
    context.mongooseCollection.conn &&
    context.mongooseCollection.conn.base &&
    context.mongooseCollection.conn.base.options;
  if (mongooseOptions) {
    if ('strictQuery' in mongooseOptions) {
      return mongooseOptions.strictQuery;
    }
  }
  return schemaOptions.strictQuery;
}

32 node_modules/mongoose/lib/cast/boolean.js (generated, vendored, Normal file)
@@ -0,0 +1,32 @@
'use strict';

const CastError = require('../error/cast');

/**
 * Given a value, cast it to a boolean, or throw a `CastError` if the value
 * cannot be casted. `null` and `undefined` are considered valid.
 *
 * @param {Any} value
 * @param {String} [path] optional the path to set on the CastError
 * @return {Boolean|null|undefined}
 * @throws {CastError} if `value` is not one of the allowed values
 * @api private
 */

module.exports = function castBoolean(value, path) {
  if (module.exports.convertToTrue.has(value)) {
    return true;
  }
  if (module.exports.convertToFalse.has(value)) {
    return false;
  }

  if (value == null) {
    return value;
  }

  throw new CastError('boolean', value, path);
};

module.exports.convertToTrue = new Set([true, 'true', 1, '1', 'yes']);
module.exports.convertToFalse = new Set([false, 'false', 0, '0', 'no']);

41 node_modules/mongoose/lib/cast/date.js (generated, vendored, Normal file)
@@ -0,0 +1,41 @@
'use strict';

const assert = require('assert');

module.exports = function castDate(value) {
  // Support empty string because of empty form values. Originally introduced
  // in https://github.com/Automattic/mongoose/commit/efc72a1898fc3c33a319d915b8c5463a22938dfe
  if (value == null || value === '') {
    return null;
  }

  if (value instanceof Date) {
    assert.ok(!isNaN(value.valueOf()));

    return value;
  }

  let date;

  assert.ok(typeof value !== 'boolean');

  if (value instanceof Number || typeof value === 'number') {
    date = new Date(value);
  } else if (typeof value === 'string' && !isNaN(Number(value)) && (Number(value) >= 275761 || Number(value) < -271820)) {
    // string representation of milliseconds take this path
    date = new Date(Number(value));
  } else if (typeof value.valueOf === 'function') {
    // support for moment.js. This is also the path strings will take because
    // strings have a `valueOf()`
    date = new Date(value.valueOf());
  } else {
    // fallback
    date = new Date(value);
  }

  if (!isNaN(date.valueOf())) {
    return date;
  }

  assert.ok(false);
};

39 node_modules/mongoose/lib/cast/decimal128.js (generated, vendored, Normal file)
@@ -0,0 +1,39 @@
'use strict';

const Decimal128Type = require('../types/decimal128');
const assert = require('assert');

module.exports = function castDecimal128(value) {
  if (value == null) {
    return value;
  }

  if (typeof value === 'object' && typeof value.$numberDecimal === 'string') {
    return Decimal128Type.fromString(value.$numberDecimal);
  }

  if (value instanceof Decimal128Type) {
    return value;
  }

  if (typeof value === 'string') {
    return Decimal128Type.fromString(value);
  }

  if (typeof Buffer === 'function' && Buffer.isBuffer(value)) {
    return new Decimal128Type(value);
  }
  if (typeof Uint8Array === 'function' && value instanceof Uint8Array) {
    return new Decimal128Type(value);
  }

  if (typeof value === 'number') {
    return Decimal128Type.fromString(String(value));
  }

  if (typeof value.valueOf === 'function' && typeof value.valueOf() === 'string') {
    return Decimal128Type.fromString(value.valueOf());
  }

  assert.ok(false);
};

42 node_modules/mongoose/lib/cast/number.js (generated, vendored, Normal file)
@@ -0,0 +1,42 @@
'use strict';

const assert = require('assert');

/**
 * Given a value, cast it to a number, or throw an `Error` if the value
 * cannot be casted. `null` and `undefined` are considered valid.
 *
 * @param {Any} value
 * @return {Number}
 * @throws {Error} if `value` is not one of the allowed values
 * @api private
 */

module.exports = function castNumber(val) {
  if (val == null) {
    return val;
  }
  if (val === '') {
    return null;
  }

  if (typeof val === 'string' || typeof val === 'boolean') {
    val = Number(val);
  }

  assert.ok(!isNaN(val));
  if (val instanceof Number) {
    return val.valueOf();
  }
  if (typeof val === 'number') {
    return val;
  }
  if (!Array.isArray(val) && typeof val.valueOf === 'function') {
    return Number(val.valueOf());
  }
  if (val.toString && !Array.isArray(val) && val.toString() == Number(val)) {
    return Number(val);
  }

  assert.ok(false);
};

29 node_modules/mongoose/lib/cast/objectid.js (generated, vendored, Normal file)
@@ -0,0 +1,29 @@
'use strict';

const isBsonType = require('../helpers/isBsonType');
const ObjectId = require('../types/objectid');

module.exports = function castObjectId(value) {
  if (value == null) {
    return value;
  }

  if (isBsonType(value, 'ObjectId')) {
    return value;
  }

  if (value._id) {
    if (isBsonType(value._id, 'ObjectId')) {
      return value._id;
    }
    if (value._id.toString instanceof Function) {
      return new ObjectId(value._id.toString());
    }
  }

  if (value.toString instanceof Function) {
    return new ObjectId(value.toString());
  }

  return new ObjectId(value);
};

37 node_modules/mongoose/lib/cast/string.js (generated, vendored, Normal file)
@@ -0,0 +1,37 @@
'use strict';

const CastError = require('../error/cast');

/**
 * Given a value, cast it to a string, or throw a `CastError` if the value
 * cannot be casted. `null` and `undefined` are considered valid.
 *
 * @param {Any} value
 * @param {String} [path] optional the path to set on the CastError
 * @return {string|null|undefined}
 * @throws {CastError}
 * @api private
 */

module.exports = function castString(value, path) {
  // If null or undefined
  if (value == null) {
    return value;
  }

  // handle documents being passed
  if (value._id && typeof value._id === 'string') {
    return value._id;
  }

  // Re: gh-647 and gh-3030, we're ok with casting using `toString()`
  // **unless** its the default Object.toString, because "[object Object]"
  // doesn't really qualify as useful data
  if (value.toString &&
      value.toString !== Object.prototype.toString &&
      !Array.isArray(value)) {
    return value.toString();
  }

  throw new CastError('string', value, path);
};

327 node_modules/mongoose/lib/collection.js (generated, vendored, Normal file)
@@ -0,0 +1,327 @@
'use strict';

/*!
 * Module dependencies.
 */

const EventEmitter = require('events').EventEmitter;
const STATES = require('./connectionstate');
const immediate = require('./helpers/immediate');

/**
 * Abstract Collection constructor
 *
 * This is the base class that drivers inherit from and implement.
 *
 * @param {String} name name of the collection
 * @param {Connection} conn A MongooseConnection instance
 * @param {Object} [opts] optional collection options
 * @api public
 */

function Collection(name, conn, opts) {
  if (opts === void 0) {
    opts = {};
  }

  this.opts = opts;
  this.name = name;
  this.collectionName = name;
  this.conn = conn;
  this.queue = [];
  this.buffer = true;
  this.emitter = new EventEmitter();

  if (STATES.connected === this.conn.readyState) {
    this.onOpen();
  }
}

/**
 * The collection name
 *
 * @api public
 * @property name
 */

Collection.prototype.name;

/**
 * The collection name
 *
 * @api public
 * @property collectionName
 */

Collection.prototype.collectionName;

/**
 * The Connection instance
 *
 * @api public
 * @property conn
 */

Collection.prototype.conn;

/**
 * Called when the database connects
 *
 * @api private
 */

Collection.prototype.onOpen = function() {
  this.buffer = false;
  immediate(() => this.doQueue());
};

/**
 * Called when the database disconnects
 *
 * @api private
 */

Collection.prototype.onClose = function() {};

/**
 * Queues a method for later execution when its
 * database connection opens.
 *
 * @param {String} name name of the method to queue
 * @param {Array} args arguments to pass to the method when executed
 * @api private
 */

Collection.prototype.addQueue = function(name, args) {
  this.queue.push([name, args]);
  return this;
};

/**
 * Removes a queued method
 *
 * @param {String} name name of the method to queue
 * @param {Array} args arguments to pass to the method when executed
 * @api private
 */

Collection.prototype.removeQueue = function(name, args) {
  const index = this.queue.findIndex(v => v[0] === name && v[1] === args);
  if (index === -1) {
    return false;
  }
  this.queue.splice(index, 1);
  return true;
};

/**
 * Executes all queued methods and clears the queue.
 *
 * @api private
 */

Collection.prototype.doQueue = function() {
  for (const method of this.queue) {
    if (typeof method[0] === 'function') {
      method[0].apply(this, method[1]);
    } else {
      this[method[0]].apply(this, method[1]);
    }
  }
  this.queue = [];
  const _this = this;
  immediate(function() {
    _this.emitter.emit('queue');
  });
  return this;
};

/**
 * Abstract method that drivers must implement.
 */

Collection.prototype.ensureIndex = function() {
  throw new Error('Collection#ensureIndex unimplemented by driver');
};

/**
 * Abstract method that drivers must implement.
 */

Collection.prototype.createIndex = function() {
  throw new Error('Collection#createIndex unimplemented by driver');
};

/**
 * Abstract method that drivers must implement.
 */

Collection.prototype.findAndModify = function() {
  throw new Error('Collection#findAndModify unimplemented by driver');
};

/**
 * Abstract method that drivers must implement.
 */

Collection.prototype.findOneAndUpdate = function() {
  throw new Error('Collection#findOneAndUpdate unimplemented by driver');
};

/**
 * Abstract method that drivers must implement.
 */

Collection.prototype.findOneAndDelete = function() {
  throw new Error('Collection#findOneAndDelete unimplemented by driver');
};

/**
 * Abstract method that drivers must implement.
|
||||
*/
|
||||
|
||||
Collection.prototype.findOneAndReplace = function() {
|
||||
throw new Error('Collection#findOneAndReplace unimplemented by driver');
|
||||
};
|
||||
|
||||
/**
|
||||
* Abstract method that drivers must implement.
|
||||
*/
|
||||
|
||||
Collection.prototype.findOne = function() {
|
||||
throw new Error('Collection#findOne unimplemented by driver');
|
||||
};
|
||||
|
||||
/**
|
||||
* Abstract method that drivers must implement.
|
||||
*/
|
||||
|
||||
Collection.prototype.find = function() {
|
||||
throw new Error('Collection#find unimplemented by driver');
|
||||
};
|
||||
|
||||
/**
|
||||
* Abstract method that drivers must implement.
|
||||
*/
|
||||
|
||||
Collection.prototype.insert = function() {
|
||||
throw new Error('Collection#insert unimplemented by driver');
|
||||
};
|
||||
|
||||
/**
|
||||
* Abstract method that drivers must implement.
|
||||
*/
|
||||
|
||||
Collection.prototype.insertOne = function() {
|
||||
throw new Error('Collection#insertOne unimplemented by driver');
|
||||
};
|
||||
|
||||
/**
|
||||
* Abstract method that drivers must implement.
|
||||
*/
|
||||
|
||||
Collection.prototype.insertMany = function() {
|
||||
throw new Error('Collection#insertMany unimplemented by driver');
|
||||
};
|
||||
|
||||
/**
|
||||
* Abstract method that drivers must implement.
|
||||
*/
|
||||
|
||||
Collection.prototype.save = function() {
|
||||
throw new Error('Collection#save unimplemented by driver');
|
||||
};
|
||||
|
||||
/**
|
||||
* Abstract method that drivers must implement.
|
||||
*/
|
||||
|
||||
Collection.prototype.updateOne = function() {
|
||||
throw new Error('Collection#updateOne unimplemented by driver');
|
||||
};
|
||||
|
||||
/**
|
||||
* Abstract method that drivers must implement.
|
||||
*/
|
||||
|
||||
Collection.prototype.updateMany = function() {
|
||||
throw new Error('Collection#updateMany unimplemented by driver');
|
||||
};
|
||||
|
||||
/**
|
||||
* Abstract method that drivers must implement.
|
||||
*/
|
||||
|
||||
Collection.prototype.deleteOne = function() {
|
||||
throw new Error('Collection#deleteOne unimplemented by driver');
|
||||
};
|
||||
|
||||
/**
|
||||
* Abstract method that drivers must implement.
|
||||
*/
|
||||
|
||||
Collection.prototype.deleteMany = function() {
|
||||
throw new Error('Collection#deleteMany unimplemented by driver');
|
||||
};
|
||||
|
||||
/**
|
||||
* Abstract method that drivers must implement.
|
||||
*/
|
||||
|
||||
Collection.prototype.getIndexes = function() {
|
||||
throw new Error('Collection#getIndexes unimplemented by driver');
|
||||
};
|
||||
|
||||
/**
|
||||
* Abstract method that drivers must implement.
|
||||
*/
|
||||
|
||||
Collection.prototype.watch = function() {
|
||||
throw new Error('Collection#watch unimplemented by driver');
|
||||
};
|
||||
|
||||
/*!
|
||||
* ignore
|
||||
*/
|
||||
|
||||
Collection.prototype._shouldBufferCommands = function _shouldBufferCommands() {
|
||||
const opts = this.opts;
|
||||
|
||||
if (opts.bufferCommands != null) {
|
||||
return opts.bufferCommands;
|
||||
}
|
||||
if (opts && opts.schemaUserProvidedOptions != null && opts.schemaUserProvidedOptions.bufferCommands != null) {
|
||||
return opts.schemaUserProvidedOptions.bufferCommands;
|
||||
}
|
||||
|
||||
return this.conn._shouldBufferCommands();
|
||||
};
|
||||
|
||||
/*!
|
||||
* ignore
|
||||
*/
|
||||
|
||||
Collection.prototype._getBufferTimeoutMS = function _getBufferTimeoutMS() {
|
||||
const conn = this.conn;
|
||||
const opts = this.opts;
|
||||
|
||||
if (opts.bufferTimeoutMS != null) {
|
||||
return opts.bufferTimeoutMS;
|
||||
}
|
||||
if (opts && opts.schemaUserProvidedOptions != null && opts.schemaUserProvidedOptions.bufferTimeoutMS != null) {
|
||||
return opts.schemaUserProvidedOptions.bufferTimeoutMS;
|
||||
}
|
||||
if (conn.config.bufferTimeoutMS != null) {
|
||||
return conn.config.bufferTimeoutMS;
|
||||
}
|
||||
if (conn.base != null && conn.base.get('bufferTimeoutMS') != null) {
|
||||
return conn.base.get('bufferTimeoutMS');
|
||||
}
|
||||
return 10000;
|
||||
};
|
||||
|
||||
/*!
|
||||
* Module exports.
|
||||
*/
|
||||
|
||||
module.exports = Collection;
|
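A minimal sketch of the buffering queue described above, using only this abstract base class. The connection object is a bare stand-in (readyState 0 = disconnected), not a real mongoose Connection, and the `find` stub exists only to show the queued call being replayed:

'use strict';

const Collection = require('./node_modules/mongoose/lib/collection');

// Stand-in connection: readyState 0 means "disconnected", so the collection
// starts out buffering instead of executing commands.
const fakeConn = { readyState: 0 };
const coll = new Collection('things', fakeConn, {});

// Override the abstract stub so there is something to replay.
coll.find = function(filter) { console.log('find ran with', filter); };

// Nothing is connected yet, so queue the call for later.
coll.addQueue('find', [{ name: 'test' }]);

// Later, when the driver reports the connection as open, the queue drains:
// onOpen() flips `buffer` to false and replays the queued `find` on the next tick.
coll.onOpen();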
1579  node_modules/mongoose/lib/connection.js  generated vendored Normal file
File diff suppressed because it is too large
26  node_modules/mongoose/lib/connectionstate.js  generated vendored Normal file
@@ -0,0 +1,26 @@

/*!
 * Connection states
 */

'use strict';

const STATES = module.exports = exports = Object.create(null);

const disconnected = 'disconnected';
const connected = 'connected';
const connecting = 'connecting';
const disconnecting = 'disconnecting';
const uninitialized = 'uninitialized';

STATES[0] = disconnected;
STATES[1] = connected;
STATES[2] = connecting;
STATES[3] = disconnecting;
STATES[99] = uninitialized;

STATES[disconnected] = 0;
STATES[connected] = 1;
STATES[connecting] = 2;
STATES[disconnecting] = 3;
STATES[uninitialized] = 99;
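Because the map goes both ways (number to name and name to number), readyState checks can be written against the named constants. A small illustration, assuming the vendored path resolves from the project root:

'use strict';

const STATES = require('./node_modules/mongoose/lib/connectionstate');

console.log(STATES[1]);            // 'connected'
console.log(STATES.connected);     // 1
console.log(STATES.disconnected);  // 0

// Typical readyState check, mirroring what Collection() does on construction:
const readyState = 1;
if (readyState === STATES.connected) {
  console.log('connection is open');
}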
386  node_modules/mongoose/lib/cursor/AggregationCursor.js  generated vendored Normal file
@@ -0,0 +1,386 @@
/*!
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const MongooseError = require('../error/mongooseError');
|
||||
const Readable = require('stream').Readable;
|
||||
const eachAsync = require('../helpers/cursor/eachAsync');
|
||||
const immediate = require('../helpers/immediate');
|
||||
const util = require('util');
|
||||
|
||||
/**
|
||||
* An AggregationCursor is a concurrency primitive for processing aggregation
|
||||
* results one document at a time. It is analogous to QueryCursor.
|
||||
*
|
||||
* An AggregationCursor fulfills the Node.js streams3 API,
|
||||
* in addition to several other mechanisms for loading documents from MongoDB
|
||||
* one at a time.
|
||||
*
|
||||
* Creating an AggregationCursor executes the model's pre aggregate hooks,
|
||||
* but **not** the model's post aggregate hooks.
|
||||
*
|
||||
* Unless you're an advanced user, do **not** instantiate this class directly.
|
||||
* Use [`Aggregate#cursor()`](/docs/api/aggregate.html#aggregate_Aggregate-cursor) instead.
|
||||
*
|
||||
* @param {Aggregate} agg
|
||||
* @inherits Readable https://nodejs.org/api/stream.html#class-streamreadable
|
||||
* @event `cursor`: Emitted when the cursor is created
|
||||
* @event `error`: Emitted when an error occurred
|
||||
* @event `data`: Emitted when the stream is flowing and the next doc is ready
|
||||
* @event `end`: Emitted when the stream is exhausted
|
||||
* @api public
|
||||
*/
|
||||
|
||||
function AggregationCursor(agg) {
|
||||
// set autoDestroy=true because on node 12 it's by default false
|
||||
// gh-10902 need autoDestroy to destroy correctly and emit 'close' event
|
||||
Readable.call(this, { autoDestroy: true, objectMode: true });
|
||||
|
||||
this.cursor = null;
|
||||
this.agg = agg;
|
||||
this._transforms = [];
|
||||
const model = agg._model;
|
||||
delete agg.options.cursor.useMongooseAggCursor;
|
||||
this._mongooseOptions = {};
|
||||
|
||||
_init(model, this, agg);
|
||||
}
|
||||
|
||||
util.inherits(AggregationCursor, Readable);
|
||||
|
||||
/*!
|
||||
* ignore
|
||||
*/
|
||||
|
||||
function _init(model, c, agg) {
|
||||
if (!model.collection.buffer) {
|
||||
model.hooks.execPre('aggregate', agg, function() {
|
||||
c.cursor = model.collection.aggregate(agg._pipeline, agg.options || {});
|
||||
c.emit('cursor', c.cursor);
|
||||
});
|
||||
} else {
|
||||
model.collection.emitter.once('queue', function() {
|
||||
model.hooks.execPre('aggregate', agg, function() {
|
||||
c.cursor = model.collection.aggregate(agg._pipeline, agg.options || {});
|
||||
c.emit('cursor', c.cursor);
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Necessary to satisfy the Readable API
|
||||
* @method _read
|
||||
* @memberOf AggregationCursor
|
||||
* @instance
|
||||
* @api private
|
||||
*/
|
||||
|
||||
AggregationCursor.prototype._read = function() {
|
||||
const _this = this;
|
||||
_next(this, function(error, doc) {
|
||||
if (error) {
|
||||
return _this.emit('error', error);
|
||||
}
|
||||
if (!doc) {
|
||||
_this.push(null);
|
||||
_this.cursor.close(function(error) {
|
||||
if (error) {
|
||||
return _this.emit('error', error);
|
||||
}
|
||||
});
|
||||
return;
|
||||
}
|
||||
_this.push(doc);
|
||||
});
|
||||
};
|
||||
|
||||
if (Symbol.asyncIterator != null) {
|
||||
const msg = 'Mongoose does not support using async iterators with an ' +
|
||||
'existing aggregation cursor. See https://bit.ly/mongoose-async-iterate-aggregation';
|
||||
|
||||
AggregationCursor.prototype[Symbol.asyncIterator] = function() {
|
||||
throw new MongooseError(msg);
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Registers a transform function which subsequently maps documents retrieved
|
||||
* via the streams interface or `.next()`
|
||||
*
|
||||
* #### Example:
|
||||
*
|
||||
* // Map documents returned by `data` events
|
||||
* Thing.
|
||||
* find({ name: /^hello/ }).
|
||||
* cursor().
|
||||
* map(function (doc) {
|
||||
* doc.foo = "bar";
|
||||
* return doc;
|
||||
* })
|
||||
* on('data', function(doc) { console.log(doc.foo); });
|
||||
*
|
||||
* // Or map documents returned by `.next()`
|
||||
* const cursor = Thing.find({ name: /^hello/ }).
|
||||
* cursor().
|
||||
* map(function (doc) {
|
||||
* doc.foo = "bar";
|
||||
* return doc;
|
||||
* });
|
||||
* cursor.next(function(error, doc) {
|
||||
* console.log(doc.foo);
|
||||
* });
|
||||
*
|
||||
* @param {Function} fn
|
||||
* @return {AggregationCursor}
|
||||
* @memberOf AggregationCursor
|
||||
* @api public
|
||||
* @method map
|
||||
*/
|
||||
|
||||
Object.defineProperty(AggregationCursor.prototype, 'map', {
|
||||
value: function(fn) {
|
||||
this._transforms.push(fn);
|
||||
return this;
|
||||
},
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true
|
||||
});
|
||||
|
||||
/**
|
||||
* Marks this cursor as errored
|
||||
* @method _markError
|
||||
* @instance
|
||||
* @memberOf AggregationCursor
|
||||
* @api private
|
||||
*/
|
||||
|
||||
AggregationCursor.prototype._markError = function(error) {
|
||||
this._error = error;
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
|
||||
* Marks this cursor as closed. Will stop streaming and subsequent calls to
|
||||
* `next()` will error.
|
||||
*
|
||||
* @param {Function} callback
|
||||
* @return {Promise}
|
||||
* @api public
|
||||
* @method close
|
||||
* @emits close
|
||||
* @see AggregationCursor.close https://mongodb.github.io/node-mongodb-native/4.9/classes/AggregationCursor.html#close
|
||||
*/
|
||||
|
||||
AggregationCursor.prototype.close = async function close() {
|
||||
if (typeof arguments[0] === 'function') {
|
||||
throw new MongooseError('AggregationCursor.prototype.close() no longer accepts a callback');
|
||||
}
|
||||
try {
|
||||
await this.cursor.close();
|
||||
} catch (error) {
|
||||
this.listeners('error').length > 0 && this.emit('error', error);
|
||||
throw error;
|
||||
}
|
||||
this.emit('close');
|
||||
};
|
||||
|
||||
/**
|
||||
* Get the next document from this cursor. Will return `null` when there are
|
||||
* no documents left.
|
||||
*
|
||||
* @return {Promise}
|
||||
* @api public
|
||||
* @method next
|
||||
*/
|
||||
|
||||
AggregationCursor.prototype.next = async function next() {
|
||||
if (typeof arguments[0] === 'function') {
|
||||
throw new MongooseError('AggregationCursor.prototype.next() no longer accepts a callback');
|
||||
}
|
||||
return new Promise((resolve, reject) => {
|
||||
_next(this, (err, res) => {
|
||||
if (err != null) {
|
||||
return reject(err);
|
||||
}
|
||||
resolve(res);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Execute `fn` for every document in the cursor. If `fn` returns a promise,
|
||||
* will wait for the promise to resolve before iterating on to the next one.
|
||||
* Returns a promise that resolves when done.
|
||||
*
|
||||
* @param {Function} fn
|
||||
* @param {Object} [options]
|
||||
* @param {Number} [options.parallel] the number of promises to execute in parallel. Defaults to 1.
|
||||
* @param {Function} [callback] executed when all docs have been processed
|
||||
* @return {Promise}
|
||||
* @api public
|
||||
* @method eachAsync
|
||||
*/
|
||||
|
||||
AggregationCursor.prototype.eachAsync = function(fn, opts, callback) {
|
||||
const _this = this;
|
||||
if (typeof opts === 'function') {
|
||||
callback = opts;
|
||||
opts = {};
|
||||
}
|
||||
opts = opts || {};
|
||||
|
||||
return eachAsync(function(cb) { return _next(_this, cb); }, fn, opts, callback);
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns an asyncIterator for use with [`for/await/of` loops](https://thecodebarbarian.com/getting-started-with-async-iterators-in-node-js)
|
||||
* You do not need to call this function explicitly, the JavaScript runtime
|
||||
* will call it for you.
|
||||
*
|
||||
* #### Example:
|
||||
*
|
||||
* // Async iterator without explicitly calling `cursor()`. Mongoose still
|
||||
* // creates an AggregationCursor instance internally.
|
||||
* const agg = Model.aggregate([{ $match: { age: { $gte: 25 } } }]);
|
||||
* for await (const doc of agg) {
|
||||
* console.log(doc.name);
|
||||
* }
|
||||
*
|
||||
* // You can also use an AggregationCursor instance for async iteration
|
||||
* const cursor = Model.aggregate([{ $match: { age: { $gte: 25 } } }]).cursor();
|
||||
* for await (const doc of cursor) {
|
||||
* console.log(doc.name);
|
||||
* }
|
||||
*
|
||||
* Node.js 10.x supports async iterators natively without any flags. You can
|
||||
* enable async iterators in Node.js 8.x using the [`--harmony_async_iteration` flag](https://github.com/tc39/proposal-async-iteration/issues/117#issuecomment-346695187).
|
||||
*
|
||||
* **Note:** This function is not set if `Symbol.asyncIterator` is undefined. If
|
||||
* `Symbol.asyncIterator` is undefined, that means your Node.js version does not
|
||||
* support async iterators.
|
||||
*
|
||||
* @method [Symbol.asyncIterator]
|
||||
* @memberOf AggregationCursor
|
||||
* @instance
|
||||
* @api public
|
||||
*/
|
||||
|
||||
if (Symbol.asyncIterator != null) {
|
||||
AggregationCursor.prototype[Symbol.asyncIterator] = function() {
|
||||
return this.transformNull()._transformForAsyncIterator();
|
||||
};
|
||||
}
|
||||
|
||||
/*!
|
||||
* ignore
|
||||
*/
|
||||
|
||||
AggregationCursor.prototype._transformForAsyncIterator = function() {
|
||||
if (this._transforms.indexOf(_transformForAsyncIterator) === -1) {
|
||||
this.map(_transformForAsyncIterator);
|
||||
}
|
||||
return this;
|
||||
};
|
||||
|
||||
/*!
|
||||
* ignore
|
||||
*/
|
||||
|
||||
AggregationCursor.prototype.transformNull = function(val) {
|
||||
if (arguments.length === 0) {
|
||||
val = true;
|
||||
}
|
||||
this._mongooseOptions.transformNull = val;
|
||||
return this;
|
||||
};
|
||||
|
||||
/*!
|
||||
* ignore
|
||||
*/
|
||||
|
||||
function _transformForAsyncIterator(doc) {
|
||||
return doc == null ? { done: true } : { value: doc, done: false };
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a [cursor flag](https://mongodb.github.io/node-mongodb-native/4.9/classes/AggregationCursor.html#addCursorFlag).
|
||||
* Useful for setting the `noCursorTimeout` and `tailable` flags.
|
||||
*
|
||||
* @param {String} flag
|
||||
* @param {Boolean} value
|
||||
* @return {AggregationCursor} this
|
||||
* @api public
|
||||
* @method addCursorFlag
|
||||
*/
|
||||
|
||||
AggregationCursor.prototype.addCursorFlag = function(flag, value) {
|
||||
const _this = this;
|
||||
_waitForCursor(this, function() {
|
||||
_this.cursor.addCursorFlag(flag, value);
|
||||
});
|
||||
return this;
|
||||
};
|
||||
|
||||
/*!
|
||||
* ignore
|
||||
*/
|
||||
|
||||
function _waitForCursor(ctx, cb) {
|
||||
if (ctx.cursor) {
|
||||
return cb();
|
||||
}
|
||||
ctx.once('cursor', function() {
|
||||
cb();
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the next doc from the underlying cursor and mongooseify it
|
||||
* (populate, etc.)
|
||||
* @param {Any} ctx
|
||||
* @param {Function} cb
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function _next(ctx, cb) {
|
||||
let callback = cb;
|
||||
if (ctx._transforms.length) {
|
||||
callback = function(err, doc) {
|
||||
if (err || (doc === null && !ctx._mongooseOptions.transformNull)) {
|
||||
return cb(err, doc);
|
||||
}
|
||||
cb(err, ctx._transforms.reduce(function(doc, fn) {
|
||||
return fn(doc);
|
||||
}, doc));
|
||||
};
|
||||
}
|
||||
|
||||
if (ctx._error) {
|
||||
return immediate(function() {
|
||||
callback(ctx._error);
|
||||
});
|
||||
}
|
||||
|
||||
if (ctx.cursor) {
|
||||
return ctx.cursor.next().then(
|
||||
doc => {
|
||||
if (!doc) {
|
||||
return callback(null, null);
|
||||
}
|
||||
|
||||
callback(null, doc);
|
||||
},
|
||||
err => callback(err)
|
||||
);
|
||||
} else {
|
||||
ctx.once('cursor', function() {
|
||||
_next(ctx, cb);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = AggregationCursor;
|
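A short usage sketch of the cursor/eachAsync flow documented above, via the public `Aggregate#cursor()` API. The model, schema, and connection string are placeholders:

'use strict';

const mongoose = require('mongoose');

const User = mongoose.model('User', new mongoose.Schema({ name: String, age: Number }));

async function run() {
  await mongoose.connect('mongodb://127.0.0.1:27017/test'); // placeholder URI

  // `cursor()` hands back an AggregationCursor; documents stream one at a time
  // instead of being buffered into a single array.
  const cursor = User.aggregate([{ $match: { age: { $gte: 25 } } }]).cursor();

  await cursor.eachAsync(doc => {
    console.log(doc.name);
  }, { parallel: 2 }); // process up to two documents concurrently

  await mongoose.disconnect();
}

run().catch(console.error);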
151  node_modules/mongoose/lib/cursor/ChangeStream.js  generated vendored Normal file
@@ -0,0 +1,151 @@
'use strict';

/*!
 * Module dependencies.
 */

const EventEmitter = require('events').EventEmitter;

/*!
 * ignore
 */

class ChangeStream extends EventEmitter {
  constructor(changeStreamThunk, pipeline, options) {
    super();

    this.driverChangeStream = null;
    this.closed = false;
    this.bindedEvents = false;
    this.pipeline = pipeline;
    this.options = options;

    if (options && options.hydrate && !options.model) {
      throw new Error(
        'Cannot create change stream with `hydrate: true` ' +
        'unless calling `Model.watch()`'
      );
    }

    // This wrapper is necessary because of buffering.
    changeStreamThunk((err, driverChangeStream) => {
      if (err != null) {
        this.emit('error', err);
        return;
      }

      this.driverChangeStream = driverChangeStream;
      this.emit('ready');
    });
  }

  _bindEvents() {
    if (this.bindedEvents) {
      return;
    }

    this.bindedEvents = true;

    if (this.driverChangeStream == null) {
      this.once('ready', () => {
        this.driverChangeStream.on('close', () => {
          this.closed = true;
        });

        ['close', 'change', 'end', 'error'].forEach(ev => {
          this.driverChangeStream.on(ev, data => {
            // Sometimes Node driver still polls after close, so
            // avoid any uncaught exceptions due to closed change streams
            // See tests for gh-7022
            if (ev === 'error' && this.closed) {
              return;
            }
            if (data != null && data.fullDocument != null && this.options && this.options.hydrate) {
              data.fullDocument = this.options.model.hydrate(data.fullDocument);
            }
            this.emit(ev, data);
          });
        });
      });

      return;
    }

    this.driverChangeStream.on('close', () => {
      this.closed = true;
    });

    ['close', 'change', 'end', 'error'].forEach(ev => {
      this.driverChangeStream.on(ev, data => {
        // Sometimes Node driver still polls after close, so
        // avoid any uncaught exceptions due to closed change streams
        // See tests for gh-7022
        if (ev === 'error' && this.closed) {
          return;
        }
        this.emit(ev, data);
      });
    });
  }

  hasNext(cb) {
    return this.driverChangeStream.hasNext(cb);
  }

  next(cb) {
    if (this.options && this.options.hydrate) {
      if (cb != null) {
        const originalCb = cb;
        cb = (err, data) => {
          if (err != null) {
            return originalCb(err);
          }
          if (data.fullDocument != null) {
            data.fullDocument = this.options.model.hydrate(data.fullDocument);
          }
          return originalCb(null, data);
        };
      }

      let maybePromise = this.driverChangeStream.next(cb);
      if (maybePromise && typeof maybePromise.then === 'function') {
        maybePromise = maybePromise.then(data => {
          if (data.fullDocument != null) {
            data.fullDocument = this.options.model.hydrate(data.fullDocument);
          }
          return data;
        });
      }
      return maybePromise;
    }

    return this.driverChangeStream.next(cb);
  }

  on(event, handler) {
    this._bindEvents();
    return super.on(event, handler);
  }

  once(event, handler) {
    this._bindEvents();
    return super.once(event, handler);
  }

  _queue(cb) {
    this.once('ready', () => cb());
  }

  close() {
    this.closed = true;
    if (this.driverChangeStream) {
      this.driverChangeStream.close();
    }
  }
}

/*!
 * ignore
 */

module.exports = ChangeStream;
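A hedged sketch of how the `hydrate` option surfaces through `Model.watch()`, which is what ultimately constructs this ChangeStream wrapper. The model, URI, and replica-set name are placeholders; change streams only work against a replica set or sharded cluster:

'use strict';

const mongoose = require('mongoose');

const Person = mongoose.model('Person', new mongoose.Schema({ name: String }));

async function run() {
  // Placeholder URI; adjust the replica set name to your deployment.
  await mongoose.connect('mongodb://127.0.0.1:27017/test?replicaSet=rs0');

  // With `hydrate: true`, `fullDocument` on each change event is converted
  // back into a full Person document instead of a plain object.
  const changeStream = Person.watch([], { fullDocument: 'updateLookup', hydrate: true });

  changeStream.on('change', data => {
    if (data.fullDocument != null) {
      console.log(data.fullDocument instanceof Person); // true when hydrated
    }
  });

  await Person.create({ name: 'Ada' }); // triggers an 'insert' change event
}

run().catch(console.error);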
554  node_modules/mongoose/lib/cursor/QueryCursor.js  generated vendored Normal file
@@ -0,0 +1,554 @@
/*!
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const MongooseError = require('../error/mongooseError');
|
||||
const Readable = require('stream').Readable;
|
||||
const eachAsync = require('../helpers/cursor/eachAsync');
|
||||
const helpers = require('../queryhelpers');
|
||||
const immediate = require('../helpers/immediate');
|
||||
const util = require('util');
|
||||
|
||||
/**
|
||||
* A QueryCursor is a concurrency primitive for processing query results
|
||||
* one document at a time. A QueryCursor fulfills the Node.js streams3 API,
|
||||
* in addition to several other mechanisms for loading documents from MongoDB
|
||||
* one at a time.
|
||||
*
|
||||
* QueryCursors execute the model's pre `find` hooks before loading any documents
|
||||
* from MongoDB, and the model's post `find` hooks after loading each document.
|
||||
*
|
||||
* Unless you're an advanced user, do **not** instantiate this class directly.
|
||||
* Use [`Query#cursor()`](/docs/api/query.html#query_Query-cursor) instead.
|
||||
*
|
||||
* @param {Query} query
|
||||
* @param {Object} options query options passed to `.find()`
|
||||
* @inherits Readable https://nodejs.org/api/stream.html#class-streamreadable
|
||||
* @event `cursor`: Emitted when the cursor is created
|
||||
* @event `error`: Emitted when an error occurred
|
||||
* @event `data`: Emitted when the stream is flowing and the next doc is ready
|
||||
* @event `end`: Emitted when the stream is exhausted
|
||||
* @api public
|
||||
*/
|
||||
|
||||
function QueryCursor(query, options) {
|
||||
// set autoDestroy=true because on node 12 it's by default false
|
||||
// gh-10902 need autoDestroy to destroy correctly and emit 'close' event
|
||||
Readable.call(this, { autoDestroy: true, objectMode: true });
|
||||
|
||||
this.cursor = null;
|
||||
this.query = query;
|
||||
const _this = this;
|
||||
const model = query.model;
|
||||
this._mongooseOptions = {};
|
||||
this._transforms = [];
|
||||
this.model = model;
|
||||
this.options = options || {};
|
||||
|
||||
model.hooks.execPre('find', query, (err) => {
|
||||
if (err != null) {
|
||||
_this._markError(err);
|
||||
_this.listeners('error').length > 0 && _this.emit('error', err);
|
||||
return;
|
||||
}
|
||||
this._transforms = this._transforms.concat(query._transforms.slice());
|
||||
if (this.options.transform) {
|
||||
this._transforms.push(options.transform);
|
||||
}
|
||||
// Re: gh-8039, you need to set the `cursor.batchSize` option, top-level
|
||||
// `batchSize` option doesn't work.
|
||||
if (this.options.batchSize) {
|
||||
this.options.cursor = options.cursor || {};
|
||||
this.options.cursor.batchSize = options.batchSize;
|
||||
|
||||
// Max out the number of documents we'll populate in parallel at 5000.
|
||||
this.options._populateBatchSize = Math.min(this.options.batchSize, 5000);
|
||||
}
|
||||
model.collection.find(query._conditions, this.options, (err, cursor) => {
|
||||
if (err != null) {
|
||||
_this._markError(err);
|
||||
_this.listeners('error').length > 0 && _this.emit('error', _this._error);
|
||||
return;
|
||||
}
|
||||
|
||||
if (_this._error) {
|
||||
cursor.close(function() {});
|
||||
_this.listeners('error').length > 0 && _this.emit('error', _this._error);
|
||||
}
|
||||
_this.cursor = cursor;
|
||||
_this.emit('cursor', cursor);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
util.inherits(QueryCursor, Readable);
|
||||
|
||||
/**
|
||||
* Necessary to satisfy the Readable API
|
||||
* @method _read
|
||||
* @memberOf QueryCursor
|
||||
* @instance
|
||||
* @api private
|
||||
*/
|
||||
|
||||
QueryCursor.prototype._read = function() {
|
||||
const _this = this;
|
||||
_next(this, function(error, doc) {
|
||||
if (error) {
|
||||
return _this.emit('error', error);
|
||||
}
|
||||
if (!doc) {
|
||||
_this.push(null);
|
||||
_this.cursor.close(function(error) {
|
||||
if (error) {
|
||||
return _this.emit('error', error);
|
||||
}
|
||||
});
|
||||
return;
|
||||
}
|
||||
_this.push(doc);
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Registers a transform function which subsequently maps documents retrieved
|
||||
* via the streams interface or `.next()`
|
||||
*
|
||||
* #### Example:
|
||||
*
|
||||
* // Map documents returned by `data` events
|
||||
* Thing.
|
||||
* find({ name: /^hello/ }).
|
||||
* cursor().
|
||||
* map(function (doc) {
|
||||
* doc.foo = "bar";
|
||||
* return doc;
|
||||
* })
|
||||
* on('data', function(doc) { console.log(doc.foo); });
|
||||
*
|
||||
* // Or map documents returned by `.next()`
|
||||
* const cursor = Thing.find({ name: /^hello/ }).
|
||||
* cursor().
|
||||
* map(function (doc) {
|
||||
* doc.foo = "bar";
|
||||
* return doc;
|
||||
* });
|
||||
* cursor.next(function(error, doc) {
|
||||
* console.log(doc.foo);
|
||||
* });
|
||||
*
|
||||
* @param {Function} fn
|
||||
* @return {QueryCursor}
|
||||
* @memberOf QueryCursor
|
||||
* @api public
|
||||
* @method map
|
||||
*/
|
||||
|
||||
Object.defineProperty(QueryCursor.prototype, 'map', {
|
||||
value: function(fn) {
|
||||
this._transforms.push(fn);
|
||||
return this;
|
||||
},
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true
|
||||
});
|
||||
|
||||
/**
|
||||
* Marks this cursor as errored
|
||||
* @method _markError
|
||||
* @memberOf QueryCursor
|
||||
* @instance
|
||||
* @api private
|
||||
*/
|
||||
|
||||
QueryCursor.prototype._markError = function(error) {
|
||||
this._error = error;
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
|
||||
* Marks this cursor as closed. Will stop streaming and subsequent calls to
|
||||
* `next()` will error.
|
||||
*
|
||||
* @return {Promise}
|
||||
* @api public
|
||||
* @method close
|
||||
* @emits close
|
||||
* @see AggregationCursor.close https://mongodb.github.io/node-mongodb-native/4.9/classes/AggregationCursor.html#close
|
||||
*/
|
||||
|
||||
QueryCursor.prototype.close = async function close() {
|
||||
if (typeof arguments[0] === 'function') {
|
||||
throw new MongooseError('QueryCursor.prototype.close() no longer accepts a callback');
|
||||
}
|
||||
try {
|
||||
await this.cursor.close();
|
||||
this.emit('close');
|
||||
} catch (error) {
|
||||
this.listeners('error').length > 0 && this.emit('error', error);
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Rewind this cursor to its uninitialized state. Any options that are present on the cursor will
|
||||
* remain in effect. Iterating this cursor will cause new queries to be sent to the server, even
|
||||
* if the resultant data has already been retrieved by this cursor.
|
||||
*
|
||||
* @return {AggregationCursor} this
|
||||
* @api public
|
||||
* @method rewind
|
||||
*/
|
||||
|
||||
QueryCursor.prototype.rewind = function() {
|
||||
const _this = this;
|
||||
_waitForCursor(this, function() {
|
||||
_this.cursor.rewind();
|
||||
});
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
|
||||
* Get the next document from this cursor. Will return `null` when there are
|
||||
* no documents left.
|
||||
*
|
||||
* @return {Promise}
|
||||
* @api public
|
||||
* @method next
|
||||
*/
|
||||
|
||||
QueryCursor.prototype.next = async function next() {
|
||||
if (typeof arguments[0] === 'function') {
|
||||
throw new MongooseError('QueryCursor.prototype.next() no longer accepts a callback');
|
||||
}
|
||||
return new Promise((resolve, reject) => {
|
||||
_next(this, function(error, doc) {
|
||||
if (error) {
|
||||
return reject(error);
|
||||
}
|
||||
resolve(doc);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Execute `fn` for every document in the cursor. If `fn` returns a promise,
|
||||
* will wait for the promise to resolve before iterating on to the next one.
|
||||
* Returns a promise that resolves when done.
|
||||
*
|
||||
* #### Example:
|
||||
*
|
||||
* // Iterate over documents asynchronously
|
||||
* Thing.
|
||||
* find({ name: /^hello/ }).
|
||||
* cursor().
|
||||
* eachAsync(async function (doc, i) {
|
||||
* doc.foo = doc.bar + i;
|
||||
* await doc.save();
|
||||
* })
|
||||
*
|
||||
* @param {Function} fn
|
||||
* @param {Object} [options]
|
||||
* @param {Number} [options.parallel] the number of promises to execute in parallel. Defaults to 1.
|
||||
* @param {Number} [options.batchSize] if set, will call `fn()` with arrays of documents with length at most `batchSize`
|
||||
* @param {Boolean} [options.continueOnError=false] if true, `eachAsync()` iterates through all docs even if `fn` throws an error. If false, `eachAsync()` throws an error immediately if the given function `fn()` throws an error.
|
||||
* @param {Function} [callback] executed when all docs have been processed
|
||||
* @return {Promise}
|
||||
* @api public
|
||||
* @method eachAsync
|
||||
*/
|
||||
|
||||
QueryCursor.prototype.eachAsync = function(fn, opts, callback) {
|
||||
const _this = this;
|
||||
if (typeof opts === 'function') {
|
||||
callback = opts;
|
||||
opts = {};
|
||||
}
|
||||
opts = opts || {};
|
||||
|
||||
return eachAsync(function(cb) { return _next(_this, cb); }, fn, opts, callback);
|
||||
};
|
||||
|
||||
/**
|
||||
* The `options` passed in to the `QueryCursor` constructor.
|
||||
*
|
||||
* @api public
|
||||
* @property options
|
||||
*/
|
||||
|
||||
QueryCursor.prototype.options;
|
||||
|
||||
/**
|
||||
* Adds a [cursor flag](https://mongodb.github.io/node-mongodb-native/4.9/classes/FindCursor.html#addCursorFlag).
|
||||
* Useful for setting the `noCursorTimeout` and `tailable` flags.
|
||||
*
|
||||
* @param {String} flag
|
||||
* @param {Boolean} value
|
||||
* @return {AggregationCursor} this
|
||||
* @api public
|
||||
* @method addCursorFlag
|
||||
*/
|
||||
|
||||
QueryCursor.prototype.addCursorFlag = function(flag, value) {
|
||||
const _this = this;
|
||||
_waitForCursor(this, function() {
|
||||
_this.cursor.addCursorFlag(flag, value);
|
||||
});
|
||||
return this;
|
||||
};
|
||||
|
||||
/*!
|
||||
* ignore
|
||||
*/
|
||||
|
||||
QueryCursor.prototype.transformNull = function(val) {
|
||||
if (arguments.length === 0) {
|
||||
val = true;
|
||||
}
|
||||
this._mongooseOptions.transformNull = val;
|
||||
return this;
|
||||
};
|
||||
|
||||
/*!
|
||||
* ignore
|
||||
*/
|
||||
|
||||
QueryCursor.prototype._transformForAsyncIterator = function() {
|
||||
if (this._transforms.indexOf(_transformForAsyncIterator) === -1) {
|
||||
this.map(_transformForAsyncIterator);
|
||||
}
|
||||
return this;
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns an asyncIterator for use with [`for/await/of` loops](https://thecodebarbarian.com/getting-started-with-async-iterators-in-node-js).
|
||||
* You do not need to call this function explicitly, the JavaScript runtime
|
||||
* will call it for you.
|
||||
*
|
||||
* #### Example:
|
||||
*
|
||||
* // Works without using `cursor()`
|
||||
* for await (const doc of Model.find([{ $sort: { name: 1 } }])) {
|
||||
* console.log(doc.name);
|
||||
* }
|
||||
*
|
||||
* // Can also use `cursor()`
|
||||
* for await (const doc of Model.find([{ $sort: { name: 1 } }]).cursor()) {
|
||||
* console.log(doc.name);
|
||||
* }
|
||||
*
|
||||
* Node.js 10.x supports async iterators natively without any flags. You can
|
||||
* enable async iterators in Node.js 8.x using the [`--harmony_async_iteration` flag](https://github.com/tc39/proposal-async-iteration/issues/117#issuecomment-346695187).
|
||||
*
|
||||
* **Note:** This function is not set if `Symbol.asyncIterator` is undefined. If
|
||||
* `Symbol.asyncIterator` is undefined, that means your Node.js version does not
|
||||
* support async iterators.
|
||||
*
|
||||
* @method [Symbol.asyncIterator]
|
||||
* @memberOf QueryCursor
|
||||
* @instance
|
||||
* @api public
|
||||
*/
|
||||
|
||||
if (Symbol.asyncIterator != null) {
|
||||
QueryCursor.prototype[Symbol.asyncIterator] = function() {
|
||||
return this.transformNull()._transformForAsyncIterator();
|
||||
};
|
||||
}
|
||||
|
||||
/*!
|
||||
* ignore
|
||||
*/
|
||||
|
||||
function _transformForAsyncIterator(doc) {
|
||||
return doc == null ? { done: true } : { value: doc, done: false };
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the next doc from the underlying cursor and mongooseify it
|
||||
* (populate, etc.)
|
||||
* @param {Any} ctx
|
||||
* @param {Function} cb
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function _next(ctx, cb) {
|
||||
let callback = cb;
|
||||
if (ctx._transforms.length) {
|
||||
callback = function(err, doc) {
|
||||
if (err || (doc === null && !ctx._mongooseOptions.transformNull)) {
|
||||
return cb(err, doc);
|
||||
}
|
||||
cb(err, ctx._transforms.reduce(function(doc, fn) {
|
||||
return fn.call(ctx, doc);
|
||||
}, doc));
|
||||
};
|
||||
}
|
||||
|
||||
if (ctx._error) {
|
||||
return immediate(function() {
|
||||
callback(ctx._error);
|
||||
});
|
||||
}
|
||||
|
||||
if (ctx.cursor) {
|
||||
if (ctx.query._mongooseOptions.populate && !ctx._pop) {
|
||||
ctx._pop = helpers.preparePopulationOptionsMQ(ctx.query,
|
||||
ctx.query._mongooseOptions);
|
||||
ctx._pop.__noPromise = true;
|
||||
}
|
||||
if (ctx.query._mongooseOptions.populate && ctx.options._populateBatchSize > 1) {
|
||||
if (ctx._batchDocs && ctx._batchDocs.length) {
|
||||
// Return a cached populated doc
|
||||
return _nextDoc(ctx, ctx._batchDocs.shift(), ctx._pop, callback);
|
||||
} else if (ctx._batchExhausted) {
|
||||
// Internal cursor reported no more docs. Act the same here
|
||||
return callback(null, null);
|
||||
} else {
|
||||
// Request as many docs as batchSize, to populate them also in batch
|
||||
ctx._batchDocs = [];
|
||||
ctx.cursor.next().then(
|
||||
res => { _onNext.call({ ctx, callback }, null, res); },
|
||||
err => { _onNext.call({ ctx, callback }, err); }
|
||||
);
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
return ctx.cursor.next().then(
|
||||
doc => {
|
||||
if (!doc) {
|
||||
callback(null, null);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!ctx.query._mongooseOptions.populate) {
|
||||
return _nextDoc(ctx, doc, null, callback);
|
||||
}
|
||||
|
||||
ctx.query.model.populate(doc, ctx._pop).then(
|
||||
doc => {
|
||||
_nextDoc(ctx, doc, ctx._pop, callback);
|
||||
},
|
||||
err => {
|
||||
callback(err);
|
||||
}
|
||||
);
|
||||
},
|
||||
error => {
|
||||
callback(error);
|
||||
}
|
||||
);
|
||||
}
|
||||
} else {
|
||||
ctx.once('error', cb);
|
||||
|
||||
ctx.once('cursor', function(cursor) {
|
||||
ctx.removeListener('error', cb);
|
||||
if (cursor == null) {
|
||||
return;
|
||||
}
|
||||
_next(ctx, cb);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/*!
|
||||
* ignore
|
||||
*/
|
||||
|
||||
function _onNext(error, doc) {
|
||||
if (error) {
|
||||
return this.callback(error);
|
||||
}
|
||||
if (!doc) {
|
||||
this.ctx._batchExhausted = true;
|
||||
return _populateBatch.call(this);
|
||||
}
|
||||
|
||||
this.ctx._batchDocs.push(doc);
|
||||
|
||||
if (this.ctx._batchDocs.length < this.ctx.options._populateBatchSize) {
|
||||
// If both `batchSize` and `_populateBatchSize` are huge, calling `next()` repeatedly may
|
||||
// cause a stack overflow. So make sure we clear the stack regularly.
|
||||
if (this.ctx._batchDocs.length > 0 && this.ctx._batchDocs.length % 1000 === 0) {
|
||||
return immediate(() => this.ctx.cursor.next().then(
|
||||
res => { _onNext.call(this, null, res); },
|
||||
err => { _onNext.call(this, err); }
|
||||
));
|
||||
}
|
||||
this.ctx.cursor.next().then(
|
||||
res => { _onNext.call(this, null, res); },
|
||||
err => { _onNext.call(this, err); }
|
||||
);
|
||||
} else {
|
||||
_populateBatch.call(this);
|
||||
}
|
||||
}
|
||||
|
||||
/*!
|
||||
* ignore
|
||||
*/
|
||||
|
||||
function _populateBatch() {
|
||||
if (!this.ctx._batchDocs.length) {
|
||||
return this.callback(null, null);
|
||||
}
|
||||
const _this = this;
|
||||
this.ctx.query.model.populate(this.ctx._batchDocs, this.ctx._pop).then(
|
||||
() => {
|
||||
_nextDoc(_this.ctx, _this.ctx._batchDocs.shift(), _this.ctx._pop, _this.callback);
|
||||
},
|
||||
err => {
|
||||
_this.callback(err);
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
/*!
|
||||
* ignore
|
||||
*/
|
||||
|
||||
function _nextDoc(ctx, doc, pop, callback) {
|
||||
if (ctx.query._mongooseOptions.lean) {
|
||||
return ctx.model.hooks.execPost('find', ctx.query, [[doc]], err => {
|
||||
if (err != null) {
|
||||
return callback(err);
|
||||
}
|
||||
callback(null, doc);
|
||||
});
|
||||
}
|
||||
|
||||
const { model, _fields, _userProvidedFields, options } = ctx.query;
|
||||
helpers.createModelAndInit(model, doc, _fields, _userProvidedFields, options, pop, (err, doc) => {
|
||||
if (err != null) {
|
||||
return callback(err);
|
||||
}
|
||||
ctx.model.hooks.execPost('find', ctx.query, [[doc]], err => {
|
||||
if (err != null) {
|
||||
return callback(err);
|
||||
}
|
||||
callback(null, doc);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/*!
|
||||
* ignore
|
||||
*/
|
||||
|
||||
function _waitForCursor(ctx, cb) {
|
||||
if (ctx.cursor) {
|
||||
return cb();
|
||||
}
|
||||
ctx.once('cursor', function(cursor) {
|
||||
if (cursor == null) {
|
||||
return;
|
||||
}
|
||||
cb();
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = QueryCursor;
|
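A short sketch of the `batchSize` and async-iteration behavior documented above, via the public `Query#cursor()` API. The model and connection string are placeholders:

'use strict';

const mongoose = require('mongoose');

const Thing = mongoose.model('Thing', new mongoose.Schema({ name: String, bar: Number }));

async function run() {
  await mongoose.connect('mongodb://127.0.0.1:27017/test'); // placeholder URI

  // `batchSize` is passed through to the underlying MongoDB cursor, and also
  // caps how many documents get populated in one go (see `_populateBatchSize`).
  const cursor = Thing.find({ name: /^hello/ }).cursor({ batchSize: 100 });

  for await (const doc of cursor) {
    console.log(doc.name);
  }

  await mongoose.disconnect();
}

run().catch(console.error);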
4653  node_modules/mongoose/lib/document.js  generated vendored Normal file
File diff suppressed because it is too large
30  node_modules/mongoose/lib/document_provider.js  generated vendored Normal file
@@ -0,0 +1,30 @@
'use strict';

/* eslint-env browser */

/*!
 * Module dependencies.
 */
const Document = require('./document.js');
const BrowserDocument = require('./browserDocument.js');

let isBrowser = false;

/**
 * Returns the Document constructor for the current context
 *
 * @api private
 */
module.exports = function() {
  if (isBrowser) {
    return BrowserDocument;
  }
  return Document;
};

/*!
 * ignore
 */
module.exports.setBrowser = function(flag) {
  isBrowser = flag;
};
15  node_modules/mongoose/lib/driver.js  generated vendored Normal file
@@ -0,0 +1,15 @@
'use strict';

/*!
 * ignore
 */

let driver = null;

module.exports.get = function() {
  return driver;
};

module.exports.set = function(v) {
  driver = v;
};
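The driver module is just a module-level singleton with a getter and setter. A tiny illustration (the stub object is purely hypothetical; mongoose itself stores its node-mongodb-native driver wrapper here):

'use strict';

const driver = require('./node_modules/mongoose/lib/driver');

console.log(driver.get()); // null until something calls `set()`

driver.set({ name: 'stub-driver' });
console.log(driver.get().name); // 'stub-driver'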
4  node_modules/mongoose/lib/drivers/SPEC.md  generated vendored Normal file
@@ -0,0 +1,4 @@

# Driver Spec

TODO
14  node_modules/mongoose/lib/drivers/browser/binary.js  generated vendored Normal file
@@ -0,0 +1,14 @@

/*!
 * Module dependencies.
 */

'use strict';

const Binary = require('bson').Binary;

/*!
 * Module exports.
 */

module.exports = exports = Binary;
7  node_modules/mongoose/lib/drivers/browser/decimal128.js  generated vendored Normal file
@@ -0,0 +1,7 @@
/*!
 * ignore
 */

'use strict';

module.exports = require('bson').Decimal128;
12  node_modules/mongoose/lib/drivers/browser/index.js  generated vendored Normal file
@@ -0,0 +1,12 @@
/*!
 * Module exports.
 */

'use strict';

exports.Collection = function() {
  throw new Error('Cannot create a collection from browser library');
};
exports.Connection = function() {
  throw new Error('Cannot create a connection from browser library');
};
29  node_modules/mongoose/lib/drivers/browser/objectid.js  generated vendored Normal file
@@ -0,0 +1,29 @@

/*!
 * [node-mongodb-native](https://github.com/mongodb/node-mongodb-native) ObjectId
 * @constructor NodeMongoDbObjectId
 * @see ObjectId
 */

'use strict';

const ObjectId = require('bson').ObjectID;

/**
 * Getter for convenience with populate, see gh-6115
 * @api private
 */

Object.defineProperty(ObjectId.prototype, '_id', {
  enumerable: false,
  configurable: true,
  get: function() {
    return this;
  }
});

/*!
 * ignore
 */

module.exports = exports = ObjectId;
455  node_modules/mongoose/lib/drivers/node-mongodb-native/collection.js  generated vendored Normal file
@@ -0,0 +1,455 @@
'use strict';
|
||||
|
||||
/*!
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
const MongooseCollection = require('../../collection');
|
||||
const MongooseError = require('../../error/mongooseError');
|
||||
const Collection = require('mongodb').Collection;
|
||||
const ObjectId = require('../../types/objectid');
|
||||
const getConstructorName = require('../../helpers/getConstructorName');
|
||||
const stream = require('stream');
|
||||
const util = require('util');
|
||||
|
||||
/**
|
||||
* A [node-mongodb-native](https://github.com/mongodb/node-mongodb-native) collection implementation.
|
||||
*
|
||||
* All methods from the [node-mongodb-native](https://github.com/mongodb/node-mongodb-native) driver are copied and wrapped in queue management.
|
||||
*
|
||||
* @inherits Collection https://mongodb.github.io/node-mongodb-native/4.9/classes/Collection.html
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function NativeCollection(name, conn, options) {
|
||||
this.collection = null;
|
||||
this.Promise = options.Promise || Promise;
|
||||
this.modelName = options.modelName;
|
||||
delete options.modelName;
|
||||
this._closed = false;
|
||||
MongooseCollection.apply(this, arguments);
|
||||
}
|
||||
|
||||
/*!
|
||||
* Inherit from abstract Collection.
|
||||
*/
|
||||
|
||||
Object.setPrototypeOf(NativeCollection.prototype, MongooseCollection.prototype);
|
||||
|
||||
/**
|
||||
* Called when the connection opens.
|
||||
*
|
||||
* @api private
|
||||
*/
|
||||
|
||||
NativeCollection.prototype.onOpen = function() {
|
||||
this.collection = this.conn.db.collection(this.name);
|
||||
MongooseCollection.prototype.onOpen.call(this);
|
||||
return this.collection;
|
||||
};
|
||||
|
||||
/**
|
||||
* Called when the connection closes
|
||||
*
|
||||
* @api private
|
||||
*/
|
||||
|
||||
NativeCollection.prototype.onClose = function(force) {
|
||||
MongooseCollection.prototype.onClose.call(this, force);
|
||||
};
|
||||
|
||||
/**
|
||||
* Helper to get the collection, in case `this.collection` isn't set yet.
|
||||
* May happen if `bufferCommands` is false and created the model when
|
||||
* Mongoose was disconnected.
|
||||
*
|
||||
* @api private
|
||||
*/
|
||||
|
||||
NativeCollection.prototype._getCollection = function _getCollection() {
|
||||
if (this.collection) {
|
||||
return this.collection;
|
||||
}
|
||||
if (this.conn.db != null) {
|
||||
this.collection = this.conn.db.collection(this.name);
|
||||
return this.collection;
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
/*!
|
||||
* ignore
|
||||
*/
|
||||
|
||||
const syncCollectionMethods = { watch: true, find: true, aggregate: true };
|
||||
|
||||
/**
|
||||
* Copy the collection methods and make them subject to queues
|
||||
* @param {Number|String} I
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function iter(i) {
|
||||
NativeCollection.prototype[i] = function() {
|
||||
const collection = this._getCollection();
|
||||
const args = Array.from(arguments);
|
||||
const _this = this;
|
||||
const globalDebug = _this &&
|
||||
_this.conn &&
|
||||
_this.conn.base &&
|
||||
_this.conn.base.options &&
|
||||
_this.conn.base.options.debug;
|
||||
const connectionDebug = _this &&
|
||||
_this.conn &&
|
||||
_this.conn.options &&
|
||||
_this.conn.options.debug;
|
||||
const debug = connectionDebug == null ? globalDebug : connectionDebug;
|
||||
const lastArg = arguments[arguments.length - 1];
|
||||
const opId = new ObjectId();
|
||||
|
||||
// If user force closed, queueing will hang forever. See #5664
|
||||
if (this.conn.$wasForceClosed) {
|
||||
const error = new MongooseError('Connection was force closed');
|
||||
if (args.length > 0 &&
|
||||
typeof args[args.length - 1] === 'function') {
|
||||
args[args.length - 1](error);
|
||||
return;
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
let _args = args;
|
||||
let callback = null;
|
||||
if (this._shouldBufferCommands() && this.buffer) {
|
||||
this.conn.emit('buffer', {
|
||||
_id: opId,
|
||||
modelName: _this.modelName,
|
||||
collectionName: _this.name,
|
||||
method: i,
|
||||
args: args
|
||||
});
|
||||
|
||||
let callback;
|
||||
let _args = args;
|
||||
let promise = null;
|
||||
let timeout = null;
|
||||
if (syncCollectionMethods[i] && typeof lastArg === 'function') {
|
||||
this.addQueue(() => {
|
||||
lastArg.call(this, null, this[i].apply(this, _args.slice(0, _args.length - 1)));
|
||||
}, []);
|
||||
} else if (syncCollectionMethods[i]) {
|
||||
promise = new Promise((resolve, reject) => {
|
||||
callback = function collectionOperationCallback(err, res) {
|
||||
if (timeout != null) {
|
||||
clearTimeout(timeout);
|
||||
}
|
||||
if (err != null) {
|
||||
return reject(err);
|
||||
}
|
||||
resolve(res);
|
||||
};
|
||||
_args = args.concat([callback]);
|
||||
this.addQueue(i, _args);
|
||||
});
|
||||
} else if (typeof lastArg === 'function') {
|
||||
callback = function collectionOperationCallback() {
|
||||
if (timeout != null) {
|
||||
clearTimeout(timeout);
|
||||
}
|
||||
return lastArg.apply(this, arguments);
|
||||
};
|
||||
_args = args.slice(0, args.length - 1).concat([callback]);
|
||||
} else {
|
||||
promise = new Promise((resolve, reject) => {
|
||||
callback = function collectionOperationCallback(err, res) {
|
||||
if (timeout != null) {
|
||||
clearTimeout(timeout);
|
||||
}
|
||||
if (err != null) {
|
||||
return reject(err);
|
||||
}
|
||||
resolve(res);
|
||||
};
|
||||
_args = args.concat([callback]);
|
||||
this.addQueue(i, _args);
|
||||
});
|
||||
}
|
||||
|
||||
const bufferTimeoutMS = this._getBufferTimeoutMS();
|
||||
timeout = setTimeout(() => {
|
||||
const removed = this.removeQueue(i, _args);
|
||||
if (removed) {
|
||||
const message = 'Operation `' + this.name + '.' + i + '()` buffering timed out after ' +
|
||||
bufferTimeoutMS + 'ms';
|
||||
const err = new MongooseError(message);
|
||||
this.conn.emit('buffer-end', { _id: opId, modelName: _this.modelName, collectionName: _this.name, method: i, error: err });
|
||||
callback(err);
|
||||
}
|
||||
}, bufferTimeoutMS);
|
||||
|
||||
if (!syncCollectionMethods[i] && typeof lastArg === 'function') {
|
||||
this.addQueue(i, _args);
|
||||
return;
|
||||
}
|
||||
|
||||
return promise;
|
||||
} else if (!syncCollectionMethods[i] && typeof lastArg === 'function') {
|
||||
callback = function collectionOperationCallback(err, res) {
|
||||
if (err != null) {
|
||||
_this.conn.emit('operation-end', { _id: opId, modelName: _this.modelName, collectionName: _this.name, method: i, error: err });
|
||||
} else {
|
||||
_this.conn.emit('operation-end', { _id: opId, modelName: _this.modelName, collectionName: _this.name, method: i, result: res });
|
||||
}
|
||||
return lastArg.apply(this, arguments);
|
||||
};
|
||||
_args = args.slice(0, args.length - 1).concat([callback]);
|
||||
}
|
||||
|
||||
if (debug) {
|
||||
if (typeof debug === 'function') {
|
||||
debug.apply(_this,
|
||||
[_this.name, i].concat(args.slice(0, args.length - 1)));
|
||||
} else if (debug instanceof stream.Writable) {
|
||||
this.$printToStream(_this.name, i, args, debug);
|
||||
} else {
|
||||
const color = debug.color == null ? true : debug.color;
|
||||
const shell = debug.shell == null ? false : debug.shell;
|
||||
this.$print(_this.name, i, args, color, shell);
|
||||
}
|
||||
}
|
||||
|
||||
this.conn.emit('operation-start', { _id: opId, modelName: _this.modelName, collectionName: this.name, method: i, params: _args });
|
||||
|
||||
try {
|
||||
if (collection == null) {
|
||||
const message = 'Cannot call `' + this.name + '.' + i + '()` before initial connection ' +
|
||||
'is complete if `bufferCommands = false`. Make sure you `await mongoose.connect()` if ' +
|
||||
'you have `bufferCommands = false`.';
|
||||
throw new MongooseError(message);
|
||||
}
|
||||
|
||||
if (syncCollectionMethods[i] && typeof lastArg === 'function') {
|
||||
const ret = collection[i].apply(collection, _args.slice(0, _args.length - 1));
|
||||
return lastArg.call(this, null, ret);
|
||||
}
|
||||
|
||||
const ret = collection[i].apply(collection, _args);
|
||||
if (ret != null && typeof ret.then === 'function') {
|
||||
return ret.then(
|
||||
res => {
|
||||
typeof lastArg === 'function' && lastArg(null, res);
|
||||
return res;
|
||||
},
|
||||
err => {
|
||||
if (typeof lastArg === 'function') {
|
||||
lastArg(err);
|
||||
return;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
);
|
||||
}
|
||||
return ret;
|
||||
} catch (error) {
|
||||
// Collection operation may throw because of max bson size, catch it here
|
||||
// See gh-3906
|
||||
if (typeof lastArg === 'function') {
|
||||
return lastArg(error);
|
||||
} else {
|
||||
this.conn.emit('operation-end', { _id: opId, modelName: _this.modelName, collectionName: this.name, method: i, error: error });
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
for (const key of Object.getOwnPropertyNames(Collection.prototype)) {
|
||||
// Janky hack to work around gh-3005 until we can get rid of the mongoose
|
||||
// collection abstraction
|
||||
const descriptor = Object.getOwnPropertyDescriptor(Collection.prototype, key);
|
||||
// Skip properties with getters because they may throw errors (gh-8528)
|
||||
if (descriptor.get !== undefined) {
|
||||
continue;
|
||||
}
|
||||
if (typeof Collection.prototype[key] !== 'function') {
|
||||
continue;
|
||||
}
|
||||
|
||||
iter(key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Debug print helper
|
||||
*
|
||||
* @api public
|
||||
* @method $print
|
||||
*/
|
||||
|
||||
NativeCollection.prototype.$print = function(name, i, args, color, shell) {
|
||||
const moduleName = color ? '\x1B[0;36mMongoose:\x1B[0m ' : 'Mongoose: ';
|
||||
const functionCall = [name, i].join('.');
|
||||
const _args = [];
|
||||
for (let j = args.length - 1; j >= 0; --j) {
|
||||
if (this.$format(args[j]) || _args.length) {
|
||||
_args.unshift(this.$format(args[j], color, shell));
|
||||
}
|
||||
}
|
||||
const params = '(' + _args.join(', ') + ')';
|
||||
|
||||
console.info(moduleName + functionCall + params);
|
||||
};
|
||||
|
||||
/**
|
||||
* Debug print helper
|
||||
*
|
||||
* @api public
|
||||
* @method $print
|
||||
*/
|
||||
|
||||
NativeCollection.prototype.$printToStream = function(name, i, args, stream) {
|
||||
const functionCall = [name, i].join('.');
|
||||
const _args = [];
|
||||
for (let j = args.length - 1; j >= 0; --j) {
|
||||
if (this.$format(args[j]) || _args.length) {
|
||||
_args.unshift(this.$format(args[j]));
|
||||
}
|
||||
}
|
||||
const params = '(' + _args.join(', ') + ')';
|
||||
|
||||
stream.write(functionCall + params, 'utf8');
|
||||
};
|
||||
|
||||
/**
|
||||
* Formatter for debug print args
|
||||
*
|
||||
* @api public
|
||||
* @method $format
|
||||
*/
|
||||
|
||||
NativeCollection.prototype.$format = function(arg, color, shell) {
|
||||
const type = typeof arg;
|
||||
if (type === 'function' || type === 'undefined') return '';
|
||||
return format(arg, false, color, shell);
|
||||
};
|
||||
|
||||
/**
|
||||
* Debug print helper
|
||||
* @param {Any} representation
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function inspectable(representation) {
|
||||
const ret = {
|
||||
inspect: function() { return representation; }
|
||||
};
|
||||
if (util.inspect.custom) {
|
||||
ret[util.inspect.custom] = ret.inspect;
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
function map(o) {
|
||||
return format(o, true);
|
||||
}
|
||||
function formatObjectId(x, key) {
|
||||
x[key] = inspectable('ObjectId("' + x[key].toHexString() + '")');
|
||||
}
|
||||
function formatDate(x, key, shell) {
|
||||
if (shell) {
|
||||
x[key] = inspectable('ISODate("' + x[key].toUTCString() + '")');
|
||||
} else {
|
||||
x[key] = inspectable('new Date("' + x[key].toUTCString() + '")');
|
||||
}
|
||||
}
|
||||
function format(obj, sub, color, shell) {
|
||||
if (obj && typeof obj.toBSON === 'function') {
|
||||
obj = obj.toBSON();
|
||||
}
|
||||
if (obj == null) {
|
||||
return obj;
|
||||
}
|
||||
|
||||
const clone = require('../../helpers/clone');
|
||||
let x = clone(obj, { transform: false });
|
||||
const constructorName = getConstructorName(x);
|
||||
|
||||
if (constructorName === 'Binary') {
|
||||
x = 'BinData(' + x.sub_type + ', "' + x.toString('base64') + '")';
|
||||
} else if (constructorName === 'ObjectId') {
|
||||
x = inspectable('ObjectId("' + x.toHexString() + '")');
|
||||
} else if (constructorName === 'Date') {
|
||||
x = inspectable('new Date("' + x.toUTCString() + '")');
|
||||
} else if (constructorName === 'Object') {
|
||||
const keys = Object.keys(x);
|
||||
const numKeys = keys.length;
|
||||
let key;
|
||||
for (let i = 0; i < numKeys; ++i) {
|
||||
key = keys[i];
|
||||
if (x[key]) {
|
||||
let error;
|
||||
if (typeof x[key].toBSON === 'function') {
|
||||
try {
|
||||
// `session.toBSON()` throws an error. This means we throw errors
|
||||
// in debug mode when using transactions, see gh-6712. As a
|
||||
// workaround, catch `toBSON()` errors, try to serialize without
|
||||
// `toBSON()`, and rethrow if serialization still fails.
|
||||
x[key] = x[key].toBSON();
|
||||
} catch (_error) {
|
||||
error = _error;
|
||||
}
|
||||
}
|
||||
const _constructorName = getConstructorName(x[key]);
|
||||
if (_constructorName === 'Binary') {
|
||||
x[key] = 'BinData(' + x[key].sub_type + ', "' +
|
||||
x[key].buffer.toString('base64') + '")';
|
||||
} else if (_constructorName === 'Object') {
|
||||
x[key] = format(x[key], true);
|
||||
} else if (_constructorName === 'ObjectId') {
|
||||
formatObjectId(x, key);
|
||||
} else if (_constructorName === 'Date') {
|
||||
formatDate(x, key, shell);
|
||||
} else if (_constructorName === 'ClientSession') {
|
||||
x[key] = inspectable('ClientSession("' +
|
||||
(
|
||||
x[key] &&
|
||||
x[key].id &&
|
||||
x[key].id.id &&
|
||||
x[key].id.id.buffer || ''
|
||||
).toString('hex') + '")');
|
||||
} else if (Array.isArray(x[key])) {
|
||||
x[key] = x[key].map(map);
|
||||
} else if (error != null) {
|
||||
// If there was an error with `toBSON()` and the object wasn't
|
||||
// already converted to a string representation, rethrow it.
|
||||
// Open to better ideas on how to handle this.
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (sub) {
|
||||
return x;
|
||||
}
|
||||
|
||||
return util.
|
||||
inspect(x, false, 10, color).
|
||||
replace(/\n/g, '').
|
||||
replace(/\s{2,}/g, ' ');
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves information about this collections indexes.
|
||||
*
|
||||
* @param {Function} callback
|
||||
* @method getIndexes
|
||||
* @api public
|
||||
*/
|
||||
|
||||
NativeCollection.prototype.getIndexes = NativeCollection.prototype.indexInformation;
|
||||
|
||||
/*!
|
||||
* Module exports.
|
||||
*/
|
||||
|
||||
module.exports = NativeCollection;
|
162
node_modules/mongoose/lib/drivers/node-mongodb-native/connection.js
generated
vendored
Normal file
@@ -0,0 +1,162 @@
/*!
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const MongooseConnection = require('../../connection');
|
||||
const STATES = require('../../connectionstate');
|
||||
const setTimeout = require('../../helpers/timers').setTimeout;
|
||||
|
||||
/**
|
||||
* A [node-mongodb-native](https://github.com/mongodb/node-mongodb-native) connection implementation.
|
||||
*
|
||||
* @inherits Connection
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function NativeConnection() {
|
||||
MongooseConnection.apply(this, arguments);
|
||||
this._listening = false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Expose the possible connection states.
|
||||
* @api public
|
||||
*/
|
||||
|
||||
NativeConnection.STATES = STATES;
|
||||
|
||||
/*!
|
||||
* Inherits from Connection.
|
||||
*/
|
||||
|
||||
Object.setPrototypeOf(NativeConnection.prototype, MongooseConnection.prototype);
|
||||
|
||||
/**
|
||||
* Switches to a different database using the same connection pool.
|
||||
*
|
||||
* Returns a new connection object, with the new db. If you set the `useCache`
|
||||
* option, `useDb()` will cache connections by `name`.
|
||||
*
|
||||
* **Note:** Calling `close()` on a `useDb()` connection will close the base connection as well.
|
||||
*
|
||||
* @param {String} name The database name
|
||||
* @param {Object} [options]
|
||||
* @param {Boolean} [options.useCache=false] If true, cache results so calling `useDb()` multiple times with the same name only creates 1 connection object.
|
||||
* @param {Boolean} [options.noListener=false] If true, the new connection object won't listen to any events on the base connection. This is better for memory usage in cases where you're calling `useDb()` for every request.
|
||||
* @return {Connection} New Connection Object
|
||||
* @api public
|
||||
*/
|
||||
|
||||
NativeConnection.prototype.useDb = function(name, options) {
|
||||
// Return immediately if cached
|
||||
options = options || {};
|
||||
if (options.useCache && this.relatedDbs[name]) {
|
||||
return this.relatedDbs[name];
|
||||
}
|
||||
|
||||
// we have to manually copy all of the attributes...
|
||||
const newConn = new this.constructor();
|
||||
newConn.name = name;
|
||||
newConn.base = this.base;
|
||||
newConn.collections = {};
|
||||
newConn.models = {};
|
||||
newConn.replica = this.replica;
|
||||
newConn.config = Object.assign({}, this.config, newConn.config);
|
||||
newConn.name = this.name;
|
||||
newConn.options = this.options;
|
||||
newConn._readyState = this._readyState;
|
||||
newConn._closeCalled = this._closeCalled;
|
||||
newConn._hasOpened = this._hasOpened;
|
||||
newConn._listening = false;
|
||||
newConn._parent = this;
|
||||
|
||||
newConn.host = this.host;
|
||||
newConn.port = this.port;
|
||||
newConn.user = this.user;
|
||||
newConn.pass = this.pass;
|
||||
|
||||
// First, when we create another db object, we are not guaranteed to have a
|
||||
// db object to work with. So, in the case where we have a db object and it
|
||||
// is connected, we can just proceed with setting everything up. However, if
|
||||
// we do not have a db or the state is not connected, then we need to wait on
|
||||
// the 'open' event of the connection before doing the rest of the setup
|
||||
// the 'connected' event is the first time we'll have access to the db object
|
||||
|
||||
const _this = this;
|
||||
|
||||
newConn.client = _this.client;
|
||||
|
||||
if (this.db && this._readyState === STATES.connected) {
|
||||
wireup();
|
||||
} else {
|
||||
this.once('connected', wireup);
|
||||
}
|
||||
|
||||
function wireup() {
|
||||
newConn.client = _this.client;
|
||||
const _opts = {};
|
||||
if (options.hasOwnProperty('noListener')) {
|
||||
_opts.noListener = options.noListener;
|
||||
}
|
||||
newConn.db = _this.client.db(name, _opts);
|
||||
newConn.onOpen();
|
||||
}
|
||||
|
||||
newConn.name = name;
|
||||
|
||||
// push onto the otherDbs stack, this is used when state changes
|
||||
if (options.noListener !== true) {
|
||||
this.otherDbs.push(newConn);
|
||||
}
|
||||
newConn.otherDbs.push(this);
|
||||
|
||||
// push onto the relatedDbs cache, this is used when state changes
|
||||
if (options && options.useCache) {
|
||||
this.relatedDbs[newConn.name] = newConn;
|
||||
newConn.relatedDbs = this.relatedDbs;
|
||||
}
|
||||
|
||||
return newConn;
|
||||
};
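// Usage sketch (not part of the original file; assumes an already-connected
// `conn` and a hypothetical database name `logs`):
//
//   const logsDb = conn.useDb('logs', { useCache: true });
//   // With `useCache`, repeated calls for the same name return the cached object:
//   logsDb === conn.useDb('logs', { useCache: true }); // true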
|
||||
|
||||
/**
|
||||
* Closes the connection
|
||||
*
|
||||
* @param {Boolean} [force]
|
||||
* @return {Connection} this
|
||||
* @api private
|
||||
*/
|
||||
|
||||
NativeConnection.prototype.doClose = async function doClose(force) {
|
||||
if (this.client == null) {
|
||||
return this;
|
||||
}
|
||||
|
||||
let skipCloseClient = false;
|
||||
if (force != null && typeof force === 'object') {
|
||||
skipCloseClient = force.skipCloseClient;
|
||||
force = force.force;
|
||||
}
|
||||
|
||||
if (skipCloseClient) {
|
||||
return this;
|
||||
}
|
||||
|
||||
await this.client.close(force);
|
||||
// Defer because the driver will wait at least 1ms before finishing closing
|
||||
// the pool, see https://github.com/mongodb-js/mongodb-core/blob/a8f8e4ce41936babc3b9112bf42d609779f03b39/lib/connection/pool.js#L1026-L1030.
|
||||
// If there's queued operations, you may still get some background work
|
||||
// after the callback is called.
|
||||
await new Promise(resolve => setTimeout(resolve, 1));
|
||||
|
||||
return this;
|
||||
};
|
||||
|
||||
|
||||
/*!
|
||||
* Module exports.
|
||||
*/
|
||||
|
||||
module.exports = NativeConnection;
|
8
node_modules/mongoose/lib/drivers/node-mongodb-native/index.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
/*!
|
||||
* Module exports.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
exports.Collection = require('./collection');
|
||||
exports.Connection = require('./connection');
|
28
node_modules/mongoose/lib/error/browserMissingSchema.js
generated
vendored
Normal file
@@ -0,0 +1,28 @@
/*!
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const MongooseError = require('./');
|
||||
|
||||
|
||||
class MissingSchemaError extends MongooseError {
|
||||
/**
|
||||
* MissingSchema Error constructor.
|
||||
*/
|
||||
constructor() {
|
||||
super('Schema hasn\'t been registered for document.\n'
|
||||
+ 'Use mongoose.Document(name, schema)');
|
||||
}
|
||||
}
|
||||
|
||||
Object.defineProperty(MissingSchemaError.prototype, 'name', {
|
||||
value: 'MongooseError'
|
||||
});
|
||||
|
||||
/*!
|
||||
* exports
|
||||
*/
|
||||
|
||||
module.exports = MissingSchemaError;
|
158
node_modules/mongoose/lib/error/cast.js
generated
vendored
Normal file
@@ -0,0 +1,158 @@
'use strict';
|
||||
|
||||
/*!
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
const MongooseError = require('./mongooseError');
|
||||
const util = require('util');
|
||||
|
||||
/**
|
||||
* Casting Error constructor.
|
||||
*
|
||||
* @param {String} type
|
||||
* @param {String} value
|
||||
* @inherits MongooseError
|
||||
* @api private
|
||||
*/
|
||||
|
||||
class CastError extends MongooseError {
|
||||
constructor(type, value, path, reason, schemaType) {
|
||||
// If no args, assume we'll `init()` later.
|
||||
if (arguments.length > 0) {
|
||||
const stringValue = getStringValue(value);
|
||||
const valueType = getValueType(value);
|
||||
const messageFormat = getMessageFormat(schemaType);
|
||||
const msg = formatMessage(null, type, stringValue, path, messageFormat, valueType, reason);
|
||||
super(msg);
|
||||
this.init(type, value, path, reason, schemaType);
|
||||
} else {
|
||||
super(formatMessage());
|
||||
}
|
||||
}
|
||||
|
||||
toJSON() {
|
||||
return {
|
||||
stringValue: this.stringValue,
|
||||
valueType: this.valueType,
|
||||
kind: this.kind,
|
||||
value: this.value,
|
||||
path: this.path,
|
||||
reason: this.reason,
|
||||
name: this.name,
|
||||
message: this.message
|
||||
};
|
||||
}
|
||||
/*!
|
||||
* ignore
|
||||
*/
|
||||
init(type, value, path, reason, schemaType) {
|
||||
this.stringValue = getStringValue(value);
|
||||
this.messageFormat = getMessageFormat(schemaType);
|
||||
this.kind = type;
|
||||
this.value = value;
|
||||
this.path = path;
|
||||
this.reason = reason;
|
||||
this.valueType = getValueType(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* ignore
|
||||
* @param {Readonly<CastError>} other
|
||||
* @api private
|
||||
*/
|
||||
copy(other) {
|
||||
this.messageFormat = other.messageFormat;
|
||||
this.stringValue = other.stringValue;
|
||||
this.kind = other.kind;
|
||||
this.value = other.value;
|
||||
this.path = other.path;
|
||||
this.reason = other.reason;
|
||||
this.message = other.message;
|
||||
this.valueType = other.valueType;
|
||||
}
|
||||
|
||||
/*!
|
||||
* ignore
|
||||
*/
|
||||
setModel(model) {
|
||||
this.model = model;
|
||||
this.message = formatMessage(model, this.kind, this.stringValue, this.path,
|
||||
this.messageFormat, this.valueType);
|
||||
}
|
||||
}
|
||||
|
||||
Object.defineProperty(CastError.prototype, 'name', {
|
||||
value: 'CastError'
|
||||
});
|
||||
|
||||
function getStringValue(value) {
|
||||
let stringValue = util.inspect(value);
|
||||
stringValue = stringValue.replace(/^'|'$/g, '"');
|
||||
if (!stringValue.startsWith('"')) {
|
||||
stringValue = '"' + stringValue + '"';
|
||||
}
|
||||
return stringValue;
|
||||
}
|
||||
|
||||
function getValueType(value) {
|
||||
if (value == null) {
|
||||
return '' + value;
|
||||
}
|
||||
|
||||
const t = typeof value;
|
||||
if (t !== 'object') {
|
||||
return t;
|
||||
}
|
||||
if (typeof value.constructor !== 'function') {
|
||||
return t;
|
||||
}
|
||||
return value.constructor.name;
|
||||
}
|
||||
|
||||
function getMessageFormat(schemaType) {
|
||||
const messageFormat = schemaType &&
|
||||
schemaType.options &&
|
||||
schemaType.options.cast || null;
|
||||
if (typeof messageFormat === 'string') {
|
||||
return messageFormat;
|
||||
}
|
||||
}
|
||||
|
||||
/*!
|
||||
* ignore
|
||||
*/
|
||||
|
||||
function formatMessage(model, kind, stringValue, path, messageFormat, valueType, reason) {
|
||||
if (messageFormat != null) {
|
||||
let ret = messageFormat.
|
||||
replace('{KIND}', kind).
|
||||
replace('{VALUE}', stringValue).
|
||||
replace('{PATH}', path);
|
||||
if (model != null) {
|
||||
ret = ret.replace('{MODEL}', model.modelName);
|
||||
}
|
||||
|
||||
return ret;
|
||||
} else {
|
||||
const valueTypeMsg = valueType ? ' (type ' + valueType + ')' : '';
|
||||
let ret = 'Cast to ' + kind + ' failed for value ' +
|
||||
stringValue + valueTypeMsg + ' at path "' + path + '"';
|
||||
if (model != null) {
|
||||
ret += ' for model "' + model.modelName + '"';
|
||||
}
|
||||
if (reason != null &&
|
||||
typeof reason.constructor === 'function' &&
|
||||
reason.constructor.name !== 'AssertionError' &&
|
||||
reason.constructor.name !== 'Error') {
|
||||
ret += ' because of "' + reason.constructor.name + '"';
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
}
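// Sketch (not part of the original file) of the schema-level `cast` option that
// supplies `messageFormat` above; `{KIND}`, `{VALUE}` and `{PATH}` are the
// supported placeholders, and the schema itself is illustrative:
//
//   const schema = new mongoose.Schema({
//     age: { type: Number, cast: '{VALUE} is not a valid age for path `{PATH}`' }
//   });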
|
||||
|
||||
/*!
|
||||
* exports
|
||||
*/
|
||||
|
||||
module.exports = CastError;
|
38
node_modules/mongoose/lib/error/divergentArray.js
generated
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
/*!
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const MongooseError = require('./');
|
||||
|
||||
class DivergentArrayError extends MongooseError {
|
||||
/**
|
||||
* DivergentArrayError constructor.
|
||||
* @param {Array<String>} paths
|
||||
* @api private
|
||||
*/
|
||||
constructor(paths) {
|
||||
const msg = 'For your own good, using `document.save()` to update an array '
|
||||
+ 'which was selected using an $elemMatch projection OR '
|
||||
+ 'populated using skip, limit, query conditions, or exclusion of '
|
||||
+ 'the _id field when the operation results in a $pop or $set of '
|
||||
+ 'the entire array is not supported. The following '
|
||||
+ 'path(s) would have been modified unsafely:\n'
|
||||
+ ' ' + paths.join('\n ') + '\n'
|
||||
+ 'Use Model.updateOne() to update these arrays instead.';
|
||||
// TODO write up a docs page (FAQ) and link to it
|
||||
super(msg);
|
||||
}
|
||||
}
|
||||
|
||||
Object.defineProperty(DivergentArrayError.prototype, 'name', {
|
||||
value: 'DivergentArrayError'
|
||||
});
|
||||
|
||||
/*!
|
||||
* exports
|
||||
*/
|
||||
|
||||
module.exports = DivergentArrayError;
|
41
node_modules/mongoose/lib/error/eachAsyncMultiError.js
generated
vendored
Normal file
@@ -0,0 +1,41 @@
/*!
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const MongooseError = require('./');
|
||||
|
||||
|
||||
/**
|
||||
* If `eachAsync()` is called with `continueOnError: true`, there can be
|
||||
* multiple errors. This error class contains an `errors` property, which
|
||||
* contains an array of all errors that occurred in `eachAsync()`.
|
||||
*
|
||||
* @api private
|
||||
*/
|
||||
|
||||
class EachAsyncMultiError extends MongooseError {
|
||||
/**
|
||||
* @param {Error[]} errors
|
||||
*/
|
||||
constructor(errors) {
|
||||
let preview = errors.map(e => e.message).join(', ');
|
||||
if (preview.length > 50) {
|
||||
preview = preview.slice(0, 50) + '...';
|
||||
}
|
||||
super(`eachAsync() finished with ${errors.length} errors: ${preview}`);
|
||||
|
||||
this.errors = errors;
|
||||
}
|
||||
}
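// Usage sketch (not part of the original file; `cursor` and `fn` are assumed):
//
//   try {
//     await cursor.eachAsync(fn, { continueOnError: true });
//   } catch (err) {
//     // err is an EachAsyncMultiError; the individual failures are in err.errors
//   }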
|
||||
|
||||
Object.defineProperty(EachAsyncMultiError.prototype, 'name', {
|
||||
value: 'EachAsyncMultiError'
|
||||
});
|
||||
|
||||
/*!
|
||||
* exports
|
||||
*/
|
||||
|
||||
module.exports = EachAsyncMultiError;
|
227
node_modules/mongoose/lib/error/index.js
generated
vendored
Normal file
@@ -0,0 +1,227 @@
'use strict';
|
||||
|
||||
/**
|
||||
* MongooseError constructor. MongooseError is the base class for all
|
||||
* Mongoose-specific errors.
|
||||
*
|
||||
* #### Example:
|
||||
*
|
||||
* const Model = mongoose.model('Test', new mongoose.Schema({ answer: Number }));
|
||||
* const doc = new Model({ answer: 'not a number' });
|
||||
* const err = doc.validateSync();
|
||||
*
|
||||
* err instanceof mongoose.Error.ValidationError; // true
|
||||
*
|
||||
* @constructor Error
|
||||
* @param {String} msg Error message
|
||||
* @inherits Error https://developer.mozilla.org/en/JavaScript/Reference/Global_Objects/Error
|
||||
*/
|
||||
|
||||
const MongooseError = require('./mongooseError');
|
||||
|
||||
/**
|
||||
* The name of the error. The name uniquely identifies this Mongoose error. The
|
||||
* possible values are:
|
||||
*
|
||||
* - `MongooseError`: general Mongoose error
|
||||
* - `CastError`: Mongoose could not convert a value to the type defined in the schema path. May be in a `ValidationError` class' `errors` property.
|
||||
* - `DivergentArrayError`: You attempted to `save()` an array that was modified after you loaded it with a `$elemMatch` or similar projection
|
||||
* - `MissingSchemaError`: You tried to access a model with [`mongoose.model()`](mongoose.html#mongoose_Mongoose-model) that was not defined
|
||||
* - `DocumentNotFoundError`: The document you tried to [`save()`](document.html#document_Document-save) was not found
|
||||
* - `ValidatorError`: error from an individual schema path's validator
|
||||
* - `ValidationError`: error returned from [`validate()`](document.html#document_Document-validate) or [`validateSync()`](document.html#document_Document-validateSync). Contains zero or more `ValidatorError` instances in `.errors` property.
|
||||
* - `MissingSchemaError`: You called `mongoose.Document()` without a schema
|
||||
* - `ObjectExpectedError`: Thrown when you set a nested path to a non-object value with [strict mode set](/docs/guide.html#strict).
|
||||
* - `ObjectParameterError`: Thrown when you pass a non-object value to a function which expects an object as a parameter
|
||||
* - `OverwriteModelError`: Thrown when you call [`mongoose.model()`](mongoose.html#mongoose_Mongoose-model) to re-define a model that was already defined.
|
||||
* - `ParallelSaveError`: Thrown when you call [`save()`](model.html#model_Model-save) on a document when the same document instance is already saving.
|
||||
* - `StrictModeError`: Thrown when you set a path that isn't the schema and [strict mode](/docs/guide.html#strict) is set to `throw`.
|
||||
* - `VersionError`: Thrown when the [document is out of sync](/docs/guide.html#versionKey)
|
||||
*
|
||||
* @api public
|
||||
* @property {String} name
|
||||
* @memberOf Error
|
||||
* @instance
|
||||
*/
|
||||
|
||||
/*!
|
||||
* Module exports.
|
||||
*/
|
||||
|
||||
module.exports = exports = MongooseError;
|
||||
|
||||
/**
|
||||
* The default built-in validator error messages.
|
||||
*
|
||||
* @see Error.messages #error_messages_MongooseError-messages
|
||||
* @api public
|
||||
* @memberOf Error
|
||||
* @static
|
||||
*/
|
||||
|
||||
MongooseError.messages = require('./messages');
|
||||
|
||||
// backward compat
|
||||
MongooseError.Messages = MongooseError.messages;
|
||||
|
||||
/**
|
||||
* An instance of this error class will be returned when `save()` fails
|
||||
* because the underlying
|
||||
* document was not found. The constructor takes one parameter, the
|
||||
* conditions that mongoose passed to `updateOne()` when trying to update
|
||||
* the document.
|
||||
*
|
||||
* @api public
|
||||
* @memberOf Error
|
||||
* @static
|
||||
*/
|
||||
|
||||
MongooseError.DocumentNotFoundError = require('./notFound');
|
||||
|
||||
/**
|
||||
* An instance of this error class will be returned when mongoose failed to
|
||||
* cast a value.
|
||||
*
|
||||
* @api public
|
||||
* @memberOf Error
|
||||
* @static
|
||||
*/
|
||||
|
||||
MongooseError.CastError = require('./cast');
|
||||
|
||||
/**
|
||||
* An instance of this error class will be returned when [validation](/docs/validation.html) failed.
|
||||
* The `errors` property contains an object whose keys are the paths that failed and whose values are
|
||||
* instances of CastError or ValidationError.
|
||||
*
|
||||
* @api public
|
||||
* @memberOf Error
|
||||
* @static
|
||||
*/
|
||||
|
||||
MongooseError.ValidationError = require('./validation');
|
||||
|
||||
/**
|
||||
* A `ValidationError` has a hash of `errors` that contain individual
|
||||
* `ValidatorError` instances.
|
||||
*
|
||||
* #### Example:
|
||||
*
|
||||
* const schema = Schema({ name: { type: String, required: true } });
|
||||
* const Model = mongoose.model('Test', schema);
|
||||
* const doc = new Model({});
|
||||
*
|
||||
* // Top-level error is a ValidationError, **not** a ValidatorError
|
||||
* const err = doc.validateSync();
|
||||
* err instanceof mongoose.Error.ValidationError; // true
|
||||
*
|
||||
* // A ValidationError `err` has 0 or more ValidatorErrors keyed by the
|
||||
* // path in the `err.errors` property.
|
||||
* err.errors['name'] instanceof mongoose.Error.ValidatorError;
|
||||
*
|
||||
* err.errors['name'].kind; // 'required'
|
||||
* err.errors['name'].path; // 'name'
|
||||
* err.errors['name'].value; // undefined
|
||||
*
|
||||
* Instances of `ValidatorError` have the following properties:
|
||||
*
|
||||
* - `kind`: The validator's `type`, like `'required'` or `'regexp'`
|
||||
* - `path`: The path that failed validation
|
||||
* - `value`: The value that failed validation
|
||||
*
|
||||
* @api public
|
||||
* @memberOf Error
|
||||
* @static
|
||||
*/
|
||||
|
||||
MongooseError.ValidatorError = require('./validator');
|
||||
|
||||
/**
|
||||
* An instance of this error class will be returned when you call `save()` after
|
||||
* the document in the database was changed in a potentially unsafe way. See
|
||||
* the [`versionKey` option](/docs/guide.html#versionKey) for more information.
|
||||
*
|
||||
* @api public
|
||||
* @memberOf Error
|
||||
* @static
|
||||
*/
|
||||
|
||||
MongooseError.VersionError = require('./version');
|
||||
|
||||
/**
|
||||
* An instance of this error class will be returned when you call `save()` multiple
|
||||
* times on the same document in parallel. See the [FAQ](/docs/faq.html) for more
|
||||
* information.
|
||||
*
|
||||
* @api public
|
||||
* @memberOf Error
|
||||
* @static
|
||||
*/
|
||||
|
||||
MongooseError.ParallelSaveError = require('./parallelSave');
|
||||
|
||||
/**
|
||||
* Thrown when a model with the given name was already registered on the connection.
|
||||
* See [the FAQ about `OverwriteModelError`](/docs/faq.html#overwrite-model-error).
|
||||
*
|
||||
* @api public
|
||||
* @memberOf Error
|
||||
* @static
|
||||
*/
|
||||
|
||||
MongooseError.OverwriteModelError = require('./overwriteModel');
|
||||
|
||||
/**
|
||||
* Thrown when you try to access a model that has not been registered yet
|
||||
*
|
||||
* @api public
|
||||
* @memberOf Error
|
||||
* @static
|
||||
*/
|
||||
|
||||
MongooseError.MissingSchemaError = require('./missingSchema');
|
||||
|
||||
/**
|
||||
* Thrown when the MongoDB Node driver can't connect to a valid server
|
||||
* to send an operation to.
|
||||
*
|
||||
* @api public
|
||||
* @memberOf Error
|
||||
* @static
|
||||
*/
|
||||
|
||||
MongooseError.MongooseServerSelectionError = require('./serverSelection');
|
||||
|
||||
/**
|
||||
* An instance of this error will be returned if you used an array projection
|
||||
* and then modified the array in an unsafe way.
|
||||
*
|
||||
* @api public
|
||||
* @memberOf Error
|
||||
* @static
|
||||
*/
|
||||
|
||||
MongooseError.DivergentArrayError = require('./divergentArray');
|
||||
|
||||
/**
|
||||
* Thrown when you try to pass values to the model constructor that
|
||||
* were not specified in schema or change immutable properties when
|
||||
* `strict` mode is `"throw"`
|
||||
*
|
||||
* @api public
|
||||
* @memberOf Error
|
||||
* @static
|
||||
*/
|
||||
|
||||
MongooseError.StrictModeError = require('./strict');
|
||||
|
||||
/**
|
||||
* An instance of this error class will be returned when mongoose failed to
|
||||
* populate a path that does not exist.
|
||||
*
|
||||
* @api public
|
||||
* @memberOf Error
|
||||
* @static
|
||||
*/
|
||||
|
||||
MongooseError.StrictPopulateError = require('./strictPopulate');
|
47
node_modules/mongoose/lib/error/messages.js
generated
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
/**
|
||||
* The default built-in validator error messages. These may be customized.
|
||||
*
|
||||
* // customize within each schema or globally like so
|
||||
* const mongoose = require('mongoose');
|
||||
* mongoose.Error.messages.String.enum = "Your custom message for {PATH}.";
|
||||
*
|
||||
* As you might have noticed, error messages support basic templating
|
||||
*
|
||||
* - `{PATH}` is replaced with the invalid document path
|
||||
* - `{VALUE}` is replaced with the invalid value
|
||||
* - `{TYPE}` is replaced with the validator type such as "regexp", "min", or "user defined"
|
||||
* - `{MIN}` is replaced with the declared min value for the Number.min validator
|
||||
* - `{MAX}` is replaced with the declared max value for the Number.max validator
|
||||
*
|
||||
* Click the "show code" link below to see all defaults.
|
||||
*
|
||||
* @static
|
||||
* @memberOf MongooseError
|
||||
* @api public
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const msg = module.exports = exports = {};
|
||||
|
||||
msg.DocumentNotFoundError = null;
|
||||
|
||||
msg.general = {};
|
||||
msg.general.default = 'Validator failed for path `{PATH}` with value `{VALUE}`';
|
||||
msg.general.required = 'Path `{PATH}` is required.';
|
||||
|
||||
msg.Number = {};
|
||||
msg.Number.min = 'Path `{PATH}` ({VALUE}) is less than minimum allowed value ({MIN}).';
|
||||
msg.Number.max = 'Path `{PATH}` ({VALUE}) is more than maximum allowed value ({MAX}).';
|
||||
msg.Number.enum = '`{VALUE}` is not a valid enum value for path `{PATH}`.';
|
||||
|
||||
msg.Date = {};
|
||||
msg.Date.min = 'Path `{PATH}` ({VALUE}) is before minimum allowed value ({MIN}).';
|
||||
msg.Date.max = 'Path `{PATH}` ({VALUE}) is after maximum allowed value ({MAX}).';
|
||||
|
||||
msg.String = {};
|
||||
msg.String.enum = '`{VALUE}` is not a valid enum value for path `{PATH}`.';
|
||||
msg.String.match = 'Path `{PATH}` is invalid ({VALUE}).';
|
||||
msg.String.minlength = 'Path `{PATH}` (`{VALUE}`) is shorter than the minimum allowed length ({MINLENGTH}).';
|
||||
msg.String.maxlength = 'Path `{PATH}` (`{VALUE}`) is longer than the maximum allowed length ({MAXLENGTH}).';
|
31
node_modules/mongoose/lib/error/missingSchema.js
generated
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
/*!
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const MongooseError = require('./');
|
||||
|
||||
class MissingSchemaError extends MongooseError {
|
||||
/**
|
||||
* MissingSchema Error constructor.
|
||||
* @param {String} name
|
||||
* @api private
|
||||
*/
|
||||
constructor(name) {
|
||||
const msg = 'Schema hasn\'t been registered for model "' + name + '".\n'
|
||||
+ 'Use mongoose.model(name, schema)';
|
||||
super(msg);
|
||||
}
|
||||
}
|
||||
|
||||
Object.defineProperty(MissingSchemaError.prototype, 'name', {
|
||||
value: 'MissingSchemaError'
|
||||
});
|
||||
|
||||
/*!
|
||||
* exports
|
||||
*/
|
||||
|
||||
module.exports = MissingSchemaError;
|
13
node_modules/mongoose/lib/error/mongooseError.js
generated
vendored
Normal file
@@ -0,0 +1,13 @@
'use strict';
|
||||
|
||||
/*!
|
||||
* ignore
|
||||
*/
|
||||
|
||||
class MongooseError extends Error { }
|
||||
|
||||
Object.defineProperty(MongooseError.prototype, 'name', {
|
||||
value: 'MongooseError'
|
||||
});
|
||||
|
||||
module.exports = MongooseError;
|
45
node_modules/mongoose/lib/error/notFound.js
generated
vendored
Normal file
@@ -0,0 +1,45 @@
'use strict';
|
||||
|
||||
/*!
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
const MongooseError = require('./');
|
||||
const util = require('util');
|
||||
|
||||
class DocumentNotFoundError extends MongooseError {
|
||||
/**
|
||||
* DocumentNotFound Error constructor.
|
||||
* @api private
|
||||
*/
|
||||
constructor(filter, model, numAffected, result) {
|
||||
let msg;
|
||||
const messages = MongooseError.messages;
|
||||
if (messages.DocumentNotFoundError != null) {
|
||||
msg = typeof messages.DocumentNotFoundError === 'function' ?
|
||||
messages.DocumentNotFoundError(filter, model) :
|
||||
messages.DocumentNotFoundError;
|
||||
} else {
|
||||
msg = 'No document found for query "' + util.inspect(filter) +
|
||||
'" on model "' + model + '"';
|
||||
}
|
||||
|
||||
super(msg);
|
||||
|
||||
this.result = result;
|
||||
this.numAffected = numAffected;
|
||||
this.filter = filter;
|
||||
// Backwards compat
|
||||
this.query = filter;
|
||||
}
|
||||
}
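// Sketch (not part of the original file): the default text above can be replaced
// globally by assigning a string or a function to `messages.DocumentNotFoundError`:
//
//   MongooseError.messages.DocumentNotFoundError = filter =>
//     `No document matched ${util.inspect(filter)}`;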
|
||||
|
||||
Object.defineProperty(DocumentNotFoundError.prototype, 'name', {
|
||||
value: 'DocumentNotFoundError'
|
||||
});
|
||||
|
||||
/*!
|
||||
* exports
|
||||
*/
|
||||
|
||||
module.exports = DocumentNotFoundError;
|
30
node_modules/mongoose/lib/error/objectExpected.js
generated
vendored
Normal file
@@ -0,0 +1,30 @@
/*!
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const MongooseError = require('./');
|
||||
|
||||
|
||||
class ObjectExpectedError extends MongooseError {
|
||||
/**
|
||||
* ObjectExpected error constructor
|
||||
*
|
||||
* @param {String} path
|
||||
* @param {Any} val
|
||||
* @api private
|
||||
*/
|
||||
constructor(path, val) {
|
||||
const typeDescription = Array.isArray(val) ? 'array' : 'primitive value';
|
||||
super('Tried to set nested object field `' + path +
|
||||
`\` to ${typeDescription} \`` + val + '`');
|
||||
this.path = path;
|
||||
}
|
||||
}
|
||||
|
||||
Object.defineProperty(ObjectExpectedError.prototype, 'name', {
|
||||
value: 'ObjectExpectedError'
|
||||
});
|
||||
|
||||
module.exports = ObjectExpectedError;
|
30
node_modules/mongoose/lib/error/objectParameter.js
generated
vendored
Normal file
@@ -0,0 +1,30 @@
/*!
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const MongooseError = require('./');
|
||||
|
||||
class ObjectParameterError extends MongooseError {
|
||||
/**
|
||||
* Constructor for errors that happen when a parameter that's expected to be
|
||||
* an object isn't an object
|
||||
*
|
||||
* @param {Any} value
|
||||
* @param {String} paramName
|
||||
* @param {String} fnName
|
||||
* @api private
|
||||
*/
|
||||
constructor(value, paramName, fnName) {
|
||||
super('Parameter "' + paramName + '" to ' + fnName +
|
||||
'() must be an object, got ' + value.toString());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Object.defineProperty(ObjectParameterError.prototype, 'name', {
|
||||
value: 'ObjectParameterError'
|
||||
});
|
||||
|
||||
module.exports = ObjectParameterError;
|
30
node_modules/mongoose/lib/error/overwriteModel.js
generated
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
/*!
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const MongooseError = require('./');
|
||||
|
||||
|
||||
class OverwriteModelError extends MongooseError {
|
||||
/**
|
||||
* OverwriteModel Error constructor.
|
||||
* @param {String} name
|
||||
* @api private
|
||||
*/
|
||||
constructor(name) {
|
||||
super('Cannot overwrite `' + name + '` model once compiled.');
|
||||
}
|
||||
}
|
||||
|
||||
Object.defineProperty(OverwriteModelError.prototype, 'name', {
|
||||
value: 'OverwriteModelError'
|
||||
});
|
||||
|
||||
/*!
|
||||
* exports
|
||||
*/
|
||||
|
||||
module.exports = OverwriteModelError;
|
30
node_modules/mongoose/lib/error/parallelSave.js
generated
vendored
Normal file
@@ -0,0 +1,30 @@
'use strict';
|
||||
|
||||
/*!
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
const MongooseError = require('./');
|
||||
|
||||
class ParallelSaveError extends MongooseError {
|
||||
/**
|
||||
* ParallelSave Error constructor.
|
||||
*
|
||||
* @param {Document} doc
|
||||
* @api private
|
||||
*/
|
||||
constructor(doc) {
|
||||
const msg = 'Can\'t save() the same doc multiple times in parallel. Document: ';
|
||||
super(msg + doc._id);
|
||||
}
|
||||
}
|
||||
|
||||
Object.defineProperty(ParallelSaveError.prototype, 'name', {
|
||||
value: 'ParallelSaveError'
|
||||
});
|
||||
|
||||
/*!
|
||||
* exports
|
||||
*/
|
||||
|
||||
module.exports = ParallelSaveError;
|
31
node_modules/mongoose/lib/error/parallelValidate.js
generated
vendored
Normal file
@@ -0,0 +1,31 @@
'use strict';
|
||||
|
||||
/*!
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
const MongooseError = require('./mongooseError');
|
||||
|
||||
|
||||
class ParallelValidateError extends MongooseError {
|
||||
/**
|
||||
* ParallelValidate Error constructor.
|
||||
*
|
||||
* @param {Document} doc
|
||||
* @api private
|
||||
*/
|
||||
constructor(doc) {
|
||||
const msg = 'Can\'t validate() the same doc multiple times in parallel. Document: ';
|
||||
super(msg + doc._id);
|
||||
}
|
||||
}
|
||||
|
||||
Object.defineProperty(ParallelValidateError.prototype, 'name', {
|
||||
value: 'ParallelValidateError'
|
||||
});
|
||||
|
||||
/*!
|
||||
* exports
|
||||
*/
|
||||
|
||||
module.exports = ParallelValidateError;
|
61
node_modules/mongoose/lib/error/serverSelection.js
generated
vendored
Normal file
@@ -0,0 +1,61 @@
/*!
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const MongooseError = require('./mongooseError');
|
||||
const allServersUnknown = require('../helpers/topology/allServersUnknown');
|
||||
const isAtlas = require('../helpers/topology/isAtlas');
|
||||
const isSSLError = require('../helpers/topology/isSSLError');
|
||||
|
||||
/*!
|
||||
* ignore
|
||||
*/
|
||||
|
||||
const atlasMessage = 'Could not connect to any servers in your MongoDB Atlas cluster. ' +
|
||||
'One common reason is that you\'re trying to access the database from ' +
|
||||
'an IP that isn\'t whitelisted. Make sure your current IP address is on your Atlas ' +
|
||||
'cluster\'s IP whitelist: https://www.mongodb.com/docs/atlas/security-whitelist/';
|
||||
|
||||
const sslMessage = 'Mongoose is connecting with SSL enabled, but the server is ' +
|
||||
'not accepting SSL connections. Please ensure that the MongoDB server you are ' +
|
||||
'connecting to is configured to accept SSL connections. Learn more: ' +
|
||||
'https://mongoosejs.com/docs/tutorials/ssl.html';
|
||||
|
||||
class MongooseServerSelectionError extends MongooseError {
|
||||
/**
|
||||
* MongooseServerSelectionError constructor
|
||||
*
|
||||
* @api private
|
||||
*/
|
||||
assimilateError(err) {
|
||||
const reason = err.reason;
|
||||
// Special message for a case that is likely due to IP whitelisting issues.
|
||||
const isAtlasWhitelistError = isAtlas(reason) &&
|
||||
allServersUnknown(reason) &&
|
||||
err.message.indexOf('bad auth') === -1 &&
|
||||
err.message.indexOf('Authentication failed') === -1;
|
||||
|
||||
if (isAtlasWhitelistError) {
|
||||
this.message = atlasMessage;
|
||||
} else if (isSSLError(reason)) {
|
||||
this.message = sslMessage;
|
||||
} else {
|
||||
this.message = err.message;
|
||||
}
|
||||
for (const key in err) {
|
||||
if (key !== 'name') {
|
||||
this[key] = err[key];
|
||||
}
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
Object.defineProperty(MongooseServerSelectionError.prototype, 'name', {
|
||||
value: 'MongooseServerSelectionError'
|
||||
});
|
||||
|
||||
module.exports = MongooseServerSelectionError;
|
101
node_modules/mongoose/lib/error/setOptionError.js
generated
vendored
Normal file
@@ -0,0 +1,101 @@
/*!
|
||||
* Module requirements
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const MongooseError = require('./mongooseError');
|
||||
const util = require('util');
|
||||
const combinePathErrors = require('../helpers/error/combinePathErrors');
|
||||
|
||||
class SetOptionError extends MongooseError {
|
||||
/**
|
||||
* Mongoose.set Error
|
||||
*
|
||||
* @api private
|
||||
* @inherits MongooseError
|
||||
*/
|
||||
constructor() {
|
||||
super('');
|
||||
|
||||
this.errors = {};
|
||||
}
|
||||
|
||||
/**
|
||||
* Console.log helper
|
||||
*/
|
||||
toString() {
|
||||
return combinePathErrors(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* inspect helper
|
||||
* @api private
|
||||
*/
|
||||
inspect() {
|
||||
return Object.assign(new Error(this.message), this);
|
||||
}
|
||||
|
||||
/**
|
||||
* add message
|
||||
* @param {String} key
|
||||
* @param {String|Error} error
|
||||
* @api private
|
||||
*/
|
||||
addError(key, error) {
|
||||
if (error instanceof SetOptionError) {
|
||||
const { errors } = error;
|
||||
for (const optionKey of Object.keys(errors)) {
|
||||
this.addError(optionKey, errors[optionKey]);
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
this.errors[key] = error;
|
||||
this.message = combinePathErrors(this);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (util.inspect.custom) {
|
||||
// Avoid Node deprecation warning DEP0079
|
||||
SetOptionError.prototype[util.inspect.custom] = SetOptionError.prototype.inspect;
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper for JSON.stringify
|
||||
* Ensure `name` and `message` show up in toJSON output re: gh-9847
|
||||
* @api private
|
||||
*/
|
||||
Object.defineProperty(SetOptionError.prototype, 'toJSON', {
|
||||
enumerable: false,
|
||||
writable: false,
|
||||
configurable: true,
|
||||
value: function() {
|
||||
return Object.assign({}, this, { name: this.name, message: this.message });
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
Object.defineProperty(SetOptionError.prototype, 'name', {
|
||||
value: 'SetOptionError'
|
||||
});
|
||||
|
||||
class SetOptionInnerError extends MongooseError {
|
||||
/**
|
||||
* Error for the "errors" array in "SetOptionError" with consistent message
|
||||
* @param {String} key
|
||||
*/
|
||||
constructor(key) {
|
||||
super(`"${key}" is not a valid option to set`);
|
||||
}
|
||||
}
|
||||
|
||||
SetOptionError.SetOptionInnerError = SetOptionInnerError;
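// Sketch (not part of the original file; the option name is hypothetical): setting an
// unknown global option, e.g. `mongoose.set('bogusOption', true)`, is typically rejected
// with a SetOptionError whose `errors.bogusOption` is a SetOptionInnerError reading
// '"bogusOption" is not a valid option to set'.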
|
||||
|
||||
/*!
|
||||
* Module exports
|
||||
*/
|
||||
|
||||
module.exports = SetOptionError;
|
33
node_modules/mongoose/lib/error/strict.js
generated
vendored
Normal file
@@ -0,0 +1,33 @@
/*!
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const MongooseError = require('./');
|
||||
|
||||
|
||||
class StrictModeError extends MongooseError {
|
||||
/**
|
||||
* Strict mode error constructor
|
||||
*
|
||||
* @param {String} path
|
||||
* @param {String} [msg]
|
||||
* @param {Boolean} [immutable]
|
||||
* @inherits MongooseError
|
||||
* @api private
|
||||
*/
|
||||
constructor(path, msg, immutable) {
|
||||
msg = msg || 'Field `' + path + '` is not in schema and strict ' +
|
||||
'mode is set to throw.';
|
||||
super(msg);
|
||||
this.isImmutableError = !!immutable;
|
||||
this.path = path;
|
||||
}
|
||||
}
|
||||
|
||||
Object.defineProperty(StrictModeError.prototype, 'name', {
|
||||
value: 'StrictModeError'
|
||||
});
|
||||
|
||||
module.exports = StrictModeError;
|
29
node_modules/mongoose/lib/error/strictPopulate.js
generated
vendored
Normal file
@@ -0,0 +1,29 @@
/*!
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const MongooseError = require('./');
|
||||
|
||||
class StrictPopulateError extends MongooseError {
|
||||
/**
|
||||
* Strict mode error constructor
|
||||
*
|
||||
* @param {String} path
|
||||
* @param {String} [msg]
|
||||
* @inherits MongooseError
|
||||
* @api private
|
||||
*/
|
||||
constructor(path, msg) {
|
||||
msg = msg || 'Cannot populate path `' + path + '` because it is not in your schema. ' + 'Set the `strictPopulate` option to false to override.';
|
||||
super(msg);
|
||||
this.path = path;
|
||||
}
|
||||
}
|
||||
|
||||
Object.defineProperty(StrictPopulateError.prototype, 'name', {
|
||||
value: 'StrictPopulateError'
|
||||
});
|
||||
|
||||
module.exports = StrictPopulateError;
|
30
node_modules/mongoose/lib/error/syncIndexes.js
generated
vendored
Normal file
@@ -0,0 +1,30 @@
'use strict';
|
||||
|
||||
/*!
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
const MongooseError = require('./mongooseError');
|
||||
|
||||
/**
|
||||
* SyncIndexes Error constructor.
|
||||
*
|
||||
* @param {String} message
|
||||
* @param {String} errorsMap
|
||||
* @inherits MongooseError
|
||||
* @api private
|
||||
*/
|
||||
|
||||
class SyncIndexesError extends MongooseError {
|
||||
constructor(message, errorsMap) {
|
||||
super(message);
|
||||
this.errors = errorsMap;
|
||||
}
|
||||
}
|
||||
|
||||
Object.defineProperty(SyncIndexesError.prototype, 'name', {
|
||||
value: 'SyncIndexesError'
|
||||
});
|
||||
|
||||
|
||||
module.exports = SyncIndexesError;
|
103
node_modules/mongoose/lib/error/validation.js
generated
vendored
Normal file
@@ -0,0 +1,103 @@
/*!
|
||||
* Module requirements
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const MongooseError = require('./mongooseError');
|
||||
const getConstructorName = require('../helpers/getConstructorName');
|
||||
const util = require('util');
|
||||
const combinePathErrors = require('../helpers/error/combinePathErrors');
|
||||
|
||||
class ValidationError extends MongooseError {
|
||||
/**
|
||||
* Document Validation Error
|
||||
*
|
||||
* @api private
|
||||
* @param {Document} [instance]
|
||||
* @inherits MongooseError
|
||||
*/
|
||||
constructor(instance) {
|
||||
let _message;
|
||||
if (getConstructorName(instance) === 'model') {
|
||||
_message = instance.constructor.modelName + ' validation failed';
|
||||
} else {
|
||||
_message = 'Validation failed';
|
||||
}
|
||||
|
||||
super(_message);
|
||||
|
||||
this.errors = {};
|
||||
this._message = _message;
|
||||
|
||||
if (instance) {
|
||||
instance.$errors = this.errors;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Console.log helper
|
||||
*/
|
||||
toString() {
|
||||
return this.name + ': ' + combinePathErrors(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* inspect helper
|
||||
* @api private
|
||||
*/
|
||||
inspect() {
|
||||
return Object.assign(new Error(this.message), this);
|
||||
}
|
||||
|
||||
/**
|
||||
* add message
|
||||
* @param {String} path
|
||||
* @param {String|Error} error
|
||||
* @api private
|
||||
*/
|
||||
addError(path, error) {
|
||||
if (error instanceof ValidationError) {
|
||||
const { errors } = error;
|
||||
for (const errorPath of Object.keys(errors)) {
|
||||
this.addError(`${path}.${errorPath}`, errors[errorPath]);
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
this.errors[path] = error;
|
||||
this.message = this._message + ': ' + combinePathErrors(this);
|
||||
}
|
||||
}
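// Sketch (not part of the original file; paths are illustrative): `addError()` flattens
// nested ValidationErrors, so an error for path `name` added under path `child` is
// exposed as `errors['child.name']` on the parent:
//
//   const parentErr = new ValidationError();
//   parentErr.addError('child', childValidationError); // -> parentErr.errors['child.name']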
|
||||
|
||||
|
||||
if (util.inspect.custom) {
|
||||
// Avoid Node deprecation warning DEP0079
|
||||
ValidationError.prototype[util.inspect.custom] = ValidationError.prototype.inspect;
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper for JSON.stringify
|
||||
* Ensure `name` and `message` show up in toJSON output re: gh-9847
|
||||
* @api private
|
||||
*/
|
||||
Object.defineProperty(ValidationError.prototype, 'toJSON', {
|
||||
enumerable: false,
|
||||
writable: false,
|
||||
configurable: true,
|
||||
value: function() {
|
||||
return Object.assign({}, this, { name: this.name, message: this.message });
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
Object.defineProperty(ValidationError.prototype, 'name', {
|
||||
value: 'ValidationError'
|
||||
});
|
||||
|
||||
/*!
|
||||
* Module exports
|
||||
*/
|
||||
|
||||
module.exports = ValidationError;
|
99
node_modules/mongoose/lib/error/validator.js
generated
vendored
Normal file
@@ -0,0 +1,99 @@
/*!
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const MongooseError = require('./');
|
||||
|
||||
|
||||
class ValidatorError extends MongooseError {
|
||||
/**
|
||||
* Schema validator error
|
||||
*
|
||||
* @param {Object} properties
|
||||
* @param {Document} doc
|
||||
* @api private
|
||||
*/
|
||||
constructor(properties, doc) {
|
||||
let msg = properties.message;
|
||||
if (!msg) {
|
||||
msg = MongooseError.messages.general.default;
|
||||
}
|
||||
|
||||
const message = formatMessage(msg, properties, doc);
|
||||
super(message);
|
||||
|
||||
properties = Object.assign({}, properties, { message: message });
|
||||
this.properties = properties;
|
||||
this.kind = properties.type;
|
||||
this.path = properties.path;
|
||||
this.value = properties.value;
|
||||
this.reason = properties.reason;
|
||||
}
|
||||
|
||||
/**
|
||||
* toString helper
|
||||
* TODO remove? This defaults to `${this.name}: ${this.message}`
|
||||
* @api private
|
||||
*/
|
||||
toString() {
|
||||
return this.message;
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure `name` and `message` show up in toJSON output re: gh-9296
|
||||
* @api private
|
||||
*/
|
||||
|
||||
toJSON() {
|
||||
return Object.assign({ name: this.name, message: this.message }, this);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Object.defineProperty(ValidatorError.prototype, 'name', {
|
||||
value: 'ValidatorError'
|
||||
});
|
||||
|
||||
/**
|
||||
* The object used to define this validator. Not enumerable to hide
|
||||
* it from `require('util').inspect()` output re: gh-3925
|
||||
* @api private
|
||||
*/
|
||||
|
||||
Object.defineProperty(ValidatorError.prototype, 'properties', {
|
||||
enumerable: false,
|
||||
writable: true,
|
||||
value: null
|
||||
});
|
||||
|
||||
// Exposed for testing
|
||||
ValidatorError.prototype.formatMessage = formatMessage;
|
||||
|
||||
/**
|
||||
* Formats error messages
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function formatMessage(msg, properties, doc) {
|
||||
if (typeof msg === 'function') {
|
||||
return msg(properties, doc);
|
||||
}
|
||||
|
||||
const propertyNames = Object.keys(properties);
|
||||
for (const propertyName of propertyNames) {
|
||||
if (propertyName === 'message') {
|
||||
continue;
|
||||
}
|
||||
msg = msg.replace('{' + propertyName.toUpperCase() + '}', properties[propertyName]);
|
||||
}
|
||||
|
||||
return msg;
|
||||
}
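// Sketch (not part of the original file) of the templating performed above;
// every property except `message` is substituted by its upper-cased key:
//
//   formatMessage('Path `{PATH}` ({VALUE}) failed', { path: 'age', value: -1 });
//   // -> 'Path `age` (-1) failed'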
|
||||
|
||||
/*!
|
||||
* exports
|
||||
*/
|
||||
|
||||
module.exports = ValidatorError;
|
36
node_modules/mongoose/lib/error/version.js
generated
vendored
Normal file
@@ -0,0 +1,36 @@
'use strict';
|
||||
|
||||
/*!
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
const MongooseError = require('./');
|
||||
|
||||
class VersionError extends MongooseError {
|
||||
/**
|
||||
* Version Error constructor.
|
||||
*
|
||||
* @param {Document} doc
|
||||
* @param {Number} currentVersion
|
||||
* @param {Array<String>} modifiedPaths
|
||||
* @api private
|
||||
*/
|
||||
constructor(doc, currentVersion, modifiedPaths) {
|
||||
const modifiedPathsStr = modifiedPaths.join(', ');
|
||||
super('No matching document found for id "' + doc._id +
|
||||
'" version ' + currentVersion + ' modifiedPaths "' + modifiedPathsStr + '"');
|
||||
this.version = currentVersion;
|
||||
this.modifiedPaths = modifiedPaths;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Object.defineProperty(VersionError.prototype, 'name', {
|
||||
value: 'VersionError'
|
||||
});
|
||||
|
||||
/*!
|
||||
* exports
|
||||
*/
|
||||
|
||||
module.exports = VersionError;
|
39
node_modules/mongoose/lib/helpers/aggregate/prepareDiscriminatorPipeline.js
generated
vendored
Normal file
@@ -0,0 +1,39 @@
'use strict';
|
||||
|
||||
module.exports = function prepareDiscriminatorPipeline(pipeline, schema, prefix) {
|
||||
const discriminatorMapping = schema && schema.discriminatorMapping;
|
||||
prefix = prefix || '';
|
||||
|
||||
if (discriminatorMapping && !discriminatorMapping.isRoot) {
|
||||
const originalPipeline = pipeline;
|
||||
const filterKey = (prefix.length > 0 ? prefix + '.' : prefix) + discriminatorMapping.key;
|
||||
const discriminatorValue = discriminatorMapping.value;
|
||||
|
||||
// If the first pipeline stage is a match and it doesn't specify a `__t`
|
||||
// key, add the discriminator key to it. This allows for potential
|
||||
// aggregation query optimizations not to be disturbed by this feature.
|
||||
if (originalPipeline[0] != null &&
|
||||
originalPipeline[0].$match &&
|
||||
(originalPipeline[0].$match[filterKey] === undefined || originalPipeline[0].$match[filterKey] === discriminatorValue)) {
|
||||
originalPipeline[0].$match[filterKey] = discriminatorValue;
|
||||
// `originalPipeline` is a ref, so there's no need for
|
||||
// aggregate._pipeline = originalPipeline
|
||||
} else if (originalPipeline[0] != null && originalPipeline[0].$geoNear) {
|
||||
originalPipeline[0].$geoNear.query =
|
||||
originalPipeline[0].$geoNear.query || {};
|
||||
originalPipeline[0].$geoNear.query[filterKey] = discriminatorValue;
|
||||
} else if (originalPipeline[0] != null && originalPipeline[0].$search) {
|
||||
if (originalPipeline[1] && originalPipeline[1].$match != null) {
|
||||
originalPipeline[1].$match[filterKey] = originalPipeline[1].$match[filterKey] || discriminatorValue;
|
||||
} else {
|
||||
const match = {};
|
||||
match[filterKey] = discriminatorValue;
|
||||
originalPipeline.splice(1, 0, { $match: match });
|
||||
}
|
||||
} else {
|
||||
const match = {};
|
||||
match[filterKey] = discriminatorValue;
|
||||
originalPipeline.unshift({ $match: match });
|
||||
}
|
||||
}
|
||||
};
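// Sketch (not part of the original file; `__t` is Mongoose's default discriminator key
// and 'Clicked' is an illustrative discriminator value): for a child discriminator schema,
//
//   prepareDiscriminatorPipeline([{ $match: { userId: 1 } }], schema);
//   // mutates the pipeline in place to [{ $match: { userId: 1, __t: 'Clicked' } }]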
|
50
node_modules/mongoose/lib/helpers/aggregate/stringifyFunctionOperators.js
generated
vendored
Normal file
@@ -0,0 +1,50 @@
'use strict';
|
||||
|
||||
module.exports = function stringifyFunctionOperators(pipeline) {
|
||||
if (!Array.isArray(pipeline)) {
|
||||
return;
|
||||
}
|
||||
|
||||
for (const stage of pipeline) {
|
||||
if (stage == null) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const canHaveAccumulator = stage.$group || stage.$bucket || stage.$bucketAuto;
|
||||
if (canHaveAccumulator != null) {
|
||||
for (const key of Object.keys(canHaveAccumulator)) {
|
||||
handleAccumulator(canHaveAccumulator[key]);
|
||||
}
|
||||
}
|
||||
|
||||
const stageType = Object.keys(stage)[0];
|
||||
if (stageType && typeof stage[stageType] === 'object') {
|
||||
const stageOptions = stage[stageType];
|
||||
for (const key of Object.keys(stageOptions)) {
|
||||
if (stageOptions[key] != null &&
|
||||
stageOptions[key].$function != null &&
|
||||
typeof stageOptions[key].$function.body === 'function') {
|
||||
stageOptions[key].$function.body = stageOptions[key].$function.body.toString();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (stage.$facet != null) {
|
||||
for (const key of Object.keys(stage.$facet)) {
|
||||
stringifyFunctionOperators(stage.$facet[key]);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
function handleAccumulator(operator) {
|
||||
if (operator == null || operator.$accumulator == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
for (const key of ['init', 'accumulate', 'merge', 'finalize']) {
|
||||
if (typeof operator.$accumulator[key] === 'function') {
|
||||
operator.$accumulator[key] = String(operator.$accumulator[key]);
|
||||
}
|
||||
}
|
||||
}
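// Sketch (not part of the original file; stage and field names are illustrative):
//
//   const pipeline = [{ $addFields: { total: { $function: {
//     body: function(a, b) { return a + b; }, args: ['$a', '$b'], lang: 'js'
//   } } } }];
//   stringifyFunctionOperators(pipeline);
//   // pipeline[0].$addFields.total.$function.body is now the function's source string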
|
33
node_modules/mongoose/lib/helpers/arrayDepth.js
generated
vendored
Normal file
@@ -0,0 +1,33 @@
'use strict';
|
||||
|
||||
module.exports = arrayDepth;
|
||||
|
||||
function arrayDepth(arr) {
|
||||
if (!Array.isArray(arr)) {
|
||||
return { min: 0, max: 0, containsNonArrayItem: true };
|
||||
}
|
||||
if (arr.length === 0) {
|
||||
return { min: 1, max: 1, containsNonArrayItem: false };
|
||||
}
|
||||
if (arr.length === 1 && !Array.isArray(arr[0])) {
|
||||
return { min: 1, max: 1, containsNonArrayItem: false };
|
||||
}
|
||||
|
||||
const res = arrayDepth(arr[0]);
|
||||
|
||||
for (let i = 1; i < arr.length; ++i) {
|
||||
const _res = arrayDepth(arr[i]);
|
||||
if (_res.min < res.min) {
|
||||
res.min = _res.min;
|
||||
}
|
||||
if (_res.max > res.max) {
|
||||
res.max = _res.max;
|
||||
}
|
||||
res.containsNonArrayItem = res.containsNonArrayItem || _res.containsNonArrayItem;
|
||||
}
|
||||
|
||||
res.min = res.min + 1;
|
||||
res.max = res.max + 1;
|
||||
|
||||
return res;
|
||||
}
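// Examples (not part of the original file) of what the helper above returns:
//
//   arrayDepth([[1], [2, [3]]]); // { min: 2, max: 3, containsNonArrayItem: true }
//   arrayDepth([[], []]);        // { min: 2, max: 2, containsNonArrayItem: false }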
|
177
node_modules/mongoose/lib/helpers/clone.js
generated
vendored
Normal file
@@ -0,0 +1,177 @@
'use strict';
|
||||
|
||||
const Decimal = require('../types/decimal128');
|
||||
const ObjectId = require('../types/objectid');
|
||||
const specialProperties = require('./specialProperties');
|
||||
const isMongooseObject = require('./isMongooseObject');
|
||||
const getFunctionName = require('./getFunctionName');
|
||||
const isBsonType = require('./isBsonType');
|
||||
const isMongooseArray = require('../types/array/isMongooseArray').isMongooseArray;
|
||||
const isObject = require('./isObject');
|
||||
const isPOJO = require('./isPOJO');
|
||||
const symbols = require('./symbols');
|
||||
const trustedSymbol = require('./query/trusted').trustedSymbol;
|
||||
|
||||
/**
|
||||
* Object clone with Mongoose natives support.
|
||||
*
|
||||
* If options.minimize is true, creates a minimal data object. Empty objects and undefined values will not be cloned. This makes the data payload sent to MongoDB as small as possible.
|
||||
*
|
||||
* Functions are never cloned.
|
||||
*
|
||||
* @param {Object} obj the object to clone
|
||||
* @param {Object} options
|
||||
* @param {Boolean} isArrayChild true if cloning immediately underneath an array. Special case for minimize.
|
||||
* @return {Object} the cloned object
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function clone(obj, options, isArrayChild) {
|
||||
if (obj == null) {
|
||||
return obj;
|
||||
}
|
||||
|
||||
if (Array.isArray(obj)) {
|
||||
return cloneArray(isMongooseArray(obj) ? obj.__array : obj, options);
|
||||
}
|
||||
|
||||
if (isMongooseObject(obj)) {
|
||||
// Single nested subdocs should apply getters later in `applyGetters()`
|
||||
// when calling `toObject()`. See gh-7442, gh-8295
|
||||
if (options && options._skipSingleNestedGetters && obj.$isSingleNested) {
|
||||
options = Object.assign({}, options, { getters: false });
|
||||
}
|
||||
const isSingleNested = obj.$isSingleNested;
|
||||
|
||||
if (isPOJO(obj) && obj.$__ != null && obj._doc != null) {
|
||||
return obj._doc;
|
||||
}
|
||||
|
||||
let ret;
|
||||
if (options && options.json && typeof obj.toJSON === 'function') {
|
||||
ret = obj.toJSON(options);
|
||||
} else {
|
||||
ret = obj.toObject(options);
|
||||
}
|
||||
|
||||
if (options && options.minimize && isSingleNested && Object.keys(ret).length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
const objConstructor = obj.constructor;
|
||||
|
||||
if (objConstructor) {
|
||||
switch (getFunctionName(objConstructor)) {
|
||||
case 'Object':
|
||||
return cloneObject(obj, options, isArrayChild);
|
||||
case 'Date':
|
||||
return new objConstructor(+obj);
|
||||
case 'RegExp':
|
||||
return cloneRegExp(obj);
|
||||
default:
|
||||
// ignore
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (isBsonType(obj, 'ObjectId')) {
|
||||
return new ObjectId(obj.id);
|
||||
}
|
||||
|
||||
if (isBsonType(obj, 'Decimal128')) {
|
||||
if (options && options.flattenDecimals) {
|
||||
return obj.toJSON();
|
||||
}
|
||||
return Decimal.fromString(obj.toString());
|
||||
}
|
||||
|
||||
// object created with Object.create(null)
|
||||
if (!objConstructor && isObject(obj)) {
|
||||
return cloneObject(obj, options, isArrayChild);
|
||||
}
|
||||
|
||||
if (typeof obj === 'object' && obj[symbols.schemaTypeSymbol]) {
|
||||
return obj.clone();
|
||||
}
|
||||
|
||||
// If we're cloning this object to go into a MongoDB command,
|
||||
// and there's a `toBSON()` function, assume this object will be
|
||||
// stored as a primitive in MongoDB and doesn't need to be cloned.
|
||||
if (options && options.bson && typeof obj.toBSON === 'function') {
|
||||
return obj;
|
||||
}
|
||||
|
||||
if (typeof obj.valueOf === 'function') {
|
||||
return obj.valueOf();
|
||||
}
|
||||
|
||||
return cloneObject(obj, options, isArrayChild);
|
||||
}
|
||||
module.exports = clone;
|
||||
|
||||
/*!
|
||||
* ignore
|
||||
*/
|
||||
|
||||
function cloneObject(obj, options, isArrayChild) {
|
||||
const minimize = options && options.minimize;
|
||||
const omitUndefined = options && options.omitUndefined;
|
||||
const seen = options && options._seen;
|
||||
const ret = {};
|
||||
let hasKeys;
|
||||
|
||||
if (seen && seen.has(obj)) {
|
||||
return seen.get(obj);
|
||||
} else if (seen) {
|
||||
seen.set(obj, ret);
|
||||
}
|
||||
if (trustedSymbol in obj) {
|
||||
ret[trustedSymbol] = obj[trustedSymbol];
|
||||
}
|
||||
|
||||
let i = 0;
|
||||
let key = '';
|
||||
const keys = Object.keys(obj);
|
||||
const len = keys.length;
|
||||
|
||||
for (i = 0; i < len; ++i) {
|
||||
if (specialProperties.has(key = keys[i])) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Don't pass `isArrayChild` down
|
||||
const val = clone(obj[key], options, false);
|
||||
|
||||
if ((minimize === false || omitUndefined) && typeof val === 'undefined') {
|
||||
delete ret[key];
|
||||
} else if (minimize !== true || (typeof val !== 'undefined')) {
|
||||
hasKeys || (hasKeys = true);
|
||||
ret[key] = val;
|
||||
}
|
||||
}
|
||||
|
||||
return minimize && !isArrayChild ? hasKeys && ret : ret;
|
||||
}
|
||||
|
||||
function cloneArray(arr, options) {
|
||||
let i = 0;
|
||||
const len = arr.length;
|
||||
const ret = new Array(len);
|
||||
for (i = 0; i < len; ++i) {
|
||||
ret[i] = clone(arr[i], options, true);
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
function cloneRegExp(regexp) {
|
||||
const ret = new RegExp(regexp.source, regexp.flags);
|
||||
|
||||
if (ret.lastIndex !== regexp.lastIndex) {
|
||||
ret.lastIndex = regexp.lastIndex;
|
||||
}
|
||||
return ret;
|
||||
}
|
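The doc comment above notes that `minimize: true` drops empty objects so the payload sent to MongoDB stays small. A small illustrative sketch of that behavior, assuming the helper is loaded from this directory; the input object is hypothetical and the annotated results are what the code implies, not captured output:

// Hypothetical usage of the clone helper from lib/helpers.
const clone = require('./clone');

const update = { a: 1, nested: {}, createdAt: new Date('2020-01-01') };

// With `minimize: true`, empty objects are dropped from the result.
console.log(clone(update, { minimize: true }));
// -> { a: 1, createdAt: 2020-01-01T00:00:00.000Z } (the empty `nested` object is gone)

// Without `minimize`, the empty object is preserved.
console.log(clone(update));
// -> { a: 1, nested: {}, createdAt: 2020-01-01T00:00:00.000Z }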
node_modules/mongoose/lib/helpers/common.js (generated, vendored, new file, 127 lines)
@@ -0,0 +1,127 @@
|
|||
'use strict';
|
||||
|
||||
/*!
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
const Binary = require('bson').Binary;
|
||||
const isBsonType = require('./isBsonType');
|
||||
const isMongooseObject = require('./isMongooseObject');
|
||||
const MongooseError = require('../error');
|
||||
const util = require('util');
|
||||
|
||||
exports.flatten = flatten;
|
||||
exports.modifiedPaths = modifiedPaths;
|
||||
|
||||
/*!
|
||||
* ignore
|
||||
*/
|
||||
|
||||
function flatten(update, path, options, schema) {
|
||||
let keys;
|
||||
if (update && isMongooseObject(update) && !Buffer.isBuffer(update)) {
|
||||
keys = Object.keys(update.toObject({ transform: false, virtuals: false }) || {});
|
||||
} else {
|
||||
keys = Object.keys(update || {});
|
||||
}
|
||||
|
||||
const numKeys = keys.length;
|
||||
const result = {};
|
||||
path = path ? path + '.' : '';
|
||||
|
||||
for (let i = 0; i < numKeys; ++i) {
|
||||
const key = keys[i];
|
||||
const val = update[key];
|
||||
result[path + key] = val;
|
||||
|
||||
// Avoid going into mixed paths if schema is specified
|
||||
const keySchema = schema && schema.path && schema.path(path + key);
|
||||
const isNested = schema && schema.nested && schema.nested[path + key];
|
||||
if (keySchema && keySchema.instance === 'Mixed') continue;
|
||||
|
||||
if (shouldFlatten(val)) {
|
||||
if (options && options.skipArrays && Array.isArray(val)) {
|
||||
continue;
|
||||
}
|
||||
const flat = flatten(val, path + key, options, schema);
|
||||
for (const k in flat) {
|
||||
result[k] = flat[k];
|
||||
}
|
||||
if (Array.isArray(val)) {
|
||||
result[path + key] = val;
|
||||
}
|
||||
}
|
||||
|
||||
if (isNested) {
|
||||
const paths = Object.keys(schema.paths);
|
||||
for (const p of paths) {
|
||||
if (p.startsWith(path + key + '.') && !result.hasOwnProperty(p)) {
|
||||
result[p] = void 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/*!
|
||||
* ignore
|
||||
*/
|
||||
|
||||
function modifiedPaths(update, path, result, recursion = null) {
|
||||
if (update == null || typeof update !== 'object') {
|
||||
return;
|
||||
}
|
||||
|
||||
if (recursion == null) {
|
||||
recursion = {
|
||||
raw: { update, path },
|
||||
trace: new WeakSet()
|
||||
};
|
||||
}
|
||||
|
||||
if (recursion.trace.has(update)) {
|
||||
throw new MongooseError(`a circular reference in the update value, updateValue:
|
||||
${util.inspect(recursion.raw.update, { showHidden: false, depth: 1 })}
|
||||
updatePath: '${recursion.raw.path}'`);
|
||||
}
|
||||
recursion.trace.add(update);
|
||||
|
||||
const keys = Object.keys(update || {});
|
||||
const numKeys = keys.length;
|
||||
result = result || {};
|
||||
path = path ? path + '.' : '';
|
||||
|
||||
for (let i = 0; i < numKeys; ++i) {
|
||||
const key = keys[i];
|
||||
let val = update[key];
|
||||
|
||||
const _path = path + key;
|
||||
result[_path] = true;
|
||||
if (!Buffer.isBuffer(val) && isMongooseObject(val)) {
|
||||
val = val.toObject({ transform: false, virtuals: false });
|
||||
}
|
||||
if (shouldFlatten(val)) {
|
||||
modifiedPaths(val, path + key, result, recursion);
|
||||
}
|
||||
}
|
||||
recursion.trace.delete(update);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/*!
|
||||
* ignore
|
||||
*/
|
||||
|
||||
function shouldFlatten(val) {
|
||||
return val &&
|
||||
typeof val === 'object' &&
|
||||
!(val instanceof Date) &&
|
||||
!isBsonType(val, 'ObjectId') &&
|
||||
(!Array.isArray(val) || val.length !== 0) &&
|
||||
!(val instanceof Buffer) &&
|
||||
!isBsonType(val, 'Decimal128') &&
|
||||
!(val instanceof Binary);
|
||||
}
|
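Both `flatten()` and `modifiedPaths()` above walk an update object and record its dotted paths. A minimal sketch of `modifiedPaths()`, assuming it is required straight from this helpers directory with a hypothetical update object:

// Usage sketch of the modifiedPaths helper exported by common.js above.
const { modifiedPaths } = require('./common');

// Every key and nested key of the update is recorded as a dotted path.
const paths = modifiedPaths({ name: { first: 'Ada' }, age: 36 });
console.log(paths);
// -> { name: true, 'name.first': true, age: true }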
node_modules/mongoose/lib/helpers/cursor/eachAsync.js (generated, vendored, new file, 225 lines)
@@ -0,0 +1,225 @@
|
|||
'use strict';
|
||||
|
||||
/*!
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
const EachAsyncMultiError = require('../../error/eachAsyncMultiError');
|
||||
const immediate = require('../immediate');
|
||||
|
||||
/**
|
||||
* Execute `fn` for every document in the cursor. If `fn` returns a promise,
|
||||
* will wait for the promise to resolve before iterating on to the next one.
|
||||
* Returns a promise that resolves when done.
|
||||
*
|
||||
* @param {Function} next the thunk to call to get the next document
|
||||
* @param {Function} fn
|
||||
* @param {Object} options
|
||||
* @param {Number} [options.batchSize=null] if set, Mongoose will call `fn` with an array of at most `batchSize` documents, instead of a single document
|
||||
* @param {Number} [options.parallel=1] maximum number of `fn` calls that Mongoose will run in parallel
|
||||
* @param {AbortSignal} [options.signal] allow cancelling this eachAsync(). Once the abort signal is fired, `eachAsync()` will immediately fulfill the returned promise (or call the callback) and not fetch any more documents.
|
||||
* @return {Promise}
|
||||
* @api public
|
||||
* @method eachAsync
|
||||
*/
|
||||
|
||||
module.exports = async function eachAsync(next, fn, options) {
|
||||
const parallel = options.parallel || 1;
|
||||
const batchSize = options.batchSize;
|
||||
const signal = options.signal;
|
||||
const continueOnError = options.continueOnError;
|
||||
const aggregatedErrors = [];
|
||||
const enqueue = asyncQueue();
|
||||
|
||||
let aborted = false;
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
if (signal != null) {
|
||||
if (signal.aborted) {
|
||||
return resolve(null);
|
||||
}
|
||||
|
||||
signal.addEventListener('abort', () => {
|
||||
aborted = true;
|
||||
return resolve(null);
|
||||
}, { once: true });
|
||||
}
|
||||
|
||||
if (batchSize != null) {
|
||||
if (typeof batchSize !== 'number') {
|
||||
throw new TypeError('batchSize must be a number');
|
||||
} else if (!Number.isInteger(batchSize)) {
|
||||
throw new TypeError('batchSize must be an integer');
|
||||
} else if (batchSize < 1) {
|
||||
throw new TypeError('batchSize must be at least 1');
|
||||
}
|
||||
}
|
||||
|
||||
iterate((err, res) => {
|
||||
if (err != null) {
|
||||
return reject(err);
|
||||
}
|
||||
resolve(res);
|
||||
});
|
||||
});
|
||||
|
||||
function iterate(finalCallback) {
|
||||
let handleResultsInProgress = 0;
|
||||
let currentDocumentIndex = 0;
|
||||
|
||||
let error = null;
|
||||
for (let i = 0; i < parallel; ++i) {
|
||||
enqueue(createFetch());
|
||||
}
|
||||
|
||||
function createFetch() {
|
||||
let documentsBatch = [];
|
||||
let drained = false;
|
||||
|
||||
return fetch;
|
||||
|
||||
function fetch(done) {
|
||||
if (drained || aborted) {
|
||||
return done();
|
||||
} else if (error) {
|
||||
return done();
|
||||
}
|
||||
|
||||
next(function(err, doc) {
|
||||
if (error != null) {
|
||||
return done();
|
||||
}
|
||||
if (err != null) {
|
||||
if (err.name === 'MongoCursorExhaustedError') {
|
||||
// We may end up calling `next()` multiple times on an exhausted
|
||||
// cursor, which leads to an error. In case cursor is exhausted,
|
||||
// just treat it as if the cursor returned no document, which is
|
||||
// how a cursor indicates it is exhausted.
|
||||
doc = null;
|
||||
} else if (continueOnError) {
|
||||
aggregatedErrors.push(err);
|
||||
} else {
|
||||
error = err;
|
||||
finalCallback(err);
|
||||
return done();
|
||||
}
|
||||
}
|
||||
if (doc == null) {
|
||||
drained = true;
|
||||
if (handleResultsInProgress <= 0) {
|
||||
const finalErr = continueOnError ?
|
||||
createEachAsyncMultiError(aggregatedErrors) :
|
||||
error;
|
||||
|
||||
finalCallback(finalErr);
|
||||
} else if (batchSize && documentsBatch.length) {
|
||||
handleNextResult(documentsBatch, currentDocumentIndex++, handleNextResultCallBack);
|
||||
}
|
||||
return done();
|
||||
}
|
||||
|
||||
++handleResultsInProgress;
|
||||
|
||||
// Kick off the subsequent `next()` before handling the result, but
|
||||
// make sure we know that we still have a result to handle re: #8422
|
||||
immediate(() => done());
|
||||
|
||||
if (batchSize) {
|
||||
documentsBatch.push(doc);
|
||||
}
|
||||
|
||||
// If the current documents size is less than the provided batch size don't process the documents yet
|
||||
if (batchSize && documentsBatch.length !== batchSize) {
|
||||
immediate(() => enqueue(fetch));
|
||||
return;
|
||||
}
|
||||
|
||||
const docsToProcess = batchSize ? documentsBatch : doc;
|
||||
|
||||
function handleNextResultCallBack(err) {
|
||||
if (batchSize) {
|
||||
handleResultsInProgress -= documentsBatch.length;
|
||||
documentsBatch = [];
|
||||
} else {
|
||||
--handleResultsInProgress;
|
||||
}
|
||||
if (err != null) {
|
||||
if (continueOnError) {
|
||||
aggregatedErrors.push(err);
|
||||
} else {
|
||||
error = err;
|
||||
return finalCallback(err);
|
||||
}
|
||||
}
|
||||
if ((drained || aborted) && handleResultsInProgress <= 0) {
|
||||
const finalErr = continueOnError ?
|
||||
createEachAsyncMultiError(aggregatedErrors) :
|
||||
error;
|
||||
return finalCallback(finalErr);
|
||||
}
|
||||
|
||||
immediate(() => enqueue(fetch));
|
||||
}
|
||||
|
||||
handleNextResult(docsToProcess, currentDocumentIndex++, handleNextResultCallBack);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function handleNextResult(doc, i, callback) {
|
||||
let maybePromise;
|
||||
try {
|
||||
maybePromise = fn(doc, i);
|
||||
} catch (err) {
|
||||
return callback(err);
|
||||
}
|
||||
if (maybePromise && typeof maybePromise.then === 'function') {
|
||||
maybePromise.then(
|
||||
function() { callback(null); },
|
||||
function(error) {
|
||||
callback(error || new Error('`eachAsync()` promise rejected without error'));
|
||||
});
|
||||
} else {
|
||||
callback(null);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// `next()` can only execute one at a time, so make sure we always execute
|
||||
// `next()` in series, while still allowing multiple `fn()` instances to run
|
||||
// in parallel.
|
||||
function asyncQueue() {
|
||||
const _queue = [];
|
||||
let inProgress = null;
|
||||
let id = 0;
|
||||
|
||||
return function enqueue(fn) {
|
||||
if (
|
||||
inProgress === null &&
|
||||
_queue.length === 0
|
||||
) {
|
||||
inProgress = id++;
|
||||
return fn(_step);
|
||||
}
|
||||
_queue.push(fn);
|
||||
};
|
||||
|
||||
function _step() {
|
||||
if (_queue.length !== 0) {
|
||||
inProgress = id++;
|
||||
const fn = _queue.shift();
|
||||
fn(_step);
|
||||
} else {
|
||||
inProgress = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function createEachAsyncMultiError(aggregatedErrors) {
|
||||
if (aggregatedErrors.length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return new EachAsyncMultiError(aggregatedErrors);
|
||||
}
|
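In normal use this helper is reached through Mongoose's cursor `eachAsync()`; the JSDoc above documents `parallel`, `batchSize`, and `signal`. A minimal sketch that drives the internal helper directly with a hand-written `next` thunk instead of a live MongoDB cursor (the document array and names are hypothetical):

// Drive the internal eachAsync helper with a fake cursor thunk.
const eachAsync = require('./eachAsync');

const docs = [{ _id: 1 }, { _id: 2 }, { _id: 3 }];
let i = 0;

// `next` is a callback-style thunk: it yields the next document,
// or `null` once the cursor is exhausted.
function next(cb) {
  setImmediate(() => cb(null, i < docs.length ? docs[i++] : null));
}

eachAsync(next, async(doc) => {
  console.log('handling', doc._id);
}, { parallel: 1 }).then(() => console.log('done'));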
node_modules/mongoose/lib/helpers/discriminator/areDiscriminatorValuesEqual.js (generated, vendored, new file, 16 lines)
@@ -0,0 +1,16 @@
'use strict';

const isBsonType = require('../isBsonType');

module.exports = function areDiscriminatorValuesEqual(a, b) {
  if (typeof a === 'string' && typeof b === 'string') {
    return a === b;
  }
  if (typeof a === 'number' && typeof b === 'number') {
    return a === b;
  }
  if (isBsonType(a, 'ObjectId') && isBsonType(b, 'ObjectId')) {
    return a.toString() === b.toString();
  }
  return false;
};
node_modules/mongoose/lib/helpers/discriminator/checkEmbeddedDiscriminatorKeyProjection.js (generated, vendored, new file, 12 lines)
@@ -0,0 +1,12 @@
'use strict';

module.exports = function checkEmbeddedDiscriminatorKeyProjection(userProjection, path, schema, selected, addedPaths) {
  const userProjectedInPath = Object.keys(userProjection).
    reduce((cur, key) => cur || key.startsWith(path + '.'), false);
  const _discriminatorKey = path + '.' + schema.options.discriminatorKey;
  if (!userProjectedInPath &&
      addedPaths.length === 1 &&
      addedPaths[0] === _discriminatorKey) {
    selected.splice(selected.indexOf(_discriminatorKey), 1);
  }
};
node_modules/mongoose/lib/helpers/discriminator/getConstructor.js (generated, vendored, new file, 26 lines)
@@ -0,0 +1,26 @@
'use strict';

const getDiscriminatorByValue = require('./getDiscriminatorByValue');

/**
 * Find the correct constructor, taking into account discriminators
 * @api private
 */

module.exports = function getConstructor(Constructor, value) {
  const discriminatorKey = Constructor.schema.options.discriminatorKey;
  if (value != null &&
      Constructor.discriminators &&
      value[discriminatorKey] != null) {
    if (Constructor.discriminators[value[discriminatorKey]]) {
      Constructor = Constructor.discriminators[value[discriminatorKey]];
    } else {
      const constructorByValue = getDiscriminatorByValue(Constructor.discriminators, value[discriminatorKey]);
      if (constructorByValue) {
        Constructor = constructorByValue;
      }
    }
  }

  return Constructor;
};
node_modules/mongoose/lib/helpers/discriminator/getDiscriminatorByValue.js (generated, vendored, new file, 28 lines)
@@ -0,0 +1,28 @@
'use strict';

const areDiscriminatorValuesEqual = require('./areDiscriminatorValuesEqual');

/**
 * returns discriminator by discriminatorMapping.value
 *
 * @param {Object} discriminators
 * @param {string} value
 * @api private
 */

module.exports = function getDiscriminatorByValue(discriminators, value) {
  if (discriminators == null) {
    return null;
  }
  for (const name of Object.keys(discriminators)) {
    const it = discriminators[name];
    if (
      it.schema &&
      it.schema.discriminatorMapping &&
      areDiscriminatorValuesEqual(it.schema.discriminatorMapping.value, value)
    ) {
      return it;
    }
  }
  return null;
};
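The lookup above matches on `schema.discriminatorMapping.value`, not on the model name. A short illustrative sketch with a hand-built stand-in for a model's `discriminators` map (hypothetical data, not a real model):

// Sketch of getDiscriminatorByValue against a fake discriminators map.
const getDiscriminatorByValue = require('./getDiscriminatorByValue');

const discriminators = {
  Admin: { schema: { discriminatorMapping: { value: 'admin' } } },
  Guest: { schema: { discriminatorMapping: { value: 'guest' } } }
};

console.log(getDiscriminatorByValue(discriminators, 'guest') === discriminators.Guest); // true
console.log(getDiscriminatorByValue(discriminators, 'missing')); // null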
node_modules/mongoose/lib/helpers/discriminator/getSchemaDiscriminatorByValue.js (generated, vendored, new file, 27 lines)
@@ -0,0 +1,27 @@
'use strict';

const areDiscriminatorValuesEqual = require('./areDiscriminatorValuesEqual');

/**
 * returns discriminator by discriminatorMapping.value
 *
 * @param {Schema} schema
 * @param {string} value
 * @api private
 */

module.exports = function getSchemaDiscriminatorByValue(schema, value) {
  if (schema == null || schema.discriminators == null) {
    return null;
  }
  for (const key of Object.keys(schema.discriminators)) {
    const discriminatorSchema = schema.discriminators[key];
    if (discriminatorSchema.discriminatorMapping == null) {
      continue;
    }
    if (areDiscriminatorValuesEqual(discriminatorSchema.discriminatorMapping.value, value)) {
      return discriminatorSchema;
    }
  }
  return null;
};
node_modules/mongoose/lib/helpers/discriminator/mergeDiscriminatorSchema.js (generated, vendored, new file, 75 lines)
@@ -0,0 +1,75 @@
|
|||
'use strict';
|
||||
const schemaMerge = require('../schema/merge');
|
||||
const specialProperties = require('../../helpers/specialProperties');
|
||||
const isBsonType = require('../../helpers/isBsonType');
|
||||
const ObjectId = require('../../types/objectid');
|
||||
const isObject = require('../../helpers/isObject');
|
||||
/**
|
||||
* Merges `from` into `to` without overwriting existing properties.
|
||||
*
|
||||
* @param {Object} to
|
||||
* @param {Object} from
|
||||
* @param {String} [path]
|
||||
* @api private
|
||||
*/
|
||||
|
||||
module.exports = function mergeDiscriminatorSchema(to, from, path, seen) {
|
||||
const keys = Object.keys(from);
|
||||
let i = 0;
|
||||
const len = keys.length;
|
||||
let key;
|
||||
|
||||
path = path || '';
|
||||
seen = seen || new WeakSet();
|
||||
|
||||
if (seen.has(from)) {
|
||||
return;
|
||||
}
|
||||
seen.add(from);
|
||||
|
||||
while (i < len) {
|
||||
key = keys[i++];
|
||||
if (!path) {
|
||||
if (key === 'discriminators' ||
|
||||
key === 'base' ||
|
||||
key === '_applyDiscriminators' ||
|
||||
key === '_userProvidedOptions' ||
|
||||
key === 'options') {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
if (path === 'tree' && from != null && from.instanceOfSchema) {
|
||||
continue;
|
||||
}
|
||||
if (specialProperties.has(key)) {
|
||||
continue;
|
||||
}
|
||||
if (to[key] == null) {
|
||||
to[key] = from[key];
|
||||
} else if (isObject(from[key])) {
|
||||
if (!isObject(to[key])) {
|
||||
to[key] = {};
|
||||
}
|
||||
if (from[key] != null) {
|
||||
// Skip merging schemas if we're creating a discriminator schema and
|
||||
// base schema has a given path as a single nested but discriminator schema
|
||||
// has the path as a document array, or vice versa (gh-9534)
|
||||
if ((from[key].$isSingleNested && to[key].$isMongooseDocumentArray) ||
|
||||
(from[key].$isMongooseDocumentArray && to[key].$isSingleNested)) {
|
||||
continue;
|
||||
} else if (from[key].instanceOfSchema) {
|
||||
if (to[key].instanceOfSchema) {
|
||||
schemaMerge(to[key], from[key].clone(), true);
|
||||
} else {
|
||||
to[key] = from[key].clone();
|
||||
}
|
||||
continue;
|
||||
} else if (isBsonType(from[key], 'ObjectId')) {
|
||||
to[key] = new ObjectId(from[key]);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
mergeDiscriminatorSchema(to[key], from[key], path ? path + '.' + key : key, seen);
|
||||
}
|
||||
}
|
||||
};
|
node_modules/mongoose/lib/helpers/document/applyDefaults.js (generated, vendored, new file, 126 lines)
@@ -0,0 +1,126 @@
|
|||
'use strict';
|
||||
|
||||
module.exports = function applyDefaults(doc, fields, exclude, hasIncludedChildren, isBeforeSetters, pathsToSkip) {
|
||||
const paths = Object.keys(doc.$__schema.paths);
|
||||
const plen = paths.length;
|
||||
|
||||
for (let i = 0; i < plen; ++i) {
|
||||
let def;
|
||||
let curPath = '';
|
||||
const p = paths[i];
|
||||
|
||||
if (p === '_id' && doc.$__.skipId) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const type = doc.$__schema.paths[p];
|
||||
const path = type.splitPath();
|
||||
const len = path.length;
|
||||
let included = false;
|
||||
let doc_ = doc._doc;
|
||||
for (let j = 0; j < len; ++j) {
|
||||
if (doc_ == null) {
|
||||
break;
|
||||
}
|
||||
|
||||
const piece = path[j];
|
||||
curPath += (!curPath.length ? '' : '.') + piece;
|
||||
|
||||
if (exclude === true) {
|
||||
if (curPath in fields) {
|
||||
break;
|
||||
}
|
||||
} else if (exclude === false && fields && !included) {
|
||||
const hasSubpaths = type.$isSingleNested || type.$isMongooseDocumentArray;
|
||||
if (curPath in fields || (j === len - 1 && hasSubpaths && hasIncludedChildren != null && hasIncludedChildren[curPath])) {
|
||||
included = true;
|
||||
} else if (hasIncludedChildren != null && !hasIncludedChildren[curPath]) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (j === len - 1) {
|
||||
if (doc_[piece] !== void 0) {
|
||||
break;
|
||||
}
|
||||
|
||||
if (isBeforeSetters != null) {
|
||||
if (typeof type.defaultValue === 'function') {
|
||||
if (!type.defaultValue.$runBeforeSetters && isBeforeSetters) {
|
||||
break;
|
||||
}
|
||||
if (type.defaultValue.$runBeforeSetters && !isBeforeSetters) {
|
||||
break;
|
||||
}
|
||||
} else if (!isBeforeSetters) {
|
||||
// Non-function defaults should always run **before** setters
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
if (pathsToSkip && pathsToSkip[curPath]) {
|
||||
break;
|
||||
}
|
||||
|
||||
if (fields && exclude !== null) {
|
||||
if (exclude === true) {
|
||||
// apply defaults to all non-excluded fields
|
||||
if (p in fields) {
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
def = type.getDefault(doc, false);
|
||||
} catch (err) {
|
||||
doc.invalidate(p, err);
|
||||
break;
|
||||
}
|
||||
|
||||
if (typeof def !== 'undefined') {
|
||||
doc_[piece] = def;
|
||||
applyChangeTracking(doc, p);
|
||||
}
|
||||
} else if (included) {
|
||||
// selected field
|
||||
try {
|
||||
def = type.getDefault(doc, false);
|
||||
} catch (err) {
|
||||
doc.invalidate(p, err);
|
||||
break;
|
||||
}
|
||||
|
||||
if (typeof def !== 'undefined') {
|
||||
doc_[piece] = def;
|
||||
applyChangeTracking(doc, p);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
try {
|
||||
def = type.getDefault(doc, false);
|
||||
} catch (err) {
|
||||
doc.invalidate(p, err);
|
||||
break;
|
||||
}
|
||||
|
||||
if (typeof def !== 'undefined') {
|
||||
doc_[piece] = def;
|
||||
applyChangeTracking(doc, p);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
doc_ = doc_[piece];
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/*!
|
||||
* ignore
|
||||
*/
|
||||
|
||||
function applyChangeTracking(doc, fullPath) {
|
||||
doc.$__.activePaths.default(fullPath);
|
||||
if (doc.$isSubdocument && doc.$isSingleNested && doc.$parent() != null) {
|
||||
doc.$parent().$__.activePaths.default(doc.$__pathRelativeToParent(fullPath));
|
||||
}
|
||||
}
|
node_modules/mongoose/lib/helpers/document/cleanModifiedSubpaths.js (generated, vendored, new file, 35 lines)
@@ -0,0 +1,35 @@
'use strict';

/*!
 * ignore
 */

module.exports = function cleanModifiedSubpaths(doc, path, options) {
  options = options || {};
  const skipDocArrays = options.skipDocArrays;

  let deleted = 0;
  if (!doc) {
    return deleted;
  }

  for (const modifiedPath of Object.keys(doc.$__.activePaths.getStatePaths('modify'))) {
    if (skipDocArrays) {
      const schemaType = doc.$__schema.path(modifiedPath);
      if (schemaType && schemaType.$isMongooseDocumentArray) {
        continue;
      }
    }
    if (modifiedPath.startsWith(path + '.')) {
      doc.$__.activePaths.clearPath(modifiedPath);
      ++deleted;

      if (doc.$isSubdocument) {
        const owner = doc.ownerDocument();
        const fullPath = doc.$__fullPath(modifiedPath);
        owner.$__.activePaths.clearPath(fullPath);
      }
    }
  }
  return deleted;
};
node_modules/mongoose/lib/helpers/document/compile.js (generated, vendored, new file, 228 lines)
@@ -0,0 +1,228 @@
|
|||
'use strict';
|
||||
|
||||
const clone = require('../../helpers/clone');
|
||||
const documentSchemaSymbol = require('../../helpers/symbols').documentSchemaSymbol;
|
||||
const internalToObjectOptions = require('../../options').internalToObjectOptions;
|
||||
const utils = require('../../utils');
|
||||
|
||||
let Document;
|
||||
const getSymbol = require('../../helpers/symbols').getSymbol;
|
||||
const scopeSymbol = require('../../helpers/symbols').scopeSymbol;
|
||||
|
||||
const isPOJO = utils.isPOJO;
|
||||
|
||||
/*!
|
||||
* exports
|
||||
*/
|
||||
|
||||
exports.compile = compile;
|
||||
exports.defineKey = defineKey;
|
||||
|
||||
const _isEmptyOptions = Object.freeze({
|
||||
minimize: true,
|
||||
virtuals: false,
|
||||
getters: false,
|
||||
transform: false
|
||||
});
|
||||
|
||||
/**
|
||||
* Compiles schemas.
|
||||
* @param {Object} tree
|
||||
* @param {Any} proto
|
||||
* @param {String} prefix
|
||||
* @param {Object} options
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function compile(tree, proto, prefix, options) {
|
||||
Document = Document || require('../../document');
|
||||
const typeKey = options.typeKey;
|
||||
|
||||
for (const key of Object.keys(tree)) {
|
||||
const limb = tree[key];
|
||||
|
||||
const hasSubprops = isPOJO(limb) &&
|
||||
Object.keys(limb).length > 0 &&
|
||||
(!limb[typeKey] || (typeKey === 'type' && isPOJO(limb.type) && limb.type.type));
|
||||
const subprops = hasSubprops ? limb : null;
|
||||
|
||||
defineKey({ prop: key, subprops: subprops, prototype: proto, prefix: prefix, options: options });
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Defines the accessor named prop on the incoming prototype.
|
||||
* @param {Object} options
|
||||
* @param {String} options.prop
|
||||
* @param {Boolean} options.subprops
|
||||
* @param {Any} options.prototype
|
||||
* @param {String} [options.prefix]
|
||||
* @param {Object} options.options
|
||||
* @api private
|
||||
*/
|
||||
|
||||
function defineKey({ prop, subprops, prototype, prefix, options }) {
|
||||
Document = Document || require('../../document');
|
||||
const path = (prefix ? prefix + '.' : '') + prop;
|
||||
prefix = prefix || '';
|
||||
|
||||
if (subprops) {
|
||||
Object.defineProperty(prototype, prop, {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
get: function() {
|
||||
const _this = this;
|
||||
if (!this.$__.getters) {
|
||||
this.$__.getters = {};
|
||||
}
|
||||
|
||||
if (!this.$__.getters[path]) {
|
||||
const nested = Object.create(Document.prototype, getOwnPropertyDescriptors(this));
|
||||
|
||||
// save scope for nested getters/setters
|
||||
if (!prefix) {
|
||||
nested.$__[scopeSymbol] = this;
|
||||
}
|
||||
nested.$__.nestedPath = path;
|
||||
|
||||
Object.defineProperty(nested, 'schema', {
|
||||
enumerable: false,
|
||||
configurable: true,
|
||||
writable: false,
|
||||
value: prototype.schema
|
||||
});
|
||||
|
||||
Object.defineProperty(nested, '$__schema', {
|
||||
enumerable: false,
|
||||
configurable: true,
|
||||
writable: false,
|
||||
value: prototype.schema
|
||||
});
|
||||
|
||||
Object.defineProperty(nested, documentSchemaSymbol, {
|
||||
enumerable: false,
|
||||
configurable: true,
|
||||
writable: false,
|
||||
value: prototype.schema
|
||||
});
|
||||
|
||||
Object.defineProperty(nested, 'toObject', {
|
||||
enumerable: false,
|
||||
configurable: true,
|
||||
writable: false,
|
||||
value: function() {
|
||||
return clone(_this.get(path, null, {
|
||||
virtuals: this &&
|
||||
this.schema &&
|
||||
this.schema.options &&
|
||||
this.schema.options.toObject &&
|
||||
this.schema.options.toObject.virtuals || null
|
||||
}));
|
||||
}
|
||||
});
|
||||
|
||||
Object.defineProperty(nested, '$__get', {
|
||||
enumerable: false,
|
||||
configurable: true,
|
||||
writable: false,
|
||||
value: function() {
|
||||
return _this.get(path, null, {
|
||||
virtuals: this && this.schema && this.schema.options && this.schema.options.toObject && this.schema.options.toObject.virtuals || null
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
Object.defineProperty(nested, 'toJSON', {
|
||||
enumerable: false,
|
||||
configurable: true,
|
||||
writable: false,
|
||||
value: function() {
|
||||
return _this.get(path, null, {
|
||||
virtuals: this && this.schema && this.schema.options && this.schema.options.toJSON && this.schema.options.toJSON.virtuals || null
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
Object.defineProperty(nested, '$__isNested', {
|
||||
enumerable: false,
|
||||
configurable: true,
|
||||
writable: false,
|
||||
value: true
|
||||
});
|
||||
|
||||
Object.defineProperty(nested, '$isEmpty', {
|
||||
enumerable: false,
|
||||
configurable: true,
|
||||
writable: false,
|
||||
value: function() {
|
||||
return Object.keys(this.get(path, null, _isEmptyOptions) || {}).length === 0;
|
||||
}
|
||||
});
|
||||
|
||||
Object.defineProperty(nested, '$__parent', {
|
||||
enumerable: false,
|
||||
configurable: true,
|
||||
writable: false,
|
||||
value: this
|
||||
});
|
||||
|
||||
compile(subprops, nested, path, options);
|
||||
this.$__.getters[path] = nested;
|
||||
}
|
||||
|
||||
return this.$__.getters[path];
|
||||
},
|
||||
set: function(v) {
|
||||
if (v != null && v.$__isNested) {
|
||||
// Convert top-level to POJO, but leave subdocs hydrated so `$set`
|
||||
// can handle them. See gh-9293.
|
||||
v = v.$__get();
|
||||
} else if (v instanceof Document && !v.$__isNested) {
|
||||
v = v.$toObject(internalToObjectOptions);
|
||||
}
|
||||
const doc = this.$__[scopeSymbol] || this;
|
||||
doc.$set(path, v);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
Object.defineProperty(prototype, prop, {
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
get: function() {
|
||||
return this[getSymbol].call(this.$__[scopeSymbol] || this, path);
|
||||
},
|
||||
set: function(v) {
|
||||
this.$set.call(this.$__[scopeSymbol] || this, path, v);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// gets descriptors for all properties of `object`
|
||||
// makes all properties non-enumerable to match previous behavior to #2211
|
||||
function getOwnPropertyDescriptors(object) {
|
||||
const result = {};
|
||||
|
||||
Object.getOwnPropertyNames(object).forEach(function(key) {
|
||||
const skip = [
|
||||
'isNew',
|
||||
'$__',
|
||||
'$errors',
|
||||
'errors',
|
||||
'_doc',
|
||||
'$locals',
|
||||
'$op',
|
||||
'__parentArray',
|
||||
'__index',
|
||||
'$isDocumentArrayElement'
|
||||
].indexOf(key) === -1;
|
||||
if (skip) {
|
||||
return;
|
||||
}
|
||||
|
||||
result[key] = Object.getOwnPropertyDescriptor(object, key);
|
||||
result[key].enumerable = false;
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
node_modules/mongoose/lib/helpers/document/getEmbeddedDiscriminatorPath.js (generated, vendored, new file, 50 lines)
@@ -0,0 +1,50 @@
|
|||
'use strict';
|
||||
|
||||
const get = require('../get');
|
||||
const getSchemaDiscriminatorByValue = require('../discriminator/getSchemaDiscriminatorByValue');
|
||||
|
||||
/**
|
||||
* Like `schema.path()`, except with a document, because impossible to
|
||||
* determine path type without knowing the embedded discriminator key.
|
||||
* @param {Document} doc
|
||||
* @param {String} path
|
||||
* @param {Object} [options]
|
||||
* @api private
|
||||
*/
|
||||
|
||||
module.exports = function getEmbeddedDiscriminatorPath(doc, path, options) {
|
||||
options = options || {};
|
||||
const typeOnly = options.typeOnly;
|
||||
const parts = path.indexOf('.') === -1 ? [path] : path.split('.');
|
||||
let schemaType = null;
|
||||
let type = 'adhocOrUndefined';
|
||||
|
||||
const schema = getSchemaDiscriminatorByValue(doc.schema, doc.get(doc.schema.options.discriminatorKey)) || doc.schema;
|
||||
|
||||
for (let i = 0; i < parts.length; ++i) {
|
||||
const subpath = parts.slice(0, i + 1).join('.');
|
||||
schemaType = schema.path(subpath);
|
||||
if (schemaType == null) {
|
||||
type = 'adhocOrUndefined';
|
||||
continue;
|
||||
}
|
||||
if (schemaType.instance === 'Mixed') {
|
||||
return typeOnly ? 'real' : schemaType;
|
||||
}
|
||||
type = schema.pathType(subpath);
|
||||
if ((schemaType.$isSingleNested || schemaType.$isMongooseDocumentArrayElement) &&
|
||||
schemaType.schema.discriminators != null) {
|
||||
const discriminators = schemaType.schema.discriminators;
|
||||
const discriminatorKey = doc.get(subpath + '.' +
|
||||
get(schemaType, 'schema.options.discriminatorKey'));
|
||||
if (discriminatorKey == null || discriminators[discriminatorKey] == null) {
|
||||
continue;
|
||||
}
|
||||
const rest = parts.slice(i + 1).join('.');
|
||||
return getEmbeddedDiscriminatorPath(doc.get(subpath), rest, options);
|
||||
}
|
||||
}
|
||||
|
||||
// Are we getting the whole schema or just the type, 'real', 'nested', etc.
|
||||
return typeOnly ? type : schemaType;
|
||||
};
|
node_modules/mongoose/lib/helpers/document/handleSpreadDoc.js (generated, vendored, new file, 35 lines)
@@ -0,0 +1,35 @@
'use strict';

const utils = require('../../utils');

const keysToSkip = new Set(['__index', '__parentArray', '_doc']);

/**
 * Using spread operator on a Mongoose document gives you a
 * POJO that has a tendency to cause infinite recursion. So
 * we use this function on `set()` to prevent that.
 */

module.exports = function handleSpreadDoc(v, includeExtraKeys) {
  if (utils.isPOJO(v) && v.$__ != null && v._doc != null) {
    if (includeExtraKeys) {
      const extraKeys = {};
      for (const key of Object.keys(v)) {
        if (typeof key === 'symbol') {
          continue;
        }
        if (key[0] === '$') {
          continue;
        }
        if (keysToSkip.has(key)) {
          continue;
        }
        extraKeys[key] = v[key];
      }
      return { ...v._doc, ...extraKeys };
    }
    return v._doc;
  }

  return v;
};
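A spread Mongoose document is a POJO that still carries `$__` and `_doc`, which is the case the helper above unwraps. A minimal sketch with a hand-built stand-in object (hypothetical shape, not a real document):

// Sketch of handleSpreadDoc with a fake "{ ...someMongooseDoc }" object.
const handleSpreadDoc = require('./handleSpreadDoc');

const spreadDoc = {
  $__: { /* internal state */ },
  _doc: { name: 'Ada', age: 36 },
  isNew: false
};

console.log(handleSpreadDoc(spreadDoc));
// -> { name: 'Ada', age: 36 }  (just the raw `_doc`)

// Plain values pass through untouched.
console.log(handleSpreadDoc({ name: 'Ada' }));
// -> { name: 'Ada' }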
node_modules/mongoose/lib/helpers/each.js (generated, vendored, new file, 25 lines)
@@ -0,0 +1,25 @@
'use strict';

module.exports = function each(arr, cb, done) {
  if (arr.length === 0) {
    return done();
  }

  let remaining = arr.length;
  let err = null;
  for (const v of arr) {
    cb(v, function(_err) {
      if (err != null) {
        return;
      }
      if (_err != null) {
        err = _err;
        return done(err);
      }

      if (--remaining <= 0) {
        return done();
      }
    });
  }
};
node_modules/mongoose/lib/helpers/error/combinePathErrors.js (generated, vendored, new file, 22 lines)
@@ -0,0 +1,22 @@
'use strict';

/*!
 * ignore
 */

module.exports = function combinePathErrors(err) {
  const keys = Object.keys(err.errors || {});
  const len = keys.length;
  const msgs = [];
  let key;

  for (let i = 0; i < len; ++i) {
    key = keys[i];
    if (err === err.errors[key]) {
      continue;
    }
    msgs.push(key + ': ' + err.errors[key].message);
  }

  return msgs.join(', ');
};
node_modules/mongoose/lib/helpers/firstKey.js (generated, vendored, new file, 8 lines)
@@ -0,0 +1,8 @@
'use strict';

module.exports = function firstKey(obj) {
  if (obj == null) {
    return null;
  }
  return Object.keys(obj)[0];
};
node_modules/mongoose/lib/helpers/get.js (generated, vendored, new file, 65 lines)
@@ -0,0 +1,65 @@
|
|||
'use strict';
|
||||
|
||||
/**
|
||||
* Simplified lodash.get to work around the annoying null quirk. See:
|
||||
* https://github.com/lodash/lodash/issues/3659
|
||||
* @api private
|
||||
*/
|
||||
|
||||
module.exports = function get(obj, path, def) {
|
||||
let parts;
|
||||
let isPathArray = false;
|
||||
if (typeof path === 'string') {
|
||||
if (path.indexOf('.') === -1) {
|
||||
const _v = getProperty(obj, path);
|
||||
if (_v == null) {
|
||||
return def;
|
||||
}
|
||||
return _v;
|
||||
}
|
||||
|
||||
parts = path.split('.');
|
||||
} else {
|
||||
isPathArray = true;
|
||||
parts = path;
|
||||
|
||||
if (parts.length === 1) {
|
||||
const _v = getProperty(obj, parts[0]);
|
||||
if (_v == null) {
|
||||
return def;
|
||||
}
|
||||
return _v;
|
||||
}
|
||||
}
|
||||
let rest = path;
|
||||
let cur = obj;
|
||||
for (const part of parts) {
|
||||
if (cur == null) {
|
||||
return def;
|
||||
}
|
||||
|
||||
// `lib/cast.js` depends on being able to get dotted paths in updates,
|
||||
// like `{ $set: { 'a.b': 42 } }`
|
||||
if (!isPathArray && cur[rest] != null) {
|
||||
return cur[rest];
|
||||
}
|
||||
|
||||
cur = getProperty(cur, part);
|
||||
|
||||
if (!isPathArray) {
|
||||
rest = rest.substr(part.length + 1);
|
||||
}
|
||||
}
|
||||
|
||||
return cur == null ? def : cur;
|
||||
};
|
||||
|
||||
function getProperty(obj, prop) {
|
||||
if (obj == null) {
|
||||
return obj;
|
||||
}
|
||||
if (obj instanceof Map) {
|
||||
return obj.get(prop);
|
||||
}
|
||||
return obj[prop];
|
||||
}
|
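The comment above points to the lodash.get null quirk: this helper falls back to the default for `null` values, not only for `undefined`. A minimal usage sketch with a hypothetical object:

// Usage sketch of the simplified get helper above.
const get = require('./get');

const doc = { options: { toJSON: { virtuals: true } }, tags: null };

console.log(get(doc, 'options.toJSON.virtuals'));          // true
console.log(get(doc, 'options.missing.deep', 'fallback'));  // 'fallback'
// Unlike lodash.get, a `null` value also falls back to the default.
console.log(get(doc, 'tags', []));                          // []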
node_modules/mongoose/lib/helpers/getConstructorName.js (generated, vendored, new file, 16 lines)
@@ -0,0 +1,16 @@
'use strict';

/**
 * If `val` is an object, returns constructor name, if possible. Otherwise returns undefined.
 * @api private
 */

module.exports = function getConstructorName(val) {
  if (val == null) {
    return void 0;
  }
  if (typeof val.constructor !== 'function') {
    return void 0;
  }
  return val.constructor.name;
};
node_modules/mongoose/lib/helpers/getDefaultBulkwriteResult.js (generated, vendored, new file, 27 lines)
@@ -0,0 +1,27 @@
'use strict';
function getDefaultBulkwriteResult() {
  return {
    result: {
      ok: 1,
      writeErrors: [],
      writeConcernErrors: [],
      insertedIds: [],
      nInserted: 0,
      nUpserted: 0,
      nMatched: 0,
      nModified: 0,
      nRemoved: 0,
      upserted: []
    },
    insertedCount: 0,
    matchedCount: 0,
    modifiedCount: 0,
    deletedCount: 0,
    upsertedCount: 0,
    upsertedIds: {},
    insertedIds: {},
    n: 0
  };
}

module.exports = getDefaultBulkwriteResult;
node_modules/mongoose/lib/helpers/getFunctionName.js (generated, vendored, new file, 10 lines)
@@ -0,0 +1,10 @@
'use strict';

const functionNameRE = /^function\s*([^\s(]+)/;

module.exports = function(fn) {
  return (
    fn.name ||
    (fn.toString().trim().match(functionNameRE) || [])[1]
  );
};
node_modules/mongoose/lib/helpers/immediate.js (generated, vendored, new file, 16 lines)
@@ -0,0 +1,16 @@
/*!
 * Centralize this so we can more easily work around issues with people
 * stubbing out `process.nextTick()` in tests using sinon:
 * https://github.com/sinonjs/lolex#automatically-incrementing-mocked-time
 * See gh-6074
 */

'use strict';

const nextTick = typeof process !== 'undefined' && typeof process.nextTick === 'function' ?
  process.nextTick.bind(process) :
  cb => setTimeout(cb, 0); // Fallback for browser build

module.exports = function immediate(cb) {
  return nextTick(cb);
};
node_modules/mongoose/lib/helpers/indexes/applySchemaCollation.js (generated, vendored, new file, 13 lines)
@@ -0,0 +1,13 @@
'use strict';

const isTextIndex = require('./isTextIndex');

module.exports = function applySchemaCollation(indexKeys, indexOptions, schemaOptions) {
  if (isTextIndex(indexKeys)) {
    return;
  }

  if (schemaOptions.hasOwnProperty('collation') && !indexOptions.hasOwnProperty('collation')) {
    indexOptions.collation = schemaOptions.collation;
  }
};
node_modules/mongoose/lib/helpers/indexes/decorateDiscriminatorIndexOptions.js (generated, vendored, new file, 14 lines)
@@ -0,0 +1,14 @@
'use strict';

module.exports = function decorateDiscriminatorIndexOptions(schema, indexOptions) {
  // If the model is a discriminator and has an index, add a
  // partialFilterExpression by default so the index will only apply
  // to that discriminator.
  const discriminatorName = schema.discriminatorMapping && schema.discriminatorMapping.value;
  if (discriminatorName && !('sparse' in indexOptions)) {
    const discriminatorKey = schema.options.discriminatorKey;
    indexOptions.partialFilterExpression = indexOptions.partialFilterExpression || {};
    indexOptions.partialFilterExpression[discriminatorKey] = discriminatorName;
  }
  return indexOptions;
};
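For a discriminator schema, the helper above adds a `partialFilterExpression` on the discriminator key so the index only applies to that discriminator. A short sketch with a minimal stand-in schema object (hypothetical shape):

// Sketch of decorateDiscriminatorIndexOptions with a fake schema.
const decorateDiscriminatorIndexOptions = require('./decorateDiscriminatorIndexOptions');

const schema = {
  discriminatorMapping: { value: 'Admin' },
  options: { discriminatorKey: '__t' }
};

console.log(decorateDiscriminatorIndexOptions(schema, { unique: true }));
// -> { unique: true, partialFilterExpression: { __t: 'Admin' } }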
node_modules/mongoose/lib/helpers/indexes/getRelatedIndexes.js (generated, vendored, new file, 59 lines)
@@ -0,0 +1,59 @@
|
|||
'use strict';
|
||||
|
||||
function getRelatedSchemaIndexes(model, schemaIndexes) {
|
||||
return getRelatedIndexes({
|
||||
baseModelName: model.baseModelName,
|
||||
discriminatorMapping: model.schema.discriminatorMapping,
|
||||
indexes: schemaIndexes,
|
||||
indexesType: 'schema'
|
||||
});
|
||||
}
|
||||
|
||||
function getRelatedDBIndexes(model, dbIndexes) {
|
||||
return getRelatedIndexes({
|
||||
baseModelName: model.baseModelName,
|
||||
discriminatorMapping: model.schema.discriminatorMapping,
|
||||
indexes: dbIndexes,
|
||||
indexesType: 'db'
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getRelatedSchemaIndexes,
|
||||
getRelatedDBIndexes
|
||||
};
|
||||
|
||||
function getRelatedIndexes({
|
||||
baseModelName,
|
||||
discriminatorMapping,
|
||||
indexes,
|
||||
indexesType
|
||||
}) {
|
||||
const discriminatorKey = discriminatorMapping && discriminatorMapping.key;
|
||||
const discriminatorValue = discriminatorMapping && discriminatorMapping.value;
|
||||
|
||||
if (!discriminatorKey) {
|
||||
return indexes;
|
||||
}
|
||||
|
||||
const isChildDiscriminatorModel = Boolean(baseModelName);
|
||||
if (isChildDiscriminatorModel) {
|
||||
return indexes.filter(index => {
|
||||
const partialFilterExpression = getPartialFilterExpression(index, indexesType);
|
||||
return partialFilterExpression && partialFilterExpression[discriminatorKey] === discriminatorValue;
|
||||
});
|
||||
}
|
||||
|
||||
return indexes.filter(index => {
|
||||
const partialFilterExpression = getPartialFilterExpression(index, indexesType);
|
||||
return !partialFilterExpression || !partialFilterExpression[discriminatorKey];
|
||||
});
|
||||
}
|
||||
|
||||
function getPartialFilterExpression(index, indexesType) {
|
||||
if (indexesType === 'schema') {
|
||||
const options = index[1];
|
||||
return options && options.partialFilterExpression;
|
||||
}
|
||||
return index.partialFilterExpression;
|
||||
}
|
node_modules/mongoose/lib/helpers/indexes/isDefaultIdIndex.js (generated, vendored, new file, 18 lines)
@@ -0,0 +1,18 @@
'use strict';

const get = require('../get');

module.exports = function isDefaultIdIndex(index) {
  if (Array.isArray(index)) {
    // Mongoose syntax
    const keys = Object.keys(index[0]);
    return keys.length === 1 && keys[0] === '_id' && index[0]._id !== 'hashed';
  }

  if (typeof index !== 'object') {
    return false;
  }

  const key = get(index, 'key', {});
  return Object.keys(key).length === 1 && key.hasOwnProperty('_id');
};
node_modules/mongoose/lib/helpers/indexes/isIndexEqual.js (generated, vendored, new file, 96 lines)
@@ -0,0 +1,96 @@
|
|||
'use strict';
|
||||
|
||||
const get = require('../get');
|
||||
const utils = require('../../utils');
|
||||
/**
|
||||
* Given a Mongoose index definition (key + options objects) and a MongoDB server
|
||||
* index definition, determine if the two indexes are equal.
|
||||
*
|
||||
* @param {Object} schemaIndexKeysObject the Mongoose index spec
|
||||
* @param {Object} options the Mongoose index definition's options
|
||||
* @param {Object} dbIndex the index in MongoDB as returned by `listIndexes()`
|
||||
* @api private
|
||||
*/
|
||||
|
||||
module.exports = function isIndexEqual(schemaIndexKeysObject, options, dbIndex) {
|
||||
// Special case: text indexes have a special format in the db. For example,
|
||||
// `{ name: 'text' }` becomes:
|
||||
// {
|
||||
// v: 2,
|
||||
// key: { _fts: 'text', _ftsx: 1 },
|
||||
// name: 'name_text',
|
||||
// ns: 'test.tests',
|
||||
// background: true,
|
||||
// weights: { name: 1 },
|
||||
// default_language: 'english',
|
||||
// language_override: 'language',
|
||||
// textIndexVersion: 3
|
||||
// }
|
||||
if (dbIndex.textIndexVersion != null) {
|
||||
delete dbIndex.key._fts;
|
||||
delete dbIndex.key._ftsx;
|
||||
const weights = { ...dbIndex.weights, ...dbIndex.key };
|
||||
if (Object.keys(weights).length !== Object.keys(schemaIndexKeysObject).length) {
|
||||
return false;
|
||||
}
|
||||
for (const prop of Object.keys(weights)) {
|
||||
if (!(prop in schemaIndexKeysObject)) {
|
||||
return false;
|
||||
}
|
||||
const weight = weights[prop];
|
||||
if (weight !== get(options, 'weights.' + prop) && !(weight === 1 && get(options, 'weights.' + prop) == null)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if (options['default_language'] !== dbIndex['default_language']) {
|
||||
return dbIndex['default_language'] === 'english' && options['default_language'] == null;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
const optionKeys = [
|
||||
'unique',
|
||||
'partialFilterExpression',
|
||||
'sparse',
|
||||
'expireAfterSeconds',
|
||||
'collation'
|
||||
];
|
||||
for (const key of optionKeys) {
|
||||
if (!(key in options) && !(key in dbIndex)) {
|
||||
continue;
|
||||
}
|
||||
if (key === 'collation') {
|
||||
if (options[key] == null || dbIndex[key] == null) {
|
||||
return options[key] == null && dbIndex[key] == null;
|
||||
}
|
||||
const definedKeys = Object.keys(options.collation);
|
||||
const schemaCollation = options.collation;
|
||||
const dbCollation = dbIndex.collation;
|
||||
for (const opt of definedKeys) {
|
||||
if (get(schemaCollation, opt) !== get(dbCollation, opt)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
} else if (!utils.deepEqual(options[key], dbIndex[key])) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
const schemaIndexKeys = Object.keys(schemaIndexKeysObject);
|
||||
const dbIndexKeys = Object.keys(dbIndex.key);
|
||||
if (schemaIndexKeys.length !== dbIndexKeys.length) {
|
||||
return false;
|
||||
}
|
||||
for (let i = 0; i < schemaIndexKeys.length; ++i) {
|
||||
if (schemaIndexKeys[i] !== dbIndexKeys[i]) {
|
||||
return false;
|
||||
}
|
||||
if (!utils.deepEqual(schemaIndexKeysObject[schemaIndexKeys[i]], dbIndex.key[dbIndexKeys[i]])) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
};
|
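The comment inside isIndexEqual describes how the server rewrites a `{ name: 'text' }` index into `_fts`/`_ftsx` keys plus a `weights` map. A sketch of that special case with a hand-built `dbIndex` object shaped like the comment's example (not output captured from a real `listIndexes()` call):

// Sketch of isIndexEqual's text-index comparison.
const isIndexEqual = require('./isIndexEqual');

const schemaIndexKeys = { name: 'text' };
const options = {};

// How the server reports `{ name: 'text' }` back, per the comment above.
const dbIndex = {
  v: 2,
  key: { _fts: 'text', _ftsx: 1 },
  name: 'name_text',
  weights: { name: 1 },
  default_language: 'english',
  language_override: 'language',
  textIndexVersion: 3
};

console.log(isIndexEqual(schemaIndexKeys, options, dbIndex)); // true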
node_modules/mongoose/lib/helpers/indexes/isTextIndex.js (generated, vendored, new file, 16 lines)
@@ -0,0 +1,16 @@
'use strict';

/**
 * Returns `true` if the given index options have a `text` option.
 */

module.exports = function isTextIndex(indexKeys) {
  let isTextIndex = false;
  for (const key of Object.keys(indexKeys)) {
    if (indexKeys[key] === 'text') {
      isTextIndex = true;
    }
  }

  return isTextIndex;
};
node_modules/mongoose/lib/helpers/isAsyncFunction.js (generated, vendored, new file, 9 lines)
@@ -0,0 +1,9 @@
'use strict';

module.exports = function isAsyncFunction(v) {
  return (
    typeof v === 'function' &&
    v.constructor &&
    v.constructor.name === 'AsyncFunction'
  );
};
node_modules/mongoose/lib/helpers/isBsonType.js (generated, vendored, new file, 16 lines)
@@ -0,0 +1,16 @@
'use strict';

/**
 * Get the bson type, if it exists
 * @api private
 */

function isBsonType(obj, typename) {
  return (
    typeof obj === 'object' &&
    obj !== null &&
    obj._bsontype === typename
  );
}

module.exports = isBsonType;
node_modules/mongoose/lib/helpers/isMongooseObject.js (generated, vendored, new file, 22 lines)
@@ -0,0 +1,22 @@
'use strict';

const isMongooseArray = require('../types/array/isMongooseArray').isMongooseArray;
/**
 * Returns if `v` is a mongoose object that has a `toObject()` method we can use.
 *
 * This is for compatibility with libs like Date.js which do foolish things to Natives.
 *
 * @param {Any} v
 * @api private
 */

module.exports = function(v) {
  return (
    v != null && (
      isMongooseArray(v) || // Array or Document Array
      v.$__ != null || // Document
      v.isMongooseBuffer || // Buffer
      v.$isMongooseMap // Map
    )
  );
};
node_modules/mongoose/lib/helpers/isObject.js (generated, vendored, new file, 16 lines)
@@ -0,0 +1,16 @@
'use strict';

/**
 * Determines if `arg` is an object.
 *
 * @param {Object|Array|String|Function|RegExp|any} arg
 * @api private
 * @return {Boolean}
 */

module.exports = function(arg) {
  return (
    Buffer.isBuffer(arg) ||
    Object.prototype.toString.call(arg) === '[object Object]'
  );
};
node_modules/mongoose/lib/helpers/isPOJO.js (generated, vendored, new file, 12 lines)
@@ -0,0 +1,12 @@
'use strict';

module.exports = function isPOJO(arg) {
  if (arg == null || typeof arg !== 'object') {
    return false;
  }
  const proto = Object.getPrototypeOf(arg);
  // Prototype may be null if you used `Object.create(null)`
  // Checking `proto`'s constructor is safe because `getPrototypeOf()`
  // explicitly crosses the boundary from object data to object metadata
  return !proto || proto.constructor.name === 'Object';
};
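The helper above accepts plain objects and null-prototype objects but rejects class instances and arrays. A minimal usage sketch with hypothetical values:

// Sketch of isPOJO behavior.
const isPOJO = require('./isPOJO');

class Point { constructor(x, y) { this.x = x; this.y = y; } }

console.log(isPOJO({ x: 1 }));             // true
console.log(isPOJO(Object.create(null)));  // true (null prototype counts)
console.log(isPOJO(new Point(1, 2)));      // false (class instance)
console.log(isPOJO([1, 2, 3]));            // false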
6
node_modules/mongoose/lib/helpers/isPromise.js
generated
vendored
Normal file

@ -0,0 +1,6 @@
'use strict';
function isPromise(val) {
  return !!val && (typeof val === 'object' || typeof val === 'function') && typeof val.then === 'function';
}

module.exports = isPromise;
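A sketch of the thenable check above (illustrative values):

isPromise(Promise.resolve(42));               // => true
isPromise({ then(resolve) { resolve(1); } }); // => true  (any thenable counts, not just native Promises)
isPromise(async () => {});                    // => false (a function without a `then` property)
isPromise(42);                                // => false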
22
node_modules/mongoose/lib/helpers/isSimpleValidator.js
generated
vendored
Normal file

@ -0,0 +1,22 @@
'use strict';

/**
 * Determines if `arg` is a flat object.
 *
 * @param {Object|Array|String|Function|RegExp|any} arg
 * @api private
 * @return {Boolean}
 */

module.exports = function isSimpleValidator(obj) {
  const keys = Object.keys(obj);
  let result = true;
  for (let i = 0, len = keys.length; i < len; ++i) {
    if (typeof obj[keys[i]] === 'object' && obj[keys[i]] !== null) {
      result = false;
      break;
    }
  }

  return result;
};
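A sketch of the flatness check above; the validator shapes are made up, but the results follow directly from the `typeof` test in the loop:

isSimpleValidator({ min: 0, max: 10, message: 'out of range' }); // => true  (all values are primitives)
isSimpleValidator({ validator: () => true });                    // => true  (functions are typeof 'function', not 'object')
isSimpleValidator({ match: /abc/ });                             // => false (a RegExp is typeof 'object')
isSimpleValidator({ nested: { a: 1 } });                         // => false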
52
node_modules/mongoose/lib/helpers/model/applyDefaultsToPOJO.js
generated
vendored
Normal file

@ -0,0 +1,52 @@
'use strict';

module.exports = function applyDefaultsToPOJO(doc, schema) {
  const paths = Object.keys(schema.paths);
  const plen = paths.length;

  for (let i = 0; i < plen; ++i) {
    let curPath = '';
    const p = paths[i];

    const type = schema.paths[p];
    const path = type.splitPath();
    const len = path.length;
    let doc_ = doc;
    for (let j = 0; j < len; ++j) {
      if (doc_ == null) {
        break;
      }

      const piece = path[j];
      curPath += (!curPath.length ? '' : '.') + piece;

      if (j === len - 1) {
        if (typeof doc_[piece] !== 'undefined') {
          if (type.$isSingleNested) {
            applyDefaultsToPOJO(doc_[piece], type.caster.schema);
          } else if (type.$isMongooseDocumentArray && Array.isArray(doc_[piece])) {
            doc_[piece].forEach(el => applyDefaultsToPOJO(el, type.schema));
          }

          break;
        }

        const def = type.getDefault(doc, false, { skipCast: true });
        if (typeof def !== 'undefined') {
          doc_[piece] = def;

          if (type.$isSingleNested) {
            applyDefaultsToPOJO(def, type.caster.schema);
          } else if (type.$isMongooseDocumentArray && Array.isArray(def)) {
            def.forEach(el => applyDefaultsToPOJO(el, type.schema));
          }
        }
      } else {
        if (doc_[piece] == null) {
          doc_[piece] = {};
        }
        doc_ = doc_[piece];
      }
    }
  }
};
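A hedged sketch of the intended effect: the helper walks every schema path and writes missing defaults into a plain object in place. The schema shape and field names below are made up, and the require path simply mirrors the file location shown in this diff (internal helpers are not a public API, so requiring them directly may not be supported in every mongoose build):

const mongoose = require('mongoose');
// path as shown in the diff above; treat this as an illustration, not a supported import
const applyDefaultsToPOJO = require('mongoose/lib/helpers/model/applyDefaultsToPOJO');

const schema = new mongoose.Schema({
  name: { type: String, default: 'anonymous' },
  settings: { theme: { type: String, default: 'light' } }
});

const doc = { settings: {} };
applyDefaultsToPOJO(doc, schema);
// doc is now roughly { settings: { theme: 'light' }, name: 'anonymous', _id: ... }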
149
node_modules/mongoose/lib/helpers/model/applyHooks.js
generated
vendored
Normal file

@ -0,0 +1,149 @@
'use strict';

const symbols = require('../../schema/symbols');
const promiseOrCallback = require('../promiseOrCallback');

/*!
 * ignore
 */

module.exports = applyHooks;

/*!
 * ignore
 */

applyHooks.middlewareFunctions = [
  'deleteOne',
  'save',
  'validate',
  'remove',
  'updateOne',
  'init'
];

/*!
 * ignore
 */

const alreadyHookedFunctions = new Set(applyHooks.middlewareFunctions.flatMap(fn => ([fn, `$__${fn}`])));

/**
 * Register hooks for this model
 *
 * @param {Model} model
 * @param {Schema} schema
 * @param {Object} options
 * @api private
 */

function applyHooks(model, schema, options) {
  options = options || {};

  const kareemOptions = {
    useErrorHandlers: true,
    numCallbackParams: 1,
    nullResultByDefault: true,
    contextParameter: true
  };
  const objToDecorate = options.decorateDoc ? model : model.prototype;

  model.$appliedHooks = true;
  for (const key of Object.keys(schema.paths)) {
    const type = schema.paths[key];
    let childModel = null;
    if (type.$isSingleNested) {
      childModel = type.caster;
    } else if (type.$isMongooseDocumentArray) {
      childModel = type.Constructor;
    } else {
      continue;
    }

    if (childModel.$appliedHooks) {
      continue;
    }

    applyHooks(childModel, type.schema, options);
    if (childModel.discriminators != null) {
      const keys = Object.keys(childModel.discriminators);
      for (const key of keys) {
        applyHooks(childModel.discriminators[key],
          childModel.discriminators[key].schema, options);
      }
    }
  }

  // Built-in hooks rely on hooking internal functions in order to support
  // promises and make it so that `doc.save.toString()` provides meaningful
  // information.

  const middleware = schema.s.hooks.
    filter(hook => {
      if (hook.name === 'updateOne' || hook.name === 'deleteOne') {
        return !!hook['document'];
      }
      if (hook.name === 'remove' || hook.name === 'init') {
        return hook['document'] == null || !!hook['document'];
      }
      if (hook.query != null || hook.document != null) {
        return hook.document !== false;
      }
      return true;
    }).
    filter(hook => {
      // If user has overwritten the method, don't apply built-in middleware
      if (schema.methods[hook.name]) {
        return !hook.fn[symbols.builtInMiddleware];
      }

      return true;
    });

  model._middleware = middleware;

  objToDecorate.$__originalValidate = objToDecorate.$__originalValidate || objToDecorate.$__validate;

  for (const method of ['save', 'validate', 'remove', 'deleteOne']) {
    const toWrap = method === 'validate' ? '$__originalValidate' : `$__${method}`;
    const wrapped = middleware.
      createWrapper(method, objToDecorate[toWrap], null, kareemOptions);
    objToDecorate[`$__${method}`] = wrapped;
  }
  objToDecorate.$__init = middleware.
    createWrapperSync('init', objToDecorate.$__init, null, kareemOptions);

  // Support hooks for custom methods
  const customMethods = Object.keys(schema.methods);
  const customMethodOptions = Object.assign({}, kareemOptions, {
    // Only use `checkForPromise` for custom methods, because mongoose
    // query thunks are not as consistent as I would like about returning
    // a nullish value rather than the query. If a query thunk returns
    // a query, `checkForPromise` causes infinite recursion
    checkForPromise: true
  });
  for (const method of customMethods) {
    if (alreadyHookedFunctions.has(method)) {
      continue;
    }
    if (!middleware.hasHooks(method)) {
      // Don't wrap if there are no hooks for the custom method to avoid
      // surprises. Also, `createWrapper()` enforces consistent async,
      // so wrapping a sync method would break it.
      continue;
    }
    const originalMethod = objToDecorate[method];
    objToDecorate[method] = function() {
      const args = Array.prototype.slice.call(arguments);
      const cb = args.slice(-1).pop();
      const argsWithoutCallback = typeof cb === 'function' ?
        args.slice(0, args.length - 1) : args;
      return promiseOrCallback(cb, callback => {
        return this[`$__${method}`].apply(this,
          argsWithoutCallback.concat([callback]));
      }, model.events);
    };
    objToDecorate[`$__${method}`] = middleware.
      createWrapper(method, originalMethod, null, customMethodOptions);
  }
}
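The practical effect of this wiring is easiest to see through mongoose's public middleware API. A hedged sketch (the model and field names are made up; the point is only that `pre`/`post` hooks registered on the schema run inside the wrapped `$__save` that `applyHooks()` installs when the model is compiled):

const mongoose = require('mongoose');

const userSchema = new mongoose.Schema({ name: String });
userSchema.pre('save', function() {
  // runs inside the wrapped `$__save` created by applyHooks()
  this.name = this.name.trim();
});
userSchema.post('save', function(doc) {
  console.log('saved', doc.name);
});

// Compiling the model applies the hooks to User.prototype,
// so `new User({ name: '  Ada  ' }).save()` trims the name before writing.
const User = mongoose.model('User', userSchema);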
70
node_modules/mongoose/lib/helpers/model/applyMethods.js
generated
vendored
Normal file

@ -0,0 +1,70 @@
'use strict';

const get = require('../get');
const utils = require('../../utils');

/**
 * Register methods for this model
 *
 * @param {Model} model
 * @param {Schema} schema
 * @api private
 */

module.exports = function applyMethods(model, schema) {
  const Model = require('../../model');

  function apply(method, schema) {
    Object.defineProperty(model.prototype, method, {
      get: function() {
        const h = {};
        for (const k in schema.methods[method]) {
          h[k] = schema.methods[method][k].bind(this);
        }
        return h;
      },
      configurable: true
    });
  }
  for (const method of Object.keys(schema.methods)) {
    const fn = schema.methods[method];
    if (schema.tree.hasOwnProperty(method)) {
      throw new Error('You have a method and a property in your schema both ' +
        'named "' + method + '"');
    }

    // Avoid making custom methods if user sets a method to itself, e.g.
    // `schema.method(save, Document.prototype.save)`. Can happen when
    // calling `loadClass()` with a class that `extends Document`. See gh-12254
    if (typeof fn === 'function' &&
      Model.prototype[method] === fn) {
      delete schema.methods[method];
      continue;
    }

    if (schema.reserved[method] &&
      !get(schema, `methodOptions.${method}.suppressWarning`, false)) {
      utils.warn(`mongoose: the method name "${method}" is used by mongoose ` +
        'internally, overwriting it may cause bugs. If you\'re sure you know ' +
        'what you\'re doing, you can suppress this error by using ' +
        `\`schema.method('${method}', fn, { suppressWarning: true })\`.`);
    }
    if (typeof fn === 'function') {
      model.prototype[method] = fn;
    } else {
      apply(method, schema);
    }
  }

  // Recursively call `applyMethods()` on child schemas
  model.$appliedMethods = true;
  for (const key of Object.keys(schema.paths)) {
    const type = schema.paths[key];
    if (type.$isSingleNested && !type.caster.$appliedMethods) {
      applyMethods(type.caster, type.schema);
    }
    if (type.$isMongooseDocumentArray && !type.Constructor.$appliedMethods) {
      applyMethods(type.Constructor, type.schema);
    }
  }
};
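A hedged sketch of what this registration means from the schema author's side: a plain function on `schema.methods` becomes an instance method, while a nested object of functions goes through the `apply()` getter path shown above and is exposed as a `this`-bound group. Names and schema shape are made up:

const mongoose = require('mongoose');

const userSchema = new mongoose.Schema({ first: String, last: String });

// A plain function is copied onto Model.prototype...
userSchema.methods.fullName = function() {
  return `${this.first} ${this.last}`;
};

// ...while a nested object of functions is exposed via a getter that binds each entry to the document.
userSchema.methods.format = {
  upper: function() { return this.first.toUpperCase(); }
};

const User = mongoose.model('User', userSchema);
const u = new User({ first: 'Ada', last: 'Lovelace' });
u.fullName();     // => 'Ada Lovelace'
u.format.upper(); // => 'ADA'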
71
node_modules/mongoose/lib/helpers/model/applyStaticHooks.js
generated
vendored
Normal file

@ -0,0 +1,71 @@
'use strict';

const middlewareFunctions = require('../query/applyQueryMiddleware').middlewareFunctions;
const promiseOrCallback = require('../promiseOrCallback');

module.exports = function applyStaticHooks(model, hooks, statics) {
  const kareemOptions = {
    useErrorHandlers: true,
    numCallbackParams: 1
  };

  hooks = hooks.filter(hook => {
    // If the custom static overwrites an existing query middleware, don't apply
    // middleware to it by default. This avoids a potential backwards breaking
    // change with plugins like `mongoose-delete` that use statics to overwrite
    // built-in Mongoose functions.
    if (middlewareFunctions.indexOf(hook.name) !== -1) {
      return !!hook.model;
    }
    return hook.model !== false;
  });

  model.$__insertMany = hooks.createWrapper('insertMany',
    model.$__insertMany, model, kareemOptions);

  for (const key of Object.keys(statics)) {
    if (hooks.hasHooks(key)) {
      const original = model[key];

      model[key] = function() {
        const numArgs = arguments.length;
        const lastArg = numArgs > 0 ? arguments[numArgs - 1] : null;
        const cb = typeof lastArg === 'function' ? lastArg : null;
        const args = Array.prototype.slice.
          call(arguments, 0, cb == null ? numArgs : numArgs - 1);
        // Special case: can't use `Kareem#wrap()` because it doesn't currently
        // support wrapped functions that return a promise.
        return promiseOrCallback(cb, callback => {
          hooks.execPre(key, model, args, function(err) {
            if (err != null) {
              return callback(err);
            }

            let postCalled = 0;
            const ret = original.apply(model, args.concat(post));
            if (ret != null && typeof ret.then === 'function') {
              ret.then(res => post(null, res), err => post(err));
            }

            function post(error, res) {
              if (postCalled++ > 0) {
                return;
              }

              if (error != null) {
                return callback(error);
              }

              hooks.execPost(key, model, [res], function(error) {
                if (error != null) {
                  return callback(error);
                }
                callback(null, res);
              });
            }
          });
        }, model.events);
      };
    }
  }
};
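A hedged sketch of the scenario this helper targets: a custom static that has pre/post middleware registered under the same name gets wrapped so the hooks run around the original static. The schema, static, and hook below are made up, and whether a given mongoose version routes hooks to custom statics exactly this way should be verified against its docs:

const mongoose = require('mongoose');

const userSchema = new mongoose.Schema({ email: String });

userSchema.statics.register = async function(email) {
  return this.create({ email });
};

// Because a hook exists for 'register', applyStaticHooks() wraps User.register
// so this pre middleware runs before the original static.
userSchema.pre('register', function(next) {
  console.log('about to register a user');
  next();
});

const User = mongoose.model('User', userSchema);
// await User.register('ada@example.com');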