Initial commit

This commit is contained in:
Arnaud Nelissen
2021-07-16 10:18:13 +02:00
commit 3af7ddab06
5894 changed files with 590836 additions and 0 deletions

1161
node_modules/mongoose/lib/aggregate.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

158
node_modules/mongoose/lib/browser.js generated vendored Normal file
View File

@@ -0,0 +1,158 @@
/* eslint-env browser */
'use strict';
// Install the browser driver first, so any subsequent `require('./driver')`
// in this module graph resolves to the browser implementation.
require('./driver').set(require('./drivers/browser'));
const DocumentProvider = require('./document_provider.js');
const PromiseProvider = require('./promise_provider');
// Flag the document provider as running in a browser environment.
DocumentProvider.setBrowser(true);
/**
 * The Mongoose [Promise](#promise_Promise) constructor.
 *
 * Reads and writes delegate to the `PromiseProvider` storage layer so the
 * promise implementation can be swapped at runtime.
 *
 * @method Promise
 * @api public
 */
Object.defineProperty(exports, 'Promise', {
  get() {
    return PromiseProvider.get();
  },
  set(lib) {
    PromiseProvider.set(lib);
  }
});
/**
 * Storage layer for mongoose promises
 *
 * @method PromiseProvider
 * @api public
 */
exports.PromiseProvider = PromiseProvider;
/**
 * The [MongooseError](#error_MongooseError) constructor.
 *
 * @method Error
 * @api public
 */
exports.Error = require('./error/index');
/**
 * The Mongoose [Schema](#schema_Schema) constructor
 *
 * ####Example:
 *
 *     const mongoose = require('mongoose');
 *     const Schema = mongoose.Schema;
 *     const CatSchema = new Schema(..);
 *
 * @method Schema
 * @api public
 */
exports.Schema = require('./schema');
/**
 * The various Mongoose Types.
 *
 * ####Example:
 *
 *     const mongoose = require('mongoose');
 *     const array = mongoose.Types.Array;
 *
 * ####Types:
 *
 * - [Array](/docs/schematypes.html#arrays)
 * - [Buffer](/docs/schematypes.html#buffers)
 * - [Embedded](/docs/schematypes.html#schemas)
 * - [DocumentArray](/docs/api/documentarraypath.html)
 * - [Decimal128](/docs/api.html#mongoose_Mongoose-Decimal128)
 * - [ObjectId](/docs/schematypes.html#objectids)
 * - [Map](/docs/schematypes.html#maps)
 * - [Subdocument](/docs/schematypes.html#schemas)
 *
 * Using this exposed access to the `ObjectId` type, we can construct ids on demand.
 *
 *     const ObjectId = mongoose.Types.ObjectId;
 *     const id1 = new ObjectId;
 *
 * @property Types
 * @api public
 */
exports.Types = require('./types');
/**
 * The Mongoose [VirtualType](#virtualtype_VirtualType) constructor
 *
 * @method VirtualType
 * @api public
 */
exports.VirtualType = require('./virtualtype');
/**
 * The various Mongoose SchemaTypes.
 *
 * ####Note:
 *
 * _Alias of mongoose.Schema.Types for backwards compatibility._
 *
 * NOTE(review): despite the plural description above, this line exports the
 * `SchemaType` base class from `./schematype.js`, not the `Schema.Types`
 * map — confirm whether `SchemaTypes` was intended here.
 *
 * @property SchemaTypes
 * @see Schema.SchemaTypes #schema_Schema.Types
 * @api public
 */
exports.SchemaType = require('./schematype.js');
/**
 * Internal utils
 *
 * @property utils
 * @api private
 */
exports.utils = require('./utils.js');
/**
 * The Mongoose browser [Document](/api/document.html) constructor.
 *
 * Obtained from the provider configured via `DocumentProvider.setBrowser(true)`
 * at the top of this file.
 *
 * @method Document
 * @api public
 */
exports.Document = DocumentProvider();
/**
 * Return a new browser model. In the browser, a model is just
 * a simplified document with a schema - it does **not** have
 * functions like `findOne()`, etc.
 *
 * @method model
 * @api public
 * @param {String} name
 * @param {Schema} schema
 * @return Class
 */
exports.model = function(name, schema) {
  class Model extends exports.Document {
    constructor(obj, fields) {
      // `schema` is captured from the factory closure; instances only
      // supply their data and (optionally) selected fields.
      super(obj, schema, fields);
    }
  }
  Model.modelName = name;
  return Model;
};
/*!
 * Module exports: in a real browser window, expose mongoose as a global for
 * script-tag usage. `Buffer` is presumably supplied by the bundler — confirm.
 */
if (typeof window !== 'undefined') {
  window.mongoose = module.exports;
  window.Buffer = Buffer;
}

100
node_modules/mongoose/lib/browserDocument.js generated vendored Normal file
View File

@@ -0,0 +1,100 @@
/*!
* Module dependencies.
*/
'use strict';
const NodeJSDocument = require('./document');
const EventEmitter = require('events').EventEmitter;
const MongooseError = require('./error/index');
const Schema = require('./schema');
const ObjectId = require('./types/objectid');
const ValidationError = MongooseError.ValidationError;
const applyHooks = require('./helpers/model/applyHooks');
const isObject = require('./helpers/isObject');
/**
 * Document constructor.
 *
 * @param {Object} obj the values to set
 * @param {Schema|Object} schema the schema for this document; a plain object is wrapped in a `Schema`
 * @param {Object} [fields] optional object containing the fields which were selected in the query returning this document and any populated paths data
 * @param {Boolean} [skipId] bool, should we auto create an ObjectId _id
 * @param {Boolean} [skipInit] passed through to the base document constructor
 * @inherits NodeJS EventEmitter http://nodejs.org/api/events.html#events_class_events_eventemitter
 * @event `init`: Emitted on a document after it has been retrieved from the db and fully hydrated by Mongoose.
 * @event `save`: Emitted when the document is successfully saved
 * @throws {MongooseError.MissingSchemaError} when no schema is provided and none is already set on the instance
 * @api private
 */
function Document(obj, schema, fields, skipId, skipInit) {
  // Support calling without `new`.
  if (!(this instanceof Document)) {
    return new Document(obj, schema, fields, skipId, skipInit);
  }
  if (isObject(schema) && !schema.instanceOfSchema) {
    schema = new Schema(schema);
  }
  // When creating an EmbeddedDocument, it already has a schema and doesn't need an _id
  schema = this.schema || schema;
  // Fix: validate the schema *before* dereferencing it. Previously a missing
  // schema threw a TypeError on `schema.options._id` below instead of the
  // intended MissingSchemaError.
  if (!schema) {
    throw new MongooseError.MissingSchemaError();
  }
  // Generate an ObjectId if it is missing, which requires a schema
  if (!this.schema && schema.options._id) {
    obj = obj || {};
    if (obj._id === undefined) {
      obj._id = new ObjectId();
    }
  }
  this.$__setSchema(schema);
  NodeJSDocument.call(this, obj, fields, skipId, skipInit);
  applyHooks(this, schema, { decorateDoc: true });
  // Copy schema-defined methods and statics directly onto the instance.
  for (const m in schema.methods) {
    this[m] = schema.methods[m];
  }
  for (const s in schema.statics) {
    this[s] = schema.statics[s];
  }
}
/*!
 * Inherit from the NodeJS document
 */
Document.prototype = Object.create(NodeJSDocument.prototype);
Document.prototype.constructor = Document;
/*!
 * ignore
 */
Document.events = new EventEmitter();
/*!
 * Browser doc exposes the event emitter API: each of these static methods
 * forwards to the backing `$emitter` instance.
 */
Document.$emitter = new EventEmitter();
const emitterMethods = [
  'on', 'once', 'emit', 'listeners', 'removeListener', 'setMaxListeners',
  'removeAllListeners', 'addListener'
];
for (const emitterFn of emitterMethods) {
  Document[emitterFn] = function() {
    return Document.$emitter[emitterFn].apply(Document.$emitter, arguments);
  };
}
/*!
 * Module exports.
 */
Document.ValidationError = ValidationError;
module.exports = exports = Document;

364
node_modules/mongoose/lib/cast.js generated vendored Normal file
View File

@@ -0,0 +1,364 @@
'use strict';
/*!
* Module dependencies.
*/
const CastError = require('./error/cast');
const StrictModeError = require('./error/strict');
const Types = require('./schema/index');
const castTextSearch = require('./schema/operators/text');
const get = require('./helpers/get');
const getSchemaDiscriminatorByValue = require('./helpers/discriminator/getSchemaDiscriminatorByValue');
const isOperator = require('./helpers/query/isOperator');
const util = require('util');
const isObject = require('./helpers/isObject');
const isMongooseObject = require('./helpers/isMongooseObject');
const ALLOWED_GEOWITHIN_GEOJSON_TYPES = ['Polygon', 'MultiPolygon'];
/**
* Handles internal casting for query filters.
*
* @param {Schema} schema
* @param {Object} obj Object to cast
* @param {Object} options the query options
* @param {Query} context passed to setters
* @api private
*/
module.exports = function cast(schema, obj, options, context) {
if (Array.isArray(obj)) {
throw new Error('Query filter must be an object, got an array ', util.inspect(obj));
}
if (obj == null) {
return obj;
}
// bson 1.x has the unfortunate tendency to remove filters that have a top-level
// `_bsontype` property. But we should still allow ObjectIds because
// `Collection#find()` has a special case to support `find(objectid)`.
// Should remove this when we upgrade to bson 4.x. See gh-8222, gh-8268
if (obj.hasOwnProperty('_bsontype') && obj._bsontype !== 'ObjectID') {
delete obj._bsontype;
}
if (schema != null && schema.discriminators != null && obj[schema.options.discriminatorKey] != null) {
schema = getSchemaDiscriminatorByValue(schema, obj[schema.options.discriminatorKey]) || schema;
}
const paths = Object.keys(obj);
let i = paths.length;
let _keys;
let any$conditionals;
let schematype;
let nested;
let path;
let type;
let val;
options = options || {};
while (i--) {
path = paths[i];
val = obj[path];
if (path === '$or' || path === '$nor' || path === '$and') {
if (!Array.isArray(val)) {
throw new CastError('Array', val, path);
}
for (let k = 0; k < val.length; ++k) {
if (val[k] == null || typeof val[k] !== 'object') {
throw new CastError('Object', val[k], path + '.' + k);
}
val[k] = cast(schema, val[k], options, context);
}
} else if (path === '$where') {
type = typeof val;
if (type !== 'string' && type !== 'function') {
throw new Error('Must have a string or function for $where');
}
if (type === 'function') {
obj[path] = val.toString();
}
continue;
} else if (path === '$elemMatch') {
val = cast(schema, val, options, context);
} else if (path === '$text') {
val = castTextSearch(val, path);
} else {
if (!schema) {
// no casting for Mixed types
continue;
}
schematype = schema.path(path);
// Check for embedded discriminator paths
if (!schematype) {
const split = path.split('.');
let j = split.length;
while (j--) {
const pathFirstHalf = split.slice(0, j).join('.');
const pathLastHalf = split.slice(j).join('.');
const _schematype = schema.path(pathFirstHalf);
const discriminatorKey = get(_schematype, 'schema.options.discriminatorKey');
// gh-6027: if we haven't found the schematype but this path is
// underneath an embedded discriminator and the embedded discriminator
// key is in the query, use the embedded discriminator schema
if (_schematype != null &&
get(_schematype, 'schema.discriminators') != null &&
discriminatorKey != null &&
pathLastHalf !== discriminatorKey) {
const discriminatorVal = get(obj, pathFirstHalf + '.' + discriminatorKey);
if (discriminatorVal != null) {
schematype = _schematype.schema.discriminators[discriminatorVal].
path(pathLastHalf);
}
}
}
}
if (!schematype) {
// Handle potential embedded array queries
const split = path.split('.');
let j = split.length;
let pathFirstHalf;
let pathLastHalf;
let remainingConds;
// Find the part of the var path that is a path of the Schema
while (j--) {
pathFirstHalf = split.slice(0, j).join('.');
schematype = schema.path(pathFirstHalf);
if (schematype) {
break;
}
}
// If a substring of the input path resolves to an actual real path...
if (schematype) {
// Apply the casting; similar code for $elemMatch in schema/array.js
if (schematype.caster && schematype.caster.schema) {
remainingConds = {};
pathLastHalf = split.slice(j).join('.');
remainingConds[pathLastHalf] = val;
obj[path] = cast(schematype.caster.schema, remainingConds, options, context)[pathLastHalf];
} else {
obj[path] = val;
}
continue;
}
if (isObject(val)) {
// handle geo schemas that use object notation
// { loc: { long: Number, lat: Number }
let geo = '';
if (val.$near) {
geo = '$near';
} else if (val.$nearSphere) {
geo = '$nearSphere';
} else if (val.$within) {
geo = '$within';
} else if (val.$geoIntersects) {
geo = '$geoIntersects';
} else if (val.$geoWithin) {
geo = '$geoWithin';
}
if (geo) {
const numbertype = new Types.Number('__QueryCasting__');
let value = val[geo];
if (val.$maxDistance != null) {
val.$maxDistance = numbertype.castForQueryWrapper({
val: val.$maxDistance,
context: context
});
}
if (val.$minDistance != null) {
val.$minDistance = numbertype.castForQueryWrapper({
val: val.$minDistance,
context: context
});
}
if (geo === '$within') {
const withinType = value.$center
|| value.$centerSphere
|| value.$box
|| value.$polygon;
if (!withinType) {
throw new Error('Bad $within parameter: ' + JSON.stringify(val));
}
value = withinType;
} else if (geo === '$near' &&
typeof value.type === 'string' && Array.isArray(value.coordinates)) {
// geojson; cast the coordinates
value = value.coordinates;
} else if ((geo === '$near' || geo === '$nearSphere' || geo === '$geoIntersects') &&
value.$geometry && typeof value.$geometry.type === 'string' &&
Array.isArray(value.$geometry.coordinates)) {
if (value.$maxDistance != null) {
value.$maxDistance = numbertype.castForQueryWrapper({
val: value.$maxDistance,
context: context
});
}
if (value.$minDistance != null) {
value.$minDistance = numbertype.castForQueryWrapper({
val: value.$minDistance,
context: context
});
}
if (isMongooseObject(value.$geometry)) {
value.$geometry = value.$geometry.toObject({
transform: false,
virtuals: false
});
}
value = value.$geometry.coordinates;
} else if (geo === '$geoWithin') {
if (value.$geometry) {
if (isMongooseObject(value.$geometry)) {
value.$geometry = value.$geometry.toObject({ virtuals: false });
}
const geoWithinType = value.$geometry.type;
if (ALLOWED_GEOWITHIN_GEOJSON_TYPES.indexOf(geoWithinType) === -1) {
throw new Error('Invalid geoJSON type for $geoWithin "' +
geoWithinType + '", must be "Polygon" or "MultiPolygon"');
}
value = value.$geometry.coordinates;
} else {
value = value.$box || value.$polygon || value.$center ||
value.$centerSphere;
if (isMongooseObject(value)) {
value = value.toObject({ virtuals: false });
}
}
}
_cast(value, numbertype, context);
continue;
}
}
if (schema.nested[path]) {
continue;
}
if (options.upsert && options.strict) {
if (options.strict === 'throw') {
throw new StrictModeError(path);
}
throw new StrictModeError(path, 'Path "' + path + '" is not in ' +
'schema, strict mode is `true`, and upsert is `true`.');
} else if (options.strictQuery === 'throw') {
throw new StrictModeError(path, 'Path "' + path + '" is not in ' +
'schema and strictQuery is \'throw\'.');
} else if (options.strictQuery) {
delete obj[path];
}
} else if (val == null) {
continue;
} else if (val.constructor.name === 'Object') {
any$conditionals = Object.keys(val).some(isOperator);
if (!any$conditionals) {
obj[path] = schematype.castForQueryWrapper({
val: val,
context: context
});
} else {
const ks = Object.keys(val);
let $cond;
let k = ks.length;
while (k--) {
$cond = ks[k];
nested = val[$cond];
if ($cond === '$not') {
if (nested && schematype && !schematype.caster) {
_keys = Object.keys(nested);
if (_keys.length && isOperator(_keys[0])) {
for (const key in nested) {
nested[key] = schematype.castForQueryWrapper({
$conditional: key,
val: nested[key],
context: context
});
}
} else {
val[$cond] = schematype.castForQueryWrapper({
$conditional: $cond,
val: nested,
context: context
});
}
continue;
}
cast(schematype.caster ? schematype.caster.schema : schema, nested, options, context);
} else {
val[$cond] = schematype.castForQueryWrapper({
$conditional: $cond,
val: nested,
context: context
});
}
}
}
} else if (Array.isArray(val) && ['Buffer', 'Array'].indexOf(schematype.instance) === -1) {
const casted = [];
const valuesArray = val;
for (const _val of valuesArray) {
casted.push(schematype.castForQueryWrapper({
val: _val,
context: context
}));
}
obj[path] = { $in: casted };
} else {
obj[path] = schematype.castForQueryWrapper({
val: val,
context: context
});
}
}
}
return obj;
};
/*!
 * Casts the scalars of a geo-operator payload (e.g. `$near`, `$geoWithin`)
 * to numbers, recursing into nested arrays and plain objects in place.
 *
 * @param {Array|Object} val coordinates or geo payload; mutated in place
 * @param {SchemaNumber} numbertype number schema type used to cast each scalar
 * @param {Query} context passed through for setters
 */
function _cast(val, numbertype, context) {
  if (Array.isArray(val)) {
    val.forEach(function(item, i) {
      if (Array.isArray(item) || isObject(item)) {
        return _cast(item, numbertype, context);
      }
      val[i] = numbertype.castForQueryWrapper({ val: item, context: context });
    });
  } else {
    const nearKeys = Object.keys(val);
    let nearLen = nearKeys.length;
    while (nearLen--) {
      const nkey = nearKeys[nearLen];
      const item = val[nkey];
      if (Array.isArray(item) || isObject(item)) {
        _cast(item, numbertype, context);
        val[nkey] = item;
      } else {
        // NOTE(review): this branch calls `castForQuery` while every other
        // call site in this file uses `castForQueryWrapper` — confirm the
        // asymmetry is intended.
        val[nkey] = numbertype.castForQuery({ val: item, context: context });
      }
    }
  }
}

32
node_modules/mongoose/lib/cast/boolean.js generated vendored Normal file
View File

@@ -0,0 +1,32 @@
'use strict';
const CastError = require('../error/cast');
/*!
* Given a value, cast it to a boolean, or throw a `CastError` if the value
* cannot be casted. `null` and `undefined` are considered valid.
*
* @param {Any} value
* @param {String} [path] optional the path to set on the CastError
* @return {Boolean|null|undefined}
* @throws {CastError} if `value` is not one of the allowed values
* @api private
*/
module.exports = function castBoolean(value, path) {
if (module.exports.convertToTrue.has(value)) {
return true;
}
if (module.exports.convertToFalse.has(value)) {
return false;
}
if (value == null) {
return value;
}
throw new CastError('boolean', value, path);
};
module.exports.convertToTrue = new Set([true, 'true', 1, '1', 'yes']);
module.exports.convertToFalse = new Set([false, 'false', 0, '0', 'no']);

41
node_modules/mongoose/lib/cast/date.js generated vendored Normal file
View File

@@ -0,0 +1,41 @@
'use strict';
const assert = require('assert');
module.exports = function castDate(value) {
// Support empty string because of empty form values. Originally introduced
// in https://github.com/Automattic/mongoose/commit/efc72a1898fc3c33a319d915b8c5463a22938dfe
if (value == null || value === '') {
return null;
}
if (value instanceof Date) {
assert.ok(!isNaN(value.valueOf()));
return value;
}
let date;
assert.ok(typeof value !== 'boolean');
if (value instanceof Number || typeof value === 'number') {
date = new Date(value);
} else if (typeof value === 'string' && !isNaN(Number(value)) && (Number(value) >= 275761 || Number(value) < -271820)) {
// string representation of milliseconds take this path
date = new Date(Number(value));
} else if (typeof value.valueOf === 'function') {
// support for moment.js. This is also the path strings will take because
// strings have a `valueOf()`
date = new Date(value.valueOf());
} else {
// fallback
date = new Date(value);
}
if (!isNaN(date.valueOf())) {
return date;
}
assert.ok(false);
};

36
node_modules/mongoose/lib/cast/decimal128.js generated vendored Normal file
View File

@@ -0,0 +1,36 @@
'use strict';
const Decimal128Type = require('../types/decimal128');
const assert = require('assert');
module.exports = function castDecimal128(value) {
if (value == null) {
return value;
}
if (typeof value === 'object' && typeof value.$numberDecimal === 'string') {
return Decimal128Type.fromString(value.$numberDecimal);
}
if (value instanceof Decimal128Type) {
return value;
}
if (typeof value === 'string') {
return Decimal128Type.fromString(value);
}
if (Buffer.isBuffer(value)) {
return new Decimal128Type(value);
}
if (typeof value === 'number') {
return Decimal128Type.fromString(String(value));
}
if (typeof value.valueOf === 'function' && typeof value.valueOf() === 'string') {
return Decimal128Type.fromString(value.valueOf());
}
assert.ok(false);
};

43
node_modules/mongoose/lib/cast/number.js generated vendored Normal file
View File

@@ -0,0 +1,43 @@
'use strict';
const assert = require('assert');
/*!
* Given a value, cast it to a number, or throw a `CastError` if the value
* cannot be casted. `null` and `undefined` are considered valid.
*
* @param {Any} value
* @param {String} [path] optional the path to set on the CastError
* @return {Boolean|null|undefined}
* @throws {Error} if `value` is not one of the allowed values
* @api private
*/
module.exports = function castNumber(val) {
if (val == null) {
return val;
}
if (val === '') {
return null;
}
if (typeof val === 'string' || typeof val === 'boolean') {
val = Number(val);
}
assert.ok(!isNaN(val));
if (val instanceof Number) {
return val.valueOf();
}
if (typeof val === 'number') {
return val;
}
if (!Array.isArray(val) && typeof val.valueOf === 'function') {
return Number(val.valueOf());
}
if (val.toString && !Array.isArray(val) && val.toString() == Number(val)) {
return Number(val);
}
assert.ok(false);
};

29
node_modules/mongoose/lib/cast/objectid.js generated vendored Normal file
View File

@@ -0,0 +1,29 @@
'use strict';
const ObjectId = require('../driver').get().ObjectId;
const assert = require('assert');
module.exports = function castObjectId(value) {
if (value == null) {
return value;
}
if (value instanceof ObjectId) {
return value;
}
if (value._id) {
if (value._id instanceof ObjectId) {
return value._id;
}
if (value._id.toString instanceof Function) {
return new ObjectId(value._id.toString());
}
}
if (value.toString instanceof Function) {
return new ObjectId(value.toString());
}
assert.ok(false);
};

37
node_modules/mongoose/lib/cast/string.js generated vendored Normal file
View File

@@ -0,0 +1,37 @@
'use strict';
const CastError = require('../error/cast');
/*!
* Given a value, cast it to a string, or throw a `CastError` if the value
* cannot be casted. `null` and `undefined` are considered valid.
*
* @param {Any} value
* @param {String} [path] optional the path to set on the CastError
* @return {string|null|undefined}
* @throws {CastError}
* @api private
*/
module.exports = function castString(value, path) {
// If null or undefined
if (value == null) {
return value;
}
// handle documents being passed
if (value._id && typeof value._id === 'string') {
return value._id;
}
// Re: gh-647 and gh-3030, we're ok with casting using `toString()`
// **unless** its the default Object.toString, because "[object Object]"
// doesn't really qualify as useful data
if (value.toString &&
value.toString !== Object.prototype.toString &&
!Array.isArray(value)) {
return value.toString();
}
throw new CastError('string', value, path);
};

322
node_modules/mongoose/lib/collection.js generated vendored Normal file
View File

@@ -0,0 +1,322 @@
'use strict';
/*!
* Module dependencies.
*/
const EventEmitter = require('events').EventEmitter;
const STATES = require('./connectionstate');
const immediate = require('./helpers/immediate');
/**
 * Abstract Collection constructor
 *
 * This is the base class that drivers inherit from and implement.
 *
 * @param {String} name name of the collection
 * @param {Connection} conn A MongooseConnection instance
 * @param {Object} [opts] optional collection options; a numeric `capped`
 *   value is normalized to `{ size: capped }`
 * @api public
 */
function Collection(name, conn, opts) {
  if (opts === undefined) {
    opts = {};
  }
  if (opts.capped === undefined) {
    opts.capped = {};
  }
  if (typeof opts.capped === 'number') {
    // Shorthand: a bare number is treated as the cap's `size` option.
    opts.capped = { size: opts.capped };
  }
  this.opts = opts;
  this.name = name;
  this.collectionName = name;
  this.conn = conn;
  this.queue = [];
  this.buffer = true;
  this.emitter = new EventEmitter();
  // If the connection is already open there is nothing to buffer.
  if (STATES.connected === this.conn.readyState) {
    this.onOpen();
  }
}
/**
 * The collection name
 *
 * @api public
 * @property name
 */
Collection.prototype.name;
/**
 * The collection name (set to the same value as `name` in the constructor)
 *
 * @api public
 * @property collectionName
 */
Collection.prototype.collectionName;
/**
 * The Connection instance
 *
 * @api public
 * @property conn
 */
Collection.prototype.conn;
/**
 * Called when the database connects: stops buffering and flushes any
 * queued operations on the next tick.
 *
 * @api private
 */
Collection.prototype.onOpen = function() {
  const collection = this;
  collection.buffer = false;
  immediate(function() {
    collection.doQueue();
  });
};
/**
 * Called when the database disconnects. Re-enables buffering unless the
 * close was forced or buffering is disabled by configuration.
 *
 * @param {Boolean} [force]
 * @api private
 */
Collection.prototype.onClose = function(force) {
  const shouldBuffer = this._shouldBufferCommands();
  if (shouldBuffer && !force) {
    this.buffer = true;
  }
};
/**
 * Queues a method for later execution when its
 * database connection opens.
 *
 * @param {String} name name of the method to queue
 * @param {Array} args arguments to pass to the method when executed
 * @return {Collection} this, for chaining
 * @api private
 */
Collection.prototype.addQueue = function(name, args) {
  const entry = [name, args];
  this.queue.push(entry);
  return this;
};
/**
 * Removes a queued method.
 *
 * Matches on identity of both the name and the args array.
 *
 * @param {String} name name of the queued method
 * @param {Array} args the exact arguments array it was queued with
 * @return {Boolean} whether an entry was removed
 * @api private
 */
Collection.prototype.removeQueue = function(name, args) {
  const index = this.queue.findIndex(function(entry) {
    return entry[0] === name && entry[1] === args;
  });
  if (index < 0) {
    return false;
  }
  this.queue.splice(index, 1);
  return true;
};
/**
 * Executes all queued methods and clears the queue, then emits `queue`
 * on the next tick.
 *
 * Entries may queue either a method name (looked up on `this`) or a bare
 * function.
 *
 * @return {Collection} this, for chaining
 * @api private
 */
Collection.prototype.doQueue = function() {
  for (const [method, args] of this.queue) {
    if (typeof method === 'function') {
      method.apply(this, args);
    } else {
      this[method].apply(this, args);
    }
  }
  this.queue = [];
  immediate(() => {
    this.emitter.emit('queue');
  });
  return this;
};
/*!
 * The methods below are abstract: this base class only defines the driver
 * interface, and each method throws until a driver subclass overrides it.
 */
/**
 * Abstract method that drivers must implement.
 */
Collection.prototype.ensureIndex = function() {
  throw new Error('Collection#ensureIndex unimplemented by driver');
};
/**
 * Abstract method that drivers must implement.
 */
Collection.prototype.createIndex = function() {
  throw new Error('Collection#createIndex unimplemented by driver');
};
/**
 * Abstract method that drivers must implement.
 */
Collection.prototype.findAndModify = function() {
  throw new Error('Collection#findAndModify unimplemented by driver');
};
/**
 * Abstract method that drivers must implement.
 */
Collection.prototype.findOneAndUpdate = function() {
  throw new Error('Collection#findOneAndUpdate unimplemented by driver');
};
/**
 * Abstract method that drivers must implement.
 */
Collection.prototype.findOneAndDelete = function() {
  throw new Error('Collection#findOneAndDelete unimplemented by driver');
};
/**
 * Abstract method that drivers must implement.
 */
Collection.prototype.findOneAndReplace = function() {
  throw new Error('Collection#findOneAndReplace unimplemented by driver');
};
/**
 * Abstract method that drivers must implement.
 */
Collection.prototype.findOne = function() {
  throw new Error('Collection#findOne unimplemented by driver');
};
/**
 * Abstract method that drivers must implement.
 */
Collection.prototype.find = function() {
  throw new Error('Collection#find unimplemented by driver');
};
/**
 * Abstract method that drivers must implement.
 */
Collection.prototype.insert = function() {
  throw new Error('Collection#insert unimplemented by driver');
};
/**
 * Abstract method that drivers must implement.
 */
Collection.prototype.insertOne = function() {
  throw new Error('Collection#insertOne unimplemented by driver');
};
/**
 * Abstract method that drivers must implement.
 */
Collection.prototype.insertMany = function() {
  throw new Error('Collection#insertMany unimplemented by driver');
};
/**
 * Abstract method that drivers must implement.
 */
Collection.prototype.save = function() {
  throw new Error('Collection#save unimplemented by driver');
};
/**
 * Abstract method that drivers must implement.
 */
Collection.prototype.update = function() {
  throw new Error('Collection#update unimplemented by driver');
};
/**
 * Abstract method that drivers must implement.
 */
Collection.prototype.getIndexes = function() {
  throw new Error('Collection#getIndexes unimplemented by driver');
};
/**
 * Abstract method that drivers must implement.
 */
Collection.prototype.mapReduce = function() {
  throw new Error('Collection#mapReduce unimplemented by driver');
};
/**
 * Abstract method that drivers must implement.
 */
Collection.prototype.watch = function() {
  throw new Error('Collection#watch unimplemented by driver');
};
/*!
 * Resolve the effective `bufferCommands` setting: collection options win,
 * then schema-provided options, then the connection's setting.
 */
Collection.prototype._shouldBufferCommands = function _shouldBufferCommands() {
  const opts = this.opts;
  if (opts.bufferCommands != null) {
    return opts.bufferCommands;
  }
  const schemaOptions = opts && opts.schemaUserProvidedOptions;
  if (schemaOptions != null && schemaOptions.bufferCommands != null) {
    return schemaOptions.bufferCommands;
  }
  return this.conn._shouldBufferCommands();
};
/*!
 * Resolve the effective `bufferTimeoutMS`: collection options win, then
 * schema-provided options, then the connection config, then the mongoose
 * base instance, then a 10 second default.
 */
Collection.prototype._getBufferTimeoutMS = function _getBufferTimeoutMS() {
  const conn = this.conn;
  const opts = this.opts;
  if (opts.bufferTimeoutMS != null) {
    return opts.bufferTimeoutMS;
  }
  const schemaOptions = opts && opts.schemaUserProvidedOptions;
  if (schemaOptions != null && schemaOptions.bufferTimeoutMS != null) {
    return schemaOptions.bufferTimeoutMS;
  }
  if (conn.config.bufferTimeoutMS != null) {
    return conn.config.bufferTimeoutMS;
  }
  if (conn.base != null && conn.base.get('bufferTimeoutMS') != null) {
    return conn.base.get('bufferTimeoutMS');
  }
  return 10000;
};
/*!
 * Module exports.
 */
module.exports = Collection;

1509
node_modules/mongoose/lib/connection.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

26
node_modules/mongoose/lib/connectionstate.js generated vendored Normal file
View File

@@ -0,0 +1,26 @@
/*!
 * Connection states: a bidirectional mapping between the numeric ready
 * states and their string names (e.g. `STATES[0] === 'disconnected'` and
 * `STATES.disconnected === 0`).
 */
'use strict';
const STATES = module.exports = exports = Object.create(null);
const stateNames = {
  0: 'disconnected',
  1: 'connected',
  2: 'connecting',
  3: 'disconnecting',
  99: 'uninitialized'
};
for (const code of Object.keys(stateNames)) {
  STATES[code] = stateNames[code];
  STATES[stateNames[code]] = Number(code);
}

369
node_modules/mongoose/lib/cursor/AggregationCursor.js generated vendored Normal file
View File

@@ -0,0 +1,369 @@
/*!
* Module dependencies.
*/
'use strict';
const MongooseError = require('../error/mongooseError');
const Readable = require('stream').Readable;
const promiseOrCallback = require('../helpers/promiseOrCallback');
const eachAsync = require('../helpers/cursor/eachAsync');
const immediate = require('../helpers/immediate');
const util = require('util');
/**
 * An AggregationCursor is a concurrency primitive for processing aggregation
 * results one document at a time. It is analogous to QueryCursor.
 *
 * An AggregationCursor fulfills the Node.js streams3 API,
 * in addition to several other mechanisms for loading documents from MongoDB
 * one at a time.
 *
 * Creating an AggregationCursor executes the model's pre aggregate hooks,
 * but **not** the model's post aggregate hooks.
 *
 * Unless you're an advanced user, do **not** instantiate this class directly.
 * Use [`Aggregate#cursor()`](/docs/api.html#aggregate_Aggregate-cursor) instead.
 *
 * @param {Aggregate} agg
 * @param {Object} options
 * @inherits Readable
 * @event `cursor`: Emitted when the cursor is created
 * @event `error`: Emitted when an error occurred
 * @event `data`: Emitted when the stream is flowing and the next doc is ready
 * @event `end`: Emitted when the stream is exhausted
 * @api public
 */
function AggregationCursor(agg) {
  // Documents flow through one at a time, so the stream runs in object mode.
  Readable.call(this, { objectMode: true });
  this.cursor = null;
  this.agg = agg;
  this._transforms = [];
  this._mongooseOptions = {};
  const model = agg._model;
  // Mongoose-internal flag; strip it before options reach the driver.
  delete agg.options.cursor.useMongooseAggCursor;
  _init(model, this, agg);
}
util.inherits(AggregationCursor, Readable);
/*!
 * Runs the model's pre-aggregate hooks, opens the underlying driver cursor,
 * and announces it via the `cursor` event. When the collection is still
 * buffering (connection not yet open), this waits for its queue to flush.
 */
function _init(model, c, agg) {
  const startCursor = function() {
    model.hooks.execPre('aggregate', agg, function() {
      c.cursor = model.collection.aggregate(agg._pipeline, agg.options || {});
      c.emit('cursor', c.cursor);
    });
  };
  if (model.collection.buffer) {
    model.collection.emitter.once('queue', startCursor);
  } else {
    startCursor();
  }
}
/*!
 * Necessary to satisfy the Readable API.
 *
 * Pulls the next document via `_next`; when the cursor is exhausted, ends
 * the stream with `push(null)` and closes the driver cursor.
 */
AggregationCursor.prototype._read = function() {
  const _this = this;
  _next(this, function(error, doc) {
    if (error) {
      return _this.emit('error', error);
    }
    if (!doc) {
      // No more results: signal end-of-stream, then close the driver cursor.
      _this.push(null);
      _this.cursor.close(function(error) {
        if (error) {
          return _this.emit('error', error);
        }
        // Defer so the stream's own teardown runs first.
        setTimeout(function() {
          // on node >= 14 streams close automatically (gh-8834)
          const isNotClosedAutomatically = !_this.destroyed;
          if (isNotClosedAutomatically) {
            _this.emit('close');
          }
        }, 0);
      });
      return;
    }
    _this.push(doc);
  });
};
// Async iteration over a pre-built aggregation cursor is deliberately
// unsupported: fail loudly instead of silently misbehaving.
if (Symbol.asyncIterator != null) {
  AggregationCursor.prototype[Symbol.asyncIterator] = function() {
    throw new MongooseError('Mongoose does not support using async iterators with an ' +
      'existing aggregation cursor. See http://bit.ly/mongoose-async-iterate-aggregation');
  };
}
/**
* Registers a transform function which subsequently maps documents retrieved
* via the streams interface or `.next()`
*
* ####Example
*
* // Map documents returned by `data` events
* Thing.
* find({ name: /^hello/ }).
* cursor().
* map(function (doc) {
* doc.foo = "bar";
* return doc;
* })
* on('data', function(doc) { console.log(doc.foo); });
*
* // Or map documents returned by `.next()`
* const cursor = Thing.find({ name: /^hello/ }).
* cursor().
* map(function (doc) {
* doc.foo = "bar";
* return doc;
* });
* cursor.next(function(error, doc) {
* console.log(doc.foo);
* });
*
* @param {Function} fn
* @return {AggregationCursor}
* @api public
* @method map
*/
AggregationCursor.prototype.map = function(fn) {
  // Transforms are applied in registration order by `_next()`.
  this._transforms.push(fn);
  return this;
};
/*!
* Marks this cursor as errored
*/
AggregationCursor.prototype._markError = function(error) {
  // The stored error is surfaced on the next `_next()` call.
  this._error = error;
  return this;
};
/**
* Marks this cursor as closed. Will stop streaming and subsequent calls to
* `next()` will error.
*
* @param {Function} callback
* @return {Promise}
* @api public
* @method close
* @emits close
* @see MongoDB driver cursor#close http://mongodb.github.io/node-mongodb-native/2.1/api/Cursor.html#close
*/
AggregationCursor.prototype.close = function(callback) {
  return promiseOrCallback(callback, cb => {
    this.cursor.close(error => {
      if (error) {
        cb(error);
        // Only re-emit when someone is listening, to avoid an
        // unhandled 'error' event crash.
        return this.listeners('error').length > 0 && this.emit('error', error);
      }
      this.emit('close');
      cb(null);
    });
  });
};
/**
* Get the next document from this cursor. Will return `null` when there are
* no documents left.
*
* @param {Function} callback
* @return {Promise}
* @api public
* @method next
*/
AggregationCursor.prototype.next = function(callback) {
  // Supports both callback and promise styles via `promiseOrCallback`.
  return promiseOrCallback(callback, cb => {
    _next(this, cb);
  });
};
/**
* Execute `fn` for every document in the cursor. If `fn` returns a promise,
* will wait for the promise to resolve before iterating on to the next one.
* Returns a promise that resolves when done.
*
* @param {Function} fn
* @param {Object} [options]
* @param {Number} [options.parallel] the number of promises to execute in parallel. Defaults to 1.
* @param {Function} [callback] executed when all docs have been processed
* @return {Promise}
* @api public
* @method eachAsync
*/
AggregationCursor.prototype.eachAsync = function(fn, opts, callback) {
  // Allow `eachAsync(fn, callback)` — options are optional.
  if (typeof opts === 'function') {
    callback = opts;
    opts = {};
  }
  const options = opts || {};
  const cursor = this;
  const fetchNext = function(cb) { return _next(cursor, cb); };
  return eachAsync(fetchNext, fn, options, callback);
};
/**
* Returns an asyncIterator for use with [`for/await/of` loops](https://thecodebarbarian.com/getting-started-with-async-iterators-in-node-js).
* You do not need to call this function explicitly, the JavaScript runtime
* will call it for you.
*
* ####Example
*
* // Async iterator without explicitly calling `cursor()`. Mongoose still
* // creates an AggregationCursor instance internally.
* const agg = Model.aggregate([{ $match: { age: { $gte: 25 } } }]);
* for await (const doc of agg) {
* console.log(doc.name);
* }
*
* // You can also use an AggregationCursor instance for async iteration
* const cursor = Model.aggregate([{ $match: { age: { $gte: 25 } } }]).cursor();
* for await (const doc of cursor) {
* console.log(doc.name);
* }
*
* Node.js 10.x supports async iterators natively without any flags. You can
* enable async iterators in Node.js 8.x using the [`--harmony_async_iteration` flag](https://github.com/tc39/proposal-async-iteration/issues/117#issuecomment-346695187).
*
* **Note:** This function is not set if `Symbol.asyncIterator` is undefined. If
* `Symbol.asyncIterator` is undefined, that means your Node.js version does not
* support async iterators.
*
* @method Symbol.asyncIterator
* @memberOf AggregationCursor
* @instance
* @api public
*/
if (Symbol.asyncIterator != null) {
  AggregationCursor.prototype[Symbol.asyncIterator] = function() {
    // `transformNull()` plus the iterator transform convert a `null` doc
    // into `{ done: true }` so `for await` loops terminate cleanly.
    return this.transformNull()._transformForAsyncIterator();
  };
}
/*!
* ignore
*/
AggregationCursor.prototype._transformForAsyncIterator = function() {
  // Register the iterator transform at most once.
  const alreadyRegistered = this._transforms.indexOf(_transformForAsyncIterator) !== -1;
  if (!alreadyRegistered) {
    this.map(_transformForAsyncIterator);
  }
  return this;
};
/*!
* ignore
*/
AggregationCursor.prototype.transformNull = function(val) {
  // Called with no argument, default to `true`. (A default parameter is
  // deliberately not used so an explicit `undefined` is preserved.)
  const value = arguments.length === 0 ? true : val;
  this._mongooseOptions.transformNull = value;
  return this;
};
/*!
* ignore
*/
/*!
 * Maps a raw doc into the `{ value, done }` shape the async-iterator
 * protocol expects; `null`/`undefined` signals exhaustion.
 */
function _transformForAsyncIterator(doc) {
  if (doc == null) {
    return { done: true };
  }
  return { value: doc, done: false };
}
/**
* Adds a [cursor flag](http://mongodb.github.io/node-mongodb-native/2.2/api/Cursor.html#addCursorFlag).
* Useful for setting the `noCursorTimeout` and `tailable` flags.
*
* @param {String} flag
* @param {Boolean} value
* @return {AggregationCursor} this
* @api public
* @method addCursorFlag
*/
AggregationCursor.prototype.addCursorFlag = function(flag, value) {
  // Defer until the driver cursor exists (it is created asynchronously).
  _waitForCursor(this, () => {
    this.cursor.addCursorFlag(flag, value);
  });
  return this;
};
/*!
* ignore
*/
/*!
 * Invokes `cb` immediately when the driver cursor already exists,
 * otherwise once the 'cursor' event fires.
 */
function _waitForCursor(ctx, cb) {
  if (ctx.cursor) {
    cb();
    return;
  }
  ctx.once('cursor', () => cb());
}
/*!
* Get the next doc from the underlying cursor and mongooseify it
* (populate, etc.)
*/
/*!
 * Gets the next doc from the underlying driver cursor, applying any
 * registered `.map()` transforms. A stored `_error` always wins; if the
 * driver cursor doesn't exist yet, retries once it is created.
 */
function _next(ctx, cb) {
  // Wrap the callback so registered transforms run on each doc. `null`
  // docs skip transforms unless `transformNull` is enabled.
  const callback = !ctx._transforms.length ? cb : function(err, doc) {
    const skipTransforms = err || (doc === null && !ctx._mongooseOptions.transformNull);
    if (skipTransforms) {
      return cb(err, doc);
    }
    let transformed = doc;
    for (const fn of ctx._transforms) {
      transformed = fn(transformed);
    }
    cb(err, transformed);
  };
  if (ctx._error) {
    // Report the stored error asynchronously.
    return immediate(function() {
      callback(ctx._error);
    });
  }
  if (!ctx.cursor) {
    // Driver cursor not created yet — retry once it exists.
    ctx.once('cursor', function() {
      _next(ctx, cb);
    });
    return;
  }
  return ctx.cursor.next(function(error, doc) {
    if (error) {
      return callback(error);
    }
    // Normalize every falsy driver result to `null` (end of results).
    callback(null, doc || null);
  });
}
module.exports = AggregationCursor;

61
node_modules/mongoose/lib/cursor/ChangeStream.js generated vendored Normal file
View File

@@ -0,0 +1,61 @@
'use strict';
/*!
* Module dependencies.
*/
const EventEmitter = require('events').EventEmitter;
/*!
* ignore
*/
/*!
 * Thin EventEmitter wrapper around a driver change stream. The thunk
 * defers creating the driver stream until the underlying connection is
 * usable (buffering); driver events are re-emitted on this wrapper.
 */
class ChangeStream extends EventEmitter {
  constructor(changeStreamThunk, pipeline, options) {
    super();
    this.driverChangeStream = null;
    this.closed = false;
    this.pipeline = pipeline;
    this.options = options;
    // This wrapper is necessary because of buffering.
    changeStreamThunk((err, driverChangeStream) => {
      if (err != null) {
        this.emit('error', err);
        return;
      }
      this.driverChangeStream = driverChangeStream;
      this._bindEvents();
      this.emit('ready');
    });
  }
  _bindEvents() {
    // Track closure of the underlying stream...
    this.driverChangeStream.on('close', () => {
      this.closed = true;
    });
    // ...and forward every relevant driver event to listeners on `this`.
    for (const eventName of ['close', 'change', 'end', 'error']) {
      this.driverChangeStream.on(eventName, data => this.emit(eventName, data));
    }
  }
  _queue(cb) {
    this.once('ready', () => cb());
  }
  close() {
    this.closed = true;
    if (this.driverChangeStream) {
      this.driverChangeStream.close();
    }
  }
}
/*!
* ignore
*/
module.exports = ChangeStream;

507
node_modules/mongoose/lib/cursor/QueryCursor.js generated vendored Normal file
View File

@@ -0,0 +1,507 @@
/*!
* Module dependencies.
*/
'use strict';
const Readable = require('stream').Readable;
const promiseOrCallback = require('../helpers/promiseOrCallback');
const eachAsync = require('../helpers/cursor/eachAsync');
const helpers = require('../queryhelpers');
const immediate = require('../helpers/immediate');
const util = require('util');
/**
* A QueryCursor is a concurrency primitive for processing query results
* one document at a time. A QueryCursor fulfills the Node.js streams3 API,
* in addition to several other mechanisms for loading documents from MongoDB
* one at a time.
*
* QueryCursors execute the model's pre `find` hooks before loading any documents
* from MongoDB, and the model's post `find` hooks after loading each document.
*
* Unless you're an advanced user, do **not** instantiate this class directly.
* Use [`Query#cursor()`](/docs/api.html#query_Query-cursor) instead.
*
* @param {Query} query
* @param {Object} options query options passed to `.find()`
* @inherits Readable
* @event `cursor`: Emitted when the cursor is created
* @event `error`: Emitted when an error occurred
* @event `data`: Emitted when the stream is flowing and the next doc is ready
* @event `end`: Emitted when the stream is exhausted
* @api public
*/
function QueryCursor(query, options) {
  // Readable in object mode: each chunk pushed downstream is one document.
  Readable.call(this, { objectMode: true });
  this.cursor = null; // underlying driver cursor; set after pre-find hooks run
  this.query = query;
  const _this = this;
  const model = query.model;
  this._mongooseOptions = {};
  this._transforms = [];
  this.model = model;
  this.options = options || {};
  model.hooks.execPre('find', query, () => {
    // Inherit transforms already registered on the query itself.
    this._transforms = this._transforms.concat(query._transforms.slice());
    if (this.options.transform) {
      this._transforms.push(options.transform);
    }
    // Re: gh-8039, you need to set the `cursor.batchSize` option, top-level
    // `batchSize` option doesn't work.
    if (this.options.batchSize) {
      this.options.cursor = options.cursor || {};
      this.options.cursor.batchSize = options.batchSize;
    }
    model.collection.find(query._conditions, this.options, function(err, cursor) {
      if (_this._error) {
        // `_markError()` fired while hooks ran: discard the driver cursor
        // and surface the stored error instead (only if anyone listens).
        if (cursor != null) {
          cursor.close(function() {});
        }
        _this.emit('cursor', null);
        _this.listeners('error').length > 0 && _this.emit('error', _this._error);
        return;
      }
      if (err) {
        return _this.emit('error', err);
      }
      _this.cursor = cursor;
      _this.emit('cursor', cursor);
    });
  });
}
util.inherits(QueryCursor, Readable);
/*!
* Necessary to satisfy the Readable API
*/
QueryCursor.prototype._read = function() {
  const _this = this;
  _next(this, function(error, doc) {
    if (error) {
      return _this.emit('error', error);
    }
    if (!doc) {
      // Cursor exhausted: end the stream, then close the driver cursor.
      _this.push(null);
      _this.cursor.close(function(error) {
        if (error) {
          return _this.emit('error', error);
        }
        setTimeout(function() {
          // on node >= 14 streams close automatically (gh-8834), so only
          // emit 'close' manually when the stream wasn't destroyed.
          const isNotClosedAutomatically = !_this.destroyed;
          if (isNotClosedAutomatically) {
            _this.emit('close');
          }
        }, 0);
      });
      return;
    }
    _this.push(doc);
  });
};
/**
* Registers a transform function which subsequently maps documents retrieved
* via the streams interface or `.next()`
*
* ####Example
*
* // Map documents returned by `data` events
* Thing.
* find({ name: /^hello/ }).
* cursor().
* map(function (doc) {
* doc.foo = "bar";
* return doc;
* })
* on('data', function(doc) { console.log(doc.foo); });
*
* // Or map documents returned by `.next()`
* const cursor = Thing.find({ name: /^hello/ }).
* cursor().
* map(function (doc) {
* doc.foo = "bar";
* return doc;
* });
* cursor.next(function(error, doc) {
* console.log(doc.foo);
* });
*
* @param {Function} fn
* @return {QueryCursor}
* @api public
* @method map
*/
QueryCursor.prototype.map = function(fn) {
  // Transforms are applied in registration order (with the cursor as
  // `this`) by `_next()`.
  this._transforms.push(fn);
  return this;
};
/*!
* Marks this cursor as errored
*/
QueryCursor.prototype._markError = function(error) {
  // The stored error is surfaced on the next `_next()` call.
  this._error = error;
  return this;
};
/**
* Marks this cursor as closed. Will stop streaming and subsequent calls to
* `next()` will error.
*
* @param {Function} callback
* @return {Promise}
* @api public
* @method close
* @emits close
* @see MongoDB driver cursor#close http://mongodb.github.io/node-mongodb-native/2.1/api/Cursor.html#close
*/
QueryCursor.prototype.close = function(callback) {
  return promiseOrCallback(callback, cb => {
    this.cursor.close(error => {
      if (error) {
        cb(error);
        // Only re-emit when someone is listening, to avoid an
        // unhandled 'error' event crash.
        return this.listeners('error').length > 0 && this.emit('error', error);
      }
      this.emit('close');
      cb(null);
    });
  }, this.model.events);
};
/**
* Get the next document from this cursor. Will return `null` when there are
* no documents left.
*
* @param {Function} callback
* @return {Promise}
* @api public
* @method next
*/
QueryCursor.prototype.next = function(callback) {
  // Supports both callback and promise styles via `promiseOrCallback`.
  return promiseOrCallback(callback, cb => {
    _next(this, (error, doc) => (error ? cb(error) : cb(null, doc)));
  }, this.model.events);
};
/**
* Execute `fn` for every document in the cursor. If `fn` returns a promise,
* will wait for the promise to resolve before iterating on to the next one.
* Returns a promise that resolves when done.
*
* ####Example
*
* // Iterate over documents asynchronously
* Thing.
* find({ name: /^hello/ }).
* cursor().
* eachAsync(async function (doc, i) {
* doc.foo = doc.bar + i;
* await doc.save();
* })
*
* @param {Function} fn
* @param {Object} [options]
* @param {Number} [options.parallel] the number of promises to execute in parallel. Defaults to 1.
* @param {Function} [callback] executed when all docs have been processed
* @return {Promise}
* @api public
* @method eachAsync
*/
QueryCursor.prototype.eachAsync = function(fn, opts, callback) {
  // Allow `eachAsync(fn, callback)` — options are optional.
  if (typeof opts === 'function') {
    callback = opts;
    opts = {};
  }
  const options = opts || {};
  const cursor = this;
  const fetchNext = function(cb) { return _next(cursor, cb); };
  return eachAsync(fetchNext, fn, options, callback);
};
/**
* The `options` passed in to the `QueryCursor` constructor.
*
* @api public
* @property options
*/
QueryCursor.prototype.options;
/**
* Adds a [cursor flag](http://mongodb.github.io/node-mongodb-native/2.2/api/Cursor.html#addCursorFlag).
* Useful for setting the `noCursorTimeout` and `tailable` flags.
*
* @param {String} flag
* @param {Boolean} value
* @return {AggregationCursor} this
* @api public
* @method addCursorFlag
*/
QueryCursor.prototype.addCursorFlag = function(flag, value) {
  // Defer until the driver cursor exists (it is created asynchronously).
  _waitForCursor(this, () => {
    this.cursor.addCursorFlag(flag, value);
  });
  return this;
};
/*!
* ignore
*/
QueryCursor.prototype.transformNull = function(val) {
  // Called with no argument, default to `true`. (A default parameter is
  // deliberately not used so an explicit `undefined` is preserved.)
  const value = arguments.length === 0 ? true : val;
  this._mongooseOptions.transformNull = value;
  return this;
};
/*!
* ignore
*/
QueryCursor.prototype._transformForAsyncIterator = function() {
  // Register the iterator transform at most once.
  const alreadyRegistered = this._transforms.indexOf(_transformForAsyncIterator) !== -1;
  if (!alreadyRegistered) {
    this.map(_transformForAsyncIterator);
  }
  return this;
};
/**
* Returns an asyncIterator for use with [`for/await/of` loops](https://thecodebarbarian.com/getting-started-with-async-iterators-in-node-js).
* You do not need to call this function explicitly, the JavaScript runtime
* will call it for you.
*
* ####Example
*
* // Works without using `cursor()`
* for await (const doc of Model.find([{ $sort: { name: 1 } }])) {
* console.log(doc.name);
* }
*
* // Can also use `cursor()`
* for await (const doc of Model.find([{ $sort: { name: 1 } }]).cursor()) {
* console.log(doc.name);
* }
*
* Node.js 10.x supports async iterators natively without any flags. You can
* enable async iterators in Node.js 8.x using the [`--harmony_async_iteration` flag](https://github.com/tc39/proposal-async-iteration/issues/117#issuecomment-346695187).
*
* **Note:** This function is not set if `Symbol.asyncIterator` is undefined. If
* `Symbol.asyncIterator` is undefined, that means your Node.js version does not
* support async iterators.
*
* @method Symbol.asyncIterator
* @memberOf Query
* @instance
* @api public
*/
if (Symbol.asyncIterator != null) {
  QueryCursor.prototype[Symbol.asyncIterator] = function() {
    // `transformNull()` plus the iterator transform convert a `null` doc
    // into `{ done: true }` so `for await` loops terminate cleanly.
    return this.transformNull()._transformForAsyncIterator();
  };
}
/*!
* ignore
*/
/*!
 * Maps a raw doc into the `{ value, done }` shape the async-iterator
 * protocol expects; `null`/`undefined` signals exhaustion.
 */
function _transformForAsyncIterator(doc) {
  if (doc == null) {
    return { done: true };
  }
  return { value: doc, done: false };
}
/*!
* Get the next doc from the underlying cursor and mongooseify it
* (populate, etc.)
*/
function _next(ctx, cb) {
  let callback = cb;
  // If `.map()` transforms are registered, run them in order on every doc
  // before handing it to the caller. `null` docs skip transforms unless
  // `transformNull` was enabled.
  if (ctx._transforms.length) {
    callback = function(err, doc) {
      if (err || (doc === null && !ctx._mongooseOptions.transformNull)) {
        return cb(err, doc);
      }
      cb(err, ctx._transforms.reduce(function(doc, fn) {
        return fn.call(ctx, doc);
      }, doc));
    };
  }
  // A stored error (see `_markError()`) always wins; report it async.
  if (ctx._error) {
    return immediate(function() {
      callback(ctx._error);
    });
  }
  if (ctx.cursor) {
    // Lazily prepare populate options once per cursor.
    if (ctx.query._mongooseOptions.populate && !ctx._pop) {
      ctx._pop = helpers.preparePopulationOptionsMQ(ctx.query,
        ctx.query._mongooseOptions);
      ctx._pop.__noPromise = true;
    }
    if (ctx.query._mongooseOptions.populate && ctx.options.batchSize > 1) {
      // Populating with batchSize > 1: fetch/populate docs in batches and
      // serve them from the `_batchDocs` cache.
      if (ctx._batchDocs && ctx._batchDocs.length) {
        // Return a cached populated doc
        return _nextDoc(ctx, ctx._batchDocs.shift(), ctx._pop, callback);
      } else if (ctx._batchExhausted) {
        // Internal cursor reported no more docs. Act the same here
        return callback(null, null);
      } else {
        // Request as many docs as batchSize, to populate them also in batch
        ctx._batchDocs = [];
        return ctx.cursor.next(_onNext.bind({ ctx, callback }));
      }
    } else {
      return ctx.cursor.next(function(error, doc) {
        if (error) {
          return callback(error);
        }
        if (!doc) {
          return callback(null, null);
        }
        if (!ctx.query._mongooseOptions.populate) {
          return _nextDoc(ctx, doc, null, callback);
        }
        ctx.query.model.populate(doc, ctx._pop, function(err, doc) {
          if (err) {
            return callback(err);
          }
          return _nextDoc(ctx, doc, ctx._pop, callback);
        });
      });
    }
  } else {
    // Driver cursor not created yet; wait for it. A `null` cursor means
    // initialization failed (error already emitted), so never call back.
    ctx.once('cursor', function(cursor) {
      if (cursor == null) {
        return;
      }
      _next(ctx, cb);
    });
  }
}
/*!
* ignore
*/
// Accumulates raw docs into `ctx._batchDocs` until `batchSize` docs have
// been fetched (or the cursor is exhausted), then populates them in batch.
// Invoked with `{ ctx, callback }` bound as `this`.
function _onNext(error, doc) {
  if (error) {
    return this.callback(error);
  }
  if (!doc) {
    this.ctx._batchExhausted = true;
    return _populateBatch.call(this);
  }
  this.ctx._batchDocs.push(doc);
  if (this.ctx._batchDocs.length < this.ctx.options.batchSize) {
    this.ctx.cursor.next(_onNext.bind(this));
  } else {
    _populateBatch.call(this);
  }
}
/*!
* ignore
*/
// Populates all buffered batch docs at once, then hands back the first
// one; the rest stay cached for later `_next()` calls. Invoked with
// `{ ctx, callback }` bound as `this`.
function _populateBatch() {
  if (!this.ctx._batchDocs.length) {
    return this.callback(null, null);
  }
  const _this = this;
  this.ctx.query.model.populate(this.ctx._batchDocs, this.ctx._pop, function(err) {
    if (err) {
      return _this.callback(err);
    }
    _nextDoc(_this.ctx, _this.ctx._batchDocs.shift(), _this.ctx._pop, _this.callback);
  });
}
/*!
* ignore
*/
function _nextDoc(ctx, doc, pop, callback) {
  // Lean queries skip hydration: run post-find hooks on the raw doc.
  if (ctx.query._mongooseOptions.lean) {
    return ctx.model.hooks.execPost('find', ctx.query, [[doc]], err => {
      if (err != null) {
        return callback(err);
      }
      callback(null, doc);
    });
  }
  // Otherwise hydrate into a full mongoose document first, then run the
  // post-find hooks on the hydrated instance.
  _create(ctx, doc, pop, (err, doc) => {
    if (err != null) {
      return callback(err);
    }
    ctx.model.hooks.execPost('find', ctx.query, [[doc]], err => {
      if (err != null) {
        return callback(err);
      }
      callback(null, doc);
    });
  });
}
/*!
* ignore
*/
/*!
 * Invokes `cb` immediately when the driver cursor already exists,
 * otherwise once the 'cursor' event fires. A `null` cursor means
 * initialization failed, so `cb` is never invoked in that case.
 */
function _waitForCursor(ctx, cb) {
  if (ctx.cursor) {
    cb();
    return;
  }
  ctx.once('cursor', cursor => {
    if (cursor == null) {
      return;
    }
    cb();
  });
}
/*!
* Convert a raw doc into a full mongoose doc.
*/
function _create(ctx, doc, populatedIds, cb) {
  // Build a model instance for the raw doc, respecting field selection.
  const instance = helpers.createModel(ctx.query.model, doc, ctx.query._fields);
  const opts = populatedIds ?
    { populated: populatedIds } :
    undefined;
  instance.init(doc, opts, function(err) {
    if (err) {
      return cb(err);
    }
    cb(null, instance);
  });
}
module.exports = QueryCursor;

4269
node_modules/mongoose/lib/document.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

30
node_modules/mongoose/lib/document_provider.js generated vendored Normal file
View File

@@ -0,0 +1,30 @@
'use strict';
/* eslint-env browser */
/*!
* Module dependencies.
*/
const Document = require('./document.js');
const BrowserDocument = require('./browserDocument.js');
let isBrowser = false;
/**
* Returns the Document constructor for the current context
*
* @api private
*/
module.exports = function() {
  // The browser build swaps in a lighter-weight Document implementation
  // with no MongoDB driver dependencies.
  if (isBrowser) {
    return BrowserDocument;
  }
  return Document;
};
/*!
 * Toggles between the Node and browser Document implementations.
 * Called with `true` by `lib/browser.js` when bundling for the browser.
 */
module.exports.setBrowser = function(flag) {
  isBrowser = flag;
};

15
node_modules/mongoose/lib/driver.js generated vendored Normal file
View File

@@ -0,0 +1,15 @@
'use strict';
/*!
* ignore
*/
let driver = null; // the currently-installed driver implementation
/*!
 * Returns the driver installed via `set()`, or `null` if none yet.
 */
module.exports.get = function() {
  return driver;
};
/*!
 * Installs a driver implementation (e.g. `lib/browser.js` sets the
 * browser driver).
 */
module.exports.set = function(v) {
  driver = v;
};

4
node_modules/mongoose/lib/drivers/SPEC.md generated vendored Normal file
View File

@@ -0,0 +1,4 @@
# Driver Spec
TODO

View File

@@ -0,0 +1,7 @@
/*!
* ignore
*/
'use strict';
module.exports = function() {};

14
node_modules/mongoose/lib/drivers/browser/binary.js generated vendored Normal file
View File

@@ -0,0 +1,14 @@
/*!
* Module dependencies.
*/
'use strict';
const Binary = require('bson').Binary;
/*!
* Module exports.
*/
module.exports = exports = Binary;

View File

@@ -0,0 +1,7 @@
/*!
* ignore
*/
'use strict';
module.exports = require('bson').Decimal128;

16
node_modules/mongoose/lib/drivers/browser/index.js generated vendored Normal file
View File

@@ -0,0 +1,16 @@
/*!
* Module exports.
*/
'use strict';
exports.Binary = require('./binary');
// Collections and connections need the MongoDB driver, which is not
// available in the browser build — these are throwing stubs.
exports.Collection = function() {
  throw new Error('Cannot create a collection from browser library');
};
exports.getConnection = () => function() {
  throw new Error('Cannot create a connection from browser library');
};
exports.Decimal128 = require('./decimal128');
exports.ObjectId = require('./objectid');
exports.ReadPreference = require('./ReadPreference');

28
node_modules/mongoose/lib/drivers/browser/objectid.js generated vendored Normal file
View File

@@ -0,0 +1,28 @@
/*!
* [node-mongodb-native](https://github.com/mongodb/node-mongodb-native) ObjectId
* @constructor NodeMongoDbObjectId
* @see ObjectId
*/
'use strict';
const ObjectId = require('bson').ObjectID;
/*!
* Getter for convenience with populate, see gh-6115
*/
Object.defineProperty(ObjectId.prototype, '_id', {
  enumerable: false,
  configurable: true,
  // An ObjectId's `_id` is itself, so populated ids can be accessed
  // uniformly with hydrated documents (see gh-6115 note above).
  get: function() {
    return this;
  }
});
/*!
* ignore
*/
module.exports = exports = ObjectId;

View File

@@ -0,0 +1,47 @@
/*!
* Module dependencies.
*/
'use strict';
const mongodb = require('mongodb');
const ReadPref = mongodb.ReadPreference;
/*!
* Converts arguments to ReadPrefs the driver
* can understand.
*
* @param {String|Array} pref
* @param {Array} [tags]
*/
module.exports = function readPref(pref, tags) {
if (Array.isArray(pref)) {
tags = pref[1];
pref = pref[0];
}
if (pref instanceof ReadPref) {
return pref;
}
switch (pref) {
case 'p':
pref = 'primary';
break;
case 'pp':
pref = 'primaryPreferred';
break;
case 's':
pref = 'secondary';
break;
case 'sp':
pref = 'secondaryPreferred';
break;
case 'n':
pref = 'nearest';
break;
}
return new ReadPref(pref, tags);
};

View File

@@ -0,0 +1,10 @@
/*!
* Module dependencies.
*/
'use strict';
const Binary = require('mongodb').Binary;
module.exports = exports = Binary;

View File

@@ -0,0 +1,452 @@
'use strict';
/*!
* Module dependencies.
*/
const MongooseCollection = require('../../collection');
const MongooseError = require('../../error/mongooseError');
const Collection = require('mongodb').Collection;
const ObjectId = require('./objectid');
const get = require('../../helpers/get');
const sliced = require('sliced');
const stream = require('stream');
const util = require('util');
/**
* A [node-mongodb-native](https://github.com/mongodb/node-mongodb-native) collection implementation.
*
* All methods from the [node-mongodb-native](https://github.com/mongodb/node-mongodb-native) driver are copied and wrapped in queue management.
*
* @inherits Collection
* @api private
*/
function NativeCollection(name, conn, options) {
  this.collection = null; // set in `onOpen()` once the connection is ready
  this.Promise = options.Promise || Promise;
  this.modelName = options.modelName;
  // Remove so the remaining options can be forwarded to the base class.
  delete options.modelName;
  this._closed = false;
  MongooseCollection.apply(this, arguments);
}
/*!
 * Inherit from abstract Collection.
 */
NativeCollection.prototype.__proto__ = MongooseCollection.prototype;
/**
* Called when the connection opens.
*
* @api private
*/
NativeCollection.prototype.onOpen = function() {
  const _this = this;
  // always get a new collection in case the user changed host:port
  // of parent db instance when re-opening the connection.
  if (!_this.opts.capped.size) {
    // non-capped
    callback(null, _this.conn.db.collection(_this.name));
    return _this.collection;
  }
  // Capped but autoCreate disabled: just grab the handle.
  if (_this.opts.autoCreate === false) {
    _this.collection = _this.conn.db.collection(_this.name);
    MongooseCollection.prototype.onOpen.call(_this);
    return _this.collection;
  }
  // capped
  return _this.conn.db.collection(_this.name, function(err, c) {
    if (err) return callback(err);
    // discover if this collection exists and if it is capped
    _this.conn.db.listCollections({ name: _this.name }).toArray(function(err, docs) {
      if (err) {
        return callback(err);
      }
      const doc = docs[0];
      const exists = !!doc;
      if (exists) {
        if (doc.options && doc.options.capped) {
          callback(null, c);
        } else {
          // An existing non-capped collection cannot silently be used as
          // capped — require an explicit conversion by the user.
          const msg = 'A non-capped collection exists with the name: ' + _this.name + '\n\n'
            + ' To use this collection as a capped collection, please '
            + 'first convert it.\n'
            + ' http://www.mongodb.org/display/DOCS/Capped+Collections#CappedCollections-Convertingacollectiontocapped';
          err = new Error(msg);
          callback(err);
        }
      } else {
        // create
        const opts = Object.assign({}, _this.opts.capped);
        opts.capped = true;
        _this.conn.db.createCollection(_this.name, opts, callback);
      }
    });
  });
  // Hoisted helper: store the collection and notify the base class, or
  // report the error on the connection.
  function callback(err, collection) {
    if (err) {
      // likely a strict mode error
      _this.conn.emit('error', err);
    } else {
      _this.collection = collection;
      MongooseCollection.prototype.onOpen.call(_this);
    }
  }
};
/**
* Called when the connection closes
*
* @api private
*/
NativeCollection.prototype.onClose = function(force) {
  // Delegate entirely to the abstract base class.
  MongooseCollection.prototype.onClose.call(this, force);
};
/*!
 * Collection methods that return their value synchronously (no
 * callback/promise), and therefore cannot be buffered while the
 * connection is down — `iter()` throws for these instead of queueing.
 */
const syncCollectionMethods = { watch: true };
/*!
* Copy the collection methods and make them subject to queues
*/
// Wraps driver collection method `i` on NativeCollection.prototype with
// buffering, operation events ('operation-start'/'operation-end'/'buffer'),
// and debug logging.
function iter(i) {
  NativeCollection.prototype[i] = function() {
    const collection = this.collection;
    const args = Array.from(arguments);
    const _this = this;
    const debug = get(_this, 'conn.base.options.debug');
    const lastArg = arguments[arguments.length - 1];
    const opId = new ObjectId(); // correlates start/end/buffer events
    // If user force closed, queueing will hang forever. See #5664
    if (this.conn.$wasForceClosed) {
      const error = new MongooseError('Connection was force closed');
      if (args.length > 0 &&
        typeof args[args.length - 1] === 'function') {
        args[args.length - 1](error);
        return;
      } else {
        throw error;
      }
    }
    let _args = args;
    let callback = null;
    if (this._shouldBufferCommands() && this.buffer) {
      // --- Buffering path: connection not ready, queue the operation. ---
      if (syncCollectionMethods[i]) {
        throw new Error('Collection method ' + i + ' is synchronous');
      }
      this.conn.emit('buffer', {
        _id: opId,
        modelName: _this.modelName,
        collectionName: _this.name,
        method: i,
        args: args
      });
      // NOTE(review): these redeclarations shadow the outer
      // `callback`/`_args` for the remainder of this branch.
      let callback;
      let _args;
      let promise = null;
      let timeout = null;
      if (typeof lastArg === 'function') {
        // Callback style: wrap so the buffer timeout is cleared on completion.
        callback = function collectionOperationCallback() {
          if (timeout != null) {
            clearTimeout(timeout);
          }
          return lastArg.apply(this, arguments);
        };
        _args = args.slice(0, args.length - 1).concat([callback]);
      } else {
        // Promise style: resolve/reject from an appended callback.
        promise = new this.Promise((resolve, reject) => {
          callback = function collectionOperationCallback(err, res) {
            if (timeout != null) {
              clearTimeout(timeout);
            }
            if (err != null) {
              return reject(err);
            }
            resolve(res);
          };
          _args = args.concat([callback]);
          this.addQueue(i, _args);
        });
      }
      // Fail the queued operation if it stays buffered too long.
      const bufferTimeoutMS = this._getBufferTimeoutMS();
      timeout = setTimeout(() => {
        const removed = this.removeQueue(i, _args);
        if (removed) {
          const message = 'Operation `' + this.name + '.' + i + '()` buffering timed out after ' +
            bufferTimeoutMS + 'ms';
          const err = new MongooseError(message);
          this.conn.emit('buffer-end', { _id: opId, modelName: _this.modelName, collectionName: _this.name, method: i, error: err });
          callback(err);
        }
      }, bufferTimeoutMS);
      if (typeof lastArg === 'function') {
        this.addQueue(i, _args);
        return;
      }
      return promise;
    } else if (!syncCollectionMethods[i] && typeof lastArg === 'function') {
      // --- Direct callback path: wrap to emit 'operation-end'. ---
      callback = function collectionOperationCallback(err, res) {
        if (err != null) {
          _this.conn.emit('operation-end', { _id: opId, modelName: _this.modelName, collectionName: _this.name, method: i, error: err });
        } else {
          _this.conn.emit('operation-end', { _id: opId, modelName: _this.modelName, collectionName: _this.name, method: i, result: res });
        }
        return lastArg.apply(this, arguments);
      };
      _args = args.slice(0, args.length - 1).concat([callback]);
    }
    // Debug logging: function, writable stream, or console, per config.
    if (debug) {
      if (typeof debug === 'function') {
        debug.apply(_this,
          [_this.name, i].concat(sliced(args, 0, args.length - 1)));
      } else if (debug instanceof stream.Writable) {
        this.$printToStream(_this.name, i, args, debug);
      } else {
        const color = debug.color == null ? true : debug.color;
        const shell = debug.shell == null ? false : debug.shell;
        this.$print(_this.name, i, args, color, shell);
      }
    }
    this.conn.emit('operation-start', { _id: opId, modelName: _this.modelName, collectionName: this.name, method: i, params: _args });
    try {
      if (collection == null) {
        const message = 'Cannot call `' + this.name + '.' + i + '()` before initial connection ' +
          'is complete if `bufferCommands = false`. Make sure you `await mongoose.connect()` if ' +
          'you have `bufferCommands = false`.';
        throw new MongooseError(message);
      }
      const ret = collection[i].apply(collection, _args);
      // Promise-returning driver methods: emit 'operation-end' on settle.
      if (ret != null && typeof ret.then === 'function') {
        return ret.then(
          res => {
            this.conn.emit('operation-end', { _id: opId, modelName: this.modelName, collectionName: this.name, method: i, result: res });
            return res;
          },
          err => {
            this.conn.emit('operation-end', { _id: opId, modelName: this.modelName, collectionName: this.name, method: i, error: err });
            throw err;
          }
        );
      }
      return ret;
    } catch (error) {
      // Collection operation may throw because of max bson size, catch it here
      // See gh-3906
      if (typeof callback === 'function') {
        callback(error);
      } else {
        this.conn.emit('operation-end', { _id: opId, modelName: _this.modelName, collectionName: this.name, method: i, error: error });
      }
      if (typeof lastArg === 'function') {
        lastArg(error);
      } else {
        throw error;
      }
    }
  };
}
// Wrap every function-valued member of the driver's Collection prototype
// with the buffering/eventing wrapper defined in `iter()` above.
for (const key of Object.keys(Collection.prototype)) {
  // Janky hack to work around gh-3005 until we can get rid of the mongoose
  // collection abstraction
  const descriptor = Object.getOwnPropertyDescriptor(Collection.prototype, key);
  // Skip properties with getters because they may throw errors (gh-8528)
  if (descriptor.get !== undefined) {
    continue;
  }
  if (typeof Collection.prototype[key] !== 'function') {
    continue;
  }
  iter(key);
}
/**
* Debug print helper
*
* @api public
* @method $print
*/
NativeCollection.prototype.$print = function(name, i, args, color, shell) {
  const moduleName = color ? '\x1B[0;36mMongoose:\x1B[0m ' : 'Mongoose: ';
  const functionCall = [name, i].join('.');
  // Walk backwards so trailing "empty" args (callbacks, undefined) are
  // dropped while interior ones are kept.
  const printable = [];
  for (let idx = args.length - 1; idx >= 0; --idx) {
    if (this.$format(args[idx]) || printable.length) {
      printable.unshift(this.$format(args[idx], color, shell));
    }
  }
  console.info(`${moduleName}${functionCall}(${printable.join(', ')})`);
};
/**
* Debug print helper
*
* @api public
* @method $print
*/
NativeCollection.prototype.$printToStream = function(name, i, args, stream) {
  const functionCall = [name, i].join('.');
  // Walk backwards so trailing "empty" args (callbacks, undefined) are
  // dropped while interior ones are kept.
  const printable = [];
  for (let idx = args.length - 1; idx >= 0; --idx) {
    if (this.$format(args[idx]) || printable.length) {
      printable.unshift(this.$format(args[idx]));
    }
  }
  stream.write(`${functionCall}(${printable.join(', ')})`, 'utf8');
};
/**
* Formatter for debug print args
*
* @api public
* @method $format
*/
NativeCollection.prototype.$format = function(arg, color, shell) {
  // Functions (callbacks) and undefined render as nothing.
  if (typeof arg === 'function' || typeof arg === 'undefined') {
    return '';
  }
  return format(arg, false, color, shell);
};
/*!
* Debug print helper
*/
/*!
 * Wraps a pre-rendered string so `util.inspect` prints it verbatim
 * (via both the legacy `inspect` method and `util.inspect.custom`).
 */
function inspectable(representation) {
  const show = function() { return representation; };
  const ret = { inspect: show };
  if (util.inspect.custom) {
    ret[util.inspect.custom] = show;
  }
  return ret;
}
// Array-element helper for `format()`: formats in "sub" mode so nested
// values are returned rather than rendered to a string.
function map(o) {
  return format(o, true);
}
// Replaces `x[key]` (an ObjectId) with a shell-style `ObjectId("...")`
// rendering for debug output.
function formatObjectId(x, key) {
  x[key] = inspectable('ObjectId("' + x[key].toHexString() + '")');
}
/*!
 * Replaces `x[key]` (a Date) with a debug rendering: mongo-shell style
 * `ISODate("...")` when `shell` is set, otherwise `new Date("...")`.
 */
function formatDate(x, key, shell) {
  const constructorLabel = shell ? 'ISODate' : 'new Date';
  x[key] = inspectable(constructorLabel + '("' + x[key].toUTCString() + '")');
}
// Renders `obj` for debug logging the way the mongo shell would. When
// `sub` is truthy the transformed value is returned (for recursion);
// otherwise a single-line string is produced.
function format(obj, sub, color, shell) {
  // Serialize via toBSON() when available so we log what the driver sends.
  if (obj && typeof obj.toBSON === 'function') {
    obj = obj.toBSON();
  }
  if (obj == null) {
    return obj;
  }
  const clone = require('../../helpers/clone');
  // Clone so the caller's object is never mutated by the rewrites below.
  let x = clone(obj, { transform: false });
  // Top-level BSON special cases.
  if (x.constructor.name === 'Binary') {
    x = 'BinData(' + x.sub_type + ', "' + x.toString('base64') + '")';
  } else if (x.constructor.name === 'ObjectID') {
    x = inspectable('ObjectId("' + x.toHexString() + '")');
  } else if (x.constructor.name === 'Date') {
    x = inspectable('new Date("' + x.toUTCString() + '")');
  } else if (x.constructor.name === 'Object') {
    // Plain object: rewrite each truthy value into its shell rendering.
    const keys = Object.keys(x);
    const numKeys = keys.length;
    let key;
    for (let i = 0; i < numKeys; ++i) {
      key = keys[i];
      if (x[key]) {
        let error;
        if (typeof x[key].toBSON === 'function') {
          try {
            // `session.toBSON()` throws an error. This means we throw errors
            // in debug mode when using transactions, see gh-6712. As a
            // workaround, catch `toBSON()` errors, try to serialize without
            // `toBSON()`, and rethrow if serialization still fails.
            x[key] = x[key].toBSON();
          } catch (_error) {
            error = _error;
          }
        }
        if (x[key].constructor.name === 'Binary') {
          x[key] = 'BinData(' + x[key].sub_type + ', "' +
            x[key].buffer.toString('base64') + '")';
        } else if (x[key].constructor.name === 'Object') {
          x[key] = format(x[key], true);
        } else if (x[key].constructor.name === 'ObjectID') {
          formatObjectId(x, key);
        } else if (x[key].constructor.name === 'Date') {
          formatDate(x, key, shell);
        } else if (x[key].constructor.name === 'ClientSession') {
          x[key] = inspectable('ClientSession("' +
            get(x[key], 'id.id.buffer', '').toString('hex') + '")');
        } else if (Array.isArray(x[key])) {
          x[key] = x[key].map(map);
        } else if (error != null) {
          // If there was an error with `toBSON()` and the object wasn't
          // already converted to a string representation, rethrow it.
          // Open to better ideas on how to handle this.
          throw error;
        }
      }
    }
  }
  if (sub) {
    return x;
  }
  // Top level: collapse util.inspect output onto a single line.
  return util.
    inspect(x, false, 10, color).
    replace(/\n/g, '').
    replace(/\s{2,}/g, ' ');
}
/**
 * Retrieves information about this collection's indexes.
 *
 * Alias for the driver's `indexInformation()`.
 *
 * @param {Function} callback
 * @method getIndexes
 * @api public
 */
NativeCollection.prototype.getIndexes = NativeCollection.prototype.indexInformation;
/*!
 * Module exports.
 */
module.exports = NativeCollection;

View File

@@ -0,0 +1,211 @@
/*!
* Module dependencies.
*/
'use strict';
const MongooseConnection = require('../../connection');
const STATES = require('../../connectionstate');
const immediate = require('../../helpers/immediate');
const setTimeout = require('../../helpers/timers').setTimeout;
/**
 * A [node-mongodb-native](https://github.com/mongodb/node-mongodb-native) connection implementation.
 *
 * @inherits Connection
 * @api private
 */
function NativeConnection() {
  MongooseConnection.apply(this, arguments);
  // Whether `listen()` has already wired driver events onto this connection.
  this._listening = false;
}
/**
 * Expose the possible connection states (e.g. `connected`,
 * `disconnected` — see `../../connectionstate`).
 * @api public
 */
NativeConnection.STATES = STATES;
/*!
 * Inherits from Connection.
 * `Object.setPrototypeOf` replaces the deprecated (Annex B) `__proto__`
 * setter; the resulting prototype chain is identical.
 */
Object.setPrototypeOf(NativeConnection.prototype, MongooseConnection.prototype);
/**
 * Switches to a different database using the same connection pool.
 *
 * Returns a new connection object, with the new db. If you set the `useCache`
 * option, `useDb()` will cache connections by `name`.
 *
 * **Note:** Calling `close()` on a `useDb()` connection will close the base connection as well.
 *
 * @param {String} name The database name
 * @param {Object} [options]
 * @param {Boolean} [options.useCache=false] If true, cache results so calling `useDb()` multiple times with the same name only creates 1 connection object.
 * @param {Boolean} [options.noListener=false] If true, the new connection object won't listen to any events on the base connection. This is better for memory usage in cases where you're calling `useDb()` for every request.
 * @return {Connection} New Connection Object
 * @api public
 */
NativeConnection.prototype.useDb = function(name, options) {
  // Return immediately if cached
  options = options || {};
  if (options.useCache && this.relatedDbs[name]) {
    return this.relatedDbs[name];
  }
  // we have to manually copy all of the attributes...
  const newConn = new this.constructor();
  newConn.name = name; // NOTE(review): overwritten just below, then restored after wireup setup
  newConn.base = this.base;
  newConn.collections = {};
  newConn.models = {};
  newConn.replica = this.replica;
  newConn.config = Object.assign({}, this.config, newConn.config);
  newConn.name = this.name; // NOTE(review): redundant — re-set to `name` further down; confirm before removing
  newConn.options = this.options;
  newConn._readyState = this._readyState;
  newConn._closeCalled = this._closeCalled;
  newConn._hasOpened = this._hasOpened;
  newConn._listening = false;
  newConn.host = this.host;
  newConn.port = this.port;
  newConn.user = this.user;
  newConn.pass = this.pass;
  // First, when we create another db object, we are not guaranteed to have a
  // db object to work with. So, in the case where we have a db object and it
  // is connected, we can just proceed with setting everything up. However, if
  // we do not have a db or the state is not connected, then we need to wait on
  // the 'open' event of the connection before doing the rest of the setup
  // the 'connected' event is the first time we'll have access to the db object
  const _this = this;
  newConn.client = _this.client;
  if (this.db && this._readyState === STATES.connected) {
    wireup();
  } else {
    this.once('connected', wireup);
  }
  // Binds the new connection to a driver `Db` for `name` and (optionally)
  // attaches driver event listeners. Runs now if already connected,
  // otherwise once on the base connection's 'connected' event.
  function wireup() {
    newConn.client = _this.client;
    const _opts = {};
    if (options.hasOwnProperty('noListener')) {
      _opts.noListener = options.noListener;
    }
    newConn.db = _this.client.db(name, _opts);
    newConn.onOpen();
    // setup the events appropriately
    if (options.noListener !== true) {
      listen(newConn);
    }
  }
  newConn.name = name;
  // push onto the otherDbs stack, this is used when state changes
  if (options.noListener !== true) {
    this.otherDbs.push(newConn);
  }
  newConn.otherDbs.push(this);
  // push onto the relatedDbs cache, this is used when state changes
  if (options && options.useCache) {
    this.relatedDbs[newConn.name] = newConn;
    newConn.relatedDbs = this.relatedDbs;
  }
  return newConn;
};
/*!
 * Register listeners for important events and bubble appropriately.
 * Idempotent per connection via the `_listening` flag.
 */
function listen(conn) {
  if (conn._listening) {
    return;
  }
  conn._listening = true;
  conn.client.on('close', function(force) {
    if (conn._closeCalled) {
      return;
    }
    conn._closeCalled = conn.client._closeCalled;
    // the driver never emits an `open` event. auto_reconnect still
    // emits a `close` event but since we never get another
    // `open` we can't emit close
    if (conn.db.serverConfig.autoReconnect) {
      conn.readyState = STATES.disconnected;
      conn.emit('close');
      return;
    }
    conn.onClose(force);
  });
  conn.client.on('error', function(err) {
    // Bubble driver errors up to the mongoose connection.
    conn.emit('error', err);
  });
  // Legacy topology only: the unified topology does not emit these db events.
  if (!conn.client.s.options.useUnifiedTopology) {
    conn.db.on('reconnect', function() {
      conn.readyState = STATES.connected;
      conn.emit('reconnect');
      conn.emit('reconnected');
      conn.onOpen();
    });
    conn.db.on('open', function(err, db) {
      // Treat a late 'open' after a disconnect as a reconnection.
      if (STATES.disconnected === conn.readyState && db && db.databaseName) {
        conn.readyState = STATES.connected;
        conn.emit('reconnect');
        conn.emit('reconnected');
      }
    });
  }
  conn.client.on('timeout', function(err) {
    conn.emit('timeout', err);
  });
  conn.client.on('parseError', function(err) {
    conn.emit('parseError', err);
  });
}
/**
 * Closes the connection
 *
 * @param {Boolean} [force]
 * @param {Function} [fn]
 * @return {Connection} this
 * @api private
 */
NativeConnection.prototype.doClose = function(force, fn) {
  const client = this.client;
  if (client == null) {
    // No driver client to close: still invoke the callback asynchronously.
    immediate(() => fn());
    return this;
  }
  client.close(force, (err, res) => {
    // Defer because the driver will wait at least 1ms before finishing closing
    // the pool, see https://github.com/mongodb-js/mongodb-core/blob/a8f8e4ce41936babc3b9112bf42d609779f03b39/lib/connection/pool.js#L1026-L1030.
    // If there's queued operations, you may still get some background work
    // after the callback is called.
    setTimeout(() => fn(err, res), 1);
  });
  return this;
};
/*!
* Module exports.
*/
module.exports = NativeConnection;

View File

@@ -0,0 +1,7 @@
/*!
* ignore
*/
'use strict';
module.exports = require('mongodb').Decimal128;

View File

@@ -0,0 +1,12 @@
/*!
* Module exports.
*/
'use strict';
exports.Binary = require('./binary');
exports.Collection = require('./collection');
exports.Decimal128 = require('./decimal128');
exports.ObjectId = require('./objectid');
exports.ReadPreference = require('./ReadPreference');
exports.getConnection = () => require('./connection');

View File

@@ -0,0 +1,16 @@
/*!
* [node-mongodb-native](https://github.com/mongodb/node-mongodb-native) ObjectId
* @constructor NodeMongoDbObjectId
* @see ObjectId
*/
'use strict';
const ObjectId = require('mongodb').ObjectId;
/*!
* ignore
*/
module.exports = exports = ObjectId;

View File

@@ -0,0 +1,28 @@
/*!
* Module dependencies.
*/
'use strict';
const MongooseError = require('./');
class MissingSchemaError extends MongooseError {
  /*!
   * MissingSchema Error constructor (browser build): thrown when
   * `mongoose.Document()` is used without a registered schema.
   */
  constructor() {
    super('Schema hasn\'t been registered for document.\n'
      + 'Use mongoose.Document(name, schema)');
  }
}
// NOTE(review): sibling error classes set `name` to their own class name
// (e.g. missingSchema.js uses 'MissingSchemaError'). 'MongooseError' here
// looks inconsistent — confirm whether it is intentional before changing,
// since downstream code may match on `err.name`.
Object.defineProperty(MissingSchemaError.prototype, 'name', {
  value: 'MongooseError'
});
/*!
 * exports
 */
module.exports = MissingSchemaError;

150
node_modules/mongoose/lib/error/cast.js generated vendored Normal file
View File

@@ -0,0 +1,150 @@
'use strict';
/*!
* Module dependencies.
*/
const MongooseError = require('./mongooseError');
const get = require('../helpers/get');
const util = require('util');
/**
 * Casting Error constructor.
 *
 * @param {String} type schema type name the cast was attempted to (e.g. 'Number')
 * @param {Any} value the value that failed to cast
 * @param {String} path document path that was being cast
 * @param {Error} [reason] underlying error that caused the cast failure
 * @param {SchemaType} [schemaType] used to look up a custom `cast` message template
 * @inherits MongooseError
 * @api private
 */
class CastError extends MongooseError {
  constructor(type, value, path, reason, schemaType) {
    // If no args, assume we'll `init()` later.
    if (arguments.length > 0) {
      const stringValue = getStringValue(value);
      const valueType = getValueType(value);
      const messageFormat = getMessageFormat(schemaType);
      const msg = formatMessage(null, type, stringValue, path, messageFormat, valueType);
      super(msg);
      this.init(type, value, path, reason, schemaType);
    } else {
      super(formatMessage());
    }
  }
  // Serializes this error's diagnostic fields for JSON.stringify.
  toJSON() {
    return {
      stringValue: this.stringValue,
      valueType: this.valueType,
      kind: this.kind,
      value: this.value,
      path: this.path,
      reason: this.reason,
      name: this.name,
      message: this.message
    };
  }
  /*!
   * Populates the diagnostic fields; split from the constructor so an
   * empty-constructed instance can be filled in later.
   */
  init(type, value, path, reason, schemaType) {
    this.stringValue = getStringValue(value);
    this.messageFormat = getMessageFormat(schemaType);
    this.kind = type;
    this.value = value;
    this.path = path;
    this.reason = reason;
    this.valueType = getValueType(value);
  }
  /*!
   * Copies all diagnostic fields (including the message) from another
   * CastError onto this one.
   * @param {Readonly<CastError>} other
   */
  copy(other) {
    this.messageFormat = other.messageFormat;
    this.stringValue = other.stringValue;
    this.kind = other.kind;
    this.value = other.value;
    this.path = other.path;
    this.reason = other.reason;
    this.message = other.message;
    this.valueType = other.valueType;
  }
  /*!
   * Attaches the model and regenerates the message so it includes the
   * model name.
   */
  setModel(model) {
    this.model = model;
    this.message = formatMessage(model, this.kind, this.stringValue, this.path,
      this.messageFormat, this.valueType);
  }
}
Object.defineProperty(CastError.prototype, 'name', {
  value: 'CastError'
});
/*!
 * Renders `value` via `util.inspect` and normalizes the result so it is
 * always wrapped in double quotes (inspect's single quotes are swapped,
 * unquoted renderings are wrapped).
 */
function getStringValue(value) {
  const inspected = util.inspect(value);
  let result = inspected.replace(/^'|'$/g, '"');
  if (!result.startsWith('"')) {
    result = '"' + result + '"';
  }
  return result;
}
/*!
 * Human-readable type tag for a cast-failure value: 'null'/'undefined'
 * for nullish values, the `typeof` tag for primitives (and for objects
 * with no usable constructor), otherwise the constructor's name.
 */
function getValueType(value) {
  if (value == null) {
    return '' + value;
  }
  const baseType = typeof value;
  if (baseType !== 'object' || typeof value.constructor !== 'function') {
    return baseType;
  }
  return value.constructor.name;
}
/*!
 * Returns the schema type's custom `cast` message template when it is a
 * string; otherwise returns `undefined` (function/boolean `cast` options
 * are not message templates).
 */
function getMessageFormat(schemaType) {
  const fmt = get(schemaType, 'options.cast', null);
  return typeof fmt === 'string' ? fmt : undefined;
}
/*!
* ignore
*/
/*!
 * Builds the CastError message. When a custom `messageFormat` template is
 * provided, substitutes `{KIND}`, `{VALUE}`, `{PATH}` (and `{MODEL}` when a
 * model is known); otherwise produces the default "Cast to ... failed"
 * wording, optionally suffixed with the value's type and the model name.
 */
function formatMessage(model, kind, stringValue, path, messageFormat, valueType) {
  if (messageFormat != null) {
    let message = messageFormat.
      replace('{KIND}', kind).
      replace('{VALUE}', stringValue).
      replace('{PATH}', path);
    if (model != null) {
      message = message.replace('{MODEL}', model.modelName);
    }
    return message;
  }
  const typeSuffix = valueType ? ' (type ' + valueType + ')' : '';
  let message = 'Cast to ' + kind + ' failed for value ' +
    stringValue + typeSuffix + ' at path "' + path + '"';
  if (model != null) {
    message += ' for model "' + model.modelName + '"';
  }
  return message;
}
/*!
* exports
*/
module.exports = CastError;

34
node_modules/mongoose/lib/error/disconnected.js generated vendored Normal file
View File

@@ -0,0 +1,34 @@
/*!
* Module dependencies.
*/
'use strict';
const MongooseError = require('./');
/**
* The connection failed to reconnect and will never successfully reconnect to
* MongoDB without manual intervention.
* @api private
*/
class DisconnectedError extends MongooseError {
  /**
   * @param {String} connectionString the connection string the driver gave up reconnecting to
   */
  constructor(connectionString) {
    super(`Ran out of retries trying to reconnect to "${connectionString}". ` +
      'Try setting `server.reconnectTries` and ' +
      '`server.reconnectInterval` to something higher.');
  }
}
Object.defineProperty(DisconnectedError.prototype, 'name', {
  value: 'DisconnectedError'
});
/*!
 * exports
 */
module.exports = DisconnectedError;

37
node_modules/mongoose/lib/error/divergentArray.js generated vendored Normal file
View File

@@ -0,0 +1,37 @@
/*!
* Module dependencies.
*/
'use strict';
const MongooseError = require('./');
class DivergentArrayError extends MongooseError {
  /*!
   * DivergentArrayError constructor: raised when `save()` would unsafely
   * overwrite an array that was loaded incompletely (via `$elemMatch`
   * projection, or populate with skip/limit/conditions/`_id` exclusion).
   * @param {Array<String>} paths document paths that would be modified unsafely
   */
  constructor(paths) {
    const msg = 'For your own good, using `document.save()` to update an array '
      + 'which was selected using an $elemMatch projection OR '
      + 'populated using skip, limit, query conditions, or exclusion of '
      + 'the _id field when the operation results in a $pop or $set of '
      + 'the entire array is not supported. The following '
      + 'path(s) would have been modified unsafely:\n'
      + ' ' + paths.join('\n ') + '\n'
      + 'Use Model.update() to update these arrays instead.';
    // TODO write up a docs page (FAQ) and link to it
    super(msg);
  }
}
Object.defineProperty(DivergentArrayError.prototype, 'name', {
  value: 'DivergentArrayError'
});
/*!
 * exports
 */
module.exports = DivergentArrayError;

205
node_modules/mongoose/lib/error/index.js generated vendored Normal file
View File

@@ -0,0 +1,205 @@
'use strict';
/**
* MongooseError constructor. MongooseError is the base class for all
* Mongoose-specific errors.
*
* ####Example:
* const Model = mongoose.model('Test', new Schema({ answer: Number }));
* const doc = new Model({ answer: 'not a number' });
* const err = doc.validateSync();
*
* err instanceof mongoose.Error; // true
*
* @constructor Error
* @param {String} msg Error message
* @inherits Error https://developer.mozilla.org/en/JavaScript/Reference/Global_Objects/Error
*/
const MongooseError = require('./mongooseError');
/**
* The name of the error. The name uniquely identifies this Mongoose error. The
* possible values are:
*
* - `MongooseError`: general Mongoose error
* - `CastError`: Mongoose could not convert a value to the type defined in the schema path. May be in a `ValidationError` class' `errors` property.
* - `DisconnectedError`: This [connection](connections.html) timed out in trying to reconnect to MongoDB and will not successfully reconnect to MongoDB unless you explicitly reconnect.
* - `DivergentArrayError`: You attempted to `save()` an array that was modified after you loaded it with a `$elemMatch` or similar projection
* - `MissingSchemaError`: You tried to access a model with [`mongoose.model()`](api.html#mongoose_Mongoose-model) that was not defined
* - `DocumentNotFoundError`: The document you tried to [`save()`](api.html#document_Document-save) was not found
* - `ValidatorError`: error from an individual schema path's validator
* - `ValidationError`: error returned from [`validate()`](api.html#document_Document-validate) or [`validateSync()`](api.html#document_Document-validateSync). Contains zero or more `ValidatorError` instances in `.errors` property.
* - `MissingSchemaError`: You called `mongoose.Document()` without a schema
* - `ObjectExpectedError`: Thrown when you set a nested path to a non-object value with [strict mode set](guide.html#strict).
 * - `ObjectParameterError`: Thrown when you pass a non-object value to a function which expects an object as a parameter
* - `OverwriteModelError`: Thrown when you call [`mongoose.model()`](api.html#mongoose_Mongoose-model) to re-define a model that was already defined.
* - `ParallelSaveError`: Thrown when you call [`save()`](api.html#model_Model-save) on a document when the same document instance is already saving.
* - `StrictModeError`: Thrown when you set a path that isn't the schema and [strict mode](guide.html#strict) is set to `throw`.
* - `VersionError`: Thrown when the [document is out of sync](guide.html#versionKey)
*
* @api public
* @property {String} name
* @memberOf Error
* @instance
*/
/*!
* Module exports.
*/
module.exports = exports = MongooseError;
/**
* The default built-in validator error messages.
*
* @see Error.messages #error_messages_MongooseError-messages
* @api public
* @memberOf Error
* @static messages
*/
MongooseError.messages = require('./messages');
// backward compat
MongooseError.Messages = MongooseError.messages;
/**
* An instance of this error class will be returned when `save()` fails
* because the underlying
* document was not found. The constructor takes one parameter, the
* conditions that mongoose passed to `update()` when trying to update
* the document.
*
* @api public
* @memberOf Error
* @static DocumentNotFoundError
*/
MongooseError.DocumentNotFoundError = require('./notFound');
/**
* An instance of this error class will be returned when mongoose failed to
* cast a value.
*
* @api public
* @memberOf Error
* @static CastError
*/
MongooseError.CastError = require('./cast');
/**
* An instance of this error class will be returned when [validation](/docs/validation.html) failed.
* The `errors` property contains an object whose keys are the paths that failed and whose values are
* instances of CastError or ValidationError.
*
* @api public
* @memberOf Error
* @static ValidationError
*/
MongooseError.ValidationError = require('./validation');
/**
* A `ValidationError` has a hash of `errors` that contain individual
* `ValidatorError` instances.
*
* ####Example:
*
* const schema = Schema({ name: { type: String, required: true } });
* const Model = mongoose.model('Test', schema);
* const doc = new Model({});
*
* // Top-level error is a ValidationError, **not** a ValidatorError
* const err = doc.validateSync();
* err instanceof mongoose.Error.ValidationError; // true
*
* // A ValidationError `err` has 0 or more ValidatorErrors keyed by the
* // path in the `err.errors` property.
* err.errors['name'] instanceof mongoose.Error.ValidatorError;
*
* err.errors['name'].kind; // 'required'
* err.errors['name'].path; // 'name'
* err.errors['name'].value; // undefined
*
* Instances of `ValidatorError` have the following properties:
*
* - `kind`: The validator's `type`, like `'required'` or `'regexp'`
* - `path`: The path that failed validation
* - `value`: The value that failed validation
*
* @api public
* @memberOf Error
* @static ValidatorError
*/
MongooseError.ValidatorError = require('./validator');
/**
* An instance of this error class will be returned when you call `save()` after
* the document in the database was changed in a potentially unsafe way. See
* the [`versionKey` option](/docs/guide.html#versionKey) for more information.
*
* @api public
* @memberOf Error
* @static VersionError
*/
MongooseError.VersionError = require('./version');
/**
* An instance of this error class will be returned when you call `save()` multiple
* times on the same document in parallel. See the [FAQ](/docs/faq.html) for more
* information.
*
* @api public
* @memberOf Error
* @static ParallelSaveError
*/
MongooseError.ParallelSaveError = require('./parallelSave');
/**
* Thrown when a model with the given name was already registered on the connection.
* See [the FAQ about `OverwriteModelError`](/docs/faq.html#overwrite-model-error).
*
* @api public
* @memberOf Error
* @static OverwriteModelError
*/
MongooseError.OverwriteModelError = require('./overwriteModel');
/**
* Thrown when you try to access a model that has not been registered yet
*
* @api public
* @memberOf Error
* @static MissingSchemaError
*/
MongooseError.MissingSchemaError = require('./missingSchema');
/**
* An instance of this error will be returned if you used an array projection
* and then modified the array in an unsafe way.
*
* @api public
* @memberOf Error
* @static DivergentArrayError
*/
MongooseError.DivergentArrayError = require('./divergentArray');
/**
 * Thrown when you try to pass values to a model constructor that
 * were not specified in the schema, or to change immutable properties when
 * `strict` mode is `"throw"`
*
* @api public
* @memberOf Error
* @static StrictModeError
*/
MongooseError.StrictModeError = require('./strict');

47
node_modules/mongoose/lib/error/messages.js generated vendored Normal file
View File

@@ -0,0 +1,47 @@
/**
* The default built-in validator error messages. These may be customized.
*
* // customize within each schema or globally like so
* const mongoose = require('mongoose');
* mongoose.Error.messages.String.enum = "Your custom message for {PATH}.";
*
* As you might have noticed, error messages support basic templating
*
* - `{PATH}` is replaced with the invalid document path
* - `{VALUE}` is replaced with the invalid value
* - `{TYPE}` is replaced with the validator type such as "regexp", "min", or "user defined"
* - `{MIN}` is replaced with the declared min value for the Number.min validator
* - `{MAX}` is replaced with the declared max value for the Number.max validator
*
* Click the "show code" link below to see all defaults.
*
* @static messages
* @receiver MongooseError
* @api public
*/
'use strict';
const msg = module.exports = exports = {};
// `null` = no custom override; DocumentNotFoundError falls back to its
// built-in "No document found for query ..." message.
msg.DocumentNotFoundError = null;
// Messages shared by all schema types.
msg.general = {};
msg.general.default = 'Validator failed for path `{PATH}` with value `{VALUE}`';
msg.general.required = 'Path `{PATH}` is required.';
// Number validator messages.
msg.Number = {};
msg.Number.min = 'Path `{PATH}` ({VALUE}) is less than minimum allowed value ({MIN}).';
msg.Number.max = 'Path `{PATH}` ({VALUE}) is more than maximum allowed value ({MAX}).';
msg.Number.enum = '`{VALUE}` is not a valid enum value for path `{PATH}`.';
// Date validator messages.
msg.Date = {};
msg.Date.min = 'Path `{PATH}` ({VALUE}) is before minimum allowed value ({MIN}).';
msg.Date.max = 'Path `{PATH}` ({VALUE}) is after maximum allowed value ({MAX}).';
// String validator messages.
msg.String = {};
msg.String.enum = '`{VALUE}` is not a valid enum value for path `{PATH}`.';
msg.String.match = 'Path `{PATH}` is invalid ({VALUE}).';
msg.String.minlength = 'Path `{PATH}` (`{VALUE}`) is shorter than the minimum allowed length ({MINLENGTH}).';
msg.String.maxlength = 'Path `{PATH}` (`{VALUE}`) is longer than the maximum allowed length ({MAXLENGTH}).';

30
node_modules/mongoose/lib/error/missingSchema.js generated vendored Normal file
View File

@@ -0,0 +1,30 @@
/*!
* Module dependencies.
*/
'use strict';
const MongooseError = require('./');
class MissingSchemaError extends MongooseError {
  /*!
   * MissingSchema Error constructor.
   * @param {String} name model name that has no registered schema
   */
  constructor(name) {
    super(`Schema hasn't been registered for model "${name}".\n` +
      'Use mongoose.model(name, schema)');
  }
}
Object.defineProperty(MissingSchemaError.prototype, 'name', {
  value: 'MissingSchemaError'
});
/*!
 * exports
 */
module.exports = MissingSchemaError;

13
node_modules/mongoose/lib/error/mongooseError.js generated vendored Normal file
View File

@@ -0,0 +1,13 @@
'use strict';
/*!
 * Base class for all Mongoose-specific errors; carries no extra state.
 */
class MongooseError extends Error { }
// Define `name` as a non-enumerable prototype property (rather than an
// instance property) so it identifies the error class without showing up
// in `Object.keys`/spread output.
Object.defineProperty(MongooseError.prototype, 'name', {
  value: 'MongooseError'
});
module.exports = MongooseError;

44
node_modules/mongoose/lib/error/notFound.js generated vendored Normal file
View File

@@ -0,0 +1,44 @@
'use strict';
/*!
* Module dependencies.
*/
const MongooseError = require('./');
const util = require('util');
class DocumentNotFoundError extends MongooseError {
  /*!
   * DocumentNotFound Error constructor.
   * @param {Object} filter query filter that matched no document
   * @param {String} model model name
   * @param {Number} [numAffected]
   * @param {Object} [result] raw driver result
   */
  constructor(filter, model, numAffected, result) {
    // A custom message (string or factory function) can be configured via
    // `MongooseError.messages.DocumentNotFoundError`.
    const custom = MongooseError.messages.DocumentNotFoundError;
    let msg;
    if (custom != null) {
      msg = typeof custom === 'function' ? custom(filter, model) : custom;
    } else {
      msg = 'No document found for query "' + util.inspect(filter) +
        '" on model "' + model + '"';
    }
    super(msg);
    this.result = result;
    this.numAffected = numAffected;
    this.filter = filter;
    // Backwards compat: older code reads `err.query`.
    this.query = filter;
  }
}
Object.defineProperty(DocumentNotFoundError.prototype, 'name', {
  value: 'DocumentNotFoundError'
});
/*!
 * exports
 */
module.exports = DocumentNotFoundError;

30
node_modules/mongoose/lib/error/objectExpected.js generated vendored Normal file
View File

@@ -0,0 +1,30 @@
/*!
* Module dependencies.
*/
'use strict';
const MongooseError = require('./');
class ObjectExpectedError extends MongooseError {
  /**
   * Strict mode error constructor
   *
   * @param {string} path the nested path that was assigned
   * @param {Any} val the non-object value that was assigned to it
   * @api private
   */
  constructor(path, val) {
    const typeDescription = Array.isArray(val) ? 'array' : 'primitive value';
    super('Tried to set nested object field `' + path +
      `\` to ${typeDescription} \`` + val + '` and strict mode is set to throw.');
    this.path = path;
  }
}
Object.defineProperty(ObjectExpectedError.prototype, 'name', {
  value: 'ObjectExpectedError'
});
module.exports = ObjectExpectedError;

30
node_modules/mongoose/lib/error/objectParameter.js generated vendored Normal file
View File

@@ -0,0 +1,30 @@
/*!
* Module dependencies.
*/
'use strict';
const MongooseError = require('./');
class ObjectParameterError extends MongooseError {
  /**
   * Constructor for errors that happen when a parameter that's expected to be
   * an object isn't an object
   *
   * @param {Any} value the offending non-object value
   * @param {String} paramName name of the parameter
   * @param {String} fnName name of the function that received it
   * @api private
   */
  constructor(value, paramName, fnName) {
    // Use String(value) rather than value.toString() so a null/undefined
    // `value` renders as "null"/"undefined" instead of throwing a TypeError
    // from inside the error constructor. Output is unchanged for all other
    // values.
    super('Parameter "' + paramName + '" to ' + fnName +
      '() must be an object, got ' + String(value));
  }
}
Object.defineProperty(ObjectParameterError.prototype, 'name', {
  value: 'ObjectParameterError'
});
module.exports = ObjectParameterError;

29
node_modules/mongoose/lib/error/overwriteModel.js generated vendored Normal file
View File

@@ -0,0 +1,29 @@
/*!
* Module dependencies.
*/
'use strict';
const MongooseError = require('./');
class OverwriteModelError extends MongooseError {
  /*!
   * OverwriteModel Error constructor.
   * @param {String} name model name that was already compiled
   */
  constructor(name) {
    super(`Cannot overwrite \`${name}\` model once compiled.`);
  }
}
Object.defineProperty(OverwriteModelError.prototype, 'name', {
  value: 'OverwriteModelError'
});
/*!
 * exports
 */
module.exports = OverwriteModelError;

30
node_modules/mongoose/lib/error/parallelSave.js generated vendored Normal file
View File

@@ -0,0 +1,30 @@
'use strict';
/*!
* Module dependencies.
*/
const MongooseError = require('./');
class ParallelSaveError extends MongooseError {
  /**
   * ParallelSave Error constructor.
   *
   * @param {Document} doc the document being saved concurrently
   * @api private
   */
  constructor(doc) {
    // `+` concatenation kept so `doc._id` is coerced exactly as before.
    super('Can\'t save() the same doc multiple times in parallel. Document: ' + doc._id);
  }
}
Object.defineProperty(ParallelSaveError.prototype, 'name', {
  value: 'ParallelSaveError'
});
/*!
 * exports
 */
module.exports = ParallelSaveError;

31
node_modules/mongoose/lib/error/parallelValidate.js generated vendored Normal file
View File

@@ -0,0 +1,31 @@
'use strict';
/*!
* Module dependencies.
*/
const MongooseError = require('./mongooseError');
class ParallelValidateError extends MongooseError {
  /**
   * ParallelValidate Error constructor.
   *
   * @param {Document} doc the document being validated concurrently
   * @api private
   */
  constructor(doc) {
    // `+` concatenation kept so `doc._id` is coerced exactly as before.
    super('Can\'t validate() the same doc multiple times in parallel. Document: ' + doc._id);
  }
}
Object.defineProperty(ParallelValidateError.prototype, 'name', {
  value: 'ParallelValidateError'
});
/*!
 * exports
 */
module.exports = ParallelValidateError;

61
node_modules/mongoose/lib/error/serverSelection.js generated vendored Normal file
View File

@@ -0,0 +1,61 @@
/*!
* Module dependencies.
*/
'use strict';
const MongooseError = require('./mongooseError');
const allServersUnknown = require('../helpers/topology/allServersUnknown');
const isAtlas = require('../helpers/topology/isAtlas');
const isSSLError = require('../helpers/topology/isSSLError');
/*!
* ignore
*/
const atlasMessage = 'Could not connect to any servers in your MongoDB Atlas cluster. ' +
'One common reason is that you\'re trying to access the database from ' +
'an IP that isn\'t whitelisted. Make sure your current IP address is on your Atlas ' +
'cluster\'s IP whitelist: https://docs.atlas.mongodb.com/security-whitelist/';
const sslMessage = 'Mongoose is connecting with SSL enabled, but the server is ' +
'not accepting SSL connections. Please ensure that the MongoDB server you are ' +
'connecting to is configured to accept SSL connections. Learn more: ' +
'https://mongoosejs.com/docs/tutorials/ssl.html';
class MongooseServerSelectionError extends MongooseError {
  /**
   * Copies the state of the driver's server selection error onto this
   * Mongoose-level error, substituting a friendlier message for two
   * recognizable cases: Atlas IP-whitelist failures and SSL mismatches.
   *
   * @param {Error} err the driver error (its `reason` holds topology state)
   * @return {MongooseServerSelectionError} this
   * @api private
   */
  assimilateError(err) {
    const reason = err.reason;
    // Special message for a case that is likely due to IP whitelisting issues.
    // Auth failures are excluded: they also surface on Atlas but have a
    // different cause.
    const isAtlasWhitelistError = isAtlas(reason) &&
      allServersUnknown(reason) &&
      err.message.indexOf('bad auth') === -1 &&
      err.message.indexOf('Authentication failed') === -1;
    if (isAtlasWhitelistError) {
      this.message = atlasMessage;
    } else if (isSSLError(reason)) {
      this.message = sslMessage;
    } else {
      this.message = err.message;
    }
    // Copy each enumerable property except `name`, so this error keeps
    // its own class name while exposing the driver error's details.
    for (const key in err) {
      if (key !== 'name') {
        this[key] = err[key];
      }
    }
    return this;
  }
}
Object.defineProperty(MongooseServerSelectionError.prototype, 'name', {
  value: 'MongooseServerSelectionError'
});
module.exports = MongooseServerSelectionError;

33
node_modules/mongoose/lib/error/strict.js generated vendored Normal file
View File

@@ -0,0 +1,33 @@
/*!
* Module dependencies.
*/
'use strict';
const MongooseError = require('./');
class StrictModeError extends MongooseError {
  /**
   * Strict mode error constructor
   *
   * @param {String} path path that violated strict mode
   * @param {String} [msg] custom message; defaults to the standard strict-mode text
   * @param {Boolean} [immutable] true when the violation was a write to an immutable field
   * @inherits MongooseError
   * @api private
   */
  constructor(path, msg, immutable) {
    // `||` (not `??`) on purpose: an empty-string msg also falls back.
    super(msg || 'Field `' + path + '` is not in schema and strict ' +
      'mode is set to throw.');
    this.isImmutableError = !!immutable;
    this.path = path;
  }
}
Object.defineProperty(StrictModeError.prototype, 'name', {
  value: 'StrictModeError'
});
module.exports = StrictModeError;

111
node_modules/mongoose/lib/error/validation.js generated vendored Normal file
View File

@@ -0,0 +1,111 @@
/*!
* Module requirements
*/
'use strict';
const MongooseError = require('./mongooseError');
const util = require('util');
class ValidationError extends MongooseError {
  /**
   * Document Validation Error
   *
   * @api private
   * @param {Document} [instance] document whose validation failed; its model
   *   name is used in the message when available
   * @inherits MongooseError
   */
  constructor(instance) {
    let _message;
    if (instance && instance.constructor.name === 'model') {
      _message = instance.constructor.modelName + ' validation failed';
    } else {
      _message = 'Validation failed';
    }
    super(_message);
    this.errors = {};
    this._message = _message;
    if (instance) {
      // Alias the document's `errors` to this error's `errors` object, so
      // errors added via `addError()` are visible on the document too.
      instance.errors = this.errors;
    }
  }
  /**
   * Console.log helper
   */
  toString() {
    return this.name + ': ' + _generateMessage(this);
  }
  /*!
   * inspect helper: returns a plain Error clone carrying this error's
   * enumerable properties.
   */
  inspect() {
    return Object.assign(new Error(this.message), this);
  }
  /*!
   * Records a per-path error and regenerates the summary message.
   */
  addError(path, error) {
    this.errors[path] = error;
    this.message = this._message + ': ' + _generateMessage(this);
  }
}
if (util.inspect.custom) {
  /*!
   * Avoid Node deprecation warning DEP0079
   */
  ValidationError.prototype[util.inspect.custom] = ValidationError.prototype.inspect;
}
/*!
 * Helper for JSON.stringify
 * Ensure `name` and `message` show up in toJSON output re: gh-9847
 */
Object.defineProperty(ValidationError.prototype, 'toJSON', {
  enumerable: false,
  writable: false,
  configurable: true,
  value: function() {
    return Object.assign({}, this, { name: this.name, message: this.message });
  }
});
Object.defineProperty(ValidationError.prototype, 'name', {
  value: 'ValidationError'
});
/*!
* ignore
*/
/*!
 * Builds the comma-separated "path: message" summary from `err.errors`,
 * skipping self-referential entries (an entry pointing back at `err`).
 */
function _generateMessage(err) {
  const parts = [];
  const errors = err.errors || {};
  for (const key of Object.keys(errors)) {
    if (err === errors[key]) {
      continue;
    }
    parts.push(key + ': ' + errors[key].message);
  }
  return parts.join(', ');
}
/*!
* Module exports
*/
module.exports = ValidationError;

94
node_modules/mongoose/lib/error/validator.js generated vendored Normal file
View File

@@ -0,0 +1,94 @@
/*!
* Module dependencies.
*/
'use strict';
const MongooseError = require('./');
class ValidatorError extends MongooseError {
  /**
   * Schema validator error
   *
   * @param {Object} properties
   * @api private
   */
  constructor(properties) {
    // Fall back to the generic default message when none was supplied.
    const template = properties.message || MongooseError.messages.general.default;
    const message = formatMessage(template, properties);
    super(message);
    const props = Object.assign({}, properties, { message: message });
    this.properties = props;
    this.kind = props.type;
    this.path = props.path;
    this.value = props.value;
    this.reason = props.reason;
  }
  /*!
   * toString helper
   * TODO remove? This defaults to `${this.name}: ${this.message}`
   */
  toString() {
    return this.message;
  }
  /*!
   * Ensure `name` and `message` show up in toJSON output re: gh-9296
   */
  toJSON() {
    return Object.assign({ name: this.name, message: this.message }, this);
  }
}
// Non-enumerable, non-writable `name` so `err.name` checks stay reliable.
Object.defineProperty(ValidatorError.prototype, 'name', {
  value: 'ValidatorError'
});
/*!
 * The object used to define this validator. Not enumerable to hide
 * it from `require('util').inspect()` output re: gh-3925
 */
Object.defineProperty(ValidatorError.prototype, 'properties', {
  enumerable: false,
  writable: true,
  value: null
});
// Exposed for testing
ValidatorError.prototype.formatMessage = formatMessage;
/*!
* Formats error messages
*/
/*!
 * Formats error messages: either invokes a message function with the
 * validator properties, or substitutes `{PROP}` placeholders in a template.
 * Only the FIRST occurrence of each placeholder is replaced.
 */
function formatMessage(msg, properties) {
  if (typeof msg === 'function') {
    return msg(properties);
  }
  let result = msg;
  for (const name of Object.keys(properties)) {
    // `message` itself is never interpolated into the template.
    if (name === 'message') {
      continue;
    }
    result = result.replace('{' + name.toUpperCase() + '}', properties[name]);
  }
  return result;
}
/*!
* exports
*/
module.exports = ValidatorError;

36
node_modules/mongoose/lib/error/version.js generated vendored Normal file
View File

@@ -0,0 +1,36 @@
'use strict';
/*!
* Module dependencies.
*/
const MongooseError = require('./');
class VersionError extends MongooseError {
  /**
   * Version Error constructor: raised when an optimistic-concurrency save
   * finds no document matching both `_id` and the expected `__v`.
   *
   * @param {Document} doc
   * @param {Number} currentVersion
   * @param {Array<String>} modifiedPaths
   * @api private
   */
  constructor(doc, currentVersion, modifiedPaths) {
    super(`No matching document found for id "${doc._id}" version ${currentVersion} modifiedPaths "${modifiedPaths.join(', ')}"`);
    this.version = currentVersion;
    this.modifiedPaths = modifiedPaths;
  }
}
// Non-enumerable, non-writable `name` so `err.name` checks stay reliable.
Object.defineProperty(VersionError.prototype, 'name', {
  value: 'VersionError'
});
/*!
 * exports
 */
module.exports = VersionError;

View File

@@ -0,0 +1,50 @@
'use strict';
module.exports = function stringifyFunctionOperators(pipeline) {
if (!Array.isArray(pipeline)) {
return;
}
for (const stage of pipeline) {
if (stage == null) {
continue;
}
const canHaveAccumulator = stage.$group || stage.$bucket || stage.$bucketAuto;
if (canHaveAccumulator != null) {
for (const key of Object.keys(canHaveAccumulator)) {
handleAccumulator(canHaveAccumulator[key]);
}
}
const stageType = Object.keys(stage)[0];
if (stageType && typeof stage[stageType] === 'object') {
const stageOptions = stage[stageType];
for (const key of Object.keys(stageOptions)) {
if (stageOptions[key] != null &&
stageOptions[key].$function != null &&
typeof stageOptions[key].$function.body === 'function') {
stageOptions[key].$function.body = stageOptions[key].$function.body.toString();
}
}
}
if (stage.$facet != null) {
for (const key of Object.keys(stage.$facet)) {
stringifyFunctionOperators(stage.$facet[key]);
}
}
}
};
/*!
 * Stringify the function-valued fields of a `$accumulator` operator,
 * leaving already-string fields untouched. No-op for non-accumulators.
 */
function handleAccumulator(operator) {
  const accumulator = operator && operator.$accumulator;
  if (accumulator == null) {
    return;
  }
  ['init', 'accumulate', 'merge', 'finalize'].forEach((prop) => {
    if (typeof accumulator[prop] === 'function') {
      accumulator[prop] = String(accumulator[prop]);
    }
  });
}

33
node_modules/mongoose/lib/helpers/arrayDepth.js generated vendored Normal file
View File

@@ -0,0 +1,33 @@
'use strict';
module.exports = arrayDepth;
/*!
 * Compute the min/max nesting depth of `arr`, and whether any leaf is a
 * non-array item. Non-arrays count as depth 0; `[]` and `[x]` (x not an
 * array) count as depth 1.
 *
 * @param {Array} arr
 * @return {{min: Number, max: Number, containsNonArrayItem: Boolean}}
 */
function arrayDepth(arr) {
  if (!Array.isArray(arr)) {
    return { min: 0, max: 0, containsNonArrayItem: true };
  }
  if (arr.length === 0 || (arr.length === 1 && !Array.isArray(arr[0]))) {
    return { min: 1, max: 1, containsNonArrayItem: false };
  }
  // Seed with the first element's depth, then merge the rest.
  const result = arrayDepth(arr[0]);
  for (let i = 1; i < arr.length; ++i) {
    const childDepth = arrayDepth(arr[i]);
    result.min = Math.min(result.min, childDepth.min);
    result.max = Math.max(result.max, childDepth.max);
    result.containsNonArrayItem = result.containsNonArrayItem || childDepth.containsNonArrayItem;
  }
  ++result.min;
  ++result.max;
  return result;
}

143
node_modules/mongoose/lib/helpers/clone.js generated vendored Normal file
View File

@@ -0,0 +1,143 @@
'use strict';
const cloneRegExp = require('regexp-clone');
const Decimal = require('../types/decimal128');
const ObjectId = require('../types/objectid');
const specialProperties = require('./specialProperties');
const isMongooseObject = require('./isMongooseObject');
const getFunctionName = require('./getFunctionName');
const isBsonType = require('./isBsonType');
const isObject = require('./isObject');
const symbols = require('./symbols');
const utils = require('../utils');
/*!
* Object clone with Mongoose natives support.
*
* If options.minimize is true, creates a minimal data object. Empty objects and undefined values will not be cloned. This makes the data payload sent to MongoDB as small as possible.
*
* Functions are never cloned.
*
* @param {Object} obj the object to clone
* @param {Object} options
* @param {Boolean} isArrayChild true if cloning immediately underneath an array. Special case for minimize.
* @return {Object} the cloned object
* @api private
*/
function clone(obj, options, isArrayChild) {
  // null/undefined pass through unchanged.
  if (obj == null) {
    return obj;
  }
  if (Array.isArray(obj)) {
    return cloneArray(obj, options);
  }
  // Mongoose documents/arrays/buffers/maps: delegate to their own
  // serialization rather than walking them generically.
  if (isMongooseObject(obj)) {
    // Single nested subdocs should apply getters later in `applyGetters()`
    // when calling `toObject()`. See gh-7442, gh-8295
    if (options && options._skipSingleNestedGetters && obj.$isSingleNested) {
      options = Object.assign({}, options, { getters: false });
    }
    // A POJO carrying document internals (`$__` + `_doc`) is likely a
    // spread document — return the raw backing doc instead.
    if (utils.isPOJO(obj) && obj.$__ != null && obj._doc != null) {
      return obj._doc;
    }
    if (options && options.json && typeof obj.toJSON === 'function') {
      return obj.toJSON(options);
    }
    return obj.toObject(options);
  }
  // Dispatch on constructor name for plain objects and common natives.
  // NOTE: order matters — this must run before the ObjectId/Decimal checks.
  if (obj.constructor) {
    switch (getFunctionName(obj.constructor)) {
      case 'Object':
        return cloneObject(obj, options, isArrayChild);
      case 'Date':
        return new obj.constructor(+obj);
      case 'RegExp':
        return cloneRegExp(obj);
      default:
        // ignore
        break;
    }
  }
  if (obj instanceof ObjectId) {
    return new ObjectId(obj.id);
  }
  if (isBsonType(obj, 'Decimal128')) {
    if (options && options.flattenDecimals) {
      return obj.toJSON();
    }
    return Decimal.fromString(obj.toString());
  }
  if (!obj.constructor && isObject(obj)) {
    // object created with Object.create(null)
    return cloneObject(obj, options, isArrayChild);
  }
  // Schema types know how to clone themselves.
  if (obj[symbols.schemaTypeSymbol]) {
    return obj.clone();
  }
  // If we're cloning this object to go into a MongoDB command,
  // and there's a `toBSON()` function, assume this object will be
  // stored as a primitive in MongoDB and doesn't need to be cloned.
  if (options && options.bson && typeof obj.toBSON === 'function') {
    return obj;
  }
  // Last resort: use the object's primitive representation, if any.
  if (obj.valueOf != null) {
    return obj.valueOf();
  }
  return cloneObject(obj, options, isArrayChild);
}
module.exports = clone;
/*!
* ignore
*/
// Clone a plain object's own enumerable keys. With `options.minimize`, an
// object that ends up with no keys yields `undefined` (via `hasKeys && ret`)
// unless it sits directly inside an array (`isArrayChild`), where the slot
// must be preserved.
function cloneObject(obj, options, isArrayChild) {
  const minimize = options && options.minimize;
  const ret = {};
  let hasKeys;
  for (const k of Object.keys(obj)) {
    // Keys in `specialProperties` (presumably `__proto__` and similar —
    // defined in ./specialProperties) are never copied.
    if (specialProperties.has(k)) {
      continue;
    }
    // Don't pass `isArrayChild` down
    const val = clone(obj[k], options);
    if (!minimize || (typeof val !== 'undefined')) {
      if (minimize === false && typeof val === 'undefined') {
        delete ret[k];
      } else {
        hasKeys || (hasKeys = true);
        ret[k] = val;
      }
    }
  }
  // When minimizing a non-array-child, an empty result collapses to
  // `undefined` (hasKeys is undefined when nothing was copied).
  return minimize && !isArrayChild ? hasKeys && ret : ret;
}
/*!
 * Clone every element of `arr`, flagging each as an array child so
 * `cloneObject`'s minimization does not collapse empty objects in arrays.
 */
function cloneArray(arr, options) {
  const result = [];
  for (let i = 0; i < arr.length; ++i) {
    result.push(clone(arr[i], options, true));
  }
  return result;
}

106
node_modules/mongoose/lib/helpers/common.js generated vendored Normal file
View File

@@ -0,0 +1,106 @@
'use strict';
/*!
* Module dependencies.
*/
const Binary = require('../driver').get().Binary;
const Decimal128 = require('../types/decimal128');
const ObjectId = require('../types/objectid');
const isMongooseObject = require('./isMongooseObject');
exports.flatten = flatten;
exports.modifiedPaths = modifiedPaths;
/*!
* ignore
*/
// Flatten a (possibly nested) update object into dotted-path keys, e.g.
// `{ a: { b: 1 } }` -> `{ 'a': {...}, 'a.b': 1 }`. Mixed-type schema paths
// are left unflattened; nested schema paths not present in the update are
// added with value `undefined`.
function flatten(update, path, options, schema) {
  let keys;
  if (update && isMongooseObject(update) && !Buffer.isBuffer(update)) {
    keys = Object.keys(update.toObject({ transform: false, virtuals: false }));
  } else {
    keys = Object.keys(update || {});
  }
  const numKeys = keys.length;
  const result = {};
  path = path ? path + '.' : '';
  for (let i = 0; i < numKeys; ++i) {
    const key = keys[i];
    const val = update[key];
    result[path + key] = val;
    // Avoid going into mixed paths if schema is specified
    const keySchema = schema && schema.path && schema.path(path + key);
    const isNested = schema && schema.nested && schema.nested[path + key];
    if (keySchema && keySchema.instance === 'Mixed') continue;
    if (shouldFlatten(val)) {
      if (options && options.skipArrays && Array.isArray(val)) {
        continue;
      }
      const flat = flatten(val, path + key, options, schema);
      for (const k in flat) {
        result[k] = flat[k];
      }
      // Arrays are flattened AND kept whole under their own key.
      if (Array.isArray(val)) {
        result[path + key] = val;
      }
    }
    if (isNested) {
      // Mark schema paths under this nested path that the update omits.
      const paths = Object.keys(schema.paths);
      for (const p of paths) {
        if (p.startsWith(path + key + '.') && !result.hasOwnProperty(p)) {
          result[p] = void 0;
        }
      }
    }
  }
  return result;
}
/*!
* ignore
*/
/*!
 * Collect every dotted path touched by `update` into `result`
 * (`{ path: true }`), recursing into nested plain objects.
 */
function modifiedPaths(update, path, result) {
  result = result || {};
  const prefix = path ? path + '.' : '';
  for (const key of Object.keys(update || {})) {
    let value = update[key];
    result[prefix + key] = true;
    // Unwrap mongoose objects (but not buffers) before recursing.
    if (isMongooseObject(value) && !Buffer.isBuffer(value)) {
      value = value.toObject({ transform: false, virtuals: false });
    }
    if (shouldFlatten(value)) {
      modifiedPaths(value, prefix + key, result);
    }
  }
  return result;
}
/*!
* ignore
*/
/*!
 * True-ish when `val` is a plain-ish object worth recursing into:
 * an object that is not a Date/ObjectId/Buffer/Decimal128/Binary and
 * not an empty array.
 */
function shouldFlatten(val) {
  return val &&
    typeof val === 'object' &&
    !(val instanceof Date) &&
    !(val instanceof ObjectId) &&
    !(val instanceof Buffer) &&
    !(val instanceof Decimal128) &&
    !(val instanceof Binary) &&
    (!Array.isArray(val) || val.length > 0);
}

157
node_modules/mongoose/lib/helpers/cursor/eachAsync.js generated vendored Normal file
View File

@@ -0,0 +1,157 @@
'use strict';
/*!
* Module dependencies.
*/
const immediate = require('../immediate');
const promiseOrCallback = require('../promiseOrCallback');
/**
* Execute `fn` for every document in the cursor. If `fn` returns a promise,
* will wait for the promise to resolve before iterating on to the next one.
* Returns a promise that resolves when done.
*
* @param {Function} next the thunk to call to get the next document
* @param {Function} fn
* @param {Object} options
* @param {Function} [callback] executed when all docs have been processed
* @return {Promise}
* @api public
* @method eachAsync
*/
module.exports = function eachAsync(next, fn, options, callback) {
  const parallel = options.parallel || 1;
  const batchSize = options.batchSize;
  // Serializes calls to `next()` while still letting `fn()` run in parallel.
  const enqueue = asyncQueue();
  return promiseOrCallback(callback, cb => {
    // Validate `batchSize` up front: must be a positive integer.
    if (batchSize != null) {
      if (typeof batchSize !== 'number') {
        throw new TypeError('batchSize must be a number');
      }
      if (batchSize < 1) {
        throw new TypeError('batchSize must be at least 1');
      }
      if (batchSize !== Math.floor(batchSize)) {
        throw new TypeError('batchSize must be a positive integer');
      }
    }
    iterate(cb);
  });
  function iterate(finalCallback) {
    let drained = false;
    let handleResultsInProgress = 0;
    let currentDocumentIndex = 0;
    let documentsBatch = [];
    let error = null;
    // Prime up to `parallel` concurrent fetch loops.
    for (let i = 0; i < parallel; ++i) {
      enqueue(fetch);
    }
    function fetch(done) {
      // Stop fetching once the cursor is exhausted or an error occurred.
      if (drained || error) {
        return done();
      }
      next(function(err, doc) {
        if (drained || error != null) {
          return done();
        }
        if (err != null) {
          error = err;
          finalCallback(err);
          return done();
        }
        if (doc == null) {
          // Cursor exhausted: finish now, or flush a partial batch first.
          drained = true;
          if (handleResultsInProgress <= 0) {
            finalCallback(null);
          } else if (batchSize != null && documentsBatch.length) {
            handleNextResult(documentsBatch, currentDocumentIndex++, handleNextResultCallBack);
          }
          return done();
        }
        ++handleResultsInProgress;
        // Kick off the subsequent `next()` before handling the result, but
        // make sure we know that we still have a result to handle re: #8422
        immediate(() => done());
        if (batchSize != null) {
          documentsBatch.push(doc);
        }
        // If the current documents size is less than the provided patch size don't process the documents yet
        if (batchSize != null && documentsBatch.length !== batchSize) {
          setTimeout(() => enqueue(fetch), 0);
          return;
        }
        const docsToProcess = batchSize != null ? documentsBatch : doc;
        function handleNextResultCallBack(err) {
          // Account for the processed doc(s), then either finish or refetch.
          if (batchSize != null) {
            handleResultsInProgress -= documentsBatch.length;
            documentsBatch = [];
          } else {
            --handleResultsInProgress;
          }
          if (err != null) {
            error = err;
            return finalCallback(err);
          }
          if (drained && handleResultsInProgress <= 0) {
            return finalCallback(null);
          }
          setTimeout(() => enqueue(fetch), 0);
        }
        handleNextResult(docsToProcess, currentDocumentIndex++, handleNextResultCallBack);
      });
    }
  }
  // Invoke `fn(doc, i)`; if it returns a thenable, wait for it to settle.
  function handleNextResult(doc, i, callback) {
    const promise = fn(doc, i);
    if (promise && typeof promise.then === 'function') {
      promise.then(
        function() { callback(null); },
        function(error) { callback(error || new Error('`eachAsync()` promise rejected without error')); });
    } else {
      callback(null);
    }
  }
};
// `next()` can only execute one at a time, so make sure we always execute
// `next()` in series, while still allowing multiple `fn()` instances to run
// in parallel.
function asyncQueue() {
  const pending = [];
  let running = false;
  // `_step` is handed to each task as its completion callback: it marks the
  // queue idle and starts the next pending task, if any.
  function _step() {
    running = false;
    if (pending.length > 0) {
      running = true;
      pending.shift()(_step);
    }
  }
  return function enqueue(task) {
    if (!running && pending.length === 0) {
      // Queue idle: run immediately.
      running = true;
      return task(_step);
    }
    pending.push(task);
  };
}

View File

@@ -0,0 +1,16 @@
'use strict';
const ObjectId = require('../../types/objectid');
module.exports = function areDiscriminatorValuesEqual(a, b) {
if (typeof a === 'string' && typeof b === 'string') {
return a === b;
}
if (typeof a === 'number' && typeof b === 'number') {
return a === b;
}
if (a instanceof ObjectId && b instanceof ObjectId) {
return a.toString() === b.toString();
}
return false;
};

View File

@@ -0,0 +1,12 @@
'use strict';
module.exports = function checkEmbeddedDiscriminatorKeyProjection(userProjection, path, schema, selected, addedPaths) {
const userProjectedInPath = Object.keys(userProjection).
reduce((cur, key) => cur || key.startsWith(path + '.'), false);
const _discriminatorKey = path + '.' + schema.options.discriminatorKey;
if (!userProjectedInPath &&
addedPaths.length === 1 &&
addedPaths[0] === _discriminatorKey) {
selected.splice(selected.indexOf(_discriminatorKey), 1);
}
};

View File

@@ -0,0 +1,25 @@
'use strict';
const getDiscriminatorByValue = require('./getDiscriminatorByValue');
/*!
* Find the correct constructor, taking into account discriminators
*/
module.exports = function getConstructor(Constructor, value) {
const discriminatorKey = Constructor.schema.options.discriminatorKey;
if (value != null &&
Constructor.discriminators &&
value[discriminatorKey] != null) {
if (Constructor.discriminators[value[discriminatorKey]]) {
Constructor = Constructor.discriminators[value[discriminatorKey]];
} else {
const constructorByValue = getDiscriminatorByValue(Constructor.discriminators, value[discriminatorKey]);
if (constructorByValue) {
Constructor = constructorByValue;
}
}
}
return Constructor;
};

View File

@@ -0,0 +1,27 @@
'use strict';
const areDiscriminatorValuesEqual = require('./areDiscriminatorValuesEqual');
/*!
* returns discriminator by discriminatorMapping.value
*
* @param {Model} model
* @param {string} value
*/
module.exports = function getDiscriminatorByValue(discriminators, value) {
if (discriminators == null) {
return null;
}
for (const name of Object.keys(discriminators)) {
const it = discriminators[name];
if (
it.schema &&
it.schema.discriminatorMapping &&
areDiscriminatorValuesEqual(it.schema.discriminatorMapping.value, value)
) {
return it;
}
}
return null;
};

View File

@@ -0,0 +1,26 @@
'use strict';
const areDiscriminatorValuesEqual = require('./areDiscriminatorValuesEqual');
/*!
* returns discriminator by discriminatorMapping.value
*
* @param {Schema} schema
* @param {string} value
*/
module.exports = function getSchemaDiscriminatorByValue(schema, value) {
if (schema == null || schema.discriminators == null) {
return null;
}
for (const key of Object.keys(schema.discriminators)) {
const discriminatorSchema = schema.discriminators[key];
if (discriminatorSchema.discriminatorMapping == null) {
continue;
}
if (areDiscriminatorValuesEqual(discriminatorSchema.discriminatorMapping.value, value)) {
return discriminatorSchema;
}
}
return null;
};

View File

@@ -0,0 +1,28 @@
'use strict';
/*!
* ignore
*/
module.exports = function cleanModifiedSubpaths(doc, path, options) {
options = options || {};
const skipDocArrays = options.skipDocArrays;
let deleted = 0;
if (!doc) {
return deleted;
}
for (const modifiedPath of Object.keys(doc.$__.activePaths.states.modify)) {
if (skipDocArrays) {
const schemaType = doc.$__schema.path(modifiedPath);
if (schemaType && schemaType.$isMongooseDocumentArray) {
continue;
}
}
if (modifiedPath.startsWith(path + '.')) {
delete doc.$__.activePaths.states.modify[modifiedPath];
++deleted;
}
}
return deleted;
};

211
node_modules/mongoose/lib/helpers/document/compile.js generated vendored Normal file
View File

@@ -0,0 +1,211 @@
'use strict';
const documentSchemaSymbol = require('../../helpers/symbols').documentSchemaSymbol;
const get = require('../../helpers/get');
const internalToObjectOptions = require('../../options').internalToObjectOptions;
const utils = require('../../utils');
let Document;
const getSymbol = require('../../helpers/symbols').getSymbol;
const scopeSymbol = require('../../helpers/symbols').scopeSymbol;
/*!
* exports
*/
exports.compile = compile;
exports.defineKey = defineKey;
/*!
 * Compiles schemas: defines an accessor on `proto` for every key in `tree`,
 * recursing (via `defineKey`) into keys that hold nested sub-schemas.
 */
function compile(tree, proto, prefix, options) {
  Document = Document || require('../../document');
  const keys = Object.keys(tree);
  for (const key of keys) {
    const limb = tree[key];
    // A key has subprops when it is a non-empty POJO that is not itself a
    // type declaration (special-casing a literal `type` property).
    const hasSubprops = utils.isPOJO(limb) && Object.keys(limb).length &&
      (!limb[options.typeKey] || (options.typeKey === 'type' && limb.type.type));
    defineKey(key, hasSubprops ? limb : null, proto, prefix, keys, options);
  }
}
/*!
* Defines the accessor named prop on the incoming prototype.
*/
function defineKey(prop, subprops, prototype, prefix, keys, options) {
  Document = Document || require('../../document');
  const path = (prefix ? prefix + '.' : '') + prop;
  prefix = prefix || '';
  if (subprops) {
    // Nested path: getter lazily builds (and caches in `$__.getters`) a
    // faux-document whose accessors proxy back to the real document.
    Object.defineProperty(prototype, prop, {
      enumerable: true,
      configurable: true,
      get: function() {
        const _this = this;
        if (!this.$__.getters) {
          this.$__.getters = {};
        }
        if (!this.$__.getters[path]) {
          const nested = Object.create(Document.prototype, getOwnPropertyDescriptors(this));
          // save scope for nested getters/setters
          if (!prefix) {
            nested.$__[scopeSymbol] = this;
          }
          nested.$__.nestedPath = path;
          // Mirror the parent's schema under all three access points.
          Object.defineProperty(nested, 'schema', {
            enumerable: false,
            configurable: true,
            writable: false,
            value: prototype.schema
          });
          Object.defineProperty(nested, '$__schema', {
            enumerable: false,
            configurable: true,
            writable: false,
            value: prototype.schema
          });
          Object.defineProperty(nested, documentSchemaSymbol, {
            enumerable: false,
            configurable: true,
            writable: false,
            value: prototype.schema
          });
          // `toObject`/`toJSON` read the nested value off the PARENT doc.
          Object.defineProperty(nested, 'toObject', {
            enumerable: false,
            configurable: true,
            writable: false,
            value: function() {
              return utils.clone(_this.get(path, null, {
                virtuals: get(this, 'schema.options.toObject.virtuals', null)
              }));
            }
          });
          Object.defineProperty(nested, '$__get', {
            enumerable: false,
            configurable: true,
            writable: false,
            value: function() {
              return _this.get(path, null, {
                virtuals: get(this, 'schema.options.toObject.virtuals', null)
              });
            }
          });
          Object.defineProperty(nested, 'toJSON', {
            enumerable: false,
            configurable: true,
            writable: false,
            value: function() {
              return _this.get(path, null, {
                virtuals: get(_this, 'schema.options.toJSON.virtuals', null)
              });
            }
          });
          Object.defineProperty(nested, '$__isNested', {
            enumerable: false,
            configurable: true,
            writable: false,
            value: true
          });
          const _isEmptyOptions = Object.freeze({
            minimize: true,
            virtuals: false,
            getters: false,
            transform: false
          });
          Object.defineProperty(nested, '$isEmpty', {
            enumerable: false,
            configurable: true,
            writable: false,
            value: function() {
              return Object.keys(this.get(path, null, _isEmptyOptions) || {}).length === 0;
            }
          });
          Object.defineProperty(nested, '$__parent', {
            enumerable: false,
            configurable: true,
            writable: false,
            value: this
          });
          // Recurse so the faux-document gets accessors for its own subkeys.
          compile(subprops, nested, path, options);
          this.$__.getters[path] = nested;
        }
        return this.$__.getters[path];
      },
      set: function(v) {
        if (v != null && v.$__isNested) {
          // Convert top-level to POJO, but leave subdocs hydrated so `$set`
          // can handle them. See gh-9293.
          v = v.$__get();
        } else if (v instanceof Document && !v.$__isNested) {
          v = v.toObject(internalToObjectOptions);
        }
        // Route the write through the owning (scope) document.
        const doc = this.$__[scopeSymbol] || this;
        doc.$set(path, v);
      }
    });
  } else {
    // Leaf path: accessors delegate straight to the document's get/$set.
    Object.defineProperty(prototype, prop, {
      enumerable: true,
      configurable: true,
      get: function() {
        return this[getSymbol].call(this.$__[scopeSymbol] || this, path);
      },
      set: function(v) {
        this.$set.call(this.$__[scopeSymbol] || this, path, v);
      }
    });
  }
}
// Collect own-property descriptors for `object`, dropping getter-backed
// properties (assumed to be schema paths, re: #5470) and marking internal
// bookkeeping fields non-enumerable to match previous behavior (re: #2211).
function getOwnPropertyDescriptors(object) {
  // Fields that must stay hidden from enumeration on the nested document.
  const internalKeys = new Set([
    'isNew',
    '$__',
    'errors',
    '_doc',
    '$locals',
    '$op',
    '__parentArray',
    '__index',
    '$isDocumentArrayElement'
  ]);
  const result = {};
  for (const key of Object.getOwnPropertyNames(object)) {
    const descriptor = Object.getOwnPropertyDescriptor(object, key);
    if (descriptor.get) {
      continue;
    }
    descriptor.enumerable = !internalKeys.has(key);
    result[key] = descriptor;
  }
  return result;
}

View File

@@ -0,0 +1,43 @@
'use strict';
const get = require('../get');
/*!
* Like `schema.path()`, except with a document, because impossible to
* determine path type without knowing the embedded discriminator key.
*/
module.exports = function getEmbeddedDiscriminatorPath(doc, path, options) {
  options = options || {};
  const typeOnly = options.typeOnly;
  const parts = path.split('.');
  let schema = null;
  let type = 'adhocOrUndefined';
  // Walk the path one dotted segment at a time, so we can detect embedded
  // discriminators part-way through and re-resolve against them.
  for (let i = 0; i < parts.length; ++i) {
    const subpath = parts.slice(0, i + 1).join('.');
    schema = doc.schema.path(subpath);
    if (schema == null) {
      type = 'adhocOrUndefined';
      continue;
    }
    // Mixed paths accept anything; no need to look deeper.
    if (schema.instance === 'Mixed') {
      return typeOnly ? 'real' : schema;
    }
    type = doc.schema.pathType(subpath);
    if ((schema.$isSingleNested || schema.$isMongooseDocumentArrayElement) &&
        schema.schema.discriminators != null) {
      const discriminators = schema.schema.discriminators;
      // Read the subdoc's actual discriminator key value off the document.
      const discriminatorKey = doc.get(subpath + '.' +
        get(schema, 'schema.options.discriminatorKey'));
      if (discriminatorKey == null || discriminators[discriminatorKey] == null) {
        continue;
      }
      // Recurse into the subdocument with the remaining path segments.
      const rest = parts.slice(i + 1).join('.');
      return getEmbeddedDiscriminatorPath(doc.get(subpath), rest, options);
    }
  }
  // Are we getting the whole schema or just the type, 'real', 'nested', etc.
  return typeOnly ? type : schema;
};

View File

@@ -0,0 +1,17 @@
'use strict';
const utils = require('../../utils');
/**
* Using spread operator on a Mongoose document gives you a
* POJO that has a tendency to cause infinite recursion. So
* we use this function on `set()` to prevent that.
*/
module.exports = function handleSpreadDoc(v) {
if (utils.isPOJO(v) && v.$__ != null && v._doc != null) {
return v._doc;
}
return v;
};

25
node_modules/mongoose/lib/helpers/each.js generated vendored Normal file
View File

@@ -0,0 +1,25 @@
'use strict';
module.exports = function each(arr, cb, done) {
if (arr.length === 0) {
return done();
}
let remaining = arr.length;
let err = null;
for (const v of arr) {
cb(v, function(_err) {
if (err != null) {
return;
}
if (_err != null) {
err = _err;
return done(err);
}
if (--remaining <= 0) {
return done();
}
});
}
};

64
node_modules/mongoose/lib/helpers/get.js generated vendored Normal file
View File

@@ -0,0 +1,64 @@
'use strict';
/*!
* Simplified lodash.get to work around the annoying null quirk. See:
* https://github.com/lodash/lodash/issues/3659
*/
module.exports = function get(obj, path, def) {
  let parts;
  let isPathArray = false;
  if (typeof path === 'string') {
    // Fast path: no dot means a single property lookup.
    if (path.indexOf('.') === -1) {
      const _v = getProperty(obj, path);
      if (_v == null) {
        return def;
      }
      return _v;
    }
    parts = path.split('.');
  } else {
    // `path` given as an array of segments.
    isPathArray = true;
    parts = path;
    if (parts.length === 1) {
      const _v = getProperty(obj, parts[0]);
      if (_v == null) {
        return def;
      }
      return _v;
    }
  }
  // `rest` tracks the not-yet-consumed dotted suffix so literal dotted keys
  // (e.g. `{ 'a.b': 42 }`) can be matched before descending.
  let rest = path;
  let cur = obj;
  for (const part of parts) {
    if (cur == null) {
      return def;
    }
    // `lib/cast.js` depends on being able to get dotted paths in updates,
    // like `{ $set: { 'a.b': 42 } }`
    if (!isPathArray && cur[rest] != null) {
      return cur[rest];
    }
    cur = getProperty(cur, part);
    if (!isPathArray) {
      rest = rest.substr(part.length + 1);
    }
  }
  return cur == null ? def : cur;
};
/*!
 * Read one property from `obj`: nullish containers pass through unchanged,
 * Maps use `.get()` lookup, everything else uses plain indexing.
 */
function getProperty(obj, prop) {
  if (obj == null) {
    return obj;
  }
  return obj instanceof Map ? obj.get(prop) : obj[prop];
}

View File

@@ -0,0 +1,27 @@
'use strict';
/**
 * Returns a fresh, zeroed-out result object whose shape mirrors the MongoDB
 * driver's bulk-write result (nested legacy `result` plus top-level counts)
 * — presumably used as the starting point / empty fallback for `bulkWrite()`;
 * confirm against callers.
 *
 * @return {Object} a new empty bulk-write result (safe to mutate)
 * @api private
 */
function getDefaultBulkwriteResult() {
  return {
    result: {
      ok: 1,
      writeErrors: [],
      writeConcernErrors: [],
      insertedIds: [],
      nInserted: 0,
      nUpserted: 0,
      nMatched: 0,
      nModified: 0,
      nRemoved: 0,
      upserted: []
    },
    insertedCount: 0,
    matchedCount: 0,
    modifiedCount: 0,
    deletedCount: 0,
    upsertedCount: 0,
    upsertedIds: {},
    insertedIds: {},
    n: 0
  };
}
module.exports = getDefaultBulkwriteResult;

8
node_modules/mongoose/lib/helpers/getFunctionName.js generated vendored Normal file
View File

@@ -0,0 +1,8 @@
'use strict';
module.exports = function(fn) {
if (fn.name) {
return fn.name;
}
return (fn.toString().trim().match(/^function\s*([^\s(]+)/) || [])[1];
};

14
node_modules/mongoose/lib/helpers/immediate.js generated vendored Normal file
View File

@@ -0,0 +1,14 @@
/*!
* Centralize this so we can more easily work around issues with people
* stubbing out `process.nextTick()` in tests using sinon:
* https://github.com/sinonjs/lolex#automatically-incrementing-mocked-time
* See gh-6074
*/
'use strict';
// Bind once at require time so later stubbing of `process.nextTick` (e.g.
// sinon/lolex in tests, per the note above) can't break internal scheduling.
const nextTick = process.nextTick.bind(process);
// Schedule `cb` to run on the next tick of the event loop.
module.exports = function immediate(cb) {
  return nextTick(cb);
};

View File

@@ -0,0 +1,18 @@
'use strict';
const get = require('../get');
module.exports = function isDefaultIdIndex(index) {
if (Array.isArray(index)) {
// Mongoose syntax
const keys = Object.keys(index[0]);
return keys.length === 1 && keys[0] === '_id' && index[0]._id !== 'hashed';
}
if (typeof index !== 'object') {
return false;
}
const key = get(index, 'key', {});
return Object.keys(key).length === 1 && key.hasOwnProperty('_id');
};

View File

@@ -0,0 +1,95 @@
'use strict';
const get = require('../get');
const utils = require('../../utils');
/**
* Given a Mongoose index definition (key + options objects) and a MongoDB server
* index definition, determine if the two indexes are equal.
*
* @param {Object} key the Mongoose index spec
* @param {Object} options the Mongoose index definition's options
* @param {Object} dbIndex the index in MongoDB as returned by `listIndexes()`
* @api private
*/
module.exports = function isIndexEqual(key, options, dbIndex) {
// Special case: text indexes have a special format in the db. For example,
// `{ name: 'text' }` becomes:
// {
// v: 2,
// key: { _fts: 'text', _ftsx: 1 },
// name: 'name_text',
// ns: 'test.tests',
// background: true,
// weights: { name: 1 },
// default_language: 'english',
// language_override: 'language',
// textIndexVersion: 3
// }
if (dbIndex.textIndexVersion != null) {
const weights = dbIndex.weights;
if (Object.keys(weights).length !== Object.keys(key).length) {
return false;
}
for (const prop of Object.keys(weights)) {
if (!(prop in key)) {
return false;
}
const weight = weights[prop];
if (weight !== get(options, 'weights.' + prop) && !(weight === 1 && get(options, 'weights.' + prop) == null)) {
return false;
}
}
if (options['default_language'] !== dbIndex['default_language']) {
return dbIndex['default_language'] === 'english' && options['default_language'] == null;
}
return true;
}
const optionKeys = [
'unique',
'partialFilterExpression',
'sparse',
'expireAfterSeconds',
'collation'
];
for (const key of optionKeys) {
if (!(key in options) && !(key in dbIndex)) {
continue;
}
if (key === 'collation') {
if (options[key] == null || dbIndex[key] == null) {
return options[key] == null && dbIndex[key] == null;
}
const definedKeys = Object.keys(options.collation);
const schemaCollation = options.collation;
const dbCollation = dbIndex.collation;
for (const opt of definedKeys) {
if (get(schemaCollation, opt) !== get(dbCollation, opt)) {
return false;
}
}
} else if (!utils.deepEqual(options[key], dbIndex[key])) {
return false;
}
}
const schemaIndexKeys = Object.keys(key);
const dbIndexKeys = Object.keys(dbIndex.key);
if (schemaIndexKeys.length !== dbIndexKeys.length) {
return false;
}
for (let i = 0; i < schemaIndexKeys.length; ++i) {
if (schemaIndexKeys[i] !== dbIndexKeys[i]) {
return false;
}
if (!utils.deepEqual(key[schemaIndexKeys[i]], dbIndex.key[dbIndexKeys[i]])) {
return false;
}
}
return true;
};

13
node_modules/mongoose/lib/helpers/isBsonType.js generated vendored Normal file
View File

@@ -0,0 +1,13 @@
'use strict';
const get = require('./get');
/*!
* Get the bson type, if it exists
*/
function isBsonType(obj, typename) {
  // `_bsontype` is set by the bson library on its value classes.
  const bsonType = get(obj, '_bsontype', void 0);
  return bsonType === typename;
}
module.exports = isBsonType;

21
node_modules/mongoose/lib/helpers/isMongooseObject.js generated vendored Normal file
View File

@@ -0,0 +1,21 @@
'use strict';
/*!
* Returns if `v` is a mongoose object that has a `toObject()` method we can use.
*
* This is for compatibility with libs like Date.js which do foolish things to Natives.
*
* @param {any} v
* @api private
*/
module.exports = function(v) {
if (v == null) {
return false;
}
return v.$__ != null || // Document
v.isMongooseArray || // Array or Document Array
v.isMongooseBuffer || // Buffer
v.$isMongooseMap; // Map
};

16
node_modules/mongoose/lib/helpers/isObject.js generated vendored Normal file
View File

@@ -0,0 +1,16 @@
'use strict';
/*!
* Determines if `arg` is an object.
*
* @param {Object|Array|String|Function|RegExp|any} arg
* @api private
* @return {Boolean}
*/
module.exports = function(arg) {
if (Buffer.isBuffer(arg)) {
return true;
}
return Object.prototype.toString.call(arg) === '[object Object]';
};

6
node_modules/mongoose/lib/helpers/isPromise.js generated vendored Normal file
View File

@@ -0,0 +1,6 @@
'use strict';
/*!
 * Thenable check: any non-null object or function with a `then` method.
 */
function isPromise(val) {
  if (!val) {
    return false;
  }
  const isCallableContainer = typeof val === 'object' || typeof val === 'function';
  return isCallableContainer && typeof val.then === 'function';
}
module.exports = isPromise;

138
node_modules/mongoose/lib/helpers/model/applyHooks.js generated vendored Normal file
View File

@@ -0,0 +1,138 @@
'use strict';
const symbols = require('../../schema/symbols');
const promiseOrCallback = require('../promiseOrCallback');
/*!
 * ignore
 */

module.exports = applyHooks;

/*!
 * The built-in document middleware points that mongoose wires up below.
 */

applyHooks.middlewareFunctions = [
  'deleteOne',
  'save',
  'validate',
  'remove',
  'updateOne',
  'init'
];

/*!
 * Register hooks for this model
 *
 * @param {Model} model
 * @param {Schema} schema
 * @param {Object} [options]
 * @param {Boolean} [options.decorateDoc] if set, hooks are attached to `model`
 *        itself instead of `model.prototype`
 */

function applyHooks(model, schema, options) {
  options = options || {};

  const kareemOptions = {
    useErrorHandlers: true,
    numCallbackParams: 1,
    nullResultByDefault: true,
    contextParameter: true
  };
  const objToDecorate = options.decorateDoc ? model : model.prototype;

  // Mark so recursive calls below don't re-apply hooks to the same model.
  model.$appliedHooks = true;

  // Recursively apply hooks to single-nested subdocuments and document
  // arrays (and their discriminators), so child-schema middleware fires too.
  for (const key of Object.keys(schema.paths)) {
    const type = schema.paths[key];
    let childModel = null;
    if (type.$isSingleNested) {
      childModel = type.caster;
    } else if (type.$isMongooseDocumentArray) {
      childModel = type.Constructor;
    } else {
      continue;
    }

    if (childModel.$appliedHooks) {
      continue;
    }

    applyHooks(childModel, type.schema, options);
    if (childModel.discriminators != null) {
      const keys = Object.keys(childModel.discriminators);
      for (const key of keys) {
        applyHooks(childModel.discriminators[key],
          childModel.discriminators[key].schema, options);
      }
    }
  }

  // Built-in hooks rely on hooking internal functions in order to support
  // promises and make it so that `doc.save.toString()` provides meaningful
  // information.
  const middleware = schema.s.hooks.
    filter(hook => {
      // `updateOne` and `deleteOne` are query middleware by default; only
      // keep hooks explicitly registered with `{ document: true }`.
      if (hook.name === 'updateOne' || hook.name === 'deleteOne') {
        return !!hook['document'];
      }
      // `remove` and `init` are document middleware by default.
      if (hook.name === 'remove' || hook.name === 'init') {
        return hook['document'] == null || !!hook['document'];
      }
      if (hook.query != null || hook.document != null) {
        return hook.document !== false;
      }
      return true;
    }).
    filter(hook => {
      // If user has overwritten the method, don't apply built-in middleware
      if (schema.methods[hook.name]) {
        return !hook.fn[symbols.builtInMiddleware];
      }
      return true;
    });

  model._middleware = middleware;

  // Preserve the pristine `$__validate` — `validate` may be wrapped more
  // than once (e.g. discriminators), and we must always wrap the original.
  objToDecorate.$__originalValidate = objToDecorate.$__originalValidate || objToDecorate.$__validate;

  // Wrap the internal implementations so pre/post hooks run around them.
  for (const method of ['save', 'validate', 'remove', 'deleteOne']) {
    const toWrap = method === 'validate' ? '$__originalValidate' : `$__${method}`;
    const wrapped = middleware.
      createWrapper(method, objToDecorate[toWrap], null, kareemOptions);
    objToDecorate[`$__${method}`] = wrapped;
  }
  // `init` is synchronous, so it gets the sync wrapper.
  objToDecorate.$__init = middleware.
    createWrapperSync('init', objToDecorate.$__init, null, kareemOptions);

  // Support hooks for custom methods
  const customMethods = Object.keys(schema.methods);
  const customMethodOptions = Object.assign({}, kareemOptions, {
    // Only use `checkForPromise` for custom methods, because mongoose
    // query thunks are not as consistent as I would like about returning
    // a nullish value rather than the query. If a query thunk returns
    // a query, `checkForPromise` causes infinite recursion
    checkForPromise: true
  });
  for (const method of customMethods) {
    if (!middleware.hasHooks(method)) {
      // Don't wrap if there are no hooks for the custom method to avoid
      // surprises. Also, `createWrapper()` enforces consistent async,
      // so wrapping a sync method would break it.
      continue;
    }
    const originalMethod = objToDecorate[method];
    // Public method: strip a trailing callback (if any) and delegate to the
    // wrapped `$__<method>`, returning a promise when no callback is given.
    objToDecorate[method] = function() {
      const args = Array.prototype.slice.call(arguments);
      const cb = args.slice(-1).pop();
      const argsWithoutCallback = typeof cb === 'function' ?
        args.slice(0, args.length - 1) : args;
      return promiseOrCallback(cb, callback => {
        return this[`$__${method}`].apply(this,
          argsWithoutCallback.concat([callback]));
      }, model.events);
    };
    objToDecorate[`$__${method}`] = middleware.
      createWrapper(method, originalMethod, null, customMethodOptions);
  }
}

View File

@@ -0,0 +1,56 @@
'use strict';
const get = require('../get');
/*!
 * Register methods for this model
 *
 * @param {Model} model
 * @param {Schema} schema
 * @throws {Error} if a method name collides with a schema property name
 */

module.exports = function applyMethods(model, schema) {
  // For "grouped" methods (an object of functions rather than a single
  // function), expose a getter that returns each sub-function bound to the
  // current document instance.
  function apply(method, schema) {
    Object.defineProperty(model.prototype, method, {
      get: function() {
        const h = {};
        for (const k in schema.methods[method]) {
          h[k] = schema.methods[method][k].bind(this);
        }
        return h;
      },
      configurable: true
    });
  }

  for (const method of Object.keys(schema.methods)) {
    const fn = schema.methods[method];
    // A method and a schema path with the same name would shadow each other.
    if (schema.tree.hasOwnProperty(method)) {
      throw new Error('You have a method and a property in your schema both ' +
        'named "' + method + '"');
    }
    // Warn (don't throw) when overwriting a mongoose-internal name, unless
    // the user opted out via `suppressWarning`.
    if (schema.reserved[method] &&
        !get(schema, `methodOptions.${method}.suppressWarning`, false)) {
      console.warn(`mongoose: the method name "${method}" is used by mongoose ` +
        'internally, overwriting it may cause bugs. If you\'re sure you know ' +
        'what you\'re doing, you can suppress this error by using ' +
        `\`schema.method('${method}', fn, { suppressWarning: true })\`.`);
    }
    if (typeof fn === 'function') {
      model.prototype[method] = fn;
    } else {
      // Object of functions -> define a bound getter (see `apply` above).
      apply(method, schema);
    }
  }

  // Recursively call `applyMethods()` on child schemas
  model.$appliedMethods = true;
  for (const key of Object.keys(schema.paths)) {
    const type = schema.paths[key];
    if (type.$isSingleNested && !type.caster.$appliedMethods) {
      applyMethods(type.caster, type.schema);
    }
    if (type.$isMongooseDocumentArray && !type.Constructor.$appliedMethods) {
      applyMethods(type.Constructor, type.schema);
    }
  }
};

View File

@@ -0,0 +1,71 @@
'use strict';
const middlewareFunctions = require('../query/applyQueryMiddleware').middlewareFunctions;
const promiseOrCallback = require('../promiseOrCallback');
/*!
 * Wrap a model's custom static functions so registered pre/post hooks run
 * around them.
 *
 * @param {Model} model
 * @param {Kareem} hooks hooks declared on the schema
 * @param {Object} statics the schema's custom statics
 */
module.exports = function applyStaticHooks(model, hooks, statics) {
  const kareemOptions = {
    useErrorHandlers: true,
    numCallbackParams: 1
  };

  hooks = hooks.filter(hook => {
    // If the custom static overwrites an existing query middleware, don't apply
    // middleware to it by default. This avoids a potential backwards breaking
    // change with plugins like `mongoose-delete` that use statics to overwrite
    // built-in Mongoose functions.
    if (middlewareFunctions.indexOf(hook.name) !== -1) {
      return !!hook.model;
    }
    return hook.model !== false;
  });

  model.$__insertMany = hooks.createWrapper('insertMany',
    model.$__insertMany, model, kareemOptions);

  for (const key of Object.keys(statics)) {
    if (hooks.hasHooks(key)) {
      const original = model[key];

      model[key] = function() {
        // Strip a trailing callback (if present) from the arguments; the
        // remaining args are forwarded to the original static.
        const numArgs = arguments.length;
        const lastArg = numArgs > 0 ? arguments[numArgs - 1] : null;
        const cb = typeof lastArg === 'function' ? lastArg : null;

        const args = Array.prototype.slice.
          call(arguments, 0, cb == null ? numArgs : numArgs - 1);
        // Special case: can't use `Kareem#wrap()` because it doesn't currently
        // support wrapped functions that return a promise.
        return promiseOrCallback(cb, callback => {
          hooks.execPre(key, model, args, function(err) {
            if (err != null) {
              return callback(err);
            }

            let postCalled = 0;
            const ret = original.apply(model, args.concat(post));
            // The static may signal completion via the appended callback OR
            // via a returned promise; wire both into `post`.
            if (ret != null && typeof ret.then === 'function') {
              ret.then(res => post(null, res), err => post(err));
            }

            // Guard so `post` only runs once regardless of which completion
            // path (callback or promise) fires first.
            function post(error, res) {
              if (postCalled++ > 0) {
                return;
              }

              if (error != null) {
                return callback(error);
              }

              hooks.execPost(key, model, [res], function(error) {
                if (error != null) {
                  return callback(error);
                }
                callback(null, res);
              });
            }
          });
        }, model.events);
      };
    }
  }
};

View File

@@ -0,0 +1,12 @@
'use strict';

/*!
 * Copy every static defined on `schema` onto the model as a class-level
 * property.
 *
 * @param {Model} model
 * @param {Schema} schema
 */
function applyStatics(model, schema) {
  // `for...in` (rather than `Object.keys()`) intentionally also picks up
  // inherited enumerable statics.
  for (const name in schema.statics) {
    model[name] = schema.statics[name];
  }
}

// Guarded so this helper can also load in an ESM context (e.g. tests).
if (typeof module !== 'undefined') {
  module.exports = applyStatics;
}

View File

@@ -0,0 +1,224 @@
'use strict';
const getDiscriminatorByValue = require('../../helpers/discriminator/getDiscriminatorByValue');
const applyTimestampsToChildren = require('../update/applyTimestampsToChildren');
const applyTimestampsToUpdate = require('../update/applyTimestampsToUpdate');
const cast = require('../../cast');
const castUpdate = require('../query/castUpdate');
const setDefaultsOnInsert = require('../setDefaultsOnInsert');
/*!
 * Given a model and a bulkWrite op, return a thunk that handles casting and
 * validating the individual op.
 *
 * @param {Model} originalModel
 * @param {Object} op a single `bulkWrite()` operation (e.g. `{ insertOne: ... }`)
 * @param {Object} options bulkWrite options (`strict`, `session`, ...)
 * @return {Function} node-style thunk `(callback) => void` that casts/validates
 *         the op in place and calls back with an error or null
 */
module.exports = function castBulkWrite(originalModel, op, options) {
  // Capture "now" once so all timestamps within this op agree.
  const now = originalModel.base.now();

  if (op['insertOne']) {
    return (callback) => {
      // Pick the right discriminator model based on the document itself.
      const model = decideModelByObject(originalModel, op['insertOne']['document']);

      const doc = new model(op['insertOne']['document']);
      if (model.schema.options.timestamps) {
        doc.initializeTimestamps();
      }
      if (options.session != null) {
        doc.$session(options.session);
      }
      op['insertOne']['document'] = doc;
      op['insertOne']['document'].validate({ __noPromise: true }, function(error) {
        if (error) {
          return callback(error, null);
        }
        callback(null);
      });
    };
  } else if (op['updateOne']) {
    return (callback) => {
      try {
        if (!op['updateOne']['filter']) {
          throw new Error('Must provide a filter object.');
        }
        if (!op['updateOne']['update']) {
          throw new Error('Must provide an update object.');
        }

        const model = decideModelByObject(originalModel, op['updateOne']['filter']);
        const schema = model.schema;
        const strict = options.strict != null ? options.strict : model.schema.options.strict;

        _addDiscriminatorToObject(schema, op['updateOne']['filter']);

        // Apply schema-level timestamps to the update unless disabled per-op.
        if (model.schema.$timestamps != null && op['updateOne'].timestamps !== false) {
          const createdAt = model.schema.$timestamps.createdAt;
          const updatedAt = model.schema.$timestamps.updatedAt;
          applyTimestampsToUpdate(now, createdAt, updatedAt, op['updateOne']['update'], {});
        }

        applyTimestampsToChildren(now, op['updateOne']['update'], model.schema);

        if (op['updateOne'].setDefaultsOnInsert) {
          setDefaultsOnInsert(op['updateOne']['filter'], model.schema, op['updateOne']['update'], {
            setDefaultsOnInsert: true,
            upsert: op['updateOne'].upsert
          });
        }

        op['updateOne']['filter'] = cast(model.schema, op['updateOne']['filter'], {
          strict: strict,
          upsert: op['updateOne'].upsert
        });
        op['updateOne']['update'] = castUpdate(model.schema, op['updateOne']['update'], {
          strict: strict,
          overwrite: false,
          upsert: op['updateOne'].upsert
        }, model, op['updateOne']['filter']);
      } catch (error) {
        return callback(error, null);
      }

      callback(null);
    };
  } else if (op['updateMany']) {
    return (callback) => {
      try {
        if (!op['updateMany']['filter']) {
          throw new Error('Must provide a filter object.');
        }
        if (!op['updateMany']['update']) {
          throw new Error('Must provide an update object.');
        }

        const model = decideModelByObject(originalModel, op['updateMany']['filter']);
        const schema = model.schema;
        const strict = options.strict != null ? options.strict : model.schema.options.strict;

        if (op['updateMany'].setDefaultsOnInsert) {
          setDefaultsOnInsert(op['updateMany']['filter'], model.schema, op['updateMany']['update'], {
            setDefaultsOnInsert: true,
            upsert: op['updateMany'].upsert
          });
        }

        // Apply schema-level timestamps to the update unless disabled per-op.
        if (model.schema.$timestamps != null && op['updateMany'].timestamps !== false) {
          const createdAt = model.schema.$timestamps.createdAt;
          const updatedAt = model.schema.$timestamps.updatedAt;
          applyTimestampsToUpdate(now, createdAt, updatedAt, op['updateMany']['update'], {});
        }

        applyTimestampsToChildren(now, op['updateMany']['update'], model.schema);

        _addDiscriminatorToObject(schema, op['updateMany']['filter']);

        op['updateMany']['filter'] = cast(model.schema, op['updateMany']['filter'], {
          strict: strict,
          upsert: op['updateMany'].upsert
        });
        op['updateMany']['update'] = castUpdate(model.schema, op['updateMany']['update'], {
          strict: strict,
          overwrite: false,
          upsert: op['updateMany'].upsert
        }, model, op['updateMany']['filter']);
      } catch (error) {
        return callback(error, null);
      }

      callback(null);
    };
  } else if (op['replaceOne']) {
    return (callback) => {
      const model = decideModelByObject(originalModel, op['replaceOne']['filter']);
      const schema = model.schema;
      const strict = options.strict != null ? options.strict : model.schema.options.strict;

      _addDiscriminatorToObject(schema, op['replaceOne']['filter']);
      try {
        op['replaceOne']['filter'] = cast(model.schema, op['replaceOne']['filter'], {
          strict: strict,
          upsert: op['replaceOne'].upsert
        });
      } catch (error) {
        return callback(error, null);
      }

      // set `skipId`, otherwise we get "_id field cannot be changed"
      const doc = new model(op['replaceOne']['replacement'], strict, true);
      if (model.schema.options.timestamps) {
        doc.initializeTimestamps();
      }
      if (options.session != null) {
        doc.$session(options.session);
      }
      op['replaceOne']['replacement'] = doc;

      op['replaceOne']['replacement'].validate({ __noPromise: true }, function(error) {
        if (error) {
          return callback(error, null);
        }
        // Serialize the replacement doc for the driver after validation.
        op['replaceOne']['replacement'] = op['replaceOne']['replacement'].toBSON();
        callback(null);
      });
    };
  } else if (op['deleteOne']) {
    return (callback) => {
      const model = decideModelByObject(originalModel, op['deleteOne']['filter']);
      const schema = model.schema;

      _addDiscriminatorToObject(schema, op['deleteOne']['filter']);

      try {
        op['deleteOne']['filter'] = cast(model.schema,
          op['deleteOne']['filter']);
      } catch (error) {
        return callback(error, null);
      }

      callback(null);
    };
  } else if (op['deleteMany']) {
    return (callback) => {
      const model = decideModelByObject(originalModel, op['deleteMany']['filter']);
      const schema = model.schema;

      _addDiscriminatorToObject(schema, op['deleteMany']['filter']);

      try {
        op['deleteMany']['filter'] = cast(model.schema,
          op['deleteMany']['filter']);
      } catch (error) {
        return callback(error, null);
      }

      callback(null);
    };
  } else {
    return (callback) => {
      callback(new Error('Invalid op passed to `bulkWrite()`'), null);
    };
  }
};
/*!
 * Add the discriminator key/value pair to `obj` (a query filter) when
 * `schema` belongs to a non-root discriminator, so the op only matches
 * documents of that discriminator.
 */
function _addDiscriminatorToObject(schema, obj) {
  if (schema == null) {
    return;
  }
  const mapping = schema.discriminatorMapping;
  if (mapping && !mapping.isRoot) {
    obj[mapping.key] = mapping.value;
  }
}
/*!
 * Pick the discriminator model to use when the discriminator key is present
 * in `object`; fall back to the base `model` otherwise (including when the
 * key's value doesn't map to any known discriminator).
 */
function decideModelByObject(model, object) {
  const key = model.schema.options.discriminatorKey;
  if (object == null || !object.hasOwnProperty(key)) {
    return model;
  }
  return getDiscriminatorByValue(model.discriminators, object[key]) || model;
}

View File

@@ -0,0 +1,205 @@
'use strict';
const Mixed = require('../../schema/mixed');
const defineKey = require('../document/compile').defineKey;
const get = require('../get');
const utils = require('../../utils');
const CUSTOMIZABLE_DISCRIMINATOR_OPTIONS = {
toJSON: true,
toObject: true,
_id: true,
id: true
};
/*!
 * Create a discriminator schema for `model`: merges the base schema into
 * `schema`, wires up the discriminator key path, and registers the result
 * on the model.
 *
 * @param {Model} model the base (root) model
 * @param {String} name discriminator name (also the default key value)
 * @param {Schema} schema the discriminator's own schema
 * @param {any} [tiedValue] custom value stored in the discriminator key
 * @param {Boolean} [applyPlugins] whether to run global plugins on `schema`
 * @return {Schema} the merged discriminator schema
 * @throws {Error} on invalid schema, nested discriminators, key conflicts,
 *         duplicate names, or non-customizable option overrides
 */
module.exports = function discriminator(model, name, schema, tiedValue, applyPlugins) {
  if (!(schema && schema.instanceOfSchema)) {
    throw new Error('You must pass a valid discriminator Schema');
  }

  // Discriminators of discriminators are not supported.
  if (model.schema.discriminatorMapping &&
      !model.schema.discriminatorMapping.isRoot) {
    throw new Error('Discriminator "' + name +
      '" can only be a discriminator of the root model');
  }

  if (applyPlugins) {
    const applyPluginsToDiscriminators = get(model.base,
      'options.applyPluginsToDiscriminators', false);
    // Even if `applyPluginsToDiscriminators` isn't set, we should still apply
    // global plugins to schemas embedded in the discriminator schema (gh-7370)
    model.base._applyPlugins(schema, {
      skipTopLevel: !applyPluginsToDiscriminators
    });
  }

  const key = model.schema.options.discriminatorKey;

  // Ensure the base schema has a selectable discriminator key path; add one
  // if the user didn't declare it explicitly.
  const existingPath = model.schema.path(key);
  if (existingPath != null) {
    if (!utils.hasUserDefinedProperty(existingPath.options, 'select')) {
      existingPath.options.select = true;
    }
    existingPath.options.$skipDiscriminatorCheck = true;
  } else {
    const baseSchemaAddition = {};
    baseSchemaAddition[key] = {
      default: void 0,
      select: true,
      $skipDiscriminatorCheck: true
    };
    baseSchemaAddition[key][model.schema.options.typeKey] = String;
    model.schema.add(baseSchemaAddition);
    defineKey(key, null, model.prototype, null, [key], model.schema.options);
  }

  // The child schema may not define its own field under the key name.
  if (schema.path(key) && schema.path(key).options.$skipDiscriminatorCheck !== true) {
    throw new Error('Discriminator "' + name +
      '" cannot have field with name "' + key + '"');
  }

  // NOTE(review): the first clause is subsumed by `tiedValue != null` except
  // for `tiedValue === ''` (which the second clause also accepts) — the net
  // effect is "use tiedValue whenever it's non-nullish".
  let value = name;
  if ((typeof tiedValue === 'string' && tiedValue.length) || tiedValue != null) {
    value = tiedValue;
  }

  // Merge `baseSchema` into `schema` in place (paths, options, hooks,
  // plugins, call queue) and install the discriminator key path on `schema`.
  function merge(schema, baseSchema) {
    // Retain original schema before merging base schema
    schema._baseSchema = baseSchema;

    if (baseSchema.paths._id &&
        baseSchema.paths._id.options &&
        !baseSchema.paths._id.options.auto) {
      schema.remove('_id');
    }

    // Find conflicting paths: if something is a path in the base schema
    // and a nested path in the child schema, overwrite the base schema path.
    // See gh-6076
    const baseSchemaPaths = Object.keys(baseSchema.paths);
    const conflictingPaths = [];

    for (const path of baseSchemaPaths) {
      if (schema.nested[path]) {
        conflictingPaths.push(path);
        continue;
      }

      if (path.indexOf('.') === -1) {
        continue;
      }
      // A dotted base path under a Mixed child path also conflicts.
      const sp = path.split('.').slice(0, -1);
      let cur = '';
      for (const piece of sp) {
        cur += (cur.length ? '.' : '') + piece;
        if (schema.paths[cur] instanceof Mixed ||
            schema.singleNestedPaths[cur] instanceof Mixed) {
          conflictingPaths.push(path);
        }
      }
    }

    utils.merge(schema, baseSchema, {
      isDiscriminatorSchemaMerge: true,
      omit: { discriminators: true, base: true },
      omitNested: conflictingPaths.reduce((cur, path) => {
        cur['tree.' + path] = true;
        return cur;
      }, {})
    });

    // Clean up conflicting paths _after_ merging re: gh-6076
    for (const conflictingPath of conflictingPaths) {
      delete schema.paths[conflictingPath];
    }

    // Rebuild schema models because schemas may have been merged re: #7884
    schema.childSchemas.forEach(obj => {
      obj.model.prototype.$__setSchema(obj.schema);
    });

    // Add the discriminator key path: it defaults to `value` and may not be
    // set to anything else.
    const obj = {};
    obj[key] = {
      default: value,
      select: true,
      set: function(newName) {
        if (newName === value || (Array.isArray(value) && utils.deepEqual(newName, value))) {
          return value;
        }
        throw new Error('Can\'t set discriminator key "' + key + '"');
      },
      $skipDiscriminatorCheck: true
    };
    obj[key][schema.options.typeKey] = existingPath ? existingPath.options[schema.options.typeKey] : String;
    schema.add(obj);

    schema.discriminatorMapping = { key: key, value: value, isRoot: false };

    if (baseSchema.options.collection) {
      schema.options.collection = baseSchema.options.collection;
    }

    const toJSON = schema.options.toJSON;
    const toObject = schema.options.toObject;
    const _id = schema.options._id;
    const id = schema.options.id;

    const keys = Object.keys(schema.options);
    schema.options.discriminatorKey = baseSchema.options.discriminatorKey;

    // Discriminators may only customize the whitelisted options; any other
    // divergence from the base schema's options is an error.
    for (const _key of keys) {
      if (!CUSTOMIZABLE_DISCRIMINATOR_OPTIONS[_key]) {
        // Special case: compiling a model sets `pluralization = true` by default. Avoid throwing an error
        // for that case. See gh-9238
        if (_key === 'pluralization' && schema.options[_key] == true && baseSchema.options[_key] == null) {
          continue;
        }

        if (!utils.deepEqual(schema.options[_key], baseSchema.options[_key])) {
          throw new Error('Can\'t customize discriminator option ' + _key +
            ' (can only modify ' +
            Object.keys(CUSTOMIZABLE_DISCRIMINATOR_OPTIONS).join(', ') +
            ')');
        }
      }
    }

    // Start from the base options, then restore the customizable ones.
    schema.options = utils.clone(baseSchema.options);
    if (toJSON) schema.options.toJSON = toJSON;
    if (toObject) schema.options.toObject = toObject;
    if (typeof _id !== 'undefined') {
      schema.options._id = _id;
    }
    schema.options.id = id;
    schema.s.hooks = model.schema.s.hooks.merge(schema.s.hooks);

    schema.plugins = Array.prototype.slice.call(baseSchema.plugins);
    schema.callQueue = baseSchema.callQueue.concat(schema.callQueue);
    delete schema._requiredpaths; // reset just in case Schema#requiredPaths() was called on either schema
  }

  // merges base schema into new discriminator schema and sets new type field.
  merge(schema, model.schema);

  if (!model.discriminators) {
    model.discriminators = {};
  }

  if (!model.schema.discriminatorMapping) {
    model.schema.discriminatorMapping = { key: key, value: null, isRoot: true };
  }
  if (!model.schema.discriminators) {
    model.schema.discriminators = {};
  }

  model.schema.discriminators[name] = schema;

  if (model.discriminators[name]) {
    throw new Error('Discriminator with name "' + name + '" already exists');
  }

  return schema;
};

12
node_modules/mongoose/lib/helpers/once.js generated vendored Normal file
View File

@@ -0,0 +1,12 @@
'use strict';

/*!
 * Wrap `fn` so only its first invocation runs; subsequent calls are no-ops
 * that return `undefined`. The caller's arguments are forwarded; `this` is
 * intentionally discarded.
 */
function once(fn) {
  let invoked = false;
  return function() {
    if (invoked) {
      return;
    }
    invoked = true;
    return fn.apply(null, arguments);
  };
}

// Guarded so this helper can also load in an ESM context (e.g. tests).
if (typeof module !== 'undefined') {
  module.exports = once;
}

55
node_modules/mongoose/lib/helpers/parallelLimit.js generated vendored Normal file
View File

@@ -0,0 +1,55 @@
'use strict';
module.exports = parallelLimit;
/*!
* ignore
*/
/*!
 * Run the node-style thunks in `fns`, at most `limit` at a time, and call
 * `callback(err, results)` when all have finished (or on first error).
 * Results are positional: `results[i]` is the result of `fns[i]`.
 *
 * Fix: the original computed the next task index as
 * `numFinished + numInProgress` *after* invoking a task and only then
 * incremented `numInProgress`. A task that called its callback synchronously
 * therefore re-started already-started indices, fired the final callback
 * with incomplete results, and could index past the end of `fns`. We now
 * reserve the index and the in-progress slot *before* invoking the task.
 *
 * @param {Function[]} fns thunks of the form `(cb) => void`
 * @param {Number} limit max number of thunks in flight; must be > 0
 * @param {Function} callback node-style completion callback
 * @throws {Error} if `limit` is not positive
 */
function parallelLimit(fns, limit, callback) {
  let numInProgress = 0;
  let numFinished = 0;
  let nextIndex = 0;
  let error = null;
  const results = [];

  if (limit <= 0) {
    throw new Error('Limit must be positive');
  }

  if (fns.length === 0) {
    return callback(null, []);
  }

  function _start() {
    // Reserve the index and the concurrency slot before invoking, so a
    // synchronous callback can't cause the same task to start twice.
    const index = nextIndex;
    ++nextIndex;
    ++numInProgress;
    fns[index](_done(index));
  }

  function _done(index) {
    return (err, res) => {
      --numInProgress;
      ++numFinished;

      if (error != null) {
        // A previous task already failed; ignore stragglers.
        return;
      }

      if (err != null) {
        error = err;
        return callback(error);
      }

      results[index] = res;

      if (numFinished === fns.length) {
        return callback(null, results);
      }
      if (nextIndex < fns.length && numInProgress < limit) {
        _start();
      }
    };
  }

  // Kick off the first `limit` tasks. `nextIndex` is re-checked because a
  // synchronous task may already have drained the whole list.
  for (let i = 0; i < limit && nextIndex < fns.length; ++i) {
    _start();
  }
}

13
node_modules/mongoose/lib/helpers/path/parentPaths.js generated vendored Normal file
View File

@@ -0,0 +1,13 @@
'use strict';

/*!
 * Expand a dotted path into the list of its ancestor paths, including the
 * path itself: `'a.b.c'` -> `['a', 'a.b', 'a.b.c']`.
 *
 * @param {String} path
 * @return {String[]}
 */
function parentPaths(path) {
  const segments = path.split('.');
  const ancestors = [];
  let prefix = '';
  for (const segment of segments) {
    prefix = prefix.length > 0 ? prefix + '.' + segment : segment;
    ancestors.push(prefix);
  }
  return ancestors;
}

// Guarded so this helper can also load in an ESM context (e.g. tests).
if (typeof module !== 'undefined') {
  module.exports = parentPaths;
}

View File

@@ -0,0 +1,16 @@
'use strict';

/*!
 * Set `val` at dotted `path` inside `obj`, creating intermediate plain
 * objects for any missing (nullish) segments along the way.
 *
 * @param {Object} obj mutated in place
 * @param {String} path dotted path, e.g. `'a.b.c'`
 * @param {any} val
 */
function setDottedPath(obj, path, val) {
  const segments = path.split('.');
  const lastIndex = segments.length - 1;
  let target = obj;
  for (let i = 0; i < lastIndex; ++i) {
    const segment = segments[i];
    if (target[segment] == null) {
      target[segment] = {};
    }
    target = target[segment];
  }
  target[segments[lastIndex]] = val;
}

// Guarded so this helper can also load in an ESM context (e.g. tests).
if (typeof module !== 'undefined') {
  module.exports = setDottedPath;
}

View File

@@ -0,0 +1,10 @@
'use strict';

/*!
 * Wrapper marking a raw value that `populate()` should leave untouched.
 * Callable with or without `new`; the wrapped value lives on `.val`.
 *
 * @param {any} val
 */
function SkipPopulateValue(val) {
  // Allow `SkipPopulateValue(v)` to behave like `new SkipPopulateValue(v)`.
  if (!(this instanceof SkipPopulateValue)) {
    return new SkipPopulateValue(val);
  }
  this.val = val;
  return this;
}

// Guarded so this helper can also load in an ESM context (e.g. tests).
if (typeof module !== 'undefined') {
  module.exports = SkipPopulateValue;
}

View File

@@ -0,0 +1,98 @@
'use strict';
const leanPopulateMap = require('./leanPopulateMap');
const modelSymbol = require('../symbols').modelSymbol;
const utils = require('../../utils');
module.exports = assignRawDocsToIdStructure;
/*!
 * Assign `vals` returned by mongo query to the `rawIds`
 * structure returned from utils.getVals() honoring
 * query sort order if specified by user.
 *
 * This can be optimized.
 *
 * Rules:
 *
 * if the value of the path is not an array, use findOne rules, else find.
 * for findOne the results are assigned directly to doc path (including null results).
 * for find, if user specified sort order, results are assigned directly
 * else documents are put back in original order of array if found in results
 *
 * `rawIds` is mutated in place: each id is replaced by its matching document
 * (or null).
 *
 * @param {Array} rawIds
 * @param {Object} resultDocs map of stringified id -> document
 * @param {Object} resultOrder map of stringified id -> sorted position
 * @param {Object} options populate options (`sort`, `clone`, `lean`, ...)
 * @param {Boolean} [recursed] true on recursive calls for nested id arrays
 * @api private
 */
function assignRawDocsToIdStructure(rawIds, resultDocs, resultOrder, options, recursed) {
  // honor user specified sort order
  const newOrder = [];
  const sorting = options.sort && rawIds.length > 1;
  const nullIfNotFound = options.$nullIfNotFound;
  let doc;
  let sid;
  let id;

  for (let i = 0; i < rawIds.length; ++i) {
    id = rawIds[i];

    if (Array.isArray(id)) {
      // handle [ [id0, id2], [id3] ]
      assignRawDocsToIdStructure(id, resultDocs, resultOrder, options, true);
      newOrder.push(id);
      continue;
    }

    if (id === null && !sorting) {
      // keep nulls for findOne unless sorting, which always
      // removes them (backward compat)
      newOrder.push(id);
      continue;
    }

    sid = String(id);
    doc = resultDocs[sid];

    // If user wants separate copies of same doc, use this option
    if (options.clone && doc != null) {
      if (options.lean) {
        // Clone the lean doc but keep its model association for later use.
        const _model = leanPopulateMap.get(doc);
        doc = utils.clone(doc);
        leanPopulateMap.set(doc, _model);
      } else {
        doc = doc.constructor.hydrate(doc._doc);
      }
    }

    if (recursed) {
      if (doc) {
        if (sorting) {
          // Place at the sorted position; may leave holes (sparse array).
          newOrder[resultOrder[sid]] = doc;
        } else {
          newOrder.push(doc);
        }
      } else if (id != null && id[modelSymbol] != null) {
        // `id` is itself a model instance -> keep it as-is.
        newOrder.push(id);
      } else {
        newOrder.push(options.retainNullValues || nullIfNotFound ? null : id);
      }
    } else {
      // apply findOne behavior - if document in results, assign, else assign null
      newOrder[i] = doc || null;
    }
  }

  rawIds.length = 0;
  if (newOrder.length) {
    // reassign the documents based on corrected order

    // forEach skips over sparse entries in arrays so we
    // can safely use this to our advantage dealing with sorted
    // result sets too.
    newOrder.forEach(function(doc, i) {
      rawIds[i] = doc;
    });
  }
}

View File

@@ -0,0 +1,285 @@
'use strict';
const SkipPopulateValue = require('./SkipPopulateValue');
const assignRawDocsToIdStructure = require('./assignRawDocsToIdStructure');
const get = require('../get');
const getVirtual = require('./getVirtual');
const leanPopulateMap = require('./leanPopulateMap');
const lookupLocalFields = require('./lookupLocalFields');
const mpath = require('mpath');
const sift = require('sift').default;
const utils = require('../../utils');
/*!
 * Write populated documents back onto the original docs at the populated
 * path, honoring `justOne`, virtual-populate, `count`, map paths and lean
 * semantics. Mutates `o.rawIds` and `o.docs` in place.
 *
 * @param {Object} o the populate "mod" object (docs, rawIds, path, options...)
 */
module.exports = function assignVals(o) {
  // Options that aren't explicitly listed in `populateOptions`
  const userOptions = Object.assign({}, get(o, 'allOptions.options.options'), get(o, 'allOptions.options'));
  // `o.options` contains options explicitly listed in `populateOptions`, like
  // `match` and `limit`.
  const populateOptions = Object.assign({}, o.options, userOptions, {
    justOne: o.justOne
  });
  populateOptions.$nullIfNotFound = o.isVirtual;
  const populatedModel = o.populatedModel;

  // Keep a copy of the pre-assignment ids; needed for `populated()` below.
  const originalIds = [].concat(o.rawIds);

  // replace the original ids in our intermediate _ids structure
  // with the documents found by query
  o.allIds = [].concat(o.allIds);
  assignRawDocsToIdStructure(o.rawIds, o.rawDocs, o.rawOrder, populateOptions);

  // now update the original documents being populated using the
  // result structure that contains real documents.
  const docs = o.docs;
  const rawIds = o.rawIds;
  const options = o.options;
  const count = o.count && o.isVirtual;
  let i;

  // Final per-value transform applied by `mpath.set` when assigning; also
  // reconciles `justOne` with array/non-array shaped results.
  function setValue(val) {
    if (count) {
      return val;
    }
    if (val instanceof SkipPopulateValue) {
      return val.val;
    }
    const _allIds = o.allIds[i];

    if (o.justOne === true && Array.isArray(val)) {
      // Might be an embedded discriminator (re: gh-9244) with multiple models, so make sure to pick the right
      // model before assigning.
      const ret = [];
      for (const doc of val) {
        const _docPopulatedModel = leanPopulateMap.get(doc);
        if (_docPopulatedModel == null || _docPopulatedModel === populatedModel) {
          ret.push(doc);
        }
      }
      // Since we don't want to have to create a new mongoosearray, make sure to
      // modify the array in place
      while (val.length > ret.length) {
        Array.prototype.pop.apply(val, []);
      }
      for (let i = 0; i < ret.length; ++i) {
        val[i] = ret[i];
      }

      return valueFilter(val[0], options, populateOptions, _allIds);
    } else if (o.justOne === false && !Array.isArray(val)) {
      return valueFilter([val], options, populateOptions, _allIds);
    }
    return valueFilter(val, options, populateOptions, _allIds);
  }

  for (i = 0; i < docs.length; ++i) {
    // Strip the map wildcard suffix if present.
    const _path = o.path.endsWith('.$*') ? o.path.slice(0, -3) : o.path;
    const existingVal = mpath.get(_path, docs[i], lookupLocalFields);
    if (existingVal == null && !getVirtual(o.originalModel.schema, _path)) {
      continue;
    }

    let valueToSet;
    if (count) {
      valueToSet = numDocs(rawIds[i]);
    } else if (Array.isArray(o.match)) {
      // Per-doc match functions: re-filter this doc's results with sift.
      valueToSet = Array.isArray(rawIds[i]) ?
        rawIds[i].filter(sift(o.match[i])) :
        [rawIds[i]].filter(sift(o.match[i]))[0];
    } else {
      valueToSet = rawIds[i];
    }

    // If we're populating a map, the existing value will be an object, so
    // we need to transform again
    const originalSchema = o.originalModel.schema;
    const isDoc = get(docs[i], '$__', null) != null;
    let isMap = isDoc ?
      existingVal instanceof Map :
      utils.isPOJO(existingVal);
    // If we pass the first check, also make sure the local field's schematype
    // is map (re: gh-6460)
    isMap = isMap && get(originalSchema._getSchema(_path), '$isSchemaMap');
    if (!o.isVirtual && isMap) {
      // Rebuild the map from the populated values, keyed by the original keys.
      const _keys = existingVal instanceof Map ?
        Array.from(existingVal.keys()) :
        Object.keys(existingVal);
      valueToSet = valueToSet.reduce((cur, v, i) => {
        cur.set(_keys[i], v);
        return cur;
      }, new Map());
    }

    // Re-parent populated subdocuments onto the doc being populated.
    if (isDoc && Array.isArray(valueToSet)) {
      for (const val of valueToSet) {
        if (val != null && val.$__ != null) {
          val.$__.parent = docs[i];
        }
      }
    } else if (isDoc && valueToSet != null && valueToSet.$__ != null) {
      valueToSet.$__.parent = docs[i];
    }

    if (o.isVirtual && isDoc) {
      docs[i].populated(_path, o.justOne ? originalIds[0] : originalIds, o.allOptions);
      // If virtual populate and doc is already init-ed, need to walk through
      // the actual doc to set rather than setting `_doc` directly
      mpath.set(_path, valueToSet, docs[i], setValue);
      continue;
    }

    // For non-virtual populate on lean/plain objects: manually walk the
    // dotted path, creating intermediate objects where safe.
    const parts = _path.split('.');
    let cur = docs[i];
    const curPath = parts[0];
    for (let j = 0; j < parts.length - 1; ++j) {
      // If we get to an array with a dotted path, like `arr.foo`, don't set
      // `foo` on the array.
      if (Array.isArray(cur) && !utils.isArrayIndex(parts[j])) {
        break;
      }

      if (parts[j] === '$*') {
        break;
      }

      if (cur[parts[j]] == null) {
        // If nothing to set, avoid creating an unnecessary array. Otherwise
        // we'll end up with a single doc in the array with only defaults.
        // See gh-8342, gh-8455
        const schematype = originalSchema._getSchema(curPath);
        if (valueToSet == null && schematype != null && schematype.$isMongooseArray) {
          break;
        }
        cur[parts[j]] = {};
      }
      cur = cur[parts[j]];
      // If the property in MongoDB is a primitive, we won't be able to populate
      // the nested path, so skip it. See gh-7545
      if (typeof cur !== 'object') {
        break;
      }
    }
    if (docs[i].$__) {
      docs[i].populated(_path, o.allIds[i], o.allOptions);
    }

    // If lean, need to check that each individual virtual respects
    // `justOne`, because you may have a populated virtual with `justOne`
    // underneath an array. See gh-6867
    mpath.set(_path, valueToSet, docs[i], lookupLocalFields, setValue, false);
  }
};
/*!
 * Count populated docs for `count: true` populate: arrays count their
 * length, nested arrays (populated subdocs under an array, see gh-7573)
 * map to per-element counts, nullish counts as 0 and anything else as 1.
 */
function numDocs(v) {
  if (!Array.isArray(v)) {
    return v == null ? 0 : 1;
  }
  // If setting underneath an array of populated subdocs, we may have an
  // array of arrays. See gh-7573
  const hasNestedArray = v.some(el => Array.isArray(el));
  if (hasNestedArray) {
    return v.map(el => numDocs(el));
  }
  return v.length;
}
/*!
 * 1) Apply backwards compatible find/findOne behavior to sub documents
 *
 *    find logic:
 *      a) filter out non-documents
 *      b) remove _id from sub docs when user specified
 *
 *    findOne
 *      a) if no doc found, set to null
 *      b) remove _id from sub docs when user specified
 *
 * 2) Remove _ids when specified by users query.
 *
 * background:
 * _ids are left in the query even when user excludes them so
 * that population mapping can occur.
 *
 * @param {any} val populated value (array, doc, lean doc, map, or null)
 * @param {Object} assignmentOpts projection-derived opts (`excludeId`, `originalLimit`)
 * @param {Object} populateOptions populate opts (`transform`, `retainNullValues`, `justOne`)
 * @param {any} allIds the original id(s) for this slot, passed to `transform`
 */
function valueFilter(val, assignmentOpts, populateOptions, allIds) {
  const userSpecifiedTransform = typeof populateOptions.transform === 'function';
  const transform = userSpecifiedTransform ? populateOptions.transform : noop;
  if (Array.isArray(val)) {
    // find logic
    const ret = [];
    const numValues = val.length;
    for (let i = 0; i < numValues; ++i) {
      let subdoc = val[i];
      const _allIds = Array.isArray(allIds) ? allIds[i] : allIds;
      // Drop non-documents (unless retaining nulls or a user transform
      // wants a chance to see them).
      if (!isPopulatedObject(subdoc) && (!populateOptions.retainNullValues || subdoc != null) && !userSpecifiedTransform) {
        continue;
      } else if (userSpecifiedTransform) {
        subdoc = transform(isPopulatedObject(subdoc) ? subdoc : null, _allIds);
      }
      maybeRemoveId(subdoc, assignmentOpts);
      ret.push(subdoc);
      // Re-apply the user's original `limit` after filtering.
      if (assignmentOpts.originalLimit &&
          ret.length >= assignmentOpts.originalLimit) {
        break;
      }
    }

    // Since we don't want to have to create a new mongoosearray, make sure to
    // modify the array in place
    while (val.length > ret.length) {
      Array.prototype.pop.apply(val, []);
    }
    for (let i = 0; i < ret.length; ++i) {
      val[i] = ret[i];
    }

    return val;
  }

  // findOne
  if (isPopulatedObject(val) || utils.isPOJO(val)) {
    maybeRemoveId(val, assignmentOpts);
    return transform(val, allIds);
  }

  if (val instanceof Map) {
    return val;
  }

  if (populateOptions.justOne === false) {
    return [];
  }

  // No doc found: transform null (or the raw nullish value) for findOne.
  return val == null ? transform(val, allIds) : transform(null, allIds);
}
/*!
 * Remove `_id` from `subdoc` when the user's projection excluded it
 * (mongoose keeps `_id` in the query internally so population mapping can
 * occur, then strips it here).
 */
function maybeRemoveId(subdoc, assignmentOpts) {
  if (subdoc == null || !assignmentOpts.excludeId) {
    return;
  }
  // Real documents keep their data on `_doc`; lean results are plain objects.
  if (typeof subdoc.$__setValue === 'function') {
    delete subdoc._doc._id;
  } else {
    delete subdoc._id;
  }
}
/*!
 * Determine if `obj` is something we can set a populated path to: a
 * document, a lean document, or an array/map that contains docs.
 */
function isPopulatedObject(obj) {
  if (obj == null) {
    return false;
  }
  if (Array.isArray(obj) || obj.$isMongooseMap || obj.$__ != null) {
    return true;
  }
  // Lean docs are tracked via the lean populate map.
  return leanPopulateMap.has(obj);
}
// Identity transform, used when no user-specified `transform` option exists.
function noop(value) {
  return value;
}

View File

@@ -0,0 +1,79 @@
'use strict';
const SkipPopulateValue = require('./SkipPopulateValue');
const parentPaths = require('../path/parentPaths');
/*!
 * Build the query filter for a populate query: combines the user's `match`
 * with `{ foreignField: { $in: ids } }` clauses (one per foreign field,
 * `$or`-ed together when there are several).
 *
 * @param {Array} ids local-field values to look up
 * @param {Object|Array} _match user-specified match (array -> `$or`)
 * @param {Set} _foreignField foreign field path(s)
 * @param {Model} model the model being populated from
 * @param {Boolean} skipInvalidIds drop ids that fail casting (gh-7706)
 * @return {Object} the filter object for the populate query
 */
module.exports = function createPopulateQueryFilter(ids, _match, _foreignField, model, skipInvalidIds) {
  const match = _formatMatch(_match);

  if (_foreignField.size === 1) {
    const foreignField = Array.from(_foreignField)[0];
    const foreignSchemaType = model.schema.path(foreignField);
    // Skip the `$in` clause when the user's match already constrains `_id`.
    if (foreignField !== '_id' || !match['_id']) {
      ids = _filterInvalidIds(ids, foreignSchemaType, skipInvalidIds);
      match[foreignField] = { $in: ids };
    }

    // If the user's match uses `$elemMatch` on an ancestor of the foreign
    // field, fold the `$in` into that `$elemMatch` instead of a separate
    // top-level condition.
    const _parentPaths = parentPaths(foreignField);
    for (let i = 0; i < _parentPaths.length - 1; ++i) {
      const cur = _parentPaths[i];
      if (match[cur] != null && match[cur].$elemMatch != null) {
        match[cur].$elemMatch[foreignField.slice(cur.length + 1)] = { $in: ids };
        delete match[foreignField];
        break;
      }
    }
  } else {
    // Multiple foreign fields: match any of them via `$or`, preserving an
    // existing user `$or` by wrapping both in `$and`.
    const $or = [];
    if (Array.isArray(match.$or)) {
      match.$and = [{ $or: match.$or }, { $or: $or }];
      delete match.$or;
    } else {
      match.$or = $or;
    }
    for (const foreignField of _foreignField) {
      if (foreignField !== '_id' || !match['_id']) {
        const foreignSchemaType = model.schema.path(foreignField);
        // NOTE(review): `ids` is filtered cumulatively across fields, so each
        // subsequent field sees the intersection of prior filters — confirm
        // this is intended when foreign fields have differing cast rules.
        ids = _filterInvalidIds(ids, foreignSchemaType, skipInvalidIds);
        $or.push({ [foreignField]: { $in: ids } });
      }
    }
  }

  return match;
};
/*!
* Optionally filter out invalid ids that don't conform to foreign field's schema
* to avoid cast errors (gh-7706)
*/
/*!
 * Drop `SkipPopulateValue` placeholders and, when `skipInvalidIds` is set,
 * also drop ids that fail casting against the foreign field's schema so the
 * population query doesn't throw a cast error (gh-7706).
 */
function _filterInvalidIds(ids, foreignSchemaType, skipInvalidIds) {
  const withoutSkipped = ids.filter(v => !(v instanceof SkipPopulateValue));
  if (!skipInvalidIds) {
    return withoutSkipped;
  }
  const castsCleanly = id => {
    try {
      foreignSchemaType.cast(id);
      return true;
    } catch (err) {
      return false;
    }
  };
  return withoutSkipped.filter(castsCleanly);
}
/*!
* Format `mod.match` given that it may be an array that we need to $or if
* the client has multiple docs with match functions
*/
/*!
 * Normalize `mod.match` into a single filter object. An array with multiple
 * entries (several docs had `match` functions) becomes an `$or`; everything
 * is shallow-copied so the caller's objects are never mutated.
 */
function _formatMatch(match) {
  if (!Array.isArray(match)) {
    return Object.assign({}, match);
  }
  if (match.length > 1) {
    const clauses = match.map(m => Object.assign({}, m));
    return { $or: clauses };
  }
  return Object.assign({}, match[0]);
}

View File

@@ -0,0 +1,541 @@
'use strict';
const MongooseError = require('../../error/index');
const SkipPopulateValue = require('./SkipPopulateValue');
const get = require('../get');
const getDiscriminatorByValue = require('../discriminator/getDiscriminatorByValue');
const isPathExcluded = require('../projection/isPathExcluded');
const getSchemaTypes = require('./getSchemaTypes');
const getVirtual = require('./getVirtual');
const lookupLocalFields = require('./lookupLocalFields');
const mpath = require('mpath');
const normalizeRefPath = require('./normalizeRefPath');
const util = require('util');
const utils = require('../../utils');
const modelSymbol = require('../symbols').modelSymbol;
const populateModelSymbol = require('../symbols').populateModelSymbol;
const schemaMixedSymbol = require('../../schema/symbols').schemaMixedSymbol;
/**
 * Build the list of "mod" entries for `populate()`: one entry per foreign
 * model involved, each describing the docs to populate, the local-field
 * values (ids) to look up, and the effective options for that model's query.
 *
 * Note: failure paths `return` an Error/MongooseError rather than throwing,
 * so the caller can surface it via callback.
 *
 * @param {Model} model the model the `docs` belong to
 * @param {Array} docs documents (hydrated or lean POJOs) being populated
 * @param {Object} options normalized populate options for a single path
 * @returns {Array|Error} array of mod entries, or an Error on invalid input
 */
module.exports = function getModelsMapForPopulate(model, docs, options) {
  let i;
  let doc;
  const len = docs.length;
  const available = {};
  const map = [];
  const modelNameFromQuery = options.model && options.model.modelName || options.model;
  let schema;
  let refPath;
  let Model;
  let currentOptions;
  let modelNames;
  let modelName;
  const originalModel = options.model;
  let isVirtual = false;
  const modelSchema = model.schema;

  let allSchemaTypes = getSchemaTypes(modelSchema, null, options.path);
  allSchemaTypes = Array.isArray(allSchemaTypes) ? allSchemaTypes : [allSchemaTypes].filter(v => v != null);
  // Whether any candidate schematype declares a dynamic `refPath`.
  const _firstWithRefPath = allSchemaTypes.find(schematype => get(schematype, 'options.refPath', null) != null);

  for (i = 0; i < len; i++) {
    doc = docs[i];
    let justOne = null;
    schema = getSchemaTypes(modelSchema, doc, options.path);

    // Special case: populating a path that's a DocumentArray unless
    // there's an explicit `ref` or `refPath` re: gh-8946
    if (schema != null &&
        schema.$isMongooseDocumentArray &&
        schema.options.ref == null &&
        schema.options.refPath == null) {
      continue;
    }

    // Populating a nested path should always be a no-op re: #9073.
    // People shouldn't do this, but apparently they do.
    if (options._localModel != null && options._localModel.schema.nested[options.path]) {
      continue;
    }

    const isUnderneathDocArray = schema && schema.$isUnderneathDocArray;
    if (isUnderneathDocArray && get(options, 'options.sort') != null) {
      return new MongooseError('Cannot populate with `sort` on path ' + options.path +
        ' because it is a subproperty of a document array');
    }

    modelNames = null;
    let isRefPath = !!_firstWithRefPath;
    let normalizedRefPath = _firstWithRefPath ? get(_firstWithRefPath, 'options.refPath', null) : null;
    let schemaOptions = null;

    if (Array.isArray(schema)) {
      // Multiple candidate schematypes (e.g. embedded discriminators):
      // gather and dedupe model names from each candidate.
      const schemasArray = schema;
      for (const _schema of schemasArray) {
        let _modelNames;
        let res;
        try {
          res = _getModelNames(doc, _schema);
          _modelNames = res.modelNames;
          isRefPath = isRefPath || res.isRefPath;
          normalizedRefPath = normalizeRefPath(normalizedRefPath, doc, options.path) ||
            res.refPath;
          justOne = res.justOne;
        } catch (error) {
          return error;
        }

        if (isRefPath && !res.isRefPath) {
          continue;
        }
        if (!_modelNames) {
          continue;
        }
        modelNames = modelNames || [];
        for (const modelName of _modelNames) {
          if (modelNames.indexOf(modelName) === -1) {
            modelNames.push(modelName);
          }
        }
      }
    } else {
      try {
        const res = _getModelNames(doc, schema);
        modelNames = res.modelNames;
        isRefPath = res.isRefPath;
        normalizedRefPath = res.refPath;
        justOne = res.justOne;
        schemaOptions = get(schema, 'options.populate', null);
      } catch (error) {
        return error;
      }

      if (!modelNames) {
        continue;
      }
    }

    const _virtualRes = getVirtual(model.schema, options.path);
    const virtual = _virtualRes == null ? null : _virtualRes.virtual;

    let localField;
    let count = false;
    if (virtual && virtual.options) {
      // For virtuals nested under a subdocument schema, prefix the local
      // field with the nested schema path.
      const virtualPrefix = _virtualRes.nestedSchemaPath ?
        _virtualRes.nestedSchemaPath + '.' : '';
      if (typeof virtual.options.localField === 'function') {
        localField = virtualPrefix + virtual.options.localField.call(doc, doc);
      } else if (Array.isArray(virtual.options.localField)) {
        localField = virtual.options.localField.map(field => virtualPrefix + field);
      } else {
        localField = virtualPrefix + virtual.options.localField;
      }
      count = virtual.options.count;

      // Virtual-level query options apply only when the user didn't pass the
      // same option to `populate()` directly.
      if (virtual.options.skip != null && !options.hasOwnProperty('skip')) {
        options.skip = virtual.options.skip;
      }
      if (virtual.options.limit != null && !options.hasOwnProperty('limit')) {
        options.limit = virtual.options.limit;
      }
      if (virtual.options.perDocumentLimit != null && !options.hasOwnProperty('perDocumentLimit')) {
        options.perDocumentLimit = virtual.options.perDocumentLimit;
      }
    } else {
      localField = options.path;
    }
    let foreignField = virtual && virtual.options ?
      virtual.options.foreignField :
      '_id';

    // `justOne = null` means we don't know from the schema whether the end
    // result should be an array or a single doc. This can result from
    // populating a POJO using `Model.populate()`
    if ('justOne' in options && options.justOne !== void 0) {
      justOne = options.justOne;
    } else if (virtual && virtual.options && virtual.options.refPath) {
      const normalizedRefPath =
        normalizeRefPath(virtual.options.refPath, doc, options.path);
      justOne = !!virtual.options.justOne;
      isVirtual = true;
      const refValue = utils.getValue(normalizedRefPath, doc);
      modelNames = Array.isArray(refValue) ? refValue : [refValue];
    } else if (virtual && virtual.options && virtual.options.ref) {
      let normalizedRef;
      if (typeof virtual.options.ref === 'function') {
        normalizedRef = virtual.options.ref.call(doc, doc);
      } else {
        normalizedRef = virtual.options.ref;
      }
      justOne = !!virtual.options.justOne;
      isVirtual = true;
      if (!modelNames) {
        modelNames = [].concat(normalizedRef);
      }
    } else if (schema && !schema[schemaMixedSymbol]) {
      // Skip Mixed types because we explicitly don't do casting on those.
      if (options.path.endsWith('.' + schema.path)) {
        justOne = Array.isArray(schema) ?
          schema.every(schema => !schema.$isMongooseArray) :
          !schema.$isMongooseArray;
      }
    }

    if (!modelNames) {
      continue;
    }

    if (virtual && (!localField || !foreignField)) {
      return new MongooseError('If you are populating a virtual, you must set the ' +
        'localField and foreignField options');
    }

    options.isVirtual = isVirtual;
    options.virtual = virtual;
    if (typeof localField === 'function') {
      localField = localField.call(doc, doc);
    }
    if (typeof foreignField === 'function') {
      foreignField = foreignField.call(doc);
    }

    let match = get(options, 'match', null) ||
      get(currentOptions, 'match', null) ||
      get(options, 'virtual.options.match', null) ||
      get(options, 'virtual.options.options.match', null);

    let hasMatchFunction = typeof match === 'function';
    if (hasMatchFunction) {
      match = match.call(doc, doc);
    }

    // Multiple localField/foreignField pairs: the first pair drives the
    // `$in` query, the remaining pairs become extra `match` conditions.
    if (Array.isArray(localField) && Array.isArray(foreignField) && localField.length === foreignField.length) {
      match = Object.assign({}, match);
      for (let i = 1; i < localField.length; ++i) {
        match[foreignField[i]] = convertTo_id(mpath.get(localField[i], doc, lookupLocalFields), schema);
        hasMatchFunction = true;
      }

      localField = localField[0];
      foreignField = foreignField[0];
    }

    const localFieldPathType = modelSchema._getPathType(localField);
    const localFieldPath = localFieldPathType === 'real' ? modelSchema.path(localField) : localFieldPathType.schema;
    const localFieldGetters = localFieldPath && localFieldPath.getters ? localFieldPath.getters : [];
    let ret;

    const _populateOptions = get(options, 'options', {});

    const getters = 'getters' in _populateOptions ?
      _populateOptions.getters :
      options.isVirtual && get(virtual, 'options.getters', false);
    if (localFieldGetters.length > 0 && getters) {
      // Apply the local field's getters before matching; needs a hydrated
      // doc so getters run with the right context.
      const hydratedDoc = (doc.$__ != null) ? doc : model.hydrate(doc);
      const localFieldValue = mpath.get(localField, doc, lookupLocalFields);
      if (Array.isArray(localFieldValue)) {
        const localFieldHydratedValue = mpath.get(localField.split('.').slice(0, -1), hydratedDoc, lookupLocalFields);
        ret = localFieldValue.map((localFieldArrVal, localFieldArrIndex) =>
          localFieldPath.applyGetters(localFieldArrVal, localFieldHydratedValue[localFieldArrIndex]));
      } else {
        ret = localFieldPath.applyGetters(localFieldValue, hydratedDoc);
      }
    } else {
      ret = convertTo_id(mpath.get(localField, doc, lookupLocalFields), schema);
    }

    const id = String(utils.getValue(foreignField, doc));
    options._docs[id] = Array.isArray(ret) ? ret.slice() : ret;

    // Re: gh-8452. Embedded discriminators may not have `refPath`, so clear
    // out embedded discriminator docs that don't have a `refPath` on the
    // populated path.
    if (isRefPath && normalizedRefPath != null) {
      const pieces = normalizedRefPath.split('.');
      let cur = '';
      for (let j = 0; j < pieces.length; ++j) {
        const piece = pieces[j];
        cur = cur + (cur.length === 0 ? '' : '.') + piece;
        const schematype = modelSchema.path(cur);
        if (schematype != null &&
            schematype.$isMongooseArray &&
            schematype.caster.discriminators != null &&
            Object.keys(schematype.caster.discriminators).length > 0) {
          const subdocs = utils.getValue(cur, doc);
          const remnant = options.path.substr(cur.length + 1);
          const discriminatorKey = schematype.caster.schema.options.discriminatorKey;
          modelNames = [];
          for (const subdoc of subdocs) {
            const discriminatorName = utils.getValue(discriminatorKey, subdoc);
            const discriminator = schematype.caster.discriminators[discriminatorName];
            const discriminatorSchema = discriminator && discriminator.schema;
            if (discriminatorSchema == null) {
              continue;
            }
            const _path = discriminatorSchema.path(remnant);
            if (_path == null || _path.options.refPath == null) {
              // This embedded discriminator has no `refPath` for the path,
              // so mark its value to be skipped by the population query.
              const docValue = utils.getValue(localField.substr(cur.length + 1), subdoc);
              ret = ret.map(v => v === docValue ? SkipPopulateValue(v) : v);
              continue;
            }
            const modelName = utils.getValue(pieces.slice(j + 1).join('.'), subdoc);
            modelNames.push(modelName);
          }
        }
      }
    }

    let k = modelNames.length;
    while (k--) {
      modelName = modelNames[k];
      if (modelName == null) {
        continue;
      }

      // `PopulateOptions#connection`: if the model is passed as a string, the
      // connection matters because different connections have different models.
      const connection = options.connection != null ? options.connection : model.db;

      try {
        Model = originalModel && originalModel[modelSymbol] ?
          originalModel :
          modelName[modelSymbol] ? modelName : connection.model(modelName);
      } catch (error) {
        // If `ret` is undefined, we'll add an empty entry to modelsMap. We shouldn't
        // execute a query, but it is necessary to make sure `justOne` gets handled
        // correctly for setting an empty array (see gh-8455)
        if (ret !== undefined) {
          return error;
        }
      }

      let ids = ret;
      const flat = Array.isArray(ret) ? utils.array.flatten(ret) : [];

      // With `refPath`, each id is tied to a model name by position; keep
      // only the ids belonging to the current model.
      if (isRefPath && Array.isArray(ret) && flat.length === modelNames.length) {
        ids = flat.filter((val, i) => modelNames[i] === modelName);
      }

      if (!available[modelName] || currentOptions.perDocumentLimit != null || get(currentOptions, 'options.perDocumentLimit') != null) {
        currentOptions = {
          model: Model
        };

        if (isVirtual && get(virtual, 'options.options')) {
          currentOptions.options = utils.clone(virtual.options.options);
        } else if (schemaOptions != null) {
          currentOptions.options = Object.assign({}, schemaOptions);
        }
        utils.merge(currentOptions, options);

        // Used internally for checking what model was used to populate this
        // path.
        options[populateModelSymbol] = Model;

        available[modelName] = {
          model: Model,
          options: currentOptions,
          match: hasMatchFunction ? [match] : match,
          docs: [doc],
          ids: [ids],
          allIds: [ret],
          localField: new Set([localField]),
          foreignField: new Set([foreignField]),
          justOne: justOne,
          isVirtual: isVirtual,
          virtual: virtual,
          count: count,
          [populateModelSymbol]: Model
        };
        map.push(available[modelName]);
      } else {
        available[modelName].localField.add(localField);
        available[modelName].foreignField.add(foreignField);
        available[modelName].docs.push(doc);
        available[modelName].ids.push(ids);
        available[modelName].allIds.push(ret);
        if (hasMatchFunction) {
          available[modelName].match.push(match);
        }
      }
    }
  }

  return map;

  // Resolve the candidate model name(s) for one doc + schematype. Returns
  // `{ modelNames, isRefPath, refPath, justOne }`; `modelNames` may be null
  // when there is nothing to populate for this doc.
  function _getModelNames(doc, schema) {
    let modelNames;
    let discriminatorKey;
    let isRefPath = false;
    let justOne = null;

    if (schema && schema.caster) {
      schema = schema.caster;
    }
    if (schema && schema.$isSchemaMap) {
      schema = schema.$__schemaType;
    }

    if (!schema && model.discriminators) {
      discriminatorKey = model.schema.discriminatorMapping.key;
    }

    refPath = schema && schema.options && schema.options.refPath;
    const normalizedRefPath = normalizeRefPath(refPath, doc, options.path);

    if (modelNameFromQuery) {
      modelNames = [modelNameFromQuery]; // query options
    } else if (normalizedRefPath) {
      if (options._queryProjection != null && isPathExcluded(options._queryProjection, normalizedRefPath)) {
        throw new MongooseError('refPath `' + normalizedRefPath +
          '` must not be excluded in projection, got ' +
          util.inspect(options._queryProjection));
      }

      // On lean docs, a virtual refPath must be resolved via its getters.
      if (modelSchema.virtuals.hasOwnProperty(normalizedRefPath) && doc.$__ == null) {
        modelNames = [modelSchema.virtuals[normalizedRefPath].applyGetters(void 0, doc)];
      } else {
        modelNames = utils.getValue(normalizedRefPath, doc);
      }

      if (Array.isArray(modelNames)) {
        modelNames = utils.array.flatten(modelNames);
      }

      isRefPath = true;
    } else {
      let modelForCurrentDoc = model;
      let schemaForCurrentDoc;
      let discriminatorValue;

      if (!schema && discriminatorKey && (discriminatorValue = utils.getValue(discriminatorKey, doc))) {
        // `modelNameForFind` is the discriminator value, so we might need
        // find the discriminated model name
        const discriminatorModel = getDiscriminatorByValue(model.discriminators, discriminatorValue) || model;
        if (discriminatorModel != null) {
          modelForCurrentDoc = discriminatorModel;
        } else {
          try {
            modelForCurrentDoc = model.db.model(discriminatorValue);
          } catch (error) {
            return error;
          }
        }

        schemaForCurrentDoc = modelForCurrentDoc.schema._getSchema(options.path);

        if (schemaForCurrentDoc && schemaForCurrentDoc.caster) {
          schemaForCurrentDoc = schemaForCurrentDoc.caster;
        }
      } else {
        schemaForCurrentDoc = schema;
      }

      const _virtualRes = getVirtual(modelForCurrentDoc.schema, options.path);
      const virtual = _virtualRes == null ? null : _virtualRes.virtual;

      if (schemaForCurrentDoc != null) {
        justOne = !schemaForCurrentDoc.$isMongooseArray && !schemaForCurrentDoc._arrayPath;
      }

      let ref;
      let refPath;

      if ((ref = get(schemaForCurrentDoc, 'options.ref')) != null) {
        ref = handleRefFunction(ref, doc);
        modelNames = [ref];
      } else if ((ref = get(virtual, 'options.ref')) != null) {
        ref = handleRefFunction(ref, doc);

        // When referencing nested arrays, the ref should be an Array
        // of modelNames.
        if (Array.isArray(ref)) {
          modelNames = ref;
        } else {
          modelNames = [ref];
        }

        isVirtual = true;
      } else if ((refPath = get(schemaForCurrentDoc, 'options.refPath')) != null) {
        isRefPath = true;
        refPath = normalizeRefPath(refPath, doc, options.path);
        modelNames = utils.getValue(refPath, doc);
        if (Array.isArray(modelNames)) {
          modelNames = utils.array.flatten(modelNames);
        }
      } else {
        // We may have a discriminator, in which case we don't want to
        // populate using the base model by default
        modelNames = discriminatorKey ? null : [model.modelName];
      }
    }

    if (!modelNames) {
      return { modelNames: modelNames, isRefPath: isRefPath, refPath: normalizedRefPath, justOne: justOne };
    }

    if (!Array.isArray(modelNames)) {
      modelNames = [modelNames];
    }

    return { modelNames: modelNames, isRefPath: isRefPath, refPath: normalizedRefPath, justOne: justOne };
  }
};
/*!
* ignore
*/
/*!
 * Resolve a dynamic `ref`: when `ref` is a plain function (not a compiled
 * Model, which is also `typeof 'function'` but carries `modelSymbol`), call
 * it with the doc to get the actual ref.
 */
function handleRefFunction(ref, doc) {
  const isDynamicRef = typeof ref === 'function' && !ref[modelSymbol];
  return isDynamicRef ? ref.call(doc, doc) : ref;
}
/*!
* Retrieve the _id of `val` if a Document or Array of Documents.
*
* @param {Array|Document|Any} val
* @return {Array|Document|Any}
*/
/*!
 * Retrieve the `_id` of `val` if it is a Document or an Array of Documents.
 * Maps and plain objects (un-hydrated maps) are flattened to their values.
 *
 * @param {Array|Document|Any} val
 * @param {Schema} [schema] schematype for the path, used to detect map/Mixed paths
 * @return {Array|Document|Any}
 */
function convertTo_id(val, schema) {
  if (val != null) {
    // Hydrated document: always use its `_id`.
    if (val.$__ != null) {
      return val._id;
    }
    // Lean doc with an `_id` — unless we expect a Map, whose values may
    // themselves have `_id` keys.
    if (val._id != null && (schema == null || !schema.$isSchemaMap)) {
      return val._id;
    }
  }

  if (Array.isArray(val)) {
    // Replace hydrated documents in the array with their `_id`s in place.
    for (let i = 0; i < val.length; ++i) {
      const entry = val[i];
      if (entry != null && entry.$__ != null) {
        val[i] = entry._id;
      }
    }
    // Mongoose arrays know their schema and can cast the whole array.
    if (val.isMongooseArray && val.$schema()) {
      return val.$schema()._castForPopulate(val, val.$parent());
    }

    return [].concat(val);
  }

  // `populate('map')` may be an object if populating on a doc that hasn't
  // been hydrated yet. Only flatten when we expect a Map in the end —
  // avoid doing this for Mixed paths.
  if (val != null &&
      val.constructor.name === 'Object' &&
      (schema == null || schema[schemaMixedSymbol] == null)) {
    return Object.keys(val).map(key => val[key]);
  }
  // If doc has already been hydrated, e.g. `doc.populate('map').execPopulate()`,
  // then `val` will already be a map.
  if (val instanceof Map) {
    return Array.from(val.values());
  }

  return val;
}

View File

@@ -0,0 +1,209 @@
'use strict';
/*!
* ignore
*/
const Mixed = require('../../schema/mixed');
const get = require('../get');
const leanPopulateMap = require('./leanPopulateMap');
const mpath = require('mpath');
const populateModelSymbol = require('../symbols').populateModelSymbol;
/*!
* @param {Schema} schema
* @param {Object} doc POJO
* @param {string} path
*/
/**
 * Resolve the schematype(s) for `path` in `schema`, drilling into document
 * arrays, maps, embedded discriminators, and paths that have already been
 * populated on `doc`. May return a single schematype, an array of candidate
 * schematypes (embedded discriminators), or `undefined` when nothing matches.
 *
 * @param {Schema} schema
 * @param {Object} doc POJO or hydrated document; may be null
 * @param {String} path
 */
module.exports = function getSchemaTypes(schema, doc, path) {
  const pathschema = schema.path(path);
  const topLevelDoc = doc;
  if (pathschema) {
    return pathschema;
  }

  // Try progressively longer prefixes of `parts` against `schema` (the loop
  // counts `p` down, so the longest matching prefix wins), then recurse on
  // the remainder of the path inside whatever that prefix resolved to.
  function search(parts, schema, subdoc, nestedPath) {
    let p = parts.length + 1;
    let foundschema;
    let trypath;

    while (p--) {
      trypath = parts.slice(0, p).join('.');
      foundschema = schema.path(trypath);
      if (foundschema == null) {
        continue;
      }

      if (foundschema.caster) {
        // array of Mixed?
        if (foundschema.caster instanceof Mixed) {
          return foundschema.caster;
        }

        // Embedded discriminators: collect the discriminator schemas that
        // actually occur in `subdoc` (or all of them when we have no doc).
        let schemas = null;
        if (foundschema.schema != null && foundschema.schema.discriminators != null) {
          const discriminators = foundschema.schema.discriminators;
          const discriminatorKeyPath = trypath + '.' +
            foundschema.schema.options.discriminatorKey;
          const keys = subdoc ? mpath.get(discriminatorKeyPath, subdoc) || [] : [];
          schemas = Object.keys(discriminators).
            reduce(function(cur, discriminator) {
              const tiedValue = discriminators[discriminator].discriminatorMapping.value;
              if (doc == null || keys.indexOf(discriminator) !== -1 || keys.indexOf(tiedValue) !== -1) {
                cur.push(discriminators[discriminator]);
              }
              return cur;
            }, []);
        }

        // Now that we found the array, we need to check if there
        // are remaining document paths to look up for casting.
        // Also we need to handle array.$.path since schema.path
        // doesn't work for that.
        // If there is no foundschema.schema we are dealing with
        // a path like array.$
        if (p !== parts.length && foundschema.schema) {
          let ret;
          if (parts[p] === '$') {
            if (p + 1 === parts.length) {
              // comments.$
              return foundschema;
            }
            // comments.$.comments.$.title
            ret = search(
              parts.slice(p + 1),
              schema,
              subdoc ? mpath.get(trypath, subdoc) : null,
              nestedPath.concat(parts.slice(0, p))
            );
            if (ret) {
              ret.$isUnderneathDocArray = ret.$isUnderneathDocArray ||
                !foundschema.schema.$isSingleNested;
            }
            return ret;
          }

          if (schemas != null && schemas.length > 0) {
            // Recurse into each candidate discriminator schema and return
            // the array of matches.
            ret = [];
            for (const schema of schemas) {
              const _ret = search(
                parts.slice(p),
                schema,
                subdoc ? mpath.get(trypath, subdoc) : null,
                nestedPath.concat(parts.slice(0, p))
              );
              if (_ret != null) {
                _ret.$isUnderneathDocArray = _ret.$isUnderneathDocArray ||
                  !foundschema.schema.$isSingleNested;
                if (_ret.$isUnderneathDocArray) {
                  ret.$isUnderneathDocArray = true;
                }
                ret.push(_ret);
              }
            }
            return ret;
          } else {
            ret = search(
              parts.slice(p),
              foundschema.schema,
              subdoc ? mpath.get(trypath, subdoc) : null,
              nestedPath.concat(parts.slice(0, p))
            );

            if (ret) {
              ret.$isUnderneathDocArray = ret.$isUnderneathDocArray ||
                !foundschema.schema.$isSingleNested;
            }

            return ret;
          }
        } else if (p !== parts.length &&
            foundschema.$isMongooseArray &&
            foundschema.casterConstructor.$isMongooseArray) {
          // Nested arrays. Drill down to the bottom of the nested array.
          let type = foundschema;
          while (type.$isMongooseArray && !type.$isMongooseDocumentArray) {
            type = type.casterConstructor;
          }

          const ret = search(
            parts.slice(p),
            type.schema,
            null,
            nestedPath.concat(parts.slice(0, p))
          );
          if (ret != null) {
            return ret;
          }

          if (type.schema.discriminators) {
            const discriminatorPaths = [];
            for (const discriminatorName of Object.keys(type.schema.discriminators)) {
              const _schema = type.schema.discriminators[discriminatorName] || type.schema;
              const ret = search(parts.slice(p), _schema, null, nestedPath.concat(parts.slice(0, p)));
              if (ret != null) {
                discriminatorPaths.push(ret);
              }
            }
            if (discriminatorPaths.length > 0) {
              return discriminatorPaths;
            }
          }
        }
      }

      const fullPath = nestedPath.concat([trypath]).join('.');

      // If this prefix was populated on a hydrated doc, keep searching in
      // the schema of the model it was populated with.
      if (topLevelDoc != null && topLevelDoc.$__ && topLevelDoc.populated(fullPath) && p < parts.length) {
        const model = doc.$__.populated[fullPath].options[populateModelSymbol];
        if (model != null) {
          const ret = search(
            parts.slice(p),
            model.schema,
            subdoc ? mpath.get(trypath, subdoc) : null,
            nestedPath.concat(parts.slice(0, p))
          );

          if (ret) {
            ret.$isUnderneathDocArray = ret.$isUnderneathDocArray ||
              !model.schema.$isSingleNested;
          }

          return ret;
        }
      }

      const _val = get(topLevelDoc, trypath);
      if (_val != null) {
        const model = Array.isArray(_val) && _val.length > 0 ?
          leanPopulateMap.get(_val[0]) :
          leanPopulateMap.get(_val);

        // Populated using lean, `leanPopulateMap` value is the foreign model
        const schema = model != null ? model.schema : null;
        if (schema != null) {
          const ret = search(
            parts.slice(p),
            schema,
            subdoc ? mpath.get(trypath, subdoc) : null,
            nestedPath.concat(parts.slice(0, p))
          );

          if (ret != null) {
            ret.$isUnderneathDocArray = ret.$isUnderneathDocArray ||
              !schema.$isSingleNested;
            return ret;
          }
        }
      }

      // Longest matching prefix found but nothing deeper matched: return it.
      // If no prefix matched at all, the loop ends and the function returns
      // `undefined` implicitly.
      return foundschema;
    }
  }

  // look for arrays
  const parts = path.split('.');
  for (let i = 0; i < parts.length; ++i) {
    if (parts[i] === '$') {
      // Re: gh-5628, because `schema.path()` doesn't take $ into account.
      parts[i] = '0';
    }
  }
  return search(parts, schema, doc, []);
};

View File

@@ -0,0 +1,72 @@
'use strict';
module.exports = getVirtual;

/**
 * Look up the virtual named `name` on `schema`, descending through nested
 * paths, subdocument schemas, and discriminators.
 *
 * @param {Schema} schema
 * @param {String} name dotted path of the virtual
 * @returns {Object|null|undefined} `{ virtual, nestedSchemaPath }` where
 *   `nestedSchemaPath` is the prefix of `name` that lives on parent schemas;
 *   `null`/`undefined` when no virtual matches.
 */
function getVirtual(schema, name) {
  if (schema.virtuals[name]) {
    return { virtual: schema.virtuals[name], path: void 0 };
  }
  const parts = name.split('.');
  let cur = '';
  let nestedSchemaPath = '';
  for (let i = 0; i < parts.length; ++i) {
    cur += (cur.length > 0 ? '.' : '') + parts[i];
    if (schema.virtuals[cur]) {
      if (i === parts.length - 1) {
        return { virtual: schema.virtuals[cur], path: nestedSchemaPath };
      }
      continue;
    }

    if (schema.nested[cur]) {
      continue;
    }

    if (schema.paths[cur] && schema.paths[cur].schema) {
      // `cur` is a subdocument path: descend into its schema and look for
      // the remainder of the name there.
      schema = schema.paths[cur].schema;
      const rest = parts.slice(i + 1).join('.');

      if (schema.virtuals[rest]) {
        if (i === parts.length - 2) {
          return {
            virtual: schema.virtuals[rest],
            nestedSchemaPath: [nestedSchemaPath, cur].filter(v => !!v).join('.')
          };
        }
        continue;
      }

      if (i + 1 < parts.length && schema.discriminators) {
        // Check each embedded discriminator schema for the remainder.
        for (const key of Object.keys(schema.discriminators)) {
          const res = getVirtual(schema.discriminators[key], rest);
          if (res != null) {
            const _path = [nestedSchemaPath, cur, res.nestedSchemaPath].
              filter(v => !!v).join('.');
            return {
              virtual: res.virtual,
              nestedSchemaPath: _path
            };
          }
        }
      }

      nestedSchemaPath += (nestedSchemaPath.length > 0 ? '.' : '') + cur;
      cur = '';
      continue;
    }

    if (schema.discriminators) {
      for (const discriminatorKey of Object.keys(schema.discriminators)) {
        const virtualFromDiscriminator = getVirtual(schema.discriminators[discriminatorKey], name);
        if (virtualFromDiscriminator) return virtualFromDiscriminator;
      }
    }

    // NOTE(review): this `return null` sits inside the loop, so the search
    // gives up at the first segment that is neither a virtual, a nested
    // path, a subdocument path, nor a discriminator match. If the loop runs
    // to completion instead, the function implicitly returns `undefined` —
    // presumably intentional since callers only check for truthiness/null.
    return null;
  }
}

View File

@@ -0,0 +1,7 @@
'use strict';

/*!
 * Maps lean-populated documents (plain objects) to the Model they were
 * populated with. Lean docs have no `$__` internal state to record this on,
 * so other populate helpers consult this WeakMap instead. Keys are the docs
 * themselves, so entries are garbage-collected along with the docs.
 */

module.exports = new WeakMap();

View File

@@ -0,0 +1,28 @@
'use strict';
module.exports = function lookupLocalFields(cur, path, val) {
if (cur == null) {
return cur;
}
if (cur._doc != null) {
cur = cur._doc;
}
if (arguments.length >= 3) {
if (typeof cur !== 'object') {
return void 0;
}
cur[path] = val;
return val;
}
// Support populating paths under maps using `map.$*.subpath`
if (path === '$*') {
return cur instanceof Map ?
Array.from(cur.values()) :
Object.keys(cur).map(key => cur[key]);
}
return cur[path];
};

View File

@@ -0,0 +1,45 @@
'use strict';
module.exports = function normalizeRefPath(refPath, doc, populatedPath) {
if (refPath == null) {
return refPath;
}
if (typeof refPath === 'function') {
refPath = refPath.call(doc, doc, populatedPath);
}
// If populated path has numerics, the end `refPath` should too. For example,
// if populating `a.0.b` instead of `a.b` and `b` has `refPath = a.c`, we
// should return `a.0.c` for the refPath.
const hasNumericProp = /(\.\d+$|\.\d+\.)/g;
if (hasNumericProp.test(populatedPath)) {
const chunks = populatedPath.split(hasNumericProp);
if (chunks[chunks.length - 1] === '') {
throw new Error('Can\'t populate individual element in an array');
}
let _refPath = '';
let _remaining = refPath;
// 2nd, 4th, etc. will be numeric props. For example: `[ 'a', '.0.', 'b' ]`
for (let i = 0; i < chunks.length; i += 2) {
const chunk = chunks[i];
if (_remaining.startsWith(chunk + '.')) {
_refPath += _remaining.substr(0, chunk.length) + chunks[i + 1];
_remaining = _remaining.substr(chunk.length + 1);
} else if (i === chunks.length - 1) {
_refPath += _remaining;
_remaining = '';
break;
} else {
throw new Error('Could not normalize ref path, chunk ' + chunk + ' not in populated path');
}
}
return _refPath;
}
return refPath;
};

View File

@@ -0,0 +1,31 @@
'use strict';
const get = require('../get');
const mpath = require('mpath');
const parseProjection = require('../projection/parseProjection');
/*!
* ignore
*/
module.exports = function removeDeselectedForeignField(foreignFields, options, docs) {
const projection = parseProjection(get(options, 'select', null), true) ||
parseProjection(get(options, 'options.select', null), true);
if (projection == null) {
return;
}
for (const foreignField of foreignFields) {
if (!projection.hasOwnProperty('-' + foreignField)) {
continue;
}
for (const val of docs) {
if (val.$__ != null) {
mpath.unset(foreignField, val._doc);
} else {
mpath.unset(foreignField, val);
}
}
}
};

View File

@@ -0,0 +1,19 @@
'use strict';
const MongooseError = require('../../error/mongooseError');
const util = require('util');
module.exports = validateRef;
/**
 * Validate a `ref` option: it must be a model name (string) or a
 * model/function.
 *
 * @param {Any} ref
 * @param {String} path schema path the ref was declared on (for the message)
 * @throws {MongooseError} when `ref` is neither a string nor a function
 */
function validateRef(ref, path) {
  const kind = typeof ref;
  if (kind === 'string' || kind === 'function') {
    return;
  }

  throw new MongooseError('Invalid ref at path "' + path + '". Got ' +
    util.inspect(ref, { depth: 0 }));
}

15
node_modules/mongoose/lib/helpers/printJestWarning.js generated vendored Normal file
View File

@@ -0,0 +1,15 @@
'use strict';

// Warn users who test Mongoose apps under Jest configurations known to break
// Node.js apps: the default jsdom environment and mocked timers. See
// http://mongoosejs.com/docs/jest.html for the recommended setup.

const docsMessage = 'Please make sure you read ' +
  'Mongoose\'s docs on configuring Jest to test Node.js apps: ' +
  'http://mongoosejs.com/docs/jest.html';

const usingJest = typeof jest !== 'undefined';

// jsdom defines a global `window`, which breaks Mongoose's Node driver.
if (usingJest && typeof window !== 'undefined') {
  console.warn('Mongoose: looks like you\'re trying to test a Mongoose app ' +
    'with Jest\'s default jsdom test environment. ' + docsMessage);
}

// With Jest fake timers, `process.nextTick` is replaced by a mock whose
// source no longer contains 'nextTick'.
if (usingJest && process.nextTick.toString().indexOf('nextTick') === -1) {
  console.warn('Mongoose: looks like you\'re trying to test a Mongoose app ' +
    'with Jest\'s mock timers enabled. ' + docsMessage);
}

View File

@@ -0,0 +1,18 @@
'use strict';
/*!
* ignore
*/
module.exports = function isDefiningProjection(val) {
if (val == null) {
// `undefined` or `null` become exclusive projections
return true;
}
if (typeof val === 'object') {
// Only cases where a value does **not** define whether the whole projection
// is inclusive or exclusive are `$meta` and `$slice`.
return !('$meta' in val) && !('$slice' in val);
}
return true;
};

Some files were not shown because too many files have changed in this diff Show More