language: node_js
sudo: false
node_js: [10, 9, 8, 7, 6, 5, 4]
install:
  - travis_retry npm install
before_script:
  - wget http://fastdl.mongodb.org/linux/mongodb-linux-x86_64-3.6.6.tgz
  - tar -zxvf mongodb-linux-x86_64-3.6.6.tgz
  - mkdir -p ./data/db/27017
  - mkdir -p ./data/db/27000
  - printf "\n--timeout 8000" >> ./test/mocha.opts
  - ./mongodb-linux-x86_64-3.6.6/bin/mongod --fork --dbpath ./data/db/27017 --syslog --port 27017
  - sleep 2
matrix:
  include:
    - name: "👕Linter"
      node_js: 10
      before_script: skip
      script: npm run lint
notifications:
  email: false
@@ -1,8 +0,0 @@ | |||
/**
 * Browser bundle entry point: re-exports the browser build of mongoose
 * from lib/browser.
 */
'use strict';
module.exports = require('./lib/browser');
@@ -1,9 +0,0 @@ | |||
/**
 * Package entry point: re-exports the mongoose instance built in lib/.
 */
'use strict';
module.exports = require('./lib/');
@@ -1,133 +0,0 @@ | |||
/* eslint-env browser */
'use strict';
// Wire up the browser driver before any other mongoose module loads.
require('./driver').set(require('./drivers/browser'));
const DocumentProvider = require('./document_provider.js');
const PromiseProvider = require('./promise_provider');
// Tell the document provider to hand out the browser Document class.
DocumentProvider.setBrowser(true);
/**
 * The Mongoose [Promise](#promise_Promise) constructor.
 *
 * Reading returns the currently configured promise library; assigning
 * replaces it (e.g. `mongoose.Promise = require('bluebird')`).
 *
 * @method Promise
 * @api public
 */
Object.defineProperty(exports, 'Promise', {
  get: function() {
    return PromiseProvider.get();
  },
  set: function(lib) {
    PromiseProvider.set(lib);
  }
});
/**
 * Storage layer for mongoose promises.
 *
 * @method PromiseProvider
 * @api public
 */
exports.PromiseProvider = PromiseProvider;
/**
 * The [MongooseError](#error_MongooseError) constructor.
 *
 * @method Error
 * @api public
 */
exports.Error = require('./error');
/**
 * The Mongoose [Schema](#schema_Schema) constructor
 *
 * ####Example:
 *
 *     var mongoose = require('mongoose');
 *     var Schema = mongoose.Schema;
 *     var CatSchema = new Schema(..);
 *
 * @method Schema
 * @api public
 */
exports.Schema = require('./schema');
/**
 * The various Mongoose Types.
 *
 * ####Example:
 *
 *     var mongoose = require('mongoose');
 *     var array = mongoose.Types.Array;
 *
 * ####Types:
 *
 * - [ObjectId](#types-objectid-js)
 * - [Buffer](#types-buffer-js)
 * - [SubDocument](#types-embedded-js)
 * - [Array](#types-array-js)
 * - [DocumentArray](#types-documentarray-js)
 *
 * Using this exposed access to the `ObjectId` type, we can construct ids on demand.
 *
 *     var ObjectId = mongoose.Types.ObjectId;
 *     var id1 = new ObjectId;
 *
 * @property Types
 * @api public
 */
exports.Types = require('./types');
/**
 * The Mongoose [VirtualType](#virtualtype_VirtualType) constructor
 *
 * @method VirtualType
 * @api public
 */
exports.VirtualType = require('./virtualtype');
/**
 * The Mongoose [SchemaType](#schematype_SchemaType) constructor.
 *
 * This is the base class for all schema types, exported from
 * `./schematype.js` (not the `SchemaTypes` alias of `Schema.Types`).
 *
 * @method SchemaType
 * @api public
 */
exports.SchemaType = require('./schematype.js');
/**
 * Internal utils
 *
 * @property utils
 * @api private
 */
exports.utils = require('./utils.js');
/**
 * The Mongoose browser [Document](#document-js) constructor.
 *
 * @method Document
 * @api public
 */
exports.Document = DocumentProvider();
/*!
 * Expose mongoose (and Buffer) globally when running in a real browser.
 * NOTE(review): assumes the bundler provides a `Buffer` polyfill in scope;
 * in a plain browser without one this line would throw — confirm build setup.
 */
if (typeof window !== 'undefined') {
  window.mongoose = module.exports;
  window.Buffer = Buffer;
}
@@ -1,102 +0,0 @@ | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
'use strict'; | |||
const NodeJSDocument = require('./document'); | |||
const EventEmitter = require('events').EventEmitter; | |||
const MongooseError = require('./error'); | |||
const Schema = require('./schema'); | |||
const ObjectId = require('./types/objectid'); | |||
const ValidationError = MongooseError.ValidationError; | |||
const applyHooks = require('./helpers/model/applyHooks'); | |||
const utils = require('./utils'); | |||
/**
 * Browser Document constructor.
 *
 * @param {Object} obj the values to set
 * @param {Object|Schema} schema the schema for this document, or a plain definition object to compile into one
 * @param {Object} [fields] optional object containing the fields which were selected in the query returning this document and any populated paths data
 * @param {Boolean} [skipId] bool, should we auto create an ObjectId _id
 * @param {Boolean} [skipInit] bool, whether to skip initialization
 * @inherits NodeJS EventEmitter http://nodejs.org/api/events.html#events_class_events_eventemitter
 * @event `init`: Emitted on a document after it was retrieved from the db and fully hydrated by Mongoose.
 * @event `save`: Emitted when the document is successfully saved
 * @api private
 */
function Document(obj, schema, fields, skipId, skipInit) {
  if (!(this instanceof Document)) {
    return new Document(obj, schema, fields, skipId, skipInit);
  }
  // Allow passing a plain definition object in place of a compiled Schema.
  if (utils.isObject(schema) && !schema.instanceOfSchema) {
    schema = new Schema(schema);
  }
  // When creating an EmbeddedDocument, it already has a schema and doesn't need an _id
  schema = this.schema || schema;
  // Fail fast with the intended error when no schema is available.
  // This check must precede the `schema.options` access below; previously a
  // missing schema surfaced as a TypeError instead of MissingSchemaError.
  if (!schema) {
    throw new MongooseError.MissingSchemaError();
  }
  // Generate an ObjectId `_id` if the schema wants one and none was provided
  if (!this.schema && schema.options._id) {
    obj = obj || {};
    if (obj._id === undefined) {
      obj._id = new ObjectId();
    }
  }
  this.$__setSchema(schema);
  NodeJSDocument.call(this, obj, fields, skipId, skipInit);
  applyHooks(this, schema, { decorateDoc: true });
  // apply methods
  for (const m in schema.methods) {
    this[m] = schema.methods[m];
  }
  // apply statics
  for (const s in schema.statics) {
    this[s] = schema.statics[s];
  }
}
/*!
 * Inherit from the NodeJS Document.
 */
Document.prototype = Object.create(NodeJSDocument.prototype);
Document.prototype.constructor = Document;
/*!
 * Class-level event emitter used internally by mongoose.
 */
Document.events = new EventEmitter();
/*!
 * Browser doc exposes the event emitter API on the class itself.
 */
Document.$emitter = new EventEmitter();
// Proxy the common EventEmitter methods from the Document class to the
// shared `$emitter` instance.
utils.each(
  ['on', 'once', 'emit', 'listeners', 'removeListener', 'setMaxListeners',
    'removeAllListeners', 'addListener'],
  function(emitterFn) {
    Document[emitterFn] = function() {
      return Document.$emitter[emitterFn].apply(Document.$emitter, arguments);
    };
  });
/*!
 * Module exports.
 */
Document.ValidationError = ValidationError;
module.exports = exports = Document;
@@ -1,340 +0,0 @@ | |||
'use strict'; | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
const StrictModeError = require('./error/strict'); | |||
const Types = require('./schema/index'); | |||
const castTextSearch = require('./schema/operators/text'); | |||
const get = require('./helpers/get'); | |||
const util = require('util'); | |||
const utils = require('./utils'); | |||
const ALLOWED_GEOWITHIN_GEOJSON_TYPES = ['Polygon', 'MultiPolygon']; | |||
/** | |||
* Handles internal casting for query filters. | |||
* | |||
* @param {Schema} schema | |||
* @param {Object} obj Object to cast | |||
* @param {Object} options the query options | |||
* @param {Query} context passed to setters | |||
* @api private | |||
*/ | |||
module.exports = function cast(schema, obj, options, context) { | |||
if (Array.isArray(obj)) { | |||
throw new Error('Query filter must be an object, got an array ', util.inspect(obj)); | |||
} | |||
const paths = Object.keys(obj); | |||
let i = paths.length; | |||
let _keys; | |||
let any$conditionals; | |||
let schematype; | |||
let nested; | |||
let path; | |||
let type; | |||
let val; | |||
options = options || {}; | |||
while (i--) { | |||
path = paths[i]; | |||
val = obj[path]; | |||
if (path === '$or' || path === '$nor' || path === '$and') { | |||
let k = val.length; | |||
while (k--) { | |||
val[k] = cast(schema, val[k], options, context); | |||
} | |||
} else if (path === '$where') { | |||
type = typeof val; | |||
if (type !== 'string' && type !== 'function') { | |||
throw new Error('Must have a string or function for $where'); | |||
} | |||
if (type === 'function') { | |||
obj[path] = val.toString(); | |||
} | |||
continue; | |||
} else if (path === '$elemMatch') { | |||
val = cast(schema, val, options, context); | |||
} else if (path === '$text') { | |||
val = castTextSearch(val, path); | |||
} else { | |||
if (!schema) { | |||
// no casting for Mixed types | |||
continue; | |||
} | |||
schematype = schema.path(path); | |||
// Check for embedded discriminator paths | |||
if (!schematype) { | |||
const split = path.split('.'); | |||
let j = split.length; | |||
while (j--) { | |||
const pathFirstHalf = split.slice(0, j).join('.'); | |||
const pathLastHalf = split.slice(j).join('.'); | |||
const _schematype = schema.path(pathFirstHalf); | |||
const discriminatorKey = get(_schematype, 'schema.options.discriminatorKey'); | |||
// gh-6027: if we haven't found the schematype but this path is | |||
// underneath an embedded discriminator and the embedded discriminator | |||
// key is in the query, use the embedded discriminator schema | |||
if (_schematype != null && | |||
get(_schematype, 'schema.discriminators') != null && | |||
discriminatorKey != null && | |||
pathLastHalf !== discriminatorKey) { | |||
const discriminatorVal = get(obj, pathFirstHalf + '.' + discriminatorKey); | |||
if (discriminatorVal != null) { | |||
schematype = _schematype.schema.discriminators[discriminatorVal]. | |||
path(pathLastHalf); | |||
} | |||
} | |||
} | |||
} | |||
if (!schematype) { | |||
// Handle potential embedded array queries | |||
const split = path.split('.'); | |||
let j = split.length; | |||
let pathFirstHalf; | |||
let pathLastHalf; | |||
let remainingConds; | |||
// Find the part of the var path that is a path of the Schema | |||
while (j--) { | |||
pathFirstHalf = split.slice(0, j).join('.'); | |||
schematype = schema.path(pathFirstHalf); | |||
if (schematype) { | |||
break; | |||
} | |||
} | |||
// If a substring of the input path resolves to an actual real path... | |||
if (schematype) { | |||
// Apply the casting; similar code for $elemMatch in schema/array.js | |||
if (schematype.caster && schematype.caster.schema) { | |||
remainingConds = {}; | |||
pathLastHalf = split.slice(j).join('.'); | |||
remainingConds[pathLastHalf] = val; | |||
obj[path] = cast(schematype.caster.schema, remainingConds, options, context)[pathLastHalf]; | |||
} else { | |||
obj[path] = val; | |||
} | |||
continue; | |||
} | |||
if (utils.isObject(val)) { | |||
// handle geo schemas that use object notation | |||
// { loc: { long: Number, lat: Number } | |||
let geo = ''; | |||
if (val.$near) { | |||
geo = '$near'; | |||
} else if (val.$nearSphere) { | |||
geo = '$nearSphere'; | |||
} else if (val.$within) { | |||
geo = '$within'; | |||
} else if (val.$geoIntersects) { | |||
geo = '$geoIntersects'; | |||
} else if (val.$geoWithin) { | |||
geo = '$geoWithin'; | |||
} | |||
if (geo) { | |||
const numbertype = new Types.Number('__QueryCasting__'); | |||
let value = val[geo]; | |||
if (val.$maxDistance != null) { | |||
val.$maxDistance = numbertype.castForQueryWrapper({ | |||
val: val.$maxDistance, | |||
context: context | |||
}); | |||
} | |||
if (val.$minDistance != null) { | |||
val.$minDistance = numbertype.castForQueryWrapper({ | |||
val: val.$minDistance, | |||
context: context | |||
}); | |||
} | |||
if (geo === '$within') { | |||
const withinType = value.$center | |||
|| value.$centerSphere | |||
|| value.$box | |||
|| value.$polygon; | |||
if (!withinType) { | |||
throw new Error('Bad $within parameter: ' + JSON.stringify(val)); | |||
} | |||
value = withinType; | |||
} else if (geo === '$near' && | |||
typeof value.type === 'string' && Array.isArray(value.coordinates)) { | |||
// geojson; cast the coordinates | |||
value = value.coordinates; | |||
} else if ((geo === '$near' || geo === '$nearSphere' || geo === '$geoIntersects') && | |||
value.$geometry && typeof value.$geometry.type === 'string' && | |||
Array.isArray(value.$geometry.coordinates)) { | |||
if (value.$maxDistance != null) { | |||
value.$maxDistance = numbertype.castForQueryWrapper({ | |||
val: value.$maxDistance, | |||
context: context | |||
}); | |||
} | |||
if (value.$minDistance != null) { | |||
value.$minDistance = numbertype.castForQueryWrapper({ | |||
val: value.$minDistance, | |||
context: context | |||
}); | |||
} | |||
if (utils.isMongooseObject(value.$geometry)) { | |||
value.$geometry = value.$geometry.toObject({ | |||
transform: false, | |||
virtuals: false | |||
}); | |||
} | |||
value = value.$geometry.coordinates; | |||
} else if (geo === '$geoWithin') { | |||
if (value.$geometry) { | |||
if (utils.isMongooseObject(value.$geometry)) { | |||
value.$geometry = value.$geometry.toObject({ virtuals: false }); | |||
} | |||
const geoWithinType = value.$geometry.type; | |||
if (ALLOWED_GEOWITHIN_GEOJSON_TYPES.indexOf(geoWithinType) === -1) { | |||
throw new Error('Invalid geoJSON type for $geoWithin "' + | |||
geoWithinType + '", must be "Polygon" or "MultiPolygon"'); | |||
} | |||
value = value.$geometry.coordinates; | |||
} else { | |||
value = value.$box || value.$polygon || value.$center || | |||
value.$centerSphere; | |||
if (utils.isMongooseObject(value)) { | |||
value = value.toObject({ virtuals: false }); | |||
} | |||
} | |||
} | |||
_cast(value, numbertype, context); | |||
continue; | |||
} | |||
} | |||
if (schema.nested[path]) { | |||
continue; | |||
} | |||
if (options.upsert && options.strict) { | |||
if (options.strict === 'throw') { | |||
throw new StrictModeError(path); | |||
} | |||
throw new StrictModeError(path, 'Path "' + path + '" is not in ' + | |||
'schema, strict mode is `true`, and upsert is `true`.'); | |||
} else if (options.strictQuery === 'throw') { | |||
throw new StrictModeError(path, 'Path "' + path + '" is not in ' + | |||
'schema and strictQuery is true.'); | |||
} else if (options.strictQuery) { | |||
delete obj[path]; | |||
} | |||
} else if (val == null) { | |||
continue; | |||
} else if (val.constructor.name === 'Object') { | |||
any$conditionals = Object.keys(val).some(function(k) { | |||
return k.charAt(0) === '$' && k !== '$id' && k !== '$ref'; | |||
}); | |||
if (!any$conditionals) { | |||
obj[path] = schematype.castForQueryWrapper({ | |||
val: val, | |||
context: context | |||
}); | |||
} else { | |||
const ks = Object.keys(val); | |||
let $cond; | |||
let k = ks.length; | |||
while (k--) { | |||
$cond = ks[k]; | |||
nested = val[$cond]; | |||
if ($cond === '$not') { | |||
if (nested && schematype && !schematype.caster) { | |||
_keys = Object.keys(nested); | |||
if (_keys.length && _keys[0].charAt(0) === '$') { | |||
for (const key in nested) { | |||
nested[key] = schematype.castForQueryWrapper({ | |||
$conditional: key, | |||
val: nested[key], | |||
context: context | |||
}); | |||
} | |||
} else { | |||
val[$cond] = schematype.castForQueryWrapper({ | |||
$conditional: $cond, | |||
val: nested, | |||
context: context | |||
}); | |||
} | |||
continue; | |||
} | |||
cast(schematype.caster ? schematype.caster.schema : schema, nested, options, context); | |||
} else { | |||
val[$cond] = schematype.castForQueryWrapper({ | |||
$conditional: $cond, | |||
val: nested, | |||
context: context | |||
}); | |||
} | |||
} | |||
} | |||
} else if (Array.isArray(val) && ['Buffer', 'Array'].indexOf(schematype.instance) === -1) { | |||
const casted = []; | |||
for (let valIndex = 0; valIndex < val.length; valIndex++) { | |||
casted.push(schematype.castForQueryWrapper({ | |||
val: val[valIndex], | |||
context: context | |||
})); | |||
} | |||
obj[path] = { $in: casted }; | |||
} else { | |||
obj[path] = schematype.castForQueryWrapper({ | |||
val: val, | |||
context: context | |||
}); | |||
} | |||
} | |||
} | |||
return obj; | |||
}; | |||
/*!
 * Recursively casts every scalar inside `val` (array or plain object) to a
 * number via `numbertype`, mutating `val` in place. Nested arrays/objects
 * are descended into.
 *
 * @param {Array|Object} val value to cast in place
 * @param {SchemaType} numbertype number schema type used for casting
 * @param {Query} context passed through to setters
 * @api private
 */
function _cast(val, numbertype, context) {
  if (Array.isArray(val)) {
    val.forEach(function(item, i) {
      if (Array.isArray(item) || utils.isObject(item)) {
        return _cast(item, numbertype, context);
      }
      val[i] = numbertype.castForQueryWrapper({ val: item, context: context });
    });
  } else {
    const nearKeys = Object.keys(val);
    let nearLen = nearKeys.length;
    while (nearLen--) {
      const nkey = nearKeys[nearLen];
      const item = val[nkey];
      if (Array.isArray(item) || utils.isObject(item)) {
        _cast(item, numbertype, context);
        val[nkey] = item;
      } else {
        // Use the same `castForQueryWrapper({val, context})` API as every
        // other call site in this file; `castForQuery` takes positional
        // arguments and would have cast the wrapper object itself.
        val[nkey] = numbertype.castForQueryWrapper({ val: item, context: context });
      }
    }
  }
}
@@ -1,31 +0,0 @@ | |||
'use strict'; | |||
const CastError = require('../error/cast'); | |||
/*! | |||
* Given a value, cast it to a boolean, or throw a `CastError` if the value | |||
* cannot be casted. `null` and `undefined` are considered valid. | |||
* | |||
* @param {Any} value | |||
* @param {String} [path] optional the path to set on the CastError | |||
* @return {Boolean|null|undefined} | |||
* @throws {CastError} if `value` is not one of the allowed values | |||
* @api private | |||
*/ | |||
module.exports = function castBoolean(value, path) { | |||
if (value == null) { | |||
return value; | |||
} | |||
if (module.exports.convertToTrue.has(value)) { | |||
return true; | |||
} | |||
if (module.exports.convertToFalse.has(value)) { | |||
return false; | |||
} | |||
throw new CastError('boolean', value, path); | |||
}; | |||
module.exports.convertToTrue = new Set([true, 'true', 1, '1', 'yes']); | |||
module.exports.convertToFalse = new Set([false, 'false', 0, '0', 'no']); |
@@ -1,41 +0,0 @@ | |||
'use strict'; | |||
const assert = require('assert'); | |||
module.exports = function castDate(value) { | |||
// Support empty string because of empty form values. Originally introduced | |||
// in https://github.com/Automattic/mongoose/commit/efc72a1898fc3c33a319d915b8c5463a22938dfe | |||
if (value == null || value === '') { | |||
return null; | |||
} | |||
if (value instanceof Date) { | |||
assert.ok(!isNaN(value.valueOf())); | |||
return value; | |||
} | |||
let date; | |||
assert.ok(typeof value !== 'boolean'); | |||
if (value instanceof Number || typeof value === 'number') { | |||
date = new Date(value); | |||
} else if (typeof value === 'string' && !isNaN(Number(value)) && (Number(value) >= 275761 || Number(value) < -271820)) { | |||
// string representation of milliseconds take this path | |||
date = new Date(Number(value)); | |||
} else if (typeof value.valueOf === 'function') { | |||
// support for moment.js. This is also the path strings will take because | |||
// strings have a `valueOf()` | |||
date = new Date(value.valueOf()); | |||
} else { | |||
// fallback | |||
date = new Date(value); | |||
} | |||
if (!isNaN(date.valueOf())) { | |||
return date; | |||
} | |||
assert.ok(false); | |||
}; |
@@ -1,36 +0,0 @@ | |||
'use strict'; | |||
const Decimal128Type = require('../types/decimal128'); | |||
const assert = require('assert'); | |||
module.exports = function castDecimal128(value) { | |||
if (value == null) { | |||
return value; | |||
} | |||
if (typeof value === 'object' && typeof value.$numberDecimal === 'string') { | |||
return Decimal128Type.fromString(value.$numberDecimal); | |||
} | |||
if (value instanceof Decimal128Type) { | |||
return value; | |||
} | |||
if (typeof value === 'string') { | |||
return Decimal128Type.fromString(value); | |||
} | |||
if (Buffer.isBuffer(value)) { | |||
return new Decimal128Type(value); | |||
} | |||
if (typeof value === 'number') { | |||
return Decimal128Type.fromString(String(value)); | |||
} | |||
if (typeof value.valueOf === 'function' && typeof value.valueOf() === 'string') { | |||
return Decimal128Type.fromString(value.valueOf()); | |||
} | |||
assert.ok(false); | |||
}; |
@@ -1,45 +0,0 @@ | |||
'use strict'; | |||
const assert = require('assert'); | |||
/*!
 * Given a value, cast it to a number, or throw an `AssertionError` if the
 * value cannot be casted. `null` and `undefined` are considered valid.
 *
 * @param {Any} val
 * @return {Number|null|undefined}
 * @throws {AssertionError} if `val` is not castable to a number
 * @api private
 */
function castNumber(val) {
  // null/undefined are valid "no value" inputs. This check must run before
  // the NaN assertion: `isNaN(undefined)` is true, so the previous order
  // threw on `undefined` even though the contract allows it.
  if (val == null) {
    return val;
  }
  // Empty string (empty form input) casts to `null`.
  if (val === '') {
    return null;
  }
  assert.ok(!isNaN(val));
  if (typeof val === 'string' || typeof val === 'boolean') {
    val = Number(val);
  }
  assert.ok(!isNaN(val));
  if (val instanceof Number) {
    return val;
  }
  if (typeof val === 'number') {
    return val;
  }
  if (!Array.isArray(val) && typeof val.valueOf === 'function') {
    return Number(val.valueOf());
  }
  // NOTE(review): returns a boxed Number here (original behavior kept for
  // backward compatibility); plain `Number(val)` would be more idiomatic.
  if (val.toString && !Array.isArray(val) && val.toString() == Number(val)) {
    return new Number(val);
  }
  assert.ok(false);
}
// Guarded so the module also loads in non-CommonJS environments (e.g. test
// bundles); under Node CJS this is identical to a bare assignment.
if (typeof module !== 'undefined') {
  module.exports = castNumber;
}
@@ -1,29 +0,0 @@ | |||
'use strict'; | |||
const ObjectId = require('../driver').get().ObjectId; | |||
const assert = require('assert'); | |||
module.exports = function castObjectId(value) { | |||
if (value == null) { | |||
return value; | |||
} | |||
if (value instanceof ObjectId) { | |||
return value; | |||
} | |||
if (value._id) { | |||
if (value._id instanceof ObjectId) { | |||
return value._id; | |||
} | |||
if (value._id.toString instanceof Function) { | |||
return new ObjectId(value._id.toString()); | |||
} | |||
} | |||
if (value.toString instanceof Function) { | |||
return new ObjectId(value.toString()); | |||
} | |||
assert.ok(false); | |||
}; |
@@ -1,35 +0,0 @@ | |||
'use strict'; | |||
const CastError = require('../error/cast'); | |||
/*! | |||
* Given a value, cast it to a string, or throw a `CastError` if the value | |||
* cannot be casted. `null` and `undefined` are considered valid. | |||
* | |||
* @param {Any} value | |||
* @param {String} [path] optional the path to set on the CastError | |||
* @return {string|null|undefined} | |||
* @throws {CastError} | |||
* @api private | |||
*/ | |||
module.exports = function castString(value, path) { | |||
// If null or undefined | |||
if (value == null) { | |||
return value; | |||
} | |||
// handle documents being passed | |||
if (value._id && typeof value._id === 'string') { | |||
return value._id; | |||
} | |||
// Re: gh-647 and gh-3030, we're ok with casting using `toString()` | |||
// **unless** its the default Object.toString, because "[object Object]" | |||
// doesn't really qualify as useful data | |||
if (value.toString && value.toString !== Object.prototype.toString) { | |||
return value.toString(); | |||
} | |||
throw new CastError('string', value, path); | |||
}; |
@@ -1,269 +0,0 @@ | |||
'use strict'; | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
const EventEmitter = require('events').EventEmitter; | |||
const STATES = require('./connectionstate'); | |||
const immediate = require('./helpers/immediate'); | |||
/**
 * Abstract Collection constructor
 *
 * This is the base class that drivers inherit from and implement.
 *
 * @param {String} name name of the collection
 * @param {Connection} conn A MongooseConnection instance
 * @param {Object} [opts] optional collection options
 * @api public
 */
function Collection(name, conn, opts) {
  opts = opts === undefined ? {} : opts;
  if (opts.capped === undefined) {
    opts.capped = {};
  }
  // Buffer commands by default until the connection is open.
  opts.bufferCommands = opts.bufferCommands === undefined
    ? true
    : opts.bufferCommands;
  // A numeric `capped` option is shorthand for `{ size: <number> }`.
  if (typeof opts.capped === 'number') {
    opts.capped = { size: opts.capped };
  }
  this.opts = opts;
  this.name = name;
  this.collectionName = name;
  this.conn = conn;
  this.queue = [];
  this.buffer = this.opts.bufferCommands;
  this.emitter = new EventEmitter();
  // Nothing to buffer if the connection is already open.
  if (this.conn.readyState === STATES.connected) {
    this.onOpen();
  }
}
/**
 * The collection name. This bare statement has no runtime effect; it exists
 * so documentation tooling picks up the property.
 *
 * @api public
 * @property name
 */
Collection.prototype.name;
/**
 * The collection name (same value as `name`).
 *
 * @api public
 * @property collectionName
 */
Collection.prototype.collectionName;
/**
 * The Connection instance this collection belongs to.
 *
 * @api public
 * @property conn
 */
Collection.prototype.conn;
/**
 * Called when the database connects: stop buffering and flush any queued
 * operations on the next tick.
 *
 * @api private
 */
Collection.prototype.onOpen = function() {
  this.buffer = false;
  immediate(() => {
    this.doQueue();
  });
};
/**
 * Called when the database disconnects. Unless the close was forced, resume
 * buffering commands (when buffering is enabled).
 *
 * @api private
 */
Collection.prototype.onClose = function(force) {
  if (!force && this.opts.bufferCommands) {
    this.buffer = true;
  }
};
/**
 * Queues a method for later execution when its
 * database connection opens.
 *
 * @param {String} name name of the method to queue
 * @param {Array} args arguments to pass to the method when executed
 * @return {Collection} this
 * @api private
 */
Collection.prototype.addQueue = function(name, args) {
  this.queue.push([name, args]);
  return this;
};
/**
 * Executes all queued methods and clears the queue.
 *
 * Iterates over a snapshot length: entries queued while draining are
 * discarded along with the rest of the queue when it is replaced.
 *
 * @return {Collection} this
 * @api private
 */
Collection.prototype.doQueue = function() {
  const pending = this.queue;
  const total = pending.length;
  for (let i = 0; i < total; i++) {
    const entry = pending[i];
    if (typeof entry[0] === 'function') {
      entry[0].apply(this, entry[1]);
    } else {
      this[entry[0]].apply(this, entry[1]);
    }
  }
  this.queue = [];
  process.nextTick(() => {
    this.emitter.emit('queue');
  });
  return this;
};
/**
 * Abstract method that drivers must implement (`ensureIndex`).
 */
Collection.prototype.ensureIndex = function() {
  throw new Error('Collection#ensureIndex unimplemented by driver');
};
/**
 * Abstract method that drivers must implement (`createIndex`).
 */
Collection.prototype.createIndex = function() {
  // Fixed copy-paste: this message previously named `ensureIndex`.
  throw new Error('Collection#createIndex unimplemented by driver');
};
/**
 * Abstract method that drivers must implement (`findAndModify`).
 */
Collection.prototype.findAndModify = function() {
  throw new Error('Collection#findAndModify unimplemented by driver');
};
/**
 * Abstract method that drivers must implement (`findOneAndUpdate`).
 */
Collection.prototype.findOneAndUpdate = function() {
  throw new Error('Collection#findOneAndUpdate unimplemented by driver');
};
/**
 * Abstract method that drivers must implement (`findOneAndDelete`).
 */
Collection.prototype.findOneAndDelete = function() {
  throw new Error('Collection#findOneAndDelete unimplemented by driver');
};
/**
 * Abstract method that drivers must implement (`findOneAndReplace`).
 */
Collection.prototype.findOneAndReplace = function() {
  throw new Error('Collection#findOneAndReplace unimplemented by driver');
};
/**
 * Abstract method that drivers must implement (`findOne`).
 */
Collection.prototype.findOne = function() {
  throw new Error('Collection#findOne unimplemented by driver');
};
/**
 * Abstract method that drivers must implement (`find`).
 */
Collection.prototype.find = function() {
  throw new Error('Collection#find unimplemented by driver');
};
/**
 * Abstract method that drivers must implement (`insert`).
 */
Collection.prototype.insert = function() {
  throw new Error('Collection#insert unimplemented by driver');
};
/**
 * Abstract method that drivers must implement (`insertOne`).
 */
Collection.prototype.insertOne = function() {
  throw new Error('Collection#insertOne unimplemented by driver');
};
/**
 * Abstract method that drivers must implement (`insertMany`).
 */
Collection.prototype.insertMany = function() {
  throw new Error('Collection#insertMany unimplemented by driver');
};
/**
 * Abstract method that drivers must implement (`save`).
 */
Collection.prototype.save = function() {
  throw new Error('Collection#save unimplemented by driver');
};
/**
 * Abstract method that drivers must implement (`update`).
 */
Collection.prototype.update = function() {
  throw new Error('Collection#update unimplemented by driver');
};
/**
 * Abstract method that drivers must implement (`getIndexes`).
 */
Collection.prototype.getIndexes = function() {
  throw new Error('Collection#getIndexes unimplemented by driver');
};
/**
 * Abstract method that drivers must implement (`mapReduce`).
 */
Collection.prototype.mapReduce = function() {
  throw new Error('Collection#mapReduce unimplemented by driver');
};
/**
 * Abstract method that drivers must implement (`watch`).
 */
Collection.prototype.watch = function() {
  throw new Error('Collection#watch unimplemented by driver');
};
/*!
 * Module exports.
 */
module.exports = Collection;
@@ -1,954 +0,0 @@ | |||
'use strict'; | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
const EventEmitter = require('events').EventEmitter; | |||
const Schema = require('./schema'); | |||
const Collection = require('./driver').get().Collection; | |||
const STATES = require('./connectionstate'); | |||
const MongooseError = require('./error'); | |||
const PromiseProvider = require('./promise_provider'); | |||
const get = require('./helpers/get'); | |||
const mongodb = require('mongodb'); | |||
const utils = require('./utils'); | |||
const parseConnectionString = require('mongodb-core').parseConnectionString; | |||
/*!
 * A list of authentication mechanisms that don't require a password for
 * authentication (e.g. X.509 authenticates with a client certificate).
 * This is used by the authMechanismDoesNotRequirePassword method.
 *
 * @api private
 */
const noPasswordAuthMechanisms = [
  'MONGODB-X509'
];
/** | |||
* Connection constructor | |||
* | |||
* For practical reasons, a Connection equals a Db. | |||
* | |||
* @param {Mongoose} base a mongoose instance | |||
* @inherits NodeJS EventEmitter http://nodejs.org/api/events.html#events_class_events_eventemitter | |||
* @event `connecting`: Emitted when `connection.openUri()` is executed on this connection. | |||
* @event `connected`: Emitted when this connection successfully connects to the db. May be emitted _multiple_ times in `reconnected` scenarios. | |||
* @event `open`: Emitted after we `connected` and `onOpen` is executed on all of this connections models. | |||
* @event `disconnecting`: Emitted when `connection.close()` was executed. | |||
* @event `disconnected`: Emitted after getting disconnected from the db. | |||
* @event `close`: Emitted after we `disconnected` and `onClose` executed on all of this connections models. | |||
* @event `reconnected`: Emitted after we `connected` and subsequently `disconnected`, followed by successfully another successfull connection. | |||
* @event `error`: Emitted when an error occurs on this connection. | |||
* @event `fullsetup`: Emitted in a replica-set scenario, when primary and at least one seconaries specified in the connection string are connected. | |||
* @event `all`: Emitted in a replica-set scenario, when all nodes specified in the connection string are connected. | |||
* @api public | |||
*/ | |||
function Connection(base) {
  this.base = base;

  // Per-connection registries of collections and compiled models.
  this.collections = {};
  this.models = {};

  // Connection-level configuration and driver options.
  this.config = {autoIndex: true};
  this.replica = false;
  this.options = null;

  this.otherDbs = []; // FIXME: To be replaced with relatedDbs
  this.relatedDbs = {}; // Hashmap of other dbs that share underlying connection

  // Ready-state bookkeeping; see the `readyState` accessor.
  this.states = STATES;
  this._readyState = STATES.disconnected;
  this._closeCalled = false;
  this._hasOpened = false;

  // Unbounded internal emitter for cross-module notifications
  // (e.g. 'dropDatabase').
  this.$internalEmitter = new EventEmitter();
  this.$internalEmitter.setMaxListeners(0);
}
/*!
 * Inherit from EventEmitter
 */

// Use Object.setPrototypeOf() instead of assigning to the legacy
// `__proto__` accessor: identical effect, without the deprecated,
// deoptimizing mutation.
Object.setPrototypeOf(Connection.prototype, EventEmitter.prototype);
/** | |||
* Connection ready state | |||
* | |||
* - 0 = disconnected | |||
* - 1 = connected | |||
* - 2 = connecting | |||
* - 3 = disconnecting | |||
* | |||
* Each state change emits its associated event name. | |||
* | |||
* ####Example | |||
* | |||
* conn.on('connected', callback); | |||
* conn.on('disconnected', callback); | |||
* | |||
* @property readyState | |||
* @memberOf Connection | |||
* @instance | |||
* @api public | |||
*/ | |||
Object.defineProperty(Connection.prototype, 'readyState', {
  get: function() {
    return this._readyState;
  },
  set: function(nextState) {
    if (!(nextState in STATES)) {
      throw new Error('Invalid connection state: ' + nextState);
    }

    if (this._readyState === nextState) {
      return; // no transition, nothing to propagate
    }

    this._readyState = nextState;

    // [legacy] propagate the new state to every otherDb on this connection
    for (const legacyDb of this.otherDbs) {
      legacyDb.readyState = nextState;
    }

    // propagate to relatedDbs sharing the underlying connection
    for (const key of Object.keys(this.relatedDbs)) {
      this.relatedDbs[key].readyState = nextState;
    }

    if (STATES.connected === nextState) {
      this._hasOpened = true;
    }

    // each state change emits its associated event name
    this.emit(STATES[nextState]);
  }
});
/**
 * A hash of the collections associated with this connection
 *
 * @property collections
 * @memberOf Connection
 * @instance
 * @api public
 */

// Intentional no-op expression: declares the property for doc tooling only.
Connection.prototype.collections;

/**
 * The name of the database this connection points to.
 *
 * ####Example
 *
 *     mongoose.createConnection('mongodb://localhost:27017/mydb').name; // "mydb"
 *
 * @property name
 * @memberOf Connection
 * @instance
 * @api public
 */

// Intentional no-op expression: declares the property for doc tooling only.
// The actual value is assigned in `openUri()` from the parsed URI.
Connection.prototype.name;
/**
 * The host name portion of the URI. If multiple hosts, such as a replica set,
 * this will contain the first host name in the URI
 *
 * ####Example
 *
 *     mongoose.createConnection('mongodb://localhost:27017/mydb').host; // "localhost"
 *
 * @property host
 * @memberOf Connection
 * @instance
 * @api public
 */

// Writable data property; assigned in `openUri()` from the parsed URI.
Object.defineProperty(Connection.prototype, 'host', {
  configurable: true,
  enumerable: true,
  writable: true
});

/**
 * The port portion of the URI. If multiple hosts, such as a replica set,
 * this will contain the port from the first host name in the URI.
 *
 * ####Example
 *
 *     mongoose.createConnection('mongodb://localhost:27017/mydb').port; // 27017
 *
 * @property port
 * @memberOf Connection
 * @instance
 * @api public
 */

// Writable data property; assigned in `openUri()` from the parsed URI.
Object.defineProperty(Connection.prototype, 'port', {
  configurable: true,
  enumerable: true,
  writable: true
});

/**
 * The username specified in the URI
 *
 * ####Example
 *
 *     mongoose.createConnection('mongodb://val:psw@localhost:27017/mydb').user; // "val"
 *
 * @property user
 * @memberOf Connection
 * @instance
 * @api public
 */

// Writable data property; assigned in `openUri()` from `options.user` or
// the parsed URI's auth section.
Object.defineProperty(Connection.prototype, 'user', {
  configurable: true,
  enumerable: true,
  writable: true
});

/**
 * The password specified in the URI
 *
 * ####Example
 *
 *     mongoose.createConnection('mongodb://val:psw@localhost:27017/mydb').pass; // "psw"
 *
 * @property pass
 * @memberOf Connection
 * @instance
 * @api public
 */

// Writable data property; assigned in `openUri()` from `options.pass` or
// the parsed URI's auth section.
Object.defineProperty(Connection.prototype, 'pass', {
  configurable: true,
  enumerable: true,
  writable: true
});
/**
 * The mongodb.Db instance, set when the connection is opened
 *
 * @property db
 * @memberOf Connection
 * @instance
 * @api public
 */

// Intentional no-op expression: declares the property for doc tooling only.
// `openUri()` assigns the driver Db here once connected.
Connection.prototype.db;

/**
 * A hash of the global options that are associated with this connection
 *
 * @property config
 * @memberOf Connection
 * @instance
 * @api public
 */

// Intentional no-op expression: declares the property for doc tooling only.
// Initialized in the constructor to `{autoIndex: true}`.
Connection.prototype.config;
/** | |||
* Helper for `createCollection()`. Will explicitly create the given collection | |||
* with specified options. Used to create [capped collections](https://docs.mongodb.com/manual/core/capped-collections/) | |||
* and [views](https://docs.mongodb.com/manual/core/views/) from mongoose. | |||
* | |||
* Options are passed down without modification to the [MongoDB driver's `createCollection()` function](http://mongodb.github.io/node-mongodb-native/2.2/api/Db.html#createCollection) | |||
* | |||
* @method createCollection | |||
* @param {string} collection The collection to create | |||
* @param {Object} [options] see [MongoDB driver docs](http://mongodb.github.io/node-mongodb-native/2.2/api/Db.html#createCollection) | |||
* @param {Function} [callback] | |||
* @return {Promise} | |||
* @api public | |||
*/ | |||
Connection.prototype.createCollection = _wrapConnHelper(function createCollection(collection, opts, cb) {
  // Allow the two-argument form `createCollection(name, callback)`.
  if (typeof opts === 'function') {
    cb = opts;
    opts = {};
  }
  // Options are passed through to the driver unmodified.
  this.db.createCollection(collection, opts, cb);
});
/** | |||
* _Requires MongoDB >= 3.6.0._ Starts a [MongoDB session](https://docs.mongodb.com/manual/release-notes/3.6/#client-sessions) | |||
* for benefits like causal consistency, [retryable writes](https://docs.mongodb.com/manual/core/retryable-writes/), | |||
* and [transactions](http://thecodebarbarian.com/a-node-js-perspective-on-mongodb-4-transactions.html). | |||
* | |||
* ####Example: | |||
* | |||
* const session = await conn.startSession(); | |||
* let doc = await Person.findOne({ name: 'Ned Stark' }, null, { session }); | |||
* await doc.remove(); | |||
* // `doc` will always be null, even if reading from a replica set | |||
* // secondary. Without causal consistency, it is possible to | |||
* // get a doc back from the below query if the query reads from a | |||
* // secondary that is experiencing replication lag. | |||
* doc = await Person.findOne({ name: 'Ned Stark' }, null, { session, readPreference: 'secondary' }); | |||
* | |||
* | |||
* @method startSession | |||
* @param {Object} [options] see the [mongodb driver options](http://mongodb.github.io/node-mongodb-native/3.0/api/MongoClient.html#startSession) | |||
* @param {Boolean} [options.causalConsistency=true] set to false to disable causal consistency | |||
* @param {Function} [callback] | |||
* @return {Promise<ClientSession>} promise that resolves to a MongoDB driver `ClientSession` | |||
* @api public | |||
*/ | |||
Connection.prototype.startSession = _wrapConnHelper(function startSession(opts, cb) {
  // Allow the one-argument form `startSession(callback)`.
  if (typeof opts === 'function') {
    cb = opts;
    opts = null;
  }
  // Session creation is synchronous on the driver client; report it via cb.
  cb(null, this.client.startSession(opts));
});
/** | |||
* Helper for `dropCollection()`. Will delete the given collection, including | |||
* all documents and indexes. | |||
* | |||
* @method dropCollection | |||
* @param {string} collection The collection to delete | |||
* @param {Function} [callback] | |||
* @return {Promise} | |||
* @api public | |||
*/ | |||
Connection.prototype.dropCollection = _wrapConnHelper(function dropCollection(name, cb) {
  // Delegate straight to the driver; this removes documents and indexes.
  this.db.dropCollection(name, cb);
});
/** | |||
* Helper for `dropDatabase()`. Deletes the given database, including all | |||
* collections, documents, and indexes. | |||
* | |||
* @method dropDatabase | |||
* @param {Function} [callback] | |||
* @return {Promise} | |||
* @api public | |||
*/ | |||
Connection.prototype.dropDatabase = _wrapConnHelper(function dropDatabase(cb) {
  // Let interested internals know first, then ask the driver to drop.
  this.$internalEmitter.emit('dropDatabase');
  this.db.dropDatabase(cb);
});
/*!
 * Wraps a connection helper so that it (1) supports both callback and
 * promise calling styles via `utils.promiseOrCallback`, and (2) defers
 * execution until the connection's 'open' event when invoked before the
 * connection is ready.
 */

function _wrapConnHelper(fn) {
  return function() {
    const args = Array.prototype.slice.call(arguments);
    // Mirror the original contract: whatever the last argument is gets
    // handed to promiseOrCallback, which decides whether it is a callback.
    const maybeCb = args.length > 0 ? args[args.length - 1] : null;
    const helperArgs = typeof maybeCb === 'function' ?
      args.slice(0, args.length - 1) :
      args;

    return utils.promiseOrCallback(maybeCb, cb => {
      const run = () => fn.apply(this, helperArgs.concat([cb]));
      if (this.readyState !== STATES.connected) {
        // Not connected yet: queue until 'open' fires.
        this.once('open', run);
      } else {
        run();
      }
    });
  };
}
/**
 * error
 *
 * Graceful error handling: hands the error to `callback` when one is
 * supplied; otherwise emits 'error' (only if someone is listening) and
 * returns a rejected promise.
 *
 * @param {Error} err
 * @param {Function} callback optional
 * @api private
 */

Connection.prototype.error = function(err, callback) {
  if (callback) {
    callback(err);
    return null;
  }

  // Emit only when a listener exists, to avoid Node's crash-on-unhandled
  // 'error' event semantics.
  if (this.listeners('error').length > 0) {
    this.emit('error', err);
  }

  return Promise.reject(err);
};
/**
 * Called when the connection is opened: flips the ready state, notifies
 * every cached collection, and emits 'open'.
 *
 * @api private
 */

Connection.prototype.onOpen = function() {
  this.readyState = STATES.connected;

  // Notify collections directly instead of having each one subscribe to
  // our emitter (prevents the 0.3-era max-listener warning).
  for (const name of Object.keys(this.collections)) {
    this.collections[name].onOpen();
  }

  this.emit('open');
};
/**
 * Opens the connection with a URI using `MongoClient.connect()`.
 *
 * Normalizes mongoose-only options off of `options`, parses the URI (to
 * populate `name`/`host`/`port`/`user`/`pass`), connects the driver client,
 * and wires driver topology events through to this connection.
 *
 * @param {String} uri The URI to connect with.
 * @param {Object} [options] Passed on to http://mongodb.github.io/node-mongodb-native/2.2/api/MongoClient.html#connect
 * @param {Function} [callback]
 * @returns {Connection} this
 * @api private
 */

Connection.prototype.openUri = function(uri, options, callback) {
  this.readyState = STATES.connecting;
  this._closeCalled = false;

  // `openUri(uri, callback)` shorthand.
  if (typeof options === 'function') {
    callback = options;
    options = null;
  }

  // Reject the legacy mongoose 4.x `(host, dbname, port)` signature early.
  if (['string', 'number'].indexOf(typeof options) !== -1) {
    throw new MongooseError('Mongoose 5.x no longer supports ' +
      '`mongoose.connect(host, dbname, port)` or ' +
      '`mongoose.createConnection(host, dbname, port)`. See ' +
      'http://mongoosejs.com/docs/connections.html for supported connection syntax');
  }

  if (typeof uri !== 'string') {
    throw new MongooseError('The `uri` parameter to `openUri()` must be a ' +
      `string, got "${typeof uri}". Make sure the first parameter to ` +
      '`mongoose.connect()` or `mongoose.createConnection()` is a string.');
  }

  const Promise = PromiseProvider.get();
  const _this = this;

  if (options) {
    options = utils.clone(options);

    // `autoIndex` may arrive either at the top level or under `config`.
    const autoIndex = options.config && options.config.autoIndex != null ?
      options.config.autoIndex :
      options.autoIndex;
    if (autoIndex != null) {
      this.config.autoIndex = autoIndex !== false;
      delete options.config;
      delete options.autoIndex;
    }

    // Mongoose-only flags: record on `this.config` and strip them so they
    // are never forwarded to the MongoDB driver.
    if ('autoCreate' in options) {
      this.config.autoCreate = !!options.autoCreate;
      delete options.autoCreate;
    }
    if ('useCreateIndex' in options) {
      this.config.useCreateIndex = !!options.useCreateIndex;
      delete options.useCreateIndex;
    }
    if ('useFindAndModify' in options) {
      this.config.useFindAndModify = !!options.useFindAndModify;
      delete options.useFindAndModify;
    }

    // Backwards compat: top-level user/pass are moved under `auth`.
    if (options.user || options.pass) {
      options.auth = options.auth || {};
      options.auth.user = options.user;
      options.auth.password = options.pass;
      this.user = options.user;
      this.pass = options.pass;
    }
    delete options.user;
    delete options.pass;

    if (options.bufferCommands != null) {
      // Mongoose buffers commands itself; disable driver-level buffering.
      options.bufferMaxEntries = 0;
      this.config.bufferCommands = options.bufferCommands;
      delete options.bufferCommands;
    }
    if (options.useMongoClient != null) {
      // Obsolete mongoose 4.x option: warn and strip it.
      handleUseMongoClient(options);
    }
  } else {
    options = {};
  }

  this._connectionOptions = options;
  const dbName = options.dbName;
  if (dbName != null) {
    this.$dbName = dbName;
  }
  delete options.dbName;

  if (!('promiseLibrary' in options)) {
    options.promiseLibrary = PromiseProvider.get();
  }
  if (!('useNewUrlParser' in options)) {
    // Fall back to the instance-wide setting, defaulting to false.
    if ('useNewUrlParser' in this.base.options) {
      options.useNewUrlParser = this.base.options.useNewUrlParser;
    } else {
      options.useNewUrlParser = false;
    }
  }

  // Parse the URI in parallel with connecting so that `name`, `host`,
  // `port`, `user`, and `pass` are populated on this connection.
  const parsePromise = new Promise((resolve, reject) => {
    parseConnectionString(uri, options, (err, parsed) => {
      if (err) {
        return reject(err);
      }
      this.name = dbName != null ? dbName : get(parsed, 'auth.db', null);
      this.host = get(parsed, 'hosts.0.host', 'localhost');
      this.port = get(parsed, 'hosts.0.port', 27017);
      this.user = this.user || get(parsed, 'auth.username');
      this.pass = this.pass || get(parsed, 'auth.password');
      resolve();
    });
  });

  const promise = new Promise((resolve, reject) => {
    const client = new mongodb.MongoClient(uri, options);
    _this.client = client;
    client.connect(function(error) {
      if (error) {
        _this.readyState = STATES.disconnected;
        return reject(error);
      }
      // `options.dbName` wins over the db embedded in the URI.
      const db = dbName != null ? client.db(dbName) : client.db();
      _this.db = db;

      // Backwards compat for mongoose 4.x
      db.on('reconnect', function() {
        _this.readyState = STATES.connected;
        _this.emit('reconnect');
        _this.emit('reconnected');
      });
      // Forward driver topology events to this connection's emitter.
      db.s.topology.on('reconnectFailed', function() {
        _this.emit('reconnectFailed');
      });
      db.s.topology.on('left', function(data) {
        _this.emit('left', data);
      });
      db.s.topology.on('joined', function(data) {
        _this.emit('joined', data);
      });
      db.s.topology.on('fullsetup', function(data) {
        _this.emit('fullsetup', data);
      });
      db.on('close', function() {
        // Implicitly emits 'disconnected'
        _this.readyState = STATES.disconnected;
      });
      client.on('left', function() {
        // A replica set without a primary counts as disconnected.
        if (_this.readyState === STATES.connected &&
            get(db, 's.topology.s.coreTopology.s.replicaSetState.topologyType') === 'ReplicaSetNoPrimary') {
          _this.readyState = STATES.disconnected;
        }
      });
      db.on('timeout', function() {
        _this.emit('timeout');
      });

      // Remove the thenable shim (installed below) now that we're connected.
      delete _this.then;
      delete _this.catch;
      _this.readyState = STATES.connected;

      for (const i in _this.collections) {
        if (utils.object.hasOwnProperty(_this.collections, i)) {
          _this.collections[i].onOpen();
        }
      }

      resolve(_this);
      _this.emit('open');
    });
  });

  this.$initialConnection = Promise.all([promise, parsePromise]).
    then(res => res[0]).
    catch(err => {
      // If an 'error' listener exists, report through it instead of
      // rejecting $initialConnection.
      if (this.listeners('error').length > 0) {
        process.nextTick(() => this.emit('error', err));
        return;
      }
      throw err;
    });

  // Make the connection itself awaitable until the initial connect settles.
  this.then = function(resolve, reject) {
    return this.$initialConnection.then(resolve, reject);
  };
  this.catch = function(reject) {
    return this.$initialConnection.catch(reject);
  };

  if (callback != null) {
    this.$initialConnection = this.$initialConnection.then(
      () => callback(null, this),
      err => callback(err)
    );
  }

  return this;
};
/*!
 * Warns that the mongoose 4.x `useMongoClient` option is obsolete in 5.x,
 * prints the caller's stack trace to help locate the offending call, and
 * strips the option so it is never forwarded to the driver.
 */

const handleUseMongoClient = function handleUseMongoClient(options) {
  console.warn('WARNING: The `useMongoClient` option is no longer ' +
    'necessary in mongoose 5.x, please remove it.');
  const stack = new Error().stack;
  // Drop the synthetic first line ("Error") so only the frames print.
  // `slice()` replaces the legacy, deprecated `String#substr()`.
  console.warn(stack.slice(stack.indexOf('\n') + 1));
  delete options.useMongoClient;
};
/**
 * Closes the connection
 *
 * @param {Boolean} [force] optional
 * @param {Function} [callback] optional
 * @return {Connection} self
 * @api public
 */

Connection.prototype.close = function(force, callback) {
  // `close(cb)` shorthand: the first argument is actually the callback.
  if (typeof force === 'function') {
    callback = force;
    force = false;
  }

  this.$wasForceClosed = !!force;

  return utils.promiseOrCallback(callback, cb => {
    this._close(force, cb);
  });
};
/**
 * Handles closing the connection, dispatching on the current ready state:
 * already disconnected → done; connected → drive the actual close;
 * connecting → retry once open; disconnecting → wait for 'close'.
 *
 * @param {Boolean} force
 * @param {Function} callback
 * @api private
 */

Connection.prototype._close = function(force, callback) {
  const _this = this;
  this._closeCalled = true;

  switch (this.readyState) {
    case 0: // disconnected: nothing to do
      callback();
      break;

    case 1: // connected
      this.readyState = STATES.disconnecting;
      this.doClose(force, function(err) {
        if (err) {
          return callback(err);
        }
        _this.onClose(force);
        callback(null);
      });
      break;

    case 2: // connecting: retry once the connection is open.
      // Bug fix: forward `force`; the previous code called
      // `close(callback)`, silently dropping the force flag.
      this.once('open', function() {
        _this.close(force, callback);
      });
      break;

    case 3: // disconnecting: piggyback on the in-flight close
      this.once('close', function() {
        callback();
      });
      break;
  }

  return this;
};
/**
 * Called when the connection closes: flips the ready state, notifies every
 * cached collection, and emits 'close'.
 *
 * @api private
 */

Connection.prototype.onClose = function(force) {
  this.readyState = STATES.disconnected;

  // Notify collections directly instead of having each one subscribe to
  // our emitter (prevents the 0.3-era max-listener warning).
  for (const name of Object.keys(this.collections)) {
    this.collections[name].onClose(force);
  }

  this.emit('close', force);
};
/**
 * Retrieves a collection by name, creating and caching it on first use.
 *
 * Not typically needed by applications. Just talk to your collection
 * through your model.
 *
 * @param {String} name of the collection
 * @param {Object} [options] optional collection options
 * @return {Collection} collection instance
 * @api public
 */

Connection.prototype.collection = function(name, options) {
  // Clone caller-supplied options so we never mutate their object.
  const opts = options ? utils.clone(options) : {};
  opts.$wasForceClosed = this.$wasForceClosed;

  if (!(name in this.collections)) {
    this.collections[name] = new Collection(name, this, opts);
  }
  return this.collections[name];
};
/** | |||
* Defines or retrieves a model. | |||
* | |||
* var mongoose = require('mongoose'); | |||
* var db = mongoose.createConnection(..); | |||
* db.model('Venue', new Schema(..)); | |||
* var Ticket = db.model('Ticket', new Schema(..)); | |||
* var Venue = db.model('Venue'); | |||
* | |||
* _When no `collection` argument is passed, Mongoose produces a collection name by passing the model `name` to the [utils.toCollectionName](#utils_exports.toCollectionName) method. This method pluralizes the name. If you don't like this behavior, either pass a collection name or set your schemas collection name option._ | |||
* | |||
* ####Example: | |||
* | |||
* var schema = new Schema({ name: String }, { collection: 'actor' }); | |||
* | |||
* // or | |||
* | |||
* schema.set('collection', 'actor'); | |||
* | |||
* // or | |||
* | |||
* var collectionName = 'actor' | |||
* var M = conn.model('Actor', schema, collectionName) | |||
* | |||
* @param {String|Function} name the model name or class extending Model | |||
* @param {Schema} [schema] a schema. necessary when defining a model | |||
* @param {String} [collection] name of mongodb collection (optional) if not given it will be induced from model name | |||
* @see Mongoose#model #index_Mongoose-model | |||
* @return {Model} The compiled model | |||
* @api public | |||
*/ | |||
Connection.prototype.model = function(name, schema, collection) {
  if (!(this instanceof Connection)) {
    throw new MongooseError('`connection.model()` should not be run with ' +
      '`new`. If you are doing `new db.model(foo)(bar)`, use ' +
      '`db.model(foo)(bar)` instead');
  }

  let fn;
  // Support passing a class extending Model as the first argument; its
  // name becomes the model name.
  if (typeof name === 'function') {
    fn = name;
    name = fn.name;
  }

  // collection name discovery: `model(name, 'collectionName')` shorthand
  if (typeof schema === 'string') {
    collection = schema;
    schema = false;
  }

  // A POJO schema definition is promoted to a real Schema instance.
  if (utils.isObject(schema) && !schema.instanceOfSchema) {
    schema = new Schema(schema);
  }
  if (schema && !schema.instanceOfSchema) {
    throw new Error('The 2nd parameter to `mongoose.model()` should be a ' +
      'schema or a POJO');
  }

  if (this.models[name] && !collection) {
    // model exists but we are not subclassing with custom collection
    if (schema && schema.instanceOfSchema && schema !== this.models[name].schema) {
      throw new MongooseError.OverwriteModelError(name);
    }
    return this.models[name];
  }

  const opts = {cache: false, connection: this};
  let model;

  if (schema && schema.instanceOfSchema) {
    // compile a model
    model = this.base.model(fn || name, schema, collection, opts);

    // only the first model with this name is cached to allow
    // for one-offs with custom collection names etc.
    if (!this.models[name]) {
      this.models[name] = model;
    }

    // Errors handled internally, so safe to ignore error
    model.init(function $modelInitNoop() {});

    return model;
  }

  if (this.models[name] && collection) {
    // subclassing current model with alternate collection
    model = this.models[name];
    schema = model.prototype.schema;
    const sub = model.__subclass(this, schema, collection);
    // do not cache the sub model
    return sub;
  }

  // lookup model in mongoose module
  model = this.base.models[name];

  if (!model) {
    throw new MongooseError.MissingSchemaError(name);
  }

  if (this === model.prototype.db
      && (!collection || collection === model.collection.name)) {
    // model already uses this connection.
    // only the first model with this name is cached to allow
    // for one-offs with custom collection names etc.
    if (!this.models[name]) {
      this.models[name] = model;
    }

    return model;
  }

  // Model registered on a different connection: bind a subclass of it to
  // this connection and cache that.
  this.models[name] = model.__subclass(this, schema, collection);
  return this.models[name];
};
/** | |||
* Removes the model named `name` from this connection, if it exists. You can | |||
* use this function to clean up any models you created in your tests to | |||
* prevent OverwriteModelErrors. | |||
* | |||
* ####Example: | |||
* | |||
* conn.model('User', new Schema({ name: String })); | |||
* console.log(conn.model('User')); // Model object | |||
* conn.deleteModel('User'); | |||
* console.log(conn.model('User')); // undefined | |||
* | |||
* // Usually useful in a Mocha `afterEach()` hook | |||
* afterEach(function() { | |||
* conn.deleteModel(/.+/); // Delete every model | |||
* }); | |||
* | |||
* @api public | |||
* @param {String|RegExp} name if string, the name of the model to remove. If regexp, removes all models whose name matches the regexp. | |||
* @return {Connection} this | |||
*/ | |||
Connection.prototype.deleteModel = function(name) {
  if (typeof name === 'string') {
    const model = this.model(name);
    if (model == null) {
      return this;
    }
    // Drop every cache entry tied to this model.
    delete this.models[name];
    delete this.collections[model.collection.name];
    delete this.base.modelSchemas[name];
    return this;
  }

  if (name instanceof RegExp) {
    // Recurse for every registered model name matching the pattern.
    for (const modelName of this.modelNames()) {
      if (name.test(modelName)) {
        this.deleteModel(modelName);
      }
    }
    return this;
  }

  throw new Error('First parameter to `deleteModel()` must be a string ' +
    'or regexp, got "' + name + '"');
};
/**
 * Returns an array of model names created on this connection.
 * @api public
 * @return {Array}
 */

Connection.prototype.modelNames = function() {
  // The keys of the registry are exactly the registered model names.
  return Object.keys(this.models);
};
/** | |||
* @brief Returns if the connection requires authentication after it is opened. Generally if a | |||
* username and password are both provided than authentication is needed, but in some cases a | |||
* password is not required. | |||
* @api private | |||
* @return {Boolean} true if the connection should be authenticated after it is opened, otherwise false. | |||
*/ | |||
Connection.prototype.shouldAuthenticate = function() {
  // No user means no authentication, full stop.
  if (this.user == null) {
    return false;
  }
  // A password may be omitted for mechanisms like MONGODB-X509.
  return this.pass != null || this.authMechanismDoesNotRequirePassword();
};
/** | |||
* @brief Returns a boolean value that specifies if the current authentication mechanism needs a | |||
* password to authenticate according to the auth objects passed into the openUri methods. | |||
* @api private | |||
* @return {Boolean} true if the authentication mechanism specified in the options object requires | |||
* a password, otherwise false. | |||
*/ | |||
Connection.prototype.authMechanismDoesNotRequirePassword = function() {
  const auth = this.options && this.options.auth;
  if (auth) {
    // Passwordless only when the configured mechanism is on the list.
    return noPasswordAuthMechanisms.indexOf(auth.authMechanism) >= 0;
  }
  // No auth options configured: nothing demands a password.
  return true;
};
/** | |||
* @brief Returns a boolean value that specifies if the provided objects object provides enough | |||
* data to authenticate with. Generally this is true if the username and password are both specified | |||
* but in some authentication methods, a password is not required for authentication so only a username | |||
* is required. | |||
* @param {Object} [options] the options object passed into the openUri methods. | |||
* @api private | |||
* @return {Boolean} true if the provided options object provides enough data to authenticate with, | |||
* otherwise false. | |||
*/ | |||
Connection.prototype.optionsProvideAuthenticationData = function(options) {
  // Note: deliberately returns the raw short-circuit operand (not a
  // coerced boolean) so callers relying on truthiness see the same values.
  return options &&
    options.user &&
    (options.pass || this.authMechanismDoesNotRequirePassword());
};
/** | |||
* Switches to a different database using the same connection pool. | |||
* | |||
* Returns a new connection object, with the new db. | |||
* | |||
* @method useDb | |||
* @memberOf Connection | |||
* @param {String} name The database name | |||
* @return {Connection} New Connection Object | |||
* @api public | |||
*/ | |||
/*!
 * Module exports.
 */

// Expose the shared ready-state constants on the constructor as well.
Connection.STATES = STATES;

module.exports = Connection;
@@ -1,26 +0,0 @@ | |||
/*!
 * Connection states
 */

'use strict';

const STATES = module.exports = exports = Object.create(null);

// Numeric code <-> state-name pairs for every connection state.
const stateList = [
  [0, 'disconnected'],
  [1, 'connected'],
  [2, 'connecting'],
  [3, 'disconnecting'],
  [99, 'uninitialized']
];

// Install the mapping in both directions so lookups work either by
// numeric code (`STATES[1]`) or by name (`STATES.connected`).
for (const pair of stateList) {
  STATES[pair[0]] = pair[1];
  STATES[pair[1]] = pair[0];
}
@@ -1,296 +0,0 @@ | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
'use strict'; | |||
const Readable = require('stream').Readable; | |||
const eachAsync = require('../helpers/cursor/eachAsync'); | |||
const util = require('util'); | |||
const utils = require('../utils'); | |||
/** | |||
* An AggregationCursor is a concurrency primitive for processing aggregation | |||
* results one document at a time. It is analogous to QueryCursor. | |||
* | |||
* An AggregationCursor fulfills the Node.js streams3 API, | |||
* in addition to several other mechanisms for loading documents from MongoDB | |||
* one at a time. | |||
* | |||
* Creating an AggregationCursor executes the model's pre aggregate hooks, | |||
* but **not** the model's post aggregate hooks. | |||
* | |||
* Unless you're an advanced user, do **not** instantiate this class directly. | |||
* Use [`Aggregate#cursor()`](/docs/api.html#aggregate_Aggregate-cursor) instead. | |||
* | |||
* @param {Aggregate} agg | |||
* @param {Object} options | |||
* @inherits Readable | |||
* @event `cursor`: Emitted when the cursor is created | |||
* @event `error`: Emitted when an error occurred | |||
* @event `data`: Emitted when the stream is flowing and the next doc is ready | |||
* @event `end`: Emitted when the stream is exhausted | |||
* @api public | |||
*/ | |||
function AggregationCursor(agg) {
  // Emit documents one at a time (streams3 object mode).
  Readable.call(this, { objectMode: true });

  this.cursor = null;
  this.agg = agg;
  this._transforms = [];
  this._mongooseOptions = {};

  const model = agg._model;
  // Internal-only flag; never forward it to the driver.
  delete agg.options.cursor.useMongooseAggCursor;

  _init(model, this, agg);
}
util.inherits(AggregationCursor, Readable); | |||
/*!
 * Starts the underlying driver cursor: runs the model's pre-aggregate
 * hooks, creates the cursor, and announces it via the 'cursor' event.
 * If the collection is still buffering, waits for its queue to flush.
 */

function _init(model, c, agg) {
  const createCursor = function() {
    model.hooks.execPre('aggregate', agg, function() {
      c.cursor = model.collection.aggregate(agg._pipeline, agg.options || {});
      c.emit('cursor', c.cursor);
    });
  };

  if (model.collection.buffer) {
    // Collection not ready yet: defer until buffered ops are flushed.
    model.collection.emitter.once('queue', createCursor);
  } else {
    createCursor();
  }
}
/*!
 * Necessary to satisfy the Readable API
 */

AggregationCursor.prototype._read = function() {
  const _this = this;
  _next(this, function(error, doc) {
    if (error) {
      return _this.emit('error', error);
    }

    if (doc) {
      _this.push(doc);
      return;
    }

    // Cursor exhausted: signal EOF, close the driver cursor, then emit
    // 'close' on a fresh tick.
    _this.push(null);
    _this.cursor.close(function(err) {
      if (err) {
        return _this.emit('error', err);
      }
      setTimeout(function() {
        _this.emit('close');
      }, 0);
    });
  });
};
/** | |||
* Registers a transform function which subsequently maps documents retrieved | |||
* via the streams interface or `.next()` | |||
* | |||
* ####Example | |||
* | |||
* // Map documents returned by `data` events | |||
* Thing. | |||
* find({ name: /^hello/ }). | |||
* cursor(). | |||
* map(function (doc) { | |||
* doc.foo = "bar"; | |||
* return doc; | |||
* }) | |||
* on('data', function(doc) { console.log(doc.foo); }); | |||
* | |||
* // Or map documents returned by `.next()` | |||
* var cursor = Thing.find({ name: /^hello/ }). | |||
* cursor(). | |||
* map(function (doc) { | |||
* doc.foo = "bar"; | |||
* return doc; | |||
* }); | |||
* cursor.next(function(error, doc) { | |||
* console.log(doc.foo); | |||
* }); | |||
* | |||
* @param {Function} fn | |||
* @return {AggregationCursor} | |||
* @api public | |||
* @method map | |||
*/ | |||
AggregationCursor.prototype.map = function(fn) {
  // Registered transforms run, in order, inside _next().
  this._transforms.push(fn);
  return this;
};
/*!
 * Marks this cursor as errored
 */

AggregationCursor.prototype._markError = function(error) {
  // Stash the error; _next() surfaces it on the next read.
  this._error = error;
  return this;
};
/** | |||
* Marks this cursor as closed. Will stop streaming and subsequent calls to | |||
* `next()` will error. | |||
* | |||
* @param {Function} callback | |||
* @return {Promise} | |||
* @api public | |||
* @method close | |||
* @emits close | |||
* @see MongoDB driver cursor#close http://mongodb.github.io/node-mongodb-native/2.1/api/Cursor.html#close | |||
*/ | |||
AggregationCursor.prototype.close = function(callback) {
  return utils.promiseOrCallback(callback, cb => {
    this.cursor.close(error => {
      if (!error) {
        this.emit('close');
        return cb(null);
      }

      cb(error);
      // Re-emit only when someone is listening, to avoid Node's
      // crash-on-unhandled 'error' event semantics.
      if (this.listeners('error').length > 0) {
        this.emit('error', error);
      }
    });
  });
};
/**
 * Get the next document from this cursor. Will return `null` when there are
 * no documents left.
 *
 * @param {Function} callback
 * @return {Promise}
 * @api public
 * @method next
 */

AggregationCursor.prototype.next = function(callback) {
  const _this = this;
  return utils.promiseOrCallback(callback, function(cb) {
    _next(_this, cb);
  });
};
/** | |||
* Execute `fn` for every document in the cursor. If `fn` returns a promise, | |||
* will wait for the promise to resolve before iterating on to the next one. | |||
* Returns a promise that resolves when done. | |||
* | |||
* @param {Function} fn | |||
* @param {Object} [options] | |||
* @param {Number} [options.parallel] the number of promises to execute in parallel. Defaults to 1. | |||
* @param {Function} [callback] executed when all docs have been processed | |||
* @return {Promise} | |||
* @api public | |||
* @method eachAsync | |||
*/ | |||
AggregationCursor.prototype.eachAsync = function(fn, opts, callback) {
  // `eachAsync(fn, callback)` shorthand.
  if (typeof opts === 'function') {
    callback = opts;
    opts = {};
  }

  const cursor = this;
  const fetchNext = function(cb) {
    return _next(cursor, cb);
  };

  return eachAsync(fetchNext, fn, opts || {}, callback);
};
/*!
 * Sets whether `null` results are passed through registered `map()` transforms.
 */

AggregationCursor.prototype.transformNull = function(val) {
  // Calling with no argument means "enable".
  const flag = arguments.length === 0 ? true : val;
  this._mongooseOptions.transformNull = flag;
  return this;
};
/**
 * Adds a [cursor flag](http://mongodb.github.io/node-mongodb-native/2.2/api/Cursor.html#addCursorFlag).
 * Useful for setting the `noCursorTimeout` and `tailable` flags.
 *
 * @param {String} flag
 * @param {Boolean} value
 * @return {AggregationCursor} this
 * @api public
 * @method addCursorFlag
 */

AggregationCursor.prototype.addCursorFlag = function(flag, value) {
  // Defer until the underlying driver cursor exists.
  _waitForCursor(this, () => {
    this.cursor.addCursorFlag(flag, value);
  });
  return this;
};
/*!
 * Invoke `cb` once the driver-level cursor is available.
 */

function _waitForCursor(ctx, cb) {
  if (!ctx.cursor) {
    ctx.once('cursor', () => cb());
    return;
  }
  cb();
}
/*!
 * Get the next doc from the underlying cursor and mongooseify it
 * (populate, etc.)
 */

function _next(ctx, cb) {
  let callback = cb;
  // If transforms were registered via `map()`, wrap the callback so each
  // non-null doc (and null too, when `transformNull` is set) is transformed.
  if (ctx._transforms.length) {
    callback = (err, doc) => {
      if (err || (doc === null && !ctx._mongooseOptions.transformNull)) {
        return cb(err, doc);
      }
      let transformed = doc;
      for (const fn of ctx._transforms) {
        transformed = fn(transformed);
      }
      cb(err, transformed);
    };
  }

  if (ctx._error) {
    // Report a previously recorded fatal error, always asynchronously.
    return process.nextTick(() => callback(ctx._error));
  }

  if (!ctx.cursor) {
    // Driver cursor not created yet; retry once it is.
    ctx.once('cursor', () => _next(ctx, cb));
    return;
  }

  ctx.cursor.next((error, doc) => {
    if (error) {
      return callback(error);
    }
    callback(null, doc ? doc : null);
  });
}
module.exports = AggregationCursor; |
@@ -1,58 +0,0 @@ | |||
'use strict'; | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
const EventEmitter = require('events').EventEmitter; | |||
/*! | |||
* ignore | |||
*/ | |||
class ChangeStream extends EventEmitter {
  constructor(model, pipeline, options) {
    super();

    this.driverChangeStream = null;
    this.closed = false;

    // If the underlying collection is still buffering (not connected yet),
    // defer creating the driver-level change stream until the buffer drains.
    if (model.collection.buffer) {
      model.collection.addQueue(() => {
        if (!this.closed) {
          this._startStream(model, pipeline, options);
        }
      });
    } else {
      this._startStream(model, pipeline, options);
    }
  }

  /*!
   * Create the driver change stream, wire up events, and signal readiness.
   */
  _startStream(model, pipeline, options) {
    this.driverChangeStream = model.collection.watch(pipeline, options);
    this._bindEvents();
    this.emit('ready');
  }

  /*!
   * Bubble the driver stream's events up through this wrapper.
   */
  _bindEvents() {
    for (const ev of ['close', 'change', 'end', 'error']) {
      this.driverChangeStream.on(ev, data => this.emit(ev, data));
    }
  }

  _queue(cb) {
    this.once('ready', () => cb());
  }

  close() {
    this.closed = true;
    if (this.driverChangeStream != null) {
      this.driverChangeStream.close();
    }
  }
}
/*! | |||
* ignore | |||
*/ | |||
module.exports = ChangeStream; |
@@ -1,331 +0,0 @@ | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
'use strict'; | |||
const Readable = require('stream').Readable; | |||
const eachAsync = require('../helpers/cursor/eachAsync'); | |||
const helpers = require('../queryhelpers'); | |||
const util = require('util'); | |||
const utils = require('../utils'); | |||
/** | |||
* A QueryCursor is a concurrency primitive for processing query results | |||
* one document at a time. A QueryCursor fulfills the Node.js streams3 API, | |||
* in addition to several other mechanisms for loading documents from MongoDB | |||
* one at a time. | |||
* | |||
* QueryCursors execute the model's pre find hooks, but **not** the model's | |||
* post find hooks. | |||
* | |||
* Unless you're an advanced user, do **not** instantiate this class directly. | |||
* Use [`Query#cursor()`](/docs/api.html#query_Query-cursor) instead. | |||
* | |||
* @param {Query} query | |||
* @param {Object} options query options passed to `.find()` | |||
* @inherits Readable | |||
* @event `cursor`: Emitted when the cursor is created | |||
* @event `error`: Emitted when an error occurred | |||
* @event `data`: Emitted when the stream is flowing and the next doc is ready | |||
* @event `end`: Emitted when the stream is exhausted | |||
* @api public | |||
*/ | |||
function QueryCursor(query, options) {
  // Object-mode Readable: each chunk pushed downstream is one document.
  Readable.call(this, { objectMode: true });

  this.cursor = null;
  this.query = query;
  const _this = this;
  const model = query.model;
  this._mongooseOptions = {};
  this._transforms = [];
  this.model = model;
  // Run the model's pre('find') hooks first so they can still adjust the
  // query (conditions, transforms); only then open the driver-level cursor.
  model.hooks.execPre('find', query, () => {
    this._transforms = this._transforms.concat(query._transforms.slice());
    if (options.transform) {
      this._transforms.push(options.transform);
    }
    model.collection.find(query._conditions, options, function(err, cursor) {
      if (_this._error) {
        // A fatal error was recorded via `_markError()` while the cursor was
        // being created: discard the driver cursor and re-emit the error
        // (only when someone is listening, to avoid an unhandled 'error').
        cursor.close(function() {});
        _this.listeners('error').length > 0 && _this.emit('error', _this._error);
      }
      if (err) {
        return _this.emit('error', err);
      }
      _this.cursor = cursor;
      // 'cursor' unblocks `_next()`/`addCursorFlag()` calls that were waiting.
      _this.emit('cursor', cursor);
    });
  });
}

util.inherits(QueryCursor, Readable);
/*! | |||
* Necessary to satisfy the Readable API | |||
*/ | |||
QueryCursor.prototype._read = function() {
  const _this = this;
  _next(this, function(error, doc) {
    if (error) {
      return _this.emit('error', error);
    }
    if (doc) {
      _this.push(doc);
      return;
    }
    // Cursor exhausted: signal end-of-stream, then close the driver cursor.
    _this.push(null);
    _this.cursor.close(function(error) {
      if (error) {
        return _this.emit('error', error);
      }
      // Emit 'close' on a later tick, after 'end' has had a chance to fire.
      setTimeout(function() {
        _this.emit('close');
      }, 0);
    });
  });
};
/**
 * Registers a transform function which subsequently maps documents retrieved
 * via the streams interface or `.next()`
 *
 * ####Example
 *
 *     // Map documents returned by `data` events
 *     Thing.
 *       find({ name: /^hello/ }).
 *       cursor().
 *       map(function (doc) {
 *         doc.foo = "bar";
 *         return doc;
 *       })
 *       on('data', function(doc) { console.log(doc.foo); });
 *
 *     // Or map documents returned by `.next()`
 *     var cursor = Thing.find({ name: /^hello/ }).
 *       cursor().
 *       map(function (doc) {
 *         doc.foo = "bar";
 *         return doc;
 *       });
 *     cursor.next(function(error, doc) {
 *       console.log(doc.foo);
 *     });
 *
 * @param {Function} fn applied to every doc, in registration order
 * @return {QueryCursor} this
 * @api public
 * @method map
 */

QueryCursor.prototype.map = function(fn) {
  this._transforms = this._transforms.concat([fn]);
  return this;
};

/*!
 * Records a fatal error; later `next()` calls will fail with it.
 */

QueryCursor.prototype._markError = function(error) {
  this._error = error;
  return this;
};
/**
 * Marks this cursor as closed. Will stop streaming and subsequent calls to
 * `next()` will error.
 *
 * @param {Function} callback
 * @return {Promise}
 * @api public
 * @method close
 * @emits close
 * @see MongoDB driver cursor#close http://mongodb.github.io/node-mongodb-native/2.1/api/Cursor.html#close
 */

QueryCursor.prototype.close = function(callback) {
  return utils.promiseOrCallback(callback, cb => {
    this.cursor.close(error => {
      if (!error) {
        this.emit('close');
        return cb(null);
      }
      cb(error);
      // Only re-emit when someone is listening, to avoid an unhandled 'error'.
      if (this.listeners('error').length > 0) {
        this.emit('error', error);
      }
    });
  }, this.model.events);
};
/**
 * Get the next document from this cursor. Will return `null` when there are
 * no documents left.
 *
 * @param {Function} callback
 * @return {Promise}
 * @api public
 * @method next
 */

QueryCursor.prototype.next = function(callback) {
  const _this = this;
  return utils.promiseOrCallback(callback, function(cb) {
    // `_next` already invokes `cb` with `(error)` or `(null, doc)`, so it
    // can be forwarded directly without re-wrapping.
    _next(_this, cb);
  }, _this.model.events);
};
/**
 * Execute `fn` for every document in the cursor. If `fn` returns a promise,
 * will wait for the promise to resolve before iterating on to the next one.
 * Returns a promise that resolves when done.
 *
 * @param {Function} fn handler invoked with each document
 * @param {Object} [options]
 * @param {Number} [options.parallel] the number of promises to execute in parallel. Defaults to 1.
 * @param {Function} [callback] executed when all docs have been processed
 * @return {Promise}
 * @api public
 * @method eachAsync
 */

QueryCursor.prototype.eachAsync = function(fn, opts, callback) {
  // Support the `eachAsync(fn, callback)` call signature.
  if (typeof opts === 'function') {
    callback = opts;
    opts = null;
  }
  const options = opts || {};
  return eachAsync(cb => _next(this, cb), fn, options, callback);
};
/**
 * Adds a [cursor flag](http://mongodb.github.io/node-mongodb-native/2.2/api/Cursor.html#addCursorFlag).
 * Useful for setting the `noCursorTimeout` and `tailable` flags.
 *
 * @param {String} flag
 * @param {Boolean} value
 * @return {AggregationCursor} this
 * @api public
 * @method addCursorFlag
 */

QueryCursor.prototype.addCursorFlag = function(flag, value) {
  // Defer until the underlying driver cursor exists.
  _waitForCursor(this, () => {
    this.cursor.addCursorFlag(flag, value);
  });
  return this;
};

/*!
 * Sets whether `null` results are passed through registered `map()` transforms.
 */

QueryCursor.prototype.transformNull = function(val) {
  // Calling with no argument means "enable".
  const flag = arguments.length === 0 ? true : val;
  this._mongooseOptions.transformNull = flag;
  return this;
};
/*!
 * Get the next doc from the underlying cursor and mongooseify it
 * (populate, etc.)
 */

function _next(ctx, cb) {
  let callback = cb;
  // Wrap the callback so registered `map()` transforms run on each doc.
  // Null docs skip transforms unless `transformNull` was enabled.
  if (ctx._transforms.length) {
    callback = function(err, doc) {
      if (err || (doc === null && !ctx._mongooseOptions.transformNull)) {
        return cb(err, doc);
      }
      cb(err, ctx._transforms.reduce(function(doc, fn) {
        return fn.call(ctx, doc);
      }, doc));
    };
  }

  if (ctx._error) {
    // A fatal error was recorded via `_markError()`; always report it
    // asynchronously.
    return process.nextTick(function() {
      callback(ctx._error);
    });
  }

  if (ctx.cursor) {
    return ctx.cursor.next(function(error, doc) {
      if (error) {
        return callback(error);
      }
      if (!doc) {
        // Cursor exhausted.
        return callback(null, null);
      }

      const opts = ctx.query._mongooseOptions;
      if (!opts.populate) {
        // No populate: hand back the raw doc (lean) or hydrate it.
        return opts.lean ?
          callback(null, doc) :
          _create(ctx, doc, null, callback);
      }

      // Populate the doc first, then hydrate unless `lean` was requested.
      const pop = helpers.preparePopulationOptionsMQ(ctx.query,
        ctx.query._mongooseOptions);
      pop.__noPromise = true;
      ctx.query.model.populate(doc, pop, function(err, doc) {
        if (err) {
          return callback(err);
        }
        return opts.lean ?
          callback(null, doc) :
          _create(ctx, doc, pop, callback);
      });
    });
  } else {
    // Driver cursor not created yet; retry once it is emitted.
    ctx.once('cursor', function() {
      _next(ctx, cb);
    });
  }
}
/*!
 * Invoke `cb` once the driver-level cursor is available.
 */

function _waitForCursor(ctx, cb) {
  if (!ctx.cursor) {
    ctx.once('cursor', () => cb());
    return;
  }
  cb();
}

/*!
 * Convert a raw doc into a full mongoose doc.
 */

function _create(ctx, doc, populatedIds, cb) {
  const instance = helpers.createModel(ctx.query.model, doc, ctx.query._fields);
  const opts = populatedIds ? { populated: populatedIds } : undefined;
  instance.init(doc, opts, err => {
    if (err) {
      return cb(err);
    }
    cb(null, instance);
  });
}
module.exports = QueryCursor; |
@@ -1,30 +0,0 @@ | |||
'use strict'; | |||
/* eslint-env browser */ | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
const Document = require('./document.js'); | |||
const BrowserDocument = require('./browserDocument.js'); | |||
let isBrowser = false; | |||
/** | |||
* Returns the Document constructor for the current context | |||
* | |||
* @api private | |||
*/ | |||
module.exports = function() { | |||
if (isBrowser) { | |||
return BrowserDocument; | |||
} | |||
return Document; | |||
}; | |||
/*! | |||
* ignore | |||
*/ | |||
module.exports.setBrowser = function(flag) { | |||
isBrowser = flag; | |||
}; |
@@ -1,15 +0,0 @@ | |||
'use strict'; | |||
/*! | |||
* ignore | |||
*/ | |||
let driver = null;

// Accessors for the globally-selected database driver implementation.
module.exports.get = () => driver;

module.exports.set = newDriver => {
  driver = newDriver;
};
@@ -1,4 +0,0 @@ | |||
# Driver Spec | |||
TODO |
@@ -1,7 +0,0 @@ | |||
/*! | |||
* ignore | |||
*/ | |||
'use strict';

// Inert stand-in: the browser build has no real collections, so this module
// exports an empty constructor that callers can safely instantiate/ignore.
module.exports = function() {};
@@ -1,14 +0,0 @@ | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
'use strict';

// Re-export bson's Binary so schemas can use binary/buffer data in the
// browser build without pulling in the native mongodb driver.
const Binary = require('bson').Binary;

/*!
 * Module exports.
 */

module.exports = exports = Binary;
@@ -1,7 +0,0 @@ | |||
/*! | |||
* ignore | |||
*/ | |||
'use strict';

// Browser build: re-export bson's Decimal128 for the Decimal128 schema type.
module.exports = require('bson').Decimal128;
@@ -1,13 +0,0 @@ | |||
/*! | |||
* Module exports. | |||
*/ | |||
'use strict';

// Aggregates the browser-safe driver pieces mongoose needs.
exports.Binary = require('./binary');
// Real collections require a server connection, which the browser build
// never has, so constructing one is always an error.
exports.Collection = function() {
  throw new Error('Cannot create a collection from browser library');
};
exports.Decimal128 = require('./decimal128');
exports.ObjectId = require('./objectid');
exports.ReadPreference = require('./ReadPreference');
@@ -1,28 +0,0 @@ | |||
/*! | |||
* [node-mongodb-native](https://github.com/mongodb/node-mongodb-native) ObjectId | |||
* @constructor NodeMongoDbObjectId | |||
* @see ObjectId | |||
*/ | |||
'use strict';

const ObjectId = require('bson').ObjectID;

/*!
 * Getter for convenience with populate, see gh-6115
 */

// Populate-related code reads `._id` off whatever value it holds; giving
// ObjectId a self-returning `_id` getter lets a raw ObjectId be used
// interchangeably with a populated document.
Object.defineProperty(ObjectId.prototype, '_id', {
  enumerable: false,
  configurable: true,
  get: function() {
    return this;
  }
});

/*!
 * ignore
 */

module.exports = exports = ObjectId;
@@ -1,47 +0,0 @@ | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
'use strict'; | |||
const mongodb = require('mongodb'); | |||
const ReadPref = mongodb.ReadPreference; | |||
/*! | |||
* Converts arguments to ReadPrefs the driver | |||
* can understand. | |||
* | |||
* @param {String|Array} pref | |||
* @param {Array} [tags] | |||
*/ | |||
module.exports = function readPref(pref, tags) { | |||
if (Array.isArray(pref)) { | |||
tags = pref[1]; | |||
pref = pref[0]; | |||
} | |||
if (pref instanceof ReadPref) { | |||
return pref; | |||
} | |||
switch (pref) { | |||
case 'p': | |||
pref = 'primary'; | |||
break; | |||
case 'pp': | |||
pref = 'primaryPreferred'; | |||
break; | |||
case 's': | |||
pref = 'secondary'; | |||
break; | |||
case 'sp': | |||
pref = 'secondaryPreferred'; | |||
break; | |||
case 'n': | |||
pref = 'nearest'; | |||
break; | |||
} | |||
return new ReadPref(pref, tags); | |||
}; |
@@ -1,10 +0,0 @@ | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
'use strict';

const Binary = require('mongodb').Binary;

// Direct re-export: mongoose's Binary type is the driver's Binary.
module.exports = exports = Binary;
@@ -1,181 +0,0 @@ | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
'use strict'; | |||
const MongooseConnection = require('../../connection'); | |||
const STATES = require('../../connectionstate'); | |||
/** | |||
* A [node-mongodb-native](https://github.com/mongodb/node-mongodb-native) connection implementation. | |||
* | |||
* @inherits Connection | |||
* @api private | |||
*/ | |||
function NativeConnection() {
  MongooseConnection.apply(this, arguments);
  // Whether `listen()` has attached driver event handlers for this connection.
  this._listening = false;
}

/**
 * Expose the possible connection states.
 * @api public
 */

NativeConnection.STATES = STATES;

/*!
 * Inherits from Connection.
 */

NativeConnection.prototype.__proto__ = MongooseConnection.prototype;
/**
 * Switches to a different database using the same connection pool.
 *
 * Returns a new connection object, with the new db.
 *
 * @param {String} name The database name
 * @param {Object} [options]
 * @param {Boolean} [options.useCache] If true, cache the result so repeated calls with the same name return the same connection object
 * @return {Connection} New Connection Object
 * @api public
 */

NativeConnection.prototype.useDb = function(name, options) {
  // Return immediately if cached
  if (options && options.useCache && this.relatedDbs[name]) {
    return this.relatedDbs[name];
  }

  // we have to manually copy all of the attributes...
  const newConn = new this.constructor();
  // Set the new db's name exactly once (previously this was assigned three
  // times, with the middle assignment clobbering it with the parent's name).
  newConn.name = name;
  newConn.base = this.base;
  newConn.collections = {};
  newConn.models = {};
  newConn.replica = this.replica;
  newConn.options = this.options;
  newConn._readyState = this._readyState;
  newConn._closeCalled = this._closeCalled;
  newConn._hasOpened = this._hasOpened;
  newConn._listening = false;
  newConn.host = this.host;
  newConn.port = this.port;
  newConn.user = this.user;
  newConn.pass = this.pass;

  // First, when we create another db object, we are not guaranteed to have a
  // db object to work with. So, in the case where we have a db object and it
  // is connected, we can just proceed with setting everything up. However, if
  // we do not have a db or the state is not connected, then we need to wait on
  // the 'open' event of the connection before doing the rest of the setup
  // the 'connected' event is the first time we'll have access to the db object

  const _this = this;

  newConn.client = _this.client;

  if (this.db && this._readyState === STATES.connected) {
    wireup();
  } else {
    this.once('connected', wireup);
  }

  function wireup() {
    newConn.client = _this.client;
    newConn.db = _this.client.db(name);
    newConn.onOpen();
    // setup the events appropriately
    listen(newConn);
  }

  // push onto the otherDbs stack, this is used when state changes
  this.otherDbs.push(newConn);
  newConn.otherDbs.push(this);

  // push onto the relatedDbs cache, this is used when state changes
  if (options && options.useCache) {
    this.relatedDbs[newConn.name] = newConn;
    newConn.relatedDbs = this.relatedDbs;
  }

  return newConn;
};
/*! | |||
* Register listeners for important events and bubble appropriately. | |||
*/ | |||
function listen(conn) {
  // Only wire the handlers once per underlying db object.
  if (conn.db._listening) {
    return;
  }
  conn.db._listening = true;

  conn.db.on('close', function(force) {
    // Deliberate close: `doClose` already handles state/events.
    if (conn._closeCalled) return;

    // the driver never emits an `open` event. auto_reconnect still
    // emits a `close` event but since we never get another
    // `open` we can't emit close
    if (conn.db.serverConfig.autoReconnect) {
      conn.readyState = STATES.disconnected;
      conn.emit('close');
      return;
    }
    conn.onClose(force);
  });
  conn.db.on('error', function(err) {
    conn.emit('error', err);
  });
  conn.db.on('reconnect', function() {
    conn.readyState = STATES.connected;
    conn.emit('reconnect');
    conn.emit('reconnected');
    conn.onOpen();
  });
  conn.db.on('timeout', function(err) {
    conn.emit('timeout', err);
  });
  conn.db.on('open', function(err, db) {
    // Treat a driver-level 'open' while we believe we're disconnected as a
    // successful reconnect.
    if (STATES.disconnected === conn.readyState && db && db.databaseName) {
      conn.readyState = STATES.connected;
      conn.emit('reconnect');
      conn.emit('reconnected');
    }
  });
  conn.db.on('parseError', function(err) {
    conn.emit('parseError', err);
  });
}
/** | |||
* Closes the connection | |||
* | |||
* @param {Boolean} [force] | |||
* @param {Function} [fn] | |||
* @return {Connection} this | |||
* @api private | |||
*/ | |||
NativeConnection.prototype.doClose = function(force, fn) {
  const done = (err, res) => {
    // Defer because the driver waits at least 1ms before finishing closing
    // the pool, see https://github.com/mongodb-js/mongodb-core/blob/a8f8e4ce41936babc3b9112bf42d609779f03b39/lib/connection/pool.js#L1026-L1030.
    // If there's queued operations, you may still get some background work
    // after the callback is called.
    setTimeout(function() {
      fn(err, res);
    }, 1);
  };
  this.client.close(force, done);
  return this;
};

/*!
 * Module exports.
 */

module.exports = NativeConnection;
@@ -1,7 +0,0 @@ | |||
/*! | |||
* ignore | |||
*/ | |||
'use strict';

// Direct re-export of the driver's Decimal128 BSON type.
module.exports = require('mongodb').Decimal128;
@@ -1,11 +0,0 @@ | |||
/*! | |||
* Module exports. | |||
*/ | |||
'use strict';

// Aggregates the node-mongodb-native driver pieces mongoose needs.
exports.Binary = require('./binary');
exports.Collection = require('./collection');
exports.Decimal128 = require('./decimal128');
exports.ObjectId = require('./objectid');
exports.ReadPreference = require('./ReadPreference');
@@ -1,16 +0,0 @@ | |||
/*! | |||
* [node-mongodb-native](https://github.com/mongodb/node-mongodb-native) ObjectId | |||
* @constructor NodeMongoDbObjectId | |||
* @see ObjectId | |||
*/ | |||
'use strict';

const ObjectId = require('mongodb').ObjectId;

/*!
 * ignore
 */

// Direct re-export of the driver's ObjectId.
module.exports = exports = ObjectId;
@@ -1,38 +0,0 @@ | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
'use strict'; | |||
const MongooseError = require('./'); | |||
/*! | |||
* MissingSchema Error constructor. | |||
* | |||
* @inherits MongooseError | |||
*/ | |||
function MissingSchemaError() {
  const msg = 'Schema hasn\'t been registered for document.\n'
    + 'Use mongoose.Document(name, schema)';
  MongooseError.call(this, msg);
  this.name = 'MissingSchemaError';
  // Prefer V8's stack capture; otherwise harvest a fresh Error's stack.
  if (Error.captureStackTrace) {
    Error.captureStackTrace(this);
  } else {
    this.stack = new Error().stack;
  }
}

/*!
 * Inherits from MongooseError.
 */

MissingSchemaError.prototype = Object.create(MongooseError.prototype);
// Restore `constructor`, which `Object.create` left pointing at the parent
// class (it was previously — incorrectly — assigned `MongooseError`).
MissingSchemaError.prototype.constructor = MissingSchemaError;

/*!
 * exports
 */

module.exports = MissingSchemaError;
@@ -1,62 +0,0 @@ | |||
'use strict'; | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
const MongooseError = require('./mongooseError'); | |||
const util = require('util'); | |||
/** | |||
* Casting Error constructor. | |||
* | |||
* @param {String} type | |||
* @param {String} value | |||
* @inherits MongooseError | |||
* @api private | |||
*/ | |||
function CastError(type, value, path, reason) {
  // Render the offending value, normalizing so the message always shows it
  // wrapped in double quotes.
  let stringValue = util.inspect(value);
  stringValue = stringValue.replace(/^'/, '"').replace(/'$/, '"');
  if (stringValue.charAt(0) !== '"') {
    stringValue = '"' + stringValue + '"';
  }
  MongooseError.call(this, 'Cast to ' + type + ' failed for value ' +
    stringValue + ' at path "' + path + '"');
  this.name = 'CastError';
  if (Error.captureStackTrace) {
    Error.captureStackTrace(this);
  } else {
    this.stack = new Error().stack;
  }
  this.stringValue = stringValue;
  this.kind = type;
  this.value = value;
  this.path = path;
  this.reason = reason;
}

/*!
 * Inherits from MongooseError.
 */

CastError.prototype = Object.create(MongooseError.prototype);
// Restore `constructor`, which `Object.create` left pointing at the parent
// class (it was previously — incorrectly — assigned `MongooseError`).
CastError.prototype.constructor = CastError;

/*!
 * Attach the model after the fact and rebuild the message to include its name.
 */

CastError.prototype.setModel = function(model) {
  this.model = model;
  this.message = 'Cast to ' + this.kind + ' failed for value ' +
    this.stringValue + ' at path "' + this.path + '"' + ' for model "' +
    model.modelName + '"';
};

/*!
 * exports
 */

module.exports = CastError;
@@ -1,42 +0,0 @@ | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
'use strict'; | |||
const MongooseError = require('./'); | |||
/**
 * Error raised when the driver runs out of reconnection attempts.
 *
 * @param {String} connectionString the connection string of the unreachable server
 * @inherits MongooseError
 * @api private
 */
function DisconnectedError(connectionString) {
  MongooseError.call(this, 'Ran out of retries trying to reconnect to "' +
    connectionString + '". Try setting `server.reconnectTries` and ' +
    '`server.reconnectInterval` to something higher.');
  this.name = 'DisconnectedError';
  if (Error.captureStackTrace) {
    Error.captureStackTrace(this);
  } else {
    this.stack = new Error().stack;
  }
}

/*!
 * Inherits from MongooseError.
 */

DisconnectedError.prototype = Object.create(MongooseError.prototype);
// Restore `constructor`, which `Object.create` left pointing at the parent
// class (it was previously — incorrectly — assigned `MongooseError`).
DisconnectedError.prototype.constructor = DisconnectedError;

/*!
 * exports
 */

module.exports = DisconnectedError;
@@ -1,48 +0,0 @@ | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
'use strict'; | |||
const MongooseError = require('./'); | |||
/*! | |||
* DivergentArrayError constructor. | |||
* | |||
* @inherits MongooseError | |||
*/ | |||
function DivergentArrayError(paths) {
  const msg = 'For your own good, using `document.save()` to update an array '
    + 'which was selected using an $elemMatch projection OR '
    + 'populated using skip, limit, query conditions, or exclusion of '
    + 'the _id field when the operation results in a $pop or $set of '
    + 'the entire array is not supported. The following '
    + 'path(s) would have been modified unsafely:\n'
    + '  ' + paths.join('\n  ') + '\n'
    + 'Use Model.update() to update these arrays instead.';
  // TODO write up a docs page (FAQ) and link to it
  MongooseError.call(this, msg);
  this.name = 'DivergentArrayError';
  if (Error.captureStackTrace) {
    Error.captureStackTrace(this);
  } else {
    this.stack = new Error().stack;
  }
}

/*!
 * Inherits from MongooseError.
 */

DivergentArrayError.prototype = Object.create(MongooseError.prototype);
// Restore `constructor`, which `Object.create` left pointing at the parent
// class (it was previously — incorrectly — assigned `MongooseError`).
DivergentArrayError.prototype.constructor = DivergentArrayError;

/*!
 * exports
 */

module.exports = DivergentArrayError;
@@ -1,104 +0,0 @@ | |||
'use strict';

const MongooseError = require('./mongooseError');

/*!
 * Module exports: the base error class, with every concrete error class
 * attached as a static property.
 */

module.exports = exports = MongooseError;

/**
 * The default built-in validator error messages.
 *
 * @see Error.messages #error_messages_MongooseError-messages
 * @api public
 */

MongooseError.messages = require('./messages');

// backward compat: `Messages` (capital M) is a legacy alias for `messages`.
MongooseError.Messages = MongooseError.messages;

/**
 * An instance of this error class will be returned when `save()` fails
 * because the underlying
 * document was not found. The constructor takes one parameter, the
 * conditions that mongoose passed to `update()` when trying to update
 * the document.
 *
 * @api public
 */

MongooseError.DocumentNotFoundError = require('./notFound');

/**
 * An instance of this error class will be returned when mongoose failed to
 * cast a value.
 *
 * @api public
 */

MongooseError.CastError = require('./cast');

/**
 * An instance of this error class will be returned when [validation](/docs/validation.html) failed.
 *
 * @api public
 */

MongooseError.ValidationError = require('./validation');

/**
 * A `ValidationError` has a hash of `errors` that contain individual `ValidatorError` instances
 *
 * @api public
 */

MongooseError.ValidatorError = require('./validator');

/**
 * An instance of this error class will be returned when you call `save()` after
 * the document in the database was changed in a potentially unsafe way. See
 * the [`versionKey` option](/docs/guide.html#versionKey) for more information.
 *
 * @api public
 */

MongooseError.VersionError = require('./version');

/**
 * An instance of this error class will be returned when you call `save()` multiple
 * times on the same document in parallel. See the [FAQ](/docs/faq.html) for more
 * information.
 *
 * @api public
 */

MongooseError.ParallelSaveError = require('./parallelSave');

/**
 * Thrown when a model with the given name was already registered on the connection.
 * See [the FAQ about `OverwriteModelError`](/docs/faq.html#overwrite-model-error).
 *
 * @api public
 */

MongooseError.OverwriteModelError = require('./overwriteModel');

/**
 * Thrown when you try to access a model that has not been registered yet
 *
 * @api public
 */

MongooseError.MissingSchemaError = require('./missingSchema');

/**
 * An instance of this error will be returned if you used an array projection
 * and then modified the array in an unsafe way.
 *
 * @api public
 */

MongooseError.DivergentArrayError = require('./divergentArray');
@@ -1,46 +0,0 @@ | |||
/** | |||
* The default built-in validator error messages. These may be customized. | |||
* | |||
* // customize within each schema or globally like so | |||
* var mongoose = require('mongoose'); | |||
* mongoose.Error.messages.String.enum = "Your custom message for {PATH}."; | |||
* | |||
* As you might have noticed, error messages support basic templating | |||
* | |||
* - `{PATH}` is replaced with the invalid document path | |||
* - `{VALUE}` is replaced with the invalid value | |||
* - `{TYPE}` is replaced with the validator type such as "regexp", "min", or "user defined" | |||
* - `{MIN}` is replaced with the declared min value for the Number.min validator | |||
* - `{MAX}` is replaced with the declared max value for the Number.max validator | |||
* | |||
* Click the "show code" link below to see all defaults. | |||
* | |||
* @static messages | |||
* @receiver MongooseError | |||
* @api public | |||
*/ | |||
'use strict';

const msg = module.exports = exports = {};

// null = no custom override; the DocumentNotFoundError presumably builds its
// own message — confirm against ./notFound before relying on this.
msg.DocumentNotFoundError = null;

// Messages shared by all schema types.
msg.general = {};
msg.general.default = 'Validator failed for path `{PATH}` with value `{VALUE}`';
msg.general.required = 'Path `{PATH}` is required.';

// Number validator messages.
msg.Number = {};
msg.Number.min = 'Path `{PATH}` ({VALUE}) is less than minimum allowed value ({MIN}).';
msg.Number.max = 'Path `{PATH}` ({VALUE}) is more than maximum allowed value ({MAX}).';

// Date validator messages.
msg.Date = {};
msg.Date.min = 'Path `{PATH}` ({VALUE}) is before minimum allowed value ({MIN}).';
msg.Date.max = 'Path `{PATH}` ({VALUE}) is after maximum allowed value ({MAX}).';

// String validator messages.
msg.String = {};
msg.String.enum = '`{VALUE}` is not a valid enum value for path `{PATH}`.';
msg.String.match = 'Path `{PATH}` is invalid ({VALUE}).';
msg.String.minlength = 'Path `{PATH}` (`{VALUE}`) is shorter than the minimum allowed length ({MINLENGTH}).';
msg.String.maxlength = 'Path `{PATH}` (`{VALUE}`) is longer than the maximum allowed length ({MAXLENGTH}).';
@@ -1,39 +0,0 @@ | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
'use strict'; | |||
const MongooseError = require('./'); | |||
/*! | |||
* MissingSchema Error constructor. | |||
* | |||
* @inherits MongooseError | |||
*/ | |||
function MissingSchemaError(name) {
  const msg = 'Schema hasn\'t been registered for model "' + name + '".\n'
    + 'Use mongoose.model(name, schema)';
  MongooseError.call(this, msg);
  this.name = 'MissingSchemaError';
  // Prefer V8's stack capture; otherwise harvest a fresh Error's stack.
  if (Error.captureStackTrace) {
    Error.captureStackTrace(this);
  } else {
    this.stack = new Error().stack;
  }
}

/*!
 * Inherits from MongooseError.
 */

MissingSchemaError.prototype = Object.create(MongooseError.prototype);
// Restore `constructor`, which `Object.create` left pointing at the parent
// class (it was previously — incorrectly — assigned `MongooseError`).
MissingSchemaError.prototype.constructor = MissingSchemaError;

/*!
 * exports
 */

module.exports = MissingSchemaError;
@@ -1,28 +0,0 @@ | |||
/** | |||
* MongooseError constructor | |||
* | |||
* @param {String} msg Error message | |||
* @inherits Error https://developer.mozilla.org/en/JavaScript/Reference/Global_Objects/Error | |||
*/ | |||
'use strict'; | |||
/**
 * Base class for all Mongoose errors.
 *
 * @param {String} msg error message
 * @inherits Error
 */
function MongooseError(msg) {
  Error.call(this);
  // Prefer V8's captureStackTrace; fall back to a throwaway Error's stack.
  if (Error.captureStackTrace) {
    Error.captureStackTrace(this);
  } else {
    this.stack = new Error().stack;
  }
  this.message = msg;
  this.name = 'MongooseError';
}

/*!
 * Inherits from Error.
 */
MongooseError.prototype = Object.create(Error.prototype);
// Point `constructor` back at MongooseError itself; it previously pointed at
// `Error`, so `err.constructor` misreported the base Error function.
MongooseError.prototype.constructor = MongooseError;
module.exports = MongooseError; |
@@ -1,37 +0,0 @@ | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
'use strict'; | |||
const MongooseError = require('./'); | |||
/** | |||
* Strict mode error constructor | |||
* | |||
* @param {String} type | |||
* @param {String} value | |||
* @inherits MongooseError | |||
* @api private | |||
*/ | |||
/**
 * Raised when a primitive is assigned to a nested-object path while strict
 * mode is set to throw.
 *
 * @param {String} path document path that was set
 * @param {Any} val the primitive value that was assigned
 * @inherits MongooseError
 * @api private
 */
function ObjectExpectedError(path, val) {
  MongooseError.call(this, 'Tried to set nested object field `' + path +
    '` to primitive value `' + val + '` and strict mode is set to throw.');
  this.name = 'ObjectExpectedError';
  if (Error.captureStackTrace) {
    Error.captureStackTrace(this);
  } else {
    this.stack = new Error().stack;
  }
  this.path = path;
}

/*!
 * Inherits from MongooseError.
 */
ObjectExpectedError.prototype = Object.create(MongooseError.prototype);
// Point `constructor` back at the subclass itself; it previously pointed at
// the parent, so `err.constructor` misreported MongooseError.
ObjectExpectedError.prototype.constructor = ObjectExpectedError;
module.exports = ObjectExpectedError; |
@@ -1,38 +0,0 @@ | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
'use strict'; | |||
const MongooseError = require('./'); | |||
/** | |||
* Constructor for errors that happen when a parameter that's expected to be | |||
* an object isn't an object | |||
* | |||
* @param {Any} value | |||
* @param {String} paramName | |||
* @param {String} fnName | |||
* @inherits MongooseError | |||
* @api private | |||
*/ | |||
/**
 * Raised when a parameter that must be an object is not an object.
 *
 * @param {Any} value the offending value (may be null/undefined)
 * @param {String} paramName parameter name for the message
 * @param {String} fnName function name for the message
 * @inherits MongooseError
 * @api private
 */
function ObjectParameterError(value, paramName, fnName) {
  // Use String(value) instead of value.toString(): this error is raised for
  // non-objects, and `null`/`undefined` have no toString method.
  MongooseError.call(this, 'Parameter "' + paramName + '" to ' + fnName +
    '() must be an object, got ' + String(value));
  this.name = 'ObjectParameterError';
  if (Error.captureStackTrace) {
    Error.captureStackTrace(this);
  } else {
    this.stack = new Error().stack;
  }
}

/*!
 * Inherits from MongooseError.
 */
ObjectParameterError.prototype = Object.create(MongooseError.prototype);
// Point `constructor` back at the subclass itself; it previously pointed at
// the parent, so `err.constructor` misreported MongooseError.
ObjectParameterError.prototype.constructor = ObjectParameterError;
module.exports = ObjectParameterError; |
@@ -1,37 +0,0 @@ | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
'use strict'; | |||
const MongooseError = require('./'); | |||
/*! | |||
* OverwriteModel Error constructor. | |||
* | |||
* @inherits MongooseError | |||
*/ | |||
/**
 * Raised when redefining a model name that has already been compiled.
 *
 * @param {String} name model name being overwritten
 * @inherits MongooseError
 */
function OverwriteModelError(name) {
  MongooseError.call(this, 'Cannot overwrite `' + name + '` model once compiled.');
  this.name = 'OverwriteModelError';
  if (Error.captureStackTrace) {
    Error.captureStackTrace(this);
  } else {
    this.stack = new Error().stack;
  }
}

/*!
 * Inherits from MongooseError.
 */
OverwriteModelError.prototype = Object.create(MongooseError.prototype);
// Point `constructor` back at the subclass itself; it previously pointed at
// the parent, so `err.constructor` misreported MongooseError.
OverwriteModelError.prototype.constructor = OverwriteModelError;
/*! | |||
* exports | |||
*/ | |||
module.exports = OverwriteModelError; |
@@ -1,33 +0,0 @@ | |||
'use strict'; | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
const MongooseError = require('./'); | |||
/** | |||
* ParallelSave Error constructor. | |||
* | |||
* @inherits MongooseError | |||
* @api private | |||
*/ | |||
/**
 * Raised when the same document is save()-d multiple times in parallel.
 *
 * @param {Document} doc the document being saved (its `id` goes in the message)
 * @inherits MongooseError
 * @api private
 */
function ParallelSaveError(doc) {
  const msg = 'Can\'t save() the same doc multiple times in parallel. Document: ';
  MongooseError.call(this, msg + doc.id);
  this.name = 'ParallelSaveError';
  // Capture the stack here like the sibling error subclasses do, so the trace
  // starts at the throw site rather than inside MongooseError.
  if (Error.captureStackTrace) {
    Error.captureStackTrace(this);
  } else {
    this.stack = new Error().stack;
  }
}

/*!
 * Inherits from MongooseError.
 */
ParallelSaveError.prototype = Object.create(MongooseError.prototype);
// Point `constructor` back at the subclass itself; it previously pointed at
// the parent, so `err.constructor` misreported MongooseError.
ParallelSaveError.prototype.constructor = ParallelSaveError;
/*! | |||
* exports | |||
*/ | |||
module.exports = ParallelSaveError; |
@@ -1,38 +0,0 @@ | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
'use strict'; | |||
const MongooseError = require('./'); | |||
/** | |||
* Strict mode error constructor | |||
* | |||
* @param {String} type | |||
* @param {String} value | |||
* @inherits MongooseError | |||
* @api private | |||
*/ | |||
/**
 * Raised when setting a path outside the schema while strict mode is set
 * to throw.
 *
 * @param {String} path offending path
 * @param {String} [msg] custom message; a default is built from `path`
 * @inherits MongooseError
 * @api private
 */
function StrictModeError(path, msg) {
  // Fall back to a generic message when the caller supplies none.
  msg = msg || 'Field `' + path + '` is not in schema and strict ' +
    'mode is set to throw.';
  MongooseError.call(this, msg);
  this.name = 'StrictModeError';
  if (Error.captureStackTrace) {
    Error.captureStackTrace(this);
  } else {
    this.stack = new Error().stack;
  }
  this.path = path;
}

/*!
 * Inherits from MongooseError.
 */
StrictModeError.prototype = Object.create(MongooseError.prototype);
// Point `constructor` back at the subclass itself; it previously pointed at
// the parent, so `err.constructor` misreported MongooseError.
StrictModeError.prototype.constructor = StrictModeError;
module.exports = StrictModeError; |
@@ -1,112 +0,0 @@ | |||
/*! | |||
* Module requirements | |||
*/ | |||
'use strict'; | |||
const MongooseError = require('./'); | |||
const util = require('util'); | |||
/** | |||
* Document Validation Error | |||
* | |||
* @api private | |||
* @param {Document} instance | |||
* @inherits MongooseError | |||
*/ | |||
function ValidationError(instance) {
  this.errors = {};

  // Compiled mongoose models have a constructor literally named 'model';
  // when we get one, include the model name in the message.
  const isModelInstance = instance && instance.constructor.name === 'model';
  this._message = isModelInstance ?
    instance.constructor.modelName + ' validation failed' :
    'Validation failed';
  MongooseError.call(this, this._message);

  this.name = 'ValidationError';
  if (Error.captureStackTrace) {
    Error.captureStackTrace(this);
  } else {
    this.stack = new Error().stack;
  }

  // Share the per-path error map with the document itself.
  if (instance) {
    instance.errors = this.errors;
  }
}
/*! | |||
* Inherits from MongooseError. | |||
*/ | |||
/*!
 * Inherits from MongooseError.
 */
ValidationError.prototype = Object.create(MongooseError.prototype);
// Point `constructor` back at ValidationError itself; it previously pointed
// at the parent, so `err.constructor` misreported MongooseError.
ValidationError.prototype.constructor = ValidationError;

/**
 * Console.log helper: "<name>: <per-path messages>".
 */
ValidationError.prototype.toString = function() {
  return this.name + ': ' + _generateMessage(this);
};

/*!
 * inspect helper: merge enumerable props onto an Error carrying our message.
 */
ValidationError.prototype.inspect = function() {
  return Object.assign(new Error(this.message), this);
};

if (util.inspect.custom) {
  /*!
   * Avoid Node deprecation warning DEP0079
   */
  ValidationError.prototype[util.inspect.custom] = ValidationError.prototype.inspect;
}

/*!
 * Helper for JSON.stringify: copy `message` (non-enumerable on Error) into
 * the serialized output.
 */
ValidationError.prototype.toJSON = function() {
  return Object.assign({}, this, { message: this.message });
};

/*!
 * Record a validation error for `path` and rebuild the aggregate message.
 */
ValidationError.prototype.addError = function(path, error) {
  this.errors[path] = error;
  this.message = this._message + ': ' + _generateMessage(this);
};
/*! | |||
* ignore | |||
*/ | |||
/*!
 * Build a comma-separated "path: message" summary from `err.errors`,
 * skipping self-references.
 */
function _generateMessage(err) {
  const parts = [];
  for (const key of Object.keys(err.errors || {})) {
    const pathError = err.errors[key];
    if (pathError === err) {
      continue;
    }
    parts.push(key + ': ' + pathError.message);
  }
  return parts.join(', ');
}
/*! | |||
* Module exports | |||
*/ | |||
module.exports = exports = ValidationError; |
@@ -1,89 +0,0 @@ | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
'use strict'; | |||
const MongooseError = require('./'); | |||
/** | |||
* Schema validator error | |||
* | |||
* @param {Object} properties | |||
* @inherits MongooseError | |||
* @api private | |||
*/ | |||
function ValidatorError(properties) {
  // Use the caller-supplied template, else the shared default template.
  const template = properties.message || MongooseError.messages.general.default;
  const message = this.formatMessage(template, properties);
  MongooseError.call(this, message);

  // Keep a copy of the properties with the formatted message folded in.
  properties = Object.assign({}, properties, { message: message });

  this.name = 'ValidatorError';
  if (Error.captureStackTrace) {
    Error.captureStackTrace(this);
  } else {
    this.stack = new Error().stack;
  }

  this.properties = properties;
  this.kind = properties.type;
  this.path = properties.path;
  this.value = properties.value;
  this.reason = properties.reason;
}
/*! | |||
* Inherits from MongooseError | |||
*/ | |||
/*!
 * Inherits from MongooseError
 */
ValidatorError.prototype = Object.create(MongooseError.prototype);
// Point `constructor` back at ValidatorError itself; it previously pointed
// at the parent, so `err.constructor` misreported MongooseError.
ValidatorError.prototype.constructor = ValidatorError;

/*!
 * The object used to define this validator. Not enumerable to hide
 * it from `require('util').inspect()` output re: gh-3925
 */
Object.defineProperty(ValidatorError.prototype, 'properties', {
  enumerable: false,
  writable: true,
  value: null
});
/*! | |||
* Formats error messages | |||
*/ | |||
/*!
 * Formats error messages: a function template is called with `properties`;
 * a string template has each `{KEY}` token replaced by `properties[key]`
 * (the `message` property itself is never substituted).
 */
ValidatorError.prototype.formatMessage = function(msg, properties) {
  if (typeof msg === 'function') {
    return msg(properties);
  }
  for (const propertyName of Object.keys(properties)) {
    if (propertyName === 'message') {
      continue;
    }
    msg = msg.replace('{' + propertyName.toUpperCase() + '}', properties[propertyName]);
  }
  return msg;
};

/*!
 * toString helper
 */
ValidatorError.prototype.toString = function() {
  return this.message;
};
/*! | |||
* exports | |||
*/ | |||
module.exports = ValidatorError; |
@@ -1,36 +0,0 @@ | |||
'use strict'; | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
const MongooseError = require('./'); | |||
/** | |||
* Version Error constructor. | |||
* | |||
* @inherits MongooseError | |||
* @api private | |||
*/ | |||
/**
 * Version Error constructor: raised when an optimistic-concurrency save
 * finds no document matching the expected `__v`.
 *
 * @param {Document} doc document whose save failed
 * @param {Number} currentVersion version the save expected
 * @param {Array} modifiedPaths paths that were modified
 * @inherits MongooseError
 * @api private
 */
function VersionError(doc, currentVersion, modifiedPaths) {
  const modifiedPathsStr = modifiedPaths.join(', ');
  MongooseError.call(this, 'No matching document found for id "' + doc._id +
    '" version ' + currentVersion + ' modifiedPaths "' + modifiedPathsStr + '"');
  this.name = 'VersionError';
  // Capture the stack here like the sibling error subclasses do, so the trace
  // starts at the throw site rather than inside MongooseError.
  if (Error.captureStackTrace) {
    Error.captureStackTrace(this);
  } else {
    this.stack = new Error().stack;
  }
  this.version = currentVersion;
  this.modifiedPaths = modifiedPaths;
}

/*!
 * Inherits from MongooseError.
 */
VersionError.prototype = Object.create(MongooseError.prototype);
// Point `constructor` back at the subclass itself; it previously pointed at
// the parent, so `err.constructor` misreported MongooseError.
VersionError.prototype.constructor = VersionError;
/*! | |||
* exports | |||
*/ | |||
module.exports = VersionError; |
@@ -1,87 +0,0 @@ | |||
'use strict'; | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
const ObjectId = require('../types/objectid'); | |||
const utils = require('../utils'); | |||
exports.flatten = flatten; | |||
exports.modifiedPaths = modifiedPaths; | |||
/*! | |||
* ignore | |||
*/ | |||
/*!
 * Flatten a (possibly nested) update object into dotted paths. Mongoose
 * objects are converted via toObject() before their keys are read. Arrays
 * are kept whole at their own path (and skipped entirely when
 * `options.skipArrays` is set).
 */
function flatten(update, path, options) {
  let keys;
  if (update && utils.isMongooseObject(update) && !Buffer.isBuffer(update)) {
    keys = Object.keys(update.toObject({ transform: false, virtuals: false }));
  } else {
    keys = Object.keys(update || {});
  }

  const prefix = path ? path + '.' : '';
  const result = {};

  for (const key of keys) {
    const value = update[key];
    result[prefix + key] = value;

    if (!shouldFlatten(value)) {
      continue;
    }
    if (options && options.skipArrays && Array.isArray(value)) {
      continue;
    }
    const nested = flatten(value, prefix + key, options);
    for (const nestedKey in nested) {
      result[nestedKey] = nested[nestedKey];
    }
    // Arrays stay assigned whole, even after their elements were flattened.
    if (Array.isArray(value)) {
      result[prefix + key] = value;
    }
  }

  return result;
}
/*! | |||
* ignore | |||
*/ | |||
/*!
 * Collect every dotted path touched by `update` into `result` (a map of
 * path -> true), recursing into nested objects.
 */
function modifiedPaths(update, path, result) {
  result = result || {};
  const prefix = path ? path + '.' : '';

  for (const key of Object.keys(update || {})) {
    let value = update[key];
    result[prefix + key] = true;

    if (utils.isMongooseObject(value) && !Buffer.isBuffer(value)) {
      value = value.toObject({ transform: false, virtuals: false });
    }
    if (shouldFlatten(value)) {
      modifiedPaths(value, prefix + key, result);
    }
  }

  return result;
}
/*! | |||
* ignore | |||
*/ | |||
/*!
 * True for plain-ish objects worth recursing into; Dates, ObjectIds,
 * Buffers, and empty arrays are treated as leaves. Falsy inputs are
 * returned as-is (the callers only test truthiness).
 */
function shouldFlatten(val) {
  if (!val) {
    return val;
  }
  if (typeof val !== 'object') {
    return false;
  }
  if (val instanceof Date) {
    return false;
  }
  if (val instanceof ObjectId) {
    return false;
  }
  if (Array.isArray(val) && val.length === 0) {
    return false;
  }
  return !(val instanceof Buffer);
}
@@ -1,71 +0,0 @@ | |||
'use strict'; | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
const async = require('async'); | |||
const utils = require('../../utils'); | |||
/** | |||
* Execute `fn` for every document in the cursor. If `fn` returns a promise, | |||
* will wait for the promise to resolve before iterating on to the next one. | |||
* Returns a promise that resolves when done. | |||
* | |||
* @param {Function} next the thunk to call to get the next document | |||
* @param {Function} fn | |||
* @param {Object} options | |||
* @param {Function} [callback] executed when all docs have been processed | |||
* @return {Promise} | |||
* @api public | |||
* @method eachAsync | |||
*/ | |||
module.exports = function eachAsync(next, fn, options, callback) {
  // How many documents to process concurrently; defaults to serial.
  const parallel = options.parallel || 1;
  // Run `fn` on one doc. If `fn` returns a thenable, wait on it; otherwise
  // continue synchronously. A rejection with no error object is replaced by
  // a placeholder Error so callers always see a truthy error.
  const handleNextResult = function(doc, callback) {
    const promise = fn(doc);
    if (promise && typeof promise.then === 'function') {
      promise.then(
        function() { callback(null); },
        function(error) { callback(error || new Error('`eachAsync()` promise rejected without error')); });
    } else {
      callback(null);
    }
  };
  const iterate = function(callback) {
    // Set once `next()` yields no doc; later queue tasks short-circuit.
    let drained = false;
    // Concurrency-1 queue serializes calls to `next()` even when multiple
    // workers run in parallel.
    const nextQueue = async.queue(function(task, cb) {
      if (drained) return cb();
      next(function(err, doc) {
        if (err) return cb(err);
        if (!doc) drained = true;
        cb(null, doc);
      });
    }, 1);
    // One worker: fetch the next doc via the queue, process it, repeat
    // until the cursor is drained or an error occurs.
    const getAndRun = function(cb) {
      nextQueue.push({}, function(err, doc) {
        if (err) return cb(err);
        if (!doc) return cb();
        handleNextResult(doc, function(err) {
          if (err) return cb(err);
          // Make sure to clear the stack re: gh-4697
          setTimeout(function() {
            getAndRun(cb);
          }, 0);
        });
      });
    };
    // Launch `parallel` workers; `callback` fires when all have finished.
    async.times(parallel, function(n, cb) {
      getAndRun(cb);
    }, callback);
  };
  // Return a promise when no callback was given, else invoke the callback.
  return utils.promiseOrCallback(callback, cb => {
    iterate(cb);
  });
};
@@ -1,25 +0,0 @@ | |||
'use strict'; | |||
/*! | |||
* ignore | |||
*/ | |||
module.exports = function cleanModifiedSubpaths(doc, path, options) { | |||
options = options || {}; | |||
const skipDocArrays = options.skipDocArrays; | |||
let deleted = 0; | |||
for (const modifiedPath of Object.keys(doc.$__.activePaths.states.modify)) { | |||
if (skipDocArrays) { | |||
const schemaType = doc.schema.path(modifiedPath); | |||
if (schemaType && schemaType.$isMongooseDocumentArray) { | |||
continue; | |||
} | |||
} | |||
if (modifiedPath.indexOf(path + '.') === 0) { | |||
delete doc.$__.activePaths.states.modify[modifiedPath]; | |||
++deleted; | |||
} | |||
} | |||
return deleted; | |||
}; |
@@ -1,148 +0,0 @@ | |||
'use strict'; | |||
const get = require('../../helpers/get'); | |||
const getSymbol = require('../../helpers/symbols').getSymbol; | |||
const utils = require('../../utils'); | |||
let Document; | |||
/*! | |||
* exports | |||
*/ | |||
exports.compile = compile; | |||
exports.defineKey = defineKey; | |||
/*! | |||
* Compiles schemas. | |||
*/ | |||
function compile(tree, proto, prefix, options) {
  Document = Document || require('../../document');

  const keys = Object.keys(tree);
  for (const key of keys) {
    const limb = tree[key];
    // A "subprops" node is a plain object with keys that is not itself a
    // type declaration (or is a nested path literally named `type`).
    const hasSubprops = utils.getFunctionName(limb.constructor) === 'Object' &&
      Object.keys(limb).length &&
      (!limb[options.typeKey] || (options.typeKey === 'type' && limb.type.type));
    defineKey(key, hasSubprops ? limb : null, proto, prefix, keys, options);
  }
}
/*! | |||
* Defines the accessor named prop on the incoming prototype. | |||
*/ | |||
function defineKey(prop, subprops, prototype, prefix, keys, options) {
  Document = Document || require('../../document');
  // Full dotted path of this property from the document root.
  // (`keys` is accepted for signature compatibility but not used here.)
  const path = (prefix ? prefix + '.' : '') + prop;
  prefix = prefix || '';
  if (subprops) {
    // Nested path: define a lazy getter that materializes a nested
    // Document-like object, cached per instance in `$__.getters`.
    Object.defineProperty(prototype, prop, {
      enumerable: true,
      configurable: true,
      get: function() {
        const _this = this;
        if (!this.$__.getters) {
          this.$__.getters = {};
        }
        if (!this.$__.getters[path]) {
          const nested = Object.create(Document.prototype, getOwnPropertyDescriptors(this));
          // save scope for nested getters/setters
          if (!prefix) {
            nested.$__.scope = this;
          }
          nested.$__.nestedPath = path;
          // Non-enumerable plumbing so the nested object behaves like a doc.
          Object.defineProperty(nested, 'schema', {
            enumerable: false,
            configurable: true,
            writable: false,
            value: prototype.schema
          });
          Object.defineProperty(nested, 'toObject', {
            enumerable: false,
            configurable: true,
            writable: false,
            value: function() {
              return utils.clone(_this.get(path, null, {
                virtuals: get(this, 'schema.options.toObject.virtuals', null)
              }));
            }
          });
          Object.defineProperty(nested, 'toJSON', {
            enumerable: false,
            configurable: true,
            writable: false,
            value: function() {
              return _this.get(path, null, {
                virtuals: get(_this, 'schema.options.toJSON.virtuals', null)
              });
            }
          });
          Object.defineProperty(nested, '$__isNested', {
            enumerable: false,
            configurable: true,
            writable: false,
            value: true
          });
          // Recursively define accessors for the sub-properties.
          compile(subprops, nested, path, options);
          this.$__.getters[path] = nested;
        }
        return this.$__.getters[path];
      },
      set: function(v) {
        if (v instanceof Document) {
          v = v.toObject({ transform: false });
        }
        // Setting through a nested object routes to the root document.
        const doc = this.$__.scope || this;
        return doc.$set(path, v);
      }
    });
  } else {
    // Leaf path: plain get/set accessors delegating to the document,
    // honoring the saved scope for nested objects.
    Object.defineProperty(prototype, prop, {
      enumerable: true,
      configurable: true,
      get: function() {
        return this[getSymbol].call(this.$__.scope || this, path);
      },
      set: function(v) {
        return this.$set.call(this.$__.scope || this, path, v);
      }
    });
  }
}
// gets descriptors for all properties of `object` | |||
// makes all properties non-enumerable to match previous behavior to #2211 | |||
// gets descriptors for all properties of `object`
// makes all properties non-enumerable to match previous behavior to #2211
function getOwnPropertyDescriptors(object) {
  const internalKeys = ['isNew', '$__', 'errors', '_doc'];
  const descriptors = {};

  for (const name of Object.getOwnPropertyNames(object)) {
    const descriptor = Object.getOwnPropertyDescriptor(object, name);
    // Assume these are schema paths, ignore them re: #5470
    if (descriptor.get) {
      continue;
    }
    descriptor.enumerable = internalKeys.indexOf(name) === -1;
    descriptors[name] = descriptor;
  }

  return descriptors;
}
@@ -1,43 +0,0 @@ | |||
'use strict'; | |||
const get = require('../get'); | |||
/*! | |||
* Like `schema.path()`, except with a document, because impossible to | |||
* determine path type without knowing the embedded discriminator key. | |||
*/ | |||
module.exports = function getEmbeddedDiscriminatorPath(doc, path, options) {
  options = options || {};
  // When true, return the path *type* string instead of the schema type.
  const typeOnly = options.typeOnly;
  const parts = path.split('.');
  let schema = null;
  let type = 'adhocOrUndefined';

  // Walk progressively longer prefixes of the path; at each prefix that
  // resolves on the base schema, check whether it carries embedded
  // discriminators that can resolve the remainder of the path.
  for (let i = 0; i < parts.length; ++i) {
    const subpath = parts.slice(0, i + 1).join('.');
    schema = doc.schema.path(subpath);
    if (schema == null) {
      continue;
    }
    type = doc.schema.pathType(subpath);
    if ((schema.$isSingleNested || schema.$isMongooseDocriminatorArrayElement) &&
        schema.schema.discriminators != null) {
      const discriminators = schema.schema.discriminators;
      // Read the discriminator key value off the document itself, since the
      // path type can't be known without it.
      const discriminatorKey = doc.get(subpath + '.' +
        get(schema, 'schema.options.discriminatorKey'));
      if (discriminatorKey == null || discriminators[discriminatorKey] == null) {
        continue;
      }
      const rest = parts.slice(i + 1).join('.');
      schema = discriminators[discriminatorKey].path(rest);
      if (schema != null) {
        type = discriminators[discriminatorKey].pathType(rest);
        break;
      }
    }
  }

  // Are we getting the whole schema or just the type, 'real', 'nested', etc.
  return typeOnly ? type : schema;
};
@@ -1,39 +0,0 @@ | |||
'use strict'; | |||
/*! | |||
* Simplified lodash.get to work around the annoying null quirk. See: | |||
* https://github.com/lodash/lodash/issues/3659 | |||
*/ | |||
module.exports = function get(obj, path, def) { | |||
const parts = path.split('.'); | |||
let rest = path; | |||
let cur = obj; | |||
for (const part of parts) { | |||
if (cur == null) { | |||
return def; | |||
} | |||
// `lib/cast.js` depends on being able to get dotted paths in updates, | |||
// like `{ $set: { 'a.b': 42 } }` | |||
if (cur[rest] != null) { | |||
return cur[rest]; | |||
} | |||
cur = getProperty(cur, part); | |||
rest = rest.substr(part.length + 1); | |||
} | |||
return cur == null ? def : cur; | |||
}; | |||
/*!
 * Read one property, supporting both Map entries and plain property access.
 * Nullish inputs are passed through unchanged.
 */
function getProperty(obj, prop) {
  if (obj == null) {
    return obj;
  }
  return obj instanceof Map ? obj.get(prop) : obj[prop];
}
@@ -1,12 +0,0 @@ | |||
/*! | |||
* Centralize this so we can more easily work around issues with people | |||
* stubbing out `process.nextTick()` in tests using sinon: | |||
* https://github.com/sinonjs/lolex#automatically-incrementing-mocked-time | |||
* See gh-6074 | |||
*/ | |||
'use strict'; | |||
// Single hop through the event loop; centralized so tests that stub
// `process.nextTick` (sinon/lolex) only have one call site to contend with.
module.exports = function immediate(cb) {
  return process.nextTick(cb);
};
@@ -1,53 +0,0 @@ | |||
'use strict'; | |||
const get = require('../get'); | |||
/*! | |||
* Register methods for this model | |||
* | |||
* @param {Model} model | |||
* @param {Schema} schema | |||
*/ | |||
module.exports = function applyMethods(model, schema) { | |||
function apply(method, schema) { | |||
Object.defineProperty(model.prototype, method, { | |||
get: function() { | |||
const h = {}; | |||
for (const k in schema.methods[method]) { | |||
h[k] = schema.methods[method][k].bind(this); | |||
} | |||
return h; | |||
}, | |||
configurable: true | |||
}); | |||
} | |||
for (const method of Object.keys(schema.methods)) { | |||
const fn = schema.methods[method]; | |||
if (schema.tree.hasOwnProperty(method)) { | |||
throw new Error('You have a method and a property in your schema both ' + | |||
'named "' + method + '"'); | |||
} | |||
if (schema.reserved[method] && | |||
!get(schema, `methodOptions.${method}.suppressWarning`, false)) { | |||
console.warn(`mongoose: the method name "${method}" is used by mongoose ` + | |||
'internally, overwriting it may cause bugs. If you\'re sure you know ' + | |||
'what you\'re doing, you can suppress this error by using ' + | |||
`\`schema.method('${method}', fn, { suppressWarning: true })\`.`); | |||
} | |||
if (typeof fn === 'function') { | |||
model.prototype[method] = fn; | |||
} else { | |||
apply(method, schema); | |||
} | |||
} | |||
// Recursively call `applyMethods()` on child schemas | |||
model.$appliedMethods = true; | |||
for (let i = 0; i < schema.childSchemas.length; ++i) { | |||
if (schema.childSchemas[i].model.$appliedMethods) { | |||
continue; | |||
} | |||
applyMethods(schema.childSchemas[i].model, schema.childSchemas[i].schema); | |||
} | |||
}; |
@@ -1,12 +0,0 @@ | |||
'use strict'; | |||
/*! | |||
* Register statics for this model | |||
* @param {Model} model | |||
* @param {Schema} schema | |||
*/ | |||
module.exports = function applyStatics(model, schema) { | |||
for (const i in schema.statics) { | |||
model[i] = schema.statics[i]; | |||
} | |||
}; |
@@ -1,136 +0,0 @@ | |||
'use strict'; | |||
const applyTimestampsToChildren = require('../update/applyTimestampsToChildren'); | |||
const applyTimestampsToUpdate = require('../update/applyTimestampsToUpdate'); | |||
const cast = require('../../cast'); | |||
const castUpdate = require('../query/castUpdate'); | |||
const setDefaultsOnInsert = require('../setDefaultsOnInsert'); | |||
/*! | |||
* Given a model and a bulkWrite op, return a thunk that handles casting and | |||
* validating the individual op. | |||
*/ | |||
module.exports = function castBulkWrite(model, op) { | |||
const now = model.base.now(); | |||
if (op['insertOne']) { | |||
return (callback) => { | |||
const doc = new model(op['insertOne']['document']); | |||
if (model.schema.options.timestamps != null) { | |||
doc.initializeTimestamps(); | |||
} | |||
op['insertOne']['document'] = doc; | |||
op['insertOne']['document'].validate({ __noPromise: true }, function(error) { | |||
if (error) { | |||
return callback(error, null); | |||
} | |||
callback(null); | |||
}); | |||
}; | |||
} else if (op['updateOne']) { | |||
op = op['updateOne']; | |||
return (callback) => { | |||
try { | |||
op['filter'] = cast(model.schema, op['filter']); | |||
op['update'] = castUpdate(model.schema, op['update'], { | |||
strict: model.schema.options.strict, | |||
overwrite: false | |||
}); | |||
if (op.setDefaultsOnInsert) { | |||
setDefaultsOnInsert(op['filter'], model.schema, op['update'], { | |||
setDefaultsOnInsert: true, | |||
upsert: op.upsert | |||
}); | |||
} | |||
if (model.schema.$timestamps != null) { | |||
const createdAt = model.schema.$timestamps.createdAt; | |||
const updatedAt = model.schema.$timestamps.updatedAt; | |||
applyTimestampsToUpdate(now, createdAt, updatedAt, op['update'], {}); | |||
} | |||
applyTimestampsToChildren(now, op['update'], model.schema); | |||
} catch (error) { | |||
return callback(error, null); | |||
} | |||
callback(null); | |||
}; | |||
} else if (op['updateMany']) { | |||
op = op['updateMany']; | |||
return (callback) => { | |||
try { | |||
op['filter'] = cast(model.schema, op['filter']); | |||
op['update'] = castUpdate(model.schema, op['update'], { | |||
strict: model.schema.options.strict, | |||
overwrite: false | |||
}); | |||
if (op.setDefaultsOnInsert) { | |||
setDefaultsOnInsert(op['filter'], model.schema, op['update'], { | |||
setDefaultsOnInsert: true, | |||
upsert: op.upsert | |||
}); | |||
} | |||
if (model.schema.$timestamps != null) { | |||
const createdAt = model.schema.$timestamps.createdAt; | |||
const updatedAt = model.schema.$timestamps.updatedAt; | |||
applyTimestampsToUpdate(now, createdAt, updatedAt, op['update'], {}); | |||
} | |||
applyTimestampsToChildren(now, op['update'], model.schema); | |||
} catch (error) { | |||
return callback(error, null); | |||
} | |||
callback(null); | |||
}; | |||
} else if (op['replaceOne']) { | |||
return (callback) => { | |||
try { | |||
op['replaceOne']['filter'] = cast(model.schema, | |||
op['replaceOne']['filter']); | |||
} catch (error) { | |||
return callback(error, null); | |||
} | |||
// set `skipId`, otherwise we get "_id field cannot be changed" | |||
const doc = new model(op['replaceOne']['replacement'], null, true); | |||
if (model.schema.options.timestamps != null) { | |||
doc.initializeTimestamps(); | |||
} | |||
op['replaceOne']['replacement'] = doc; | |||
op['replaceOne']['replacement'].validate({ __noPromise: true }, function(error) { | |||
if (error) { | |||
return callback(error, null); | |||
} | |||
callback(null); | |||
}); | |||
}; | |||
} else if (op['deleteOne']) { | |||
return (callback) => { | |||
try { | |||
op['deleteOne']['filter'] = cast(model.schema, | |||
op['deleteOne']['filter']); | |||
} catch (error) { | |||
return callback(error, null); | |||
} | |||
callback(null); | |||
}; | |||
} else if (op['deleteMany']) { | |||
return (callback) => { | |||
try { | |||
op['deleteMany']['filter'] = cast(model.schema, | |||
op['deleteMany']['filter']); | |||
} catch (error) { | |||
return callback(error, null); | |||
} | |||
callback(null); | |||
}; | |||
} else { | |||
return (callback) => { | |||
callback(new Error('Invalid op passed to `bulkWrite()`'), null); | |||
}; | |||
} | |||
}; |
@@ -1,12 +0,0 @@ | |||
'use strict'; | |||
module.exports = function once(fn) { | |||
let called = false; | |||
return function() { | |||
if (called) { | |||
return; | |||
} | |||
called = true; | |||
return fn.apply(null, arguments); | |||
}; | |||
}; |
@@ -1,85 +0,0 @@ | |||
'use strict'; | |||
module.exports = assignRawDocsToIdStructure; | |||
/*! | |||
* Assign `vals` returned by mongo query to the `rawIds` | |||
* structure returned from utils.getVals() honoring | |||
* query sort order if specified by user. | |||
* | |||
* This can be optimized. | |||
* | |||
* Rules: | |||
* | |||
* if the value of the path is not an array, use findOne rules, else find. | |||
* for findOne the results are assigned directly to doc path (including null results). | |||
* for find, if user specified sort order, results are assigned directly | |||
* else documents are put back in original order of array if found in results | |||
* | |||
* @param {Array} rawIds | |||
* @param {Array} vals | |||
* @param {Boolean} sort | |||
* @api private | |||
*/ | |||
function assignRawDocsToIdStructure(rawIds, resultDocs, resultOrder, options, recursed) {
  const reordered = [];
  // honor user specified sort order
  const useSort = options.sort && rawIds.length > 1;

  for (let idx = 0; idx < rawIds.length; ++idx) {
    const entry = rawIds[idx];

    // handle [ [id0, id2], [id3] ]
    if (Array.isArray(entry)) {
      assignRawDocsToIdStructure(entry, resultDocs, resultOrder, options, true);
      reordered.push(entry);
      continue;
    }

    // keep nulls for findOne unless sorting, which always
    // removes them (backward compat)
    if (entry === null && !useSort) {
      reordered.push(entry);
      continue;
    }

    const key = String(entry);
    let match = resultDocs[key];

    // If user wants separate copies of same doc, use this option
    if (options.clone) {
      match = match.constructor.hydrate(match._doc);
    }

    if (!recursed) {
      // apply findOne behavior - if document in results, assign, else assign null
      reordered[idx] = match || null;
      continue;
    }

    if (match) {
      if (useSort) {
        reordered[resultOrder[key]] = match;
      } else {
        reordered.push(match);
      }
    } else {
      reordered.push(entry);
    }
  }

  rawIds.length = 0;
  if (reordered.length) {
    // reassign the documents based on corrected order; forEach skips sparse
    // entries, which also handles sorted result sets safely.
    reordered.forEach(function(doc, i) {
      rawIds[i] = doc;
    });
  }
}
@@ -1,181 +0,0 @@ | |||
'use strict'; | |||
/*! | |||
* ignore | |||
*/ | |||
const Mixed = require('../../schema/mixed'); | |||
const get = require('../get'); | |||
const leanPopulateMap = require('./leanPopulateMap'); | |||
const mpath = require('mpath'); | |||
/*! | |||
* @param {Schema} schema | |||
* @param {Object} doc POJO | |||
* @param {string} path | |||
*/ | |||
module.exports = function getSchemaTypes(schema, doc, path) {
  // Fast path: `path` resolves directly on the schema.
  const pathschema = schema.path(path);
  const topLevelDoc = doc;
  if (pathschema) {
    return pathschema;
  }
  // Tries progressively shorter prefixes of `parts` until one resolves to a
  // schema path, then recurses on the remainder with the matching subschema.
  // Handles arrays, positional `$` parts, embedded discriminators, and
  // populated paths (both hydrated and lean).
  function search(parts, schema, subdoc, nestedPath) {
    let p = parts.length + 1;
    let foundschema;
    let trypath;
    while (p--) {
      trypath = parts.slice(0, p).join('.');
      foundschema = schema.path(trypath);
      if (foundschema == null) {
        continue;
      }
      if (foundschema.caster) {
        // array of Mixed?
        if (foundschema.caster instanceof Mixed) {
          return foundschema.caster;
        }
        // Collect discriminator schemas whose discriminator-key values
        // actually appear in the subdocument at `trypath`, so the remainder
        // of the path can be resolved against each matching discriminator.
        let schemas = null;
        if (doc != null && foundschema.schema != null && foundschema.schema.discriminators != null) {
          const discriminators = foundschema.schema.discriminators;
          const discriminatorKeyPath = trypath + '.' +
            foundschema.schema.options.discriminatorKey;
          const keys = subdoc ? mpath.get(discriminatorKeyPath, subdoc) || [] : [];
          schemas = Object.keys(discriminators).
            reduce(function(cur, discriminator) {
              if (keys.indexOf(discriminator) !== -1) {
                cur.push(discriminators[discriminator]);
              }
              return cur;
            }, []);
        }
        // Now that we found the array, we need to check if there
        // are remaining document paths to look up for casting.
        // Also we need to handle array.$.path since schema.path
        // doesn't work for that.
        // If there is no foundschema.schema we are dealing with
        // a path like array.$
        if (p !== parts.length && foundschema.schema) {
          let ret;
          if (parts[p] === '$') {
            if (p + 1 === parts.length) {
              // comments.$
              return foundschema;
            }
            // comments.$.comments.$.title
            ret = search(
              parts.slice(p + 1),
              schema,
              subdoc ? mpath.get(trypath, subdoc) : null,
              nestedPath.concat(parts.slice(0, p))
            );
            if (ret) {
              // A schema type found through a document array is flagged so
              // callers know it may correspond to multiple values.
              ret.$isUnderneathDocArray = ret.$isUnderneathDocArray ||
                !foundschema.schema.$isSingleNested;
            }
            return ret;
          }
          if (schemas != null && schemas.length > 0) {
            // Embedded discriminators matched: return an array with one
            // schema type per discriminator that resolves the remainder.
            ret = [];
            for (let i = 0; i < schemas.length; ++i) {
              const _ret = search(
                parts.slice(p),
                schemas[i],
                subdoc ? mpath.get(trypath, subdoc) : null,
                nestedPath.concat(parts.slice(0, p))
              );
              if (_ret != null) {
                _ret.$isUnderneathDocArray = _ret.$isUnderneathDocArray ||
                  !foundschema.schema.$isSingleNested;
                if (_ret.$isUnderneathDocArray) {
                  ret.$isUnderneathDocArray = true;
                }
                ret.push(_ret);
              }
            }
            return ret;
          } else {
            // Recurse into the array's embedded schema for the remainder.
            ret = search(
              parts.slice(p),
              foundschema.schema,
              subdoc ? mpath.get(trypath, subdoc) : null,
              nestedPath.concat(parts.slice(0, p))
            );
            if (ret) {
              ret.$isUnderneathDocArray = ret.$isUnderneathDocArray ||
                !foundschema.schema.$isSingleNested;
            }
            return ret;
          }
        }
      }
      const fullPath = nestedPath.concat([trypath]).join('.');
      // Hydrated doc with this prefix populated: continue the search
      // against the foreign model's schema.
      if (topLevelDoc.$__ && topLevelDoc.populated(fullPath) && p < parts.length) {
        const schema = get(doc.$__.populated[fullPath], 'options.model.schema');
        if (schema != null) {
          const ret = search(
            parts.slice(p),
            schema,
            subdoc ? mpath.get(trypath, subdoc) : null,
            nestedPath.concat(parts.slice(0, p))
          );
          if (ret) {
            ret.$isUnderneathDocArray = ret.$isUnderneathDocArray ||
              !schema.$isSingleNested;
          }
          return ret;
        }
      }
      // Lean-populated results have no `$__` state; recover the foreign
      // model from the `leanPopulateMap` side channel instead.
      const _val = get(topLevelDoc, trypath);
      if (_val != null) {
        const model = Array.isArray(_val) && _val.length > 0 ?
          leanPopulateMap.get(_val[0]) :
          leanPopulateMap.get(_val);
        // Populated using lean, `leanPopulateMap` value is the foreign model
        const schema = model != null ? model.schema : null;
        if (schema != null) {
          const ret = search(
            parts.slice(p),
            schema,
            subdoc ? mpath.get(trypath, subdoc) : null,
            nestedPath.concat(parts.slice(0, p))
          );
          if (ret) {
            ret.$isUnderneathDocArray = ret.$isUnderneathDocArray ||
              !schema.$isSingleNested;
          }
          return ret;
        }
      }
      return foundschema;
    }
  }
  // look for arrays
  const parts = path.split('.');
  for (let i = 0; i < parts.length; ++i) {
    if (parts[i] === '$') {
      // Re: gh-5628, because `schema.path()` doesn't take $ into account.
      parts[i] = '0';
    }
  }
  return search(parts, schema, doc, []);
};
@@ -1,61 +0,0 @@ | |||
'use strict'; | |||
module.exports = getVirtual; | |||
/*! | |||
* ignore | |||
*/ | |||
/*!
 * Looks up virtual `name` on `schema`, descending into nested paths and
 * subdocument schemas (including embedded discriminators). When the virtual
 * lives on a child schema, its `$nestedSchemaPath` property is set to the
 * path of the enclosing subdocument. Returns `null` when `name` hits a
 * non-schema path, `undefined` when the path walk is exhausted.
 */
function getVirtual(schema, name) {
  // Fast path: virtual declared directly on this schema.
  if (schema.virtuals[name]) {
    return schema.virtuals[name];
  }

  const segments = name.split('.');
  let partial = '';
  let parentPath = '';

  for (let idx = 0; idx < segments.length; ++idx) {
    partial += (partial.length > 0 ? '.' : '') + segments[idx];

    const virtual = schema.virtuals[partial];
    if (virtual) {
      if (idx === segments.length - 1) {
        virtual.$nestedSchemaPath = parentPath;
        return virtual;
      }
      continue;
    }

    if (schema.nested[partial]) {
      continue;
    }

    const pathObj = schema.paths[partial];
    if (pathObj && pathObj.schema) {
      // Descend into the subdocument schema and retry with the remainder.
      schema = pathObj.schema;
      const rest = segments.slice(idx + 1).join('.');

      const childVirtual = schema.virtuals[rest];
      if (childVirtual) {
        if (idx === segments.length - 2) {
          childVirtual.$nestedSchemaPath =
            [parentPath, partial].filter(v => !!v).join('.');
          return childVirtual;
        }
        continue;
      }

      // The virtual may live on one of this subschema's discriminators.
      if (idx + 1 < segments.length && schema.discriminators) {
        for (const key of Object.keys(schema.discriminators)) {
          const _virtual = getVirtual(schema.discriminators[key], rest);
          if (_virtual != null) {
            _virtual.$nestedSchemaPath = [parentPath, partial].
              filter(v => !!v).join('.');
            return _virtual;
          }
        }
      }

      parentPath += (parentPath.length > 0 ? '.' : '') + partial;
      partial = '';
      continue;
    }

    return null;
  }
}
@@ -1,7 +0,0 @@ | |||
'use strict';

/*!
 * Maps lean-populated result objects to the foreign model they were
 * populated with. Lean results are plain objects with no `$__` state to
 * store this on, so it lives in this side-channel map instead (read back
 * via `leanPopulateMap.get(...)` in `getSchemaTypes`).
 */

module.exports = new WeakMap();
@@ -1,45 +0,0 @@ | |||
'use strict'; | |||
module.exports = normalizeRefPath;

/*!
 * Resolves a `refPath` option for `populatedPath` on `doc`. A function
 * `refPath` is invoked with the document. If `populatedPath` contains
 * numeric array indexes (`a.0.b`), the same indexes are spliced into the
 * returned ref path (`a.c` -> `a.0.c`).
 */
function normalizeRefPath(refPath, doc, populatedPath) {
  if (refPath == null) {
    return refPath;
  }

  if (typeof refPath === 'function') {
    refPath = refPath.call(doc, doc, populatedPath);
  }

  // If populated path has numerics, the end `refPath` should too. For example,
  // if populating `a.0.b` instead of `a.b` and `b` has `refPath = a.c`, we
  // should return `a.0.c` for the refPath.
  const numericProp = /(\.\d+$|\.\d+\.)/g;
  if (!numericProp.test(populatedPath)) {
    return refPath;
  }

  // Splits into alternating chunks: 2nd, 4th, etc. entries are the numeric
  // props. For example: `[ 'a', '.0.', 'b' ]`
  const chunks = populatedPath.split(numericProp);
  if (chunks[chunks.length - 1] === '') {
    throw new Error('Can\'t populate individual element in an array');
  }

  let result = '';
  let remaining = refPath;
  for (let i = 0; i < chunks.length; i += 2) {
    const chunk = chunks[i];
    if (remaining.startsWith(chunk + '.')) {
      // Splice the numeric segment into the ref path after this chunk.
      result += remaining.substr(0, chunk.length) + chunks[i + 1];
      remaining = remaining.substr(chunk.length + 1);
    } else if (i === chunks.length - 1) {
      result += remaining;
      remaining = '';
      break;
    } else {
      throw new Error('Could not normalize ref path, chunk ' + chunk + ' not in populated path');
    }
  }

  return result;
}
@@ -1,19 +0,0 @@ | |||
'use strict'; | |||
const MongooseError = require('../../error/mongooseError'); | |||
const util = require('util'); | |||
module.exports = validateRef; | |||
/*!
 * A `ref` option must be either a model name (string) or a function that
 * resolves to one; anything else is a schema definition error.
 */
function validateRef(ref, path) {
  const refType = typeof ref;
  if (refType === 'string' || refType === 'function') {
    return;
  }

  throw new MongooseError('Invalid ref at path "' + path + '". Got ' +
    util.inspect(ref, { depth: 0 }));
}
@@ -1,8 +0,0 @@ | |||
'use strict';

// Detect Jest's default jsdom test environment: the `jest` global plus a
// browser-style `window` global both being defined. Warn and point users at
// the docs for configuring Jest to test Node.js apps instead of failing in
// less obvious ways later.
if (typeof jest !== 'undefined' && typeof window !== 'undefined') {
  console.warn('Mongoose: looks like you\'re trying to test a Mongoose app ' +
    'with Jest\'s default jsdom test environment. Please make sure you read ' +
    'Mongoose\'s docs on configuring Jest to test Node.js apps: ' +
    'http://mongoosejs.com/docs/jest.html');
}
@@ -1,18 +0,0 @@ | |||
'use strict'; | |||
/*! | |||
* ignore | |||
*/ | |||
module.exports = isDefiningProjection;

/*!
 * Returns whether `val`, the value of a single projection key, determines
 * whether the projection as a whole is inclusive or exclusive.
 */
function isDefiningProjection(val) {
  if (val == null) {
    // `undefined` or `null` become exclusive projections
    return true;
  }
  if (typeof val !== 'object') {
    return true;
  }
  // Only `$meta` and `$slice` values leave inclusive/exclusive undecided.
  return !('$meta' in val) && !('$slice' in val);
}
@@ -1,28 +0,0 @@ | |||
'use strict'; | |||
const isDefiningProjection = require('./isDefiningProjection'); | |||
/*! | |||
* ignore | |||
*/ | |||
module.exports = isExclusive;

/*!
 * Determines whether `projection` is exclusive (`{ path: 0 }`, returns
 * `true`), inclusive (`{ path: 1 }`, returns `false`), or undecided
 * (returns `null`, e.g. only `$meta`/`$slice` values).
 */
function isExclusive(projection) {
  const keys = Object.keys(projection);
  let ki = keys.length;
  let exclude = null;

  if (ki === 1 && keys[0] === '_id') {
    // A lone `_id` key decides on its own: `{ _id: 0 }` is exclusive.
    // (Was `!!projection[keys[ki]]`, an out-of-bounds read of `keys[1]`
    // that always evaluated to `false`.)
    exclude = !projection[keys[0]];
  } else {
    while (ki--) {
      // Does this projection explicitly define inclusion/exclusion?
      // Explicitly avoid `$meta` and `$slice`
      if (keys[ki] !== '_id' && isDefiningProjection(projection[keys[ki]])) {
        exclude = !projection[keys[ki]];
        break;
      }
    }
  }
  return exclude;
}
@@ -1,34 +0,0 @@ | |||
'use strict'; | |||
const isDefiningProjection = require('./isDefiningProjection'); | |||
/*! | |||
* ignore | |||
*/ | |||
module.exports = isInclusive;

/*!
 * Returns true if `projection` explicitly includes at least one path with a
 * truthy, projection-defining value (`{ path: 1 }`), meaning the projection
 * is inclusive.
 */
function isInclusive(projection) {
  if (projection == null) {
    return false;
  }

  for (const prop of Object.keys(projection)) {
    // Plus paths can't define the projection (see gh-7050)
    if (prop.startsWith('+')) {
      continue;
    }
    // If field is truthy (1, true, etc.) and not an object, then this
    // projection must be inclusive. If object, assume its $meta, $slice, etc.
    if (isDefiningProjection(projection[prop]) && !!projection[prop]) {
      return true;
    }
  }

  return false;
}
@@ -1,35 +0,0 @@ | |||
'use strict'; | |||
const isDefiningProjection = require('./isDefiningProjection'); | |||
/*! | |||
* Determines if `path` is excluded by `projection` | |||
* | |||
* @param {Object} projection | |||
* @param {string} path | |||
* @return {Boolean} | |||
*/ | |||
module.exports = isPathExcluded;

/*!
 * Determines if `path` is excluded by `projection`
 *
 * @param {Object} projection
 * @param {string} path
 * @return {Boolean}
 */
function isPathExcluded(projection, path) {
  if (path === '_id') {
    // `_id` is included unless explicitly suppressed.
    return projection._id === 0;
  }

  // Classify the projection as inclusive or exclusive from the first key
  // that actually defines it (skipping `$meta`/`$slice` values).
  const paths = Object.keys(projection);
  let type = null;

  for (const _path of paths) {
    if (isDefiningProjection(projection[_path])) {
      // Fix: classify using the defining key's own value (`_path`), not the
      // queried `path`, which may not be present in the projection at all
      // (that always produced 'exclusive' for absent paths, so paths left
      // out of an inclusive projection were reported as NOT excluded).
      type = projection[_path] === 1 ? 'inclusive' : 'exclusive';
      break;
    }
  }

  if (type === 'inclusive') {
    return projection[path] !== 1;
  }
  if (type === 'exclusive') {
    return projection[path] === 0;
  }
  return false;
}
@@ -1,28 +0,0 @@ | |||
'use strict'; | |||
/*! | |||
* ignore | |||
*/ | |||
module.exports = isPathSelectedInclusive;

/*!
 * Returns true if `path`, or one of its parent paths, is a selected key in
 * the inclusive projection `fields`.
 */
function isPathSelectedInclusive(fields, path) {
  const chunks = path.split('.');
  let cur = '';
  let j;
  let keys;
  let numKeys;
  for (let i = 0; i < chunks.length; ++i) {
    // Fix: the original `cur.length ? '.' : '' + chunks[i]` only appended
    // the chunk on the first iteration due to operator precedence, so a
    // nested path like 'a.b' could never match `fields['a.b']`.
    cur += (cur.length ? '.' : '') + chunks[i];
    if (fields[cur]) {
      keys = Object.keys(fields);
      numKeys = keys.length;
      for (j = 0; j < numKeys; ++j) {
        // NOTE(review): this inner loop has no observable effect (its body
        // is just `continue`); retained as-is (with `keys[j]` instead of
        // the out-of-range `keys[i]`) pending clarification of intent.
        if (keys[j].indexOf(cur + '.') === 0 && keys[j].indexOf(path) !== 0) {
          continue;
        }
      }
      return true;
    }
  }
  return false;
}
@@ -1,66 +0,0 @@ | |||
'use strict';

module.exports = applyQueryMiddleware;

/*!
 * Query functions whose thunks get query middleware applied.
 */
applyQueryMiddleware.middlewareFunctions = [
  'count',
  'countDocuments',
  'deleteMany',
  'deleteOne',
  'estimatedDocumentCount',
  'find',
  'findOne',
  'findOneAndDelete',
  'findOneAndRemove',
  'findOneAndReplace',
  'findOneAndUpdate',
  'remove',
  'replaceOne',
  'update',
  'updateMany',
  'updateOne'
];

/*!
 * Apply query middleware
 *
 * @param {Query} Query constructor
 * @param {Model} model
 */
function applyQueryMiddleware(Query, model) {
  const kareemOptions = {
    useErrorHandlers: true,
    numCallbackParams: 1,
    nullResultByDefault: true
  };

  // Keep only hooks that apply to queries: `updateOne` hooks count as query
  // middleware unless `query: false`; `remove` hooks only when `query` is
  // explicitly truthy; everything else passes through.
  const middleware = model.hooks.filter(hook => {
    switch (hook.name) {
      case 'updateOne':
        return hook.query == null || !!hook.query;
      case 'remove':
        return !!hook.query;
      default:
        return true;
    }
  });

  // `update()` thunk has a different name because `_update` was already taken
  Query.prototype._execUpdate = middleware.createWrapper('update',
    Query.prototype._execUpdate, null, kareemOptions);

  for (const fn of applyQueryMiddleware.middlewareFunctions) {
    if (fn === 'update') {
      continue;
    }
    Query.prototype[`_${fn}`] = middleware.createWrapper(fn,
      Query.prototype[`_${fn}`], null, kareemOptions);
  }
}
@@ -1,428 +0,0 @@ | |||
'use strict'; | |||
const CastError = require('../../error/cast'); | |||
const StrictModeError = require('../../error/strict'); | |||
const ValidationError = require('../../error/validation'); | |||
const castNumber = require('../../cast/number'); | |||
const getEmbeddedDiscriminatorPath = require('./getEmbeddedDiscriminatorPath'); | |||
const utils = require('../../utils'); | |||
/*! | |||
* Casts an update op based on the given schema | |||
* | |||
* @param {Schema} schema | |||
* @param {Object} obj | |||
* @param {Object} options | |||
* @param {Boolean} [options.overwrite] defaults to false | |||
* @param {Boolean|String} [options.strict] defaults to true | |||
* @param {Query} context passed to setters | |||
* @return {Boolean} true iff the update is non-empty | |||
*/ | |||
module.exports = function castUpdate(schema, obj, options, context, filter) {
  if (!obj) {
    return undefined;
  }
  const ops = Object.keys(obj);
  let i = ops.length;
  const ret = {};
  let hasKeys;
  let val;
  let hasDollarKey = false;
  const overwrite = options.overwrite;
  filter = filter || {};
  // First pass: normalize top-level non-`$` keys into a `$set`, so
  // `{ name: 'x' }` becomes `{ $set: { name: 'x' } }` (skipped entirely in
  // overwrite mode). Note this mutates `ops` while iterating backwards.
  while (i--) {
    const op = ops[i];
    // if overwrite is set, don't do any of the special $set stuff
    if (op[0] !== '$' && !overwrite) {
      // fix up $set sugar
      if (!ret.$set) {
        if (obj.$set) {
          ret.$set = obj.$set;
        } else {
          ret.$set = {};
        }
      }
      ret.$set[op] = obj[op];
      ops.splice(i, 1);
      if (!~ops.indexOf('$set')) ops.push('$set');
    } else if (op === '$set') {
      if (!ret.$set) {
        ret[op] = obj[op];
      }
    } else {
      ret[op] = obj[op];
    }
  }
  // cast each value
  i = ops.length;
  // if we get passed {} for the update, we still need to respect that when it
  // is an overwrite scenario
  if (overwrite) {
    hasKeys = true;
  }
  // Second pass: cast each operator's payload in place via `walkUpdatePath`.
  while (i--) {
    const op = ops[i];
    val = ret[op];
    hasDollarKey = hasDollarKey || op.charAt(0) === '$';
    if (val &&
        typeof val === 'object' &&
        !Buffer.isBuffer(val) &&
        (!overwrite || hasDollarKey)) {
      hasKeys |= walkUpdatePath(schema, val, op, options, context, filter);
    } else if (overwrite && ret && typeof ret === 'object') {
      // if we are just using overwrite, cast the query and then we will
      // *always* return the value, even if it is an empty object. We need to
      // set hasKeys above because we need to account for the case where the
      // user passes {} and wants to clobber the whole document
      // Also, _walkUpdatePath expects an operation, so give it $set since that
      // is basically what we're doing
      walkUpdatePath(schema, ret, '$set', options, context, filter);
    } else {
      const msg = 'Invalid atomic update value for ' + op + '. '
          + 'Expected an object, received ' + typeof val;
      throw new Error(msg);
    }
  }
  // Falsy when the casted update turned out empty; otherwise the casted
  // update object itself.
  return hasKeys && ret;
};
/*! | |||
* Walk each path of obj and cast its values | |||
* according to its schema. | |||
* | |||
* @param {Schema} schema | |||
* @param {Object} obj - part of a query | |||
* @param {String} op - the atomic operator ($pull, $set, etc) | |||
* @param {Object} options | |||
* @param {Boolean|String} [options.strict] | |||
* @param {Boolean} [options.omitUndefined] | |||
* @param {Query} context | |||
* @param {String} pref - path prefix (internal only) | |||
* @return {Bool} true if this path has keys to update | |||
* @api private | |||
*/ | |||
function walkUpdatePath(schema, obj, op, options, context, filter, pref) {
  const strict = options.strict;
  const prefix = pref ? pref + '.' : '';
  const keys = Object.keys(obj);
  let i = keys.length;
  let hasKeys = false;
  let schematype;
  let key;
  let val;
  // Collects cast errors across keys when `multipleCastError` is set;
  // thrown once at the end (see `_handleCastError`).
  let aggregatedError = null;
  // Query-level `useNestedStrict` option overrides the schema-level one.
  let useNestedStrict;
  if (options.useNestedStrict === undefined) {
    useNestedStrict = schema.options.useNestedStrict;
  } else {
    useNestedStrict = options.useNestedStrict;
  }
  while (i--) {
    key = keys[i];
    val = obj[key];
    if (val && val.constructor.name === 'Object') {
      // watch for embedded doc schemas
      schematype = schema._getSchema(prefix + key);
      if (schematype && schematype.caster && op in castOps) {
        // embedded doc schema
        if ('$each' in val) {
          // `$push`/`$addToSet` with `$each`: cast the array payload and
          // carry over the `$slice`/`$sort`/`$position` modifiers untouched.
          hasKeys = true;
          try {
            obj[key] = {
              $each: castUpdateVal(schematype, val.$each, op, key, context, prefix + key)
            };
          } catch (error) {
            aggregatedError = _handleCastError(error, context, key, aggregatedError);
          }
          if (val.$slice != null) {
            // `| 0` coerces `$slice` to a 32-bit integer.
            obj[key].$slice = val.$slice | 0;
          }
          if (val.$sort) {
            obj[key].$sort = val.$sort;
          }
          if (!!val.$position || val.$position === 0) {
            obj[key].$position = val.$position;
          }
        } else {
          try {
            obj[key] = castUpdateVal(schematype, val, op, key, context, prefix + key);
          } catch (error) {
            aggregatedError = _handleCastError(error, context, key, aggregatedError);
          }
          if (options.omitUndefined && obj[key] === void 0) {
            // `omitUndefined`: drop keys whose casted value is undefined.
            delete obj[key];
            continue;
          }
          hasKeys = true;
        }
      } else if ((op === '$currentDate') || (op in castOps && schematype)) {
        // $currentDate can take an object
        try {
          obj[key] = castUpdateVal(schematype, val, op, key, context, prefix + key);
        } catch (error) {
          aggregatedError = _handleCastError(error, context, key, aggregatedError);
        }
        if (options.omitUndefined && obj[key] === void 0) {
          delete obj[key];
          continue;
        }
        hasKeys = true;
      } else {
        // Nested object with no casting schema type: resolve the effective
        // strict mode (possibly from a nested schema) and recurse.
        const pathToCheck = (prefix + key);
        const v = schema._getPathType(pathToCheck);
        let _strict = strict;
        if (useNestedStrict &&
            v &&
            v.schema &&
            'strict' in v.schema.options) {
          _strict = v.schema.options.strict;
        }
        if (v.pathType === 'undefined') {
          if (_strict === 'throw') {
            throw new StrictModeError(pathToCheck);
          } else if (_strict) {
            delete obj[key];
            continue;
          }
        }
        // gh-2314
        // we should be able to set a schema-less field
        // to an empty object literal
        hasKeys |= walkUpdatePath(schema, val, op, options, context, filter, prefix + key) ||
          (utils.isObject(val) && Object.keys(val).length === 0);
      }
    } else {
      // Leaf value (or non-plain object). `$each`/`$or`/`$and`/`$in` keys
      // resolve against the parent path, not themselves.
      const checkPath = (key === '$each' || key === '$or' || key === '$and' || key === '$in') ?
        pref : prefix + key;
      schematype = schema._getSchema(checkPath);
      let pathDetails = schema._getPathType(checkPath);
      // If no schema type, check for embedded discriminators
      if (schematype == null) {
        const _res = getEmbeddedDiscriminatorPath(schema, obj, filter, checkPath);
        if (_res.schematype != null) {
          schematype = _res.schematype;
          // NOTE(review): `_res.type` is the path-type value, so
          // `pathDetails.schema` / `pathDetails.pathType` below may be
          // undefined on this branch — confirm intent.
          pathDetails = _res.type;
        }
      }
      let isStrict = strict;
      if (useNestedStrict &&
          pathDetails &&
          pathDetails.schema &&
          'strict' in pathDetails.schema.options) {
        isStrict = pathDetails.schema.options.strict;
      }
      // Strict mode and no schema type for a non-real/non-nested path:
      // the key is unknown and gets dropped (or throws).
      const skip = isStrict &&
        !schematype &&
        !/real|nested/.test(pathDetails.pathType);
      if (skip) {
        // Even if strict is `throw`, avoid throwing an error because of
        // virtuals because of #6731
        if (isStrict === 'throw' && schema.virtuals[checkPath] == null) {
          throw new StrictModeError(prefix + key);
        } else {
          delete obj[key];
        }
      } else {
        // gh-1845 temporary fix: ignore $rename. See gh-3027 for tracking
        // improving this.
        if (op === '$rename') {
          hasKeys = true;
          continue;
        }
        try {
          obj[key] = castUpdateVal(schematype, val, op, key, context, prefix + key);
        } catch (error) {
          aggregatedError = _handleCastError(error, context, key, aggregatedError);
        }
        // Pushing a raw array onto a non-array-of-arrays path means
        // "push each element": wrap it in `$each`.
        if (Array.isArray(obj[key]) && (op === '$addToSet' || op === '$push') && key !== '$each') {
          if (schematype && schematype.caster && !schematype.caster.$isMongooseArray) {
            obj[key] = { $each: obj[key] };
          }
        }
        if (options.omitUndefined && obj[key] === void 0) {
          delete obj[key];
          continue;
        }
        hasKeys = true;
      }
    }
  }
  if (aggregatedError != null) {
    throw aggregatedError;
  }
  return hasKeys;
}
/*! | |||
* ignore | |||
*/ | |||
/*!
 * Collects cast errors into a single aggregated ValidationError when the
 * query has the `multipleCastError` option set; otherwise rethrows the
 * error immediately.
 */
function _handleCastError(error, query, key, aggregatedError) {
  // Without `multipleCastError`, fail fast on the first cast error.
  if (typeof query !== 'object' || !query.options.multipleCastError) {
    throw error;
  }
  const aggregated = aggregatedError || new ValidationError();
  aggregated.addError(key, error);
  return aggregated;
}
/*! | |||
* These operators should be cast to numbers instead | |||
* of their path schema type. | |||
*/ | |||
const numberOps = {
  $pop: 1,
  $inc: 1
};

/*!
 * These ops require no casting because the RHS doesn't do anything.
 */

const noCastOps = {
  $unset: 1
};

/*!
 * These operators require casting docs
 * to real Documents for Update operations.
 */

const castOps = {
  $push: 1,
  $addToSet: 1,
  $set: 1,
  $setOnInsert: 1
};

/*!
 * Operators whose right-hand side replaces the path's value outright.
 * `castUpdateVal` passes `$skipQueryCastForUpdate` for these when the path
 * is a mongoose array with a parent schema.
 */

const overwriteOps = {
  $set: 1,
  $setOnInsert: 1
};
/*! | |||
* Casts `val` according to `schema` and atomic `op`. | |||
* | |||
* @param {SchemaType} schema | |||
* @param {Object} val | |||
* @param {String} op - the atomic operator ($pull, $set, etc) | |||
* @param {String} $conditional | |||
* @param {Query} context | |||
* @api private | |||
*/ | |||
function castUpdateVal(schema, val, op, $conditional, context, path) {
  if (!schema) {
    // non-existing schema path
    if (op in numberOps) {
      // `$inc`/`$pop` must still be numeric even without a schema path.
      try {
        return castNumber(val);
      } catch (err) {
        throw new CastError('number', val, path);
      }
    }
    return val;
  }
  // Array/embedded-doc payload under a casting op (`$push`, `$set`, ...):
  // cast via the array schema so subdocs get ObjectIds, defaults, etc.
  const cond = schema.caster && op in castOps &&
      (utils.isObject(val) || Array.isArray(val));
  if (cond && op !== '$set') {
    // Cast values for ops that add data to MongoDB.
    // Ensures embedded documents get ObjectIds etc.
    const tmp = schema.cast(val);
    if (Array.isArray(val)) {
      val = tmp;
    } else if (Array.isArray(tmp)) {
      // Casting a single value through an array schema yields an array;
      // unwrap back to a single element.
      val = tmp[0];
    } else {
      val = tmp;
    }
    return val;
  } else if (cond && op === '$set') {
    return schema.cast(val);
  }
  if (op in noCastOps) {
    return val;
  }
  if (op in numberOps) {
    // Null and undefined not allowed for $pop, $inc
    if (val == null) {
      throw new CastError('number', val, schema.path);
    }
    if (op === '$inc') {
      // Support `$inc` with long, int32, etc. (gh-4283)
      return schema.castForQueryWrapper({
        val: val,
        context: context
      });
    }
    try {
      return castNumber(val);
    } catch (error) {
      throw new CastError('number', val, schema.path);
    }
  }
  if (op === '$currentDate') {
    // `$currentDate` takes either a boolean or `{ $type: 'date'|'timestamp' }`.
    if (typeof val === 'object') {
      return {$type: val.$type};
    }
    return Boolean(val);
  }
  if (/^\$/.test($conditional)) {
    // Conditional operator key (`$gt`, `$in`, ...): cast as a query filter.
    return schema.castForQueryWrapper({
      $conditional: $conditional,
      val: val,
      context: context
    });
  }
  if (overwriteOps[op]) {
    return schema.castForQueryWrapper({
      val: val,
      context: context,
      $skipQueryCastForUpdate: val != null && schema.$isMongooseArray && schema.$parentSchema
    });
  }
  return schema.castForQueryWrapper({ val: val, context: context });
}
@@ -1,47 +0,0 @@ | |||
'use strict'; | |||
const helpers = require('../../queryhelpers'); | |||
module.exports = completeMany; | |||
/*! | |||
* Given a model and an array of docs, hydrates all the docs to be instances | |||
* of the model. Used to initialize docs returned from the db from `find()` | |||
* | |||
* @param {Model} model | |||
* @param {Array} docs | |||
* @param {Object} fields the projection used, including `select` from schemas | |||
* @param {Object} userProvidedFields the user-specified projection | |||
* @param {Object} opts | |||
* @param {Array} [opts.populated] | |||
* @param {ClientSession} [opts.session] | |||
* @param {Function} callback | |||
*/ | |||
function completeMany(model, docs, fields, userProvidedFields, opts, callback) {
  const arr = [];
  let count = docs.length;
  const len = count;
  let error = null;
  // Per-doc completion callback: counts down and fires `callback` exactly
  // once, on `process.nextTick`, after the last doc finishes initializing.
  // Only the first error encountered is reported.
  function init(_error) {
    if (_error != null) {
      error = error || _error;
    }
    if (error != null) {
      --count || process.nextTick(() => callback(error));
      return;
    }
    --count || process.nextTick(() => callback(error, arr));
  }
  for (let i = 0; i < len; ++i) {
    arr[i] = helpers.createModel(model, docs[i], fields, userProvidedFields);
    try {
      arr[i].init(docs[i], opts, init);
    } catch (error) {
      // NOTE: this `error` intentionally shadows the outer accumulator;
      // `init()` folds the caught error into it.
      init(error);
    }
    // Propagate the session even when init failed synchronously.
    arr[i].$session(opts.session);
  }
}
@@ -1,53 +0,0 @@ | |||
'use strict'; | |||
const get = require('../get'); | |||
/*! | |||
* Like `schema.path()`, except with a document, because impossible to | |||
* determine path type without knowing the embedded discriminator key. | |||
*/ | |||
module.exports = function getEmbeddedDiscriminatorPath(schema, update, filter, path) {
  const parts = path.split('.');
  let schematype = null;
  let type = 'adhocOrUndefined';
  filter = filter || {};
  // `update` is normalized here but not otherwise read in this function.
  update = update || {};
  for (let i = 0; i < parts.length; ++i) {
    // Rewrite positional `$` segments to `0` so `schema.path()` can
    // resolve them.
    const subpath = parts.slice(0, i + 1).join('.').
      replace(/\.\$\./i, '.0.').replace(/\.\$$/, '.0');
    schematype = schema.path(subpath);
    if (schematype == null) {
      continue;
    }
    type = schema.pathType(subpath);
    if ((schematype.$isSingleNested || schematype.$isMongooseDocumentArrayElement) &&
        schematype.schema.discriminators != null) {
      const discriminators = schematype.schema.discriminators;
      const discriminatorValuePath = subpath + '.' +
        get(schematype, 'schema.options.discriminatorKey');
      // The filter may address the discriminator key with or without the
      // array index (`arr.0.kind` vs `arr.kind`); check both forms, with
      // the index-free form taking precedence.
      const discriminatorFilterPath =
        discriminatorValuePath.replace(/\.\d+\./, '.');
      let discriminatorKey = null;
      if (discriminatorValuePath in filter) {
        discriminatorKey = filter[discriminatorValuePath];
      }
      if (discriminatorFilterPath in filter) {
        discriminatorKey = filter[discriminatorFilterPath];
      }
      if (discriminatorKey == null || discriminators[discriminatorKey] == null) {
        continue;
      }
      // Resolve the remainder of the path on the matched discriminator.
      const rest = parts.slice(i + 1).join('.');
      schematype = discriminators[discriminatorKey].path(rest);
      if (schematype != null) {
        type = discriminators[discriminatorKey]._getPathType(rest);
        break;
      }
    }
  }
  return { type: type, schematype: schematype };
};
@@ -1,16 +0,0 @@ | |||
'use strict'; | |||
/*! | |||
* ignore | |||
*/ | |||
module.exports = hasDollarKeys;

/*!
 * Returns true if any top-level key of `obj` starts with `$`, i.e. the
 * object contains atomic operators.
 */
function hasDollarKeys(obj) {
  for (const key of Object.keys(obj)) {
    if (key.startsWith('$')) {
      return true;
    }
  }
  return false;
}
@@ -1,46 +0,0 @@ | |||
'use strict'; | |||
/*! | |||
* ignore | |||
*/ | |||
module.exports = selectPopulatedFields;

/*!
 * Adjusts the query projection for populated paths: inclusive projections
 * get each populated path selected (unless the user explicitly deselected
 * it), while exclusive projections get populated paths removed from the
 * exclusion set (unless the user excluded them explicitly).
 */
function selectPopulatedFields(query) {
  const opts = query._mongooseOptions;
  if (opts.populate == null) {
    return;
  }

  const paths = Object.keys(opts.populate);
  const userProvidedFields = query._userProvidedFields || {};

  if (query.selectedInclusively()) {
    for (const path of paths) {
      if (!isPathInFields(userProvidedFields, path)) {
        query.select(path);
      } else if (userProvidedFields[path] === 0) {
        // User explicitly deselected this populated path.
        delete query._fields[path];
      }
    }
  } else if (query.selectedExclusively()) {
    for (const path of paths) {
      if (userProvidedFields[path] == null) {
        delete query._fields[path];
      }
    }
  }
}
/*! | |||
* ignore | |||
*/ | |||
/*!
 * Returns true if `path`, or any strict parent prefix of it, appears as a
 * key in `userProvidedFields`.
 */
function isPathInFields(userProvidedFields, path) {
  const pieces = path.split('.');
  let prefix = pieces[0];
  for (let i = 1; i < pieces.length; ++i) {
    if (userProvidedFields[prefix] != null) {
      return true;
    }
    prefix += '.' + pieces[i];
  }
  return userProvidedFields[prefix] != null;
}
@@ -1,18 +0,0 @@ | |||
'use strict'; | |||
/*! | |||
* A query thunk is the function responsible for sending the query to MongoDB, | |||
* like `Query#_findOne()` or `Query#_execUpdate()`. The `Query#exec()` function | |||
* calls a thunk. The term "thunk" here is the traditional Node.js definition: | |||
* a function that takes exactly 1 parameter, a callback. | |||
* | |||
* This function defines common behavior for all query thunks. | |||
*/ | |||
module.exports = wrapThunk;

/*!
 * Wraps a query thunk so that every invocation increments the query's
 * `_executionCount` before delegating to the wrapped thunk.
 */
function wrapThunk(fn) {
  return function _wrappedThunk(cb) {
    this._executionCount += 1;
    fn.call(this, cb);
  };
}
@@ -1,16 +0,0 @@ | |||
'use strict'; | |||
const get = require('../get'); | |||
module.exports = applyWriteConcern;

/*!
 * Copies the schema-level write concern settings (`w`, `j`, `wtimeout`)
 * onto `options`, for each setting the caller did not supply explicitly.
 */
function applyWriteConcern(schema, options) {
  const writeConcern = get(schema, 'options.writeConcern', {});
  for (const key of ['w', 'j', 'wtimeout']) {
    if (!(key in options) && writeConcern[key] != null) {
      options[key] = writeConcern[key];
    }
  }
}
@@ -1,124 +0,0 @@ | |||
'use strict'; | |||
const get = require('../get'); | |||
const utils = require('../../utils'); | |||
/*! | |||
* Gather all indexes defined in the schema, including single nested, | |||
* document arrays, and embedded discriminators. | |||
*/ | |||
module.exports = function getIndexes(schema) {
  // Accumulator of `[fieldSpec, options]` pairs; `fixSubIndexPaths` below
  // closes over this variable, so it must stay in this scope.
  let indexes = [];
  // Schemas currently on the recursion path — used to break cycles.
  const schemaStack = new WeakMap();
  // Index type names (e.g. 'text', '2dsphere') this schema class supports.
  const indexTypes = schema.constructor.indexTypes;

  const collectIndexes = function(schema, prefix) {
    // Ignore infinitely nested schemas, if we've already seen this schema
    // along this path there must be a cycle
    if (schemaStack.has(schema)) {
      return;
    }
    schemaStack.set(schema, true);

    prefix = prefix || '';
    const keys = Object.keys(schema.paths);
    const length = keys.length;

    for (let i = 0; i < length; ++i) {
      const key = keys[i];
      const path = schema.paths[key];

      if (path.$isMongooseDocumentArray || path.$isSingleNested) {
        // Recurse into subdocument schemas unless the user opted out via
        // `excludeIndexes` (checked on both option containers).
        if (get(path, 'options.excludeIndexes') !== true &&
            get(path, 'schemaOptions.excludeIndexes') !== true) {
          collectIndexes(path.schema, prefix + key + '.');
        }

        // Also pick up indexes declared on embedded discriminators.
        if (path.schema.discriminators != null) {
          const discriminators = path.schema.discriminators;
          const discriminatorKeys = Object.keys(discriminators);
          for (const discriminatorKey of discriminatorKeys) {
            collectIndexes(discriminators[discriminatorKey]._originalSchema,
              prefix + key + '.');
          }
        }

        // Retained to minimize risk of backwards breaking changes due to
        // gh-6113
        if (path.$isMongooseDocumentArray) {
          continue;
        }
      }

      // Index declared directly on the path, or on its caster (arrays).
      const index = path._index || (path.caster && path.caster._index);

      if (index !== false && index !== null && index !== undefined) {
        const field = {};
        const isObject = utils.isObject(index);
        const options = isObject ? index : {};
        // `index` may be a bare type string, an options object with a
        // `type` property, or a plain truthy flag.
        const type = typeof index === 'string' ? index :
          isObject ? index.type :
            false;

        if (type && indexTypes.indexOf(type) !== -1) {
          field[prefix + key] = type;
        } else if (options.text) {
          field[prefix + key] = 'text';
          delete options.text;
        } else {
          field[prefix + key] = 1;
        }

        delete options.type;
        // Default to background index builds unless explicitly set.
        if (!('background' in options)) {
          options.background = true;
        }

        indexes.push([field, options]);
      }
    }

    schemaStack.delete(schema);

    if (prefix) {
      // We're inside a subdocument: prefix indexes declared via
      // `schema.index()` on the child schema.
      fixSubIndexPaths(schema, prefix);
    } else {
      // Top level: merge in indexes declared via `schema.index()`.
      schema._indexes.forEach(function(index) {
        if (!('background' in index[1])) {
          index[1].background = true;
        }
      });
      indexes = indexes.concat(schema._indexes);
    }
  };

  collectIndexes(schema);
  return indexes;

  /*!
   * Checks for indexes added to subdocs using Schema.index().
   * These indexes need their paths prefixed properly.
   *
   * schema._indexes = [ [indexObj, options], [indexObj, options] ..]
   */
  function fixSubIndexPaths(schema, prefix) {
    const subindexes = schema._indexes;
    const len = subindexes.length;
    for (let i = 0; i < len; ++i) {
      const indexObj = subindexes[i][0];
      const keys = Object.keys(indexObj);
      const klen = keys.length;
      const newindex = {};

      // use forward iteration, order matters
      for (let j = 0; j < klen; ++j) {
        const key = keys[j];
        newindex[prefix + key] = indexObj[key];
      }

      // Pushes onto the outer `indexes` closure variable.
      indexes.push([newindex, subindexes[i][1]]);
    }
  }
};
@@ -1,24 +0,0 @@ | |||
'use strict'; | |||
module.exports = handleTimestampOption; | |||
/*! | |||
* ignore | |||
*/ | |||
/**
 * Resolves a `timestamps` schema option into the field name to use for
 * `prop` ('createdAt' or 'updatedAt'):
 * - nullish option       -> null (timestamps disabled)
 * - boolean option       -> the default name `prop`
 * - `arg[prop]` boolean  -> `prop` if true, null if false
 * - `prop` missing       -> the default name `prop`
 * - otherwise            -> the custom name stored at `arg[prop]`
 *
 * @param {*} arg the raw `timestamps` option
 * @param {String} prop 'createdAt' or 'updatedAt'
 * @return {String|null}
 */
function handleTimestampOption(arg, prop) {
  if (arg == null) {
    return null;
  }
  if (typeof arg === 'boolean') {
    return prop;
  }
  const configured = arg[prop];
  if (typeof configured === 'boolean') {
    return configured ? prop : null;
  }
  return prop in arg ? configured : prop;
}
@@ -1,19 +0,0 @@ | |||
'use strict'; | |||
module.exports = function merge(s1, s2) { | |||
s1.add(s2.obj); | |||
s1.callQueue = s1.callQueue.concat(s2.callQueue); | |||
s1.method(s2.methods); | |||
s1.static(s2.statics); | |||
for (const query in s2.query) { | |||
s1.query[query] = s2.query[query]; | |||
} | |||
for (const virtual in s2.virtuals) { | |||
s1.virtual[virtual] = s2.virtual[virtual].clone(); | |||
} | |||
s1.s.hooks.merge(s2.s.hooks, false); | |||
}; |
@@ -1,38 +0,0 @@ | |||
'use strict'; | |||
/*! | |||
* Set `$parentSchema` on all schema types, and `$schemaType` on single | |||
* nested docs. | |||
* | |||
* This is a slow path function, should only run when model is compiled | |||
*/ | |||
module.exports = function setParentPointers(schema, skipRecursion) { | |||
for (const path of Object.keys(schema.paths)) { | |||
const schemaType = schema.paths[path]; | |||
if (schemaType.schema != null) { | |||
Object.defineProperty(schemaType.schema, '$schemaType', { | |||
configurable: true, | |||
writable: false, | |||
enumerable: false, | |||
value: schemaType | |||
}); | |||
} | |||
Object.defineProperty(schemaType, '$parentSchema', { | |||
configurable: true, | |||
writable: false, | |||
enumerable: false, | |||
value: schema | |||
}); | |||
} | |||
// `childSchemas` contains all descendant schemas, so no need to recurse | |||
// further. | |||
if (skipRecursion) { | |||
return; | |||
} | |||
for (const obj of schema.childSchemas) { | |||
setParentPointers(obj.schema, true); | |||
} | |||
}; |
@@ -1,117 +0,0 @@ | |||
'use strict'; | |||
const modifiedPaths = require('./common').modifiedPaths; | |||
/** | |||
* Applies defaults to update and findOneAndUpdate operations. | |||
* | |||
* @param {Object} filter | |||
* @param {Schema} schema | |||
* @param {Object} castedDoc | |||
* @param {Object} options | |||
* @method setDefaultsOnInsert | |||
* @api private | |||
*/ | |||
module.exports = function(filter, schema, castedDoc, options) {
  const keys = Object.keys(castedDoc || {});
  const updatedKeys = {};
  // NOTE(review): `updatedValues` is populated below but never returned or
  // read — looks vestigial; confirm before removing.
  const updatedValues = {};
  const numKeys = keys.length;
  // Set of dotted paths the update (and the filter) already touches; a
  // default is only injected for paths NOT in this set.
  const modified = {};

  let hasDollarUpdate = false;

  options = options || {};

  // Defaults-on-insert only applies to upserts with the option enabled.
  if (!options.upsert || !options.setDefaultsOnInsert) {
    return castedDoc;
  }

  // Collect paths modified via update operators ($set, $inc, ...).
  for (let i = 0; i < numKeys; ++i) {
    if (keys[i].charAt(0) === '$') {
      modifiedPaths(castedDoc[keys[i]], '', modified);
      hasDollarUpdate = true;
    }
  }

  // No operators: the whole doc is treated as the modification.
  if (!hasDollarUpdate) {
    modifiedPaths(castedDoc, '', modified);
  }

  // Paths pinned by the filter (e.g. `{ name: 'x' }`) will be set by the
  // upsert itself, so treat them as modified too. Conditions containing
  // query operators (`$gt`, ...) don't pin a value and are skipped.
  const paths = Object.keys(filter);
  const numPaths = paths.length;
  for (let i = 0; i < numPaths; ++i) {
    const path = paths[i];
    const condition = filter[path];
    if (condition && typeof condition === 'object') {
      const conditionKeys = Object.keys(condition);
      const numConditionKeys = conditionKeys.length;
      let hasDollarKey = false;
      for (let j = 0; j < numConditionKeys; ++j) {
        if (conditionKeys[j].charAt(0) === '$') {
          hasDollarKey = true;
          break;
        }
      }
      if (hasDollarKey) {
        continue;
      }
    }
    updatedKeys[path] = true;
    modified[path] = true;
  }

  if (options && options.overwrite && !hasDollarUpdate) {
    // Defaults will be set later, since we're overwriting we'll cast
    // the whole update to a document
    return castedDoc;
  }

  // Inject each schema default into `$setOnInsert` unless the path (or a
  // parent of it) is already modified.
  schema.eachPath(function(path, schemaType) {
    if (schemaType.$isSingleNested) {
      // Only handle nested schemas 1-level deep to avoid infinite
      // recursion re: https://github.com/mongodb-js/mongoose-autopopulate/issues/11
      schemaType.schema.eachPath(function(_path, _schemaType) {
        if (_path === '_id' && _schemaType.auto) {
          // Ignore _id if auto id so we don't create subdocs
          return;
        }
        const def = _schemaType.getDefault(null, true);
        if (!isModified(modified, path + '.' + _path) &&
            typeof def !== 'undefined') {
          castedDoc = castedDoc || {};
          castedDoc.$setOnInsert = castedDoc.$setOnInsert || {};
          castedDoc.$setOnInsert[path + '.' + _path] = def;
          updatedValues[path + '.' + _path] = def;
        }
      });
    } else {
      const def = schemaType.getDefault(null, true);
      if (!isModified(modified, path) && typeof def !== 'undefined') {
        castedDoc = castedDoc || {};
        castedDoc.$setOnInsert = castedDoc.$setOnInsert || {};
        castedDoc.$setOnInsert[path] = def;
        updatedValues[path] = def;
      }
    }
  });

  return castedDoc;
};
/**
 * Returns true if `path`, or any dotted prefix of it, is flagged truthy
 * in the `modified` map.
 *
 * @param {Object} modified map of dotted path -> truthy flag
 * @param {String} path dotted path to check
 * @return {Boolean}
 */
function isModified(modified, path) {
  const pieces = path.split('.');
  let prefix = '';
  for (const piece of pieces) {
    prefix = prefix === '' ? piece : prefix + '.' + piece;
    if (modified[prefix]) {
      return true;
    }
  }
  return false;
}
@@ -1,11 +0,0 @@ | |||
'use strict'; | |||
// Shared, globally-registered symbols (Symbol.for) used to tag internal
// mongoose state without colliding with user-defined properties.
// Judging by their registry keys (can't confirm usage from this file alone):
exports.validatorErrorSymbol = Symbol.for('mongoose:validatorError');
exports.documentArrayParent = Symbol.for('mongoose:documentArrayParent');
exports.modelSymbol = Symbol.for('mongoose#Model');
exports.getSymbol = Symbol.for('mongoose#Document#get');
exports.objectIdSymbol = Symbol.for('mongoose#ObjectId');
@@ -1,173 +0,0 @@ | |||
'use strict'; | |||
const handleTimestampOption = require('../schema/handleTimestampOption'); | |||
module.exports = applyTimestampsToChildren; | |||
/*! | |||
* ignore | |||
*/ | |||
/**
 * Applies `createdAt`/`updatedAt` timestamps to subdocuments touched by an
 * update: elements added via `$push`, and subdocs/nested paths written via
 * `$set` or via top-level (non-operator) keys.
 *
 * @param {Date} now timestamp value to write
 * @param {Object} update the (possibly operator-based) update, mutated
 * @param {Schema} schema the model's schema
 */
function applyTimestampsToChildren(now, update, schema) {
  if (update == null) {
    return;
  }

  const keys = Object.keys(update);
  let key;
  let createdAt;
  let updatedAt;
  let timestamps;
  let path;

  // NOTE(review): only the first key is inspected — assumes operator and
  // non-operator keys are never mixed in one update.
  const hasDollarKey = keys.length && keys[0].charAt(0) === '$';

  if (hasDollarKey) {
    // Stamp subdocs pushed into timestamped document arrays.
    if (update.$push) {
      for (key in update.$push) {
        const $path = schema.path(key);
        if (update.$push[key] &&
            $path &&
            $path.$isMongooseDocumentArray &&
            $path.schema.options.timestamps) {
          timestamps = $path.schema.options.timestamps;
          createdAt = handleTimestampOption(timestamps, 'createdAt');
          updatedAt = handleTimestampOption(timestamps, 'updatedAt');
          if (update.$push[key].$each) {
            update.$push[key].$each.forEach(function(subdoc) {
              if (updatedAt != null) {
                subdoc[updatedAt] = now;
              }
              if (createdAt != null) {
                subdoc[createdAt] = now;
              }
            });
          } else {
            if (updatedAt != null) {
              update.$push[key][updatedAt] = now;
            }
            if (createdAt != null) {
              update.$push[key][createdAt] = now;
            }
          }
        }
      }
    }
    if (update.$set != null) {
      const keys = Object.keys(update.$set);
      for (key of keys) {
        // Replace positional operator `$` and array filters `$[]` and `$[.*]`
        const keyToSearch = key.
          replace(/\.\$(\[[^\]]*\])?\./g, '.0.').
          replace(/\.(\[[^\]]*\])?\$$/, '.0');
        path = schema.path(keyToSearch);
        if (!path) {
          continue;
        }
        if (Array.isArray(update.$set[key]) && path.$isMongooseDocumentArray) {
          // Setting a whole document array: stamp each element.
          applyTimestampsToDocumentArray(update.$set[key], path, now);
        } else if (update.$set[key] && path.$isSingleNested) {
          // Setting a whole single-nested subdoc: stamp it.
          applyTimestampsToSingleNested(update.$set[key], path, now);
        } else if (path.$parentSchema !== schema && path.$parentSchema != null) {
          // Setting a field *inside* a timestamped subdocument: bump the
          // enclosing subdoc's `updatedAt`.
          const parentPath = path.$parentSchema.$schemaType;
          if (parentPath == null) {
            continue;
          }
          timestamps = parentPath.schema.options.timestamps;
          createdAt = handleTimestampOption(timestamps, 'createdAt');
          updatedAt = handleTimestampOption(timestamps, 'updatedAt');

          if (updatedAt == null) {
            continue;
          }

          if (parentPath.$isSingleNested) {
            // Single nested is easy
            update.$set[parentPath.path + '.' + updatedAt] = now;
            continue;
          }

          // Document array: keep the array index segment from the key so
          // the right element's `updatedAt` is set.
          let childPath = key.substr(parentPath.path.length + 1);
          const firstDot = childPath.indexOf('.');

          // Shouldn't happen, but if it does ignore this path
          if (firstDot === -1) {
            continue;
          }

          childPath = childPath.substr(0, firstDot);

          update.$set[parentPath.path + '.' + childPath + '.' + updatedAt] = now;
        } else if (path.schema != null && path.schema != schema) {
          // Setting a subdoc by value with its own timestamped schema.
          timestamps = path.schema.options.timestamps;
          createdAt = handleTimestampOption(timestamps, 'createdAt');
          updatedAt = handleTimestampOption(timestamps, 'updatedAt');

          if (updatedAt != null) {
            update.$set[key][updatedAt] = now;
          }
          if (createdAt != null) {
            update.$set[key][createdAt] = now;
          }
        }
      }
    }
  } else {
    // Top-level (non-operator) keys: only whole-subdoc / whole-array
    // assignments are stamped.
    const keys = Object.keys(update).filter(key => !key.startsWith('$'));
    for (key of keys) {
      // Replace positional operator `$` and array filters `$[]` and `$[.*]`
      const keyToSearch = key.
        replace(/\.\$(\[[^\]]*\])?\./g, '.0.').
        replace(/\.(\[[^\]]*\])?\$$/, '.0');
      path = schema.path(keyToSearch);
      if (!path) {
        continue;
      }

      if (Array.isArray(update[key]) && path.$isMongooseDocumentArray) {
        applyTimestampsToDocumentArray(update[key], path, now);
      } else if (update[key] != null && path.$isSingleNested) {
        applyTimestampsToSingleNested(update[key], path, now);
      }
    }
  }
}
/**
 * Stamps every element of a document array with the schema's configured
 * `createdAt`/`updatedAt` fields (no-op when timestamps are disabled).
 *
 * @param {Array} arr plain subdocument objects, mutated in place
 * @param {SchemaType} schematype the document-array schema type
 * @param {Date} now timestamp value to write
 */
function applyTimestampsToDocumentArray(arr, schematype, now) {
  const timestamps = schematype.schema.options.timestamps;
  if (!timestamps) {
    return;
  }

  const createdAt = handleTimestampOption(timestamps, 'createdAt');
  const updatedAt = handleTimestampOption(timestamps, 'updatedAt');

  for (const subdoc of arr) {
    if (updatedAt != null) {
      subdoc[updatedAt] = now;
    }
    if (createdAt != null) {
      subdoc[createdAt] = now;
    }
  }
}
/**
 * Stamps a single-nested subdocument with the schema's configured
 * `createdAt`/`updatedAt` fields (no-op when timestamps are disabled).
 *
 * @param {Object} subdoc mutated in place
 * @param {SchemaType} schematype the single-nested schema type
 * @param {Date} now timestamp value to write
 */
function applyTimestampsToSingleNested(subdoc, schematype, now) {
  const timestamps = schematype.schema.options.timestamps;
  if (!timestamps) {
    return;
  }

  const createdAt = handleTimestampOption(timestamps, 'createdAt');
  const updatedAt = handleTimestampOption(timestamps, 'updatedAt');
  if (updatedAt != null) {
    subdoc[updatedAt] = now;
  }
  if (createdAt != null) {
    subdoc[createdAt] = now;
  }
}
@@ -1,65 +0,0 @@ | |||
'use strict'; | |||
/*! | |||
* ignore | |||
*/ | |||
const get = require('../get'); | |||
module.exports = applyTimestampsToUpdate; | |||
/*! | |||
* ignore | |||
*/ | |||
/**
 * Adds `updatedAt`/`createdAt` handling to an update: sets `updatedAt` via
 * `$set`, and moves `createdAt` into `$setOnInsert` so it is only written
 * on upsert-inserts. With `overwrite`, both are written directly.
 *
 * @param {Date} now timestamp value
 * @param {String|null} createdAt resolved createdAt field name, or null
 * @param {String|null} updatedAt resolved updatedAt field name, or null
 * @param {Object} currentUpdate the update object
 * @param {Object} options query options (`overwrite`, `timestamps`)
 * @return {Object} the (mutated) update
 */
function applyTimestampsToUpdate(now, createdAt, updatedAt, currentUpdate, options) {
  // Careful: `updates` aliases `currentUpdate`; `_updates` is where the
  // timestamp fields are written in the overwrite branch.
  const updates = currentUpdate;
  let _updates = updates;
  const overwrite = get(options, 'overwrite', false);
  const timestamps = get(options, 'timestamps', true);

  // Support skipping timestamps at the query level, see gh-6980
  if (!timestamps || updates == null) {
    return currentUpdate;
  }

  if (overwrite) {
    if (currentUpdate && currentUpdate.$set) {
      // Replace-style update: lift existing `$set` contents aside and write
      // timestamps into a fresh `$set`.
      currentUpdate = currentUpdate.$set;
      updates.$set = {};
      _updates = updates.$set;
    }
    if (updatedAt && !currentUpdate[updatedAt]) {
      _updates[updatedAt] = now;
    }
    if (createdAt && !currentUpdate[createdAt]) {
      _updates[createdAt] = now;
    }
    return updates;
  }
  updates.$set = updates.$set || {};
  currentUpdate = currentUpdate || {};

  // Don't clobber a user-supplied `$currentDate` on updatedAt.
  if (updatedAt &&
      (!currentUpdate.$currentDate || !currentUpdate.$currentDate[updatedAt])) {
    updates.$set[updatedAt] = now;
  }

  if (createdAt) {
    // `createdAt` must never change on a plain update: strip any explicit
    // writes and only set it when the upsert inserts a new doc.
    if (currentUpdate[createdAt]) {
      delete currentUpdate[createdAt];
    }
    if (currentUpdate.$set && currentUpdate.$set[createdAt]) {
      delete currentUpdate.$set[createdAt];
    }

    updates.$setOnInsert = updates.$setOnInsert || {};
    updates.$setOnInsert[createdAt] = now;
  }

  if (Object.keys(updates.$set).length === 0) {
    delete updates.$set;
  }

  return updates;
}
@@ -1,33 +0,0 @@ | |||
'use strict'; | |||
const _modifiedPaths = require('../common').modifiedPaths; | |||
/** | |||
* Given an update document with potential update operators (`$set`, etc.) | |||
* returns an object whose keys are the directly modified paths. | |||
* | |||
* If there are any top-level keys that don't start with `$`, we assume those | |||
* will get wrapped in a `$set`. The Mongoose Query is responsible for wrapping | |||
* top-level keys in `$set`. | |||
* | |||
* @param {Object} update | |||
* @return {Object} modified | |||
*/ | |||
module.exports = function modifiedPaths(update) { | |||
const keys = Object.keys(update); | |||
const res = {}; | |||
const withoutDollarKeys = {}; | |||
for (const key of keys) { | |||
if (key.startsWith('$')) { | |||
_modifiedPaths(update[key], '', res); | |||
continue; | |||
} | |||
withoutDollarKeys[key] = update[key]; | |||
} | |||
_modifiedPaths(withoutDollarKeys, '', res); | |||
return res; | |||
}; |
@@ -1,227 +0,0 @@ | |||
'use strict'; | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
const Mixed = require('../schema/mixed'); | |||
const ValidationError = require('../error/validation'); | |||
const flatten = require('./common').flatten; | |||
const modifiedPaths = require('./common').modifiedPaths; | |||
const parallel = require('async/parallel'); | |||
/** | |||
* Applies validators and defaults to update and findOneAndUpdate operations, | |||
* specifically passing a null doc as `this` to validators and defaults | |||
* | |||
* @param {Query} query | |||
* @param {Schema} schema | |||
* @param {Object} castedDoc | |||
* @param {Object} options | |||
* @method runValidatorsOnUpdate | |||
* @api private | |||
*/ | |||
module.exports = function(query, schema, castedDoc, options) {
  let _keys;
  const keys = Object.keys(castedDoc || {});
  // `updatedKeys`/`updatedValues` start as maps; note they are reassigned
  // below (updatedKeys becomes an array) in the non-operator branch.
  let updatedKeys = {};
  let updatedValues = {};
  // Paths updated via $pull/$pullAll — their values get wrapped in an
  // array before validation when the path is an array type.
  const isPull = {};
  // Elements appended via $push/$addToSet, keyed by array path.
  const arrayAtomicUpdates = {};
  const numKeys = keys.length;
  let hasDollarUpdate = false;
  const modified = {};
  let currentUpdate;
  let key;
  let i;

  // Pass 1: collect the flattened set of updated paths and values from
  // each update operator.
  for (i = 0; i < numKeys; ++i) {
    if (keys[i].charAt(0) === '$') {
      hasDollarUpdate = true;
      if (keys[i] === '$push' || keys[i] === '$addToSet') {
        _keys = Object.keys(castedDoc[keys[i]]);
        for (let ii = 0; ii < _keys.length; ++ii) {
          currentUpdate = castedDoc[keys[i]][_keys[ii]];
          if (currentUpdate && currentUpdate.$each) {
            arrayAtomicUpdates[_keys[ii]] = (arrayAtomicUpdates[_keys[ii]] || []).
              concat(currentUpdate.$each);
          } else {
            arrayAtomicUpdates[_keys[ii]] = (arrayAtomicUpdates[_keys[ii]] || []).
              concat([currentUpdate]);
          }
        }
        continue;
      }
      modifiedPaths(castedDoc[keys[i]], '', modified);
      const flat = flatten(castedDoc[keys[i]]);
      const paths = Object.keys(flat);
      const numPaths = paths.length;
      for (let j = 0; j < numPaths; ++j) {
        // Normalize positional segments (`.$.`, trailing `.$`) to index 0
        // so the path can be resolved against the schema.
        let updatedPath = paths[j].replace('.$.', '.0.');
        updatedPath = updatedPath.replace(/\.\$$/, '.0');
        key = keys[i];
        // With `$pull` we might flatten `$in`. Skip stuff nested under `$in`
        // for the rest of the logic, it will get handled later.
        if (updatedPath.indexOf('$') !== -1) {
          continue;
        }
        if (key === '$set' || key === '$setOnInsert' ||
            key === '$pull' || key === '$pullAll') {
          updatedValues[updatedPath] = flat[paths[j]];
          isPull[updatedPath] = key === '$pull' || key === '$pullAll';
        } else if (key === '$unset') {
          updatedValues[updatedPath] = undefined;
        }
        updatedKeys[updatedPath] = true;
      }
    }
  }

  // No operators: the whole doc is the update.
  if (!hasDollarUpdate) {
    modifiedPaths(castedDoc, '', modified);
    updatedValues = flatten(castedDoc);
    updatedKeys = Object.keys(updatedValues);
  }

  const updates = Object.keys(updatedValues);
  const numUpdates = updates.length;
  // Thunks (callback-taking functions) run in parallel at exec time.
  const validatorsToExecute = [];
  const validationErrors = [];

  // Paths whose subdocuments were fully validated — child paths of these
  // are skipped to avoid double validation.
  const alreadyValidated = [];

  const context = options && options.context === 'query' ? query : null;
  // Builds the validator thunk(s) for update path `updates[i]` / value `v`.
  function iter(i, v) {
    const schemaPath = schema._getSchema(updates[i]);
    if (schemaPath) {
      // gh-4305: `_getSchema()` will report all sub-fields of a 'Mixed' path
      // as 'Mixed', so avoid double validating them.
      if (schemaPath instanceof Mixed && schemaPath.$fullPath !== updates[i]) {
        return;
      }

      if (v && Array.isArray(v.$in)) {
        // NOTE(review): the arrow's `(v, i)` params shadow the outer `v`
        // and `i`, so `updates[i]` below indexes by the $in element
        // position — verify this is the intended error path.
        v.$in.forEach((v, i) => {
          validatorsToExecute.push(function(callback) {
            schemaPath.doValidate(
              v,
              function(err) {
                if (err) {
                  err.path = updates[i] + '.$in.' + i;
                  validationErrors.push(err);
                }
                callback(null);
              },
              context,
              {updateValidator: true});
          });
        });
      } else {
        if (isPull[updates[i]] &&
            !Array.isArray(v) &&
            schemaPath.$isMongooseArray) {
          v = [v];
        }

        if (schemaPath.$isMongooseDocumentArrayElement && v != null && v.$__ != null) {
          // Hydrated subdocument: run the element validator, then the
          // subdoc's own full validation, flattening its errors.
          alreadyValidated.push(updates[i]);
          validatorsToExecute.push(function(callback) {
            schemaPath.doValidate(v, function(err) {
              if (err) {
                err.path = updates[i];
                validationErrors.push(err);
                return callback(null);
              }

              v.validate(function(err) {
                if (err) {
                  if (err.errors) {
                    for (const key of Object.keys(err.errors)) {
                      const _err = err.errors[key];
                      _err.path = updates[i] + '.' + key;
                      validationErrors.push(_err);
                    }
                  }
                }
                callback(null);
              });
            }, context, { updateValidator: true });
          });
        } else {
          validatorsToExecute.push(function(callback) {
            // Skip if a parent subdoc was already fully validated.
            for (const path of alreadyValidated) {
              if (updates[i].startsWith(path + '.')) {
                return callback(null);
              }
            }
            schemaPath.doValidate(v, function(err) {
              if (err) {
                err.path = updates[i];
                validationErrors.push(err);
              }
              callback(null);
            }, context, { updateValidator: true });
          });
        }
      }
    }
  }
  for (i = 0; i < numUpdates; ++i) {
    iter(i, updatedValues[updates[i]]);
  }

  // Validators for $push/$addToSet payloads: validate the batch against
  // the document-array path, or each element against the element path.
  const arrayUpdates = Object.keys(arrayAtomicUpdates);
  const numArrayUpdates = arrayUpdates.length;
  for (i = 0; i < numArrayUpdates; ++i) {
    (function(i) {
      let schemaPath = schema._getSchema(arrayUpdates[i]);
      if (schemaPath && schemaPath.$isMongooseDocumentArray) {
        validatorsToExecute.push(function(callback) {
          schemaPath.doValidate(
            arrayAtomicUpdates[arrayUpdates[i]],
            function(err) {
              if (err) {
                err.path = arrayUpdates[i];
                validationErrors.push(err);
              }
              callback(null);
            },
            options && options.context === 'query' ? query : null);
        });
      } else {
        schemaPath = schema._getSchema(arrayUpdates[i] + '.0');
        for (let j = 0; j < arrayAtomicUpdates[arrayUpdates[i]].length; ++j) {
          (function(j) {
            validatorsToExecute.push(function(callback) {
              schemaPath.doValidate(
                arrayAtomicUpdates[arrayUpdates[i]][j],
                function(err) {
                  if (err) {
                    err.path = arrayUpdates[i];
                    validationErrors.push(err);
                  }
                  callback(null);
                },
                options && options.context === 'query' ? query : null,
                { updateValidator: true });
            });
          })(j);
        }
      }
    })(i);
  }

  // Returned thunk: run all validators, then report accumulated errors as
  // a single ValidationError (validators themselves never fail the batch).
  return function(callback) {
    parallel(validatorsToExecute, function() {
      if (validationErrors.length) {
        const err = new ValidationError(null);
        for (let i = 0; i < validationErrors.length; ++i) {
          err.addError(validationErrors[i].path, validationErrors[i]);
        }
        return callback(err);
      }
      callback(null);
    });
  };
};
@@ -1,37 +0,0 @@ | |||
/*! | |||
* Dependencies | |||
*/ | |||
'use strict'; | |||
const StateMachine = require('./statemachine'); | |||
const ActiveRoster = StateMachine.ctor('require', 'modify', 'init', 'default', 'ignore'); | |||
module.exports = exports = InternalCache; | |||
/**
 * Per-document internal state container (stored as `doc.$__`).
 * All fields start undefined/empty and are filled in lazily by the
 * Document machinery; the comments below describe the fields this file
 * itself annotates — others are named after their apparent role.
 */
function InternalCache() {
  this.strictMode = undefined;
  this.selected = undefined;
  this.shardval = undefined;
  this.saveError = undefined;
  this.validationError = undefined;
  this.adhocPaths = undefined;
  this.removing = undefined;
  this.inserting = undefined;
  this.saving = undefined;
  this.version = undefined;
  this.getters = {};
  this._id = undefined;
  this.populate = undefined; // what we want to populate in this doc
  this.populated = undefined;// the _ids that have been populated
  this.wasPopulated = false; // if this doc was the result of a population
  this.scope = undefined;
  // State machine tracking each path's lifecycle
  // (require/modify/init/default/ignore).
  this.activePaths = new ActiveRoster;
  this.pathsToScopes = {};
  this.cachedRequired = {};
  this.session = null;

  // embedded docs
  this.ownerDocument = undefined;
  this.fullPath = undefined;
}
@@ -1,14 +0,0 @@ | |||
'use strict'; | |||
/*! | |||
* ignore | |||
*/ | |||
// `toObject()` options used internally (e.g. when snapshotting a document
// for change tracking): no transforms, no virtuals, no getters — the raw
// stored values, with populated paths depopulated back to ids.
exports.internalToObjectOptions = {
  transform: false,
  virtuals: false,
  getters: false,
  _skipDepopulateTopLevel: true,
  depopulate: true,
  flattenDecimals: false
};
@@ -1,28 +0,0 @@ | |||
'use strict'; | |||
/*! | |||
* ignore | |||
*/ | |||
module.exports = function(schema) { | |||
// ensure the documents receive an id getter unless disabled | |||
const autoIdGetter = !schema.paths['id'] && | |||
(!schema.options.noVirtualId && schema.options.id); | |||
if (!autoIdGetter) { | |||
return; | |||
} | |||
schema.virtual('id').get(idGetter); | |||
}; | |||
/*! | |||
* Returns this documents _id cast to a string. | |||
*/ | |||
/*!
 * Returns this document's `_id` cast to a string, or null when `_id`
 * is null/undefined.
 */
function idGetter() {
  return this._id == null ? null : String(this._id);
}
@@ -1,38 +0,0 @@ | |||
'use strict'; | |||
const each = require('async/each'); | |||
/*! | |||
* ignore | |||
*/ | |||
/**
 * Schema plugin: before a top-level document is removed, run `$__remove`
 * on all of its subdocuments. Registered with `unshift` so it runs before
 * other pre-remove hooks; skipped for embedded documents (they have an
 * `ownerDocument`).
 */
module.exports = function(schema) {
  const unshift = true;
  schema.s.hooks.pre('remove', false, function(next) {
    if (this.ownerDocument) {
      next();
      return;
    }

    const _this = this;
    const subdocs = this.$__getAllSubdocs();

    if (!subdocs.length) {
      next();
      return;
    }

    // Remove subdocs in parallel; first error aborts via `remove:error`.
    each(subdocs, function(subdoc, cb) {
      subdoc.$__remove(function(err) {
        cb(err);
      });
    }, function(error) {
      if (error) {
        return _this.schema.s.hooks.execPost('remove:error', _this, [_this], { error: error }, function(error) {
          next(error);
        });
      }
      next();
    });
  }, null, unshift);
};
@@ -1,66 +0,0 @@ | |||
'use strict'; | |||
const each = require('async/each'); | |||
/*! | |||
* ignore | |||
*/ | |||
/**
 * Schema plugin: when a top-level document is saved, run the subdocuments'
 * own pre-save hooks first and their post-save hooks afterwards. Both
 * hooks are registered with `unshift` (run before other hooks) and are
 * skipped for embedded documents (which have an `ownerDocument`).
 */
module.exports = function(schema) {
  const unshift = true;
  schema.s.hooks.pre('save', false, function(next) {
    if (this.ownerDocument) {
      next();
      return;
    }

    const _this = this;
    const subdocs = this.$__getAllSubdocs();

    if (!subdocs.length) {
      next();
      return;
    }

    // Execute each subdoc's pre-save chain; first error routes through
    // the parent's `save:error` post hooks.
    each(subdocs, function(subdoc, cb) {
      subdoc.schema.s.hooks.execPre('save', subdoc, function(err) {
        cb(err);
      });
    }, function(error) {
      if (error) {
        return _this.schema.s.hooks.execPost('save:error', _this, [_this], { error: error }, function(error) {
          next(error);
        });
      }
      next();
    });
  }, null, unshift);

  schema.s.hooks.post('save', function(doc, next) {
    if (this.ownerDocument) {
      next();
      return;
    }

    const _this = this;
    const subdocs = this.$__getAllSubdocs();

    if (!subdocs.length) {
      next();
      return;
    }

    // Execute each subdoc's post-save chain after the parent saved.
    each(subdocs, function(subdoc, cb) {
      subdoc.schema.s.hooks.execPost('save', subdoc, [subdoc], function(err) {
        cb(err);
      });
    }, function(error) {
      if (error) {
        return _this.schema.s.hooks.execPost('save:error', _this, [_this], { error: error }, function(error) {
          next(error);
        });
      }
      next();
    });
  }, null, unshift);
};
@@ -1,38 +0,0 @@ | |||
'use strict'; | |||
/*! | |||
* ignore | |||
*/ | |||
/**
 * Schema plugin: validates a document before save. The per-call
 * `validateBeforeSave` save option overrides the schema-level
 * `validateBeforeSave` option. Skipped for embedded documents (their
 * parent's pre-save handles them).
 */
module.exports = function(schema) {
  const unshift = true;
  schema.pre('save', false, function(next, options) {
    const _this = this;
    // Nested docs have their own presave
    if (this.ownerDocument) {
      return next();
    }

    const hasValidateBeforeSaveOption = options &&
        (typeof options === 'object') &&
        ('validateBeforeSave' in options);

    let shouldValidate;
    if (hasValidateBeforeSaveOption) {
      shouldValidate = !!options.validateBeforeSave;
    } else {
      shouldValidate = this.schema.options.validateBeforeSave;
    }

    // Validate
    if (shouldValidate) {
      this.validate(function(error) {
        // Always routes through `save:error` post hooks; with a null
        // error this presumably falls through to `next(null)` — the
        // execPost implementation is outside this file.
        return _this.schema.s.hooks.execPost('save:error', _this, [ _this], { error: error }, function(error) {
          next(error);
        });
      });
    } else {
      next();
    }
  }, null, unshift);
};
@@ -1,49 +0,0 @@ | |||
/*! | |||
* ignore | |||
*/ | |||
'use strict'; | |||
const assert = require('assert'); | |||
const mquery = require('mquery'); | |||
/** | |||
* Helper for multiplexing promise implementations | |||
* | |||
* @api private | |||
*/ | |||
// Holds the Promise constructor mongoose (and mquery) should use.
const store = {
  _promise: null
};

/**
 * Get the current promise constructor
 *
 * @api private
 */
store.get = function() {
  return store._promise;
};

/**
 * Set the current promise constructor. Must be a constructor function;
 * also propagates the choice to mquery so queries use the same library.
 *
 * @api private
 */
store.set = function(lib) {
  assert.ok(typeof lib === 'function',
    `mongoose.Promise must be a function, got ${lib}`);
  store._promise = lib;
  mquery.Promise = lib;
};

/*!
 * Use native promises by default
 */
store.set(global.Promise);

module.exports = store;
@@ -1,205 +0,0 @@ | |||
'use strict'; | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
const CastError = require('../error/cast'); | |||
const SchemaType = require('../schematype'); | |||
const castBoolean = require('../cast/boolean'); | |||
const utils = require('../utils'); | |||
/**
 * Boolean SchemaType constructor.
 *
 * Delegates to the base SchemaType constructor with the 'Boolean'
 * instance name so the shared casting/validation plumbing applies.
 *
 * @param {String} path path in the schema this type is declared at
 * @param {Object} options schema type options (e.g. `required`, `default`)
 * @inherits SchemaType
 * @api public
 */
function SchemaBoolean(path, options) {
  SchemaType.call(this, path, options, 'Boolean');
}
/**
 * This schema type's name, to defend against minifiers that mangle
 * function names.
 *
 * @api public
 */
SchemaBoolean.schemaName = 'Boolean';
/*!
 * Inherits from SchemaType: prototype chain plus repaired `constructor` ref.
 */
SchemaBoolean.prototype = Object.create(SchemaType.prototype);
SchemaBoolean.prototype.constructor = SchemaBoolean;
/*!
 * Default caster; read and replaced through the `SchemaBoolean.cast()`
 * getter/setter below.
 * ignore
 */
SchemaBoolean._cast = castBoolean;
/**
 * Get/set the function used to cast arbitrary values to booleans.
 *
 * ####Example:
 *
 *     // Make Mongoose cast empty string '' to false.
 *     const original = mongoose.Schema.Boolean.cast();
 *     mongoose.Schema.Boolean.cast(v => {
 *       if (v === '') {
 *         return false;
 *       }
 *       return original(v);
 *     });
 *
 *     // Or disable casting entirely
 *     mongoose.Schema.Boolean.cast(false);
 *
 * @param {Function} caster
 * @return {Function}
 * @function get
 * @static
 * @api public
 */
SchemaBoolean.cast = function cast(caster) {
  // Called with no argument: act as a getter for the current caster.
  if (arguments.length === 0) {
    return this._cast;
  }
  // `cast(false)` disables casting: only nullish values and genuine
  // booleans pass through, everything else throws (callers convert the
  // bare Error into a CastError for the path).
  if (caster === false) {
    caster = v => {
      const acceptable = v == null || typeof v === 'boolean';
      if (!acceptable) {
        throw new Error();
      }
      return v;
    };
  }
  this._cast = caster;
  return this._cast;
};
/*!
 * Default `required` checker: only the two boolean primitives qualify.
 * ignore
 */
SchemaBoolean._checkRequired = v => typeof v === 'boolean';
/**
 * Override the function the required validator uses to check whether a boolean
 * passes the `required` check.
 *
 * @param {Function} fn
 * @return {Function}
 * @function checkRequired
 * @static
 * @api public
 */
SchemaBoolean.checkRequired = SchemaType.checkRequired;
/**
 * Check if the given value satisfies a required validator. For a boolean
 * to satisfy a required validator, it must be strictly equal to true or to
 * false.
 *
 * @param {Any} value
 * @return {Boolean}
 * @api public
 */
SchemaBoolean.prototype.checkRequired = function(value) {
  // Resolve via the constructor so subclasses may override `_checkRequired`.
  const isPresent = this.constructor._checkRequired;
  return isPresent(value);
};
/**
 * Configure which values get casted to `true`.
 *
 * ####Example:
 *
 *     const M = mongoose.model('Test', new Schema({ b: Boolean }));
 *     new M({ b: 'affirmative' }).b; // undefined
 *     mongoose.Schema.Boolean.convertToTrue.add('affirmative');
 *     new M({ b: 'affirmative' }).b; // true
 *
 * @property convertToTrue
 * @type Set
 * @api public
 */
Object.defineProperty(SchemaBoolean, 'convertToTrue', {
  // Proxies straight through to the Set owned by the shared boolean caster.
  get: function() {
    return castBoolean.convertToTrue;
  },
  set: function(v) {
    castBoolean.convertToTrue = v;
  }
});
/**
 * Configure which values get casted to `false`.
 *
 * ####Example:
 *
 *     const M = mongoose.model('Test', new Schema({ b: Boolean }));
 *     new M({ b: 'nay' }).b; // undefined
 *     mongoose.Schema.Types.Boolean.convertToFalse.add('nay');
 *     new M({ b: 'nay' }).b; // false
 *
 * @property convertToFalse
 * @type Set
 * @api public
 */
Object.defineProperty(SchemaBoolean, 'convertToFalse', {
  // Proxies straight through to the Set owned by the shared boolean caster.
  get: function() {
    return castBoolean.convertToFalse;
  },
  set: function(v) {
    castBoolean.convertToFalse = v;
  }
});
/**
 * Casts to boolean
 *
 * @param {Object} value
 * @param {Object} model - this value is optional
 * @api private
 */
SchemaBoolean.prototype.cast = function(value) {
  // `cast()` with no args is the getter form: it returns the current caster.
  const caster = this.constructor.cast();
  try {
    return caster(value);
  } catch (error) {
    // Normalize any caster failure into a mongoose CastError for this path.
    throw new CastError('Boolean', value, this.path);
  }
};
/*!
 * Boolean adds no operator-specific handlers of its own; it just copies
 * the base SchemaType table.
 */
SchemaBoolean.$conditionalHandlers =
  utils.options(SchemaType.prototype.$conditionalHandlers, {});
/**
 * Casts contents for queries.
 *
 * @param {String} $conditional
 * @param {any} val
 * @api private
 */
SchemaBoolean.prototype.castForQuery = function($conditional, val) {
  if (arguments.length !== 2) {
    // Single-argument form: the first argument is the value itself.
    return this._castForQuery($conditional);
  }
  const handler = SchemaBoolean.$conditionalHandlers[$conditional];
  if (handler) {
    return handler.call(this, val);
  }
  return this._castForQuery(val);
};
/*!
 * Module exports.
 */
module.exports = SchemaBoolean;
@@ -1,250 +0,0 @@ | |||
/*! | |||
* Module dependencies. | |||
*/ | |||
'use strict'; | |||
const handleBitwiseOperator = require('./operators/bitwise'); | |||
const utils = require('../utils'); | |||
const MongooseBuffer = require('../types/buffer'); | |||
const SchemaType = require('../schematype'); | |||
const Binary = MongooseBuffer.Binary; | |||
const CastError = SchemaType.CastError; | |||
let Document; | |||
/**
 * Buffer SchemaType constructor
 *
 * Delegates to the base SchemaType constructor with the 'Buffer'
 * instance name so the shared casting/validation plumbing applies.
 *
 * @param {String} key path in the schema this type is declared at
 * @param {Object} options schema type options
 * @inherits SchemaType
 * @api public
 */
function SchemaBuffer(key, options) {
  SchemaType.call(this, key, options, 'Buffer');
}
/**
 * This schema type's name, to defend against minifiers that mangle
 * function names.
 *
 * @api public
 */
SchemaBuffer.schemaName = 'Buffer';
/*!
 * Inherits from SchemaType: prototype chain plus repaired `constructor` ref.
 */
SchemaBuffer.prototype = Object.create(SchemaType.prototype);
SchemaBuffer.prototype.constructor = SchemaBuffer;
/*!
 * Default `required` checker: present and non-empty (a zero-length buffer
 * fails the check).
 * ignore
 */
SchemaBuffer._checkRequired = v => !!(v && v.length);
/**
 * Override the function the required validator uses to check whether a string
 * passes the `required` check.
 *
 * ####Example:
 *
 *     // Allow empty strings to pass `required` check
 *     mongoose.Schema.Types.String.checkRequired(v => v != null);
 *
 *     const M = mongoose.model({ buf: { type: Buffer, required: true } });
 *     new M({ buf: Buffer.from('') }).validateSync(); // validation passes!
 *
 * @param {Function} fn
 * @return {Function}
 * @function checkRequired
 * @static
 * @api public
 */
SchemaBuffer.checkRequired = SchemaType.checkRequired;
/**
 * Check if the given value satisfies a required validator. To satisfy a
 * required validator, a buffer must not be null or undefined and have
 * non-zero length.
 *
 * @param {Any} value
 * @param {Document} doc
 * @return {Boolean}
 * @api public
 */
SchemaBuffer.prototype.checkRequired = function(value, doc) {
  if (SchemaType._isRef(this, value, doc, true)) {
    // Populated/ref values only need to be present.
    return !!value;
  }
  // Resolve via the constructor so subclasses may override `_checkRequired`.
  const isPresent = this.constructor._checkRequired;
  return isPresent(value);
};
/**
 * Casts contents
 *
 * Accepts MongooseBuffers, Node Buffers, BSON Binary values, strings,
 * numbers, arrays, and Buffer-like POJOs ({ type: 'Buffer', data: [...] }),
 * and also handles populated ref paths. Throws a CastError otherwise.
 *
 * @param {Object} value
 * @param {Document} doc document that triggers the casting
 * @param {Boolean} init
 * @api private
 */
SchemaBuffer.prototype.cast = function(value, doc, init) {
  let ret;
  if (SchemaType._isRef(this, value, doc, init)) {
    // wait! we may need to cast this to a document
    if (value === null || value === undefined) {
      return value;
    }
    // lazy load (avoids a circular dependency with ./document)
    Document || (Document = require('./../document'));
    if (value instanceof Document) {
      // Already a document: just mark it as populated.
      value.$__.wasPopulated = true;
      return value;
    }
    // setting a populated path
    if (Buffer.isBuffer(value)) {
      return value;
    } else if (!utils.isObject(value)) {
      throw new CastError('buffer', value, this.path);
    }
    // Handle the case where user directly sets a populated
    // path to a plain object; cast to the Model used in
    // the population query.
    const path = doc.$__fullPath(this.path);
    const owner = doc.ownerDocument ? doc.ownerDocument() : doc;
    const pop = owner.populated(path, true);
    ret = new pop.options.model(value);
    ret.$__.wasPopulated = true;
    return ret;
  }
  // documents: unwrap to the document's _id
  if (value && value._id) {
    value = value._id;
  }
  // Already a MongooseBuffer: nothing to do.
  if (value && value.isMongooseBuffer) {
    return value;
  }
  if (Buffer.isBuffer(value)) {
    // NOTE(review): this inner guard is always true here — a falsy or
    // MongooseBuffer value was already returned above. Kept as-is.
    if (!value || !value.isMongooseBuffer) {
      value = new MongooseBuffer(value, [this.path, doc]);
      if (this.options.subtype != null) {
        // Apply the schema-level default subtype (see `subtype()`).
        value._subtype = this.options.subtype;
      }
    }
    return value;
  }
  if (value instanceof Binary) {
    // Copy the raw bytes out of the BSON Binary wrapper.
    ret = new MongooseBuffer(value.value(true), [this.path, doc]);
    if (typeof value.sub_type !== 'number') {
      throw new CastError('buffer', value, this.path);
    }
    // Binary values carry their own subtype; it wins over the schema default.
    ret._subtype = value.sub_type;
    return ret;
  }
  if (value === null) {
    return value;
  }
  const type = typeof value;
  if (
    type === 'string' || type === 'number' || Array.isArray(value) ||
    (type === 'object' && value.type === 'Buffer' && Array.isArray(value.data)) // gh-6863
  ) {
    if (type === 'number') {
      // MongooseBuffer expects an array of byte values, not a bare number.
      value = [value];
    }
    ret = new MongooseBuffer(value, [this.path, doc]);
    if (this.options.subtype != null) {
      ret._subtype = this.options.subtype;
    }
    return ret;
  }
  // Anything else (plain objects, booleans, symbols, ...) cannot be cast.
  throw new CastError('buffer', value, this.path);
};
/**
 * Sets the default [subtype](https://studio3t.com/whats-new/best-practices-uuid-mongodb/)
 * for this buffer. You can find a [list of allowed subtypes here](http://api.mongodb.com/python/current/api/bson/binary.html).
 *
 * ####Example:
 *
 *     var s = new Schema({ uuid: { type: Buffer, subtype: 4 } });
 *     var M = db.model('M', s);
 *     var m = new M({ uuid: 'test string' });
 *     m.uuid._subtype; // 4
 *
 * @param {Number} subtype the default subtype
 * @return {SchemaType} this
 * @api public
 */
SchemaBuffer.prototype.subtype = function(subtype) {
  // Stored on options; `cast()` copies it onto each MongooseBuffer it creates.
  this.options.subtype = subtype;
  return this;
};
/*!
 * Handler for comparison operators: casts the operand the same way a plain
 * value for this path would be cast. Invoked with `this` bound to the
 * SchemaBuffer instance via `handler.call(this, val)` in `castForQuery`.
 * ignore
 */
function handleSingle(val) {
  return this.castForQuery(val);
}
// Operator-specific casters for query conditions on Buffer paths,
// layered on top of the base SchemaType handlers.
SchemaBuffer.prototype.$conditionalHandlers =
  utils.options(SchemaType.prototype.$conditionalHandlers, {
    // Bitwise query operators share one handler.
    $bitsAllClear: handleBitwiseOperator,
    $bitsAnyClear: handleBitwiseOperator,
    $bitsAllSet: handleBitwiseOperator,
    $bitsAnySet: handleBitwiseOperator,
    // Range comparisons cast their operand like a plain value.
    $gt: handleSingle,
    $gte: handleSingle,
    $lt: handleSingle,
    $lte: handleSingle
  });
/**
 * Casts contents for queries.
 *
 * @param {String} $conditional
 * @param {any} [value]
 * @api private
 */
SchemaBuffer.prototype.castForQuery = function($conditional, val) {
  if (arguments.length !== 2) {
    // Single-argument form: the first argument is the value itself.
    const casted = this._castForQuery($conditional);
    // Serialize MongooseBuffers to a plain representation for the query.
    return casted ? casted.toObject({ transform: false, virtuals: false }) : casted;
  }
  const handler = this.$conditionalHandlers[$conditional];
  if (!handler) {
    // Unknown operator for this type: fail loudly rather than mis-cast.
    throw new Error(`Can't use ${$conditional} with Buffer.`);
  }
  return handler.call(this, val);
};
/*!
 * Module exports.
 */
module.exports = SchemaBuffer;