diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1867f76cad6..c2af1c027e8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,15 @@
+7.6.6 / 2023-11-27
+==================
+ * perf: avoid double-running setter logic when calling `push()` #14120 #11380
+ * fix(populate): set populated docs in correct order when populating virtual underneath doc array with justOne #14105 #14018
+ * fix: bump mongodb driver -> 5.9.1 #14084 #13829 [lorand-horvath](https://github.com/lorand-horvath)
+ * types: allow defining document array using [{ prop: String }] syntax #14095 #13424
+ * types: correct types for when includeResultMetadata: true is set #14078 #13987 [prathamVaidya](https://github.com/prathamVaidya)
+ * types(query): base filters and projections off of RawDocType instead of DocType so autocomplete doesn't show populate #14118 #14077
+ * types: make property names show up in intellisense for UpdateQuery #14123 #14090
+ * types(model): support calling Model.validate() with pathsToSkip option #14088 #14003
+ * docs: remove "DEPRECATED" warning mistakenly added to read() tags param #13980
+
8.0.1 / 2023-11-15
==================
* fix: retain key order with aliases when creating indexes with alias #14042 [meabed](https://github.com/meabed)
diff --git a/lib/aggregate.js b/lib/aggregate.js
index 7128d57b1fd..e61daa8a5df 100644
--- a/lib/aggregate.js
+++ b/lib/aggregate.js
@@ -665,7 +665,7 @@ Aggregate.prototype.unionWith = function(options) {
* await Model.aggregate(pipeline).read('primaryPreferred');
*
* @param {String|ReadPreference} pref one of the listed preference options or their aliases
- * @param {Array} [tags] optional tags for this query. DEPRECATED
+ * @param {Array} [tags] optional tags for this query.
* @return {Aggregate} this
* @api public
* @see mongodb https://www.mongodb.com/docs/manual/applications/replication/#read-preference
diff --git a/lib/stateMachine.js b/lib/stateMachine.js
index 70b1beca695..02fbc03e0fc 100644
--- a/lib/stateMachine.js
+++ b/lib/stateMachine.js
@@ -65,7 +65,11 @@ StateMachine.ctor = function() {
*/
StateMachine.prototype._changeState = function _changeState(path, nextState) {
- const prevBucket = this.states[this.paths[path]];
+ const prevState = this.paths[path];
+ if (prevState === nextState) {
+ return;
+ }
+ const prevBucket = this.states[prevState];
if (prevBucket) delete prevBucket[path];
this.paths[path] = nextState;
diff --git a/lib/types/array/methods/index.js b/lib/types/array/methods/index.js
index 06a985bc15b..b6829e6993f 100644
--- a/lib/types/array/methods/index.js
+++ b/lib/types/array/methods/index.js
@@ -374,7 +374,15 @@ const methods = {
if (val != null && utils.hasUserDefinedProperty(val, '$each')) {
atomics.$push = val;
} else {
- atomics.$push.$each = atomics.$push.$each.concat(val);
+ if (val.length === 1) {
+ atomics.$push.$each.push(val[0]);
+ } else if (val.length < 10000) {
+ atomics.$push.$each.push(...val);
+ } else {
+ for (const v of val) {
+ atomics.$push.$each.push(v);
+ }
+ }
}
} else {
atomics[op] = val;
@@ -403,8 +411,7 @@ const methods = {
addToSet() {
_checkManualPopulation(this, arguments);
- let values = [].map.call(arguments, this._mapCast, this);
- values = this[arraySchemaSymbol].applySetters(values, this[arrayParentSymbol]);
+ const values = [].map.call(arguments, this._mapCast, this);
const added = [];
let type = '';
if (values[0] instanceof ArraySubdocument) {
@@ -415,7 +422,7 @@ const methods = {
type = 'ObjectId';
}
- const rawValues = utils.isMongooseArray(values) ? values.__array : this;
+ const rawValues = utils.isMongooseArray(values) ? values.__array : values;
const rawArray = utils.isMongooseArray(this) ? this.__array : this;
rawValues.forEach(function(v) {
@@ -682,10 +689,7 @@ const methods = {
_checkManualPopulation(this, values);
- const parent = this[arrayParentSymbol];
values = [].map.call(values, this._mapCast, this);
- values = this[arraySchemaSymbol].applySetters(values, parent, undefined,
- undefined, { skipDocumentArrayCast: true });
let ret;
const atomics = this[arrayAtomicsSymbol];
this._markModified();
@@ -711,7 +715,7 @@ const methods = {
'with different `$position`');
}
atomic = values;
- ret = [].push.apply(arr, values);
+ ret = _basePush.apply(arr, values);
}
this._registerAtomic('$push', atomic);
@@ -917,7 +921,6 @@ const methods = {
values = arguments;
} else {
values = [].map.call(arguments, this._cast, this);
- values = this[arraySchemaSymbol].applySetters(values, this[arrayParentSymbol]);
}
const arr = utils.isMongooseArray(this) ? this.__array : this;
diff --git a/package.json b/package.json
index c61f061a0de..d0236548a15 100644
--- a/package.json
+++ b/package.json
@@ -93,10 +93,10 @@
"docs:merge:6x": "git merge 6.x",
"docs:test": "npm run docs:generate && npm run docs:generate:search",
"docs:view": "node ./scripts/static.js",
- "docs:prepare:publish:stable": "npm run docs:checkout:gh-pages && npm run docs:merge:stable && npm run docs:clean:stable && npm run docs:generate && npm run docs:generate:search",
+ "docs:prepare:publish:stable": "npm run docs:checkout:gh-pages && npm run docs:merge:stable && npm run docs:generate && npm run docs:generate:search",
"docs:prepare:publish:5x": "npm run docs:checkout:5x && npm run docs:merge:5x && npm run docs:clean:stable && npm run docs:generate && npm run docs:copy:tmp && npm run docs:checkout:gh-pages && npm run docs:copy:tmp:5x",
"docs:prepare:publish:6x": "npm run docs:checkout:6x && npm run docs:merge:6x && npm run docs:clean:stable && env DOCS_DEPLOY=true npm run docs:generate && npm run docs:move:6x:tmp && npm run docs:checkout:gh-pages && npm run docs:copy:tmp:6x",
- "docs:prepare:publish:7x": "git checkout 7.x && npm run docs:clean:stable && env DOCS_DEPLOY=true npm run docs:generate && mv ./docs/7.x ./tmp && npm run docs:checkout:gh-pages && rimraf ./docs/7.x && ncp ./tmp ./docs/7.x",
+ "docs:prepare:publish:7x": "env DOCS_DEPLOY=true npm run docs:generate && npm run docs:checkout:gh-pages && rimraf ./docs/7.x && mv ./tmp ./docs/7.x",
"docs:check-links": "blc http://127.0.0.1:8089 -ro",
"lint": "eslint .",
"lint-js": "eslint . --ext .js --ext .cjs",
diff --git a/scripts/website.js b/scripts/website.js
index d14a7061dd0..3ec3845ae08 100644
--- a/scripts/website.js
+++ b/scripts/website.js
@@ -4,6 +4,7 @@ Error.stackTraceLimit = Infinity;
const acquit = require('acquit');
const fs = require('fs');
+const fsextra = require('fs-extra');
const path = require('path');
const pug = require('pug');
const pkg = require('../package.json');
@@ -31,7 +32,7 @@ require('acquit-ignore')();
const { marked: markdown } = require('marked');
const highlight = require('highlight.js');
const { promisify } = require("util");
-const renderer = {
+markdown.use({
heading: function(text, level, raw, slugger) {
const slug = slugger.slug(raw);
return `
@@ -40,7 +41,7 @@ const renderer = {
\n`;
}
-};
+});
markdown.setOptions({
highlight: function(code, language) {
if (!language) {
@@ -52,30 +53,100 @@ markdown.setOptions({
return highlight.highlight(code, { language }).value;
}
});
-markdown.use({ renderer });
-
-const testPath = path.resolve(cwd, 'test')
-
-const tests = [
- ...acquit.parse(fs.readFileSync(path.join(testPath, 'geojson.test.js')).toString()),
- ...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/transactions.test.js')).toString()),
- ...acquit.parse(fs.readFileSync(path.join(testPath, 'schema.alias.test.js')).toString()),
- ...acquit.parse(fs.readFileSync(path.join(testPath, 'model.middleware.test.js')).toString()),
- ...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/date.test.js')).toString()),
- ...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/lean.test.js')).toString()),
- ...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/cast.test.js')).toString()),
- ...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/findoneandupdate.test.js')).toString()),
- ...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/custom-casting.test.js')).toString()),
- ...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/getters-setters.test.js')).toString()),
- ...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/virtuals.test.js')).toString()),
- ...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/defaults.test.js')).toString()),
- ...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/discriminators.test.js')).toString()),
- ...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/promises.test.js')).toString()),
- ...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/schematypes.test.js')).toString()),
- ...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/validation.test.js')).toString()),
- ...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/schemas.test.js')).toString())
+
+const testPath = path.resolve(cwd, 'test');
+
+/** additional test files to scan, relative to `test/` */
+const additionalTestFiles = [
+ 'geojson.test.js',
+ 'schema.alias.test.js'
+];
+/** ignored files from `test/docs/` */
+const ignoredTestFiles = [
+ // ignored because acquit does not like "for await"
+ 'asyncIterator.test.js'
];
+/**
+ * Load all test file contents with acquit
+ * @returns {Object[]} acquit AST array
+ */
+function getTests() {
+ const testDocs = path.resolve(testPath, 'docs');
+ const filesToScan = [
+ ...additionalTestFiles.map(v => path.join(testPath, v)),
+ ...fs.readdirSync(testDocs).filter(v => !ignoredTestFiles.includes(v)).map(v => path.join(testDocs, v))
+ ];
+
+ const retArray = [];
+
+ for (const file of filesToScan) {
+ try {
+ retArray.push(acquit.parse(fs.readFileSync(file).toString()));
+ } catch (err) {
+      // add the file path to an acquit error, for better debugging
+ err.filePath = file;
+ throw err;
+ }
+ }
+
+ return retArray.flat();
+}
+
+function deleteAllHtmlFiles() {
+ try {
+ console.log('Delete', path.join(versionObj.versionedPath, 'index.html'));
+ fs.unlinkSync(path.join(versionObj.versionedPath, 'index.html'));
+ } catch (err) {
+ if (err.code !== 'ENOENT') {
+ throw err;
+ }
+ }
+ const foldersToClean = [
+ path.join('.', versionObj.versionedPath, 'docs'),
+ path.join('.', versionObj.versionedPath, 'docs', 'tutorials'),
+ path.join('.', versionObj.versionedPath, 'docs', 'typescript'),
+ path.join('.', versionObj.versionedPath, 'docs', 'api'),
+ path.join('.', versionObj.versionedPath, 'docs', 'source', '_docs'),
+ './tmp'
+ ];
+ for (const folder of foldersToClean) {
+ let files = [];
+
+ try {
+ files = fs.readdirSync(folder);
+ } catch (err) {
+ if (err.code === 'ENOENT') {
+ continue;
+ }
+ }
+ for (const file of files) {
+ if (file.endsWith('.html')) {
+ console.log('Delete', path.join(folder, file));
+ fs.unlinkSync(path.join(folder, file));
+ }
+ }
+ }
+}
+
+function moveDocsToTemp() {
+ if (!versionObj.versionedPath) {
+ throw new Error('Cannot move unversioned deploy to /tmp');
+ }
+ try {
+ fs.rmSync('./tmp', { recursive: true });
+ } catch (err) {
+ if (err.code !== 'ENOENT') {
+ throw err;
+ }
+ }
+ const folder = versionObj.versionedPath.replace(/^\//, '');
+ const directory = fs.readdirSync(folder);
+ for (const file of directory) {
+ fsextra.moveSync(`${folder}/${file}`, `./tmp/${file}`);
+ }
+}
+
/**
* Array of array of semver numbers, sorted with highest number first
* @example
@@ -227,7 +298,7 @@ const versionObj = (() => {
getLatestVersionOf(5),
]
};
- const versionedDeploy = process.env.DOCS_DEPLOY === "true" ? !(base.currentVersion.listed === base.latestVersion.listed) : false;
+ const versionedDeploy = !!process.env.DOCS_DEPLOY ? !(base.currentVersion.listed === base.latestVersion.listed) : false;
const versionedPath = versionedDeploy ? `/docs/${base.currentVersion.path}` : '';
@@ -364,7 +435,7 @@ async function pugify(filename, options, isReload = false) {
let contents = fs.readFileSync(path.resolve(cwd, inputFile)).toString();
if (options.acquit) {
- contents = transform(contents, tests);
+ contents = transform(contents, getTests());
contents = contents.replaceAll(/^```acquit$/gmi, "```javascript");
}
@@ -423,7 +494,7 @@ async function pugify(filename, options, isReload = false) {
});
}
-// extra function to start watching for file-changes, without having to call this file directly with "watch"
+/** extra function to start watching for file-changes, without having to call this file directly with "watch" */
function startWatch() {
Object.entries(docsFilemap.fileMap).forEach(([file, fileValue]) => {
let watchPath = path.resolve(cwd, file);
@@ -491,7 +562,7 @@ const pathsToCopy = [
'docs/js',
'docs/css',
'docs/images'
-]
+];
/** Copy all static files when versionedDeploy is used */
async function copyAllRequiredFiles() {
@@ -500,7 +571,6 @@ async function copyAllRequiredFiles() {
return;
}
- const fsextra = require('fs-extra');
await Promise.all(pathsToCopy.map(async v => {
const resultPath = path.resolve(cwd, path.join('.', versionObj.versionedPath, v));
await fsextra.copy(v, resultPath);
@@ -517,8 +587,16 @@ exports.cwd = cwd;
// only run the following code if this file is the main module / entry file
if (isMain) {
- console.log(`Processing ~${files.length} files`);
- Promise.all([pugifyAllFiles(), copyAllRequiredFiles()]).then(() => {
- console.log("Done Processing");
- })
+ (async function main() {
+ console.log(`Processing ~${files.length} files`);
+
+ await deleteAllHtmlFiles();
+ await pugifyAllFiles();
+ await copyAllRequiredFiles();
+ if (!!process.env.DOCS_DEPLOY && !!versionObj.versionedPath) {
+ await moveDocsToTemp();
+ }
+
+ console.log('Done Processing');
+ })();
}
diff --git a/test/query.test.js b/test/query.test.js
index 5cba1f62c1e..f19c0e1a6aa 100644
--- a/test/query.test.js
+++ b/test/query.test.js
@@ -1370,7 +1370,7 @@ describe('Query', function() {
});
describe('setOptions', function() {
- it('works', async function() {
+ it('works', function() {
const q = new Query();
q.setOptions({ thing: 'cat' });
q.setOptions({ populate: ['fans'] });
@@ -1394,16 +1394,6 @@ describe('Query', function() {
assert.equal(q.options.hint.index2, -1);
assert.equal(q.options.readPreference.mode, 'secondary');
assert.equal(q.options.readPreference.tags[0].dc, 'eu');
-
- const Product = db.model('Product', productSchema);
- const [, doc2] = await Product.create([
- { numbers: [3, 4, 5] },
- { strings: 'hi there'.split(' '), w: 'majority' }
- ]);
-
- const docs = await Product.find().setOptions({ limit: 1, sort: { _id: -1 }, read: 'n' });
- assert.equal(docs.length, 1);
- assert.equal(docs[0].id, doc2.id);
});
it('populate as array in options (gh-4446)', function() {
diff --git a/test/types.array.test.js b/test/types.array.test.js
index f1a9192dc97..ccc83e02558 100644
--- a/test/types.array.test.js
+++ b/test/types.array.test.js
@@ -1915,4 +1915,26 @@ describe('types array', function() {
}
});
});
+
+ it('calls array setters (gh-11380)', function() {
+ let called = 0;
+ const Test = db.model('Test', new Schema({
+ intArr: [{
+ type: Number,
+ set: v => {
+ ++called;
+ return Math.floor(v);
+ }
+ }]
+ }));
+
+ assert.equal(called, 0);
+ const doc = new Test({ intArr: [3.14] });
+ assert.deepStrictEqual(doc.intArr, [3]);
+ assert.equal(called, 1);
+
+ doc.intArr.push(2.718);
+ assert.deepStrictEqual(doc.intArr, [3, 2]);
+ assert.equal(called, 2);
+ });
});
diff --git a/test/types/models.test.ts b/test/types/models.test.ts
index c84bd87a4f4..ff473840f1c 100644
--- a/test/types/models.test.ts
+++ b/test/types/models.test.ts
@@ -845,3 +845,11 @@ async function gh14072() {
await M.bulkWrite(bulkWriteArray);
}
+
+async function gh14003() {
+ const schema = new Schema({ name: String });
+ const TestModel = model('Test', schema);
+
+ await TestModel.validate({ name: 'foo' }, ['name']);
+ await TestModel.validate({ name: 'foo' }, { pathsToSkip: ['name'] });
+}
diff --git a/types/models.d.ts b/types/models.d.ts
index 73e0e4480d8..45f67138b46 100644
--- a/types/models.d.ts
+++ b/types/models.d.ts
@@ -466,8 +466,9 @@ declare module 'mongoose' {
/** Casts and validates the given object against this model's schema, passing the given `context` to custom validators. */
     validate(): Promise<void>;
-    validate(optional: any): Promise<void>;
-    validate(optional: any, pathsToValidate: PathsToValidate): Promise<void>;
+    validate(obj: any): Promise<void>;
+    validate(obj: any, pathsOrOptions: PathsToValidate): Promise<void>;
+    validate(obj: any, pathsOrOptions: { pathsToSkip?: pathsToSkip }): Promise<void>;
/** Watches the underlying collection for changes using [MongoDB change streams](https://www.mongodb.com/docs/manual/changeStreams/). */
watch(pipeline?: Array>, options?: mongodb.ChangeStreamOptions & { hydrate?: boolean }): mongodb.ChangeStream;
diff --git a/types/query.d.ts b/types/query.d.ts
index 54549a2900d..ed867cb8bde 100644
--- a/types/query.d.ts
+++ b/types/query.d.ts
@@ -212,7 +212,7 @@ declare module 'mongoose' {
allowDiskUse(value: boolean): this;
/** Specifies arguments for an `$and` condition. */
-    and(array: FilterQuery<DocType>[]): this;
+    and(array: FilterQuery<RawDocType>[]): this;
/** Specifies the batchSize option. */
batchSize(val: number): this;
@@ -261,7 +261,7 @@ declare module 'mongoose' {
/** Specifies this query as a `countDocuments` query. */
countDocuments(
- criteria?: FilterQuery,
+ criteria?: FilterQuery,
options?: QueryOptions
): QueryWithHelpers;
@@ -277,10 +277,10 @@ declare module 'mongoose' {
* collection, regardless of the value of `single`.
*/
deleteMany(
- filter?: FilterQuery,
+ filter?: FilterQuery,
options?: QueryOptions
): QueryWithHelpers;
- deleteMany(filter: FilterQuery): QueryWithHelpers<
+ deleteMany(filter: FilterQuery): QueryWithHelpers<
any,
DocType,
THelpers,
@@ -295,10 +295,10 @@ declare module 'mongoose' {
* option.
*/
deleteOne(
- filter?: FilterQuery,
+ filter?: FilterQuery,
options?: QueryOptions
): QueryWithHelpers;
- deleteOne(filter: FilterQuery): QueryWithHelpers<
+ deleteOne(filter: FilterQuery): QueryWithHelpers<
any,
DocType,
THelpers,
@@ -310,7 +310,7 @@ declare module 'mongoose' {
/** Creates a `distinct` query: returns the distinct values of the given `field` that match `filter`. */
distinct(
field: DocKey,
- filter?: FilterQuery
+ filter?: FilterQuery
): QueryWithHelpers : ResultType>, DocType, THelpers, RawDocType, 'distinct'>;
/** Specifies a `$elemMatch` query condition. When called with one argument, the most recent path passed to `where()` is used. */
@@ -350,65 +350,65 @@ declare module 'mongoose' {
/** Creates a `find` query: gets a list of documents that match `filter`. */
find(
- filter: FilterQuery,
- projection?: ProjectionType | null,
+ filter: FilterQuery,
+ projection?: ProjectionType | null,
options?: QueryOptions | null
): QueryWithHelpers, DocType, THelpers, RawDocType, 'find'>;
find(
- filter: FilterQuery,
- projection?: ProjectionType | null
+ filter: FilterQuery,
+ projection?: ProjectionType | null
): QueryWithHelpers, DocType, THelpers, RawDocType, 'find'>;
find(
- filter: FilterQuery
- ): QueryWithHelpers, DocType, THelpers, RawDocType, 'find'>;
+ filter: FilterQuery
+ ): QueryWithHelpers, DocType, THelpers, RawDocType, 'find'>;
find(): QueryWithHelpers, DocType, THelpers, RawDocType, 'find'>;
/** Declares the query a findOne operation. When executed, returns the first found document. */
findOne(
- filter?: FilterQuery,
- projection?: ProjectionType | null,
+ filter?: FilterQuery,
+ projection?: ProjectionType | null,
options?: QueryOptions | null
): QueryWithHelpers;
findOne(
- filter?: FilterQuery,
- projection?: ProjectionType | null
+ filter?: FilterQuery,
+ projection?: ProjectionType | null
): QueryWithHelpers;
findOne(
- filter?: FilterQuery
- ): QueryWithHelpers;
+ filter?: FilterQuery
+ ): QueryWithHelpers;
/** Creates a `findOneAndDelete` query: atomically finds the given document, deletes it, and returns the document as it was before deletion. */
findOneAndDelete(
- filter?: FilterQuery,
+ filter?: FilterQuery,
options?: QueryOptions | null
): QueryWithHelpers;
/** Creates a `findOneAndUpdate` query: atomically find the first document that matches `filter` and apply `update`. */
findOneAndUpdate(
- filter: FilterQuery,
- update: UpdateQuery,
+ filter: FilterQuery,
+ update: UpdateQuery,
options: QueryOptions & { includeResultMetadata: true }
): QueryWithHelpers, DocType, THelpers, RawDocType, 'findOneAndUpdate'>;
findOneAndUpdate(
- filter: FilterQuery,
- update: UpdateQuery,
+ filter: FilterQuery,
+ update: UpdateQuery,
options: QueryOptions & { upsert: true } & ReturnsNewDoc
): QueryWithHelpers;
findOneAndUpdate(
- filter?: FilterQuery,
- update?: UpdateQuery,
+ filter?: FilterQuery,
+ update?: UpdateQuery,
options?: QueryOptions | null
): QueryWithHelpers;
/** Declares the query a findById operation. When executed, returns the document with the given `_id`. */
findById(
id: mongodb.ObjectId | any,
- projection?: ProjectionType | null,
+ projection?: ProjectionType | null,
options?: QueryOptions | null
): QueryWithHelpers;
findById(
id: mongodb.ObjectId | any,
- projection?: ProjectionType | null
+ projection?: ProjectionType | null
): QueryWithHelpers;
findById(
id: mongodb.ObjectId | any
@@ -423,22 +423,22 @@ declare module 'mongoose' {
/** Creates a `findOneAndUpdate` query, filtering by the given `_id`. */
findByIdAndUpdate(
id: mongodb.ObjectId | any,
- update: UpdateQuery,
+ update: UpdateQuery,
options: QueryOptions & { includeResultMetadata: true }
): QueryWithHelpers;
findByIdAndUpdate(
id: mongodb.ObjectId | any,
- update: UpdateQuery,
+ update: UpdateQuery,
options: QueryOptions & { upsert: true } & ReturnsNewDoc
): QueryWithHelpers;
findByIdAndUpdate(
id?: mongodb.ObjectId | any,
- update?: UpdateQuery,
+ update?: UpdateQuery,
options?: QueryOptions | null
): QueryWithHelpers;
findByIdAndUpdate(
id: mongodb.ObjectId | any,
- update: UpdateQuery
+ update: UpdateQuery
): QueryWithHelpers;
/** Specifies a `$geometry` condition */
@@ -452,7 +452,7 @@ declare module 'mongoose' {
get(path: string): any;
/** Returns the current query filter (also known as conditions) as a POJO. */
-    getFilter(): FilterQuery<DocType>;
+    getFilter(): FilterQuery<RawDocType>;
/** Gets query options. */
getOptions(): QueryOptions;
@@ -461,7 +461,7 @@ declare module 'mongoose' {
getPopulatedPaths(): Array;
/** Returns the current query filter. Equivalent to `getFilter()`. */
-    getQuery(): FilterQuery<DocType>;
+    getQuery(): FilterQuery<RawDocType>;
/** Returns the current update operations as a JSON object. */
getUpdate(): UpdateQuery | UpdateWithAggregationPipeline | null;
@@ -531,7 +531,7 @@ declare module 'mongoose' {
maxTimeMS(ms: number): this;
/** Merges another Query or conditions object into this one. */
- merge(source: Query | FilterQuery): this;
+ merge(source: Query | FilterQuery): this;
/** Specifies a `$mod` condition, filters documents for documents whose `path` property is a number that is equal to `remainder` modulo `divisor`. */
mod(path: K, val: number): this;
@@ -559,10 +559,10 @@ declare module 'mongoose' {
nin(val: Array): this;
/** Specifies arguments for an `$nor` condition. */
-    nor(array: Array<FilterQuery<DocType>>): this;
+    nor(array: Array<FilterQuery<RawDocType>>): this;
/** Specifies arguments for an `$or` condition. */
-    or(array: Array<FilterQuery<DocType>>): this;
+    or(array: Array<FilterQuery<RawDocType>>): this;
/**
* Make this query throw an error if no documents match the given `filter`.
@@ -619,7 +619,7 @@ declare module 'mongoose' {
* not accept any [atomic](https://www.mongodb.com/docs/manual/tutorial/model-data-for-atomic-operations/#pattern) operators (`$set`, etc.)
*/
replaceOne(
- filter?: FilterQuery,
+ filter?: FilterQuery,
replacement?: DocType | AnyObject,
options?: QueryOptions | null
): QueryWithHelpers;
@@ -678,9 +678,9 @@ declare module 'mongoose' {
setOptions(options: QueryOptions, overwrite?: boolean): this;
/** Sets the query conditions to the provided JSON object. */
-    setQuery(val: FilterQuery<DocType> | null): void;
+    setQuery(val: FilterQuery<RawDocType> | null): void;
-    setUpdate(update: UpdateQuery<DocType> | UpdateWithAggregationPipeline): void;
+    setUpdate(update: UpdateQuery<RawDocType> | UpdateWithAggregationPipeline): void;
/** Specifies an `$size` query condition. When called with one argument, the most recent path passed to `where()` is used. */
size(path: K, val: number): this;
@@ -718,8 +718,8 @@ declare module 'mongoose' {
* the `multi` option.
*/
updateMany(
- filter?: FilterQuery,
- update?: UpdateQuery | UpdateWithAggregationPipeline,
+ filter?: FilterQuery,
+ update?: UpdateQuery | UpdateWithAggregationPipeline,
options?: QueryOptions | null
): QueryWithHelpers;
@@ -728,8 +728,8 @@ declare module 'mongoose' {
* `update()`, except it does not support the `multi` or `overwrite` options.
*/
updateOne(
- filter?: FilterQuery,
- update?: UpdateQuery | UpdateWithAggregationPipeline,
+ filter?: FilterQuery,
+ update?: UpdateQuery | UpdateWithAggregationPipeline,
options?: QueryOptions | null
): QueryWithHelpers;