Skip to content

Commit

Permalink
Merge branch '7.x'
Browse files Browse the repository at this point in the history
  • Loading branch information
vkarpov15 committed Nov 28, 2023
2 parents 6b78ba1 + d31310a commit 4dead0a
Show file tree
Hide file tree
Showing 11 changed files with 221 additions and 103 deletions.
12 changes: 12 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,15 @@
7.6.6 / 2023-11-27
==================
* perf: avoid double-running setter logic when calling `push()` #14120 #11380
* fix(populate): set populated docs in correct order when populating virtual underneath doc array with justOne #14105 #14018
* fix: bump mongodb driver -> 5.9.1 #14084 #13829 [lorand-horvath](https://github.com/lorand-horvath)
* types: allow defining document array using [{ prop: String }] syntax #14095 #13424
* types: correct types for when includeResultMetadata: true is set #14078 #13987 [prathamVaidya](https://github.com/prathamVaidya)
* types(query): base filters and projections off of RawDocType instead of DocType so autocomplete doesn't show populate #14118 #14077
* types: make property names show up in intellisense for UpdateQuery #14123 #14090
* types(model): support calling Model.validate() with pathsToSkip option #14088 #14003
* docs: remove "DEPRECATED" warning mistakenly added to read() tags param #13980

8.0.1 / 2023-11-15
==================
* fix: retain key order with aliases when creating indexes with alias #14042 [meabed](https://github.com/meabed)
Expand Down
2 changes: 1 addition & 1 deletion lib/aggregate.js
Original file line number Diff line number Diff line change
Expand Up @@ -665,7 +665,7 @@ Aggregate.prototype.unionWith = function(options) {
* await Model.aggregate(pipeline).read('primaryPreferred');
*
* @param {String|ReadPreference} pref one of the listed preference options or their aliases
* @param {Array} [tags] optional tags for this query. DEPRECATED
* @param {Array} [tags] optional tags for this query.
* @return {Aggregate} this
* @api public
* @see mongodb https://www.mongodb.com/docs/manual/applications/replication/#read-preference
Expand Down
6 changes: 5 additions & 1 deletion lib/stateMachine.js
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,11 @@ StateMachine.ctor = function() {
*/

StateMachine.prototype._changeState = function _changeState(path, nextState) {
const prevBucket = this.states[this.paths[path]];
const prevState = this.paths[path];
if (prevState === nextState) {
return;
}
const prevBucket = this.states[prevState];
if (prevBucket) delete prevBucket[path];

this.paths[path] = nextState;
Expand Down
21 changes: 12 additions & 9 deletions lib/types/array/methods/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -374,7 +374,15 @@ const methods = {
if (val != null && utils.hasUserDefinedProperty(val, '$each')) {
atomics.$push = val;
} else {
atomics.$push.$each = atomics.$push.$each.concat(val);
if (val.length === 1) {
atomics.$push.$each.push(val[0]);
} else if (val.length < 10000) {
atomics.$push.$each.push(...val);
} else {
for (const v of val) {
atomics.$push.$each.push(v);
}
}
}
} else {
atomics[op] = val;
Expand Down Expand Up @@ -403,8 +411,7 @@ const methods = {
addToSet() {
_checkManualPopulation(this, arguments);

let values = [].map.call(arguments, this._mapCast, this);
values = this[arraySchemaSymbol].applySetters(values, this[arrayParentSymbol]);
const values = [].map.call(arguments, this._mapCast, this);
const added = [];
let type = '';
if (values[0] instanceof ArraySubdocument) {
Expand All @@ -415,7 +422,7 @@ const methods = {
type = 'ObjectId';
}

const rawValues = utils.isMongooseArray(values) ? values.__array : this;
const rawValues = utils.isMongooseArray(values) ? values.__array : values;
const rawArray = utils.isMongooseArray(this) ? this.__array : this;

rawValues.forEach(function(v) {
Expand Down Expand Up @@ -682,10 +689,7 @@ const methods = {

_checkManualPopulation(this, values);

const parent = this[arrayParentSymbol];
values = [].map.call(values, this._mapCast, this);
values = this[arraySchemaSymbol].applySetters(values, parent, undefined,
undefined, { skipDocumentArrayCast: true });
let ret;
const atomics = this[arrayAtomicsSymbol];
this._markModified();
Expand All @@ -711,7 +715,7 @@ const methods = {
'with different `$position`');
}
atomic = values;
ret = [].push.apply(arr, values);
ret = _basePush.apply(arr, values);
}

this._registerAtomic('$push', atomic);
Expand Down Expand Up @@ -917,7 +921,6 @@ const methods = {
values = arguments;
} else {
values = [].map.call(arguments, this._cast, this);
values = this[arraySchemaSymbol].applySetters(values, this[arrayParentSymbol]);
}

const arr = utils.isMongooseArray(this) ? this.__array : this;
Expand Down
4 changes: 2 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -93,10 +93,10 @@
"docs:merge:6x": "git merge 6.x",
"docs:test": "npm run docs:generate && npm run docs:generate:search",
"docs:view": "node ./scripts/static.js",
"docs:prepare:publish:stable": "npm run docs:checkout:gh-pages && npm run docs:merge:stable && npm run docs:clean:stable && npm run docs:generate && npm run docs:generate:search",
"docs:prepare:publish:stable": "npm run docs:checkout:gh-pages && npm run docs:merge:stable && npm run docs:generate && npm run docs:generate:search",
"docs:prepare:publish:5x": "npm run docs:checkout:5x && npm run docs:merge:5x && npm run docs:clean:stable && npm run docs:generate && npm run docs:copy:tmp && npm run docs:checkout:gh-pages && npm run docs:copy:tmp:5x",
"docs:prepare:publish:6x": "npm run docs:checkout:6x && npm run docs:merge:6x && npm run docs:clean:stable && env DOCS_DEPLOY=true npm run docs:generate && npm run docs:move:6x:tmp && npm run docs:checkout:gh-pages && npm run docs:copy:tmp:6x",
"docs:prepare:publish:7x": "git checkout 7.x && npm run docs:clean:stable && env DOCS_DEPLOY=true npm run docs:generate && mv ./docs/7.x ./tmp && npm run docs:checkout:gh-pages && rimraf ./docs/7.x && ncp ./tmp ./docs/7.x",
"docs:prepare:publish:7x": "env DOCS_DEPLOY=true npm run docs:generate && npm run docs:checkout:gh-pages && rimraf ./docs/7.x && mv ./tmp ./docs/7.x",
"docs:check-links": "blc http://127.0.0.1:8089 -ro",
"lint": "eslint .",
"lint-js": "eslint . --ext .js --ext .cjs",
Expand Down
144 changes: 111 additions & 33 deletions scripts/website.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ Error.stackTraceLimit = Infinity;

const acquit = require('acquit');
const fs = require('fs');
const fsextra = require('fs-extra');
const path = require('path');
const pug = require('pug');
const pkg = require('../package.json');
Expand Down Expand Up @@ -31,7 +32,7 @@ require('acquit-ignore')();
const { marked: markdown } = require('marked');
const highlight = require('highlight.js');
const { promisify } = require("util");
const renderer = {
markdown.use({
heading: function(text, level, raw, slugger) {
const slug = slugger.slug(raw);
return `<h${level} id="${slug}">
Expand All @@ -40,7 +41,7 @@ const renderer = {
</a>
</h${level}>\n`;
}
};
});
markdown.setOptions({
highlight: function(code, language) {
if (!language) {
Expand All @@ -52,30 +53,100 @@ markdown.setOptions({
return highlight.highlight(code, { language }).value;
}
});
markdown.use({ renderer });

const testPath = path.resolve(cwd, 'test')

const tests = [
...acquit.parse(fs.readFileSync(path.join(testPath, 'geojson.test.js')).toString()),
...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/transactions.test.js')).toString()),
...acquit.parse(fs.readFileSync(path.join(testPath, 'schema.alias.test.js')).toString()),
...acquit.parse(fs.readFileSync(path.join(testPath, 'model.middleware.test.js')).toString()),
...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/date.test.js')).toString()),
...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/lean.test.js')).toString()),
...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/cast.test.js')).toString()),
...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/findoneandupdate.test.js')).toString()),
...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/custom-casting.test.js')).toString()),
...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/getters-setters.test.js')).toString()),
...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/virtuals.test.js')).toString()),
...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/defaults.test.js')).toString()),
...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/discriminators.test.js')).toString()),
...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/promises.test.js')).toString()),
...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/schematypes.test.js')).toString()),
...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/validation.test.js')).toString()),
...acquit.parse(fs.readFileSync(path.join(testPath, 'docs/schemas.test.js')).toString())

const testPath = path.resolve(cwd, 'test');

/** additional test files to scan, relative to `test/` */
const additionalTestFiles = [
'geojson.test.js',
'schema.alias.test.js'
];
/** ignored files from `test/docs/` */
const ignoredTestFiles = [
// ignored because acquit does not like "for await"
'asyncIterator.test.js'
];

/**
* Load all test file contents with acquit
* @returns {Object[]} acquit ast array
*/
/**
 * Parse every documentation test file with acquit.
 * Scans `additionalTestFiles` (relative to `test/`) plus everything in
 * `test/docs/` except `ignoredTestFiles`.
 * @returns {Object[]} flattened array of acquit AST entries
 */
function getTests() {
  const docsDir = path.resolve(testPath, 'docs');

  const extraPaths = additionalTestFiles.map(name => path.join(testPath, name));
  const docsPaths = fs
    .readdirSync(docsDir)
    .filter(name => !ignoredTestFiles.includes(name))
    .map(name => path.join(docsDir, name));

  const parsed = [];
  for (const filePath of [...extraPaths, ...docsPaths]) {
    try {
      parsed.push(...acquit.parse(fs.readFileSync(filePath).toString()));
    } catch (err) {
      // attach the offending file path to the acquit error for easier debugging
      err.filePath = filePath;
      throw err;
    }
  }

  return parsed;
}

/**
 * Delete all previously generated HTML output so stale pages do not linger
 * between builds: the versioned `index.html` plus every `*.html` file in the
 * known output folders. Missing files/folders (ENOENT) are ignored; any other
 * filesystem error is rethrown.
 */
function deleteAllHtmlFiles() {
  try {
    console.log('Delete', path.join(versionObj.versionedPath, 'index.html'));
    fs.unlinkSync(path.join(versionObj.versionedPath, 'index.html'));
  } catch (err) {
    if (err.code !== 'ENOENT') {
      throw err;
    }
  }
  const foldersToClean = [
    path.join('.', versionObj.versionedPath, 'docs'),
    path.join('.', versionObj.versionedPath, 'docs', 'tutorials'),
    path.join('.', versionObj.versionedPath, 'docs', 'typescript'),
    path.join('.', versionObj.versionedPath, 'docs', 'api'),
    path.join('.', versionObj.versionedPath, 'docs', 'source', '_docs'),
    './tmp'
  ];
  for (const folder of foldersToClean) {
    let files = [];

    try {
      files = fs.readdirSync(folder);
    } catch (err) {
      if (err.code === 'ENOENT') {
        continue;
      }
      // BUGFIX: previously non-ENOENT errors (e.g. EACCES) were silently
      // swallowed, leaving stale HTML behind without any signal. Surface them.
      throw err;
    }
    for (const file of files) {
      if (file.endsWith('.html')) {
        console.log('Delete', path.join(folder, file));
        fs.unlinkSync(path.join(folder, file));
      }
    }
  }
}

/**
 * Move the freshly generated versioned docs folder into `./tmp` so it
 * survives the checkout of the gh-pages branch and can be copied back.
 * @throws {Error} when no versioned path is configured (unversioned deploy)
 */
function moveDocsToTemp() {
  if (!versionObj.versionedPath) {
    throw new Error('Cannot move unversioned deploy to /tmp');
  }
  try {
    fs.rmSync('./tmp', { recursive: true });
  } catch (err) {
    // a missing ./tmp is fine; anything else is a real failure
    if (err.code !== 'ENOENT') {
      throw err;
    }
  }
  const sourceFolder = versionObj.versionedPath.replace(/^\//, '');
  for (const entry of fs.readdirSync(sourceFolder)) {
    fsextra.moveSync(`${sourceFolder}/${entry}`, `./tmp/${entry}`);
  }
}

/**
* Array of array of semver numbers, sorted with highest number first
* @example
Expand Down Expand Up @@ -227,7 +298,7 @@ const versionObj = (() => {
getLatestVersionOf(5),
]
};
const versionedDeploy = process.env.DOCS_DEPLOY === "true" ? !(base.currentVersion.listed === base.latestVersion.listed) : false;
const versionedDeploy = !!process.env.DOCS_DEPLOY ? !(base.currentVersion.listed === base.latestVersion.listed) : false;

const versionedPath = versionedDeploy ? `/docs/${base.currentVersion.path}` : '';

Expand Down Expand Up @@ -364,7 +435,7 @@ async function pugify(filename, options, isReload = false) {
let contents = fs.readFileSync(path.resolve(cwd, inputFile)).toString();

if (options.acquit) {
contents = transform(contents, tests);
contents = transform(contents, getTests());

contents = contents.replaceAll(/^```acquit$/gmi, "```javascript");
}
Expand Down Expand Up @@ -423,7 +494,7 @@ async function pugify(filename, options, isReload = false) {
});
}

// extra function to start watching for file-changes, without having to call this file directly with "watch"
/** extra function to start watching for file-changes, without having to call this file directly with "watch" */
function startWatch() {
Object.entries(docsFilemap.fileMap).forEach(([file, fileValue]) => {
let watchPath = path.resolve(cwd, file);
Expand Down Expand Up @@ -491,7 +562,7 @@ const pathsToCopy = [
'docs/js',
'docs/css',
'docs/images'
]
];

/** Copy all static files when versionedDeploy is used */
async function copyAllRequiredFiles() {
Expand All @@ -500,7 +571,6 @@ async function copyAllRequiredFiles() {
return;
}

const fsextra = require('fs-extra');
await Promise.all(pathsToCopy.map(async v => {
const resultPath = path.resolve(cwd, path.join('.', versionObj.versionedPath, v));
await fsextra.copy(v, resultPath);
Expand All @@ -517,8 +587,16 @@ exports.cwd = cwd;

// only run the following code if this file is the main module / entry file
if (isMain) {
console.log(`Processing ~${files.length} files`);
Promise.all([pugifyAllFiles(), copyAllRequiredFiles()]).then(() => {
console.log("Done Processing");
})
(async function main() {
console.log(`Processing ~${files.length} files`);

await deleteAllHtmlFiles();
await pugifyAllFiles();
await copyAllRequiredFiles();
if (!!process.env.DOCS_DEPLOY && !!versionObj.versionedPath) {
await moveDocsToTemp();
}

console.log('Done Processing');
})();
}
12 changes: 1 addition & 11 deletions test/query.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -1370,7 +1370,7 @@ describe('Query', function() {
});

describe('setOptions', function() {
it('works', async function() {
it('works', function() {
const q = new Query();
q.setOptions({ thing: 'cat' });
q.setOptions({ populate: ['fans'] });
Expand All @@ -1394,16 +1394,6 @@ describe('Query', function() {
assert.equal(q.options.hint.index2, -1);
assert.equal(q.options.readPreference.mode, 'secondary');
assert.equal(q.options.readPreference.tags[0].dc, 'eu');

const Product = db.model('Product', productSchema);
const [, doc2] = await Product.create([
{ numbers: [3, 4, 5] },
{ strings: 'hi there'.split(' '), w: 'majority' }
]);

const docs = await Product.find().setOptions({ limit: 1, sort: { _id: -1 }, read: 'n' });
assert.equal(docs.length, 1);
assert.equal(docs[0].id, doc2.id);
});

it('populate as array in options (gh-4446)', function() {
Expand Down
22 changes: 22 additions & 0 deletions test/types.array.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -1915,4 +1915,26 @@ describe('types array', function() {
}
});
});

it('calls array setters (gh-11380)', function() {
  // counts how many times the schema-level setter runs
  let setterCalls = 0;
  const Test = db.model('Test', new Schema({
    intArr: [{
      type: Number,
      set: v => {
        ++setterCalls;
        return Math.floor(v);
      }
    }]
  }));

  // setter must not run before any value is assigned
  assert.equal(setterCalls, 0);

  // document construction casts the initial array through the setter once
  const doc = new Test({ intArr: [3.14] });
  assert.deepStrictEqual(doc.intArr, [3]);
  assert.equal(setterCalls, 1);

  // push() must run the setter exactly once per pushed value (gh-11380)
  doc.intArr.push(2.718);
  assert.deepStrictEqual(doc.intArr, [3, 2]);
  assert.equal(setterCalls, 2);
});
});
8 changes: 8 additions & 0 deletions test/types/models.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -845,3 +845,11 @@ async function gh14072() {

await M.bulkWrite(bulkWriteArray);
}

async function gh14003() {
  const schema = new Schema({ name: String });
  const TestModel = model('Test', schema);

  // Model.validate() should type-check with a pathsToValidate array...
  await TestModel.validate({ name: 'foo' }, ['name']);
  // ...and with an options object carrying pathsToSkip (gh-14003)
  await TestModel.validate({ name: 'foo' }, { pathsToSkip: ['name'] });
}
Loading

0 comments on commit 4dead0a

Please sign in to comment.