diff --git a/config/defaults/development.js b/config/defaults/development.js
index a8b8c2f09..d6b527c5f 100644
--- a/config/defaults/development.js
+++ b/config/defaults/development.js
@@ -32,7 +32,7 @@ module.exports = {
       */
     },
     promise: global.Promise,
-    restoreExceptions: ['uploads'],
+    restoreExceptions: [], // collections excluded from db restore: npm run seed:mongorestore
   },
   // SSL on express server (FYI : Wiki)
   // secure: {
@@ -106,10 +106,10 @@ module.exports = {
   },
   // zxcvbn is used to manage password security
   zxcvbn: {
-    forbiddenPasswords: ['12345678', 'azertyui', 'qwertyui', 'azertyuiop', 'qwertyuiop'],
-    minSize: 8,
-    maxSize: 126,
-    minimumScore: 3,
+    forbiddenPasswords: ['12345678', 'azertyui', 'qwertyui', 'azertyuiop', 'qwertyuiop'], // forbidden passwords
+    minSize: 8, // min password size
+    maxSize: 126, // max password size
+    minimumScore: 3, // min password complexity score
   },
   // jwt is for token authentification
   jwt: {
diff --git a/modules/uploads/repositories/uploads.repository.js b/modules/uploads/repositories/uploads.repository.js
index db1017882..ca9c19c5f 100644
--- a/modules/uploads/repositories/uploads.repository.js
+++ b/modules/uploads/repositories/uploads.repository.js
@@ -107,3 +107,35 @@ exports.purge = async (kind, collection, key) => {
   });
   return { deletedCount: toDelete.length };
 };
+
+/**
+ * @desc Function to import a list of uploads into the db
+ * @param {[Object]} uploads
+ * @param {[String]} filters
+ * @param {String} collection
+ * @return {Promise} uploads
+ */
+exports.import = (uploads, filters, collection) => {
+  const _schema = new mongoose.Schema({}, { collection, strict: false });
+  let model;
+  try {
+    // reuse the model if it is already registered
+    model = mongoose.model(collection);
+  } catch (error) {
+    // otherwise register it lazily with a permissive schema
+    model = mongoose.model(collection, _schema);
+  }
+  return model.bulkWrite(uploads.map((upload) => {
+    const filter = {};
+    filters.forEach((value) => {
+      filter[value] = upload[value];
+    });
+    return {
+      updateOne: {
+        filter,
+        update: upload,
+        upsert: true,
+      },
+    };
+  }));
+};
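Note on the repository import above: the try/catch around mongoose.model(collection) relies on Mongoose throwing a MissingSchemaError when no model has been registered under that name yet, so the model is created lazily with an empty, non-strict schema bound to the dump collection. A minimal standalone sketch of that lookup pattern (getModel is an illustrative name, not part of this change):

    const mongoose = require('mongoose');

    // mongoose.model(name) throws a MissingSchemaError when the model was
    // never registered, so the catch branch registers it on first use with
    // an empty, non-strict schema mapped onto the target collection.
    // Note: getModel is an illustrative helper name, not part of the change.
    const getModel = (collection) => {
      try {
        return mongoose.model(collection); // already registered
      } catch (err) {
        const schema = new mongoose.Schema({}, { collection, strict: false });
        return mongoose.model(collection, schema); // register lazily
      }
    };

    console.log(getModel('uploads.files').modelName); // 'uploads.files'

Because bulkWrite is called with upsert: true and a filter built from the filters keys (here ['_id']), re-running the restore updates documents that already exist instead of duplicating them.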
diff --git a/modules/uploads/services/uploads.data.service.js b/modules/uploads/services/uploads.data.service.js
index 5ac60135c..99dee4906 100644
--- a/modules/uploads/services/uploads.data.service.js
+++ b/modules/uploads/services/uploads.data.service.js
@@ -22,3 +22,15 @@ exports.delete = async (user) => {
   const result = await UploadRepository.deleteMany({ 'metadata.user': user._id });
   return Promise.resolve(result);
 };
+
+/**
+ * @desc Function to ask repository to import a list of uploads
+ * @param {[Object]} uploads
+ * @param {[String]} filters
+ * @param {String} collection
+ * @return {Promise} uploads
+ */
+exports.import = (uploads, filters, collection) => {
+  const result = UploadRepository.import(uploads, filters, collection);
+  return result;
+};
diff --git a/scripts/db/mongorestore.js b/scripts/db/mongorestore.js
index d3f854a24..698a07b69 100644
--- a/scripts/db/mongorestore.js
+++ b/scripts/db/mongorestore.js
@@ -1,3 +1,6 @@
+/* eslint-disable no-await-in-loop */
+/* eslint-disable no-restricted-syntax */
+
 /**
  * Module dependencies
  */
@@ -7,6 +10,7 @@
 const path = require('path');
 const fs = require('fs');
 const bson = require('bson');
+const fsPromises = fs.promises;
 
 const config = require(path.resolve('./config'));
 const mongooseService = require(path.resolve('./lib/services/mongoose'));
@@ -14,6 +18,22 @@
 /**
  * Work
  */
+const listDir = async (database) => {
+  try {
+    return fsPromises.readdir(path.resolve(`./scripts/db/dump/${database}`));
+  } catch (err) {
+    console.error('Error occurred while reading dump directory! ./scripts/db/dump/', err);
+  }
+};
+
+const importFile = async (database, collection) => {
+  try {
+    return fsPromises.readFile(path.resolve(`./scripts/db/dump/${database}/${collection}.bson`));
+  } catch (err) {
+    console.error('Error occurred while reading dump file! ./scripts/db/dump/', err);
+  }
+};
+
 const seedData = async () => {
   try {
     console.log(chalk.bold.green('Start Seed Dump by update items if differents'));
@@ -24,24 +44,32 @@
     let database = config.db.uri.split('/')[config.db.uri.split('/').length - 1];
     database = database.split('?')[0];
 
-    console.log(chalk.bold.green(`database selected: ${database}`));
-    fs.readdirSync(path.resolve(`./scripts/db/dump/${database}`)).forEach((file) => {
+    const files = await listDir(database);
+
+    for (const file of files) {
       if (file.slice(-4) === 'bson' && !config.db.restoreExceptions.includes(file.split('.')[0])) {
         const collection = file.slice(0, -5);
-        const buffer = fs.readFileSync(path.resolve(`./scripts/db/dump/${database}/${collection}.bson`));
+        // read the collection dump file into a buffer
+        const buffer = await importFile(database, collection);
         let bfIdx = 0;
         const items = [];
         while (bfIdx < buffer.length) bfIdx = bson.deserializeStream(buffer, bfIdx, 1, items, items.length);
-        const Service = require(path.resolve(`./modules/${collection}/services/${collection}.data.service`));
-        Service.import(items, ['_id']);
+        // upsert the items through the module's data service
+        if (collection.split('.')[0] === 'uploads') {
+          const Service = require(path.resolve(`./modules/${collection.split('.')[0]}/services/${collection.split('.')[0]}.data.service`));
+          await Service.import(items, ['_id'], collection);
+        } else {
+          const Service = require(path.resolve(`./modules/${collection}/services/${collection}.data.service`));
+          await Service.import(items, ['_id']);
+        }
         console.log(chalk.blue(`Database Seeding ${collection} : ${items.length}`));
       }
-    });
+    }
   } catch (err) {
     console.log(chalk.bold.red(`Error ${err}`));
   }
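For context, the restore loop decodes raw mongodump .bson files: each file is a plain concatenation of BSON documents, and bson.deserializeStream reads one document at the given offset, appends it to the items array, and returns the offset of the next document, which is why the index is threaded through the while loop. A minimal standalone sketch of that decoding step (readBsonDump and the example path are illustrative assumptions, not part of the change):

    const fsPromises = require('fs').promises;
    const bson = require('bson');

    // Decode every document contained in a mongodump .bson file:
    // deserializeStream reads one document starting at `index`, pushes it
    // into `items`, and returns the offset where the next document begins.
    // Note: readBsonDump is an illustrative helper, not part of the change.
    const readBsonDump = async (file) => {
      const buffer = await fsPromises.readFile(file);
      const items = [];
      let index = 0;
      while (index < buffer.length) {
        index = bson.deserializeStream(buffer, index, 1, items, items.length);
      }
      return items;
    };

    // Assumed example path, for illustration only:
    // readBsonDump('./scripts/db/dump/myDatabase/users.bson')
    //   .then((items) => console.log(`${items.length} documents`));

With restoreExceptions emptied in the development defaults, npm run seed:mongorestore now restores the uploads dump files as well; the collection.split('.')[0] === 'uploads' check routes dotted collection names (e.g. uploads.files) to the uploads data service while passing the full collection name through to the repository, so the documents land in the right collection.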