diff --git a/.eslintrc b/.eslintrc
new file mode 100644
index 00000000..b2056b30
--- /dev/null
+++ b/.eslintrc
@@ -0,0 +1,7 @@
+{
+  "parserOptions": {
+    "ecmaVersion": 2017
+  },
+  "plugins": ["mocha"],
+  "extends": "walmart"
+}
diff --git a/.travis.yml b/.travis.yml
index 4aed1f83..b3a349d8 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -11,6 +11,7 @@ addons:
   repo_token: 205921b25cd059b92e7234ee98533a35f08c81c5debd68dad3e6d468258c0489
   hosts:
   - localhost.walmart.com
+  mariadb: '10.0'
 before_install:
   - sudo update-java-alternatives -s java-8-oracle
@@ -18,4 +19,4 @@ before_install:
   - java -version
   - wget https://www.dropbox.com/s/f86zyi3qmhqvfbp/apache-cassandra-3.9.tar.gz?dl=1 -O apache-cassandra-3.9.tar.gz && tar -xvzf apache-cassandra-3.9.tar.gz
   - sudo sh ./apache-cassandra-3.9/bin/cassandra -R
-  - sleep 20
\ No newline at end of file
+  - sleep 20
diff --git a/electrode-ota-server-dao-factory/README.md b/electrode-ota-server-dao-factory/README.md
new file mode 100644
index 00000000..5f837c97
--- /dev/null
+++ b/electrode-ota-server-dao-factory/README.md
@@ -0,0 +1,39 @@
+# electrode-ota-server-dao-factory
+
+This project is part of the [electrode-ota-server](https://github.com/electrode-io/electrode-ota-server)
+
+It is not meant to be used as a standalone module; use at your own risk.
+
+This service provides an abstraction layer over the DAO data store, allowing a pluggable datastore driver to power DAO storage.
+Currently supported datastores are MariaDB (electrode-ota-server-dao-mariadb) and Cassandra (electrode-ota-server-dao-cassandra).
+
+## Install
+
+```
+$ npm install electrode-ota-server-dao-factory
+```
+
+## Usage
+
+* Install this module in your package.json.
+* Update your config to use this package and specify a driver.
+* Add the driver and driver options.
+
+In this sample configuration, we use the MariaDB driver to back DAO storage.
+ +``` +{ + "electrode-ota-server-dao-factory": { + "options": { + "driver": "electrode-ota-server-dao-mariadb" + } + }, + "electrode-ota-server-dao-mariadb": { + "options": { + host: '127.0.0.1', + user: 'root', + password: 'root' + } + } +} +``` diff --git a/electrode-ota-server-dao-factory/package.json b/electrode-ota-server-dao-factory/package.json new file mode 100644 index 00000000..723a15cb --- /dev/null +++ b/electrode-ota-server-dao-factory/package.json @@ -0,0 +1,27 @@ +{ + "name": "electrode-ota-server-dao-factory", + "version": "3.0.0", + "description": "Electrode OTA Server - DAO Factory", + "author": "Dat Vong ", + "license": "Apache-2.0", + "repository": "https://github.com/electrode-io/electrode-ota-server", + "bugs": { + "url": "https://github.com/electrode-io/electrode-ota-server/issues" + }, + "homepage": "https://github.com/electrode-io/electrode-ota-server", + "main": "lib/index.js", + "scripts": { + "test": "ota-mocha", + "build": "ota-babel", + "coverage": "ota-nyc", + "prepublish": "npm run build" + }, + "keywords": ["electrode", "ota", "code-push", "react-native", "cordova"], + "dependencies": { + "electrode-ota-server-diregister": "^2.1.0-beta.7" + }, + "devDependencies": { + "electrode-ota-server-util-dev": "^2.1.0-beta.7", + "bluebird": "^3.5.1" + } +} diff --git a/electrode-ota-server-dao-factory/src/factory.js b/electrode-ota-server-dao-factory/src/factory.js new file mode 100644 index 00000000..14c86fdf --- /dev/null +++ b/electrode-ota-server-dao-factory/src/factory.js @@ -0,0 +1,477 @@ +import { alreadyExistsMsg } from "electrode-ota-server-errors"; +import { promiseMap, reducer, toJSON } from "electrode-ota-server-util"; +import _ from "lodash"; + +const isEmpty = arr => { + if (arr == null) return true; + if (arr.length === 0) return true; + return false; +}; +const isNotEmpty = arr => !isEmpty(arr); + +const apply = (target, source) => { + if (!source) { + return target; + } + for (const key of Object.keys(target)) { + if (key == "_validators") continue; + if (source.hasOwnProperty(key)) { + const newValue = source[key]; + target[key] = newValue; + } + } + return target; +}; + +const ACCESSKEY = [ + "name", + "id", + "expires", + "description", + "lastAccess", + "createdTime", + "createdBy", + "friendlyName" +]; + +// Set any property that's undefined or not exist to null +const objDefaultNull = (obj, keys) => { + for (const key of keys) { + if (obj[key] === void 0) { + obj[key] = null; + } + } + return obj; +}; + +const historySort = history => + history && + history.sort((a, b) => b.created_.getTime() - a.created_.getTime()); + +/** + * DAO Factory methods to save/update/delete DAOs to/from DataStore. 
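+ *
+ * A minimal usage sketch (illustrative; assumes a driver instance from one of the
+ * supported driver modules and any console-like logger):
+ *
+ *   const factory = new DaoFactory({ driver, logger: console });
+ *   await factory.init();
+ *   const user = await factory.createUser({ email: "dev@example.com", name: "Dev" });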
+ * + * Models + * - App + * - Deployment + * - Package + * - User + * + * Model functions + * - deleteAsync() + * - findOneAsync(query, options) + * - findAsync(query, options) + * - saveAsync(options) + * - updateAsync(updates, options) + */ +export default class DaoFactory { + constructor({ driver, logger }) { + this.driver = driver; + this.logger = logger; + } + async init() { + return this.driver.init(); + } + + async createUser({ email, name, accessKeys, linkedProviders = [] }) { + const user = new this.driver.User({ + email, + name, + accessKeys, + linkedProviders + }); + const exists = await user.saveAsync(); + alreadyExistsMsg(exists, `User already exists ${email}`); + + return user; + } + + userById(id) { + return this.driver.User.findOneAsync({ id }); + } + + userByAccessKey(accessKeys) { + return this.driver.User.findOneAsync({ accessKeys }); + } + + async userByEmail(email) { + return this.driver.User.findOneAsync({ email }); + } + + async updateUser(currentEmail, update) { + const user = await this.userByEmail(currentEmail); + apply(user, update); + await user.saveAsync(); + const js = toJSON(user); + + _.each(js.accessKeys, (value, key) => { + objDefaultNull(js.accessKeys[key], ACCESSKEY); + }); + return js; + } + + /** + * Create a new App + * + * @param {object} values + * name: name of app, string + * deployments: list of deployments + * collaborators: map of collaborators + * + */ + async createApp({ name, deployments = {}, collaborators }) { + const app = new this.driver.App({ name, collaborators }); + const savedApp = await app.saveAsync(); + + const deps = Object.keys(deployments); + if (isNotEmpty(deps)) { + for (const name of deps) { + await this.addDeployment(app.id, name, deployments[name]); + } + } + savedApp.deployments = deps; + return savedApp; + } + + /** + * Remove an app + * @param {*} appId + */ + async removeApp(appId) { + const app = await this.appById(appId); + if (isEmpty(app)) return; + return app.deleteAsync(); + } + + /** + * Update an app + * @param {*} id + * @param {*} param1 + */ + async updateApp(id, { name, collaborators }) { + const app = await this.appById(id); + app.name = name; + app.collaborators = collaborators; + const resp = await app.saveAsync(); + return app; + } + + /** + * Get an app by id + * @param {*} id + */ + appById(id) { + return this.driver.App.findOneAsync({ id }); + } + + /** + * Get all apps for a given email + * + * @param {*} email + */ + appsForCollaborator(email) { + return this.driver.App.findAsync({ collaborators: email }); + } + + appForCollaborator(email, appName) { + return this.driver.App.findOneAsync({ + collaborators: email, + name: appName + }); + } + + /** + * Add a deployment + * @param {*} app + * @param {*} name + * @param {*} param2 + */ + addDeployment(app, name, { key }) { + const deployment = new this.driver.Deployment({ name, key, AppId: app }); + return deployment.saveAsync(); + } + + /** + * Remove a deployment + * + * @param {*} appId + * @param {*} deploymentName + */ + async removeDeployment(appId, deploymentName) { + const deployment = await this.driver.Deployment.findOneAsync({ + AppId: appId, + name: deploymentName + }); + return deployment.deleteAsync(); + } + + /** + * Rename a deployment + * + * @param {*} appId + * @param {*} oldName + * @param {*} newName + */ + async renameDeployment(appId, oldName, newName) { + const deployment = await this.driver.Deployment.findOneAsync({ + AppId: appId, + name: oldName + }); + deployment.name = newName; + return deployment.saveAsync(); + } + + /** + 
* Get deployment by key + * + * @param {*} deploymentKey + */ + async deploymentForKey(deploymentKey) { + let dep = await this.driver.Deployment.findOneAsync({ key: deploymentKey }); + if (dep && isNotEmpty(dep.history_)) { + dep = toJSON(dep); + dep.package = await this.driver.Package.findOneAsync({ + id_: dep.history_[0] + }); + return dep; + } + return dep; + } + + /** + * Get all deployments for an app + * + * @param {*} appId + * @param {*} deployments + */ + async deploymentsByApp(appId, deployments) { + if (isEmpty(deployments)) { + return []; + } + const deps = await this.driver.Deployment.findAsync({ + appId, + name: deployments + }); + if (isEmpty(deps)) return []; + return promiseMap( + reducer(deps, (ret, d) => (ret[d.name] = this.deploymentForKey(d.key))) + ); + } + + /** + * Get a deployment + * + * @param {*} appId + * @param {*} deployment + */ + async deploymentByApp(appId, deployment) { + const d = await this.driver.Deployment.findOneAsync({ + appId, + name: deployment + }); + if (!d) return; + return this.deploymentForKey(d.key); + } + + async _historyForDeployment(key) { + const res = await this.driver.Deployment.findOneAsync({ key }); + return res.history_; + } + + /** + * Add a package to a deployment + * + * @param {*} deploymentKey + * @param {*} value + */ + async addPackage(deploymentKey, value) { + const deployment = await this.driver.Deployment.findOneAsync({ + key: deploymentKey + }); + if (isEmpty(deployment)) { + throw new Error(`Can not find deployment ${deploymentKey}.`); + } + const pkg = new this.driver.Package(value); + await pkg.saveAsync(); + await deployment.associateAsync(pkg); + + return pkg; + } + + /** + * Update a package of a deployment + * + * @param {*} deploymentKey + * @param {*} pkg + * @param {*} label + */ + async updatePackage(deploymentKey, pkg, label) { + const history_ = await this._historyForDeployment(deploymentKey); + if (isEmpty(history_)) { + throw new Error( + `Can not update a package without history, probably means things have gone awry.` + ); + } + let rpkg; + + if (label) { + rpkg = await this.driver.Package.findOneAsync({ id_: history_, label }); + } else { + rpkg = await this.driver.Package.findOneAsync({ id_: history_ }); + } + await rpkg.updateAsync(pkg); + return rpkg; + } + + /** + * Get history of a deployment. 
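+   * Resolves to [] when the deployment does not exist or has no history;
+   * otherwise packages are returned newest first (sorted by created_ via historySort).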
+ * @param {*} appId + * @param {*} deploymentName + */ + async history(appId, deploymentName) { + const deployment = await this.deploymentByApp(appId, deploymentName); + if (!deployment || !deployment.history_) { + return []; + } + const pkgs = await this.driver.Package.findAsync({ + id_: deployment.history_ + }); + + return historySort(pkgs); + } + + /** + * Get history of a deployment by id(s) + * + * @param {*} historyIds + */ + async historyByIds(historyIds) { + if (historyIds == null || historyIds.length == 0) { + return []; + } + const pkgs = await this.driver.Package.findAsync({ id_: historyIds }); + return historySort(pkgs); + } + + /** + * Clear the history of a deployment + * + * @param {*} appId + * @param {*} deploymentName + */ + async clearHistory(appId, deploymentName) { + const deployment = await this.deploymentByApp(appId, deploymentName); + if (deployment && isNotEmpty(deployment.history_)) { + return this.driver.Package.delete({ id_: deployment.history_ }); + } + } + + /** + * Get history of a deployment by label + * + * @param {*} appId + * @param {*} deploymentName + * @param {*} label + */ + async historyLabel(appId, deploymentName, label) { + const deployment = await this.deploymentByApp(appId, deploymentName); + if (!deployment || isEmpty(deployment.history_)) { + return; + } + const pkg = await this.driver.Package.findOneAsync({ + id_: deployment.history_, + label: label + }); + return pkg; + } + + /** + * Get a package by Id + */ + async packageById(pkgId) { + if (!pkgId) return; + return this.driver.Package.findOneAsync({ id_: pkgId }); + } + + /** + * Upload a package + * + */ + async upload(packageHash, content) { + if (!Buffer.isBuffer(content)) { + content = Buffer.from(content, "utf8"); + } + const pkg = new this.driver.PackageContent({ packageHash, content }); + const exists = await pkg.saveAsync(); + return exists; + } + + /** + * Download a package + * + */ + async download(packageHash) { + const pkg = await this.driver.PackageContent.findOneAsync({ packageHash }); + if (pkg != null) { + return pkg.content; + } + } + + /** + * Get metrics for a deployment + * + */ + metrics(deploymentKey) { + return this.driver.Metric.findAsync({ deploymentKey }); + } + + /** + * Record download or deploy metric + * + * @param {object} values + * appVersion text, + * deploymentKey text, + * clientUniqueId text, + * label text + * status text, + * previousLabelOrAppVersion text, + * previousDeploymentKey text, + * + * "appVersion": "1.0.0", + * "deploymentKey": "5UfjnOxv1FnCJ_DwPqMWQYSVlp0H4yecGHaB-", + * "clientUniqueId": "fe231438a4f62c70", + * "label": "v1", + * "status": "DeploymentSucceeded", + * "previousLabelOrAppVersion": "1.0.0", + * "previousDeploymentKey": "5UfjnOxv1FnCJ_DwPqMWQYSVlp0H4yecGHaB-" + */ + async insertMetric(values) { + const metric = new this.driver.Metric(values); + return metric.saveAsync(); + } + + clientRatio(clientUniqueId, packageHash) { + return this.driver.ClientRatio.findOneAsync({ + clientUniqueId, + packageHash + }); + } + + /** + * Tracks whether we updated the client or not last time. + * + * @param {string} clientUniqueId : Client Device Unique ID + * @param {string} packageHash : Package hash to update to. + * @param {float} ratio : Deployment ratio + * @param {bool} updated : flag indicating if client was/was not updated. 
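+   * @returns {Promise} resolves once the ClientRatio row has been saved
+   *
+   * Example (illustrative values):
+   *   dao.insertClientRatio("fe231438a4f62c70", "abc123hash", 50.0, false);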
+   */
+  insertClientRatio(clientUniqueId, packageHash, ratio, updated) {
+    const clientRatio = new this.driver.ClientRatio({
+      clientUniqueId,
+      packageHash,
+      ratio,
+      updated
+    });
+    return clientRatio.saveAsync();
+  }
+}
diff --git a/electrode-ota-server-dao-factory/src/index.js b/electrode-ota-server-dao-factory/src/index.js
new file mode 100644
index 00000000..c5ce551f
--- /dev/null
+++ b/electrode-ota-server-dao-factory/src/index.js
@@ -0,0 +1,43 @@
+import diregister from "electrode-ota-server-diregister";
+import Factory from "./factory";
+
+/**
+ * DAO service provides a uniform data store abstraction layer for the OTA server.
+ *
+ * All data store interactions go through this service.
+ * Define the desired driver class for the supported data stores (i.e. Cassandra, MariaDB).
+ *
+ * Configuration Options
+ * {
+ *   "driver": "",
+ * }
+ * driver = specifies the datastore driver used to power this DAO service.
+ *
+ * Example
+ *   "electrode-ota-server-dao-factory": {
+ *     "driver": "electrode-ota-server-dao-mariadb"
+ *   }
+ *   "electrode-ota-server-dao-mariadb": {
+ *     ...
+ *   }
+ *
+ * @param {dict} options : configuration options
+ * @param {*} driver : datastore driver instance (ota!dao-driver)
+ * @param {*} logger : ota logger
+ */
+export const daoFactory = async (options, driver, logger) => {
+  if (driver == null) {
+    throw new Error("DAO driver not loaded");
+  }
+  return new Factory({ driver, logger });
+};
+
+export const register = diregister(
+  {
+    name: "ota!dao",
+    multiple: false,
+    connections: false,
+    dependencies: ["ota!dao-driver", "ota!logger"]
+  },
+  daoFactory
+);
diff --git a/electrode-ota-server-dao-factory/test/dao-test.js b/electrode-ota-server-dao-factory/test/dao-test.js
new file mode 100644
index 00000000..2565e28d
--- /dev/null
+++ b/electrode-ota-server-dao-factory/test/dao-test.js
@@ -0,0 +1,7 @@
+import { expect } from "chai";
+
+describe("server/dao", function() {
+  this.timeout(200000);
+
+  it("test insert metric", () => {});
+});
diff --git a/electrode-ota-server-dao-factory/yarn.lock b/electrode-ota-server-dao-factory/yarn.lock
new file mode 100644
index 00000000..9a448533
--- /dev/null
+++ b/electrode-ota-server-dao-factory/yarn.lock
@@ -0,0 +1,7 @@
+# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+# yarn lockfile v1
+
+
+bluebird@^3.5.1:
+  version "3.5.1"
+  resolved "http://npme.walmart.com/bluebird/-/bluebird-3.5.1.tgz#d9551f9de98f1fcda1e683d17ee91a0602ee2eb9"
diff --git a/electrode-ota-server-dao-mariadb/README.md b/electrode-ota-server-dao-mariadb/README.md
new file mode 100644
index 00000000..a26989a6
--- /dev/null
+++ b/electrode-ota-server-dao-mariadb/README.md
@@ -0,0 +1,40 @@
+# electrode-ota-server-dao-mariadb
+
+This project is part of the [electrode-ota-server](https://github.com/electrode-io/electrode-ota-server)
+
+It is not meant to be used standalone; use at your own risk.
+
+## Install
+
+```
+% npm install electrode-ota-server-dao-mariadb
+```
+
+## Usage
+
+Specify connection information in the config options.
+Refer to Sequelize for the available config parameters.
+ +``` +"plugins": { + "electrode-ota-server-dao-mariadb": { + "config": { + "host": "localhost", + "port": 3306, + "db": "example", + "user": "root", + "password": "" + } + } +} +``` + +Use this driver in DAO factory + +``` +"plugins": { + "electrode-ota-server-dao-factory": { + driver: "electrode-ota-server-dao-mariadb" + } +} +``` diff --git a/electrode-ota-server-dao-mariadb/package.json b/electrode-ota-server-dao-mariadb/package.json new file mode 100644 index 00000000..c5704afd --- /dev/null +++ b/electrode-ota-server-dao-mariadb/package.json @@ -0,0 +1,31 @@ +{ + "name": "electrode-ota-server-dao-mariadb", + "version": "3.0.0", + "description": "Electrode OTA - MariaDB driver", + "author": "Dat Vong ", + "license": "Apache-2.0", + "repository": "https://github.com/electrode-io/electrode-ota-server", + "bugs": { + "url": "https://github.com/electrode-io/electrode-ota-server/issues" + }, + "homepage": "https://github.com/electrode-io/electrode-ota-server", + "main": "lib/index.js", + "scripts": { + "test": "ota-mocha", + "build": "ota-babel", + "coverage": "ota-nyc", + "prepublish": "npm run build" + }, + "keywords": ["code-push", "ota", "electrode", "react-native", "cordova"], + "dependencies": { + "electrode-ota-server-diregister": "^2.1.0-beta.7", + "electrode-ota-server-util": "^3.0.0", + "mysql2": "^1.5.1", + "sequelize": "^4.28.1" + }, + "devDependencies": { + "bluebird": "^3.5.1", + "electrode-ota-server-util-dev": "^2.1.0-beta.7", + "sinon": "^4.1.3" + } +} diff --git a/electrode-ota-server-dao-mariadb/src/client.js b/electrode-ota-server-dao-mariadb/src/client.js new file mode 100644 index 00000000..f0c1c6a0 --- /dev/null +++ b/electrode-ota-server-dao-mariadb/src/client.js @@ -0,0 +1,143 @@ +import { Sequelize } from "sequelize"; +import { ProxyModelWrapper } from "./proxy"; + +/** + * Default database connection configs + */ +const defaultConfig = { + host: "127.0.0.1", + port: 3306, + dialect: "mysql", + db: "", + user: "root", + password: "", + pool_max: 5, + pool_idle: 30000, + pool_acquire: 60000 +}; + +/** + * Create a Sequelize instance with given options + * + * @param {object} options - connection options + * @returns {client} sequelize client + */ +export const createSequelizeClient = (options = {}) => { + const config = Object.assign({}, defaultConfig, options); + const client = new Sequelize(config.db, config.user, config.password, { + host: config.host, + port: config.port, + dialect: config.dialect, + pool: { + max: config.pool_max, + idle: config.pool_idle, + acquire: config.pool_acquire + }, + logging: config.logging || false + }); + + return client; +}; + +/** + * Create a test database + * Note: Should only be used for tests + * + * @param {*} options - connection options + * @returns {client} sequelize client + */ +export const createDatabaseForTest = options => { + const configCopy = Object.assign({}, options); + const db = configCopy.db; + delete configCopy.db; + const client = createSequelizeClient({ config: configCopy }); + if (process.env.NODE_ENV == "production") { + throw new Error( + "testCreateDatabase() should not be called in production; only in test" + ); + } + return client + .query(`DROP DATABASE IF EXISTS ${db}`) + .then(client.query(`CREATE DATABASE ${db}`)) + .then(() => client.close()); +}; + +/** + * MariaDB driver for use with `electrode-ota-server-dao-factory`. 
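+ *
+ * Construction sketch (illustrative; options follow defaultConfig above):
+ *
+ *   const driver = new DaoMariaDB({ options: { host: "127.0.0.1", db: "ota" }, logger: console });
+ *   await driver.init(); // loads the models and creates any missing tables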
+ */ +export default class DaoMariaDB { + constructor({ options, logger }) { + this.sequelize = createSequelizeClient(options); + this.logger = logger; + this.logger.info("DAO MariaDB registered with", options); + } + + /** + * Initialize this driver asynchronously. + * + * Loads the DB models and synchronizing with the database (creates if missing) + * @returns {none} None + */ + async init() { + this.logger.info("DAO MariaDB loading models and synchronizing tables"); + await this._loadModels(); + await this._synchronizeModels(); + Object.assign(this, { + App: ProxyModelWrapper( + this.sequelize, + this.sequelize.models.App, + this.logger + ), + ClientRatio: ProxyModelWrapper( + this.sequelize, + this.sequelize.models.ClientRatio, + this.logger + ), + Deployment: ProxyModelWrapper( + this.sequelize, + this.sequelize.models.Deployment, + this.logger + ), + Metric: ProxyModelWrapper( + this.sequelize, + this.sequelize.models.Metric, + this.logger + ), + Package: ProxyModelWrapper( + this.sequelize, + this.sequelize.models.Package, + this.logger + ), + PackageContent: ProxyModelWrapper( + this.sequelize, + this.sequelize.models.PackageContent, + this.logger + ), + User: ProxyModelWrapper( + this.sequelize, + this.sequelize.models.User, + this.logger + ) + }); + } + + _loadModels() { + // loads from ./models/index.js + const modelLoader = require("./models").default; + modelLoader(this.sequelize); + } + + _synchronizeModels() { + // creates tables if missing + return this.sequelize.sync(); + } + + /** + * Close the connection + * + * @returns {status} close status + */ + closeAsync() { + return this.sequelize.close(); + } +} diff --git a/electrode-ota-server-dao-mariadb/src/index.js b/electrode-ota-server-dao-mariadb/src/index.js new file mode 100644 index 00000000..bfae7e86 --- /dev/null +++ b/electrode-ota-server-dao-mariadb/src/index.js @@ -0,0 +1,19 @@ +import diregister from "electrode-ota-server-diregister"; +import client, { createDatabaseForTest } from "./client"; + +export const daoDriver = async (options, logger) => { + let mariaClient = new client({ options, logger }); + await mariaClient.init(); + return mariaClient; +}; +export { createDatabaseForTest }; + +export const register = diregister( + { + name: "ota!dao-driver", + multiple: false, + connections: false, + dependencies: ["ota!logger"] + }, + daoDriver +); diff --git a/electrode-ota-server-dao-mariadb/src/models/App.js b/electrode-ota-server-dao-mariadb/src/models/App.js new file mode 100644 index 00000000..d5b2adde --- /dev/null +++ b/electrode-ota-server-dao-mariadb/src/models/App.js @@ -0,0 +1,50 @@ +import Sequelize from "sequelize"; +import _ from "lodash"; + +export default sequelize => { + const AppCollaborator = sequelize.import("./AppCollaborator"); + const Deployment = sequelize.import("./Deployment"); + const App = sequelize.define( + "App", + { + id: { + type: Sequelize.UUID, + defaultValue: Sequelize.UUIDV1, + primaryKey: true + }, + name: Sequelize.STRING + }, + { + tableName: "apps", + indexes: [{ fields: ["name"] }], + version: true + } + ); + App.prototype.toJSON = function() { + let collaborators = this.get("collaborators"); + collaborators = _.keyBy(collaborators, c => c.user); + collaborators = _.mapValues(collaborators, c => ({ + permission: c.permission + })); + let deployments = this.get("deployments"); + deployments = _.map(deployments, (val, key) => val.name); + if (_.isEmpty(deployments)) { + deployments = null; + } + return { + id: this.get("id", { plain: true }), + name: this.get("name", { 
plain: true }), + collaborators, + deployments + }; + }; + + App.hasMany(AppCollaborator, { as: "collaborators", onDelete: "CASCADE" }); + App.hasMany(Deployment, { as: "deployments", onDelete: "CASCADE" }); + App._associations = { + collaborators: { model: AppCollaborator, searchField: "user" }, + deployments: { model: Deployment, searchField: "id" } + }; + + return App; +}; diff --git a/electrode-ota-server-dao-mariadb/src/models/AppCollaborator.js b/electrode-ota-server-dao-mariadb/src/models/AppCollaborator.js new file mode 100644 index 00000000..44a4f507 --- /dev/null +++ b/electrode-ota-server-dao-mariadb/src/models/AppCollaborator.js @@ -0,0 +1,17 @@ +import Sequelize from "sequelize"; + +export default sequelize => { + let AppCollaborator = sequelize.define( + "AppCollaborator", + { + user: Sequelize.STRING, + permission: Sequelize.STRING + }, + { + tableName: "app_collaborators", + indexes: [{ fields: ["user", "AppId"], unique: true }], + version: true + } + ); + return AppCollaborator; +}; diff --git a/electrode-ota-server-dao-mariadb/src/models/AppDeployment.js b/electrode-ota-server-dao-mariadb/src/models/AppDeployment.js new file mode 100644 index 00000000..3261a7f9 --- /dev/null +++ b/electrode-ota-server-dao-mariadb/src/models/AppDeployment.js @@ -0,0 +1,11 @@ +import Sequelize from "sequelize"; + +export default sequelize => + sequelize.define( + "AppDeployment", + {}, + { + tableName: "app_deployments", + version: true + } + ); diff --git a/electrode-ota-server-dao-mariadb/src/models/ClientRatio.js b/electrode-ota-server-dao-mariadb/src/models/ClientRatio.js new file mode 100644 index 00000000..a4de46ca --- /dev/null +++ b/electrode-ota-server-dao-mariadb/src/models/ClientRatio.js @@ -0,0 +1,40 @@ +import Sequelize from "sequelize"; + +export default sequelize => { + const ClientRatio = sequelize.define( + "ClientRatio", + { + clientUniqueId: { + type: Sequelize.STRING, + allowNull: false + }, + packageHash: { + type: Sequelize.STRING, + allowNull: false + }, + inserted: Sequelize.DATE, + ratio: Sequelize.FLOAT, + updated: Sequelize.BOOLEAN + }, + { + tableName: "client_ratio", + createdAt: "inserted", + indexes: [ + { + fields: ["clientUniqueId", "inserted"] + }, + { + fields: ["packageHash"] + } + ], + version: true + } + ); + ClientRatio.prototype.toJSON = function() { + let asJson = this.get({ plain: true }); + delete asJson["version"]; + delete asJson["updatedAt"]; + return asJson; + }; + return ClientRatio; +}; diff --git a/electrode-ota-server-dao-mariadb/src/models/Deployment.js b/electrode-ota-server-dao-mariadb/src/models/Deployment.js new file mode 100644 index 00000000..5e7bcbf5 --- /dev/null +++ b/electrode-ota-server-dao-mariadb/src/models/Deployment.js @@ -0,0 +1,63 @@ +import Sequelize from "sequelize"; +import _ from "lodash"; +import assert from "assert"; + +export default sequelize => { + const Package = sequelize.import("./Package"); + const DeploymentHistory = sequelize.import("./DeploymentHistory"); + + let Deployment = sequelize.define( + "Deployment", + { + id: { + type: Sequelize.UUID, + defaultValue: Sequelize.UUIDV1, + primaryKey: true + }, + key: Sequelize.STRING, + name: Sequelize.STRING + }, + { + tableName: "deployments", + indexes: [{ fields: ["key"] }, { fields: ["name"] }], + createdAt: "createdTime", + version: true + } + ); + Deployment.belongsToMany(Package, { + as: "history_", + through: DeploymentHistory, + foreignKey: "deploymentId", + otherKey: "packageId" + }); + Deployment._associations = { + history_: { + model: Package, + 
searchField: "packageId", + through: DeploymentHistory, + attributes: ["id_"], + // Sort history_ by created DESC + order: [ + { model: Package, as: "history_", through: DeploymentHistory }, + "created_", + "DESC" + ] + } + }; + + Deployment.prototype.toJSON = function() { + let asJson = this.get({ plain: true }); + asJson.history_ = _.map(asJson.history_, p => p.id_); + return asJson; + }; + Deployment.prototype.associate = function(pkg) { + assert(pkg != null, "Associating null package to Deployment"); + assert( + pkg instanceof Package, + "Only Package can be associated to Deployment" + ); + return this.addHistory_(pkg); + }; + + return Deployment; +}; diff --git a/electrode-ota-server-dao-mariadb/src/models/DeploymentHistory.js b/electrode-ota-server-dao-mariadb/src/models/DeploymentHistory.js new file mode 100644 index 00000000..97cf2224 --- /dev/null +++ b/electrode-ota-server-dao-mariadb/src/models/DeploymentHistory.js @@ -0,0 +1,15 @@ +import Sequelize from "sequelize"; + +export default sequelize => + sequelize.define( + "DeploymentHistory", + {}, + { + tableName: "deployment_history", + defaultScope: { + order: [["createdAt", "DESC"]] + }, + indexes: [{ fields: ["createdAt"] }], + version: true + } + ); diff --git a/electrode-ota-server-dao-mariadb/src/models/Metric.js b/electrode-ota-server-dao-mariadb/src/models/Metric.js new file mode 100644 index 00000000..b389dcfb --- /dev/null +++ b/electrode-ota-server-dao-mariadb/src/models/Metric.js @@ -0,0 +1,34 @@ +import Sequelize from "sequelize"; + +export default sequelize => { + const Metric = sequelize.define( + "Metric", + { + id: { + type: Sequelize.UUID, + defaultValue: Sequelize.UUIDV1, + primaryKey: true + }, + deploymentKey: Sequelize.STRING, + appVersion: Sequelize.STRING, + clientUniqueId: Sequelize.STRING, + label: Sequelize.STRING, + previousDeploymentKey: Sequelize.STRING, + previousLabelOrAppVersion: Sequelize.STRING, + status: Sequelize.STRING + }, + { + tableName: "metrics", + indexes: [{ fields: ["deploymentKey"] }], + version: true + } + ); + Metric.prototype.toJSON = function() { + let asJson = this.get({ plain: true }); + delete asJson["updatedAt"]; + delete asJson["createdAt"]; + delete asJson["version"]; + return asJson; + }; + return Metric; +}; diff --git a/electrode-ota-server-dao-mariadb/src/models/Package.js b/electrode-ota-server-dao-mariadb/src/models/Package.js new file mode 100644 index 00000000..9684a522 --- /dev/null +++ b/electrode-ota-server-dao-mariadb/src/models/Package.js @@ -0,0 +1,89 @@ +import Sequelize from "sequelize"; +import _ from "lodash"; + +export default sequelize => { + const PackageDiff = sequelize.import("./PackageDiff"); + const Package = sequelize.define( + "Package", + { + id_: { + type: Sequelize.UUID, + defaultValue: Sequelize.UUIDV1, + primaryKey: true + }, + appVersion: Sequelize.STRING, + blobUrl: Sequelize.STRING(2048), + description: Sequelize.STRING(2048), + isDisabled: { + type: Sequelize.BOOLEAN, + defaultValue: false + }, + isMandatory: { + type: Sequelize.BOOLEAN, + defaultValue: false + }, + label: Sequelize.STRING, + manifestBlobUrl: Sequelize.STRING(2048), + originalDeployment: Sequelize.STRING, + originalLabel: Sequelize.STRING, + packageHash: Sequelize.STRING, + releaseMethod: Sequelize.STRING, + releasedBy: Sequelize.STRING, + rollout: Sequelize.INTEGER, + size: Sequelize.BIGINT.UNSIGNED, + uploadTime: Sequelize.DATE + }, + { + tableName: "packages", + createdAt: "created_", + indexes: [ + { + fields: ["label"] + } + ], + defaultScope: { + order: 
[["created_", "DESC"]] + }, + version: true + } + ); + + Package.hasMany(PackageDiff, { + as: "diffPackageMap", + foreignKey: "packageId", + onDelete: "CASCADE" + }); + Package._associations = { + diffPackageMap: { model: PackageDiff, searchField: "packageHash" } + }; + + Package.prototype.createOrUpdateAssociate = function(updates) { + const packDiff = _.find(this.diffPackageMap, { + packageId: this.id_, + packageHash: updates.packageHash + }); + if (packDiff) { + return packDiff.update(updates); + } else { + return PackageDiff.create( + _.assign(updates, { + packageId: this.id_, + packageHash: updates.packageHash + }) + ).then(packageDiff => { + this.diffPackageMap.push(packageDiff); + return packageDiff; + }); + } + }; + + Package.prototype.toJSON = function() { + let asJson = this.get({ plain: true }); + asJson.diffPackageMap = _.keyBy( + asJson.diffPackageMap, + pkg => pkg.packageHash + ); + return asJson; + }; + return Package; +}; diff --git a/electrode-ota-server-dao-mariadb/src/models/PackageContent.js b/electrode-ota-server-dao-mariadb/src/models/PackageContent.js new file mode 100644 index 00000000..2e12fa67 --- /dev/null +++ b/electrode-ota-server-dao-mariadb/src/models/PackageContent.js @@ -0,0 +1,20 @@ +import Sequelize from "sequelize"; + +export default sequelize => { + const PackageContent = sequelize.define( + "PackageContent", + { + packageHash: { + type: Sequelize.STRING, + primaryKey: true + }, + content: Sequelize.BLOB("long") + }, + { + tableName: "packages_content", + version: true + } + ); + + return PackageContent; +}; diff --git a/electrode-ota-server-dao-mariadb/src/models/PackageDiff.js b/electrode-ota-server-dao-mariadb/src/models/PackageDiff.js new file mode 100644 index 00000000..c9baf7df --- /dev/null +++ b/electrode-ota-server-dao-mariadb/src/models/PackageDiff.js @@ -0,0 +1,19 @@ +import Sequelize from "sequelize"; + +export default sequelize => { + const PackageDiff = sequelize.define( + "PackageDiff", + { + packageHash: Sequelize.STRING, + size: Sequelize.BIGINT.UNSIGNED, + url: Sequelize.STRING(2048) + }, + { + tableName: "packages_diff", + indexes: [{ fields: [{ attribute: "createdAt", order: "DESC" }] }], + version: true + } + ); + + return PackageDiff; +}; diff --git a/electrode-ota-server-dao-mariadb/src/models/User.js b/electrode-ota-server-dao-mariadb/src/models/User.js new file mode 100644 index 00000000..77ea707b --- /dev/null +++ b/electrode-ota-server-dao-mariadb/src/models/User.js @@ -0,0 +1,72 @@ +import { Sequelize, Model } from "sequelize"; +import _ from "lodash"; + +export default sequelize => { + const UserAccessKey = sequelize.import("./UserAccessKey"); + const User = sequelize.define( + "User", + { + id: { + type: Sequelize.UUID, + defaultValue: Sequelize.UUIDV1, + primaryKey: true + }, + email: { + type: Sequelize.STRING, + allowNull: false, + unique: true + }, + linkedProviders: { + type: Sequelize.STRING(2048), + get() { + const val = this.getDataValue("linkedProviders"); + return val ? 
val.split(",") : []; + }, + set(val) { + if (val) { + this.setDataValue("linkedProviders", val.join(",")); + } + } + }, + name: Sequelize.STRING + }, + { + tableName: "users", + createdAt: "createdTime", + indexes: [{ fields: ["email"] }], + version: true + } + ); + + User.prototype.toJSON = function() { + let accessKeys = _.keyBy(this.accessKeys, c => c.key); + accessKeys = _.mapValues(accessKeys, k => k.toJSON()); + return { + id: this.get("id", { plain: true }), + email: this.get("email", { plain: true }), + name: this.get("name", { plain: true }), + createdTime: this.get("createdTime", { plain: true }), + linkedProviders: this.get("linkedProviders", { plain: true }), + accessKeys + }; + }; + + User.hasMany(UserAccessKey, { + as: "accessKeys", + onDelete: "CASCADE" + }); + User._associations = { + accessKeys: { model: UserAccessKey, searchField: "key" } + }; + User.prototype.createOrUpdateAssociate = function(values) { + const accessKey = _.find(this.accessKeys, { key: values.key }); + const valuesWithAssoc = Object.assign({}, values, { UserId: this.id }); + if (accessKey) { + return accessKey.update(values); + } else { + return UserAccessKey.create(valuesWithAssoc); + } + }; + + return User; +}; diff --git a/electrode-ota-server-dao-mariadb/src/models/UserAccessKey.js b/electrode-ota-server-dao-mariadb/src/models/UserAccessKey.js new file mode 100644 index 00000000..a885636d --- /dev/null +++ b/electrode-ota-server-dao-mariadb/src/models/UserAccessKey.js @@ -0,0 +1,42 @@ +import Sequelize from "sequelize"; +import _ from "lodash"; + +/** + * Associated to the User model. + */ +export default sequelize => { + const UserAccessKey = sequelize.define( + "UserAccessKey", + { + key: { + type: Sequelize.STRING, + primaryKey: true + }, + id: Sequelize.STRING, + name: Sequelize.STRING, + expires: Sequelize.DATE, + description: Sequelize.STRING(2048), + lastAccess: Sequelize.DATE, + createdBy: Sequelize.STRING, + friendlyName: Sequelize.STRING + }, + { + tableName: "users_access_key", + createdAt: "createdTime", + version: true + } + ); + UserAccessKey.prototype.toJSON = function() { + const option = { plain: true }; + return { + id: this.get("id", option), + name: this.get("name", option), + expires: this.get("expires", option), + description: this.get("description", option), + lastAccess: this.get("lastAccess", option), + createdBy: this.get("createdBy", option), + friendlyName: this.get("friendlyName", option) + }; + }; + return UserAccessKey; +}; diff --git a/electrode-ota-server-dao-mariadb/src/models/index.js b/electrode-ota-server-dao-mariadb/src/models/index.js new file mode 100644 index 00000000..311a537d --- /dev/null +++ b/electrode-ota-server-dao-mariadb/src/models/index.js @@ -0,0 +1,13 @@ +export default client => { + const sources = [ + "App", // + AppCollaborator, AppDeployment, Deployment, DeploymentHistory + "ClientRatio", + "Metric", + "Package", // + PackageDiff + "PackageContent", + "User" // + UserAccessKey + ]; + for (const source of sources) { + client.import(source); + } +}; diff --git a/electrode-ota-server-dao-mariadb/src/proxy.js b/electrode-ota-server-dao-mariadb/src/proxy.js new file mode 100644 index 00000000..651fd2e4 --- /dev/null +++ b/electrode-ota-server-dao-mariadb/src/proxy.js @@ -0,0 +1,283 @@ +import _ from "lodash"; +import Sequelize from "sequelize"; + +/** + * Use `in` for querying an array + * OTA Query: { name: ['cat', 'tom'] } + * Sequelize equivalent: { name: {[Sequelize.Op.in]: ['cat', 'tom']} } + * @param {*} val If value is an array, use `in` + * 
@returns sequelize search value + */ +const toSequelizeIn = val => { + if (_.isArray(val)) return { [Sequelize.Op.in]: val }; + else return val; +}; + +/** + * Convert OTA query containing an association to Sequelize query + * OTA Query: { 'accessKeys': 'abc' } + * Sequelize equivalent: { + * model: UserAccessKeys, + * as: 'accessKeys', + * where: { + * 'key': 'abc' + * } + * } + * @param {*} key + * @param {*} association + * @param {*} queryValue + * @return Sequelize include term + */ +const toSequelizeIncludeTerm = (key, association, queryValue) => { + let term = { + model: association.model, + as: key + }; + if (queryValue) { + term["where"] = { + [association.searchField]: toSequelizeIn(queryValue) + }; + } + if (association.through) { + term["through"] = association.through; + } + if (association.attributes) { + term["attributes"] = association.attributes; + } + return term; +}; + +/** + * Convert OTA query to Sequelize query + * + * @param {*} modelDefinition + * @param {*} query + * @returns Sequelize query + */ +const generateSequelizeQuery = (modelDefinition, query) => { + let include = [], + order = []; + let where = _.mapValues(query, val => toSequelizeIn(val)); + + _.each(modelDefinition._associations, (assoc, assocAs) => { + if (where[assocAs]) { + include.push(toSequelizeIncludeTerm(assocAs, assoc, where[assocAs])); + delete where[assocAs]; + } else { + include.push(toSequelizeIncludeTerm(assocAs, assoc, null)); + } + if (assoc.order) { + order.push(assoc.order); + } + }); + let seqQuery = {}; + if (!_.isEmpty(where)) seqQuery["where"] = where; + if (!_.isEmpty(include)) seqQuery["include"] = include; + if (!_.isEmpty(order)) seqQuery["order"] = order; + return seqQuery; +}; + +/** + * Options to include for updates/inserts + * + * @param {*} modelDefinition + * @param {*} options + * @returns sequelize option parameters + */ +const toSequelizeOptions = (modelDefinition, options) => { + let extensions = { + include: [] + }; + _.each(modelDefinition._associations, (association, assocationAs) => { + extensions.include.push( + toSequelizeIncludeTerm(assocationAs, association, null) + ); + }); + return Object.assign({}, options, extensions); +}; + +const NullLogger = { + info: () => {}, + error: () => {} +}; + +export function ProxyModelWrapper( + client, + modelDefinition, + logger = NullLogger +) { + /** + * ProxyModel wraps a Sequelize Model. + * Provides consistent interface to OTA server + */ + class ProxyModel { + constructor(values) { + Object.assign(this, values); + + var _originalModel = null; + this._setOriginal = function(m) { + _originalModel = m; + }; + this._getOriginal = function() { + return _originalModel; + }; + } + + /** + * Create ProxyModel from MySQL Model + * @param {*} model + */ + static constructFromSequelizeModel(model) { + if (model == null) return null; + const mapper = new ProxyModel(); + mapper.refreshFromSequelizeModel(model); + return mapper; + } + + /** + * Update this ProxyModel to match the Sequelize Model. 
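+     * Keeps a reference to the underlying Sequelize instance and copies its
+     * plain (toJSON) fields onto this wrapper.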
+ * @param {*} model + */ + refreshFromSequelizeModel(model) { + this._setOriginal(model); + const modelAsJson = ProxyModel._fromSequelizeFormat(model); + Object.assign(this, modelAsJson); + return this; + } + + /** + * Save this ProxyModel Async + * @param {*} options + */ + saveAsync(options = {}) { + if (this._getOriginal()) { + return this.updateAsync(this, options); + } else { + const extendedOptions = toSequelizeOptions(modelDefinition, options); + const asJson = ProxyModel._toSequelizeFormat(this); + return modelDefinition + .create(asJson, extendedOptions) + .then(model => this.refreshFromSequelizeModel(model)) + .catch(Sequelize.UniqueConstraintError, e => { + logger.info("UniqueConstraint violation", e); + return false; + }); + } + } + + /** + * Update async + * @param {object} updates + * @param {object} options + * sequelize options + */ + updateAsync(updates, options = {}) { + const jsonUpdates = ProxyModel._toSequelizeFormat(updates); + const seqOptions = toSequelizeOptions(modelDefinition, options); + return client.transaction(transaction => { + return this._assignAssociations(jsonUpdates, { transaction }) + .then(_ => + this._getOriginal().update( + jsonUpdates, + Object.assign(seqOptions, { transaction }) + ) + ) + .then(model => this.refreshFromSequelizeModel(model)); + }); + } + + /** + * Delete async + */ + deleteAsync() { + return this._getOriginal().destroy(); + } + + /** + * Add an associated object + * @param {ProxyModel} model - associated object to add + */ + associateAsync(model) { + return this._getOriginal().associate(model._getOriginal()); + } + + static _fromSequelizeFormat(sequelizeRecord) { + if (!sequelizeRecord) return null; + let jsonRecord = sequelizeRecord.toJSON(); + return jsonRecord; + } + static _toSequelizeFormat(values) { + let ret = Object.assign({}, values); + if (modelDefinition._associations) { + _.each(modelDefinition._associations, (assocation, associationAs) => { + ret[associationAs] = _.map(ret[associationAs], (vals, key) => + Object.assign({}, vals, { [assocation.searchField]: key }) + ); + }); + } + return ret; + } + + /** + * Find one object that matches the provided query + * @param {*} queryParams : find query (key-value pairs) + */ + static findOneAsync(queryParams) { + let sequelizeQuery = generateSequelizeQuery(modelDefinition, queryParams); + return modelDefinition + .findOne(sequelizeQuery) + .then(ProxyModel.constructFromSequelizeModel); + } + + /** + * Find all objects that match the provided query + * @param {*} queryParams + */ + static findAsync(queryParams) { + const sequelizeQuery = generateSequelizeQuery( + modelDefinition, + queryParams + ); + return modelDefinition + .findAll(sequelizeQuery) + .then(results => + _.map(results, ProxyModel.constructFromSequelizeModel) + ); + } + + /** + * Delete objects matching the specified query + * @param {*} query + */ + static delete(query) { + return ProxyModel.findOneAsync(query).then(model => model.deleteAsync()); + } + + _assignAssociations(updates, options = {}) { + const actions = _.reduce( + modelDefinition._associations, + (accu, assoc, assocAs) => { + if (updates[assocAs]) { + accu = accu.concat( + _.map(updates[assocAs], assocItem => { + if ( + this._getOriginal() && + this._getOriginal().createOrUpdateAssociate + ) { + return this._getOriginal().createOrUpdateAssociate(assocItem); + } else { + return Promise.resolve(); + } + }) + ); + } + return accu; + }, + [] + ); + return Promise.all(actions); + } + } + + return ProxyModel; +} diff --git 
a/electrode-ota-server-dao-mariadb/test/mariadb-test.js b/electrode-ota-server-dao-mariadb/test/mariadb-test.js new file mode 100644 index 00000000..be114f49 --- /dev/null +++ b/electrode-ota-server-dao-mariadb/test/mariadb-test.js @@ -0,0 +1,671 @@ +import initMariaDao, { + shutdownMaria +} from "electrode-ota-server-test-support/lib/init-maria-dao"; +import eql from "electrode-ota-server-test-support/lib/eql"; +import { expect } from "chai"; +import sinon from "sinon"; + +const alwaysFail = () => { + throw new Error(`should have failed`); +}; + +describe("dao/mariadb", function() { + this.timeout(200000); + let dao; + let clock; + before(async () => { + dao = await initMariaDao(); + clock = sinon.useFakeTimers({ now: 1483228800000 }); + }); + after(async () => { + await shutdownMaria(); + clock.restore(); + }); + + it("should insert user", () => + dao + .createUser({ + email: "joe@b.com", + name: "Joe", + linkedProviders: ["GitHub"] + }) + .then(user => { + expect(user.email).to.eql("joe@b.com"); + expect(user.linkedProviders).to.eql(["GitHub"]); + expect(user.name).to.eql("Joe"); + expect(user.id).to.exist; + })); + + it("should fail insert user on duplicate email", () => + dao + .createUser({ email: "joe@c.com", name: "JoeC" }) + .then(_ => dao.createUser({ email: "joe@c.com", name: "Joe" })) + .then(alwaysFail, e => { + expect(e.message).eql("User already exists joe@c.com"); + })); + + it("should find by id", () => + dao + .createUser({ email: "joe@d.com", name: "Joe D" }) + .then(user => dao.userById(user.id)) + .then(foundUser => { + expect(foundUser.email).to.eql("joe@d.com"); + expect(foundUser.name).to.eql("Joe D"); + })); + + it("should find by email", () => + dao + .createUser({ email: "joe@e.com", name: "Joe E" }) + .then(user => dao.userByEmail("joe@e.com")) + .then(foundUser => { + expect(foundUser.email).to.eql("joe@e.com"); + expect(foundUser.name).to.eql("Joe E"); + })); + + it("should insert and update keys", () => + dao + .createUser({ + email: "joe1@f.com", + name: "Joe F", + accessKeys: { abc: { name: "key" } } + }) + .then(user => { + expect(user.accessKeys) + .to.have.property("abc") + .with.property("name", "key"); + user.accessKeys.abc.name = "abc"; + user.accessKeys.def = { name: "def" }; + return dao + .updateUser(user.email, user) + .then(u => u.accessKeys) + .then( + eql({ + abc: { + name: "abc", + expires: null, + description: null, + lastAccess: null, + createdTime: null, + createdBy: null, + friendlyName: null, + id: null + }, + def: { + name: "def", + expires: null, + description: null, + createdTime: null, + lastAccess: null, + createdBy: null, + friendlyName: null, + id: null + } + }) + ) + .then(_ => dao.userByEmail(user.email)) + .then(u => expect(u.accessKeys).to.include.all.keys("abc", "def")); + })); + + it(`should find user based on accessKey`, () => + dao + .createUser({ + email: "joe2@g.com", + name: "Joe G", + accessKeys: { + abc123: { + name: "key", + expires: new Date(1997, 0, 0), + description: "Description", + friendlyName: "Friendly" + } + } + }) + .then(u => + dao.userByAccessKey("abc123").then(fu => { + expect(fu.id).to.eql(u.id); + expect(fu.accessKeys["abc123"].name).to.eql("key"); + expect(fu.accessKeys["abc123"].expires).to.eql(new Date(1997, 0, 0)); + expect(fu.accessKeys["abc123"].description).to.eql("Description"); + expect(fu.accessKeys["abc123"].friendlyName).to.eql("Friendly"); + }) + )); + + it("should add an app and find by collaborators", () => + dao + .createApp({ + name: "Hello2", + deployments: { + staging: { + key: "123" 
+ } + }, + collaborators: { "test@t.com": { permission: "Owner" } } + }) + .then(app => { + const { id } = app; + + return dao + .appsForCollaborator("test@t.com") + .then(all => + expect(JSON.stringify(all[0].id)).to.eql(JSON.stringify(id)) + ); + })); + + it("should update app collaborators", () => + dao + .createApp({ + name: "ByeBye", + collaborators: { "friend@walmart.com": { permission: "Friend" } } + }) + .then(app => { + expect(app.name).to.eql("ByeBye"); + return dao.updateApp(app.id, { + name: "ByeBye2", + collaborators: { + "friend1@walmart.com": { permission: "Friend" }, + "friend2@walmart.com": { permission: "Friend" } + } + }); + }) + .then(app => { + expect(app.name).to.eql("ByeBye2"); + expect(app.collaborators).to.eql({ + "friend1@walmart.com": { permission: "Friend" }, + "friend2@walmart.com": { permission: "Friend" } + }); + })); + + it("should add/remove/rename deployments", () => + dao + .createApp({ + name: "Hello", + deployments: { + staging: { + key: "123" + } + }, + collaborators: { "test@t.com": { permission: "Owner" } } + }) + .then(app => { + const appId = app.id; + const getApp = () => dao.appById(appId); + + return dao + .addDeployment(appId, "stuff", { key: "stuff" }) + .then(getApp) + .then( + eql({ + collaborators: { + "test@t.com": { + permission: "Owner" + } + }, + deployments: ["staging", "stuff"], + name: "Hello" + }) + ) + .then(_ => dao.removeDeployment(appId, "staging")) + .then(getApp) + .then( + eql({ + collaborators: { + "test@t.com": { + permission: "Owner" + } + }, + deployments: ["stuff"], + name: "Hello" + }) + ) + .then(_ => dao.renameDeployment(appId, "stuff", "newStuff")) + .then(getApp) + .then(res => { + expect(res.deployments).to.eql(["newStuff"]); + }) + .then(_ => dao.removeDeployment(appId, "newStuff")) + .then(getApp) + .then(res => { + expect(res.deployments).to.eql(null); + }); + })); + + it("should find an app based on name and user", () => + dao + .createApp({ + name: "Hello", + deployments: { + staging: { + key: "123" + } + }, + collaborators: { "test@t.com": { permission: "Owner" } } + }) + .then(_ => + dao.createApp({ + name: "Hello", + deployments: { + staging: { + key: "123" + } + }, + collaborators: { "test@nt.com": { permission: "Owner" } } + }) + ) + .then(_ => + dao.appForCollaborator("test@nt.com", "Hello").then(app => { + expect(app.name).to.eql("Hello"); + expect(Object.keys(app.collaborators)).to.eql(["test@nt.com"]); + }) + )); + + it("should add a package to a deployment", () => { + return dao + .createApp({ + name: "Hello", + deployments: { + staging: { + key: "123" + } + }, + collaborators: { "test@t.com": { permission: "Owner" } } + }) + .then(app => { + return dao.addPackage("123", { + packageHash: "abc", + description: "This is a package" + }); + }) + .then(_ => dao.deploymentForKey("123")) + .then(dep => { + expect(dep.package.description).to.eql("This is a package"); + }); + }); + + it("should add remove an app with a deployment", () => { + return dao + .createApp({ + name: "Hello", + deployments: { + staging: { + key: "123" + }, + other: { + key: "456" + } + }, + collaborators: { "addremove@t.com": { permission: "Owner" } } + }) + .then(app => + dao + .addPackage("123", { + packageHash: "abc", + description: "This is a package" + }) + .then(_ => dao.removeApp(app.id)) + .then(_ => dao.appsForCollaborator("addremove@t.com")) + ) + .then(apps => expect(apps).to.eql([])); + }); + + it("should get deployments by keys", () => + dao + .createApp({ + name: "Hello", + deployments: { + staging: { + key: "123" + }, + 
other: { + key: "456" + } + }, + collaborators: { "test@t.com": { permission: "Owner" } } + }) + .then(app => dao.deploymentsByApp(app.id, app.deployments)) + .then(deployments => { + expect(deployments.staging).to.exist; + expect(deployments.other).to.exist; + })); + + it("should get single deployment by key", () => + dao + .createApp({ + name: "HelloApp", + deployments: { + single: { key: "aaa" }, + other: { key: "bbb" } + } + }) + .then(app => dao.deploymentByApp(app.id, "single")) + .then(deployment => { + expect(deployment).to.exist; + expect(deployment.key).to.equal("aaa"); + })); + + it("Deployment.findOneAsync should order history_ by createdAt desc", () => + dao + .createApp({ + name: "App History", + deployments: { + staging: { key: "sorted_history" } + } + }) + .then(app => { + let pkg1_hash, pkg2_hash, pkg3_hash; + return dao + .addPackage("sorted_history", { + packageHash: "pkg1", + description: "Package 1" + }) + .then(phash => (pkg1_hash = phash)) + .then(_ => clock.tick(1000)) + .then(_ => + dao.addPackage("sorted_history", { + packageHash: "pkg2", + description: "Package 2" + }) + ) + .then(phash => (pkg2_hash = phash)) + .then(_ => clock.tick(-2000)) + .then(_ => + dao.addPackage("sorted_history", { + packageHash: "pkg3", + description: "Package 3" + }) + ) + .then(phash => (pkg3_hash = phash)) + .then(_ => dao.deploymentForKey("sorted_history")) + .then(dep => { + expect(dep.history_).to.have.deep.equal([ + pkg2_hash.id_, + pkg1_hash.id_, + pkg3_hash.id_ + ]); + }); + })); + it("updatePackage updates the latest package", () => + dao + .createApp({ + name: "updatePackage updates latest", + deployments: { staging: { key: "updatepackage" } } + }) + .then(app => { + let pkg_id; + return dao + .addPackage("updatepackage", { + packageHash: "Old package", + description: "Old package" + }) + .then(_ => clock.tick(3000)) + .then(_ => + dao.addPackage("updatepackage", { + packageHash: "Latest package", + description: "Latest package" + }) + ) + .then(pkg => (pkg_id = pkg.id_)) + .then(_ => clock.tick(-5000)) + .then(_ => + dao.addPackage("updatepackage", { + packageHash: "Oldest package", + description: "Oldest package" + }) + ) + .then(_ => + dao.updatePackage("updatepackage", { + description: "Latest greatest package" + }) + ) + .then(pkg => dao.packageById(pkg_id)) + .then(pkg => { + expect(pkg.description).to.equal("Latest greatest package"); + }); + })); + + it("history by id", () => + dao + .createApp({ + name: "History by id", + deployments: { + staging: { key: "history_by_ids" } + } + }) + .then(app => + dao + .addPackage("history_by_ids", { + packageHash: "pkg1", + description: "Package 1" + }) + .then(_ => clock.tick(1000)) + .then(_ => + dao.addPackage("history_by_ids", { + packageHash: "pkg2", + description: "Package 2" + }) + ) + .then(_ => clock.tick(-2000)) + .then(_ => + dao.addPackage("history_by_ids", { + packageHash: "pkg3", + description: "Package 3" + }) + ) + .then(_ => dao.history(app.id, "staging")) + .then(packages => dao.historyByIds(packages.map(h => h.id_))) + .then(pkgs => { + expect(pkgs).has.length(3); + expect(pkgs[0].packageHash).to.equal("pkg2"); + expect(pkgs[1].packageHash).to.equal("pkg1"); + expect(pkgs[2].packageHash).to.equal("pkg3"); + }) + )); + + it("history by label", () => + dao + .createApp({ + name: "History by Label", + deployments: { + staging: { key: "history_by_label" } + } + }) + .then(app => + dao + .addPackage("history_by_label", { + packageHash: "package1", + description: "Package 1 with label BLUE", + label: "BLUE" + }) + 
.then(_ => + dao.addPackage("history_by_label", { + packageHash: "package2", + description: "Package 2 with label RED", + label: "RED" + }) + ) + .then(_ => dao.historyLabel(app.id, "staging", "RED")) + .then(pkg => { + expect(pkg.packageHash).to.equal("package2"); + }) + )); + + it("should clear deployment history", () => + dao + .createApp({ + name: "App to clear", + deployments: { + staging: { + key: "delete_me" + } + } + }) + .then(app => + dao + .addPackage("delete_me", { + packageHash: "def", + description: "This is a package to delete" + }) + .then(_ => dao.history(app.id, "staging")) + .then(history => { + expect(history).to.have.length(1); + expect(history[0].packageHash).to.equal("def"); + expect(history[0].description).to.equal( + "This is a package to delete" + ); + }) + .then(_ => dao.clearHistory(app.id, "staging")) + .then(_ => dao.history(app.id, "staging")) + .then(history => expect(history).to.be.empty) + )); + + it("should set and update isDisabled and isMandatory", () => + dao + .createApp({ + name: "App with disabled and mandatory packages", + deployments: { + staging: { key: "staging_disabled_mandatory" } + } + }) + .then(app => + dao + .addPackage("staging_disabled_mandatory", { + packageHash: "mandatory_package_hash", + description: "Mandatory package", + isMandatory: true + }) + .then(_ => dao.history(app.id, "staging")) + .then(history => { + expect(history[0].packageHash).to.equal("mandatory_package_hash"); + expect(history[0].isMandatory).is.true; + expect(history[0].isDisabled).is.false; + history[0].isMandatory = false; + history[0].isDisabled = true; + return dao.updatePackage("staging_disabled_mandatory", history[0]); + }) + .then(pkg => { + expect(pkg.isMandatory).is.false; + expect(pkg.isDisabled).is.true; + }) + )); + + it("save diffPackage", () => + dao + .createApp({ + name: "App to test diffPackage", + deployments: { staging: { key: "staging_diff_package" } } + }) + .then(app => { + return dao + .addPackage("staging_diff_package", { + packageHash: "hash1ForDiff", + description: "Package for testing Diff packages" + }) + .then(pkg => { + expect(pkg.diffPackageMap).to.be.empty; + pkg.diffPackageMap["diff_pkg_1"] = { url: "url1", size: 100 }; + return dao.updatePackage("staging_diff_package", pkg); + }) + .then(pkg => { + pkg.diffPackageMap["diff_pkg_2"] = { url: "url2", size: 101 }; + return dao.updatePackage("staging_diff_package", pkg); + }) + .then(pkg => dao.packageById(pkg.id_)) + .then(pkg => { + expect(Object.keys(pkg.diffPackageMap)).to.have.all.members([ + "diff_pkg_1", + "diff_pkg_2" + ]); + }); + })); + it("Test rollback thru addPackage()", () => + dao + .createApp({ + name: "App for rollback", + deployments: { staging: { key: "rollback_deployment" } } + }) + .then(app => { + return dao + .addPackage("rollback_deployment", { + packageHash: "originalFirstPackage", + description: "Original Package to rollback" + }) + .then(rollbackTo => { + const pkg = Object.assign({}, rollbackTo, { + uploadTime: Date.now(), + rollout: 100, + releasedBy: "rollback_man@walmart.com", + releaseMethod: "Rollback", + originalLabel: rollbackTo.label, + label: `v12` + }); + delete pkg["id_"]; + delete pkg["created_"]; + return dao + .addPackage("rollback_deployment", pkg) + .then(rolledBackPkg => { + expect(rolledBackPkg.releaseMethod).to.equal("Rollback"); + expect(rolledBackPkg.rollout).to.equal(100); + expect(rolledBackPkg.created_).to.not.equal( + rollbackTo.created_ + ); + expect(rolledBackPkg.releasedBy).to.equal( + "rollback_man@walmart.com" + ); + }); + }); + 
})); + + it("should insert and get metrics", () => + dao + .createApp({ + name: "appWIthDeployment", + deployments: { + staging: { key: "5UfjnOxv1FnCJ_DwPqMWQYSVlp0H4yecGHaB-" } + } + }) + .then(app => + dao.insertMetric({ + appVersion: "1.0.0", + deploymentKey: "5UfjnOxv1FnCJ_DwPqMWQYSVlp0H4yecGHaB-", + clientUniqueId: "fe231438a4f62c70", + label: "v1", + status: "DeploymentSucceeded", + previousLabelOrAppVersion: "1.0.0", + previousDeploymentKey: "5UfjnOxv1FnCJ_DwPqMWQYSVlp0H4yecGHaB-" + }) + ) + .then(_ => dao.metrics("5UfjnOxv1FnCJ_DwPqMWQYSVlp0H4yecGHaB-")) + .then( + eql([ + { + appVersion: "1.0.0", + deploymentKey: "5UfjnOxv1FnCJ_DwPqMWQYSVlp0H4yecGHaB-", + clientUniqueId: "fe231438a4f62c70", + label: "v1", + status: "DeploymentSucceeded", + previousLabelOrAppVersion: "1.0.0", + previousDeploymentKey: "5UfjnOxv1FnCJ_DwPqMWQYSVlp0H4yecGHaB-" + } + ]) + )); + + it("should insert and retrieve client ratio", () => + dao + .insertClientRatio("clientUniqueId1", "packageHash1", 12.0, true) + .then(_ => dao.clientRatio("clientUniqueId1", "packageHash1")) + .then(ratio => { + expect(ratio.updated).to.equal(true); + expect(ratio.ratio).to.equal(12.0); + }) + .then(_ => dao.clientRatio("clientUniqueid1", "packageHashNonExistant")) + .then(ratio => { + expect(ratio).is.null; + })); + + it("upload and download package content", () => + dao + .upload("a-package-hash", "This is the package content") + .then(_ => dao.download("a-package-hash")) + .then(content => { + expect(content + "").to.equal("This is the package content"); + })); +}); diff --git a/electrode-ota-server-dao-mariadb/yarn.lock b/electrode-ota-server-dao-mariadb/yarn.lock new file mode 100644 index 00000000..5ad012fc --- /dev/null +++ b/electrode-ota-server-dao-mariadb/yarn.lock @@ -0,0 +1,352 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +"@types/geojson@^1.0.0": + version "1.0.6" + resolved "https://registry.yarnpkg.com/@types/geojson/-/geojson-1.0.6.tgz#3e02972728c69248c2af08d60a48cbb8680fffdf" + +"@types/node@*": + version "8.5.7" + resolved "https://registry.yarnpkg.com/@types/node/-/node-8.5.7.tgz#9c498c35af354dcfbca3790fb2e81129e93cf0e2" + +ansicolors@~0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/ansicolors/-/ansicolors-0.2.1.tgz#be089599097b74a5c9c4a84a0cdbcdb62bd87aef" + +bluebird@^3.4.6, bluebird@^3.5.1: + version "3.5.1" + resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.5.1.tgz#d9551f9de98f1fcda1e683d17ee91a0602ee2eb9" + +cardinal@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/cardinal/-/cardinal-1.0.0.tgz#50e21c1b0aa37729f9377def196b5a9cec932ee9" + dependencies: + ansicolors "~0.2.1" + redeyed "~1.0.0" + +cls-bluebird@^2.0.1: + version "2.1.0" + resolved "https://registry.yarnpkg.com/cls-bluebird/-/cls-bluebird-2.1.0.tgz#37ef1e080a8ffb55c2f4164f536f1919e7968aee" + dependencies: + is-bluebird "^1.0.2" + shimmer "^1.1.0" + +core-util-is@~1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" + +debug@^2.6.9: + version "2.6.9" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + dependencies: + ms "2.0.0" + +debug@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261" + dependencies: + ms "2.0.0" + +denque@^1.1.1: + version "1.2.2" + resolved "https://registry.yarnpkg.com/denque/-/denque-1.2.2.tgz#e06cf7cf0da8badc88cbdaabf8fc0a70d659f1d4" + +depd@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.1.tgz#5783b4e1c459f06fa5ca27f991f3d06e7a310359" + +diff@^3.1.0: + version "3.4.0" + resolved "https://registry.yarnpkg.com/diff/-/diff-3.4.0.tgz#b1d85507daf3964828de54b37d0d73ba67dda56c" + +dottie@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/dottie/-/dottie-2.0.0.tgz#da191981c8b8d713ca0115d5898cf397c2f0ddd0" + +esprima@~3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-3.0.0.tgz#53cf247acda77313e551c3aa2e73342d3fb4f7d9" + +formatio@1.2.0, formatio@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/formatio/-/formatio-1.2.0.tgz#f3b2167d9068c4698a8d51f4f760a39a54d818eb" + dependencies: + samsam "1.x" + +generate-function@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/generate-function/-/generate-function-2.0.0.tgz#6858fe7c0969b7d4e9093337647ac79f60dfbe74" + +generic-pool@^3.1.8: + version "3.4.0" + resolved "https://registry.yarnpkg.com/generic-pool/-/generic-pool-3.4.0.tgz#6b76fc201bb49a0ff381450f585352378fb1c08f" + +has-flag@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-2.0.0.tgz#e8207af1cc7b30d446cc70b734b5e8be18f88d51" + +iconv-lite@^0.4.18: + version "0.4.19" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.19.tgz#f7468f60135f5e5dad3399c0a81be9a1603a082b" + +inflection@1.12.0: + version "1.12.0" + resolved "https://registry.yarnpkg.com/inflection/-/inflection-1.12.0.tgz#a200935656d6f5f6bc4dc7502e1aecb703228416" + +inherits@~2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" + +is-bluebird@^1.0.2: + version "1.0.2" + resolved 
"https://registry.yarnpkg.com/is-bluebird/-/is-bluebird-1.0.2.tgz#096439060f4aa411abee19143a84d6a55346d6e2" + +isarray@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" + +isarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + +just-extend@^1.1.26: + version "1.1.27" + resolved "https://registry.yarnpkg.com/just-extend/-/just-extend-1.1.27.tgz#ec6e79410ff914e472652abfa0e603c03d60e905" + +lodash.get@^4.4.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99" + +lodash@^4.17.1: + version "4.17.4" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.4.tgz#78203a4d1c328ae1d86dca6460e369b57f4055ae" + +lolex@^1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/lolex/-/lolex-1.6.0.tgz#3a9a0283452a47d7439e72731b9e07d7386e49f6" + +lolex@^2.2.0: + version "2.3.1" + resolved "https://registry.yarnpkg.com/lolex/-/lolex-2.3.1.tgz#3d2319894471ea0950ef64692ead2a5318cff362" + +long@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/long/-/long-3.2.0.tgz#d821b7138ca1cb581c172990ef14db200b5c474b" + +lru-cache@2.5.0: + version "2.5.0" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-2.5.0.tgz#d82388ae9c960becbea0c73bb9eb79b6c6ce9aeb" + +lru-cache@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.1.tgz#622e32e82488b49279114a4f9ecf45e7cd6bba55" + dependencies: + pseudomap "^1.0.2" + yallist "^2.1.2" + +moment-timezone@^0.5.4: + version "0.5.14" + resolved "https://registry.yarnpkg.com/moment-timezone/-/moment-timezone-0.5.14.tgz#4eb38ff9538b80108ba467a458f3ed4268ccfcb1" + dependencies: + moment ">= 2.9.0" + +"moment@>= 2.9.0", moment@^2.13.0: + version "2.20.1" + resolved "https://registry.yarnpkg.com/moment/-/moment-2.20.1.tgz#d6eb1a46cbcc14a2b2f9434112c1ff8907f313fd" + +ms@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + +mysql2@^1.5.1: + version "1.5.1" + resolved "https://registry.yarnpkg.com/mysql2/-/mysql2-1.5.1.tgz#2411d6fb958af86b2304b7a53bc54b26e77e682b" + dependencies: + cardinal "1.0.0" + denque "^1.1.1" + generate-function "^2.0.0" + iconv-lite "^0.4.18" + long "^3.2.0" + lru-cache "^4.1.1" + named-placeholders "1.1.1" + object-assign "^4.1.1" + readable-stream "2.3.2" + safe-buffer "^5.0.1" + seq-queue "0.0.5" + sqlstring "^2.2.0" + +named-placeholders@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/named-placeholders/-/named-placeholders-1.1.1.tgz#3b7a0d26203dd74b3a9df4c9cfb827b2fb907e64" + dependencies: + lru-cache "2.5.0" + +nise@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/nise/-/nise-1.2.0.tgz#079d6cadbbcb12ba30e38f1c999f36ad4d6baa53" + dependencies: + formatio "^1.2.0" + just-extend "^1.1.26" + lolex "^1.6.0" + path-to-regexp "^1.7.0" + text-encoding "^0.6.4" + +object-assign@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + +path-to-regexp@^1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-1.7.0.tgz#59fde0f435badacba103a84e9d3bc64e96b9937d" + dependencies: + isarray "0.0.1" + +process-nextick-args@~1.0.6: + version "1.0.7" + resolved 
"https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-1.0.7.tgz#150e20b756590ad3f91093f25a4f2ad8bff30ba3" + +pseudomap@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" + +readable-stream@2.3.2: + version "2.3.2" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.2.tgz#5a04df05e4f57fe3f0dc68fdd11dc5c97c7e6f4d" + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + process-nextick-args "~1.0.6" + safe-buffer "~5.1.0" + string_decoder "~1.0.0" + util-deprecate "~1.0.1" + +redeyed@~1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/redeyed/-/redeyed-1.0.1.tgz#e96c193b40c0816b00aec842698e61185e55498a" + dependencies: + esprima "~3.0.0" + +retry-as-promised@^2.3.1: + version "2.3.2" + resolved "https://registry.yarnpkg.com/retry-as-promised/-/retry-as-promised-2.3.2.tgz#cd974ee4fd9b5fe03cbf31871ee48221c07737b7" + dependencies: + bluebird "^3.4.6" + debug "^2.6.9" + +safe-buffer@^5.0.1, safe-buffer@~5.1.0: + version "5.1.1" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.1.tgz#893312af69b2123def71f57889001671eeb2c853" + +samsam@1.x: + version "1.3.0" + resolved "https://registry.yarnpkg.com/samsam/-/samsam-1.3.0.tgz#8d1d9350e25622da30de3e44ba692b5221ab7c50" + +semver@^5.0.1: + version "5.4.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.4.1.tgz#e059c09d8571f0540823733433505d3a2f00b18e" + +seq-queue@0.0.5: + version "0.0.5" + resolved "https://registry.yarnpkg.com/seq-queue/-/seq-queue-0.0.5.tgz#d56812e1c017a6e4e7c3e3a37a1da6d78dd3c93e" + +sequelize@^4.28.1: + version "4.29.0" + resolved "https://registry.yarnpkg.com/sequelize/-/sequelize-4.29.0.tgz#d858dacda903a5e67b7239c600fa322b83db80f6" + dependencies: + bluebird "^3.4.6" + cls-bluebird "^2.0.1" + debug "^3.0.0" + depd "^1.1.0" + dottie "^2.0.0" + generic-pool "^3.1.8" + inflection "1.12.0" + lodash "^4.17.1" + moment "^2.13.0" + moment-timezone "^0.5.4" + retry-as-promised "^2.3.1" + semver "^5.0.1" + terraformer-wkt-parser "^1.1.2" + toposort-class "^1.0.1" + uuid "^3.0.0" + validator "^9.1.0" + wkx "^0.4.1" + +shimmer@^1.1.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/shimmer/-/shimmer-1.2.0.tgz#f966f7555789763e74d8841193685a5e78736665" + +sinon@^4.1.3: + version "4.1.3" + resolved "https://registry.yarnpkg.com/sinon/-/sinon-4.1.3.tgz#fc599eda47ed9f1a694ce774b94ab44260bd7ac5" + dependencies: + diff "^3.1.0" + formatio "1.2.0" + lodash.get "^4.4.2" + lolex "^2.2.0" + nise "^1.2.0" + supports-color "^4.4.0" + type-detect "^4.0.5" + +sqlstring@^2.2.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/sqlstring/-/sqlstring-2.3.0.tgz#525b8a4fd26d6f71aa61e822a6caf976d31ad2a8" + +string_decoder@~1.0.0: + version "1.0.3" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.0.3.tgz#0fc67d7c141825de94282dd536bec6b9bce860ab" + dependencies: + safe-buffer "~5.1.0" + +supports-color@^4.4.0: + version "4.5.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-4.5.0.tgz#be7a0de484dec5c5cddf8b3d59125044912f635b" + dependencies: + has-flag "^2.0.0" + +terraformer-wkt-parser@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/terraformer-wkt-parser/-/terraformer-wkt-parser-1.1.2.tgz#336a0c8fc82094a5aff83288f69aedecd369bf0c" + dependencies: + terraformer "~1.0.5" + +terraformer@~1.0.5: + version "1.0.8" + resolved 
"https://registry.yarnpkg.com/terraformer/-/terraformer-1.0.8.tgz#51e0ad89746fcf2161dc6f65aa70e42377c8b593" + dependencies: + "@types/geojson" "^1.0.0" + +text-encoding@^0.6.4: + version "0.6.4" + resolved "https://registry.yarnpkg.com/text-encoding/-/text-encoding-0.6.4.tgz#e399a982257a276dae428bb92845cb71bdc26d19" + +toposort-class@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/toposort-class/-/toposort-class-1.0.1.tgz#7ffd1f78c8be28c3ba45cd4e1a3f5ee193bd9988" + +type-detect@^4.0.5: + version "4.0.5" + resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.5.tgz#d70e5bc81db6de2a381bcaca0c6e0cbdc7635de2" + +util-deprecate@~1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + +uuid@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.1.0.tgz#3dd3d3e790abc24d7b0d3a034ffababe28ebbc04" + +validator@^9.1.0: + version "9.2.0" + resolved "https://registry.yarnpkg.com/validator/-/validator-9.2.0.tgz#ad216eed5f37cac31a6fe00ceab1f6b88bded03e" + +wkx@^0.4.1: + version "0.4.2" + resolved "https://registry.yarnpkg.com/wkx/-/wkx-0.4.2.tgz#776d35a634a5c22e656e4744bdeb54f83fd2ce8d" + dependencies: + "@types/node" "*" + +yallist@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" diff --git a/electrode-ota-server-model-app/src/app.js b/electrode-ota-server-model-app/src/app.js index 5cbf7d54..b7b9769a 100644 --- a/electrode-ota-server-model-app/src/app.js +++ b/electrode-ota-server-model-app/src/app.js @@ -1,338 +1,477 @@ -import {id, key, toJSON} from 'electrode-ota-server-util'; -import {isZip, generate, manifestHash} from 'electrode-ota-server-model-manifest/lib/manifest'; -import {shasum} from 'electrode-ota-server-util'; +import { id, key, toJSON } from "electrode-ota-server-util"; +import { + isZip, + generate, + manifestHash +} from "electrode-ota-server-model-manifest/lib/manifest"; +import { shasum } from "electrode-ota-server-util"; import { - alreadyExists, - alreadyExistsMsg, - notFound, - missingParameter, - notAuthorized, - invalidRequest -} from 'electrode-ota-server-errors'; - -const excludeNull = obj => Object.keys(obj).reduce((ret, key) => { + alreadyExists, + alreadyExistsMsg, + notFound, + missingParameter, + notAuthorized, + invalidRequest +} from "electrode-ota-server-errors"; + +const excludeNull = obj => + Object.keys(obj).reduce((ret, key) => { if (obj[key] == null) return ret; ret[key] = obj[key]; return ret; -}, {}); - + }, {}); export const Perms = { - Owner: ['Owner'], - Collaborator: ['Collaborator'], - Any: ['Owner', 'Collaborator'] + Owner: ["Owner"], + Collaborator: ["Collaborator"], + Any: ["Owner", "Collaborator"] }; -const toBuffer = (obj) => JSON.stringify(obj); - -const hasDeploymentName = ({deployments}, deployment) => { - if (!deployments)return false; - if (Array.isArray(deployments)) { - return deployments.indexOf(deployment) > -1; - } - return deployment in deployments; +const toBuffer = obj => JSON.stringify(obj); + +const hasDeploymentName = ({ deployments }, deployment) => { + if (!deployments) return false; + if (Array.isArray(deployments)) { + return deployments.indexOf(deployment) > -1; + } + return deployment in deployments; }; const packageContainsChanges = (deploymentInDb, uploadedContent) => { - if (deploymentInDb && deploymentInDb.package && deploymentInDb.package.packageHash) { - let uploadedContentPackageHash 
= shasum(uploadedContent); - return uploadedContentPackageHash !== deploymentInDb.package.packageHash; - } - return true; + if ( + deploymentInDb && + deploymentInDb.package && + deploymentInDb.package.packageHash + ) { + let uploadedContentPackageHash = shasum(uploadedContent); + return uploadedContentPackageHash !== deploymentInDb.package.packageHash; + } + return true; }; const hasPerm = (app, email, perms = Perms.Any) => { - missingParameter(app, 'app'); - missingParameter(email, 'email'); - const c = app.collaborators[email]; - return c && perms.indexOf(c.permission) > -1; + missingParameter(app, "app"); + missingParameter(email, "email"); + const c = app.collaborators[email]; + return c && perms.indexOf(c.permission) > -1; }; const _newDeployment = name => { - return { - "createdTime": Date.now(), - name, - "key": key(), - "id": id(), - "package": null - } + return { + createdTime: Date.now(), + name, + key: key(), + id: id(), + package: null + }; }; const _addDeployment = (app, name) => { - if (!app.deployments) { - app.deployments = {}; - } - return (app.deployments[name] = _newDeployment(name)); + if (!app.deployments) { + app.deployments = {}; + } + return (app.deployments[name] = _newDeployment(name)); }; const notAuthorizedPerm = (app, email, perm, message) => { - if (hasPerm(app, email, Perms[perm] || perm)) { - return app; - } - return notAuthorized(false, message); + if (hasPerm(app, email, Perms[perm] || perm)) { + return app; + } + return notAuthorized(false, message); }; export default (options, dao, upload, download, logger) => { - const api = {}; - return Object.assign(api, { - findApp({email, app}){ - return dao.appForCollaborator(email, app).then(result => notFound(result, `App not found ${app}`)).then(toJSON); - }, - _findApp(find, perm = 'Owner', errorMessage = `Do not have permission to do this operation.`){ - return api.findApp(find).then(app => notAuthorizedPerm(app, find.email, perm, errorMessage)); - }, - createApp({email, name, deployments = ["Production", "Staging"]}) { - if (!name) { - return Promise.reject(missingParameter(name, 'name')); + const api = {}; + return Object.assign(api, { + findApp({ email, app }) { + return dao + .appForCollaborator(email, app) + .then(result => notFound(result, `App not found ${app}`)) + .then(toJSON); + }, + _findApp( + find, + perm = "Owner", + errorMessage = `Do not have permission to do this operation.` + ) { + return api + .findApp(find) + .then(app => notAuthorizedPerm(app, find.email, perm, errorMessage)); + }, + createApp({ email, name, deployments = ["Production", "Staging"] }) { + if (!name) { + return Promise.reject(missingParameter(name, "name")); + } + return dao.appForCollaborator(email, name).then(check => { + alreadyExists(!check, name); + + const app = { + name, + collaborators: { + [email]: { + permission: "Owner" } - return dao.appForCollaborator(email, name).then(check => { - alreadyExists(!check, name); - - const app = ({ - name, - "collaborators": { - [email]: { - "permission": "Owner" - } - }, - "deployments": {} - }); - - deployments.forEach((name) => app.deployments[name] = _addDeployment(app, name)); - return dao.createApp(app) - .tap((app) => logger.info({ name, appId : app.id }, "app created")); - }); - }, - - removeApp(find) { - return api._findApp(find, 'Owner', 'Must be owner of app to remove') - .then(app => dao.removeApp(app.id).then(v => app)) - .tap((app) => logger.info({ appId : app.id }, "app removed")); - }, - - renameApp(find){ - return api._findApp(find, 'Owner', 'Must be owner of app 
to rename').then(app => { - const oldName = app.name; - app.name = find.name; - return dao.updateApp(app.id, app).then(v => app) - .tap(() => logger.info({ oldName, newName : app.name, appId : app.id }, "app renamed")); - }); - }, - - transferApp(find) - { - return api._findApp(find, 'Owner', 'Must be owner of app to transfer').then(app => dao.userByEmail(find.transfer).then(u => { - notFound(u, `The specified e-mail address doesn't represent a registered user`); - const owner = app.collaborators[find.email]; - const transfer = app.collaborators[find.transfer] || (app.collaborators[find.transfer] = {}); - owner.permission = 'Collaborator'; - transfer.permission = 'Owner'; - return dao.updateApp(app.id, app).then(toJSON) - .tap(() => logger.info({ - id : app.id, - oldOwner : find.email, - newOwner : find.transfer - }, "app transferred")); - })); - }, - - listApps({email}){ - return dao.appsForCollaborator(email).then(toJSON) - .tap(() => logger.info({ additional : { email }}, "got app list")); - }, - - listDeployments(find){ - return api.findApp(find).then(app => dao.deploymentsByApp(app.id, app.deployments) - .then(deployments => { - logger.info({ appId : app.id }, "fetched deployments"); - return app.deployments.map(name => deployments[name]); - })); - }, - - async getDeployment(find){ - const app = await api.findApp(find); - const deployment = await dao.deploymentByApp(app.id, find.deployment); - logger.info({ appId : app.id, deployment : find.deployment }, "fetched deployment") - return deployment; - }, - - removeDeployment(params){ - return api.findApp(params).then(app => { - return dao.removeDeployment(app.id, params.deployment) - .tap(() => logger.info({ appId : app.id, deployment : params.deployment }, "removed deployment")); - }); - }, - - renameDeployment(params){ - return api.findApp(params).then(app => { - const {deployment, name} = params; - notFound(hasDeploymentName(app, deployment), `Deployment '${deployment}' not found ${params.app}`); - alreadyExists(!hasDeploymentName(app, name), name, 'deployment'); - return dao.renameDeployment(app.id, deployment, name) - .tap(() => logger.info({ appId : app.id, oldName : deployment, newName : name }, "renamed deployment")); - }); - }, - - promoteDeployment(params) - { - return api.findApp(params).then(app => { - - return dao.deploymentsByApp(app.id, [params.deployment, params.to]).then(async (deployments) => { - const f = deployments[params.deployment]; - - notFound(f && f.package, `Deployment "${params.deployment}" does not exist.`); - - const t = notFound(deployments[params.to], `Deployment "${params.to}" does not exist.`); - - let pkg = f.package; - - if (params.label) { - const packages = await dao.history(app.id, [params.deployment]); - pkg = packages.find((packag) => { - return packag.label === params.label; - }); - - notFound(pkg, `Deployment "${params.deployment}" has no package with label "${params.label}"`); - } - - const existingPackage = t.package; - - // check to make sure that it is not already promoted - if (existingPackage) { - alreadyExistsMsg((existingPackage.packageHash !== pkg.packageHash), - `Deployment ${params.deployment}:${pkg.label} has already been promoted to ${params.to}:${existingPackage.label}.`); - } - - // rollout property should not be carried forward on promotion - const { - isDisabled = pkg.isDisabled, - isMandatory = pkg.isMandatory, - rollout, // = pkg.rollout, - appVersion = pkg.appVersion, - description = pkg.description, - } = excludeNull(params); - - return dao.addPackage(t.key, { - packageHash: 
pkg.packageHash, - isDisabled, - isMandatory, - rollout, - appVersion, - uploadTime: Date.now(), - description, - releasedBy: params.email, - releaseMethod: "Promote", - originalLabel: pkg.label, - originalDeployment: params.deployment, - blobUrl : pkg.blobUrl, - manifestBlobUrl : pkg.manifestBlobUrl, - size : pkg.size, - label : "v" + (t.history_ ? t.history_.length + 1 : 1) - }).tap(() => { - logger.info({ - appId : app.id, - fromDeployment : params.deployment, - toDeployment : params.to, - originalLabel : pkg.label - }, "promoted deployment"); - }); - }); - }); - }, - - async historyDeployment({app, deployment, email}) - { - const capp = await api.findApp({app, deployment, email}); - const all = await dao.history(capp.id, deployment); - const map = all.map(toJSON); - //TODO -make less worse. - for (let i = map.length - 1; i >= 0; --i) { - delete map[i].created_; + }, + deployments: {} + }; + + deployments.forEach( + name => (app.deployments[name] = _addDeployment(app, name)) + ); + return dao + .createApp(app) + .tap(app => logger.info({ name, appId: app.id }, "app created")); + }); + }, + + removeApp(find) { + return api + ._findApp(find, "Owner", "Must be owner of app to remove") + .then(app => dao.removeApp(app.id).then(v => app)) + .tap(app => logger.info({ appId: app.id }, "app removed")); + }, + + renameApp(find) { + return api + ._findApp(find, "Owner", "Must be owner of app to rename") + .then(app => { + const oldName = app.name; + app.name = find.name; + return dao + .updateApp(app.id, app) + .then(v => app) + .tap(() => + logger.info( + { oldName, newName: app.name, appId: app.id }, + "app renamed" + ) + ); + }); + }, + + transferApp(find) { + return api + ._findApp(find, "Owner", "Must be owner of app to transfer") + .then(app => + dao.userByEmail(find.transfer).then(u => { + notFound( + u, + `The specified e-mail address doesn't represent a registered user` + ); + const owner = app.collaborators[find.email]; + const transfer = + app.collaborators[find.transfer] || + (app.collaborators[find.transfer] = {}); + owner.permission = "Collaborator"; + transfer.permission = "Owner"; + return dao + .updateApp(app.id, app) + .then(toJSON) + .tap(() => + logger.info( + { + id: app.id, + oldOwner: find.email, + newOwner: find.transfer + }, + "app transferred" + ) + ); + }) + ); + }, + + listApps({ email }) { + return dao + .appsForCollaborator(email) + .then(toJSON) + .tap(() => logger.info({ additional: { email } }, "got app list")); + }, + + listDeployments(find) { + return api.findApp(find).then(app => + dao.deploymentsByApp(app.id, app.deployments).then(deployments => { + logger.info({ appId: app.id }, "fetched deployments"); + return app.deployments.map(name => deployments[name]); + }) + ); + }, + + async getDeployment(find) { + const app = await api.findApp(find); + const deployment = await dao.deploymentByApp(app.id, find.deployment); + logger.info( + { appId: app.id, deployment: find.deployment }, + "fetched deployment" + ); + return deployment; + }, + + removeDeployment(params) { + return api.findApp(params).then(app => { + return dao + .removeDeployment(app.id, params.deployment) + .tap(() => + logger.info( + { appId: app.id, deployment: params.deployment }, + "removed deployment" + ) + ); + }); + }, + + renameDeployment(params) { + return api.findApp(params).then(app => { + const { deployment, name } = params; + notFound( + hasDeploymentName(app, deployment), + `Deployment '${deployment}' not found ${params.app}` + ); + alreadyExists(!hasDeploymentName(app, name), name, 
"deployment"); + return dao + .renameDeployment(app.id, deployment, name) + .tap(() => + logger.info( + { appId: app.id, oldName: deployment, newName: name }, + "renamed deployment" + ) + ); + }); + }, + + promoteDeployment(params) { + return api.findApp(params).then(app => { + return dao + .deploymentsByApp(app.id, [params.deployment, params.to]) + .then(async deployments => { + const f = deployments[params.deployment]; + + notFound( + f && f.package, + `Deployment "${params.deployment}" does not exist.` + ); + + const t = notFound( + deployments[params.to], + `Deployment "${params.to}" does not exist.` + ); + + let pkg = f.package; + + if (params.label) { + const packages = await dao.history(app.id, [params.deployment]); + pkg = packages.find(packag => { + return packag.label === params.label; + }); + + notFound( + pkg, + `Deployment "${params.deployment}" has no package with label "${ + params.label + }"` + ); } - logger.info({ appId : capp.id, deployment : deployment }, "fetched deployment history"); - return map; - }, - - - updateDeployment(params) - { - return api.findApp(params).then(app => dao.deploymentByApp(app.id, params.deployment).then(deployment => { - notFound(deployment, `Deployment not found '${params.deployment}'`); - notFound(deployment.package, `Deployment has no releases.`); - - const pkg = deployment.package; - const { - isDisabled = pkg.isDisabled, - isMandatory = pkg.isMandatory, - rollout = pkg.rollout, - appVersion = pkg.appVersion, - description = pkg.description - } = excludeNull(params); - - invalidRequest(!(params.rollout != null && (pkg.rollout != null && params.rollout < pkg.rollout)), `Can not set rollout below existing rollout ${pkg.rollout}`); - - const npkg = { - isDisabled, - isMandatory, - rollout, - appVersion, - description - }; - - return dao.updatePackage(deployment.key, npkg) - .tap(() => logger.info({ appId : app.id, deployment : params.deployment }, "updated deployment")); - })).then(toJSON); - }, - - addDeployment({email, app, name}) - { - return api._findApp({ - email, - app - }, 'Any', `Do not have permission to add deployment to '${app}'.`).then(app => { - - alreadyExists(!hasDeploymentName(app, name), name, `deployment`); - return dao.addDeployment(app.id, name, _newDeployment(name)) - .tap(() => logger.info({ appId : app.id, deployment : name }, "added deployment")); - }); - }, - - removeCollaborator({email, app, collaborator}) - { - return api._findApp({email, app}, 'Owner', `Must be owner to remove a collaborator`).then(app => { - - notAuthorized(app.collaborators[collaborator].permission !== 'Owner', - `Cannot remove the owner of the app from collaborator list.`); - notFound((email in app.collaborators), - `The given account is not a collaborator for this app.`); - - delete app.collaborators[collaborator]; - return dao.updateApp(app.id, app) - .tap(() => logger.info({ appId : app.id, collaborator }, "removed collaborator")); - }); - }, - - - addCollaborator({email, app, collaborator}) - { - return api._findApp({email, app}, 'Owner', `Must be owner to add collaborator`).then(app => { - alreadyExistsMsg(!(collaborator in app.collaborators), `The given account is already a collaborator for this app.`); - - - return dao.userByEmail(collaborator).then(a => { - - notFound(a, `The specified e-mail address doesn't represent a registered user`); - - app.collaborators[collaborator] = { - "permission": "Collaborator" - }; - return dao.updateApp(app.id, app).then(v => true) - .tap(() => logger.info({ appId : app.id, collaborator }, "added 
collaborator")); - }); - }); - }, + const existingPackage = t.package; + + // check to make sure that it is not already promoted + if (existingPackage) { + alreadyExistsMsg( + existingPackage.packageHash !== pkg.packageHash, + `Deployment ${params.deployment}:${ + pkg.label + } has already been promoted to ${params.to}:${ + existingPackage.label + }.` + ); + } - /** + // rollout property should not be carried forward on promotion + const { + isDisabled = pkg.isDisabled, + isMandatory = pkg.isMandatory, + rollout, // = pkg.rollout, + appVersion = pkg.appVersion, + description = pkg.description + } = excludeNull(params); + + return dao + .addPackage(t.key, { + packageHash: pkg.packageHash, + isDisabled, + isMandatory, + rollout, + appVersion, + uploadTime: Date.now(), + description, + releasedBy: params.email, + releaseMethod: "Promote", + originalLabel: pkg.label, + originalDeployment: params.deployment, + blobUrl: pkg.blobUrl, + manifestBlobUrl: pkg.manifestBlobUrl, + size: pkg.size, + label: "v" + (t.history_ ? t.history_.length + 1 : 1) + }) + .tap(() => { + logger.info( + { + appId: app.id, + fromDeployment: params.deployment, + toDeployment: params.to, + originalLabel: pkg.label + }, + "promoted deployment" + ); + }); + }); + }); + }, + + async historyDeployment({ app, deployment, email }) { + const capp = await api.findApp({ app, deployment, email }); + const all = await dao.history(capp.id, deployment); + const map = all.map(toJSON); + //TODO -make less worse. + for (let i = map.length - 1; i >= 0; --i) { + delete map[i].created_; + } + logger.info( + { appId: capp.id, deployment: deployment }, + "fetched deployment history" + ); + return map; + }, + + updateDeployment(params) { + return api + .findApp(params) + .then(app => + dao.deploymentByApp(app.id, params.deployment).then(deployment => { + notFound(deployment, `Deployment not found '${params.deployment}'`); + notFound(deployment.package, `Deployment has no releases.`); + + const pkg = deployment.package; + const { + isDisabled = pkg.isDisabled, + isMandatory = pkg.isMandatory, + rollout = pkg.rollout, + appVersion = pkg.appVersion, + description = pkg.description + } = excludeNull(params); + + invalidRequest( + !( + params.rollout != null && + (pkg.rollout != null && params.rollout < pkg.rollout) + ), + `Can not set rollout below existing rollout ${pkg.rollout}` + ); + + const npkg = { + isDisabled, + isMandatory, + rollout, + appVersion, + description + }; + + return dao + .updatePackage(deployment.key, npkg) + .tap(() => + logger.info( + { appId: app.id, deployment: params.deployment }, + "updated deployment" + ) + ); + }) + ) + .then(toJSON); + }, + + addDeployment({ email, app, name }) { + return api + ._findApp( + { + email, + app + }, + "Any", + `Do not have permission to add deployment to '${app}'.` + ) + .then(app => { + alreadyExists(!hasDeploymentName(app, name), name, `deployment`); + return dao + .addDeployment(app.id, name, _newDeployment(name)) + .tap(() => + logger.info( + { appId: app.id, deployment: name }, + "added deployment" + ) + ); + }); + }, + + removeCollaborator({ email, app, collaborator }) { + return api + ._findApp( + { email, app }, + "Owner", + `Must be owner to remove a collaborator` + ) + .then(app => { + notAuthorized( + app.collaborators[collaborator].permission !== "Owner", + `Cannot remove the owner of the app from collaborator list.` + ); + notFound( + email in app.collaborators, + `The given account is not a collaborator for this app.` + ); + + delete app.collaborators[collaborator]; + 
return dao + .updateApp(app.id, app) + .tap(() => + logger.info( + { appId: app.id, collaborator }, + "removed collaborator" + ) + ); + }); + }, + + addCollaborator({ email, app, collaborator }) { + return api + ._findApp({ email, app }, "Owner", `Must be owner to add collaborator`) + .then(app => { + alreadyExistsMsg( + !(collaborator in app.collaborators), + `The given account is already a collaborator for this app.` + ); + + return dao.userByEmail(collaborator).then(a => { + notFound( + a, + `The specified e-mail address doesn't represent a registered user` + ); + + app.collaborators[collaborator] = { + permission: "Collaborator" + }; + return dao + .updateApp(app.id, app) + .then(v => true) + .tap(() => + logger.info( + { appId: app.id, collaborator }, + "added collaborator" + ) + ); + }); + }); + }, + + /** * { "package": { "description": "", @@ -356,156 +495,211 @@ export default (options, dao, upload, download, logger) => { * param packageInfo */ - upload(vals){ - const { - app, - email, - deployment = 'Staging', - downloadUrl = '', - packageInfo: { - description = '', - isDisabled = false, label, isMandatory = false, rollout = 100, appVersion = '1.0.0' + upload(vals) { + const { + app, + email, + deployment = "Staging", + downloadUrl = "", + packageInfo: { + description = "", + isDisabled = false, + label, + isMandatory = false, + rollout = 100, + appVersion = "1.0.0" + } + } = vals; + + return api.findApp({ email, app }).then(_app => { + notFound( + hasDeploymentName(_app, deployment), + `Not a valid deployment '${deployment}' for app '${app}'` + ); + const zip = isZip("", vals.package); + + return dao + .deploymentByApp(_app.id, deployment) + .then(async deployments => { + alreadyExistsMsg( + packageContainsChanges(deployments, vals.package), + "No changes detected in uploaded content for this deployment." + ); + + //noinspection JSUnresolvedVariable + const pkg = { + description, + isDisabled, + isMandatory, + rollout, + appVersion, + releaseMethod: "Upload", + uploadTime: Date.now(), + label: + label || + "v" + + (deployments.history_ ? deployments.history_.length + 1 : 1), + releasedBy: email + }; + + if (zip) { + // Generate manifest to get the packageHash + const { blobUrl } = await generate(vals.package).then( + manifest => { + pkg.packageHash = manifestHash(manifest); + return upload(toBuffer(manifest)); } - } = vals; - - return api.findApp({email, app}).then(_app => { - - notFound(hasDeploymentName(_app, deployment), `Not a valid deployment '${deployment}' for app '${app}'`); - const zip = isZip('', vals.package); - - return dao.deploymentByApp(_app.id, deployment).then(async deployments => { - - alreadyExistsMsg(packageContainsChanges(deployments, vals.package), 'No changes detected in uploaded content for this deployment.'); - - //noinspection JSUnresolvedVariable - const pkg = { - description, isDisabled, isMandatory, rollout, appVersion, - releaseMethod: "Upload", - uploadTime: Date.now(), - label: label || "v" + (deployments.history_ ? 
deployments.history_.length + 1 : 1), - releasedBy: email - }; - - if (zip) { - // Generate manifest to get the packageHash - const {blobUrl} = await generate(vals.package) - .then((manifest) => { - pkg.packageHash = manifestHash(manifest); - return upload(toBuffer(manifest)); - }); - pkg.manifestBlobUrl = blobUrl; - } - return upload(vals.package, pkg.packageHash) - .then(resp => { - return dao.addPackage(deployments.key, Object.assign({}, pkg, resp)) - .tap(() => { - logger.info({ - appId : _app.id, - deployment, - releasedBy : email, - label : pkg.label, - appVersion, - }, "package uploaded") - }); - }); + ); + pkg.manifestBlobUrl = blobUrl; + } + return upload(vals.package, pkg.packageHash).then(resp => { + return dao + .addPackage(deployments.key, Object.assign({}, pkg, resp)) + .tap(() => { + logger.info( + { + appId: _app.id, + deployment, + releasedBy: email, + label: pkg.label, + appVersion + }, + "package uploaded" + ); }); - }) - }, - - clearHistory(params){ - return api._findApp(params, 'Owner', `Must be owner to clear history`) - .then(app => { - return dao.clearHistory(app.id, params.deployment) - .tap(() => logger.info({ appId : app.id, deployment : params.deployment }, "history cleared")); + }); + }); + }); + }, + + clearHistory(params) { + return api + ._findApp(params, "Owner", `Must be owner to clear history`) + .then(app => { + return dao + .clearHistory(app.id, params.deployment) + .tap(() => + logger.info( + { appId: app.id, deployment: params.deployment }, + "history cleared" + ) + ); + }); + }, + + metrics(params) { + return api.findApp(params).then(app => { + notFound( + hasDeploymentName(app, params.deployment), + params.deployment, + "deployment" + ); + return dao.deploymentByApp(app.id, params.deployment).then(deployment => + dao.metrics(deployment.key).then((metrics = []) => { + const { label } = deployment.package || {}; + // "DeploymentSucceeded" | "DeploymentFailed" | "Downloaded"; + + logger.info({ deployment: params.deployment }, "fetched metrics"); + + return metrics.reduce((obj, val) => { + const key = val.label || val.appversion; + const ret = + obj[key] || + (obj[key] = { + active: 0, + downloaded: 0, + installed: 0, + failed: 0 }); - }, - - - metrics(params) - { - return api.findApp(params).then(app => { - - notFound(hasDeploymentName(app, params.deployment), params.deployment, 'deployment'); - return dao.deploymentByApp(app.id, params.deployment).then((deployment) => dao.metrics(deployment.key).then((metrics = []) => { - - const {label} = deployment.package || {}; - // "DeploymentSucceeded" | "DeploymentFailed" | "Downloaded"; - - logger.info({ deployment : params.deployment }, 'fetched metrics'); - - return metrics.reduce((obj, val) => { - const key = val.label || val.appversion; - const ret = obj[key] || (obj[key] = { - active: 0, - downloaded: 0, - installed: 0, - failed: 0 - }); - switch (val.status) { - case 'DeploymentSucceeded': - ret.active++; - if (label === val.label) { - //pervious deployment is no longer active. - /* obj[val.previouslabelorappversion] || (obj[val.previouslabelorappversion] = { + switch (val.status) { + case "DeploymentSucceeded": + ret.active++; + if (label === val.label) { + //pervious deployment is no longer active. 
+ /* obj[val.previouslabelorappversion] || (obj[val.previouslabelorappversion] = { active: 0, downloaded: 0, installed: 0, failed: 0 });*/ - if (obj[val.previouslabelorappversion]) - obj[val.previouslabelorappversion].active--; - } - ret.installed++; - break; - case 'DeploymentFailed': - ret.failed++; - break; - case 'Downloaded': - ret.downloaded++; - break; - - } - return obj; - }, {}); - })); - }); - }, - - rollback(params) - { - let appId = null; - return api.findApp(params).then(app => { - appId = app.id; - if (params.label) { - return dao.historyLabel(app.id, params.deployment, params.label).then(rollto => dao.deploymentByApp(app.id, params.deployment).then(deployment => ({ - rollto, - deployment - }))); - } else { - return dao.deploymentByApp(app.id, params.deployment) - .then(deployment => deployment.history_ && dao.packageById(deployment.history_[1]).then(rollto => ({ - deployment, - rollto - }))); - } - }).then(({rollto, deployment}) => { - notFound(rollto, `Cannot perform rollback because there are no releases on this deployment.`); - const {history_} = deployment; - const dpkg = deployment.package; - const pkg = Object.assign({}, rollto, { - uploadTime: Date.now(), - rollout: 100, - releasedBy: params.email, - releaseMethod: "Rollback", - originalLabel: rollto.label, - label: `v${history_.length + 1}` - }); - return dao.addPackage(deployment.key, pkg).then(v => pkg) - .tap(() => logger.info({ appId, deployment : params.deployment, originalLabel : rollto.label }, 'rolled back package')); - }); - - } - }); - + if (obj[val.previouslabelorappversion]) + obj[val.previouslabelorappversion].active--; + } + ret.installed++; + break; + case "DeploymentFailed": + ret.failed++; + break; + case "Downloaded": + ret.downloaded++; + break; + } + return obj; + }, {}); + }) + ); + }); + }, + + rollback(params) { + let appId = null; + return api + .findApp(params) + .then(app => { + appId = app.id; + if (params.label) { + return dao + .historyLabel(app.id, params.deployment, params.label) + .then(rollto => + dao + .deploymentByApp(app.id, params.deployment) + .then(deployment => ({ + rollto, + deployment + })) + ); + } else { + return dao.deploymentByApp(app.id, params.deployment).then( + deployment => + deployment.history_ && + dao.packageById(deployment.history_[1]).then(rollto => ({ + deployment, + rollto + })) + ); + } + }) + .then(({ rollto, deployment }) => { + notFound( + rollto, + `Cannot perform rollback because there are no releases on this deployment.` + ); + const { history_ } = deployment; + const dpkg = deployment.package; + const pkg = Object.assign({}, rollto, { + uploadTime: Date.now(), + rollout: 100, + releasedBy: params.email, + releaseMethod: "Rollback", + originalLabel: rollto.label, + label: `v${history_.length + 1}` + }); + delete pkg["id_"]; + delete pkg["created_"]; + return dao + .addPackage(deployment.key, pkg) + .then(v => pkg) + .tap(() => + logger.info( + { + appId, + deployment: params.deployment, + originalLabel: rollto.label + }, + "rolled back package" + ) + ); + }); + } + }); }; - diff --git a/electrode-ota-server-test-support/src/init-maria-dao.js b/electrode-ota-server-test-support/src/init-maria-dao.js new file mode 100644 index 00000000..8d3f93b0 --- /dev/null +++ b/electrode-ota-server-test-support/src/init-maria-dao.js @@ -0,0 +1,40 @@ +import { + daoDriver, + createDatabaseForTest +} from "electrode-ota-server-dao-mariadb"; +import { daoFactory } from "electrode-ota-server-dao-factory"; + +let client; +const mockRegister = (driver, options, callback) => 
{ + callback(); +}; +const mockLogger = { info: () => {}, error: () => {} }; + +export const shutdownMaria = async () => { + if (client) { + await client.closeAsync(); + client = null; + } +}; + +export default async (options = {}) => { + try { + if (client != null) { + throw new Error("shutdown was not called"); + } + const config = { + host: "localhost", + port: 3306, + db: "ota_db_test", + user: "root", + password: "" + }; + await createDatabaseForTest(config); + + const driver = await daoDriver(config, mockLogger); + return daoFactory({}, driver, mockLogger); + } catch (e) { + console.trace(e); + throw e; + } +}; diff --git a/package.json b/package.json index 78476610..3c80ca19 100644 --- a/package.json +++ b/package.json @@ -10,11 +10,15 @@ "homepage": "https://github.com/electrode-io/electrode-ota-server", "main": "index.js", "scripts": { - "rebuild": "lerna clean --yes && lerna bootstrap --include-filtered-dependencies && yarn run relink ", + "rebuild": + "lerna clean --yes && lerna bootstrap --include-filtered-dependencies && yarn run relink ", "relink": "lr-link", "lerna": "yarn run rebuild", - "test": "yarn run lerna && mocha --compilers js:./electrode-ota-server-util-dev/babelhook ./electrode-ota-server*/test/*-test.js", - "coverage": "./node_modules/.bin/nyc --sourceMap=false --reportDir=./coverage --instrument=false --all --include=./electrode-ota-server*/src/**/*.js mocha --compilers js:./electrode-ota-server-util-dev/babelhook-coverage ./electrode-ota-server*/test/*-test.js && nyc report --reporter=html" + "test": + "yarn run lerna && mocha --compilers js:./electrode-ota-server-util-dev/babelhook ./electrode-ota-server*/test/*-test.js", + "lint": "./node_modules/.bin/eslint ./**/*.js", + "coverage": + "./node_modules/.bin/nyc --sourceMap=false --reportDir=./coverage --instrument=false --all --include=./electrode-ota-server*/src/**/*.js mocha --compilers js:./electrode-ota-server-util-dev/babelhook-coverage ./electrode-ota-server*/test/*-test.js && nyc report --reporter=html" }, "bin": { "electrode-ota-server": "./index.js" @@ -22,13 +26,7 @@ "engines": { "node": "^6.6.0" }, - "keywords": [ - "code-push", - "ota", - "electrode", - "react-native", - "cordova" - ], + "keywords": ["code-push", "ota", "electrode", "react-native", "cordova"], "dependencies": { "lerna": "^2.0.0-rc.2", "lerna-relinker": "^1.2.2" @@ -36,6 +34,7 @@ "devDependencies": { "babel-cli": "^6.24.1", "babel-core": "^6.24.1", + "babel-eslint": "^8.1.2", "babel-plugin-istanbul": "^4.1.1", "babel-plugin-syntax-async-functions": "^6.13.0", "babel-plugin-transform-async-to-generator": "^6.24.1", @@ -45,6 +44,15 @@ "babel-preset-node6": "^11.0.0", "babel-register": "^6.24.1", "chai": "^3.5.0", + "eslint": "^4.14.0", + "eslint-config-standard": "^11.0.0-beta.0", + "eslint-config-walmart": "^2.2.0", + "eslint-plugin-filenames": "^1.2.0", + "eslint-plugin-import": "^2.8.0", + "eslint-plugin-node": "^5.2.1", + "eslint-plugin-prettier": "^2.4.0", + "eslint-plugin-promise": "^3.6.0", + "eslint-plugin-standard": "^3.0.1", "mocha": "^2.5.3", "nyc": "^10.2.0", "rimraf": "^2.5.4",