diff --git a/couchdb/repositories/_design/branch_by_group/views/branch_by_group/map.js b/couchdb/repositories/_design/branch_by_group/views/branch_by_group/map.js new file mode 100644 index 00000000..be4f9aa8 --- /dev/null +++ b/couchdb/repositories/_design/branch_by_group/views/branch_by_group/map.js @@ -0,0 +1,9 @@ +function (doc) { + if (doc.type !== 'branch' || doc.referenceDeleted) return + if (doc.head && typeof doc.head === 'string') { + var branchName = doc.head.split('/') + if (branchName[1]) { + emit([doc.repositoryId, branchName[1]]) + } + } +} diff --git a/jobs/create-subgroup-initial-branch.js b/jobs/create-initial-subgroup-branch.js similarity index 100% rename from jobs/create-subgroup-initial-branch.js rename to jobs/create-initial-subgroup-branch.js diff --git a/jobs/github-event/push.js b/jobs/github-event/push.js index c12e9129..196d9d57 100644 --- a/jobs/github-event/push.js +++ b/jobs/github-event/push.js @@ -5,9 +5,13 @@ const env = require('../../lib/env') const { updateRepoDoc } = require('../../lib/repository-docs') const updatedAt = require('../../lib/updated-at') const diff = require('../../lib/diff-package-json') +const diffGreenkeeperJson = require('../../lib/diff-greenkeeper-json') const deleteBranches = require('../../lib/delete-branches') const { maybeUpdatePaymentsJob } = require('../../lib/payments') -const { getBranchesToDelete } = require('../../lib/branches-to-delete') +const { + getDependencyBranchesToDelete, + getGroupBranchesToDelete +} = require('../../lib/branches-to-delete') const getConfig = require('../../lib/get-config') module.exports = async function (data) { @@ -53,7 +57,8 @@ module.exports = async function (data) { } } - if (_.isEqual(oldPkg, pkg)) { + // check if there are changes in package.json files or the greenkeeper config + if (_.isEqual(oldPkg, pkg) && _.isEqual(config, repoDoc.greenkeeper)) { await updateDoc(repositories, repository, repoDoc) return null } @@ -78,42 +83,55 @@ module.exports = async function
(data) { // do diff + getBranchesToDelete per file for each group // TODO: for Tuesday -> deleting a package.json needs to be detected!! - const branches = [] - Object.keys(pkg).forEach((path) => { - let groupName = null - if (config.groups) { - Object.keys(config.groups).map((group) => { - if (config.groups[group].packages.includes(path)) { - groupName = group - } - }) - } - const changes = diff(oldPkg[path], pkg[path], groupName) - branches.push(getBranchesToDelete(changes)) - }) - - /* - const changes = diff(oldPkg, pkg) - console.log('changes', changes) + const branches = await getDependencyBranchesForAllGroups({pkg, oldPkg, config, repositories, repositoryId}) + const configChanges = diffGreenkeeperJson(config, repoDoc.greenkeeper) + console.log('configChanges', configChanges) + console.log('dependencyChanges', branches) - const branches = getBranchesToDelete(changes) - */ - // console.log('branches to be deleted!!', branches) - // do this per group, if groups, else once - - // MONDAY CONTINUE HERE - - // TODO: config includes no groups - // console.log('config', config) + const groupBranchesToDelete = await getGroupBranchesToDelete({configChanges, repositories, repositoryId}) + console.log('branches in push', branches) + const allBranchesToDelete = branches.concat(groupBranchesToDelete) + const _branches = _.uniqWith(_.flattenDeep(allBranchesToDelete), _.isEqual) + console.log('allBranchesToDelete flattend&uniq', _branches) await Promise.mapSeries( - _.uniqWith(_.flatten(branches), _.isEqual), + _branches, deleteBranches.bind(null, { installationId: installation.id, fullName: repository.full_name, repositoryId }) ) + + if (configChanges.added.length || configChanges.modified.length) { + const relevantModifiedGroups = configChanges.modified.filter((group) => { + if (!_.isEmpty(_.difference(repoDoc.greenkeeper.groups[group].packages, config.groups[group].packages))) { + return true + } + }) + console.log('relevantModifiedGroups', relevantModifiedGroups) + const 
groupsToRecvieveInitialBranch = configChanges.added.concat(relevantModifiedGroups) + console.log('groupsToRecvieveInitialBranch', groupsToRecvieveInitialBranch) + if (_.isEmpty(groupsToRecvieveInitialBranch)) return + // create subgroup initial pr + return _(groupsToRecvieveInitialBranch) + .map(groupName => ({ + data: { + name: 'create-initial-subgroup-branch', + repositoryId, + accountId: repoDoc.accountId, + groupName + } + })) + .value() + } + + // do this per group, if groups, else once + + // MONDAY CONTINUE HERE + + // TODO: config includes no groups + // console.log('config', config) } function updateDoc (repositories, repository, repoDoc) { @@ -129,8 +147,6 @@ function updateDoc (repositories, repository, repoDoc) { ) } -// check for relevant files in all folders! -// TODO: currently we might just detect those files in the root directory function hasRelevantChanges (commits, files) { return _.some(files, file => { return _.some(['added', 'removed', 'modified'], changeType => { @@ -151,3 +167,23 @@ async function disableRepo ({ repositories, repoDoc, repository }) { return maybeUpdatePaymentsJob(repoDoc.accountId, repoDoc.private) } } + +async function getDependencyBranchesForAllGroups ({pkg, oldPkg, config, repositories, repositoryId}) { + return Promise.all(Object.keys(pkg).map(async (path) => { + let groupName = null + if (config.groups) { + Object.keys(config.groups).map((group) => { + if (config.groups[group].packages.includes(path)) { + groupName = group + } + }) + } + // this can only happen if a package.json was modified + const dependencyDiff = diff(oldPkg[path], pkg[path], groupName) + console.log('dependencyDiff', dependencyDiff) + if (!_.isEmpty(dependencyDiff)) { + return getDependencyBranchesToDelete({changes: dependencyDiff, repositories, repositoryId}) + } + return [] + })) +} diff --git a/lib/branches-to-delete.js b/lib/branches-to-delete.js index af48fd5e..b6dc5904 100644 --- a/lib/branches-to-delete.js +++ b/lib/branches-to-delete.js @@ 
-1,14 +1,20 @@ const _ = require('lodash') +const semver = require('semver') -function getBranchesToDelete (changes) { - let branches = [] +module.exports = { + getDependencyChanges, + getDependencyBranchesToDelete, + getGroupBranchesToDelete +} + +function getDependencyChanges (changes) { + console.log('changes in getDependencyChanges', changes) + let dependencyChanges = [] _.each(changes, (type, dependencyType) => { _.each(type, (dep, dependency) => { - console.log('dep', dep) - console.log('dep.change === added', dep.change === 'added') if (dep.change === 'added') return - branches.push( + dependencyChanges.push( Object.assign( { dependency, @@ -20,9 +26,74 @@ function getBranchesToDelete (changes) { }) }) - return branches + return dependencyChanges } +async function getDependencyBranchesToDelete ({changes, repositories, repositoryId}) { + const dependencyChanges = getDependencyChanges(changes) -module.exports = { - getBranchesToDelete + return Promise.all(dependencyChanges.map(async dependencyChange => { + return getSingleDependencyBranchesToDelete({changes: dependencyChange, repositories, repositoryId}) + })) +} + +async function getSingleDependencyBranchesToDelete ({changes, repositories, repositoryId}) { + console.log('changes in getSingleDependencyBranchesToDelete', changes) + const { change, after, dependency, dependencyType, groupName } = changes + + let branches = [] + if (change !== 'removed' && !semver.validRange(after)) return [] + + // TODO: this would return multiple branch docs, possibly for irrelevant groups, + // or if a group is specified, for irrelevant root level package.json + branches = _.map( + (await repositories.query('branch_by_dependency', { + key: [repositoryId, dependency, dependencyType], + include_docs: true + })).rows, + 'doc' + ) + console.log('branches in getSingleDependencyBranchesToDelete', branches) + return _(branches) + .filter( + branch => + // include branch if dependency was removed + change === 'removed' || + // include 
branch if update version satisfies branch version (branch is outdated) + semver.satisfies(branch.version, after) || + // include branch if is not satisfied, but later (eg. update is an out of range major update) + semver.ltr(branch.version, after) + ) + .filter( + branch => { + // if groupName is passed in, only include branches of that group + // branch.head = 'greenkeeper/${groupName}/${dependency}' + if (groupName) { + console.log('has groupName', groupName) + return branch.head.includes(`greenkeeper/${groupName}/`) + } else { + // If there's no groupName, only return branches that don’t belong to groups + return branch.head.includes(`greenkeeper/${dependency}`) + } + }) + .value() +} + +async function getGroupBranchesToDelete ({configChanges, repositories, repositoryId}) { + if (configChanges.removed.length || configChanges.modified.length) { + const groups = _.uniq(configChanges.removed.concat(configChanges.modified)) + // delete all branches for those groups + console.log('groups', groups) + return Promise.all(_.map(groups, async (group) => { + console.log('map über group', group) + return Promise.all(_.map( + (await repositories.query('branch_by_group', { + key: [repositoryId, group], + include_docs: true + })).rows, + 'doc' + )) + })) + } else { + return [] + } } diff --git a/lib/delete-branches.js b/lib/delete-branches.js index 055a0aac..7070951f 100644 --- a/lib/delete-branches.js +++ b/lib/delete-branches.js @@ -1,66 +1,26 @@ -const _ = require('lodash') -const semver = require('semver') - const dbs = require('../lib/dbs') const updatedAt = require('../lib/updated-at') const githubQueue = require('./github-queue') module.exports = async function ( { installationId, fullName, repositoryId }, - { change, after, dependency, dependencyType, groupName } + branch ) { - if (change !== 'removed' && !semver.validRange(after)) return const { repositories } = await dbs() - const [owner, repo] = fullName.split('/') + console.log('branch to be deleted', branch) + if 
(!branch) return + let referenceDeleted = false + try { + // TODO: check if modified + await githubQueue(installationId).write(github => github.gitdata.deleteReference({ + owner, + repo, + ref: `heads/${branch.head}` + })) + referenceDeleted = true + } catch (e) {} + updatedAt(Object.assign(branch, { referenceDeleted })) - // TODO: this would return multiple branch docs, possibly for irrelevant groups, - // or if a group is specified, for irrelevant root level package.json - const branches = _.map( - (await repositories.query('branch_by_dependency', { - key: [repositoryId, dependency, dependencyType], - include_docs: true - })).rows, - 'doc' - ) - - const branchDocs = await Promise.all( - _(branches) - .filter( - branch => - // include branch if dependency was removed - change === 'removed' || - // include branch if update version satisfies branch version (branch is outdated) - semver.satisfies(branch.version, after) || - // include branch if is not satisfied, but later (eg. update is an out of range major update) - semver.ltr(branch.version, after) - ) - .filter( - branch => { - // if groupName is passed in, only include branches of that group - // branch.head = 'greenkeeper/${groupName}/${dependency}' - if (groupName) { - return branch.head.includes(`greenkeeper/${groupName}/`) - } else { - // If there's no groupName, only return branches that don’t belong to groups - return branch.head.includes(`greenkeeper/${dependency}`) - } - }) - .map(async branch => { - let referenceDeleted = false - try { - // TODO: check if modified - await githubQueue(installationId).write(github => github.gitdata.deleteReference({ - owner, - repo, - ref: `heads/${branch.head}` - })) - referenceDeleted = true - } catch (e) {} - return updatedAt(Object.assign(branch, { referenceDeleted })) - }) - .value() - ) - - return repositories.bulkDocs(branchDocs) + return repositories.bulkDocs([branch]) } diff --git a/lib/diff-greenkeeper-json.js b/lib/diff-greenkeeper-json.js new file mode 100644 
index 00000000..30ede803 --- /dev/null +++ b/lib/diff-greenkeeper-json.js @@ -0,0 +1,16 @@ +const _ = require('lodash') + +module.exports = function (oldFile, newFile) { + const changes = {added: [], removed: [], modified: []} + if (!newFile || !oldFile) return changes + // new groups added + _.set(changes, 'added', _.difference(_.keys(newFile.groups), _.keys(oldFile.groups))) + // groups removed + _.set(changes, 'removed', _.difference(_.keys(oldFile.groups), _.keys(newFile.groups))) + // groups modified + _.set(changes, 'modified', _.compact(_.map(oldFile.groups, (group, key) => { + if (newFile.groups[key] && !_.isEqual(group.packages, newFile.groups[key].packages)) return key + }))) + + return changes +} diff --git a/test/jobs/github-event/push.js b/test/jobs/github-event/push.js index 43f762ad..3669f52f 100644 --- a/test/jobs/github-event/push.js +++ b/test/jobs/github-event/push.js @@ -971,14 +971,17 @@ describe('github-event push', async () => { /* TODO: Deletions: - - [ ] group deleted -> should delete all group’s branches - - [ ] file in group deleted -> should delete all group’s branches + - [x] group deleted -> should delete all group’s branches + - [x] file in group deleted -> should delete all group’s branches - [x] dependency in file in group deleted -> should delete all group’s branches Additions: - - [ ] group added -> should return create-initial-group-branch job - - [ ] file in group added -> should delete all group’s branches, create-initial-group-branch job - - [ ] dependency in file in group added -> nothing should happen except package.json update - Modifications ? 
+ - [x] group added -> should return create-initial-group-branch job + - [x] file in group added -> should delete all group’s branches, create-initial-group-branch job + - [x] dependency in file in group added -> + Modifications: + - [x] group renamed -> delete all branches & new initial subgroup branch + - [x] package.json renamed -> delete all branches & new initial subgroup branch + - [x] package.json moved to another group -> delete all branches & new initial subgroup branch */ test('monorepo: 2 package.jsons in 2 groups with existing branches (777)', async () => { const configFileContent = { @@ -1399,7 +1402,7 @@ describe('github-event push', async () => { expect(repo.headSha).toEqual('9049f1265b7d61be4a8904a9a27120d2064dab3b') }) - test.only('monorepo: file in group deleted with existing branches (888)', async () => { + test('monorepo: file in group deleted with existing branches (888)', async () => { const configFileContent = { groups: { frontend: { @@ -1590,6 +1593,1343 @@ describe('github-event push', async () => { const frontend = await repositories.get('888:branch:1234abca') expect(frontend.referenceDeleted).toBeTruthy() const backend = await repositories.get('888:branch:1234abcb') + expect(backend.referenceDeleted).toBeFalsy() + expect(repo.headSha).toEqual('9049f1265b7d61be4a8904a9a27120d2064dab3b') + }) + + test('monorepo: group deleted with existing branches (999)', async () => { + const configFileContent = { + groups: { + frontend: { + packages: [ + 'packages/frontend/package.json', + 'packages/lalalalala/package.json' + ] + }, + backend: { + packages: [ + 'packages/backend/package.json' + ] + } + } + } + + const newConfigFileContent = { + groups: { + frontend: { + packages: [ + 'packages/frontend/package.json', + 'packages/lalalalala/package.json' + ] + } + } + } + + const { repositories } = await dbs() + + await Promise.all([ + repositories.bulkDocs([ + { + _id: '999', + fullName: 'hans/monorepo', + accountId: '321', + enabled: true, + headSha: 
'hallo', + packages: { + 'packages/frontend/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }, + 'packages/backend/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }, + 'packages/lalalalala/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + } + }, + greenkeeper: configFileContent + }, + { + _id: '999:branch:1234abca', + type: 'branch', + sha: '1234abcd', + repositoryId: '999', + version: '0.9.1', + dependency: 'lodash', + dependencyType: 'dependencies', + head: 'greenkeeper/frontend/lodash-2.0.0' + }, + { + _id: '999:branch:1234abcb', + type: 'branch', + sha: '1234abcd', + repositoryId: '999', + version: '0.9.1', + dependency: 'lodash', + dependencyType: 'dependencies', + head: 'greenkeeper/backend/lodash-2.0.0' + } + ]) + ]) + + const githubPush = requireFresh(pathToWorker) + + nock('https://api.github.com') + .post('/installations/11/access_tokens') + .reply(200, { + token: 'secret' + }) + .get('/rate_limit') + .reply(200, {}) + .get('/repos/hans/monorepo/contents/greenkeeper.json') + .reply(200, { + type: 'file', + path: 'greenkeeper.json', + name: 'greenkeeper.json', + content: Buffer.from(JSON.stringify(newConfigFileContent)).toString('base64') + }) + .get('/repos/hans/monorepo/contents/packages/frontend/package.json') + .reply(200, { + path: 'packages/frontend/package.json', + name: 'package.json', + content: encodePkg({ + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }) + }) + .get('/repos/hans/monorepo/contents/packages/lalalalala/package.json') + .reply(200, { + path: 'packages/lalalalala/package.json', + name: 'package.json', + content: encodePkg({ + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }) + }) + .get('/repos/hans/monorepo/contents/packages/backend/package.json') + .reply(404, {}) + .delete('/repos/hans/monorepo/git/refs/heads/greenkeeper/backend/lodash-2.0.0') + .reply(200, {}) + 
.delete('/repos/hans/monorepo/git/refs/heads/greenkeeper/frontend/lodash-2.0.0') + .reply(200, () => { + // should not delete this one + expect(true).toBeFalsy() + return {} + }) + + const newJob = await githubPush({ + installation: { + id: 11 + }, + ref: 'refs/heads/master', + after: '9049f1265b7d61be4a8904a9a27120d2064dab3b', + head_commit: {}, + commits: [ + { + added: [], + removed: ['packages/backend/package.json'], + modified: ['greenkeeper.json'] + } + ], + repository: { + id: '999', + full_name: 'hans/monorepo', + name: 'test', + owner: { + login: 'hans' + }, + default_branch: 'master' + } + }) + + expect(newJob).toBeFalsy() + + const repo = await repositories.get('999') + const expectedFiles = { + 'package.json': [ 'packages/frontend/package.json', 'packages/lalalalala/package.json' ], + 'package-lock.json': [], + 'yarn.lock': [], + 'npm-shrinkwrap.json': [] + } + const expectedPackages = { + 'packages/frontend/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }, + 'packages/lalalalala/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + } + } + + expect(repo.files).toMatchObject(expectedFiles) + expect(repo.packages).toMatchObject(expectedPackages) + expect(repo.greenkeeper).toMatchObject(newConfigFileContent) + const frontend = await repositories.get('999:branch:1234abca') + expect(frontend.referenceDeleted).toBeFalsy() + const backend = await repositories.get('999:branch:1234abcb') + expect(backend.referenceDeleted).toBeTruthy() + expect(repo.headSha).toEqual('9049f1265b7d61be4a8904a9a27120d2064dab3b') + }) + + test('monorepo: group added (1111)', async () => { + const configFileContent = { + groups: { + frontend: { + packages: [ + 'packages/frontend/package.json', + 'packages/lalalalala/package.json' + ] + } + } + } + + const newConfigFileContent = { + groups: { + frontend: { + packages: [ + 'packages/frontend/package.json', + 'packages/lalalalala/package.json' + ] + }, + backend: { + packages: [ + 
'packages/backend/package.json' + ] + } + } + } + + const { repositories } = await dbs() + + await repositories.put( + { + _id: '1111', + fullName: 'hans/monorepo', + accountId: '321', + enabled: true, + headSha: 'hallo', + packages: { + 'packages/frontend/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }, + 'packages/lalalalala/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + } + }, + greenkeeper: configFileContent + } + ) + + const githubPush = requireFresh(pathToWorker) + + nock('https://api.github.com') + .post('/installations/11/access_tokens') + .reply(200, { + token: 'secret' + }) + .get('/rate_limit') + .reply(200, {}) + .get('/repos/hans/monorepo/contents/greenkeeper.json') + .reply(200, { + type: 'file', + path: 'greenkeeper.json', + name: 'greenkeeper.json', + content: Buffer.from(JSON.stringify(newConfigFileContent)).toString('base64') + }) + .get('/repos/hans/monorepo/contents/packages/frontend/package.json') + .reply(200, { + path: 'packages/frontend/package.json', + name: 'package.json', + content: encodePkg({ + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }) + }) + .get('/repos/hans/monorepo/contents/packages/lalalalala/package.json') + .reply(200, { + path: 'packages/lalalalala/package.json', + name: 'package.json', + content: encodePkg({ + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }) + }) + .get('/repos/hans/monorepo/contents/packages/backend/package.json') + .reply(200, { + path: 'packages/backend/package.json', + name: 'package.json', + content: encodePkg({ + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }) + }) + + const newJob = await githubPush({ + installation: { + id: 11 + }, + ref: 'refs/heads/master', + after: '9049f1265b7d61be4a8904a9a27120d2064dab3b', + head_commit: {}, + commits: [ + { + added: ['packages/backend/package.json'], + removed: [], + modified: ['greenkeeper.json'] + } + ], + repository: { + id: '1111', + full_name: 
'hans/monorepo', + name: 'test', + owner: { + login: 'hans' + }, + default_branch: 'master' + } + }) + + expect(newJob).toBeTruthy() + expect(newJob).toHaveLength(1) + const job = newJob[0].data + expect(job.name).toEqual('create-initial-subgroup-branch') + expect(job.accountId).toEqual('321') + expect(job.repositoryId).toEqual('1111') + + const repo = await repositories.get('1111') + const expectedFiles = { + 'package.json': [ 'packages/frontend/package.json', 'packages/lalalalala/package.json', 'packages/backend/package.json' ], + 'package-lock.json': [], + 'yarn.lock': [], + 'npm-shrinkwrap.json': [] + } + const expectedPackages = { + 'packages/frontend/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }, + 'packages/lalalalala/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }, + 'packages/backend/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + } + } + expect(repo.files).toMatchObject(expectedFiles) + expect(repo.packages).toMatchObject(expectedPackages) + expect(repo.greenkeeper).toMatchObject(newConfigFileContent) + expect(repo.headSha).toEqual('9049f1265b7d61be4a8904a9a27120d2064dab3b') + }) + + test('monorepo: file in group added with existing branches (1112)', async () => { + const configFileContent = { + groups: { + frontend: { + packages: [ + 'packages/frontend/package.json' + ] + }, + backend: { + packages: [ + 'packages/backend/package.json' + ] + } + } + } + + const newConfigFileContent = { + groups: { + frontend: { + packages: [ + 'packages/frontend/package.json', + 'packages/lalalalala/package.json' + ] + }, + backend: { + packages: [ + 'packages/backend/package.json' + ] + } + } + } + + const { repositories } = await dbs() + + await Promise.all([ + repositories.bulkDocs([ + { + _id: '1112', + fullName: 'hans/monorepo', + accountId: '321', + enabled: true, + headSha: 'hallo', + packages: { + 'packages/frontend/package.json': { + name: 'testpkg', + dependencies: 
{ + lodash: '^1.0.0' + } + }, + 'packages/backend/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + } + }, + greenkeeper: configFileContent + }, + { + _id: '1112:branch:1234abca', + type: 'branch', + sha: '1234abcd', + repositoryId: '1112', + version: '0.9.1', + dependency: 'lodash', + dependencyType: 'dependencies', + head: 'greenkeeper/frontend/lodash-2.0.0' + }, + { + _id: '1112:branch:1234abcb', + type: 'branch', + sha: '1234abcd', + repositoryId: '1112', + version: '0.9.1', + dependency: 'lodash', + dependencyType: 'dependencies', + head: 'greenkeeper/backend/lodash-2.0.0' + } + ]) + ]) + + const githubPush = requireFresh(pathToWorker) + + nock('https://api.github.com') + .post('/installations/11/access_tokens') + .reply(200, { + token: 'secret' + }) + .get('/rate_limit') + .reply(200, {}) + .get('/repos/hans/monorepo/contents/greenkeeper.json') + .reply(200, { + type: 'file', + path: 'greenkeeper.json', + name: 'greenkeeper.json', + content: Buffer.from(JSON.stringify(newConfigFileContent)).toString('base64') + }) + .get('/repos/hans/monorepo/contents/packages/frontend/package.json') + .reply(200, { + path: 'packages/frontend/package.json', + name: 'package.json', + content: encodePkg({ + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }) + }) + .get('/repos/hans/monorepo/contents/packages/lalalalala/package.json') + .reply(200, { + path: 'packages/lalalalala/package.json', + name: 'package.json', + content: encodePkg({ + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }) + }) + .get('/repos/hans/monorepo/contents/packages/backend/package.json') + .reply(200, { + path: 'packages/backend/package.json', + name: 'package.json', + content: encodePkg({ + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }) + }) + .delete('/repos/hans/monorepo/git/refs/heads/greenkeeper/frontend/lodash-2.0.0') + .reply(200, {}) + .delete('/repos/hans/monorepo/git/refs/heads/greenkeeper/backend/lodash-2.0.0') + 
.reply(200, () => { + // should not delete this one + expect(true).toBeFalsy() + return {} + }) + + const newJob = await githubPush({ + installation: { + id: 11 + }, + ref: 'refs/heads/master', + after: '9049f1265b7d61be4a8904a9a27120d2064dab3b', + head_commit: {}, + commits: [ + { + added: ['packages/lalalalala/package.json'], + removed: [], + modified: ['greenkeeper.json'] + } + ], + repository: { + id: '1112', + full_name: 'hans/monorepo', + name: 'test', + owner: { + login: 'hans' + }, + default_branch: 'master' + } + }) + + expect(newJob).toBeTruthy() + expect(newJob).toHaveLength(1) + const job = newJob[0].data + expect(job.name).toEqual('create-initial-subgroup-branch') + expect(job.accountId).toEqual('321') + expect(job.repositoryId).toEqual('1112') + + const repo = await repositories.get('1112') + const expectedFiles = { + 'package.json': [ 'packages/frontend/package.json', 'packages/lalalalala/package.json', 'packages/backend/package.json' ], + 'package-lock.json': [], + 'yarn.lock': [], + 'npm-shrinkwrap.json': [] + } + const expectedPackages = { + 'packages/frontend/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }, + 'packages/lalalalala/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }, + 'packages/backend/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + } + } + expect(repo.files).toMatchObject(expectedFiles) + expect(repo.packages).toMatchObject(expectedPackages) + expect(repo.greenkeeper).toMatchObject(newConfigFileContent) + const frontend = await repositories.get('1112:branch:1234abca') + expect(frontend.referenceDeleted).toBeTruthy() + const backend = await repositories.get('1112:branch:1234abcb') + expect(backend.referenceDeleted).toBeFalsy() + expect(repo.headSha).toEqual('9049f1265b7d61be4a8904a9a27120d2064dab3b') + }) + + test('monorepo: dependency in file in group added with existing branches (1113)', async () => { + const configFileContent = { + 
groups: { + frontend: { + packages: [ + 'packages/frontend/package.json' + ] + }, + backend: { + packages: [ + 'packages/backend/package.json' + ] + } + } + } + + const { repositories } = await dbs() + + await Promise.all([ + repositories.bulkDocs([ + { + _id: '1113', + fullName: 'hans/monorepo', + accountId: '321', + enabled: true, + headSha: 'hallo', + packages: { + 'packages/frontend/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }, + 'packages/backend/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + } + }, + greenkeeper: configFileContent + }, + { + _id: '1113:branch:1234abca', + type: 'branch', + sha: '1234abcd', + repositoryId: '1113', + version: '0.9.1', + dependency: 'lodash', + dependencyType: 'dependencies', + head: 'greenkeeper/frontend/lodash-2.0.0' + }, + { + _id: '1113:branch:1234abcb', + type: 'branch', + sha: '1234abcd', + repositoryId: '1113', + version: '0.9.1', + dependency: 'lodash', + dependencyType: 'dependencies', + head: 'greenkeeper/backend/lodash-2.0.0' + } + ]) + ]) + + const githubPush = requireFresh(pathToWorker) + + nock('https://api.github.com') + .post('/installations/11/access_tokens') + .reply(200, { + token: 'secret' + }) + .get('/rate_limit') + .reply(200, {}) + .get('/repos/hans/monorepo/contents/greenkeeper.json') + .reply(200, { + type: 'file', + path: 'greenkeeper.json', + name: 'greenkeeper.json', + content: Buffer.from(JSON.stringify(configFileContent)).toString('base64') + }) + .get('/repos/hans/monorepo/contents/packages/frontend/package.json') + .reply(200, { + path: 'packages/frontend/package.json', + name: 'package.json', + content: encodePkg({ + name: 'testpkg', + dependencies: { + lodash: '^1.0.0', + react: '1.0.1' + } + }) + }) + .get('/repos/hans/monorepo/contents/packages/backend/package.json') + .reply(200, { + path: 'packages/backend/package.json', + name: 'package.json', + content: encodePkg({ + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + 
} + }) + }) + .delete('/repos/hans/monorepo/git/refs/heads/greenkeeper/frontend/lodash-2.0.0') + .reply(200, () => { + // should not delete this one + expect(true).toBeFalsy() + return {} + }) + .delete('/repos/hans/monorepo/git/refs/heads/greenkeeper/backend/lodash-2.0.0') + .reply(200, () => { + // should not delete this one + expect(true).toBeFalsy() + return {} + }) + + const newJob = await githubPush({ + installation: { + id: 11 + }, + ref: 'refs/heads/master', + after: '9049f1265b7d61be4a8904a9a27120d2064dab3b', + head_commit: {}, + commits: [ + { + added: [], + removed: [], + modified: ['packages/frontend/package.json'] + } + ], + repository: { + id: '1113', + full_name: 'hans/monorepo', + name: 'test', + owner: { + login: 'hans' + }, + default_branch: 'master' + } + }) + + expect(newJob).toBeFalsy() + + const repo = await repositories.get('1113') + const expectedFiles = { + 'package.json': [ 'packages/frontend/package.json', 'packages/backend/package.json' ], + 'package-lock.json': [], + 'yarn.lock': [], + 'npm-shrinkwrap.json': [] + } + const expectedPackages = { + 'packages/frontend/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0', + react: '1.0.1' + } + }, + 'packages/backend/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + } + } + expect(repo.files).toMatchObject(expectedFiles) + expect(repo.packages).toMatchObject(expectedPackages) + expect(repo.greenkeeper).toMatchObject(configFileContent) + const frontend = await repositories.get('1113:branch:1234abca') + expect(frontend.referenceDeleted).toBeFalsy() + const backend = await repositories.get('1113:branch:1234abcb') + expect(backend.referenceDeleted).toBeFalsy() + expect(repo.headSha).toEqual('9049f1265b7d61be4a8904a9a27120d2064dab3b') + }) + + test('monorepo: group renamed with existing branches (1114)', async () => { + const configFileContent = { + groups: { + frontend: { + packages: [ + 'packages/frontend/package.json' + ] + }, + backend: { + 
packages: [ + 'packages/backend/package.json' + ] + } + } + } + + const newConfigFileContent = { + groups: { + frontend: { + packages: [ + 'packages/frontend/package.json' + ] + }, + newCoolName: { + packages: [ + 'packages/backend/package.json' + ] + } + } + } + + const { repositories } = await dbs() + + await Promise.all([ + repositories.bulkDocs([ + { + _id: '1114', + fullName: 'hans/monorepo', + accountId: '321', + enabled: true, + headSha: 'hallo', + packages: { + 'packages/frontend/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }, + 'packages/backend/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + } + }, + greenkeeper: configFileContent + }, + { + _id: '1114:branch:1234abca', + type: 'branch', + sha: '1234abcd', + repositoryId: '1114', + version: '0.9.1', + dependency: 'lodash', + dependencyType: 'dependencies', + head: 'greenkeeper/frontend/lodash-2.0.0' + }, + { + _id: '1114:branch:1234abcb', + type: 'branch', + sha: '1234abcd', + repositoryId: '1114', + version: '0.9.1', + dependency: 'lodash', + dependencyType: 'dependencies', + head: 'greenkeeper/backend/lodash-2.0.0' + } + ]) + ]) + + const githubPush = requireFresh(pathToWorker) + + nock('https://api.github.com') + .post('/installations/11/access_tokens') + .reply(200, { + token: 'secret' + }) + .get('/rate_limit') + .reply(200, {}) + .get('/repos/hans/monorepo/contents/greenkeeper.json') + .reply(200, { + type: 'file', + path: 'greenkeeper.json', + name: 'greenkeeper.json', + content: Buffer.from(JSON.stringify(newConfigFileContent)).toString('base64') + }) + .get('/repos/hans/monorepo/contents/packages/frontend/package.json') + .reply(200, { + path: 'packages/frontend/package.json', + name: 'package.json', + content: encodePkg({ + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }) + }) + .get('/repos/hans/monorepo/contents/packages/backend/package.json') + .reply(200, { + path: 'packages/backend/package.json', + name: 
'package.json', + content: encodePkg({ + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }) + }) + .delete('/repos/hans/monorepo/git/refs/heads/greenkeeper/backend/lodash-2.0.0') + .reply(200, {}) + .delete('/repos/hans/monorepo/git/refs/heads/greenkeeper/frontend/lodash-2.0.0') + .reply(200, () => { + // should not delete this one + expect(true).toBeFalsy() + return {} + }) + + const newJob = await githubPush({ + installation: { + id: 11 + }, + ref: 'refs/heads/master', + after: '9049f1265b7d61be4a8904a9a27120d2064dab3b', + head_commit: {}, + commits: [ + { + added: [], + removed: [], + modified: ['greenkeeper.json'] + } + ], + repository: { + id: '1114', + full_name: 'hans/monorepo', + name: 'test', + owner: { + login: 'hans' + }, + default_branch: 'master' + } + }) + + expect(newJob).toBeTruthy() + expect(newJob).toHaveLength(1) + const job = newJob[0].data + expect(job.name).toEqual('create-initial-subgroup-branch') + expect(job.accountId).toEqual('321') + expect(job.repositoryId).toEqual('1114') + + const repo = await repositories.get('1114') + const expectedFiles = { + 'package.json': [ 'packages/frontend/package.json', 'packages/backend/package.json' ], + 'package-lock.json': [], + 'yarn.lock': [], + 'npm-shrinkwrap.json': [] + } + const expectedPackages = { + 'packages/frontend/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }, + 'packages/backend/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + } + } + expect(repo.files).toMatchObject(expectedFiles) + expect(repo.packages).toMatchObject(expectedPackages) + expect(repo.greenkeeper).toMatchObject(newConfigFileContent) + const frontend = await repositories.get('1114:branch:1234abca') + expect(frontend.referenceDeleted).toBeFalsy() + const backend = await repositories.get('1114:branch:1234abcb') + expect(backend.referenceDeleted).toBeTruthy() + expect(repo.headSha).toEqual('9049f1265b7d61be4a8904a9a27120d2064dab3b') + }) + + 
test('monorepo: package.json renamed with existing branches (1115)', async () => { + const configFileContent = { + groups: { + frontend: { + packages: [ + 'packages/frontend/package.json' + ] + }, + backend: { + packages: [ + 'packages/app/package.json' + ] + } + } + } + + const newConfigFileContent = { + groups: { + frontend: { + packages: [ + 'packages/frontend/package.json' + ] + }, + backend: { + packages: [ + 'packages/backend/package.json' + ] + } + } + } + + const { repositories } = await dbs() + + await Promise.all([ + repositories.bulkDocs([ + { + _id: '1115', + fullName: 'hans/monorepo', + accountId: '321', + enabled: true, + headSha: 'hallo', + packages: { + 'packages/frontend/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }, + 'packages/app/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + } + }, + greenkeeper: configFileContent + }, + { + _id: '1115:branch:1234abca', + type: 'branch', + sha: '1234abcd', + repositoryId: '1115', + version: '0.9.1', + dependency: 'lodash', + dependencyType: 'dependencies', + head: 'greenkeeper/frontend/lodash-2.0.0' + }, + { + _id: '1115:branch:1234abcb', + type: 'branch', + sha: '1234abcd', + repositoryId: '1115', + version: '0.9.1', + dependency: 'lodash', + dependencyType: 'dependencies', + head: 'greenkeeper/backend/lodash-2.0.0' + } + ]) + ]) + + const githubPush = requireFresh(pathToWorker) + + nock('https://api.github.com') + .post('/installations/11/access_tokens') + .reply(200, { + token: 'secret' + }) + .get('/rate_limit') + .reply(200, {}) + .get('/repos/hans/monorepo/contents/greenkeeper.json') + .reply(200, { + type: 'file', + path: 'greenkeeper.json', + name: 'greenkeeper.json', + content: Buffer.from(JSON.stringify(newConfigFileContent)).toString('base64') + }) + .get('/repos/hans/monorepo/contents/packages/frontend/package.json') + .reply(200, { + path: 'packages/frontend/package.json', + name: 'package.json', + content: encodePkg({ + name: 
'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }) + }) + .get('/repos/hans/monorepo/contents/packages/backend/package.json') + .reply(200, { + path: 'packages/backend/package.json', + name: 'package.json', + content: encodePkg({ + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }) + }) + .delete('/repos/hans/monorepo/git/refs/heads/greenkeeper/backend/lodash-2.0.0') + .reply(200, {}) + .delete('/repos/hans/monorepo/git/refs/heads/greenkeeper/frontend/lodash-2.0.0') + .reply(200, () => { + // should not delete this one + expect(true).toBeFalsy() + return {} + }) + + const newJob = await githubPush({ + installation: { + id: 11 + }, + ref: 'refs/heads/master', + after: '9049f1265b7d61be4a8904a9a27120d2064dab3b', + head_commit: {}, + commits: [ + { + added: [], + removed: [], + modified: ['greenkeeper.json'] + } + ], + repository: { + id: '1115', + full_name: 'hans/monorepo', + name: 'test', + owner: { + login: 'hans' + }, + default_branch: 'master' + } + }) + + expect(newJob).toBeTruthy() + expect(newJob).toHaveLength(1) + const job = newJob[0].data + expect(job.name).toEqual('create-initial-subgroup-branch') + expect(job.accountId).toEqual('321') + expect(job.repositoryId).toEqual('1115') + + const repo = await repositories.get('1115') + const expectedFiles = { + 'package.json': [ 'packages/frontend/package.json', 'packages/backend/package.json' ], + 'package-lock.json': [], + 'yarn.lock': [], + 'npm-shrinkwrap.json': [] + } + const expectedPackages = { + 'packages/frontend/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }, + 'packages/backend/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + } + } + expect(repo.files).toMatchObject(expectedFiles) + expect(repo.packages).toMatchObject(expectedPackages) + expect(repo.greenkeeper).toMatchObject(newConfigFileContent) + const frontend = await repositories.get('1115:branch:1234abca') + expect(frontend.referenceDeleted).toBeFalsy() + const 
backend = await repositories.get('1115:branch:1234abcb') + expect(backend.referenceDeleted).toBeTruthy() + expect(repo.headSha).toEqual('9049f1265b7d61be4a8904a9a27120d2064dab3b') + }) + + test('monorepo: package.json moved to another group (1116)', async () => { + const configFileContent = { + groups: { + frontend: { + packages: [ + 'packages/frontend/package.json', + 'packages/app/package.json' + ] + }, + backend: { + packages: [ + 'packages/app/package.json' + ] + } + } + } + + const newConfigFileContent = { + groups: { + frontend: { + packages: [ + 'packages/frontend/package.json' + ] + }, + backend: { + packages: [ + 'packages/backend/package.json', + 'packages/app/package.json' + ] + } + } + } + + const { repositories } = await dbs() + + await Promise.all([ + repositories.bulkDocs([ + { + _id: '1116', + fullName: 'hans/monorepo', + accountId: '321', + enabled: true, + headSha: 'hallo', + packages: { + 'packages/frontend/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }, + 'packages/backend/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }, + 'packages/app/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + } + }, + greenkeeper: configFileContent + }, + { + _id: '1116:branch:1234abca', + type: 'branch', + sha: '1234abcd', + repositoryId: '1116', + version: '0.9.1', + dependency: 'lodash', + dependencyType: 'dependencies', + head: 'greenkeeper/frontend/lodash-2.0.0' + }, + { + _id: '1116:branch:1234abcb', + type: 'branch', + sha: '1234abcd', + repositoryId: '1116', + version: '0.9.1', + dependency: 'lodash', + dependencyType: 'dependencies', + head: 'greenkeeper/backend/lodash-2.0.0' + } + ]) + ]) + + const githubPush = requireFresh(pathToWorker) + + nock('https://api.github.com') + .post('/installations/11/access_tokens') + .reply(200, { + token: 'secret' + }) + .get('/rate_limit') + .reply(200, {}) + .get('/repos/hans/monorepo/contents/greenkeeper.json') + .reply(200, { + 
type: 'file', + path: 'greenkeeper.json', + name: 'greenkeeper.json', + content: Buffer.from(JSON.stringify(newConfigFileContent)).toString('base64') + }) + .get('/repos/hans/monorepo/contents/packages/frontend/package.json') + .reply(200, { + path: 'packages/frontend/package.json', + name: 'package.json', + content: encodePkg({ + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }) + }) + .get('/repos/hans/monorepo/contents/packages/app/package.json') + .reply(200, { + path: 'packages/app/package.json', + name: 'package.json', + content: encodePkg({ + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }) + }) + .get('/repos/hans/monorepo/contents/packages/backend/package.json') + .reply(200, { + path: 'packages/backend/package.json', + name: 'package.json', + content: encodePkg({ + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }) + }) + .delete('/repos/hans/monorepo/git/refs/heads/greenkeeper/backend/lodash-2.0.0') + .reply(200, {}) + .delete('/repos/hans/monorepo/git/refs/heads/greenkeeper/frontend/lodash-2.0.0') + .reply(200, {}) + + const newJob = await githubPush({ + installation: { + id: 11 + }, + ref: 'refs/heads/master', + after: '9049f1265b7d61be4a8904a9a27120d2064dab3b', + head_commit: {}, + commits: [ + { + added: [], + removed: [], + modified: ['greenkeeper.json'] + } + ], + repository: { + id: '1116', + full_name: 'hans/monorepo', + name: 'test', + owner: { + login: 'hans' + }, + default_branch: 'master' + } + }) + + expect(newJob).toBeTruthy() + expect(newJob).toHaveLength(1) + const job = newJob[0].data + expect(job.name).toEqual('create-initial-subgroup-branch') + expect(job.groupName).toEqual('backend') + expect(job.accountId).toEqual('321') + expect(job.repositoryId).toEqual('1116') + + const repo = await repositories.get('1116') + const expectedFiles = { + 'package.json': [ 'packages/frontend/package.json', 'packages/backend/package.json', 'packages/app/package.json' ], + 'package-lock.json': [], + 'yarn.lock': 
[], + 'npm-shrinkwrap.json': [] + } + const expectedPackages = { + 'packages/frontend/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }, + 'packages/app/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + }, + 'packages/backend/package.json': { + name: 'testpkg', + dependencies: { + lodash: '^1.0.0' + } + } + } + expect(repo.files).toMatchObject(expectedFiles) + expect(repo.packages).toMatchObject(expectedPackages) + expect(repo.greenkeeper).toMatchObject(newConfigFileContent) + const frontend = await repositories.get('1116:branch:1234abca') + expect(frontend.referenceDeleted).toBeTruthy() + const backend = await repositories.get('1116:branch:1234abcb') expect(backend.referenceDeleted).toBeTruthy() expect(repo.headSha).toEqual('9049f1265b7d61be4a8904a9a27120d2064dab3b') }) @@ -1600,7 +2940,14 @@ describe('github-event push', async () => { await removeIfExists(repositories, '444', '444A', '445', '445A', '446', '447', '448', '444:branch:1234abcd', '444:branch:1234abce', '444A:branch:1234abcd', '444A:branch:1234abce', '555', '666', '777', '777:branch:1234abcd', '777:branch:1234abce', '777:branch:1234abcf', '777:branch:1234abcg', '777A', '777A:branch:1234abca', '777A:branch:1234abcb', '777A:branch:1234abcc', - '888', '888:branch:1234abca', '888:branch:1234abcb') + '888', '888:branch:1234abca', '888:branch:1234abcb', + '999', '999:branch:1234abca', '999:branch:1234abcb', + '1111', + '1112', '1112:branch:1234abca', '1112:branch:1234abcb', + '1113', '1113:branch:1234abca', '1113:branch:1234abcb', + '1114', '1114:branch:1234abca', '1114:branch:1234abcb', + '1115', '1115:branch:1234abca', '1115:branch:1234abcb', + '1116', '1116:branch:1234abca', '1116:branch:1234abcb') await removeIfExists(payments, '123') }) }) diff --git a/test/lib/diff-greenkeeper-json.js b/test/lib/diff-greenkeeper-json.js new file mode 100644 index 00000000..5a7770b3 --- /dev/null +++ b/test/lib/diff-greenkeeper-json.js @@ -0,0 +1,210 @@ +const 
diff = require('../../lib/diff-greenkeeper-json') + +const oldFile = { + groups: { + frontend: { + packages: [ + 'packages/frontend/package.json', + 'packages/lalalalala/package.json' + ] + }, + backend: { + packages: [ + 'packages/backend/package.json' + ] + } + } +} + +test('no change', () => { + const newFile = { + groups: { + frontend: { + packages: [ + 'packages/frontend/package.json', + 'packages/lalalalala/package.json' + ] + }, + backend: { + packages: [ + 'packages/backend/package.json' + ] + } + } + } + const expected = { + removed: [], + added: [], + modified: [] + } + expect(diff(oldFile, newFile)).toMatchObject(expected) +}) + +test('removed a group', () => { + const newFile = { + groups: { + backend: { + packages: [ + 'packages/backend/package.json' + ] + } + } + } + + const expected = { + removed: ['frontend'], + added: [], + modified: [] + } + expect(diff(oldFile, newFile)).toMatchObject(expected) +}) + +test('removed 2 groups', () => { + const newFile = { + groups: { + soup: { + packages: [ + 'packages/soup/package.json' + ] + } + } + } + + const expected = { + removed: ['frontend', 'backend'], + added: ['soup'], + modified: [] + } + expect(diff(oldFile, newFile)).toMatchObject(expected) +}) + +test('added a group', () => { + const newFile = { + groups: { + frontend: { + packages: [ + 'packages/frontend/package.json', + 'packages/lalalalala/package.json' + ] + }, + backend: { + packages: [ + 'packages/backend/package.json' + ] + }, + pizza: { + packages: [ + 'packages/pizza/package.json' + ] + } + } + } + const expected = { + added: ['pizza'], + removed: [], + modified: [] + } + expect(diff(oldFile, newFile)).toMatchObject(expected) +}) + +test('added a package.json to a group', () => { + const newFile = { + groups: { + frontend: { + packages: [ + 'packages/frontend/package.json', + 'packages/lalalalala/package.json', + 'packages/pizza/package.json' + ] + }, + backend: { + packages: [ + 'packages/backend/package.json' + ] + } + } + } + const 
expected = { + modified: ['frontend'], + added: [], + removed: [] + } + expect(diff(oldFile, newFile)).toMatchObject(expected) +}) + +test('added and removed a package.json to a group', () => { + const newFile = { + groups: { + frontend: { + packages: [ + 'packages/frontend/package.json', + 'packages/pizza/package.json' + ] + }, + backend: { + packages: [ + 'packages/backend/package.json' + ] + } + } + } + const expected = { + modified: ['frontend'], + added: [], + removed: [] + } + expect(diff(oldFile, newFile)).toMatchObject(expected) +}) + +test('added a group and moved a package.json to another group', () => { + const newFile = { + groups: { + frontend: { + packages: [ + 'packages/frontend/package.json' + ] + }, + mobile: { + packages: [ + 'packages/mobile/package.json' + ] + }, + backend: { + packages: [ + 'packages/backend/package.json', + 'packages/lalalalala/package.json' + ] + } + } + } + const expected = { + modified: ['frontend', 'backend'], + added: ['mobile'], + removed: [] + } + expect(diff(oldFile, newFile)).toMatchObject(expected) +}) + +test('renamed a group', () => { + const newFile = { + groups: { + pizza: { + packages: [ + 'packages/frontend/package.json', + 'packages/lalalalala/package.json' + ] + }, + backend: { + packages: [ + 'packages/backend/package.json' + ] + } + } + } + const expected = { + added: ['pizza'], + removed: ['frontend'], + modified: [] + } + expect(diff(oldFile, newFile)).toMatchObject(expected) +})