diff --git a/jobs/create-initial-subgroup-branch.js b/jobs/create-initial-subgroup-branch.js
index b8071ae8..76229f6c 100644
--- a/jobs/create-initial-subgroup-branch.js
+++ b/jobs/create-initial-subgroup-branch.js
@@ -11,14 +11,13 @@
 const getConfig = require('../lib/get-config')
 const createBranch = require('../lib/create-branch')
 const { updateRepoDoc } = require('../lib/repository-docs')
 const githubQueue = require('../lib/github-queue')
-const { maybeUpdatePaymentsJob } = require('../lib/payments')
 const upsert = require('../lib/upsert')
 
 const registryUrl = env.NPM_REGISTRY
 
 // if we update dependencies find open PRs for that dependency and close the PRs by commit message
-module.exports = async function ({ repositoryId, groupname }) {
+module.exports = async function ({ repositoryId, groupName }) {
   const { installations, repositories, logs } = await dbs()
   const repoDoc = await repositories.get(repositoryId)
   const accountId = repoDoc.accountId
@@ -28,42 +27,36 @@ module.exports = async function ({ repositoryId, groupname }) {
 
   log.info('started')
 
-  if (repoDoc.fork && !repoDoc.hasIssues) { // we should allways check if issues are disabled and exit
-    log.warn('exited: Issues disabled on fork')
-    return
-  }
-
   await updateRepoDoc(installationId, repoDoc)
-
-  // Object.keys(repoDoc.packages).length > 0
-  if (!_.get(repoDoc, ['packages', 'package.json'])) {
-    log.warn('exited: No packages and package.json found')
+  const config = getConfig(repoDoc)
+  const pathsForGroup = config.groups[groupName].packages
+  if (_.isEmpty(pathsForGroup)) {
+    log.warn(`exited: No packages and package.json found for group: ${groupName}`)
     return
   }
-  await upsert(repositories, repoDoc._id, repoDoc)
 
-  const config = getConfig(repoDoc)
-  if (config.disabled) {
-    log.warn('exited: Greenkeeper is disabled for this repo in package.json')
+  const packageJsonFiles = _.get(repoDoc, ['packages'])
+  if (_.isEmpty(packageJsonFiles)) {
+    log.warn(`exited: No package.json files found`)
     return
   }
-  const pkg = _.get(repoDoc, ['packages', 'package.json']) // this is duplicated code (merge with L44)
-  if (!pkg) return
+  await upsert(repositories, repoDoc._id, repoDoc)
 
   const [owner, repo] = repoDoc.fullName.split('/')
-  await createDefaultLabel({ installationId, owner, repo, name: config.label })
-
   const registry = RegClient()
   const registryGet = promisify(registry.get.bind(registry))
   // get for all package.jsons in a group
   // every package should be updated to the newest version
-  const dependencyMeta = _.flatten(
-    ['dependencies', 'devDependencies', 'optionalDependencies'].map(type => {
-      return _.map(pkg[type], (version, name) => ({ name, version, type }))
-    })
-  )
-  log.info('dependencies found', {parsedDependencies: dependencyMeta, packageJson: pkg})
+  const dependencyMeta = _.uniqWith(_.flatten(pathsForGroup.map(path => {
+    return _.flatten(
+      ['dependencies', 'devDependencies', 'optionalDependencies'].map(type => {
+        return _.map(packageJsonFiles[path][type], (version, name) => ({ name, version, type }))
+      })
+    )
+  })), _.isEqual)
+
+  log.info('dependencies found', {parsedDependencies: dependencyMeta, packageJsonFiles: packageJsonFiles})
   let dependencies = await Promise.mapSeries(dependencyMeta, async dep => {
     try {
       dep.data = await registryGet(registryUrl + dep.name.replace('/', '%2F'), {
@@ -78,7 +71,10 @@ module.exports = async function ({ repositoryId, groupname }) {
     .filter(Boolean)
     .map(dependency => {
       let latest = _.get(dependency, 'data.dist-tags.latest')
-      if (_.includes(config.ignore, dependency.name)) {
+      if (
+        _.includes(config.ignore, dependency.name) ||
+        _.includes(config.groups[groupName].ignore, dependency.name)
+      ) {
         dependencyActionsLog[dependency.name] = 'ignored in config'
         return
       }
@@ -124,7 +120,6 @@ module.exports = async function ({ repositoryId, groupname }) {
     })
     .filter(Boolean)
     .value()
-  log.info('parsed dependency actions', {dependencyActionsLog})
 
   const ghRepo = await githubQueue(installationId).read(github => github.repos.get({ owner, repo })) // wrap in try/catch
 
@@ -132,13 +127,12 @@ module.exports = async function ({ repositoryId, groupname }) {
 
   const branch = ghRepo.default_branch
 
-  const newBranch = config.branchPrefix + 'initial' + `-${groupname}`
+  const newBranch = config.branchPrefix + 'initial' + `-${groupName}`
 
-  let badgeAlreadyAdded = false
   // create a transform loop for all the package.json paths and push into the transforms array below
-  const transforms = [
-    {
-      path: 'package.json',
+  const transforms = pathsForGroup.map(path => {
+    return {
+      path,
       message: 'chore(package): update dependencies',
       transform: oldPkg => {
         const oldPkgParsed = JSON.parse(oldPkg)
@@ -152,7 +146,7 @@ module.exports = async function ({ repositoryId, groupname }) {
         return inplace.toString()
       }
     }
-  ]
+  })
 
   const sha = await createBranch({ // try/catch
     installationId,
@@ -163,55 +157,18 @@ module.exports = async function ({ repositoryId, groupname }) {
     transforms
   })
 
-  if (!sha) {
-    // When there are no changes and the badge already exists we can enable right away
-    if (badgeAlreadyAdded) {
-      await upsert(repositories, repoDoc._id, { enabled: true })
-      log.info('Repository silently enabled')
-      return maybeUpdatePaymentsJob(accountId, repoDoc.private)
-    } else {
-      log.error('Could not create initial branch')
-      throw new Error('Could not create initial branch')
-    }
-  }
-
-  const depsUpdated = transforms[0].created
-  const travisModified = false
-  const badgeAdded = false
+  const depsUpdated = _.some(transforms, 'created')
+  if (!depsUpdated) return
 
   await upsert(repositories, `${repositoryId}:branch:${sha}`, {
     type: 'branch',
-    initial: true,
+    initial: false, // other flag?
     sha,
     base: branch,
     head: newBranch,
     processed: false,
-    depsUpdated,
-    travisModified,
-    badgeAdded
+    depsUpdated
   })
 
   log.success('success')
-
-  return {
-    delay: 30 * 60 * 1000,
-    data: {
-      name: 'initial-timeout-pr',
-      repositoryId,
-      accountId
-    }
-  }
-}
-
-async function createDefaultLabel ({ installationId, name, owner, repo }) {
-  if (name !== false) {
-    try {
-      await githubQueue(installationId).write(github => github.issues.createLabel({
-        owner,
-        repo,
-        name,
-        color: '00c775'
-      }))
-    } catch (e) {}
-  }
 }
diff --git a/jobs/github-event/push.js b/jobs/github-event/push.js
index 196d9d57..ac6f725c 100644
--- a/jobs/github-event/push.js
+++ b/jobs/github-event/push.js
@@ -65,9 +65,6 @@ module.exports = async function (data) {
 
   await updateDoc(repositories, repository, repoDoc)
 
-  console.log('oldPkg', oldPkg)
-  console.log('pkg', pkg)
-
   if (!oldPkg) {
     return {
       data: {
@@ -85,14 +82,10 @@ module.exports = async function (data) {
 
   // TODO: for Tuesday -> deleting a package.json needs to be detected!!
   const branches = await getDependencyBranchesForAllGroups({pkg, oldPkg, config, repositories, repositoryId})
   const configChanges = diffGreenkeeperJson(config, repoDoc.greenkeeper)
-  console.log('configChanges', configChanges)
-  console.log('dependencyChanges', branches)
   const groupBranchesToDelete = await getGroupBranchesToDelete({configChanges, repositories, repositoryId})
-  console.log('branches in push', branches)
   const allBranchesToDelete = branches.concat(groupBranchesToDelete)
   const _branches = _.uniqWith(_.flattenDeep(allBranchesToDelete), _.isEqual)
-  console.log('allBranchesToDelete flattend&uniq', _branches)
 
   await Promise.mapSeries(
     _branches,
@@ -109,9 +102,7 @@ module.exports = async function (data) {
       return true
     }
   })
-  console.log('relevantModifiedGroups', relevantModifiedGroups)
   const groupsToRecvieveInitialBranch = configChanges.added.concat(relevantModifiedGroups)
-  console.log('groupsToRecvieveInitialBranch', groupsToRecvieveInitialBranch)
   if (_.isEmpty(groupsToRecvieveInitialBranch)) return
   // create subgroup initial pr
   return _(groupsToRecvieveInitialBranch)
@@ -160,7 +151,6 @@ function hasRelevantChanges (commits, files) {
 }
 
 async function disableRepo ({ repositories, repoDoc, repository }) {
-  // console.log('disableRepo')
   repoDoc.enabled = false
   await updateDoc(repositories, repository, repoDoc)
   if (!env.IS_ENTERPRISE) {
@@ -180,7 +170,6 @@ async function getDependencyBranchesForAllGroups ({pkg, oldPkg, config, reposito
     }
     // this can only happen if a package.json was modified
     const dependencyDiff = diff(oldPkg[path], pkg[path], groupName)
-    console.log('dependencyDiff', dependencyDiff)
     if (!_.isEmpty(dependencyDiff)) {
       return getDependencyBranchesToDelete({changes: dependencyDiff, repositories, repositoryId})
     }
diff --git a/lib/branches-to-delete.js b/lib/branches-to-delete.js
index b6dc5904..f40d98e9 100644
--- a/lib/branches-to-delete.js
+++ b/lib/branches-to-delete.js
@@ -8,7 +8,6 @@ module.exports = {
 }
 
 function getDependencyChanges (changes) {
-  console.log('changes in getDependencyChanges', changes)
   let dependencyChanges = []
 
   _.each(changes, (type, dependencyType) => {
@@ -37,7 +36,6 @@ async function getDependencyBranchesToDelete ({changes, repositories, repository
 }
 
 async function getSingleDependencyBranchesToDelete ({changes, repositories, repositoryId}) {
-  console.log('changes in getSingleDependencyBranchesToDelete', changes)
   const { change, after, dependency, dependencyType, groupName } = changes
 
   let branches = []
@@ -52,7 +50,6 @@ async function getSingleDependencyBranchesToDelete ({changes, repositories, repo
       })).rows,
       'doc'
     )
-    console.log('branches in getSingleDependencyBranchesToDelete', branches)
    return _(branches)
      .filter(
        branch =>
@@ -68,7 +65,6 @@ async function getSingleDependencyBranchesToDelete ({changes, repositories, repo
        // if groupName is passed in, only include branches of that group
        // branch.head = 'greenkeeper/${groupName}/${dependency}'
        if (groupName) {
-          console.log('has groupName', groupName)
          return branch.head.includes(`greenkeeper/${groupName}/`)
        } else {
          // If there's no groupName, only return branches that don’t belong to groups
@@ -82,9 +78,7 @@ async function getGroupBranchesToDelete ({configChanges, repositories, repositor
   if (configChanges.removed.length || configChanges.modified.length) {
     const groups = _.uniq(configChanges.removed.concat(configChanges.modified))
     // delete all branches for those groups
-    console.log('groups', groups)
     return Promise.all(_.map(groups, async (group) => {
-      console.log('map über group', group)
       return Promise.all(_.map(
         (await repositories.query('branch_by_group', {
           key: [repositoryId, group],
diff --git a/lib/delete-branches.js b/lib/delete-branches.js
index 7070951f..35160e82 100644
--- a/lib/delete-branches.js
+++ b/lib/delete-branches.js
@@ -8,7 +8,6 @@ module.exports = async function (
 ) {
   const { repositories } = await dbs()
   const [owner, repo] = fullName.split('/')
-  console.log('branch to be deleted', branch)
   if (!branch) return
   let referenceDeleted = false
   try {
diff --git a/test/jobs/create-initial-subgroup-branch.js b/test/jobs/create-initial-subgroup-branch.js
new file mode 100644
index 00000000..7e9bb237
--- /dev/null
+++ b/test/jobs/create-initial-subgroup-branch.js
@@ -0,0 +1,162 @@
+const nock = require('nock')
+
+const dbs = require('../../lib/dbs')
+const removeIfExists = require('../helpers/remove-if-exists')
+// const { cleanCache } = require('../helpers/module-cache-helpers')
+
+nock.disableNetConnect()
+nock.enableNetConnect('localhost')
+
+describe('create initial subgroup branch', () => {
+  beforeEach(() => {
+    jest.resetModules()
+  })
+
+  beforeAll(async () => {
+    const { installations } = await dbs()
+
+    await installations.put({
+      _id: '123',
+      installation: 37,
+      plan: 'free'
+    })
+  })
+
+  test('create a subgroup pull request', async () => {
+    const { repositories } = await dbs()
+    const configFileContent = {
+      groups: {
+        frontend: {
+          packages: [
+            'packages/frontend/package.json',
+            'packages/lalalalala/package.json'
+          ]
+        },
+        backend: {
+          packages: [
+            'packages/backend/package.json'
+          ]
+        }
+      }
+    }
+    await repositories.put({
+      _id: '1111',
+      fullName: 'hans/monorepo',
+      accountId: '123',
+      enabled: true,
+      headSha: 'hallo',
+      packages: {
+        'packages/frontend/package.json': {
+          name: 'testpkg',
+          dependencies: {
+            lodash: '^1.0.0'
+          }
+        },
+        'packages/backend/package.json': {
+          name: 'testpkg',
+          dependencies: {
+            lodash: '^1.0.0'
+          }
+        },
+        'packages/lalalalala/package.json': {
+          name: 'testpkg',
+          dependencies: {
+            lodash: '^1.0.0'
+          }
+        }
+      },
+      greenkeeper: configFileContent
+    })
+
+    // expect.assertions(10)
+
+    nock('https://api.github.com')
+      .post('/installations/37/access_tokens')
+      .reply(200, {
+        token: 'secret'
+      })
+      .get('/rate_limit')
+      .reply(200, {})
+      .get('/repos/hans/monorepo/contents/greenkeeper.json')
+      .reply(200, {
+        type: 'file',
+        path: 'greenkeeper.json',
+        name: 'greenkeeper.json',
+        content: Buffer.from(JSON.stringify(configFileContent)).toString('base64')
+      })
+      .get('/repos/hans/monorepo/contents/packages/frontend/package.json')
+      .reply(200, {
+        path: 'packages/frontend/package.json',
+        name: 'package.json',
+        content: encodePkg({
+          name: 'testpkg',
+          dependencies: {
+            lodash: '^1.0.0'
+          }
+        })
+      })
+      .get('/repos/hans/monorepo/contents/packages/lalalalala/package.json')
+      .reply(200, {
+        path: 'packages/lalalalala/package.json',
+        name: 'package.json',
+        content: encodePkg({
+          name: 'testpkg',
+          dependencies: {
+            lodash: '^1.0.0'
+          }
+        })
+      })
+      .get('/repos/hans/monorepo')
+      .reply(200, {
+        default_branch: 'master'
+      })
+
+    nock('https://registry.npmjs.org')
+      .get('/lodash')
+      .reply(200, {
+        'dist-tags': {
+          latest: '3.0.0-rc1'
+        },
+        versions: {
+          '2.0.0-rc1': true,
+          '2.0.0-rc2': true,
+          '2.0.0': true,
+          '3.0.0-rc1': true,
+          '1.0.0': true
+        }
+      })
+
+    // mock relative dependencies
+    jest.mock('../../lib/create-branch', () => ({ transforms }) => {
+      transforms.forEach(t => {
+        const newPkg = JSON.parse(
+          t.transform(JSON.stringify({ dependencies: { lodash: '^1.0.0' } }))
+        )
+        t.created = true
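+        // the mocked registry tags the prerelease 3.0.0-rc1 as `latest`, yet the
+        // transform is expected to resolve lodash to the highest stable version
+        // available in the mock (2.0.0), hence the ^2.0.0 assertion below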
+        expect(newPkg.dependencies['lodash']).toEqual('^2.0.0')
+      })
+
+      return '1234abcd'
+    })
+    const createInitialSubgroupBranch = require('../../jobs/create-initial-subgroup-branch')
+
+    const newJob = await createInitialSubgroupBranch({repositoryId: 1111, groupName: 'frontend'})
+    const newBranch = await repositories.get('1111:branch:1234abcd')
+
+    expect(newJob).toBeFalsy()
+    expect(newBranch.type).toEqual('branch')
+    // expect(newBranch.initial).toBeTruthy()
+  })
+
+  afterAll(async () => {
+    const { installations, repositories } = await dbs()
+    await Promise.all([
+      removeIfExists(installations, '123'),
+      removeIfExists(repositories, '1111', '1111:branch:1234abcd')
+    ])
+  })
+
+  function encodePkg (pkg) {
+    return Buffer.from(JSON.stringify(pkg)).toString('base64')
+  }
+})
diff --git a/test/jobs/github-event/push.js b/test/jobs/github-event/push.js
index 3669f52f..d7537101 100644
--- a/test/jobs/github-event/push.js
+++ b/test/jobs/github-event/push.js
@@ -210,7 +210,6 @@ describe('github-event push', async () => {
     expect(newJob).toBeFalsy()
 
     const repo = await repositories.get('555')
-    console.log('repoDoc', repo)
     expect(repo.files['package.json'].length).toBeGreaterThan(0)
     expect(repo.files['package-lock.json'].length).toBeGreaterThan(0)
     expect(repo.files['npm-shrinkwrap.json']).toHaveLength(0)
@@ -1152,7 +1151,6 @@ describe('github-event push', async () => {
     expect(newJob).toBeFalsy()
 
     const repo = await repositories.get('777')
-    console.log('repoDoc', repo)
     const expectedFiles = {
       'package.json': [ 'packages/frontend/package.json', 'packages/backend/package.json' ],
@@ -1362,7 +1360,6 @@ describe('github-event push', async () => {
     expect(newJob).toBeFalsy()
 
     const repo = await repositories.get('777A')
-    console.log('repoDoc', repo)
     const expectedFiles = {
       'package.json': [ 'packages/frontend/package.json', 'packages/backend/package.json', 'packages/lalalalala/package.json' ],