From 67ea12f707652286a727c066a581e53757310ace Mon Sep 17 00:00:00 2001 From: Will Norris Date: Tue, 30 Mar 2021 11:53:58 -0700 Subject: [PATCH] refactor configuration loading logic Currently, logic for loading configuration files is split between bin/repolinter.js (for loading URLs) and index.js (for local files, directories, and the default config). This commit refactors all configuration loading logic into a new lib/config.js module, which handles loading and validating config files of all types (existing parseConfig and validateConfig functions were moved as-is). This will make it simpler to extend config loading logic, for example to support inheriting rulesets (#21). This does have minor changes to the error messages returned by the CLI in some cases. It also attempts to parse all config files as JSON first and then YAML, regardless of file extensions (this was previously different for remote versus local config files). This does not change anything in repolinter's programmatic API. 
--- bin/repolinter.js | 45 +-------- index.js | 136 +++----------------------- lib/config.js | 199 ++++++++++++++++++++++++++++++++++++++ tests/lib/config_tests.js | 103 ++++++++++++++++++++ tests/lib/default.json | 16 +++ tests/lib/repolinter.yaml | 2 + 6 files changed, 335 insertions(+), 166 deletions(-) create mode 100644 lib/config.js create mode 100644 tests/lib/config_tests.js create mode 100644 tests/lib/default.json create mode 100644 tests/lib/repolinter.yaml diff --git a/bin/repolinter.js b/bin/repolinter.js index 462eda82..71beabae 100755 --- a/bin/repolinter.js +++ b/bin/repolinter.js @@ -6,10 +6,8 @@ const repolinter = require('..') const rimraf = require('rimraf') const git = require('simple-git/promise')() /** @type {any} */ -const fetch = require('node-fetch') const fs = require('fs') const os = require('os') -const yaml = require('js-yaml') // eslint-disable-next-line no-unused-expressions require('yargs') @@ -65,47 +63,6 @@ require('yargs') }) }, async (/** @type {any} */ argv) => { - let rulesetParsed = null - let jsonerror - let yamlerror - // resolve the ruleset if a url is specified - if (argv.rulesetUrl) { - const res = await fetch(argv.rulesetUrl) - if (!res.ok) { - console.error( - `Failed to fetch config from ${argv.rulesetUrl} with status code ${res.status}` - ) - process.exitCode = 1 - return - } - const data = await res.text() - // attempt to parse as JSON - try { - rulesetParsed = JSON.parse(data) - } catch (e) { - jsonerror = e - } - // attempt to parse as YAML - if (!rulesetParsed) { - try { - rulesetParsed = yaml.safeLoad(data) - } catch (e) { - yamlerror = e - } - } - // throw an error if neither worked - if (!rulesetParsed) { - console.log(`Failed to fetch ruleset from URL ${argv.rulesetUrl}:`) - console.log( - `\tJSON failed with error ${jsonerror && jsonerror.toString()}` - ) - console.log( - `\tYAML failed with error ${yamlerror && yamlerror.toString()}` - ) - process.exitCode = 1 - return - } - } let tmpDir = null // 
temporarily clone a git repo to lint if (argv.git) { @@ -124,7 +81,7 @@ require('yargs') const output = await repolinter.lint( tmpDir || path.resolve(process.cwd(), argv.directory), argv.allowPaths, - rulesetParsed || argv.rulesetFile, + argv.rulesetUrl || argv.rulesetFile, argv.dryRun ) // create the output diff --git a/index.js b/index.js index 26d00408..d68d4157 100644 --- a/index.js +++ b/index.js @@ -3,13 +3,8 @@ /** @module repolinter */ -const jsonfile = require('jsonfile') -const Ajv = require('ajv') const path = require('path') -const findConfig = require('find-config') -const fs = require('fs') -const yaml = require('js-yaml') -// eslint-disable-next-line no-unused-vars +const config = require('./lib/config') const Result = require('./lib/result') const RuleInfo = require('./lib/ruleinfo') const FormatResult = require('./lib/formatresult') @@ -126,26 +121,18 @@ async function lint( let rulesetPath = null if (typeof ruleset === 'string') { - rulesetPath = path.resolve(targetDir, ruleset) + if (config.isAbsoluteURL(ruleset)) { + rulesetPath = ruleset + } else { + rulesetPath = path.resolve(targetDir, ruleset) + } } else if (!ruleset) { - rulesetPath = - findConfig('repolint.json', { cwd: targetDir }) || - findConfig('repolint.yaml', { cwd: targetDir }) || - findConfig('repolint.yml', { cwd: targetDir }) || - findConfig('repolinter.json', { cwd: targetDir }) || - findConfig('repolinter.yaml', { cwd: targetDir }) || - findConfig('repolinter.yml', { cwd: targetDir }) || - path.join(__dirname, 'rulesets/default.json') + rulesetPath = config.findConfig(targetDir) } + if (rulesetPath !== null) { - const extension = path.extname(rulesetPath) try { - const file = await fs.promises.readFile(rulesetPath, 'utf-8') - if (extension === '.yaml' || extension === '.yml') { - ruleset = yaml.safeLoad(file) - } else { - ruleset = JSON.parse(file) - } + ruleset = await config.loadConfig(rulesetPath) } catch (e) { return { params: { @@ -164,8 +151,9 @@ async function lint( } } 
} + // validate config - const val = await validateConfig(ruleset) + const val = await config.validateConfig(ruleset) if (!val.passed) { return { params: { @@ -184,7 +172,7 @@ async function lint( } } // parse it - const configParsed = parseConfig(ruleset) + const configParsed = config.parseConfig(ruleset) // determine axiom targets /** @ignore @type {Object.} */ let targetObj = {} @@ -488,106 +476,10 @@ async function determineTargets(axiomconfig, fs) { }, {}) } -/** - * Validate a repolint configuration against a known JSON schema - * - * @memberof repolinter - * @param {Object} config The configuration to validate - * @returns {Promise} - * A object representing or not the config validation succeeded (passed) - * an an error message if not (error) - */ -async function validateConfig(config) { - // compile the json schema - const ajvProps = new Ajv() - // find all json schemas - const parsedRuleSchemas = Promise.all( - Rules.map(rs => - jsonfile.readFile(path.resolve(__dirname, 'rules', `${rs}-config.json`)) - ) - ) - const parsedFixSchemas = Promise.all( - Fixes.map(f => - jsonfile.readFile(path.resolve(__dirname, 'fixes', `${f}-config.json`)) - ) - ) - const allSchemas = ( - await Promise.all([parsedFixSchemas, parsedRuleSchemas]) - ).reduce((a, c) => a.concat(c), []) - // load them into the validator - for (const schema of allSchemas) { - ajvProps.addSchema(schema) - } - const validator = ajvProps.compile( - await jsonfile.readFile(require.resolve('./rulesets/schema.json')) - ) - - // validate it against the supplied ruleset - if (!validator(config)) { - return { - passed: false, - error: `Configuration validation failed with errors: \n${validator.errors - .map(e => `\tconfiguration${e.dataPath} ${e.message}`) - .join('\n')}` - } - } else { - return { passed: true } - } -} - -/** - * Parse a JSON object config (with repolinter.json structure) and return a list - * of RuleInfo objects which will then be used to determine how to run the linter. 
- * - * @memberof repolinter - * @param {Object} config The repolinter.json config - * @returns {RuleInfo[]} The parsed rule data - */ -function parseConfig(config) { - // check to see if the config has a version marker - // parse modern config - if (config.version === 2) { - return Object.entries(config.rules).map( - ([name, cfg]) => - new RuleInfo( - name, - cfg.level, - cfg.where, - cfg.rule.type, - cfg.rule.options, - cfg.fix && cfg.fix.type, - cfg.fix && cfg.fix.options, - cfg.policyInfo, - cfg.policyUrl - ) - ) - } - // parse legacy config - // old format of "axiom": { "rule-name:rule-type": ["level", { "configvalue": false }]} - return ( - Object.entries(config.rules) - // get axioms - .map(([where, rules]) => { - // get the rules in each axiom - return Object.entries(rules).map(([rulename, configray]) => { - const [name, type] = rulename.split(':') - return new RuleInfo( - name, - configray[0], - where === 'all' ? [] : [where], - type || name, - configray[1] || {} - ) - }) - }) - .reduce((a, c) => a.concat(c)) - ) -} - module.exports.runRuleset = runRuleset module.exports.determineTargets = determineTargets -module.exports.validateConfig = validateConfig -module.exports.parseConfig = parseConfig +module.exports.validateConfig = config.validateConfig +module.exports.parseConfig = config.parseConfig module.exports.shouldRuleRun = shouldRuleRun module.exports.lint = lint module.exports.Result = Result diff --git a/lib/config.js b/lib/config.js new file mode 100644 index 00000000..396e157b --- /dev/null +++ b/lib/config.js @@ -0,0 +1,199 @@ +// Copyright 2017 TODO Group. All rights reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +const Ajv = require('ajv') +const fetch = require('node-fetch') +const findFile = require('find-config') +const fs = require('fs') +const jsonfile = require('jsonfile') +const path = require('path') +const yaml = require('js-yaml') + +const Rules = require('../rules/rules') +const RuleInfo = require('./ruleinfo') +const Fixes = require('../fixes/fixes') + +/** + * Determine if provided string is an absolute URL. That is, if it is + * parseable and has a 'host' URL component. + * + * @param {string} url string to test + * @returns {boolean} true if the string is an absolute URL + */ +function isAbsoluteURL(url) { + try { + const u = new URL(url) + if (u.host !== '') { + return true + } + } catch (e) {} + return false +} + +/** + * Find a repolinter config file in the specified directory. This looks for + * files named repolint or repolinter with a file extension of .json, .yaml, or + * .yml in the specified directory or the nearest ancestor directory. If no + * file is found, the default configuration that ships with repolinter is + * returned. + * + * @param {string} [directory] directory to search for config files in + * @returns {string} absolute path of configuration file + */ +function findConfig(directory) { + return ( + findFile('repolint.json', { cwd: directory }) || + findFile('repolint.yaml', { cwd: directory }) || + findFile('repolint.yml', { cwd: directory }) || + findFile('repolinter.json', { cwd: directory }) || + findFile('repolinter.yaml', { cwd: directory }) || + findFile('repolinter.yml', { cwd: directory }) || + path.join(__dirname, '../rulesets/default.json') + ) +} + +/** + * Load a ruleset config from the specified location. 
+ * + * @param {string} configLocation A URL or local file containing a repolinter config file + * @returns {Object} The loaded repolinter json config + * @throws Will throw an error if unable to parse config or if config is invalid + */ +async function loadConfig(configLocation) { + if (!configLocation) { + throw new Error('must specify config location') + } + + let configData = null + if (isAbsoluteURL(configLocation)) { + const res = await fetch(configLocation) + if (!res.ok) { + throw new Error( + `Failed to fetch config from ${configLocation} with status code ${res.status}` + ) + } + configData = await res.text() + } else { + configData = await fs.promises.readFile(configLocation, 'utf-8') + } + + let ruleset + // try parsing as JSON, then YAML + try { + ruleset = JSON.parse(configData) + } catch (je) { + try { + ruleset = yaml.safeLoad(configData) + } catch (ye) { + throw new Error( + `unable to parse ${configLocation} as either JSON (error: ${je}) or YAML (error: ${ye})` + ) + } + } + + return ruleset +} + +/** + * Validate a repolint configuration against a known JSON schema + * + * @memberof repolinter + * @param {Object} config The configuration to validate + * @returns {Promise} + * An object representing whether or not the config validation succeeded (passed) + * or an error message if not (error) + */ +async function validateConfig(config) { + // compile the json schema + const ajvProps = new Ajv() + // find all json schemas + const parsedRuleSchemas = Promise.all( + Rules.map(rs => + jsonfile.readFile( + path.resolve(__dirname, '../rules', `${rs}-config.json`) + ) + ) + ) + const parsedFixSchemas = Promise.all( + Fixes.map(f => + jsonfile.readFile(path.resolve(__dirname, '../fixes', `${f}-config.json`)) + ) + ) + const allSchemas = ( + await Promise.all([parsedFixSchemas, parsedRuleSchemas]) + ).reduce((a, c) => a.concat(c), []) + // load them into the validator + for (const schema of allSchemas) { + ajvProps.addSchema(schema) + } + const validator = 
ajvProps.compile( + await jsonfile.readFile(require.resolve('../rulesets/schema.json')) + ) + + // validate it against the supplied ruleset + if (!validator(config)) { + return { + passed: false, + error: `Configuration validation failed with errors: \n${validator.errors + .map(e => `\tconfiguration${e.dataPath} ${e.message}`) + .join('\n')}` + } + } else { + return { passed: true } + } +} + +/** + * Parse a JSON object config (with repolinter.json structure) and return a list + * of RuleInfo objects which will then be used to determine how to run the linter. + * + * @memberof repolinter + * @param {Object} config The repolinter.json config + * @returns {RuleInfo[]} The parsed rule data + */ +function parseConfig(config) { + // check to see if the config has a version marker + // parse modern config + if (config.version === 2) { + return Object.entries(config.rules).map( + ([name, cfg]) => + new RuleInfo( + name, + cfg.level, + cfg.where, + cfg.rule.type, + cfg.rule.options, + cfg.fix && cfg.fix.type, + cfg.fix && cfg.fix.options, + cfg.policyInfo, + cfg.policyUrl + ) + ) + } + // parse legacy config + // old format of "axiom": { "rule-name:rule-type": ["level", { "configvalue": false }]} + return ( + Object.entries(config.rules) + // get axioms + .map(([where, rules]) => { + // get the rules in each axiom + return Object.entries(rules).map(([rulename, configray]) => { + const [name, type] = rulename.split(':') + return new RuleInfo( + name, + configray[0], + where === 'all' ? 
[] : [where], + type || name, + configray[1] || {} + ) + }) + }) + .reduce((a, c) => a.concat(c)) + ) +} + +module.exports.findConfig = findConfig +module.exports.isAbsoluteURL = isAbsoluteURL +module.exports.loadConfig = loadConfig +module.exports.validateConfig = validateConfig +module.exports.parseConfig = parseConfig diff --git a/tests/lib/config_tests.js b/tests/lib/config_tests.js new file mode 100644 index 00000000..f35caf3c --- /dev/null +++ b/tests/lib/config_tests.js @@ -0,0 +1,103 @@ +// Copyright 2017 TODO Group. All rights reserved. +// SPDX-License-Identifier: Apache-2.0 + +const chai = require('chai') +const chaiAsPromised = require('chai-as-promised') +const expect = chai.expect +const path = require('path') +const fs = require('fs') +const ServerMock = require('mock-http-server') + +chai.use(chaiAsPromised) + +describe('lib', () => { + describe('config', function () { + const Config = require('../../lib/config') + + this.timeout(10000) + + describe('isAbsoluteURL', () => { + it('should identify absolute URLs', async () => { + expect(Config.isAbsoluteURL('http://example.com/')).to.equals(true) + expect(Config.isAbsoluteURL('https://example.com/')).to.equals(true) + expect(Config.isAbsoluteURL('ftp://example.com/')).to.equals(true) + }) + + it('should identify relative URLs', async () => { + expect(Config.isAbsoluteURL('foo')).to.equals(false) + expect(Config.isAbsoluteURL('/foo')).to.equals(false) + expect(Config.isAbsoluteURL('file:/foo')).to.equals(false) + expect(Config.isAbsoluteURL('file:///foo')).to.equals(false) + expect(Config.isAbsoluteURL('c:\\foo')).to.equals(false) + }) + }) + + describe('findConfig', () => { + it('should find config file in directory', async () => { + const localConfig = path.join(__dirname, 'repolinter.yaml') + expect(Config.findConfig(__dirname)).to.equals(localConfig) + }) + it('should return default file when no config present', async () => { + const parent = path.join(__dirname, '..') + const defaultConfig = 
path.join( + __dirname, + '../../rulesets/default.json' + ) + expect(Config.findConfig(parent)).to.equals(defaultConfig) + }) + }) + + describe('loadConfig', async () => { + const server = new ServerMock({ host: 'localhost', port: 9000 }, {}) + const serveDirectory = dir => ({ + method: 'GET', + path: '*', + reply: { + status: 200, + body: request => + fs.readFileSync(path.resolve(dir, request.pathname.substring(1))) + } + }) + beforeEach(done => server.start(done)) + afterEach(done => server.stop(done)) + + it('should load local config file', async () => { + const actual = await Config.loadConfig( + path.join(__dirname, 'default.json') + ) + expect(actual.rules).to.have.property('test-file-exists') + expect(actual.rules['test-file-exists'].level).to.equals('error') + }) + + it('should load URL config file', async () => { + server.on(serveDirectory(__dirname)) + const actual = await Config.loadConfig( + 'http://localhost:9000/default.json' + ) + expect(actual.rules).to.have.property('test-file-exists') + expect(actual.rules['test-file-exists'].level).to.equals('error') + }) + + it('should throw error on non existant file', async () => { + expect(Config.loadConfig('/does-not-exist')).to.eventually.throw( + 'ENOENT' + ) + }) + + it('should throw error on non existant URL', async () => { + server.on(serveDirectory(__dirname)) + expect( + Config.loadConfig('http://localhost:9000/404') + ).to.eventually.throw('404') + }) + }) + + describe('validateConfig', () => { + // already tested as part of the repolinter api tests in tests/api + }) + + describe('parseConfig', () => { + // already tested as part of the repolinter api tests in tests/api + }) + }) +}) diff --git a/tests/lib/default.json b/tests/lib/default.json new file mode 100644 index 00000000..078b9916 --- /dev/null +++ b/tests/lib/default.json @@ -0,0 +1,16 @@ +{ + "$schema": "../../rulesets/schema.json", + "version": 2, + "axioms": {}, + "rules": { + "test-file-exists": { + "level": "error", + "rule": { + 
"type": "file-existence", + "options": { + "globsAny": ["text_file_for_test.txt"] + } + } + } + } +} diff --git a/tests/lib/repolinter.yaml b/tests/lib/repolinter.yaml new file mode 100644 index 00000000..3cbc5a93 --- /dev/null +++ b/tests/lib/repolinter.yaml @@ -0,0 +1,2 @@ +$schema: "../../rulesets/schema.json" +version: 2