From 67ea12f707652286a727c066a581e53757310ace Mon Sep 17 00:00:00 2001 From: Will Norris Date: Tue, 30 Mar 2021 11:53:58 -0700 Subject: [PATCH 1/2] refactor configuration loading logic Currently, logic for loading configuration files is split between bin/repolinter.js (for loading URLs) and index.js (for local files, directories, and the default config). This commit refactors all configuration loading logic into a new lib/config.js module, which handles loading and validating config files of all types (existing parseConfig and validateConfig functions were moved as-is). This will make it simpler to extend config loading logic, for example to support inheriting rulesets (#21). This does have minor changes on the error messages returned by the CLI in some cases. It also attempts to parse all config files as JSON first and then YAML, regardless of file extensions (this was previously different for remote versus local config files). This does not change anything in repolinter's programmatic API. 
--- bin/repolinter.js | 45 +-------- index.js | 136 +++----------------------- lib/config.js | 199 ++++++++++++++++++++++++++++++++++++++ tests/lib/config_tests.js | 103 ++++++++++++++++++++ tests/lib/default.json | 16 +++ tests/lib/repolinter.yaml | 2 + 6 files changed, 335 insertions(+), 166 deletions(-) create mode 100644 lib/config.js create mode 100644 tests/lib/config_tests.js create mode 100644 tests/lib/default.json create mode 100644 tests/lib/repolinter.yaml diff --git a/bin/repolinter.js b/bin/repolinter.js index 462eda82..71beabae 100755 --- a/bin/repolinter.js +++ b/bin/repolinter.js @@ -6,10 +6,8 @@ const repolinter = require('..') const rimraf = require('rimraf') const git = require('simple-git/promise')() /** @type {any} */ -const fetch = require('node-fetch') const fs = require('fs') const os = require('os') -const yaml = require('js-yaml') // eslint-disable-next-line no-unused-expressions require('yargs') @@ -65,47 +63,6 @@ require('yargs') }) }, async (/** @type {any} */ argv) => { - let rulesetParsed = null - let jsonerror - let yamlerror - // resolve the ruleset if a url is specified - if (argv.rulesetUrl) { - const res = await fetch(argv.rulesetUrl) - if (!res.ok) { - console.error( - `Failed to fetch config from ${argv.rulesetUrl} with status code ${res.status}` - ) - process.exitCode = 1 - return - } - const data = await res.text() - // attempt to parse as JSON - try { - rulesetParsed = JSON.parse(data) - } catch (e) { - jsonerror = e - } - // attempt to parse as YAML - if (!rulesetParsed) { - try { - rulesetParsed = yaml.safeLoad(data) - } catch (e) { - yamlerror = e - } - } - // throw an error if neither worked - if (!rulesetParsed) { - console.log(`Failed to fetch ruleset from URL ${argv.rulesetUrl}:`) - console.log( - `\tJSON failed with error ${jsonerror && jsonerror.toString()}` - ) - console.log( - `\tYAML failed with error ${yamlerror && yamlerror.toString()}` - ) - process.exitCode = 1 - return - } - } let tmpDir = null // 
temporarily clone a git repo to lint if (argv.git) { @@ -124,7 +81,7 @@ require('yargs') const output = await repolinter.lint( tmpDir || path.resolve(process.cwd(), argv.directory), argv.allowPaths, - rulesetParsed || argv.rulesetFile, + argv.rulesetUrl || argv.rulesetFile, argv.dryRun ) // create the output diff --git a/index.js b/index.js index 26d00408..d68d4157 100644 --- a/index.js +++ b/index.js @@ -3,13 +3,8 @@ /** @module repolinter */ -const jsonfile = require('jsonfile') -const Ajv = require('ajv') const path = require('path') -const findConfig = require('find-config') -const fs = require('fs') -const yaml = require('js-yaml') -// eslint-disable-next-line no-unused-vars +const config = require('./lib/config') const Result = require('./lib/result') const RuleInfo = require('./lib/ruleinfo') const FormatResult = require('./lib/formatresult') @@ -126,26 +121,18 @@ async function lint( let rulesetPath = null if (typeof ruleset === 'string') { - rulesetPath = path.resolve(targetDir, ruleset) + if (config.isAbsoluteURL(ruleset)) { + rulesetPath = ruleset + } else { + rulesetPath = path.resolve(targetDir, ruleset) + } } else if (!ruleset) { - rulesetPath = - findConfig('repolint.json', { cwd: targetDir }) || - findConfig('repolint.yaml', { cwd: targetDir }) || - findConfig('repolint.yml', { cwd: targetDir }) || - findConfig('repolinter.json', { cwd: targetDir }) || - findConfig('repolinter.yaml', { cwd: targetDir }) || - findConfig('repolinter.yml', { cwd: targetDir }) || - path.join(__dirname, 'rulesets/default.json') + rulesetPath = config.findConfig(targetDir) } + if (rulesetPath !== null) { - const extension = path.extname(rulesetPath) try { - const file = await fs.promises.readFile(rulesetPath, 'utf-8') - if (extension === '.yaml' || extension === '.yml') { - ruleset = yaml.safeLoad(file) - } else { - ruleset = JSON.parse(file) - } + ruleset = await config.loadConfig(rulesetPath) } catch (e) { return { params: { @@ -164,8 +151,9 @@ async function lint( } } 
} + // validate config - const val = await validateConfig(ruleset) + const val = await config.validateConfig(ruleset) if (!val.passed) { return { params: { @@ -184,7 +172,7 @@ async function lint( } } // parse it - const configParsed = parseConfig(ruleset) + const configParsed = config.parseConfig(ruleset) // determine axiom targets /** @ignore @type {Object.} */ let targetObj = {} @@ -488,106 +476,10 @@ async function determineTargets(axiomconfig, fs) { }, {}) } -/** - * Validate a repolint configuration against a known JSON schema - * - * @memberof repolinter - * @param {Object} config The configuration to validate - * @returns {Promise} - * A object representing or not the config validation succeeded (passed) - * an an error message if not (error) - */ -async function validateConfig(config) { - // compile the json schema - const ajvProps = new Ajv() - // find all json schemas - const parsedRuleSchemas = Promise.all( - Rules.map(rs => - jsonfile.readFile(path.resolve(__dirname, 'rules', `${rs}-config.json`)) - ) - ) - const parsedFixSchemas = Promise.all( - Fixes.map(f => - jsonfile.readFile(path.resolve(__dirname, 'fixes', `${f}-config.json`)) - ) - ) - const allSchemas = ( - await Promise.all([parsedFixSchemas, parsedRuleSchemas]) - ).reduce((a, c) => a.concat(c), []) - // load them into the validator - for (const schema of allSchemas) { - ajvProps.addSchema(schema) - } - const validator = ajvProps.compile( - await jsonfile.readFile(require.resolve('./rulesets/schema.json')) - ) - - // validate it against the supplied ruleset - if (!validator(config)) { - return { - passed: false, - error: `Configuration validation failed with errors: \n${validator.errors - .map(e => `\tconfiguration${e.dataPath} ${e.message}`) - .join('\n')}` - } - } else { - return { passed: true } - } -} - -/** - * Parse a JSON object config (with repolinter.json structure) and return a list - * of RuleInfo objects which will then be used to determine how to run the linter. 
- * - * @memberof repolinter - * @param {Object} config The repolinter.json config - * @returns {RuleInfo[]} The parsed rule data - */ -function parseConfig(config) { - // check to see if the config has a version marker - // parse modern config - if (config.version === 2) { - return Object.entries(config.rules).map( - ([name, cfg]) => - new RuleInfo( - name, - cfg.level, - cfg.where, - cfg.rule.type, - cfg.rule.options, - cfg.fix && cfg.fix.type, - cfg.fix && cfg.fix.options, - cfg.policyInfo, - cfg.policyUrl - ) - ) - } - // parse legacy config - // old format of "axiom": { "rule-name:rule-type": ["level", { "configvalue": false }]} - return ( - Object.entries(config.rules) - // get axioms - .map(([where, rules]) => { - // get the rules in each axiom - return Object.entries(rules).map(([rulename, configray]) => { - const [name, type] = rulename.split(':') - return new RuleInfo( - name, - configray[0], - where === 'all' ? [] : [where], - type || name, - configray[1] || {} - ) - }) - }) - .reduce((a, c) => a.concat(c)) - ) -} - module.exports.runRuleset = runRuleset module.exports.determineTargets = determineTargets -module.exports.validateConfig = validateConfig -module.exports.parseConfig = parseConfig +module.exports.validateConfig = config.validateConfig +module.exports.parseConfig = config.parseConfig module.exports.shouldRuleRun = shouldRuleRun module.exports.lint = lint module.exports.Result = Result diff --git a/lib/config.js b/lib/config.js new file mode 100644 index 00000000..396e157b --- /dev/null +++ b/lib/config.js @@ -0,0 +1,199 @@ +// Copyright 2017 TODO Group. All rights reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +const Ajv = require('ajv') +const fetch = require('node-fetch') +const findFile = require('find-config') +const fs = require('fs') +const jsonfile = require('jsonfile') +const path = require('path') +const yaml = require('js-yaml') + +const Rules = require('../rules/rules') +const RuleInfo = require('./ruleinfo') +const Fixes = require('../fixes/fixes') + +/** + * Determine if provided string is an absolute URL. That is, if it is + * parseable and has a 'host' URL component. + * + * @param {string} url string to test + * @returns {boolean} true if the string is an absolute URL + */ +function isAbsoluteURL(url) { + try { + const u = new URL(url) + if (u.host !== '') { + return true + } + } catch (e) {} + return false +} + +/** + * Find a repolinter config file in the specified directory. This looks for + * files named repolint or repolinter with a file extension of .json, .yaml, or + * .yml in the specified directory or the nearest ancestor directory. If no + * file is found, the default configuration that ships with repolinter is + * returned. + * + * @param {string} [directory] directory to search for config files in + * @returns {string} absolute path of configuration file + */ +function findConfig(directory) { + return ( + findFile('repolint.json', { cwd: directory }) || + findFile('repolint.yaml', { cwd: directory }) || + findFile('repolint.yml', { cwd: directory }) || + findFile('repolinter.json', { cwd: directory }) || + findFile('repolinter.yaml', { cwd: directory }) || + findFile('repolinter.yml', { cwd: directory }) || + path.join(__dirname, '../rulesets/default.json') + ) +} + +/** + * Load a ruleset config from the specified location. 
+ * + * @param {string} configLocation A URL or local file containing a repolinter config file + * @returns {Object} The loaded repolinter json config + * @throws Will throw an error if unable to parse config or if config is invalid + */ +async function loadConfig(configLocation) { + if (!configLocation) { + throw new Error('must specify config location') + } + + let configData = null + if (isAbsoluteURL(configLocation)) { + const res = await fetch(configLocation) + if (!res.ok) { + throw new Error( + `Failed to fetch config from ${configLocation} with status code ${res.status}` + ) + } + configData = await res.text() + } else { + configData = await fs.promises.readFile(configLocation, 'utf-8') + } + + let ruleset + // try parsing as JSON, then YAML + try { + ruleset = JSON.parse(configData) + } catch (je) { + try { + ruleset = yaml.safeLoad(configData) + } catch (ye) { + throw new Error( + `unable to parse ${configLocation} as either JSON (error: ${je}) or YAML (error: ${ye})` + ) + } + } + + return ruleset +} + +/** + * Validate a repolint configuration against a known JSON schema + * + * @memberof repolinter + * @param {Object} config The configuration to validate + * @returns {Promise} + * An object indicating whether the config validation succeeded (passed) + * and an error message if not (error) + */ +async function validateConfig(config) { + // compile the json schema + const ajvProps = new Ajv() + // find all json schemas + const parsedRuleSchemas = Promise.all( + Rules.map(rs => + jsonfile.readFile( + path.resolve(__dirname, '../rules', `${rs}-config.json`) + ) + ) + ) + const parsedFixSchemas = Promise.all( + Fixes.map(f => + jsonfile.readFile(path.resolve(__dirname, '../fixes', `${f}-config.json`)) + ) + ) + const allSchemas = ( + await Promise.all([parsedFixSchemas, parsedRuleSchemas]) + ).reduce((a, c) => a.concat(c), []) + // load them into the validator + for (const schema of allSchemas) { + ajvProps.addSchema(schema) + } + const validator = 
ajvProps.compile( + await jsonfile.readFile(require.resolve('../rulesets/schema.json')) + ) + + // validate it against the supplied ruleset + if (!validator(config)) { + return { + passed: false, + error: `Configuration validation failed with errors: \n${validator.errors + .map(e => `\tconfiguration${e.dataPath} ${e.message}`) + .join('\n')}` + } + } else { + return { passed: true } + } +} + +/** + * Parse a JSON object config (with repolinter.json structure) and return a list + * of RuleInfo objects which will then be used to determine how to run the linter. + * + * @memberof repolinter + * @param {Object} config The repolinter.json config + * @returns {RuleInfo[]} The parsed rule data + */ +function parseConfig(config) { + // check to see if the config has a version marker + // parse modern config + if (config.version === 2) { + return Object.entries(config.rules).map( + ([name, cfg]) => + new RuleInfo( + name, + cfg.level, + cfg.where, + cfg.rule.type, + cfg.rule.options, + cfg.fix && cfg.fix.type, + cfg.fix && cfg.fix.options, + cfg.policyInfo, + cfg.policyUrl + ) + ) + } + // parse legacy config + // old format of "axiom": { "rule-name:rule-type": ["level", { "configvalue": false }]} + return ( + Object.entries(config.rules) + // get axioms + .map(([where, rules]) => { + // get the rules in each axiom + return Object.entries(rules).map(([rulename, configray]) => { + const [name, type] = rulename.split(':') + return new RuleInfo( + name, + configray[0], + where === 'all' ? 
[] : [where], + type || name, + configray[1] || {} + ) + }) + }) + .reduce((a, c) => a.concat(c)) + ) +} + +module.exports.findConfig = findConfig +module.exports.isAbsoluteURL = isAbsoluteURL +module.exports.loadConfig = loadConfig +module.exports.validateConfig = validateConfig +module.exports.parseConfig = parseConfig diff --git a/tests/lib/config_tests.js b/tests/lib/config_tests.js new file mode 100644 index 00000000..f35caf3c --- /dev/null +++ b/tests/lib/config_tests.js @@ -0,0 +1,103 @@ +// Copyright 2017 TODO Group. All rights reserved. +// SPDX-License-Identifier: Apache-2.0 + +const chai = require('chai') +const chaiAsPromised = require('chai-as-promised') +const expect = chai.expect +const path = require('path') +const fs = require('fs') +const ServerMock = require('mock-http-server') + +chai.use(chaiAsPromised) + +describe('lib', () => { + describe('config', function () { + const Config = require('../../lib/config') + + this.timeout(10000) + + describe('isAbsoluteURL', () => { + it('should identify absolute URLs', async () => { + expect(Config.isAbsoluteURL('http://example.com/')).to.equals(true) + expect(Config.isAbsoluteURL('https://example.com/')).to.equals(true) + expect(Config.isAbsoluteURL('ftp://example.com/')).to.equals(true) + }) + + it('should identify relative URLs', async () => { + expect(Config.isAbsoluteURL('foo')).to.equals(false) + expect(Config.isAbsoluteURL('/foo')).to.equals(false) + expect(Config.isAbsoluteURL('file:/foo')).to.equals(false) + expect(Config.isAbsoluteURL('file:///foo')).to.equals(false) + expect(Config.isAbsoluteURL('c:\\foo')).to.equals(false) + }) + }) + + describe('findConfig', () => { + it('should find config file in directory', async () => { + const localConfig = path.join(__dirname, 'repolinter.yaml') + expect(Config.findConfig(__dirname)).to.equals(localConfig) + }) + it('should return default file when no config present', async () => { + const parent = path.join(__dirname, '..') + const defaultConfig = 
path.join( + __dirname, + '../../rulesets/default.json' + ) + expect(Config.findConfig(parent)).to.equals(defaultConfig) + }) + }) + + describe('loadConfig', async () => { + const server = new ServerMock({ host: 'localhost', port: 9000 }, {}) + const serveDirectory = dir => ({ + method: 'GET', + path: '*', + reply: { + status: 200, + body: request => + fs.readFileSync(path.resolve(dir, request.pathname.substring(1))) + } + }) + beforeEach(done => server.start(done)) + afterEach(done => server.stop(done)) + + it('should load local config file', async () => { + const actual = await Config.loadConfig( + path.join(__dirname, 'default.json') + ) + expect(actual.rules).to.have.property('test-file-exists') + expect(actual.rules['test-file-exists'].level).to.equals('error') + }) + + it('should load URL config file', async () => { + server.on(serveDirectory(__dirname)) + const actual = await Config.loadConfig( + 'http://localhost:9000/default.json' + ) + expect(actual.rules).to.have.property('test-file-exists') + expect(actual.rules['test-file-exists'].level).to.equals('error') + }) + + it('should throw error on non existant file', async () => { + expect(Config.loadConfig('/does-not-exist')).to.eventually.throw( + 'ENOENT' + ) + }) + + it('should throw error on non existant URL', async () => { + server.on(serveDirectory(__dirname)) + expect( + Config.loadConfig('http://localhost:9000/404') + ).to.eventually.throw('404') + }) + }) + + describe('validateConfig', () => { + // already tested as part of the repolinter api tests in tests/api + }) + + describe('parseConfig', () => { + // already tested as part of the repolinter api tests in tests/api + }) + }) +}) diff --git a/tests/lib/default.json b/tests/lib/default.json new file mode 100644 index 00000000..078b9916 --- /dev/null +++ b/tests/lib/default.json @@ -0,0 +1,16 @@ +{ + "$schema": "../../rulesets/schema.json", + "version": 2, + "axioms": {}, + "rules": { + "test-file-exists": { + "level": "error", + "rule": { + 
"type": "file-existence", + "options": { + "globsAny": ["text_file_for_test.txt"] + } + } + } + } +} diff --git a/tests/lib/repolinter.yaml b/tests/lib/repolinter.yaml new file mode 100644 index 00000000..3cbc5a93 --- /dev/null +++ b/tests/lib/repolinter.yaml @@ -0,0 +1,2 @@ +$schema: "../../rulesets/schema.json" +version: 2 From b3f16d3ad239c9eec749ad3b2f63cbbd946fb7c8 Mon Sep 17 00:00:00 2001 From: Will Norris Date: Tue, 30 Mar 2021 13:27:13 -0700 Subject: [PATCH 2/2] add 'extends' property for extending ruleset --- README.md | 50 ++++++++++++++++++++++++++++++++ lib/config.js | 26 ++++++++++++++++- rulesets/schema.json | 4 +++ tests/lib/absolute-override.yaml | 6 ++++ tests/lib/config_tests.js | 40 +++++++++++++++++++++++++ tests/lib/loop-a.yaml | 6 ++++ tests/lib/loop-b.yaml | 6 ++++ tests/lib/loop-self.yaml | 6 ++++ tests/lib/repolinter.yaml | 1 + 9 files changed, 144 insertions(+), 1 deletion(-) create mode 100644 tests/lib/absolute-override.yaml create mode 100644 tests/lib/loop-a.yaml create mode 100644 tests/lib/loop-b.yaml create mode 100644 tests/lib/loop-self.yaml diff --git a/README.md b/README.md index 7dc644ab..e485bc5e 100644 --- a/README.md +++ b/README.md @@ -320,6 +320,56 @@ rules: ... ``` +### Extending Rulesets + +A ruleset can extend another ruleset, in which case the two files will be +recursively merged. Extended rulesets can themselves extend additional rulesets +up to 20 rulesets deep. + +Extend a ruleset by including an "extends" top-level key which identifies a URL +or file path: + +```JavaScript +{ + "extends": "https://raw.githubusercontent.com/todogroup/repolinter/master/rulesets/default.json", + "rules": { + // disable CI check + "integrates-with-ci": { + "level": "off" + } + } +} +``` + +```YAML +extends: https://raw.githubusercontent.com/todogroup/repolinter/master/rulesets/default.json +rules: + # disable CI check + integrates-with-ci: + level: off + ... 
+``` + +Relative paths are resolved relative to the location used to access the +extending file. For example, if repolinter is invoked as: + +``` +repolinter -u http://example.com/custom-rules.yaml +``` + +And that ruleset includes `extends: "./default.yaml"`, the path will be resolved +relative to the original URL as `http://example.com/default.yaml`. If instead +repolinter is invoked as: + +``` +repolinter -r /etc/repolinter/custom-rules.yaml +``` + +And that ruleset includes `extends: "./default.yaml"`, the path will be resolved +relative to the original file path as `/etc/repolinter/default.yaml`. + +YAML and JSON rulesets can be extended from either format. + ## API Repolinter also includes an extensible JavaScript API: diff --git a/lib/config.js b/lib/config.js index 396e157b..4c915b56 100644 --- a/lib/config.js +++ b/lib/config.js @@ -6,6 +6,7 @@ const fetch = require('node-fetch') const findFile = require('find-config') const fs = require('fs') const jsonfile = require('jsonfile') +const lodash = require('lodash') const path = require('path') const yaml = require('js-yaml') @@ -56,10 +57,11 @@ function findConfig(directory) { * Load a ruleset config from the specified location. * * @param {string} configLocation A URL or local file containing a repolinter config file + * @param {array} [processed] List of config files already processed, used to prevent loops * @returns {Object} The loaded repolinter json config * @throws Will throw an error if unable to parse config or if config is invalid */ -async function loadConfig(configLocation) { +async function loadConfig(configLocation, processed = []) { if (!configLocation) { throw new Error('must specify config location') } @@ -91,6 +93,28 @@ async function loadConfig(configLocation) { } } + // merge extended rulesets + if (ruleset.extends) { + processed.push(configLocation) + if (processed.length > 20) { + // safeguard against infinite loops. 
expose as flag one day if needed + throw new Error('exceeded maximum 20 ruleset extensions') + } + + let parent + if (isAbsoluteURL(ruleset.extends)) { + parent = ruleset.extends + } else if (isAbsoluteURL(configLocation)) { + parent = new URL(ruleset.extends, configLocation) + } else { + parent = path.resolve(path.dirname(configLocation), ruleset.extends) + } + if (!processed.includes(parent)) { + const parentRuleset = await loadConfig(parent, processed) + ruleset = lodash.merge({}, parentRuleset, ruleset) + } + } + return ruleset } diff --git a/rulesets/schema.json b/rulesets/schema.json index 9e4a176e..8a9922b6 100644 --- a/rulesets/schema.json +++ b/rulesets/schema.json @@ -14,6 +14,10 @@ "properties": { "$schema": { "type": "string" }, "version": { "const": 2 }, + "extends": { + "type": "string", + "title": "URL or path of ruleset file this ruleset extends" + }, "axioms": { "type": "object", "title": "The axioms schema", diff --git a/tests/lib/absolute-override.yaml b/tests/lib/absolute-override.yaml new file mode 100644 index 00000000..67542262 --- /dev/null +++ b/tests/lib/absolute-override.yaml @@ -0,0 +1,6 @@ +$schema: "../../rulesets/schema.json" +extends: "http://localhost:9000/default.json" +version: 2 +rules: + test-file-exists: + level: off diff --git a/tests/lib/config_tests.js b/tests/lib/config_tests.js index f35caf3c..8bff17cd 100644 --- a/tests/lib/config_tests.js +++ b/tests/lib/config_tests.js @@ -78,6 +78,46 @@ describe('lib', () => { expect(actual.rules['test-file-exists'].level).to.equals('error') }) + it('should handle relative file extends', async () => { + const actual = await Config.loadConfig( + path.join(__dirname, 'repolinter.yaml') + ) + expect(actual.rules).to.have.property('test-file-exists') + expect(actual.rules['test-file-exists'].level).to.equals('error') + }) + + it('should handle relative URL extends', async () => { + server.on(serveDirectory(__dirname)) + const actual = await Config.loadConfig( + 
'http://localhost:9000/repolinter.yaml' + ) + expect(actual.rules).to.have.property('test-file-exists') + expect(actual.rules['test-file-exists'].level).to.equals('error') + }) + + it('should handle absolute URL extends', async () => { + server.on(serveDirectory(__dirname)) + const actual = await Config.loadConfig( + path.join(__dirname, 'absolute-override.yaml') + ) + expect(actual.rules).to.have.property('test-file-exists') + expect(actual.rules['test-file-exists'].level).to.equals('off') + }) + + it('should detect loops in extended rulesets', async () => { + const loopSelf = await Config.loadConfig( + path.join(__dirname, 'loop-self.yaml') + ) + expect(loopSelf.rules).to.have.property('test-file-exists') + expect(loopSelf.rules['test-file-exists'].level).to.equals('error') + + const loopB = await Config.loadConfig( + path.join(__dirname, 'loop-b.yaml') + ) + expect(loopB.rules).to.have.property('test-file-exists') + expect(loopB.rules['test-file-exists'].level).to.equals('off') + }) + it('should throw error on non existant file', async () => { expect(Config.loadConfig('/does-not-exist')).to.eventually.throw( 'ENOENT' diff --git a/tests/lib/loop-a.yaml b/tests/lib/loop-a.yaml new file mode 100644 index 00000000..ff3345e0 --- /dev/null +++ b/tests/lib/loop-a.yaml @@ -0,0 +1,6 @@ +$schema: "../../rulesets/schema.json" +extends: "./loop-b.yaml" +version: 2 +rules: + test-file-exists: + level: error diff --git a/tests/lib/loop-b.yaml b/tests/lib/loop-b.yaml new file mode 100644 index 00000000..5df25ac4 --- /dev/null +++ b/tests/lib/loop-b.yaml @@ -0,0 +1,6 @@ +$schema: "../../rulesets/schema.json" +extends: "./loop-a.yaml" +version: 2 +rules: + test-file-exists: + level: off diff --git a/tests/lib/loop-self.yaml b/tests/lib/loop-self.yaml new file mode 100644 index 00000000..c752eab0 --- /dev/null +++ b/tests/lib/loop-self.yaml @@ -0,0 +1,6 @@ +$schema: "../../rulesets/schema.json" +extends: "./loop-self.yaml" +version: 2 +rules: + test-file-exists: + level: error 
diff --git a/tests/lib/repolinter.yaml b/tests/lib/repolinter.yaml index 3cbc5a93..d207119e 100644 --- a/tests/lib/repolinter.yaml +++ b/tests/lib/repolinter.yaml @@ -1,2 +1,3 @@ $schema: "../../rulesets/schema.json" +extends: "./default.json" version: 2