Skip to content

Commit

Permalink
refactor configuration loading logic
Browse files Browse the repository at this point in the history
Currently, logic for loading configuration files is split between
bin/repolinter.js (for loading URLs) and index.js (for local files,
directories, and the default config).

This commit refactors all configuration loading logic into a new
lib/config.js module, which handles loading and validating config files
of all types (existing parseConfig and validateConfig functions were
moved as-is).  This will make it simpler to extend config loading logic,
for example to support inheriting rulesets (#21).

This does make minor changes to the error messages returned by the CLI
in some cases. It also attempts to parse all config files as JSON
first and then as YAML, regardless of file extension (this was
previously handled differently for remote versus local config files).

This does not change anything in repolinter's programmatic API.
  • Loading branch information
willnorris committed Apr 14, 2021
1 parent 8b0b68e commit 67ea12f
Show file tree
Hide file tree
Showing 6 changed files with 335 additions and 166 deletions.
45 changes: 1 addition & 44 deletions bin/repolinter.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,8 @@ const repolinter = require('..')
const rimraf = require('rimraf')
const git = require('simple-git/promise')()
/** @type {any} */
const fetch = require('node-fetch')
const fs = require('fs')
const os = require('os')
const yaml = require('js-yaml')

// eslint-disable-next-line no-unused-expressions
require('yargs')
Expand Down Expand Up @@ -65,47 +63,6 @@ require('yargs')
})
},
async (/** @type {any} */ argv) => {
let rulesetParsed = null
let jsonerror
let yamlerror
// resolve the ruleset if a url is specified
if (argv.rulesetUrl) {
const res = await fetch(argv.rulesetUrl)
if (!res.ok) {
console.error(
`Failed to fetch config from ${argv.rulesetUrl} with status code ${res.status}`
)
process.exitCode = 1
return
}
const data = await res.text()
// attempt to parse as JSON
try {
rulesetParsed = JSON.parse(data)
} catch (e) {
jsonerror = e
}
// attempt to parse as YAML
if (!rulesetParsed) {
try {
rulesetParsed = yaml.safeLoad(data)
} catch (e) {
yamlerror = e
}
}
// throw an error if neither worked
if (!rulesetParsed) {
console.log(`Failed to fetch ruleset from URL ${argv.rulesetUrl}:`)
console.log(
`\tJSON failed with error ${jsonerror && jsonerror.toString()}`
)
console.log(
`\tYAML failed with error ${yamlerror && yamlerror.toString()}`
)
process.exitCode = 1
return
}
}
let tmpDir = null
// temporarily clone a git repo to lint
if (argv.git) {
Expand All @@ -124,7 +81,7 @@ require('yargs')
const output = await repolinter.lint(
tmpDir || path.resolve(process.cwd(), argv.directory),
argv.allowPaths,
rulesetParsed || argv.rulesetFile,
argv.rulesetUrl || argv.rulesetFile,
argv.dryRun
)
// create the output
Expand Down
136 changes: 14 additions & 122 deletions index.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,8 @@

/** @module repolinter */

const jsonfile = require('jsonfile')
const Ajv = require('ajv')
const path = require('path')
const findConfig = require('find-config')
const fs = require('fs')
const yaml = require('js-yaml')
// eslint-disable-next-line no-unused-vars
const config = require('./lib/config')
const Result = require('./lib/result')
const RuleInfo = require('./lib/ruleinfo')
const FormatResult = require('./lib/formatresult')
Expand Down Expand Up @@ -126,26 +121,18 @@ async function lint(

let rulesetPath = null
if (typeof ruleset === 'string') {
rulesetPath = path.resolve(targetDir, ruleset)
if (config.isAbsoluteURL(ruleset)) {
rulesetPath = ruleset
} else {
rulesetPath = path.resolve(targetDir, ruleset)
}
} else if (!ruleset) {
rulesetPath =
findConfig('repolint.json', { cwd: targetDir }) ||
findConfig('repolint.yaml', { cwd: targetDir }) ||
findConfig('repolint.yml', { cwd: targetDir }) ||
findConfig('repolinter.json', { cwd: targetDir }) ||
findConfig('repolinter.yaml', { cwd: targetDir }) ||
findConfig('repolinter.yml', { cwd: targetDir }) ||
path.join(__dirname, 'rulesets/default.json')
rulesetPath = config.findConfig(targetDir)
}

if (rulesetPath !== null) {
const extension = path.extname(rulesetPath)
try {
const file = await fs.promises.readFile(rulesetPath, 'utf-8')
if (extension === '.yaml' || extension === '.yml') {
ruleset = yaml.safeLoad(file)
} else {
ruleset = JSON.parse(file)
}
ruleset = await config.loadConfig(rulesetPath)
} catch (e) {
return {
params: {
Expand All @@ -164,8 +151,9 @@ async function lint(
}
}
}

// validate config
const val = await validateConfig(ruleset)
const val = await config.validateConfig(ruleset)
if (!val.passed) {
return {
params: {
Expand All @@ -184,7 +172,7 @@ async function lint(
}
}
// parse it
const configParsed = parseConfig(ruleset)
const configParsed = config.parseConfig(ruleset)
// determine axiom targets
/** @ignore @type {Object.<string, Result>} */
let targetObj = {}
Expand Down Expand Up @@ -488,106 +476,10 @@ async function determineTargets(axiomconfig, fs) {
}, {})
}

/**
 * Validate a repolinter configuration against the known JSON schema,
 * including the per-rule and per-fix option sub-schemas.
 *
 * @memberof repolinter
 * @param {Object} config The configuration to validate
 * @returns {Promise<Object>}
 *   An object indicating whether validation succeeded:
 *   { passed: true } on success, or { passed: false, error: <message> }
 *   where <message> lists every schema violation found.
 */
async function validateConfig(config) {
  // compile the json schema
  const ajvProps = new Ajv()
  // load every rule and fix option schema so the main ruleset schema's
  // references to them can be resolved during compilation
  const parsedRuleSchemas = Promise.all(
    Rules.map(rs =>
      jsonfile.readFile(path.resolve(__dirname, 'rules', `${rs}-config.json`))
    )
  )
  const parsedFixSchemas = Promise.all(
    Fixes.map(f =>
      jsonfile.readFile(path.resolve(__dirname, 'fixes', `${f}-config.json`))
    )
  )
  // flatten [fixSchemas[], ruleSchemas[]] into a single list
  const allSchemas = (
    await Promise.all([parsedFixSchemas, parsedRuleSchemas])
  ).reduce((a, c) => a.concat(c), [])
  // register the sub-schemas before compiling the top-level schema
  for (const schema of allSchemas) {
    ajvProps.addSchema(schema)
  }
  const validator = ajvProps.compile(
    await jsonfile.readFile(require.resolve('./rulesets/schema.json'))
  )

  // validate it against the supplied ruleset
  if (!validator(config)) {
    // collect all ajv errors into one human-readable message
    return {
      passed: false,
      error: `Configuration validation failed with errors: \n${validator.errors
        .map(e => `\tconfiguration${e.dataPath} ${e.message}`)
        .join('\n')}`
    }
  } else {
    return { passed: true }
  }
}

/**
 * Parse a JSON object config (with repolinter.json structure) and return a list
 * of RuleInfo objects which will then be used to determine how to run the linter.
 *
 * Supports both the modern v2 format ("rules" keyed by rule name, with
 * explicit level/where/rule/fix fields) and the legacy format
 * ("axiom": { "rule-name:rule-type": ["level", { ...options }] }).
 *
 * @memberof repolinter
 * @param {Object} config The repolinter.json config
 * @returns {RuleInfo[]} The parsed rule data
 */
function parseConfig(config) {
  // check to see if the config has a version marker
  // parse modern config
  if (config.version === 2) {
    return Object.entries(config.rules).map(
      ([name, cfg]) =>
        new RuleInfo(
          name,
          cfg.level,
          cfg.where,
          cfg.rule.type,
          cfg.rule.options,
          // fix is optional; pass undefined for type/options when absent
          cfg.fix && cfg.fix.type,
          cfg.fix && cfg.fix.options,
          cfg.policyInfo,
          cfg.policyUrl
        )
    )
  }
  // parse legacy config
  // old format of "axiom": { "rule-name:rule-type": ["level", { "configvalue": false }]}
  return (
    Object.entries(config.rules)
      // get axioms
      .map(([where, rules]) => {
        // get the rules in each axiom
        return Object.entries(rules).map(([rulename, configray]) => {
          // "rule-name:rule-type"; a bare name means the type equals the name
          const [name, type] = rulename.split(':')
          return new RuleInfo(
            name,
            configray[0],
            // "all" means the rule applies unconditionally (no axiom filter)
            where === 'all' ? [] : [where],
            type || name,
            configray[1] || {}
          )
        })
      })
      // flatten the per-axiom arrays; the [] initial value makes an empty
      // "rules" object yield [] instead of throwing a TypeError
      .reduce((a, c) => a.concat(c), [])
  )
}

module.exports.runRuleset = runRuleset
module.exports.determineTargets = determineTargets
module.exports.validateConfig = validateConfig
module.exports.parseConfig = parseConfig
module.exports.validateConfig = config.validateConfig
module.exports.parseConfig = config.parseConfig
module.exports.shouldRuleRun = shouldRuleRun
module.exports.lint = lint
module.exports.Result = Result
Expand Down
Loading

0 comments on commit 67ea12f

Please sign in to comment.