// Copyright 2017 TODO Group. All rights reserved.
// SPDX-License-Identifier: Apache-2.0
const Ajv = require('ajv')
const fetch = require('node-fetch')
const findFile = require('find-config')
const fs = require('fs')
const jsonfile = require('jsonfile')
const lodash = require('lodash')
const path = require('path')
const yaml = require('js-yaml')
const Rules = require('../rules/rules')
const RuleInfo = require('./ruleinfo')
const Fixes = require('../fixes/fixes')

/**
 * Determine if the provided string is an absolute URL, that is, if it is
 * parseable and has a 'host' URL component.
 *
 * @param {string} url string to test
 * @returns {boolean} true if the string is an absolute URL
 */
function isAbsoluteURL(url) {
  try {
    const u = new URL(url)
    if (u.host !== '') {
      return true
    }
  } catch (e) {}
  return false
}
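
// Illustrative usage of isAbsoluteURL (hypothetical inputs):
//   isAbsoluteURL('https://example.com/rulesets/default.json') // => true
//   isAbsoluteURL('rulesets/default.json') // => false (not parseable as a URL)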

/**
 * Find a repolinter config file in the specified directory. This looks for
 * files named repolint or repolinter with a file extension of .json, .yaml, or
 * .yml in the specified directory or the nearest ancestor directory. If no
 * file is found, the default configuration that ships with repolinter is
 * returned.
 *
 * @param {string} [directory] directory to search for config files in
 * @returns {string} absolute path of configuration file
 */
function findConfig(directory) {
  return (
    findFile('repolint.json', { cwd: directory }) ||
    findFile('repolint.yaml', { cwd: directory }) ||
    findFile('repolint.yml', { cwd: directory }) ||
    findFile('repolinter.json', { cwd: directory }) ||
    findFile('repolinter.yaml', { cwd: directory }) ||
    findFile('repolinter.yml', { cwd: directory }) ||
    path.join(__dirname, '../rulesets/default.json')
  )
}
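
// Illustrative usage of findConfig (a sketch; these paths are hypothetical):
// if /repo/repolinter.yml exists, findConfig('/repo/packages/app') resolves to
// '/repo/repolinter.yml'; with no config file anywhere in the ancestor chain,
// the bundled '../rulesets/default.json' path is returned instead.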

/**
 * Load a ruleset config from the specified location.
 *
 * @param {string} configLocation A URL or local file containing a repolinter config file
 * @param {array} [processed] List of config files already processed, used to prevent loops
 * @returns {Object} The loaded repolinter json config
 * @throws Will throw an error if unable to parse config or if config is invalid
 */
async function loadConfig(configLocation, processed = []) {
  if (!configLocation) {
    throw new Error('must specify config location')
  }
  let configData = null
  if (isAbsoluteURL(configLocation)) {
    const res = await fetch(configLocation)
    if (!res.ok) {
      throw new Error(
        `Failed to fetch config from ${configLocation} with status code ${res.status}`
      )
    }
    configData = await res.text()
  } else {
    configData = await fs.promises.readFile(configLocation, 'utf-8')
  }
  let ruleset
  // try parsing as JSON, then YAML
  try {
    ruleset = JSON.parse(configData)
  } catch (je) {
    try {
      ruleset = yaml.safeLoad(configData)
    } catch (ye) {
      throw new Error(
        `unable to parse ${configLocation} as either JSON (error: ${je}) or YAML (error: ${ye})`
      )
    }
  }
  // merge extended rulesets
  if (ruleset.extends) {
    processed.push(configLocation)
    if (processed.length > 20) {
      // safeguard against infinite loops. expose as flag one day if needed
      throw new Error('exceeded maximum 20 ruleset extensions')
    }
    let parent
    if (isAbsoluteURL(ruleset.extends) || isBase64(ruleset.extends)) {
      parent = ruleset.extends
    } else if (isAbsoluteURL(configLocation)) {
      parent = new URL(ruleset.extends, configLocation)
    } else {
      parent = path.resolve(path.dirname(configLocation), ruleset.extends)
    }
    if (!processed.includes(parent)) {
      let parentRuleset
      if (isBase64(parent)) {
        parentRuleset = await decodeConfig(parent, processed)
      } else {
        parentRuleset = await loadConfig(parent, processed)
      }
      ruleset = lodash.merge({}, parentRuleset, ruleset)
    }
  }
  return ruleset
}
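
// Illustrative usage of loadConfig (a sketch; the URL and local path are hypothetical):
//   const remote = await loadConfig('https://example.com/repolinter.json')
//   const local = await loadConfig('./repolint.yaml')
// When a ruleset declares "extends", the parent ruleset is loaded first and the child
// is merged on top of it with lodash.merge, so the child's values take precedence.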

/**
 * Validate a repolint configuration against a known JSON schema
 *
 * @memberof repolinter
 * @param {Object} config The configuration to validate
 * @returns {Promise<Object>}
 * An object indicating whether the config validation succeeded (passed),
 * along with an error message if it did not (error)
 */
async function validateConfig(config) {
  // compile the json schema
  const ajvProps = new Ajv()
  // find all json schemas
  const parsedRuleSchemas = Promise.all(
    Object.keys(Rules).map(rs =>
      jsonfile.readFile(
        path.resolve(__dirname, '../rules', `${rs}-config.json`)
      )
    )
  )
  const parsedFixSchemas = Promise.all(
    Object.keys(Fixes).map(f =>
      jsonfile.readFile(path.resolve(__dirname, '../fixes', `${f}-config.json`))
    )
  )
  const allSchemas = (
    await Promise.all([parsedFixSchemas, parsedRuleSchemas])
  ).reduce((a, c) => a.concat(c), [])
  // load them into the validator
  for (const schema of allSchemas) {
    ajvProps.addSchema(schema)
  }
  const validator = ajvProps.compile(
    await jsonfile.readFile(require.resolve('../rulesets/schema.json'))
  )
  // validate it against the supplied ruleset
  if (!validator(config)) {
    return {
      passed: false,
      error: `Configuration validation failed with errors: \n${validator.errors
        .map(
          e =>
            `\tconfiguration${e.dataPath} ${e.message}\n\nIt's likely the rulesetPath or rulesetUrl isn't configured correctly.`
        )
        .join('\n')}`
    }
  } else {
    return { passed: true }
  }
}
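
// Illustrative usage of validateConfig (a sketch; the config object is hypothetical):
//   const result = await validateConfig({ version: 2, axioms: {}, rules: {} })
//   if (!result.passed) console.error(result.error)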

/**
 * Parse a JSON object config (with repolinter.json structure) and return a list
 * of RuleInfo objects which will then be used to determine how to run the linter.
 *
 * @memberof repolinter
 * @param {Object} config The repolinter.json config
 * @returns {RuleInfo[]} The parsed rule data
 */
function parseConfig(config) {
  // check to see if the config has a version marker
  // parse modern config
  if (config.version === 2) {
    return Object.entries(config.rules).map(
      ([name, cfg]) =>
        new RuleInfo(
          name,
          cfg.level,
          cfg.where,
          cfg.rule.type,
          cfg.rule.options,
          cfg.fix && cfg.fix.type,
          cfg.fix && cfg.fix.options,
          cfg.policyInfo,
          cfg.policyUrl
        )
    )
  }
  // parse legacy config
  // old format of "axiom": { "rule-name:rule-type": ["level", { "configvalue": false }]}
  return (
    Object.entries(config.rules)
      // get axioms
      .map(([where, rules]) => {
        // get the rules in each axiom
        return Object.entries(rules).map(([rulename, configray]) => {
          const [name, type] = rulename.split(':')
          return new RuleInfo(
            name,
            configray[0],
            where === 'all' ? [] : [where],
            type || name,
            configray[1] || {}
          )
        })
      })
      .reduce((a, c) => a.concat(c))
  )
}
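
// Illustrative usage of parseConfig (a sketch; the rule name and options are hypothetical).
// A version 2 config such as
//   { version: 2, rules: { 'license-file-exists': { level: 'error', rule: { type: 'file-existence', options: { globsAny: ['LICENSE*'] } } } } }
// and a legacy config such as
//   { rules: { all: { 'license-file-exists:file-existence': ['error', { globsAny: ['LICENSE*'] }] } } }
// each parse into a single RuleInfo named 'license-file-exists' with rule type 'file-existence'.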

/**
 * Decodes a base64 encoded string into a config
 *
 * @param {string} encodedRuleSet A base64 encoded string that needs decoding
 * @param {array} [processed] List of config files already processed, used to prevent loops
 * @returns {Object} The loaded repolinter json config
 * @throws Will throw an error if unable to parse config or if config is invalid
 */
async function decodeConfig(encodedRuleSet, processed = []) {
  const configData = Buffer.from(encodedRuleSet, 'base64').toString()
  let ruleset
  // try parsing as JSON, then YAML
  try {
    ruleset = JSON.parse(configData)
  } catch (je) {
    try {
      ruleset = yaml.safeLoad(configData)
    } catch (ye) {
      throw new Error(
        `unable to parse ruleset as either JSON (error: ${je}) or YAML (error: ${ye})`
      )
    }
  }
  // merge extended rulesets
  if (ruleset.extends) {
    processed.push(encodedRuleSet)
    if (processed.length > 20) {
      // safeguard against infinite loops. expose as flag one day if needed
      throw new Error('exceeded maximum 20 ruleset extensions')
    }
    let parent
    let parentRuleset
    if (isAbsoluteURL(ruleset.extends)) {
      parent = ruleset.extends
    } else if (isBase64(ruleset.extends)) {
      parentRuleset = await decodeConfig(ruleset.extends, processed)
    }
    if (!processed.includes(parent)) {
      if (!isBase64(ruleset.extends)) {
        parentRuleset = await loadConfig(parent, processed)
      }
      ruleset = lodash.merge({}, parentRuleset, ruleset)
    }
  }
  return ruleset
}
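
// Illustrative usage of decodeConfig (a sketch; the encoded ruleset is hypothetical):
//   const encoded = Buffer.from('{"version": 2, "rules": {}}').toString('base64')
//   const ruleset = await decodeConfig(encoded) // => { version: 2, rules: {} }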

// isBase64 returns true if the given string is a valid base64-encoded string
function isBase64(str) {
  const base64regex = /^([0-9a-zA-Z+/]{4})*(([0-9a-zA-Z+/]{2}==)|([0-9a-zA-Z+/]{3}=))?$/
  return base64regex.test(str)
}
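
// Illustrative examples for isBase64 (hypothetical inputs):
//   isBase64('eyJydWxlcyI6e319') // => true (base64 for '{"rules":{}}')
//   isBase64('./repolint.json') // => false ('.' is not in the base64 alphabet)
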
module.exports.findConfig = findConfig
module.exports.isAbsoluteURL = isAbsoluteURL
module.exports.loadConfig = loadConfig
module.exports.decodeConfig = decodeConfig
module.exports.validateConfig = validateConfig
module.exports.parseConfig = parseConfig
module.exports.isBase64 = isBase64