"""
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
SPDX-License-Identifier: MIT-0
"""
import fnmatch
import json
import logging
import multiprocessing
import os
import jsonpointer
import jsonpatch
import cfnlint
from cfnlint.helpers import get_url_content, url_has_newer_version
from cfnlint.helpers import SPEC_REGIONS
import cfnlint.data.ExtendedSpecs
import cfnlint.data.AdditionalSpecs
LOGGER = logging.getLogger(__name__)


def update_resource_specs():
    """Update the resource specs for all known regions"""

    # Pool() uses the CPU count if no number of processes is specified.
    # Pool() only implements the context manager protocol from Python 3.3
    # onwards, so it will fail Python 2.7 style linting and raise AttributeError.
    try:
        # pylint: disable=not-context-manager
        with multiprocessing.Pool() as pool:
            pool.starmap(update_resource_spec, SPEC_REGIONS.items())
    except AttributeError:
        # Fall back to the long, slow sequential way
        for region, url in SPEC_REGIONS.items():
            update_resource_spec(region, url)
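
# Illustrative note (not part of the original module): SPEC_REGIONS in
# cfnlint.helpers maps region names to resource specification URLs, roughly
#   {'us-east-1': 'https://.../CloudFormationResourceSpecification.json', ...}
# so starmap() above calls update_resource_spec(region, url) for each pair.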


def update_resource_spec(region, url):
    """Update a single region's resource spec"""
    filename = os.path.join(os.path.dirname(cfnlint.__file__), 'data/CloudSpecs/%s.json' % region)
    multiprocessing_logger = multiprocessing.log_to_stderr()

    multiprocessing_logger.debug('Downloading template %s into %s', url, filename)

    # Check to see if we already have the latest version, and if so stop
    if not url_has_newer_version(url):
        return

    spec_content = get_url_content(url, caching=True)
    multiprocessing_logger.debug(
        'A more recent version of %s was found, and will be downloaded to %s', url, filename)
    spec = json.loads(spec_content)

    # Patch the spec before writing it out
    spec = patch_spec(spec, 'all')
    spec = patch_spec(spec, region)

    with open(filename, 'w') as f:
        json.dump(spec, f, indent=2, sort_keys=True, separators=(',', ': '))


def update_documentation(rules):
    """Generate the rules overview in docs/rules.md"""

    # Update the overview of all rules in the linter
    filename = 'docs/rules.md'

    # Sort rules by the Rule ID
    sorted_rules = sorted(rules, key=lambda obj: obj.id)

    data = []

    # Read the current file up to the Rules section; everything before that
    # point is static documentation.
    with open(filename, 'r') as original_file:
        line = original_file.readline()
        while line:
            data.append(line)
            if line == '## Rules\n':
                break
            line = original_file.readline()

    # Rebuild the file content
    with open(filename, 'w') as new_file:
        # Rewrite the static documentation
        for line in data:
            new_file.write(line)

        # Add the rules; the count includes the 3 hardcoded system errors below
        new_file.write(
            '(_This documentation is generated by running `cfn-lint --update-documentation`, do not alter this manually_)\n\n')
        new_file.write(
            'The following **{}** rules are applied by this linter:\n\n'.format(len(sorted_rules) + 3))
        new_file.write(
            '| Rule ID | Title | Description | Config<br />(Name:Type:Default) | Source | Tags |\n')
        new_file.write('| -------- | ----- | ----------- | ---------- | ------ | ---- |\n')

        rule_output = '| {0}<a name="{0}"></a> | {1} | {2} | {3} | [Source]({4}) | {5} |\n'

        # Add the system errors (hardcoded, not discovered like the other rules)
        for error in [cfnlint.rules.ParseError(), cfnlint.rules.TransformError(), cfnlint.rules.RuleError()]:
            tags = ','.join('`{0}`'.format(tag) for tag in error.tags)
            new_file.write(rule_output.format(error.id, error.shortdesc, error.description, '', '', tags))

        # Separate the experimental rules from the stable ones
        experimental_rules = []
        for rule in sorted_rules:
            if rule.experimental:
                experimental_rules.append(rule)
                continue
            tags = ','.join('`{0}`'.format(tag) for tag in rule.tags)
            config = '<br />'.join('{0}:{1}:{2}'.format(key, values.get('type'), values.get('default'))
                                   for key, values in rule.config_definition.items())
            new_file.write(rule_output.format(rule.id, rule.shortdesc,
                                              rule.description, config, rule.source_url, tags))

        # Output the experimental rules (if any); the header needs the same six
        # columns as rule_output, including Config
        if experimental_rules:
            new_file.write('### Experimental rules\n')
            new_file.write(
                '| Rule ID | Title | Description | Config<br />(Name:Type:Default) | Source | Tags |\n')
            new_file.write('| -------- | ----- | ----------- | ---------- | ------ | ---- |\n')
            for rule in experimental_rules:
                tags = ','.join('`{0}`'.format(tag) for tag in rule.tags)
                config = '<br />'.join('{0}:{1}:{2}'.format(key, values.get('type'), values.get('default'))
                                       for key, values in rule.config_definition.items())
                new_file.write(rule_output.format(rule.id, rule.shortdesc,
                                                  rule.description, config, rule.source_url, tags))
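
# For illustration (not emitted into docs/rules.md itself): each rule renders
# as one Markdown table row, along the lines of
#   | E0000<a name="E0000"></a> | Parsing | ... |  | [Source](...) | `base` |
# where the Config column is left empty for the hardcoded system errors.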


def patch_spec(content, region):
    """Patch the spec file"""
    LOGGER.info('Patching spec file for region "%s"', region)

    append_dir = os.path.join(os.path.dirname(__file__), 'data', 'ExtendedSpecs', region)
    for dirpath, _, filenames in os.walk(append_dir):
        filenames.sort()
        for filename in fnmatch.filter(filenames, '*.json'):
            file_path = os.path.basename(filename)
            module = dirpath.replace(append_dir, region).replace(os.path.sep, '.')
            LOGGER.info('Processing %s/%s', module, file_path)
            all_patches = jsonpatch.JsonPatch(cfnlint.helpers.load_resource(
                'cfnlint.data.ExtendedSpecs.{}'.format(module), file_path))

            # Process the generic patches 1 by 1 so we can "ignore" failed ones
            for all_patch in all_patches:
                try:
                    jsonpatch.JsonPatch([all_patch]).apply(content, in_place=True)
                except jsonpatch.JsonPatchConflict:
                    LOGGER.debug('Patch (%s) not applied in region %s', all_patch, region)
                except jsonpointer.JsonPointerException:
                    # Debug as the parent element isn't supported in the region
                    LOGGER.debug('Parent element not found for patch (%s) in region %s',
                                 all_patch, region)

    return content
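
# For illustration: the ExtendedSpecs patch files consumed above are standard
# RFC 6902 JSON Patch documents, e.g. something along the lines of
#   [{"op": "add",
#     "path": "/PropertyTypes/AWS::Some::Type/Properties/SomeProp/Required",
#     "value": true}]
# (AWS::Some::Type and SomeProp are placeholder names, not real spec entries.)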


def update_iam_policies():
    """Update the IAM policies file"""
    url = 'https://awspolicygen.s3.amazonaws.com/js/policies.js'
    filename = os.path.join(
        os.path.dirname(cfnlint.data.AdditionalSpecs.__file__),
        'Policies.json')
    LOGGER.debug('Downloading policies %s into %s', url, filename)

    # The policy generator ships a JavaScript file; the JSON payload follows
    # the 'app.PolicyEditorConfig=' assignment
    content = get_url_content(url)
    content = content.split('app.PolicyEditorConfig=')[1]
    content = json.loads(content)

    # Add actions the policy generator is missing
    content['serviceMap']['Manage Amazon API Gateway']['Actions'].extend(
        ['HEAD', 'OPTIONS'])
    content['serviceMap']['Amazon Kinesis Video Streams']['Actions'].append(
        'StartStreamEncryption')

    with open(filename, 'w') as f:
        json.dump(content, f, indent=2, sort_keys=True, separators=(',', ': '))
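

# A minimal usage sketch, not part of the original module: cfn-lint normally
# drives these helpers via its CLI (e.g. `cfn-lint --update-specs` or
# `cfn-lint --update-documentation`), but they can be run directly like this.
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    # Refresh the per-region resource specs and the IAM policy list; pass a
    # list of rule instances to update_documentation() to regenerate the docs.
    update_resource_specs()
    update_iam_policies()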