#!/usr/bin/python3
# pylint: disable=C0103
# pylint: disable=C0114
import json
import os
import os.path
import shutil
import sys
from contextlib import contextmanager
from tempfile import NamedTemporaryFile, mkstemp
from git.exc import GitCommandError
import git
import in_place
import yaml
# Output strings for coloring
ENDC = '\033[0m'
ERROR = '\033[31m'
INFO = '\033[34m'
NOTICE = '\033[33m'
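#
# Configuration comes from the environment. Based on the variables referenced
# below, this script expects:
#   DEPLOY_KEY or RELEASE_TOKEN                 - credentials for pushing docs
#   INPUT_LIBRARY_NAME, INPUT_DOCS_BUILD_DIR    - library name and docs build dir
#   INPUT_SITE_URL                              - site_url written into mkdocs.yml
#   INPUT_GIT_USER_NAME, INPUT_GIT_USER_EMAIL   - git identity for commits
#   GITHUB_WORKSPACE, GITHUB_REPOSITORY         - provided by GitHub Actions
#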
if 'DEPLOY_KEY' in os.environ:
    deploy_key = os.environ['DEPLOY_KEY']
    token = None
elif 'RELEASE_TOKEN' in os.environ:
    deploy_key = None
    token = os.environ['RELEASE_TOKEN']
else:
    print(ERROR + "Either RELEASE_TOKEN or DEPLOY_KEY needs to be set in env. "
          + "Exiting." + ENDC)
    sys.exit(1)
library_name = os.environ['INPUT_LIBRARY_NAME']
docs_build_dir = os.environ['INPUT_DOCS_BUILD_DIR']
#
# make the documentation
#
print(INFO + "Running 'make docs'." + ENDC)
rslt = os.system('make docs')
if rslt != 0:
    print(ERROR + "'make docs' failed." + ENDC)
    sys.exit(1)
mkdocs_yml_file = os.path.join(docs_build_dir, 'mkdocs.yml')
docs_dir = os.path.join(docs_build_dir, 'docs')
index_file = os.path.join(docs_dir, 'index.md')
source_dir = os.path.join(docs_dir, 'src')
#
# remove any docs that aren't part of this library
# store information about removed entries so we can fix up links to them later
#
print(INFO + "Removing 'other docs'." + ENDC)
removed_docs = []
for f in os.listdir(docs_dir):
    if f in ('assets', 'src', 'index.md'):
        continue
    if not f.startswith(library_name + '-'):
        p = os.path.join(docs_dir, f)
        if os.path.isfile(p):
            os.remove(p)
        else:
            shutil.rmtree(p)
        removed_docs.append(f)
#
# remove any source code that isn't part of this library
#
print(INFO + "Removing 'other sources'." + ENDC)
for f in os.listdir(source_dir):
    if f != library_name:
        p = os.path.join(source_dir, f)
        if os.path.isfile(p):
            os.remove(p)
        else:
            shutil.rmtree(p)
#
# trim mkdocs.yml down to entries for our library
# record those packages for later reference
#
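# For illustration (the names here are hypothetical), with a library called
# 'semver' this keeps nav entries whose keys look like:
#   semver                    <- the library index entry
#   package semver            <- the top-level package
#   package semver/version    <- a subpackage
# and drops everything else.
#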
print(INFO + "Trimming mkdocs.yml." + ENDC)
mkdocs_yml = {}
packages = []
with open(mkdocs_yml_file, encoding="utf8") as infile:
    mkdocs_yml = yaml.load(infile, Loader=yaml.FullLoader)

nav = mkdocs_yml['nav']
new_nav = []
library_package_key = 'package ' + library_name
library_subpackage_key = 'package ' + library_name + '/'
for entry in nav:
    # each nav entry is a dict with a single key, but we don't know what that
    # key is in advance because, well, that's how the yaml package represents
    # what is conceptually a 2-element tuple or list
    for k in entry.keys():
        if k == library_name:
            # library index entry. keep it.
            new_nav.append(entry)
        if k == library_package_key \
                or k.startswith(library_subpackage_key):
            # package entry. keep it.
            # record the package name for later usage
            new_nav.append(entry)
            # the key will look like one of:
            #   package semver
            #   package semver/subpackage
            # strip the leading 'package ' prefix to get the package name
            packages.append(k[8:])
mkdocs_yml['nav'] = new_nav
# add a site url to fix some /asset links
# without this, the 404 page will be broken
mkdocs_yml['site_url'] = os.environ['INPUT_SITE_URL']

with open(mkdocs_yml_file, 'w', encoding="utf8") as outfile:
    yaml.dump(mkdocs_yml, outfile)
#
# trim docs/index.md down to entries for our library
#
print(INFO + "Trimming index.md." + ENDC)
with in_place.InPlace(index_file) as fp:
    for line in fp:
        if not line.startswith('*'):
            fp.write(line)
        else:
            for p in packages:
                if line.startswith('* [' + p + ']'):
                    fp.write(line)
#
# `make docs` at the start will have pulled down any needed dependencies that
# we might have. Here we are going to reach into the _corral directory to find
# the `corral.json` for any dependencies and get:
# - the package names
# - the location of the documentation_url
#
# This should eventually be incorporated into `corral` as a command
# or something similar. In the meantime, we are doing it "by hand" in this
# action as we work out how to accomplish everything that we want to.
#
# This could grab info about "extra" packages, as there is no guarantee that a
# dependency that was removed isn't still in the _corral directory if this
# code were used outside of the context of this action, which starts from a
# clean slate. That's not an edge condition to worry about at this time.
#
# The packages provided are listed in `corral.json` in an array with the key
# `packages`. Every package needs to be listed, including those that are
# "subpackages"; for example, we have package listings for `semver`,
# `semver/constraint`, and `semver/version`.
#
# The documentation_url for a given package is located in the `info` object
# in the `documentation_url` field.
#
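# For illustration only (the values are hypothetical), the parts of a
# dependency's corral.json that this script reads look roughly like:
#
#   {
#     "info": { "documentation_url": "https://example.org/docs/" },
#     "packages": ["semver", "semver/constraint", "semver/version"]
#   }
#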
documentation_urls = {}
if os.path.isdir("_corral"):
dependencies_dirs = os.listdir("_corral")
for dd in dependencies_dirs:
corral_file = "/".join(["_corral", dd, "corral.json"])
if not os.path.isfile(corral_file):
print(NOTICE + "No corral.json in " + dd + "." + ENDC)
continue
with open(corral_file, 'r', encoding="utf8") as corral_fd:
corral_data = json.load(corral_fd)
bundle_documentation_url = ""
try:
bundle_documentation_url = corral_data['info']['documentation_url']
except KeyError as e:
print(NOTICE + "No documentation_url in " + corral_file + "." \
+ ENDC)
try:
packages = corral_data['packages']
for p in packages:
documentation_urls[p] = bundle_documentation_url
except KeyError as e:
print(NOTICE + "No packages in " + corral_file + "." \
+ ENDC)
#
# Go through the markdown belonging to our package and replace missing entries
# with links to their external sites.
#
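# For example (the file name is hypothetical), a link to a removed doc such as
# 'semver-version-Version.md' maps back to the package 'semver/version' and is
# rewritten to '<documentation_url>/semver-version-Version/'.
#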
print(INFO + "Fixing links to code outside of our package." + ENDC)
for f in os.listdir(docs_dir):
    if f in ('assets', 'src'):
        continue
    p = os.path.join(docs_dir, f)
    print(INFO + "Fixing links in " + str(p) + "." + ENDC)
    with in_place.InPlace(p) as fp:
        for line in fp:
            for removed in removed_docs:
                if removed in line:
                    print(INFO + "Replacing link for " + removed + "." + ENDC)
                    # get the package name
                    s = removed.replace('.md', '')
                    s = s.split('-')
                    if len(s) > 1:
                        del s[-1]
                    package_name = '/'.join(s)
                    # if unknown package, we'll use the standard library
                    external_url = documentation_urls.get(package_name,
                        'https://stdlib.ponylang.io/')
                    # as the external url is input from users, it might not
                    # include a trailing slash. if not, generated urls will
                    # be broken.
                    # there's far more validation we could do here, but in
                    # terms of helping out a non-malicious user, this is the
                    # minimum.
                    if not external_url.endswith('/'):
                        external_url += '/'
                    as_html = removed.replace('.md', '')
                    link = external_url + as_html + "/"
                    line = line.replace(removed, link)
            fp.write(line)
#
# set up git and authentication, then run mkdocs to build and deploy the content
#
print(INFO + "Setting up git configuration." + ENDC)
git = git.Repo().git
git.config('--global', 'user.name', os.environ['INPUT_GIT_USER_NAME'])
git.config('--global', 'user.email', os.environ['INPUT_GIT_USER_EMAIL'])
git.config('--global', '--add', 'safe.directory', os.environ['GITHUB_WORKSPACE'])
if deploy_key:
    @contextmanager
    def git_auth():
        """
        Temporarily set SSH credentials for Git. To be used as a context manager.
        """
        (ssh_wrapper_fd, ssh_wrapper_path) = mkstemp(text=True)
        try:
            with NamedTemporaryFile() as identity_file:
                with open(ssh_wrapper_fd, "w", encoding="utf8") as ssh_wrapper_file:
                    ssh_wrapper_file.write('#!/bin/sh\n')
                    ssh_wrapper_file.write(
                        f'exec ssh -o StrictHostKeyChecking=no '
                        f'-i {identity_file.name} $@')
                os.chmod(ssh_wrapper_path, 0o500)
                identity_file.write(deploy_key.encode('utf-8'))
                if not deploy_key.endswith("\n"):
                    # identity_file is opened in binary mode, so write bytes
                    identity_file.write(b"\n")
                identity_file.flush()
                os.environ['GIT_SSH'] = ssh_wrapper_path
                try:
                    yield
                finally:
                    del os.environ['GIT_SSH']
        finally:
            os.unlink(ssh_wrapper_path)
    remote = f'git@github.com:{os.environ["GITHUB_REPOSITORY"]}'
else:
    @contextmanager
    def git_auth():
        """
        No-op context manager.
        """
        yield
    remote = f'https://{token}@github.com/{os.environ["GITHUB_REPOSITORY"]}'
git.remote('add', 'gh-token', remote)
with git_auth():
    git.fetch('gh-token')

    # reset will fail if the 'generated-documentation' branch doesn't yet exist.
    # That's fine, it will exist after our push. Just note the error and move on.
    try:
        git.reset('gh-token/generated-documentation')
    except GitCommandError:
        print(NOTICE + "Couldn't git reset generated-documentation." + ENDC)
        print(NOTICE + "This error is expected if the branch doesn't exist yet."
              + ENDC)

    print(INFO + "Running 'mkdocs gh-deploy'." + ENDC)
    os.chdir(docs_build_dir)
    rslt = os.system(
        'mkdocs gh-deploy --verbose --clean --remote-name gh-token '
        '--remote-branch generated-documentation')
    if rslt != 0:
        print(ERROR + "'mkdocs gh-deploy' failed." + ENDC)
        sys.exit(1)