#!/usr/bin/env python
# also dynamically imports ansible in code
import argparse
import configparser
import contextlib
import functools
import glob
import importlib.util
import itertools
import logging
import os
import re
import shutil
import subprocess
import sys
import textwrap
import yaml
from collections import defaultdict, Counter
from collections.abc import Mapping
from copy import deepcopy
from string import Template
from typing import Any, Dict, Iterable, Set, Union
from ansible import constants as C
from ansible.errors import AnsibleParserError
from ansible.parsing.mod_args import ModuleArgsParser
from ansible.parsing.yaml.dumper import AnsibleDumper
from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.utils.collection_loader import AnsibleCollectionLoader
from ansible.vars.reserved import is_reserved_name
import logzero
from logzero import logger
from baron.parser import ParsingError
import redbaron
import backoff
from gh import GitHubOrgClient
from rsa_utils import RSAKey
from template_utils import render_template_into
# CONSTANTS/SETTINGS
# https://github.com/ansible/ansible/blob/100fe52860f45238ee8ca9e3019d1129ad043c68/hacking/fix_test_syntax.py#L62
FILTER_RE = re.compile(r'((.+?)\s*([\w \.\'"]+)(\s*)\|(\s*)(\w+))')
TEST_RE = re.compile(r'((.+?)\s*([\w \.\'"]+)(\s*)is(\s*)(\w+))')
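# Illustrative matches for the two borrowed regexes (the sample strings below
# are mine, not from the upstream script):
#   FILTER_RE catches Jinja2 filter syntax, e.g.   when: result.stdout | search('ok')
#   TEST_RE catches Jinja2 test syntax, e.g.       when: result is changed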
DEFAULT_VERSION = '0.1.0'
DEVEL_URL = 'https://github.com/ansible/ansible.git'
DEVEL_BRANCH = 'devel'
ALL_THE_FILES = set()
CLEANUP_FILES = set(['contrib/README.md'])
COLLECTION_NAMESPACE = 'test_migrate_ns'
PLUGIN_EXCEPTION_PATHS = {
    'modules': 'lib/ansible/modules',
    'module_utils': 'lib/ansible/module_utils',
    'inventory_scripts': 'contrib/inventory',
    'vault': 'contrib/vault',
    'unit': 'test/units',
    'integration': 'test/integration/targets',
}
PLUGIN_DEST_EXCEPTION_PATHS = {
    'inventory_scripts': 'scripts/inventory',
    'vault': 'scripts/vault',
    'unit': 'tests/unit',
    'integration': 'tests/integration/targets',
}
COLLECTION_SKIP_REWRITE = ('_core',)
RAW_STR_TMPL = "r'''{str_val}'''"
STR_TMPL = "'''{str_val}'''"
BAD_EXT = frozenset({'.pyo', '.pyc'})
VALID_SPEC_ENTRIES = frozenset({
'action',
'become',
'cache',
'callback',
'cliconf',
'connection',
'doc_fragments',
'filter',
'httpapi',
'integration',
'inventory',
'lookup',
'module_utils',
'modules',
'netconf',
'shell',
'strategy',
'terminal',
'test',
'unit',
'vars',
'vault',
'inventory_scripts',
})
NOT_PLUGINS = frozenset({'inventory_scripts', 'unit', 'integration', 'vault'})
ALWAYS_PRESERVE_SUBDIRS = frozenset({'module_utils', 'unit', 'integration'})
VARNAMES_TO_PLUGIN_MAP = {
'ansible_become_method': 'become',
'ansible_connection': 'connection',
'ansible_shell_type': 'shell',
}
KEYWORDS_TO_PLUGIN_MAP = {
'become_method': 'become',
'cache_plugin': 'cache',
'connection': 'connection',
'plugin': 'inventory',
'strategy': 'strategy',
}
REWRITE_CLASS_PROPERTY_MAP = {
'BecomeModule': 'name',
'CallbackModule': 'CALLBACK_NAME',
'Connection': 'transport',
'InventoryModule': 'NAME',
}
REWRITE_CLASS_PROPERTY_PLUGINS = {
'become',
'callback',
'connection',
'inventory',
}
VARDIR = os.environ.get('GRAVITY_VAR_DIR', '.cache')
LOGFILE = os.path.join(VARDIR, 'errors.log')
ALIAS = {}
DEPRECATE = {}
REMOVE = defaultdict(lambda: defaultdict(set))
core = {}
manual_check = defaultdict(list)
### CLASSES
class UnmovablePathStr(str): ...
### FUNCTION DEFS
def log_subprocess_failure(func):
def func_wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except subprocess.CalledProcessError as proc_err:
proc_id = hex(id(proc_err))
logger.error(
'[%s] Running "%s" failed with return code %s',
proc_id,
proc_err.cmd,
proc_err.returncode,
)
logger.error('[%s] stderr:', proc_id)
logger.error(proc_err.stderr)
logger.error('[%s] stdout:', proc_id)
logger.error(proc_err.stdout)
raise
return func_wrapper
def _is_unexpected_error(proc_err):
proc_id = hex(id(proc_err))
err_out = proc_err.stderr
std_out = proc_err.stdout
legit_errors = {
'ERROR: Permission to ',
' denied to deploy key',
'ERROR: Unknown public SSH key.',
'fatal: Could not read from remote repository.',
}
should_retry = any(msg in err_out for msg in legit_errors)
logger.info(
'[%s] Expected failure: should retry' if should_retry
else '[%s] Unexpected failure: should fail loudly',
proc_id,
)
logger.error('[%s] stderr:', proc_id)
logger.error(err_out)
logger.error('[%s] stdout:', proc_id)
logger.error(std_out)
return not should_retry
retry_on_permission_denied = backoff.on_exception( # pylint: disable=invalid-name
backoff.expo, subprocess.CalledProcessError,
max_tries=8, max_time=15, jitter=backoff.full_jitter,
giveup=_is_unexpected_error,
)
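# Usage sketch (hypothetical wrapper, mirroring ensure_cmd_succeeded below):
# a decorated call is retried with exponential backoff and full jitter on
# subprocess.CalledProcessError, but gives up early whenever
# _is_unexpected_error() decides the failure is not one of the known
# transient SSH/permission errors:
#
#     @log_subprocess_failure
#     @retry_on_permission_denied
#     def push_branch(ssh_agent, cwd):
#         ssh_agent.check_output(('git', 'push'), stderr=subprocess.PIPE, cwd=cwd, text=True)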
@log_subprocess_failure
@retry_on_permission_denied
def ensure_cmd_succeeded(ssh_agent, cmd, cwd):
"""Perform cmd in cwd dir using a subprocess wrapper."""
logger.info('Executing "%s"...', ' '.join(cmd))
cmd_out = ssh_agent.check_output(cmd, stderr=subprocess.PIPE, cwd=cwd, text=True)
print(cmd_out)
def add_core(ptype, name):
global core
if ptype not in core:
core[ptype] = set()
core[ptype].add(name)
def add_manual_check(key, value, filename):
global manual_check
manual_check[filename].append((key, value))
def checkout_repo(git_url: str, checkout_path: str, *, refresh: Union[bool, str] = False) -> Set[str]:
"""Fetch and optionally refresh the repo."""
if not os.path.exists(checkout_path):
git_clone_cmd = 'git', 'clone', git_url, checkout_path
logger.info('Running "%s"', git_clone_cmd)
subprocess.check_call(git_clone_cmd)
target_ref = DEVEL_BRANCH if refresh is True else refresh
if refresh:
subprocess.check_call(('git', 'fetch', 'origin'), cwd=checkout_path)
subprocess.check_call(('git', 'checkout', DEVEL_BRANCH), cwd=checkout_path)
subprocess.check_call(('git', 'pull', '--rebase'), cwd=checkout_path)
if target_ref:
logger.info('Ensuring that "%s" is checked out', target_ref)
git_checkout_cmd = 'git', 'checkout', target_ref
logger.info('Running "%s"', git_checkout_cmd)
subprocess.check_call(git_checkout_cmd, cwd=checkout_path)
else:
logger.info('Skipping refreshing the cached Core')
return set(
f.strip()
for f in subprocess.check_output(
('git', '-c', 'core.quotepath=false', 'ls-tree', '--full-tree', '-r', '--name-only', 'HEAD'),
text=True, cwd=checkout_path,
).split('\n')
if f.strip()
)
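# Usage sketch ('v2.9.0' is an illustrative ref, not taken from this repo):
#   checkout_repo(DEVEL_URL, '.cache/releases/devel.git', refresh=True)      # track devel
#   checkout_repo(DEVEL_URL, '.cache/releases/devel.git', refresh='v2.9.0')  # pin to a ref
# Both calls return the set of git-tracked file paths at HEAD.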
### FILE utils
def alias(namespace, collection, ptype, plugin, source, spec):
    global ALIAS
    if namespace not in ALIAS:
        ALIAS[namespace] = {}
    if collection not in ALIAS[namespace]:
        ALIAS[namespace][collection] = {}
    if ptype not in ALIAS[namespace][collection]:
        ALIAS[namespace][collection][ptype] = {}
    source_namespace, source_collection = get_plugin_collection(source, ptype, spec)
    ALIAS[namespace][collection][ptype][plugin] = {'redirect': get_plugin_fqcn(source_namespace, source_collection, source)}
def deprecate(namespace, collection, ptype, plugin):
global DEPRECATE
    if namespace not in DEPRECATE:
        DEPRECATE[namespace] = {}
    if collection not in DEPRECATE[namespace]:
        DEPRECATE[namespace][collection] = {}
    if ptype not in DEPRECATE[namespace][collection]:
DEPRECATE[namespace][collection][ptype] = []
DEPRECATE[namespace][collection][ptype].append(plugin)
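# After a few alias()/deprecate() calls the registries look like, e.g.
# (names below are illustrative):
#   DEPRECATE == {'ansible': {'community': {'modules': ['foo', 'bar']}}}
#   ALIAS == {'ansible': {'community': {'modules': {'foo': {'redirect': 'ns.coll.foo'}}}}}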
def remove(path, namespace, collection):
    global REMOVE
    # guard: this particular __init__.py must never end up scheduled for removal
    if path == '.cache/releases/devel.git/test/units/plugins/connection/__init__.py':
        raise Exception
    REMOVE[namespace][collection].add(path)
def write_core_routing(resolved, checkout_dir):
routing_file = os.path.join('lib/ansible/config', 'routing.yml')
write_yaml_into_file_as_is(os.path.join(checkout_dir, routing_file), {'plugin_routing': resolved})
subprocess.check_call(('git', 'add', routing_file), cwd=checkout_dir)
subprocess.check_call(('git', 'commit', '-m', 'Added routing file', '--allow-empty'), cwd=checkout_dir)
def write_collection_routing(coll_dir, namespace, collection):
global ALIAS, DEPRECATE
write = False
routing = {'plugin_routing': {}}
meta = os.path.join(coll_dir, namespace, collection, 'meta')
    if namespace in ALIAS:
        if collection in ALIAS[namespace]:
            for ptype in ALIAS[namespace][collection].keys():
                if ptype not in routing['plugin_routing']:
                    routing['plugin_routing'][ptype] = {}
                for plugin in ALIAS[namespace][collection][ptype].keys():
                    if not write:
                        write = True
                    # ALIAS already stores {'redirect': <fqcn>} entries
                    routing['plugin_routing'][ptype][plugin] = ALIAS[namespace][collection][ptype][plugin]
if namespace in DEPRECATE:
if collection in DEPRECATE[namespace]:
for ptype in DEPRECATE[namespace][collection].keys():
if ptype not in routing['plugin_routing']:
routing['plugin_routing'][ptype] = {}
for plugin in DEPRECATE[namespace][collection][ptype]:
if not write:
write = True
if plugin not in routing['plugin_routing'][ptype]:
routing['plugin_routing'][ptype][plugin] = {}
routing['plugin_routing'][ptype][plugin].update({'deprecation': {'removal_date': 'TBD', 'warning_text': 'see plugin documentation for details'}})
if write:
if not os.path.exists(meta):
os.mkdir(meta)
route_file = os.path.join(meta, 'routing.yml')
write_yaml_into_file_as_is(route_file, routing)
def cleanup_targets(checkout_path):
dep_targets = {}
targets_dir = os.path.join(checkout_path, 'test/integration/targets/')
for target in os.listdir(targets_dir):
target_dir = os.path.join(targets_dir, target)
if not os.path.isdir(target_dir):
continue
target_relpath = os.path.relpath(target_dir, checkout_path)
aliases_file = os.path.join(target_dir, 'aliases')
if os.path.basename(target_dir).startswith(('setup_', 'prepare_')):
if target_relpath not in dep_targets:
dep_targets[target_relpath] = set()
elif os.path.exists(aliases_file):
for line in read_text_from_file(aliases_file).splitlines():
if line.strip() == 'hidden':
if target_relpath not in dep_targets:
dep_targets[target_relpath] = set()
break
for target in os.listdir(targets_dir):
target_dir = os.path.join(targets_dir, target)
if not os.path.isdir(target_dir):
continue
target_relpath = os.path.relpath(target_dir, checkout_path)
deps = process_integration_tests_deps(checkout_path, os.path.join(targets_dir, target), log=False)
for dep in deps:
dep_relpath = os.path.relpath(dep[0], checkout_path)
if dep_relpath in dep_targets:
dep_targets[dep_relpath].add(target_relpath)
to_remove = set()
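    # Fixed-point iteration: a setup/hidden target may be removed once every
    # target depending on it is itself slated for removal; loop until no new
    # removals are discovered.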
while True:
added = False
for target, used_by in dep_targets.items():
if not used_by or used_by.issubset(to_remove):
if target not in to_remove:
to_remove.add(target)
added = True
if not added:
break
if to_remove:
subprocess.check_call(('git', 'rm', '-r', '-f', *to_remove), cwd=checkout_path)
def actually_remove(checkout_path):
global REMOVE
coll_paths = {
f'{namespace}.{collection}': paths
for namespace, coll_map in REMOVE.items()
for collection, paths in coll_map.items()
}
paths_counter = Counter(itertools.chain.from_iterable(coll_paths.values()))
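    # A path may be claimed by more than one collection; the reference counter
    # ensures a file is only git-rm'ed once, when the last collection claiming
    # it is processed.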
for coll_fqdn, paths in coll_paths.items():
actually_remove_from(coll_fqdn, paths, paths_counter, checkout_path)
# cleanup integration tests targets
cleanup_targets(checkout_path)
# other cleanup
if CLEANUP_FILES:
subprocess.check_call(('git', 'rm', '-f', *CLEANUP_FILES), cwd=checkout_path)
    # clean up directories whose only remaining content is an empty __init__.py
for plugin_type in VALID_SPEC_ENTRIES:
mod_root = PLUGIN_EXCEPTION_PATHS.get(plugin_type, os.path.join('lib', 'ansible', 'plugins', plugin_type))
removed = []
for emptydir in os.walk(os.path.join(checkout_path, mod_root), topdown=False):
if '__init__.py' in emptydir[2] and len(emptydir[2]) == 1:
# os.walk caches top level entries, check if we already removed these subdirs
dirnum = len(emptydir[1])
if dirnum != 0:
for other in emptydir[1]:
full = os.path.join(emptydir[0], other)
if full in removed:
dirnum -= 1
# if only init in dir and init is 0 bytes, remove
if dirnum <= 0 and os.stat(os.path.join(emptydir[0], '__init__.py')).st_size == 0:
reldir = emptydir[0].replace(checkout_path, '')
init_file = os.path.join(reldir.lstrip('/'), '__init__.py')
# remove init from repo
subprocess.check_call(('git', 'rm', init_file), cwd=checkout_path)
removed.append(emptydir[0])
    subprocess.check_call(('git', 'commit', '-m', 'migration final cleanup', '--allow-empty'), cwd=checkout_path)
def actually_remove_from(coll_fqdn, paths, paths_counter, checkout_path):
    # load test/sanity/ignore.txt; files removed below must be dropped from ignore.txt too
sanity_ignore = read_lines_from_file(os.path.join(checkout_path, 'test/sanity/ignore.txt'))
new_sanity_ignore = defaultdict(list)
for ignore in sanity_ignore:
values = ignore.split(' ', 1)
new_sanity_ignore[values[0]].append(values[1])
paths_to_delete = set()
# actually remove files we marked for removal
for path in paths:
actual_devel_path = os.path.relpath(path, checkout_path)
paths_counter[path] -= 1
if paths_counter[path] == 0:
paths_to_delete.add(actual_devel_path)
new_sanity_ignore.pop(actual_devel_path, None)
subprocess.check_call(('git', 'rm', '-f', *paths_to_delete), cwd=checkout_path)
# save modified sanity/ignore.txt
res = ''
for filename, values in new_sanity_ignore.items():
for value in values:
# value contains '\n' which is preserved from the original file
res += '%s %s' % (filename, value)
write_text_into_file(os.path.join(checkout_path, 'test/sanity/ignore.txt'), res)
subprocess.check_call(('git', 'add', 'test/sanity/ignore.txt'), cwd=checkout_path)
# commit the changes
subprocess.check_call(('git', 'commit', '-m', f'Migrated to {coll_fqdn}', '--allow-empty'), cwd=checkout_path)
def read_yaml_file(path):
with open(path, 'rb') as yaml_file:
return yaml.safe_load(yaml_file)
def read_ansible_yaml_file(path):
with open(path, 'rb') as yaml_file:
return AnsibleLoader(yaml_file.read(), file_name=path).get_single_data()
def write_yaml_into_file_as_is(path, data):
yaml_text = yaml.dump(data, allow_unicode=True, default_flow_style=False, sort_keys=False, width=1024)
write_text_into_file(path, yaml_text)
def write_ansible_yaml_into_file_as_is(path, data):
yaml_text = yaml.dump(data, Dumper=AnsibleDumper, allow_unicode=True, default_flow_style=False, sort_keys=False, width=1024)
write_text_into_file(path, yaml_text)
def read_text_from_file(path):
with open(path, 'r') as f:
return f.read()
def read_lines_from_file(path):
with open(path, 'r') as f:
return f.readlines()
def write_text_into_file(path, text):
with open(path, 'w') as f:
return f.write(text)
@contextlib.contextmanager
def working_directory(target_dir):
"""Temporary change dir to the target and change back on exit."""
current_working_dir = os.getcwd()
os.chdir(target_dir)
try:
yield os.getcwd()
finally:
os.chdir(current_working_dir)
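# Usage sketch (some_dir is a placeholder):
#   with working_directory(some_dir) as cwd:
#       ...  # the process cwd is some_dir inside the block
#   # the previous working directory is restored on exit, even after an exception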
### SPEC UTILS
def load_spec_file(spec_file):
spec = read_yaml_file(spec_file) # TODO: capture yamlerror?
if not isinstance(spec, Mapping):
sys.exit("Invalid format for spec file, expected a dictionary and got %s" % type(spec))
elif not spec:
sys.exit("Cannot use spec file, ended up with empty spec")
return spec
def resolve_spec(spec, checkoutdir):
# TODO: add negation? entry: x/* \n entry: !x/base.py
files_to_collections = defaultdict(list)
for ns in spec.keys():
for coll in spec[ns].keys():
for ptype in spec[ns][coll].keys():
if ptype == '_options':
                    # not a valid plugin type, but hijacked to hold per-collection options
continue
if ptype not in VALID_SPEC_ENTRIES:
raise Exception('Invalid plugin type: %s, expected one of %s' % (ptype, VALID_SPEC_ENTRIES))
plugin_base = os.path.join(checkoutdir, PLUGIN_EXCEPTION_PATHS.get(ptype, os.path.join('lib', 'ansible', 'plugins', ptype)))
replace_base = '%s/' % plugin_base
new_ptype = []
for entry in spec[ns][coll][ptype]:
if r'*' in entry or r'?' in entry:
files = glob.glob(os.path.join(plugin_base, entry))
if not files:
raise Exception('No matches for plugin type: %s, entry: %s. Searched in %s.' % (ptype, entry, os.path.join(plugin_base, entry)))
for fname in files:
                            if (ptype not in NOT_PLUGINS and fname.endswith('__init__.py')) or not os.path.isfile(fname):
continue
fname = fname.replace(replace_base, '')
new_ptype.append(fname)
else:
new_ptype.append(entry)
spec[ns][coll][ptype] = new_ptype
# NOTE now that spec for plugins of ptype has been finalized, we can iterate again and add files for the dupe check
for entry in spec[ns][coll][ptype]:
files_to_collections[os.path.join(plugin_base, entry)].append(coll)
def dir_to_path(path):
if not (os.path.isdir(path) and os.path.exists(path)):
return path
return next(
subpath
for subpath in glob.glob(os.path.join(path, '**'), recursive=True)
if not os.path.isdir(subpath)
)
logger.info(
'Verifying that all %s '
'that are scheduled for migration '
'to %s.%s exist...',
ptype, ns, coll,
)
assert_migrating_git_tracked_resources(
os.path.relpath(dir_to_path(p_abs), checkoutdir)
for p in spec[ns][coll][ptype]
for p_abs in (
glob.glob(os.path.join(plugin_base, p))
or [os.path.join(plugin_base, p)]
)
)
logger.info(
'All %s entries for %s.%s '
'are valid',
ptype, ns, coll,
)
dupes = {k: v for k, v in files_to_collections.items() if len(v) > 1}
if dupes:
err_msg = 'Each plugin needs to be assigned to one collection only. The following files are assigned to multiple collections:\n' + yaml.dump(dupes)
logger.error(err_msg)
raise RuntimeError(err_msg)
### GET_PLUGINS UTILS
def get_plugin_collection(plugin_name, plugin_type, spec):
for ns in spec.keys():
for collection in spec[ns].keys():
if spec[ns][collection]: # avoid empty collections
plugins = spec[ns][collection].get(plugin_type, [])
if plugin_name + '.py' in plugins:
return ns, collection
    # keep track of what stays in core
    plugin_name = plugin_name.replace('/', '.')
    logger.debug('Assuming "%s.%s" stays in core', plugin_type, plugin_name)
    add_core(plugin_type, plugin_name)
raise LookupError('Could not find "%s" named "%s" in any collection in the spec' % (plugin_type, plugin_name))
def get_plugins_from_collection(ns, collection, plugin_type, spec):
assert ns in spec
assert collection in spec[ns]
return [plugin.rsplit('/')[-1][:-3] for plugin in spec[ns][collection].get(plugin_type, [])]
def get_plugin_fqcn(namespace, collection, plugin_name):
    # This is how the FQCN is assembled right before rewriting.
    # A leading '_' is stripped from the collection name if present: it is not
    # part of the collection name, only an internal marker for 'info collection only'.
return '%s.%s.%s' % (namespace, collection.lstrip('_'), plugin_name)
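# e.g. (illustrative arguments):
#   get_plugin_fqcn('ansible', '_community', 'ping') == 'ansible.community.ping'
#   get_plugin_fqcn('ansible', 'netcommon', 'cli_config') == 'ansible.netcommon.cli_config'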
def get_rewritable_collections(namespace, spec):
return (collection for collection in spec[namespace].keys() if collection not in COLLECTION_SKIP_REWRITE)
### REWRITE FUNCTIONS
def rewrite_class_property(mod_fst, collection, namespace, filename):
if all(f'plugins/{p}' not in filename for p in REWRITE_CLASS_PROPERTY_PLUGINS):
return
for class_name, property_name in REWRITE_CLASS_PROPERTY_MAP.items():
try:
val = (
mod_fst.
find('class', name=class_name).
find('name', value=property_name).parent.value
)
except AttributeError:
continue
try:
val.value = "'%s'" % get_plugin_fqcn(namespace, collection, val.to_python())
except ValueError:
# so this might be something like:
# transport = CONNECTION_TRANSPORT
add_manual_check(property_name, val.value, filename)
def normalize_implicit_relative_imports_in_unit_tests(mod_fst, file_path):
"""Locate implicit imports and prepend them with dot."""
cur_pkg_dir = os.path.dirname(file_path)
make_pkg_subpath = functools.partial(os.path.join, cur_pkg_dir)
for imp in mod_fst.find_all(('from_import', )):
if not imp.value: # from . import something
continue
*pkg_path_parts, pkg_or_mod = tuple(t.value for t in imp.value)
if ((pkg_path_parts and not pkg_path_parts[0]) or (not pkg_path_parts and pkg_or_mod == '__future__')): # import is already absolute
continue
relative_mod_path = make_pkg_subpath(*pkg_path_parts, f'{pkg_or_mod}.py')
        if relative_mod_path == file_path:  # a module cannot import itself, so this must refer to some other module
continue
relative_pkg_init_path = make_pkg_subpath(*pkg_path_parts, pkg_or_mod, '__init__.py')
possible_relative_targets = {relative_mod_path, relative_pkg_init_path}
relative_imp_target_exists = any(os.path.exists(p) for p in possible_relative_targets)
if not relative_imp_target_exists:
continue
# turn implicit relative import into an explicit absolute import
# that is relative to the current module
imp.value = f'.{imp.value.dumps()!s}'
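# Illustrative effect, assuming a hypothetical test module at
# tests/unit/modules/test_foo.py with a sibling common.py:
#   before: from common import utils
#   after:  from .common import utils
# __future__ imports and already-absolute imports are left untouched.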
def rewrite_unit_tests_patch(mod_fst, collection, spec, namespace, args, options):
plugins_path = ('ansible_collections', namespace, collection, 'plugins')
tests_path = ('ansible_collections', namespace, collection, 'tests')
unit_tests_path = tests_path + ('unit', )
import_map = {
('ansible', 'modules'): plugins_path + ('modules', ),
('ansible', 'module_utils'): plugins_path + ('module_utils', ),
('ansible', 'plugins'): plugins_path,
('units', ): unit_tests_path,
}
patches = (
mod_fst('string',
lambda x:
'ansible.modules' in x.dumps() or
'ansible.module_utils' in x.dumps() or
'ansible.plugins' in x.dumps() or
'units' in x.dumps()
)
)
deps = []
for el in patches:
val = el.to_python().split('.')
for old, new in import_map.items():
token_length = len(old)
if tuple(val[:token_length]) != old:
continue
if val[0] == 'units':
val[:token_length] = new
el.value = "'%s'" % '.'.join(val)
continue
elif val[1] in ('modules', 'module_utils'):
plugin_type = val[1]
# 'ansible.modules.storage.netapp.na_ontap_nvme.NetAppONTAPNVMe.create_nvme'
# look for module name
for i in (len(val), -1, -2):
plugin_name = '/'.join(val[2:i])
try:
found_ns, found_coll = get_plugin_collection(plugin_name, plugin_type, spec)
break
except LookupError:
continue
else:
continue
elif val[1] == 'plugins':
# 'ansible.plugins.lookup.manifold.open_url'
try:
plugin_type = val[2]
plugin_name = val[3]
except IndexError:
# Not enough information to search for the plugin, safe to assume it's not for the rewrite
# e.g. 'ansible.plugins.inventory'
continue
try:
found_ns, found_coll = get_plugin_collection(plugin_name, plugin_type, spec)
except LookupError:
continue
else:
continue
if found_coll in COLLECTION_SKIP_REWRITE:
continue
if args.fail_on_core_rewrite:
raise RuntimeError('Rewriting to %s' % '.'.join(val))
val[:token_length] = new
if plugin_type == 'modules' and not (args.preserve_module_subdirs or options.get('flatmap')):
plugin_subdirs_len = len(plugin_name.split('/')[:-1])
new_len = len(new)
del val[new_len:new_len+plugin_subdirs_len]
if (found_ns, found_coll) != (namespace, collection):
val[1] = found_ns
val[2] = found_coll
deps.append((found_ns, found_coll))
el.value = "'%s'" % '.'.join(val)
return deps
def rewrite_docs_fragments(docs, collection, spec, namespace, args):
old_fragments = docs.get('extends_documentation_fragment', [])
if not old_fragments:
return [], [], []
if not isinstance(old_fragments, list):
old_fragments = [old_fragments]
deps = []
new_fragments = []
for fragment in old_fragments:
# some doc_fragments use subsections (e.g. vmware.vcenter_documentation)
fragment_name, _dot, _rest = fragment.partition('.')
try:
fragment_namespace, fragment_collection = get_plugin_collection(fragment_name, 'doc_fragments', spec)
except LookupError:
# plugin not in spec, assuming it stays in core and leaving as is
new_fragments.append(fragment)
continue
if fragment_collection in COLLECTION_SKIP_REWRITE:
# skip rewrite
new_fragments.append(fragment)
continue
if fragment_collection.startswith('_'):
fragment_collection = fragment_collection[1:]
new_fragment = get_plugin_fqcn(fragment_namespace, fragment_collection, fragment)
if args.fail_on_core_rewrite:
raise RuntimeError('Rewriting to %s' % new_fragment)
new_fragments.append(new_fragment)
if (namespace, collection) != (fragment_namespace, fragment_collection):
deps.append((fragment_namespace, fragment_collection))
return deps, old_fragments, new_fragments
def rewrite_plugin_documentation(mod_fst, collection, spec, namespace, args):
try:
doc_val = (
mod_fst.
find_all('assignment').
find('name', value='DOCUMENTATION').
parent.
value
)
except AttributeError:
raise LookupError('No DOCUMENTATION found')
docs_parsed_dict = yaml.safe_load(doc_val.to_python().strip('\n'))
docs_parsed_list = doc_val.to_python().split('\n')
# docs fragments prep
deps, old_fragments, new_fragments = rewrite_docs_fragments(docs_parsed_dict, collection, spec, namespace, args)
# version_added prep
options = docs_parsed_dict.get('options', {})
if not isinstance(options, Mapping):
# lib/ansible/plugins/doc_fragments/emc.py
options = {}
option_name_empty = []
for name, data in options.items():
if len(data.keys()) == 1 and data.get('version_added', False):
option_name_empty.append(name)
# seealso prep
seealso_rewrite_map = {}
for seealso in docs_parsed_dict.get('seealso', []):
module_name = seealso.get('module')
if not module_name:
continue
try:
for ns in spec.keys():
for coll in get_rewritable_collections(ns, spec):
if collection == coll:
                        # https://github.com/ansible-community/collection_migration/issues/156
continue
if module_name not in get_plugins_from_collection(ns, coll, 'modules', spec):
continue
new_module_name = get_plugin_fqcn(ns, coll, module_name)
msg = 'Rewriting to %s' % new_module_name
if args.fail_on_core_rewrite:
raise RuntimeError(msg)
seealso_rewrite_map[module_name] = new_module_name
except LookupError:
continue
    # https://github.com/ansible-community/collection_migration/issues/81
    # unfortunately, re-dumping DOCUMENTATION through PyYAML produced syntax errors when running the sanity tests;
# to prevent that, use the original string split into list for rewrites
new_docs = []
in_extends = False
in_seealso = False
seealso_indent = -1
changed = False
for line in docs_parsed_list:
        # https://github.com/ansible-community/collection_migration/issues/431
if any(line.strip() == ('%s:' % option_name) for option_name in option_name_empty):
continue
        # remove version_added; it does not apply to the collection in its current state
if 'version_added' in line:
changed = True
continue
# extends_documentation_fragment rewrite
if 'extends_documentation_fragment' in line:
# rewrite fragments
if new_fragments and bool(set(old_fragments).difference(new_fragments)):
indent = ' ' * (len(line) - len(line.lstrip()))
new_docs.append(line.split(':')[0] + ':')
for new_fragment in new_fragments:
new_docs.append('%s- %s' % (indent, new_fragment))
new_docs.append('')
in_extends = True
changed = True
continue
if in_extends and '-' in line:
continue
else:
in_extends = False
# seealso rewrite
if 'seealso:' in line and seealso_rewrite_map:
seealso_indent = len(line) - len(line.lstrip())
in_seealso = True
else:
indent = len(line) - len(line.lstrip())
if not in_seealso:
pass
elif seealso_indent == indent and not line.strip().startswith('-') and line.strip().endswith(':'):
in_seealso = False
elif in_seealso and 'module:' in line:
module_name = line.split(':')[-1].strip()
new_module_name = seealso_rewrite_map.get(module_name)
if new_module_name:
new_line = line.replace(module_name, new_module_name)
new_docs.append(new_line)
continue
new_docs.append(line)
if not changed:
return []
doc_str_tmpl = RAW_STR_TMPL if doc_val.type == 'raw_string' else STR_TMPL
# `doc_val` holds a baron representation of the string node
# of type 'string' or 'raw_string'. Updating its `.value`
# via assigning the new one replaces the node in FST.
# Also, in order to generate a string or raw-string literal,
# we need to wrap it with a corresponding pair of quotes.
# If we don't do this, we'd generate the following Python code
# ```
# DOCUMENTATION = some string value
# ```
# instead of the correct
# ```
# DOCUMENTATION = r'''some string value'''
# ```
# or
# ```
# DOCUMENTATION = '''some string value'''
# ```
doc_val.value = doc_str_tmpl.format(str_val='\n'.join(new_docs))
return deps
def rewrite_imports(mod_fst, collection, spec, namespace, args, options):
    """Rewrite imports to point at their new collection locations."""
plugins_path = ('ansible_collections', namespace, collection, 'plugins')
tests_path = ('ansible_collections', namespace, collection, 'tests')
unit_tests_path = tests_path + ('unit', )
import_map = {
('ansible', 'modules'): plugins_path + ('modules', ),
('ansible', 'module_utils'): plugins_path + ('module_utils', ),
('ansible', 'plugins'): plugins_path,
('units', ): unit_tests_path,
}
return rewrite_imports_in_fst(mod_fst, import_map, collection, spec, namespace, args, options)
def match_import_src(imp_src, import_map):
"""Find a replacement map entry matching the current import."""
try:
imp_src_tuple = tuple(t.value for t in imp_src)
except AttributeError as e:
# AttributeError("EllipsisNode instance has no attribute 'value' and 'value' is not a valid identifier of another node")
# lib/ansible/modules/system/setup.py:
# from ...module_utils.basic import AnsibleModule
logger.exception(e)
raise LookupError
for old_imp, new_imp in import_map.items():
token_length = len(old_imp)
if imp_src_tuple[:token_length] != old_imp:
continue
return token_length, new_imp
raise LookupError(f"Couldn't find a replacement for {imp_src!s}")
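# e.g. (illustrative map): given
#   import_map = {('ansible', 'modules'): ('ansible_collections', 'ns', 'coll', 'plugins', 'modules')}
# an FST node for `from ansible.modules.foo import bar` yields
#   (2, ('ansible_collections', 'ns', 'coll', 'plugins', 'modules'))
# i.e. the number of matched leading tokens and their replacement.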
def rewrite_imports_in_fst(mod_fst, import_map, collection, spec, namespace, args, options):
"""Replace imports in the python module FST."""
deps = []
for imp in mod_fst.find_all(('import', 'from_import')):
imp_src = imp.value
if imp.type == 'import':
imp_src = imp_src[0].value
try:
token_length, exchange = match_import_src(imp_src, import_map)
except LookupError:
continue # no matching imports