diff --git a/.gitignore b/.gitignore
index f1e24e594e7..73927ba36c5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -21,4 +21,6 @@
 venv
 xcuserdata
 # Ignore rust_dev_preview as it's no longer part of the project
-rust_dev_preview
\ No newline at end of file
+rust_dev_preview
+# .snippets are created temporarily as build artifacts
+.snippets
diff --git a/.tools/validation/doc_gen.py b/.tools/validation/doc_gen.py
index 851d333dd59..da71ea0ddf4 100644
--- a/.tools/validation/doc_gen.py
+++ b/.tools/validation/doc_gen.py
@@ -20,7 +20,7 @@ class DocGen:
     snippets: dict[str, Snippet] = field(default_factory=dict)

     @staticmethod
-    def from_root(root: Path) -> Self | MetadataErrors:
+    def from_root(root: Path) -> (Self, MetadataErrors):
         errors = MetadataErrors()

         with open(root / "sdks.yaml", encoding="utf-8") as file:
@@ -30,12 +30,9 @@ def from_root(root: Path) -> Self | MetadataErrors:
         with open(root / "services.yaml", encoding="utf-8") as file:
             meta = yaml.safe_load(file)
-            parsed = parse_services("services.yaml", meta)
-            services = errors.maybe_extend(parsed)
+            services, service_errors = parse_services("services.yaml", meta)
+            errors.extend(service_errors)

         snippets = {}

-        if len(errors) > 0:
-            return errors
-
-        return DocGen(sdks=sdks, services=services, snippets=snippets)
+        return DocGen(sdks=sdks, services=services, snippets=snippets), errors
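The doc_gen.py change above sets the calling convention the rest of this diff follows: parsers return a (value, errors) pair instead of a Self | MetadataErrors union, so callers always get a usable object and decide separately what to do with accumulated errors. A minimal sketch of the new style, assuming it runs from the repository root where the metadata lives under .doc_gen/metadata as validate.py expects (note that if these signatures are meant to satisfy a type checker, the tuple[...] spelling, e.g. tuple[Self, MetadataErrors], is the accepted form; a bare parenthesized pair is only a runtime tuple):

    from pathlib import Path

    from doc_gen import DocGen

    # Before this change the caller had to branch on the return type:
    #   result = DocGen.from_root(root)
    #   if isinstance(result, MetadataErrors): ...
    doc_gen, errors = DocGen.from_root(Path(".doc_gen/metadata"))
    if len(errors) > 0:
        print(errors)  # errors no longer replace the parsed result
    print(f"{len(doc_gen.services)} services loaded")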
diff --git a/.tools/validation/metadata.py b/.tools/validation/metadata.py
index af27c99d8c9..a7ff23525ce 100755
--- a/.tools/validation/metadata.py
+++ b/.tools/validation/metadata.py
@@ -62,7 +62,7 @@ class Version:
     more_info: list[Url] = field(default_factory=list)

     @staticmethod
-    def from_yaml(yaml: dict[str, any], doc_gen: DocGen) -> Self | MetadataParseError:
+    def from_yaml(yaml: dict[str, any], doc_gen: DocGen) -> (Self, MetadataParseError):
         errors = MetadataErrors()

         sdk_version = int(yaml.get("sdk_version", 0))
@@ -109,17 +109,17 @@ def from_yaml(yaml: dict[str, any], doc_gen: DocGen) -> Self | MetadataParseErro
         if add_services and block_content is not None:
             errors.append(metadata_errors.APIExampleCannotAddService())

-        if len(errors) > 0:
-            return errors
-
-        return Version(
-            sdk_version,
-            block_content,
-            excerpts,
-            github,
-            add_services,
-            sdkguide,
-            more_info,
+        return (
+            Version(
+                sdk_version,
+                block_content,
+                excerpts,
+                github,
+                add_services,
+                sdkguide,
+                more_info,
+            ),
+            errors,
         )
@@ -141,18 +141,14 @@ def from_yaml(name: str, yaml: any, doc_gen: DocGen) -> Self | MetadataErrors:
         versions: list[Version] = []
         for version in yaml_versions:
-            version = Version.from_yaml(version, doc_gen)
-            if isinstance(version, Version):
-                versions.append(version)
-            else:
-                for error in version:
-                    error.language = name
-                    errors.append(error)
+            version, version_errors = Version.from_yaml(version, doc_gen)
+            errors.extend(version_errors)
+            versions.append(version)

-        if len(errors) > 0:
-            return errors
+        for error in errors:
+            error.language = name

-        return Language(name, versions)
+        return Language(name, versions), errors


 @dataclass
@@ -178,7 +174,7 @@ class Example:
     source_key: Optional[str] = field(default=None)

     @staticmethod
-    def from_yaml(yaml: any, doc_gen: DocGen) -> Self | MetadataErrors:
+    def from_yaml(yaml: any, doc_gen: DocGen) -> (Self, MetadataErrors):
         errors = MetadataErrors()

         title = get_with_valid_entities("title", yaml, errors)
@@ -214,22 +210,22 @@ def from_yaml(yaml: any, doc_gen: DocGen) -> Self | MetadataErrors:
         except DuplicateItemException:
             pass

-        if len(errors) > 0:
-            return errors
-
-        return Example(
-            id="",
-            file="",
-            title=title,
-            title_abbrev=title_abbrev,
-            category=category,
-            guide_topic=guide_topic,
-            languages=languages,
-            service_main=service_main,
-            services=services,
-            synopsis=synopsis,
-            synopsis_list=synopsis_list,
-            source_key=source_key,
+        return (
+            Example(
+                id="",
+                file="",
+                title=title,
+                title_abbrev=title_abbrev,
+                category=category,
+                guide_topic=guide_topic,
+                languages=languages,
+                service_main=service_main,
+                services=services,
+                synopsis=synopsis,
+                synopsis_list=synopsis_list,
+                source_key=source_key,
+            ),
+            errors,
         )
@@ -284,24 +280,22 @@ def idFormat(id: str, doc_gen: DocGen) -> bool:
 def parse(
     file: str, yaml: dict[str, any], doc_gen: DocGen
-) -> list[Example] | MetadataErrors:
+) -> (list[Example], MetadataErrors):
     examples: list[Example] = []
     errors = MetadataErrors()
     for id in yaml:
         if not idFormat(id, doc_gen):
             errors.append(metadata_errors.NameFormat(file=file, id=id))
-        example = Example.from_yaml(yaml[id], doc_gen)
-        if isinstance(example, Example):
-            example.file = file
-            example.id = id
-            examples.append(example)
-        else:
-            for error in example:
-                error.file = file
-                error.id = id
-                errors.append(error)
-
-    return examples if len(errors) == 0 else errors
+        example, example_errors = Example.from_yaml(yaml[id], doc_gen)
+        for error in example_errors:
+            error.file = file
+            error.id = id
+        errors.extend(example_errors)
+        example.file = file
+        example.id = id
+        examples.append(example)
+
+    return examples, errors


 if __name__ == "__main__":
diff --git a/.tools/validation/metadata_test.py b/.tools/validation/metadata_test.py
index 41b326b8454..8572f2957ef 100644
--- a/.tools/validation/metadata_test.py
+++ b/.tools/validation/metadata_test.py
@@ -67,7 +67,8 @@ def load(path: Path, doc_gen: DocGen) -> list[Example] | metadata_errors.Metadat
 def test_parse():
     meta = yaml.safe_load(GOOD_SINGLE_CPP)
-    parsed = parse("test_cpp.yaml", meta, DOC_GEN)
+    parsed, errors = parse("test_cpp.yaml", meta, DOC_GEN)
+    assert len(errors) == 0
     assert parsed == [
         Example(
             file="test_cpp.yaml",
@@ -118,7 +119,8 @@ def test_parse():
 def test_parse_cross():
     meta = yaml.safe_load(CROSS_META)
-    actual = parse("cross.yaml", meta, DOC_GEN)
+    actual, errors = parse("cross.yaml", meta, DOC_GEN)
+    assert len(errors) == 0
     assert actual == [
         Example(
             file="cross.yaml",
@@ -159,7 +161,8 @@ def test_parse_cross():
 def test_parse_curated():
     meta = yaml.safe_load(CURATED)
-    actual = parse("curated.yaml", meta, DOC_GEN)
+    actual, errors = parse("curated.yaml", meta, DOC_GEN)
+    assert len(errors) == 0
     assert actual == [
         Example(
             id="autogluon_tabular_with_sagemaker_pipelines",
@@ -180,7 +183,8 @@ def test_parse_curated():
 def test_verify_load_successful():
-    examples = load("valid_metadata.yaml", DOC_GEN)
+    examples, errors = load("valid_metadata.yaml", DOC_GEN)
+    assert len(errors) == 0
     assert examples == [
         Example(
             file="valid_metadata.yaml",
@@ -370,7 +374,7 @@ def test_verify_load_successful():
     ],
 )
 def test_common_errors(filename, expected_errors):
-    actual = load(filename, DOC_GEN)
+    _, actual = load(filename, DOC_GEN)
     assert expected_errors == actual._errors
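metadata.py's parse now keeps every partially parsed example and, as the tests above verify, hands back its problems separately; each error is stamped with the file and example id it came from before being merged. The same accumulate-and-annotate pattern, reduced to a sketch (parse_one is a hypothetical per-item parser, not code from this repo):

    from metadata_errors import MetadataErrors

    def parse_all(file: str, items: dict) -> tuple[list, MetadataErrors]:
        results = []
        errors = MetadataErrors()
        for item_id, item in items.items():
            result, item_errors = parse_one(item)  # hypothetical per-item parser
            for error in item_errors:
                error.file = file  # stamp context onto each error before merging
                error.id = item_id
            errors.extend(item_errors)
            results.append(result)
        return results, errors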
diff --git a/.tools/validation/project_validator.py b/.tools/validation/project_validator.py
index 98849feae13..b13edf9e799 100644
--- a/.tools/validation/project_validator.py
+++ b/.tools/validation/project_validator.py
@@ -22,7 +22,6 @@
 import os
 import re
-import argparse
 import logging
 import sys
 from dataclasses import dataclass, field
@@ -54,7 +53,6 @@ def check_files(root: Path, errors: MetadataErrors):
             verify_no_deny_list_words(file_contents, file_path, errors)
             verify_no_secret_keys(file_contents, file_path, errors)
             verify_no_secret_keys(file_contents, file_path, errors)
-            verify_snippet_start_end(file_contents, file_path, errors)

     print(f"{file_count} files scanned in {root}.\n")
@@ -180,78 +178,11 @@ def verify_no_secret_keys(
             errors.append(PossibleSecretKey(file=str(file_location), word=word))


-@dataclass
-class SnippetParseError(MetadataParseError):
-    tag: str = field(default="")
-
-
-@dataclass
-class DuplicateSnippetTagInFile(SnippetParseError):
-    def message(self):
-        return f"Duplicate tag {self.tag}"
-
-
-@dataclass
-class SnippetNoMatchingStart(SnippetParseError):
-    def message(self):
-        return f"No matching start for {self.tag}"
-
-
-@dataclass
-class SnippetNoMatchingEnd(SnippetParseError):
-    def message(self):
-        return f"No matching end for {self.tag}"
-
-
-# TODO move this to snippets
-def verify_snippet_start_end(
-    file_contents: str, file_location: Path, errors: MetadataErrors
-):
-    """Scan the file contents for snippet-start and snippet-end tags and verify
-    that they are in matched pairs. Log errors and return the count of errors."""
-    snippet_start = "snippet" + "-start:["
-    snippet_end = "snippet" + "-end:["
-    snippet_tags = set()
-    for word in file_contents.split():
-        if snippet_start in word:
-            tag = word.split("[")[1]
-            if tag in snippet_tags:
-                errors.append(DuplicateSnippetTagInFile(file=file_location, tag=tag))
-            else:
-                snippet_tags.add(tag)
-        elif snippet_end in word:
-            tag = word.split("[")[1]
-            if tag in snippet_tags:
-                snippet_tags.remove(tag)
-            else:
-                errors.append(SnippetNoMatchingStart(file=file_location, tag=tag))
-
-    for tag in snippet_tags:
-        errors.append(SnippetNoMatchingEnd(file=file_location, tag=tag))
-
-
 def main():
-    parser = argparse.ArgumentParser()
-    parser.add_argument(
-        "--quiet",
-        action="store_true",
-        help="Suppresses output of filenames while parsing. " "The default is False.",
-    )
-    parser.add_argument(
-        "--root",
-        help="The root path from which to search for files "
-        "to check. The default is the current working "
-        "folder.",
-    )
-    args = parser.parse_args()
-
-    root_path = Path(
-        os.path.abspath(".") if not args.root else os.path.abspath(args.root)
-    )
-
+    root_path = Path(__file__).parent.parent.parent
     print("----------\n\nRun Tests\n")
     errors = MetadataErrors()
-    check_files(root_path, args.quiet, errors)
+    check_files(root_path, errors)
     verify_sample_files(root_path, errors)
     error_count = len(errors)
     if error_count > 0:
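The snippet pairing check leaves project_validator.py here and reappears in snippets.py below, where it scans line by line and also captures the code between the tags. The two versions even tokenize tags slightly differently, which is worth knowing when comparing their output (a sketch; the tag name is invented):

    from snippets import SNIPPET_START, _tag_from_line

    line = "# snippet-start:[demo.hello]"
    line.split("[")[1]                    # removed checker: 'demo.hello]' (bracket kept)
    _tag_from_line(SNIPPET_START, line)   # new scanner: 'demo.hello'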
diff --git a/.tools/validation/sdks.py b/.tools/validation/sdks.py
index ca8763d7ffc..93682f5e927 100644
--- a/.tools/validation/sdks.py
+++ b/.tools/validation/sdks.py
@@ -19,23 +19,20 @@ class SdkApiRef:
     name: str
     link_template: Optional[str]

-    def from_yaml(yaml: dict[str, str] | None, errors: MetadataErrors) -> Self | None:
+    def from_yaml(yaml: dict[str, str] | None, errors: MetadataErrors) -> Self:
        if yaml is None:
            return None
        uid = yaml.get("uid")
        name = check_mapping(yaml.get("name"), "api_ref.name")
        link_template = yaml.get("link_template")

-        e = len(errors)
-
        if not uid:
            errors.append(metadata_errors.MissingField(field="api_ref.uid"))
        if isinstance(name, MetadataParseError):
            errors.append(name)
+            name = ""

-        if e == len(errors):
-            return SdkApiRef(uid, name, link_template)
-        return None
+        return SdkApiRef(uid, name, link_template)


 @dataclass
@@ -57,7 +54,7 @@ class SdkVersion:
     title_override: Optional[SdkTitleOverride] = field(default=None)

     @staticmethod
-    def from_yaml(version: int, yaml: dict[str, any]) -> Self | MetadataErrors:
+    def from_yaml(version: int, yaml: dict[str, any]) -> (Self, MetadataErrors):
         errors = MetadataErrors()
         long = check_mapping(yaml.get("long"), "long")
         short = check_mapping(yaml.get("short"), "short")
@@ -91,23 +88,25 @@ def from_yaml(version: int, yaml: dict[str, any]) -> Self | MetadataErrors:
         if isinstance(long, MetadataParseError):
             errors.append(long)
+            long = ""
         if isinstance(short, MetadataParseError):
             errors.append(short)
+            short = ""
         api_ref = SdkApiRef.from_yaml(yaml.get("api_ref"), errors)

-        if len(errors) > 0:
-            return errors
-
-        return SdkVersion(
-            version=version,
-            long=long,
-            short=short,
-            expanded=expanded,
-            guide=guide,
-            api_ref=api_ref,
-            caveat=caveat,
-            bookmark=bookmark,
-            title_override=title_override,
+        return (
+            SdkVersion(
+                version=version,
+                long=long,
+                short=short,
+                expanded=expanded,
+                guide=guide,
+                api_ref=api_ref,
+                caveat=caveat,
+                bookmark=bookmark,
+                title_override=title_override,
+            ),
+            errors,
         )
@@ -119,12 +118,13 @@ class Sdk:
     property: str

     @staticmethod
-    def from_yaml(name: str, yaml: dict[str, any]) -> Self | MetadataErrors:
+    def from_yaml(name: str, yaml: dict[str, any]) -> (Self, MetadataErrors):
         errors = MetadataErrors()
         property = yaml.get("property")
         guide = check_mapping(yaml.get("guide"), "guide")
         if isinstance(guide, MetadataParseError):
             errors.append(guide)
+            guide = ""

         versions = []
         sdk_versions = yaml.get("sdk", {})
@@ -137,13 +137,10 @@ def from_yaml(name: str, yaml: dict[str, any]) -> Self | MetadataErrors:
             else:
                 versions.append(sdk_version)

-        if len(errors) > 0:
-            return errors
-
-        return Sdk(name=name, versions=versions, guide=guide, property=property)
+        return Sdk(name=name, versions=versions, guide=guide, property=property), errors


-def parse(file: str, yaml: dict[str, any]) -> dict[str, Sdk]:
+def parse(file: str, yaml: dict[str, any]) -> (dict[str, Sdk], MetadataErrors):
     sdks = {}
     errors = MetadataErrors()
@@ -157,7 +154,7 @@ def parse(file: str, yaml: dict[str, any]) -> dict[str, Sdk]:
                 error.id = name
             errors.extend(sdk)

-    return sdks if len(errors) == 0 else errors
+    return sdks, errors


 if __name__ == "__main__":
diff --git a/.tools/validation/sdks_test.py b/.tools/validation/sdks_test.py
index ad94cc1e0ce..f56aa909810 100644
--- a/.tools/validation/sdks_test.py
+++ b/.tools/validation/sdks_test.py
@@ -13,7 +13,7 @@
 from sdks import parse, Sdk, SdkVersion, SdkApiRef, SdkTitleOverride


-def load(path: Path) -> list[Sdk] | metadata_errors.MetadataErrors:
+def load(path: Path) -> (list[Sdk], metadata_errors.MetadataErrors):
     root = Path(__file__).parent
     filename = root / "test_resources" / path
     with open(filename) as file:
@@ -22,8 +22,8 @@ def load(path: Path) -> list[Sdk] | metadata_errors.MetadataErrors:

 def test_empty_sdks():
-    examples = load("empty_sdks.yaml")
-    assert examples._errors == [
+    _, errors = load("empty_sdks.yaml")
+    assert errors._errors == [
         metadata_errors.MissingField(
             file="empty_sdks.yaml",
             id="C++",
@@ -35,7 +35,7 @@ def test_empty_sdks():

 def test_entityusage():
-    actual = load("entityusage_sdks.yaml")
+    _, actual = load("entityusage_sdks.yaml")
     expected = [
         metadata_errors.MappingMustBeEntity(
             file="entityusage_sdks.yaml",
diff --git a/.tools/validation/services.py b/.tools/validation/services.py
index ba629f02670..7f9fbd92277 100644
--- a/.tools/validation/services.py
+++ b/.tools/validation/services.py
@@ -27,7 +27,7 @@ class Service:
     tags: dict[str, set[str]] = field(default_factory=dict)

     @staticmethod
-    def from_yaml(name: str, yaml: dict[str, any]) -> Self | MetadataErrors:
+    def from_yaml(name: str, yaml: dict[str, any]) -> (Self, MetadataErrors):
         errors = MetadataErrors()

         long = check_mapping(yaml.get("long"), "long")
@@ -37,8 +37,10 @@ def from_yaml(name: str, yaml: dict[str, any]) -> Self | MetadataErrors:
         if isinstance(long, metadata_errors.MetadataParseError):
             errors.append(long)
+            long = ""
         if isinstance(short, metadata_errors.MetadataParseError):
             errors.append(short)
+            short = ""
         if sort is None:
             errors.append(metadata_errors.MissingField(field="sort"))
         if version is None:
@@ -63,26 +65,27 @@ def from_yaml(name: str, yaml: dict[str, any]) -> Self | MetadataErrors:
         for tag in tags:
             tags[tag] = set(tags[tag].keys())

-        if len(errors) > 0:
-            for error in errors:
-                error.id = name
-            return errors
-
-        return Service(
-            long=long,
-            short=short,
-            sort=sort,
-            api_ref=api_ref,
-            blurb=blurb,
-            bundle=bundle,
-            caveat=caveat,
-            guide=guide,
-            tags=tags,
-            version=version,
+        for error in errors:
+            error.id = name
+
+        return (
+            Service(
+                long=long,
+                short=short,
+                sort=sort,
+                api_ref=api_ref,
+                blurb=blurb,
+                bundle=bundle,
+                caveat=caveat,
+                guide=guide,
+                tags=tags,
+                version=version,
+            ),
+            errors,
         )


-def parse(filename: str, yaml: dict[str, any]) -> dict[str, Service] | MetadataErrors:
+def parse(filename: str, yaml: dict[str, any]) -> (dict[str, Service], MetadataErrors):
     errors = metadata_errors.MetadataErrors()
     services = {}
     for name in yaml:
@@ -90,15 +93,13 @@ def parse(filename: str, yaml: dict[str, any]) -> dict[str, Service] | MetadataE
         if meta is None:
             errors.append(metadata_errors.MissingServiceBody(file=filename, id=name))
         else:
-            service = Service.from_yaml(name, meta)
-            if isinstance(service, MetadataErrors):
-                for error in service:
-                    error.file = filename
-                errors.extend(service)
-            else:
-                services[name] = service
-
-    return services if len(errors) == 0 else errors
+            service, service_errors = Service.from_yaml(name, meta)
+            for error in service_errors:
+                error.file = filename
+            errors.extend(service_errors)
+            services[name] = service
+
+    return services, errors

 if __name__ == "__main__":
diff --git a/.tools/validation/snippets.py b/.tools/validation/snippets.py
index 7747add02f4..aeaa092ea76 100644
--- a/.tools/validation/snippets.py
+++ b/.tools/validation/snippets.py
@@ -2,6 +2,16 @@
 # SPDX-License-Identifier: Apache-2.0

 from dataclasses import dataclass
+from metadata_errors import MetadataErrors, MetadataError
+from typing import Optional
+from pathlib import Path
+from shutil import copyfile, rmtree
+import validator_config
+
+from file_utils import get_files
+
+SNIPPET_START = "snippet-start:["
+SNIPPET_END = "snippet-end:["


 @dataclass
@@ -10,3 +20,149 @@ class Snippet:
     file: str
     line_start: int
     line_end: int
+    code: str
+
+
+@dataclass
+class SnippetError(MetadataError):
+    line: Optional[int] = None
+    tag: Optional[str] = None
+
+    def prefix(self):
+        return super().prefix() + f" at l{self.line} for {self.tag}: "
+
+
+@dataclass
+class DuplicateSnippetStartError(SnippetError):
+    def message(self):
+        return "duplicate snippet-start tag"
+
+
+@dataclass
+class DuplicateSnippetEndError(SnippetError):
+    def message(self):
+        return "duplicate snippet-end tag"
+
+
+@dataclass
+class MissingSnippetStartError(SnippetError):
+    def message(self):
+        return "snippet-end with no matching start"
+
+
+@dataclass
+class MissingSnippetEndError(SnippetError):
+    def message(self):
+        return "snippet-start with no matching end"
+
+
+@dataclass
+class SnippetAlreadyWritten(MetadataError):
+    def message(self):
+        return "Snippet file already exists, which means this tag is defined more than once in separate files."
+
+
+@dataclass
+class MetadataUnicodeError(MetadataError):
+    err: Optional[UnicodeDecodeError] = None
+
+    def message(self):
+        return f" unicode error: {str(self.err)}"
+
+
+def _tag_from_line(token, line):
+    tag_start = line.find(token) + len(token)
+    tag_end = line.find("]", tag_start)
+    return line[tag_start:tag_end].strip()
+
+
+def find_snippets(file: Path, errors: MetadataErrors) -> dict[str, Snippet]:
+    snippets = {}
+    open_tags = set()
+    with open(file, encoding="utf-8") as snippet_file:
+        try:
+            for line_idx, line in enumerate(snippet_file.readlines()):
+                if SNIPPET_START in line:
+                    tag = _tag_from_line(SNIPPET_START, line)
+                    if tag in snippets:
+                        errors.append(
+                            DuplicateSnippetStartError(
+                                file=file, line=line_idx, tag=tag
+                            )
+                        )
+                    else:
+                        snippets[tag] = Snippet(
+                            id=tag, file=file, line_start=line_idx, line_end=-1, code=""
+                        )
+                        open_tags.add(tag)
+                elif SNIPPET_END in line:
+                    tag = _tag_from_line(SNIPPET_END, line)
+                    if tag not in snippets:
+                        errors.append(
+                            MissingSnippetStartError(file=file, line=line_idx, tag=tag)
+                        )
+                    elif tag not in open_tags:
+                        errors.append(
+                            DuplicateSnippetEndError(file=file, line=line_idx, tag=tag)
+                        )
+                    else:
+                        open_tags.remove(tag)
+                        snippets[tag].line_end = line_idx
+                else:
+                    for tag in open_tags:
+                        snippets[tag].code += line
+        except UnicodeDecodeError as err:
+            errors.append(MetadataUnicodeError(file=file, err=err))
+    for tag in open_tags:
+        errors.append(
+            MissingSnippetEndError(file=file, line=snippets[tag].line_start, tag=tag)
+        )
+    return snippets
+
+
+def collect_snippets(root: Path) -> (dict[str, Snippet], MetadataErrors):
+    snippets = {}
+    errors = MetadataErrors()
+    for file in get_files(root, validator_config.skip):
+        snippets.update(find_snippets(file, errors))
+    return snippets, errors
+
+
+def clear(folder: Path):
+    if folder.exists():
+        rmtree(folder, True)
+    folder.mkdir()
+
+
+def write_snippets(root: Path, snippets: dict[str, Snippet]):
+    errors = MetadataErrors()
+    for tag in snippets:
+        name = root / f"{tag}.txt"
+        if name.exists():
+            errors.append(SnippetAlreadyWritten(file=name))
+        else:
+            with open(name, "w", encoding="utf-8") as file:
+                file.write(snippets[tag].code)
+    return errors
+
+
+def _tag_from_file(self, name):
+    return f"{self.mirror}/{name}".replace("/", ".")
+
+
+def write_snippet_file(folder: Path, snippet_file: Path):
+    name = _tag_from_file(snippet_file)
+    dest = folder / f"{name}.txt"
+    if not dest.exists():
+        copyfile(folder / snippet_file, dest)
+
+
+if __name__ == "__main__":
+    root = Path(__file__).parent.parent.parent
+    snippets, errors = collect_snippets(root)
+    print(f"Found {len(snippets)} snippets")
+    out = root / ".snippets"
+    clear(out)
+    errors.maybe_extend(write_snippets(out, snippets))
+    if len(errors) > 0:
+        print(errors)
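find_snippets above records, for each tag, the source file, the start and end line, and the text between the markers, so later steps can write the excerpt out as a build artifact under .snippets. A small illustration of what it collects, assuming a file hello.py with the contents shown in the comment (the tag name is invented):

    from pathlib import Path

    from metadata_errors import MetadataErrors
    from snippets import find_snippets

    # hello.py:
    #   # snippet-start:[demo.hello]
    #   print("Hello!")
    #   # snippet-end:[demo.hello]
    errors = MetadataErrors()
    snippets = find_snippets(Path("hello.py"), errors)
    assert snippets["demo.hello"].code == 'print("Hello!")\n'
    assert len(errors) == 0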
diff --git a/.tools/validation/validate.py b/.tools/validation/validate.py
index 8e07bf1203c..2e6fc8d583f 100755
--- a/.tools/validation/validate.py
+++ b/.tools/validation/validate.py
@@ -3,38 +3,119 @@
 import argparse
 import yaml
+from dataclasses import dataclass
 from pathlib import Path
 from sys import exit
-from metadata import parse as parse_metadata
-from metadata_errors import MetadataErrors
+from typing import Optional
+
+from doc_gen import DocGen
+from metadata import parse as parse_metadata, Example
+from metadata_errors import MetadataErrors, MetadataError
 from metadata_validator import validate_metadata
 from project_validator import check_files, verify_sample_files
-from doc_gen import DocGen
+from snippets import collect_snippets, Snippet


-def validate_zexii(metadata_path: Path, errors: MetadataErrors) -> None:
-    doc_gen = errors.maybe_extend(DocGen.from_root(metadata_path))
-    if doc_gen is None:
+def validate_zexii(metadata_path: Path, errors: MetadataErrors) -> list[Example]:
+    doc_gen, doc_gen_errors = DocGen.from_root(metadata_path)
+    errors.extend(doc_gen_errors)
+
+    if len(doc_gen_errors) > 0:
         return

+    metadata = []
+
     for path in metadata_path.glob("*_metadata.yaml"):
         if path.name == "cross_metadata.yaml":
             continue
         with open(path, encoding="utf-8") as file:
             meta = yaml.safe_load(file)
-        errors.maybe_extend(parse_metadata(path.name, meta, doc_gen))
+        meta_list, metadata_errors = parse_metadata(path.name, meta, doc_gen)
+        metadata.extend(meta_list)
+        errors.maybe_extend(metadata_errors)
+
+    return metadata
+
+
+@dataclass
+class MissingSnippet(MetadataError):
+    tag: Optional[str] = None
+
+    def prefix(self):
+        return f"for {self.tag}"
+
+    def message(self):
+        return "missing snippet"
+
+
+@dataclass
+class DuplicateSnippetFile(MetadataError):
+    snippet_file: Optional[str] = None
+
+    def prefix(self):
+        return f"for {self.snippet_file}"
+
+    def message(self):
+        return "duplicate snippet_file"
+
+
+@dataclass
+class MissingSnippetFile(MetadataError):
+    snippet_file: Optional[str] = None
+
+    def prefix(self):
+        return f"for {self.snippet_file}"
+
+    def message(self):
+        return "missing snippet_file"
+
+
+def validate_snippets(
+    root: Path,
+    metadata: list[Example],
+    snippets: dict[str, Snippet],
+    errors: MetadataErrors,
+):
+    snippet_files = set()
+    for example in metadata:
+        for lang in example.languages:
+            language = example.languages[lang]
+            for version in language.versions:
+                for excerpt in version.excerpts:
+                    for snippet_tag in excerpt.snippet_tags:
+                        if snippet_tag not in snippets:
+                            # Ensure all metadata snippets are found
+                            errors.append(
+                                MissingSnippet(file=example.file, tag=snippet_tag)
+                            )
+                    for snippet_file in excerpt.snippet_files:
+                        if snippet_file in snippet_files:
+                            # Ensure there are no duplicate snippet_files
+                            errors.append(
+                                DuplicateSnippetFile(
+                                    file=example.file, snippet_file=snippet_file
+                                )
+                            )
+                        if not (root / snippet_file).exists():
+                            # Ensure all snippet_files exist
+                            errors.append(
+                                MissingSnippetFile(
+                                    file=example.file, snippet_file=snippet_file
+                                )
+                            )
+                        snippet_files.add(snippet_file)


 def main():
     parser = argparse.ArgumentParser()
     parser.add_argument(
         "--root",
-        default=f"{Path(__file__).parent / '..' / '..'}",
+        default=f"{Path(__file__).parent.parent.parent}",
         help="The root path from which to search for files to check. The default is the root of the git repo (two up from this file).",
     )
     parser.add_argument(
         "--doc-gen",
-        default=f"{Path(__file__).parent / '..' / '..' / '.doc_gen'}",
+        default=f"{Path(__file__).parent.parent.parent / '.doc_gen'}",
         help="The folder that contains schema and metadata files. The default is .doc_gen in the root of this repo.",
         required=False,
     )
@@ -47,7 +128,10 @@ def main():
     check_files(root_path, errors)
     verify_sample_files(root_path, errors)
     validate_metadata(doc_gen, errors)
-    validate_zexii(doc_gen / "metadata", errors)
+    metadata = validate_zexii(doc_gen / "metadata", errors)
+    snippets, snippet_errors = collect_snippets(root_path)
+    errors.extend(snippet_errors)
+    validate_snippets(root_path, metadata, snippets, errors)

     error_count = len(errors)
     if error_count > 0:
diff --git a/.tools/validation/validate_test.py b/.tools/validation/validate_test.py
new file mode 100644
index 00000000000..e69de29bb2d
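validate_test.py is added empty. One possible starter test for the new snippet plumbing, sketched under the assumption that the suite runs with pytest and its tmp_path fixture (the tag name is invented):

    from pathlib import Path

    from metadata_errors import MetadataErrors
    from snippets import MissingSnippetEndError, find_snippets

    def test_unclosed_snippet_is_reported(tmp_path: Path):
        source = tmp_path / "unclosed.py"
        source.write_text("# snippet-start:[demo.unclosed]\nprint('hi')\n")
        errors = MetadataErrors()
        snippets = find_snippets(source, errors)
        assert "demo.unclosed" in snippets
        assert any(isinstance(e, MissingSnippetEndError) for e in errors)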