diff --git a/.editorconfig b/.editorconfig
index cc636124b..e18c0788d 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -33,3 +33,7 @@ max_line_length = 88
 # tab indentation
 [Makefile]
 indent_style = tab
+
+[*.{cfg,ini}]
+indent_style = space
+indent_size = 4
diff --git a/rdflib/plugin.py b/rdflib/plugin.py
index b7edbc624..01abe24c0 100644
--- a/rdflib/plugin.py
+++ b/rdflib/plugin.py
@@ -50,8 +50,10 @@
     overload,
 )
 
-if TYPE_CHECKING:
-    from pkg_resources import EntryPoint
+if sys.version_info < (3, 8):
+    from importlib_metadata import entry_points, EntryPoint
+else:
+    from importlib.metadata import entry_points, EntryPoint
 
 __all__ = ["register", "get", "plugins", "PluginException", "Plugin", "PKGPlugin"]
@@ -127,11 +129,6 @@ def get(name: str, kind: Type[PluginT]) -> Type[PluginT]:
     return p.getClass()
 
 
-if sys.version_info < (3, 8):
-    from importlib_metadata import entry_points
-else:
-    from importlib.metadata import entry_points
-
 all_entry_points = entry_points()
 if hasattr(all_entry_points, "select"):
     for entry_point, kind in rdflib_entry_points.items():
@@ -139,8 +136,10 @@ def get(name: str, kind: Type[PluginT]) -> Type[PluginT]:
             _plugins[(ep.name, kind)] = PKGPlugin(ep.name, kind, ep)
 else:
     # Prior to Python 3.10, this returns a dict instead of the selection interface, which is slightly slower
+    if TYPE_CHECKING:
+        assert isinstance(all_entry_points, dict)
     for entry_point, kind in rdflib_entry_points.items():
-        for ep in all_entry_points.get(entry_point, []):  # type: ignore[union-attr]
+        for ep in all_entry_points.get(entry_point, []):
             _plugins[(ep.name, kind)] = PKGPlugin(ep.name, kind, ep)
diff --git a/rdflib/plugins/sparql/__init__.py b/rdflib/plugins/sparql/__init__.py
index 97d8c56aa..b996dc3f7 100644
--- a/rdflib/plugins/sparql/__init__.py
+++ b/rdflib/plugins/sparql/__init__.py
@@ -31,6 +31,7 @@ PLUGIN_ENTRY_POINT = "rdf.plugins.sparqleval"
 
 import sys
+from typing import TYPE_CHECKING, Any
 
 from . import parser
 from . import operators
 from . import parserutils
@@ -47,10 +48,12 @@
     from importlib.metadata import entry_points
 
 all_entry_points = entry_points()
-if isinstance(all_entry_points, dict):
-    # Prior to Python 3.10, this returns a dict instead of the selection interface
-    for ep in all_entry_points.get(PLUGIN_ENTRY_POINT, []):
+if hasattr(all_entry_points, "select"):
+    for ep in all_entry_points.select(group=PLUGIN_ENTRY_POINT):
         CUSTOM_EVALS[ep.name] = ep.load()
 else:
-    for ep in all_entry_points.select(group=PLUGIN_ENTRY_POINT):
+    # Prior to Python 3.10, this returns a dict instead of the selection interface
+    if TYPE_CHECKING:
+        assert isinstance(all_entry_points, dict)
+    for ep in all_entry_points.get(PLUGIN_ENTRY_POINT, []):
         CUSTOM_EVALS[ep.name] = ep.load()
diff --git a/setup.cfg b/setup.cfg
index 1432189a5..acb567a32 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -30,10 +30,11 @@ disallow_subclassing_any = False
 warn_unreachable = True
 warn_unused_ignores = True
 
-[mypy-rdflib.plugin]
-# There is one type ignore in rdflib.plugin which is not needed on python 3.10
-# but is needed on earlier versions, so this does not work for that module.
-warn_unused_ignores = False
+# This is here to exclude the setup.py files in test plugins, because they
+# confuse mypy, as mypy thinks they are all the same module.
+exclude = (?x)(
+    ^.*test/plugins/.*/setup.py$
+  )
 
 [tool:pytest]
 addopts =
diff --git a/test/plugins/parser/example/rdflib/plugin/parser/__init__.py b/test/plugins/parser/example/rdflib/plugin/parser/__init__.py
new file mode 100644
index 000000000..8554ceed3
--- /dev/null
+++ b/test/plugins/parser/example/rdflib/plugin/parser/__init__.py
@@ -0,0 +1,28 @@
+from typing import TYPE_CHECKING, Set, Tuple
+from rdflib.parser import Parser
+
+
+if TYPE_CHECKING:
+    from rdflib.parser import InputSource
+    from rdflib.graph import Graph
+    from rdflib.namespace import Namespace
+    from rdflib.term import Identifier
+
+
+class ExampleParser(Parser):
+    def __init__(self):
+        super().__init__()
+
+    def parse(self, source: "InputSource", sink: "Graph"):
+        for triple in self.constant_output():
+            sink.add(triple)
+
+    @classmethod
+    def namespace(cls) -> "Namespace":
+        return Namespace("example:rdflib:plugin:parser:")
+
+    @classmethod
+    def constant_output(cls) -> Set[Tuple["Identifier", "Identifier", "Identifier"]]:
+        return {(cls.namespace().subj, cls.namespace().pred, cls.namespace().obj)}
+
+from rdflib.namespace import Namespace
diff --git a/test/plugins/parser/setup.cfg b/test/plugins/parser/setup.cfg
new file mode 100644
index 000000000..6af99d627
--- /dev/null
+++ b/test/plugins/parser/setup.cfg
@@ -0,0 +1,17 @@
+# https://setuptools.pypa.io/en/latest/userguide/declarative_config.html
+# https://setuptools.pypa.io/en/latest/references/keywords.html
+[metadata]
+name = example.rdflib.plugin.parser
+version = 0.0.0
+
+[options]
+packages = find_namespace:
+
+[options.packages.find]
+include =
+    example.rdflib.plugin.parser
+
+
+[options.entry_points]
+rdf.plugins.parser =
+    example.rdflib.plugin.parser = example.rdflib.plugin.parser:ExampleParser
diff --git a/test/plugins/parser/setup.py b/test/plugins/parser/setup.py
new file mode 100644
index 000000000..606849326
--- /dev/null
+++ b/test/plugins/parser/setup.py
@@ -0,0 +1,3 @@
+from setuptools import setup
+
+setup()
diff --git a/test/plugins/sparqleval/example/rdflib/plugin/sparqleval/__init__.py b/test/plugins/sparqleval/example/rdflib/plugin/sparqleval/__init__.py
new file mode 100644
index 000000000..a4a87725b
--- /dev/null
+++ b/test/plugins/sparqleval/example/rdflib/plugin/sparqleval/__init__.py
@@ -0,0 +1,34 @@
+from typing import Any
+
+
+def custom_eval_extended(ctx: Any, extend: Any) -> Any:
+    for c in evalPart(ctx, extend.p):
+        try:
+            if hasattr(extend.expr, "iri") and extend.expr.iri == function_uri:
+                evaluation = function_result
+            else:
+                evaluation = _eval(extend.expr, c.forget(ctx, _except=extend._vars))
+                if isinstance(evaluation, SPARQLError):
+                    raise evaluation
+
+            yield c.merge({extend.var: evaluation})
+
+        except SPARQLError:
+            yield c
+
+
+def custom_eval(ctx: Any, part: Any) -> Any:
+    if part.name == "Extend":
+        return custom_eval_extended(ctx, part)
+    else:
+        raise NotImplementedError()
+
+
+from rdflib import Namespace
+from rdflib.plugins.sparql.evaluate import evalPart
+from rdflib.plugins.sparql.evalutils import _eval
+from rdflib.plugins.sparql.sparql import SPARQLError
+
+namespace = Namespace("example:rdflib:plugin:sparqleval:")
+function_uri = namespace["function"]
+function_result = namespace["result"]
diff --git a/test/plugins/sparqleval/setup.cfg b/test/plugins/sparqleval/setup.cfg
new file mode 100644
index 000000000..6679e6ad9
--- /dev/null
+++ b/test/plugins/sparqleval/setup.cfg
@@ -0,0 +1,17 @@
+# https://setuptools.pypa.io/en/latest/userguide/declarative_config.html
+# https://setuptools.pypa.io/en/latest/references/keywords.html
+[metadata]
+name = example.rdflib.plugin.sparqleval
+version = 0.0.0
+
+[options]
+packages = find_namespace:
+
+[options.packages.find]
+include =
+    example.rdflib.plugin.sparqleval
+
+
+[options.entry_points]
+rdf.plugins.sparqleval =
+    example.rdflib.plugin.sparqleval = example.rdflib.plugin.sparqleval:custom_eval
diff --git a/test/plugins/sparqleval/setup.py b/test/plugins/sparqleval/setup.py
new file mode 100644
index 000000000..606849326
--- /dev/null
+++ b/test/plugins/sparqleval/setup.py
@@ -0,0 +1,3 @@
+from setuptools import setup
+
+setup()
diff --git a/test/test_plugins.py b/test/test_plugins.py
new file mode 100644
index 000000000..5be564a78
--- /dev/null
+++ b/test/test_plugins.py
@@ -0,0 +1,127 @@
+import importlib
+import logging
+import shutil
+import subprocess
+import sys
+from contextlib import ExitStack, contextmanager
+from pathlib import Path
+from typing import Any, Callable, Dict, Generator, List
+
+import warnings
+
+import rdflib.plugin
+import rdflib.plugins.sparql
+import rdflib.plugins.sparql.evaluate
+from rdflib import Graph
+from rdflib.parser import Parser
+
+TEST_DIR = Path(__file__).parent
+TEST_PLUGINS_DIR = TEST_DIR / "plugins"
+
+
+def del_key(d: Dict[Any, Any], key: Any) -> None:
+    del d[key]
+
+
+@contextmanager
+def ctx_plugin(tmp_path: Path, plugin_src: Path) -> Generator[None, None, None]:
+    base = tmp_path / f"{hash(plugin_src)}"
+    pypath = (base / "pypath").absolute()
+    plugpath = (base / "plugin").absolute()
+    shutil.copytree(plugin_src, plugpath)
+    logging.debug("Installing %s into %s", plugin_src, pypath)
+    subprocess.run(
+        [
+            sys.executable,
+            "-m",
+            "pip",
+            "install",
+            "--isolated",
+            "--no-input",
+            "--no-clean",
+            "--no-index",
+            "--disable-pip-version-check",
+            "--target",
+            f"{pypath}",
+            f"{plugpath}",
+        ],
+        check=True,
+    )
+
+    sys.path.append(f"{pypath}")
+
+    yield None
+
+    sys.path.remove(f"{pypath}")
+
+
+@contextmanager
+def ctx_cleaners() -> Generator[List[Callable[[], None]], None, None]:
+    cleaners: List[Callable[[], None]] = []
+    yield cleaners
+    for cleaner in cleaners:
+        logging.debug("running cleaner %s", cleaner)
+        cleaner()
+
+
+# Using no_cover as coverage freaks out and crashes because of what is happening here.
+def test_sparqleval(tmp_path: Path, no_cover: None) -> None:
+    with ExitStack() as stack:
+        stack.enter_context(ctx_plugin(tmp_path, TEST_PLUGINS_DIR / "sparqleval"))
+        warnings_record = stack.enter_context(warnings.catch_warnings(record=True))
+        cleaners = stack.enter_context(ctx_cleaners())
+
+        ep_name = "example.rdflib.plugin.sparqleval"
+        ep_ns = f'{ep_name.replace(".", ":")}:'
+        plugin_module = importlib.import_module(ep_name)
+        assert plugin_module.namespace == ep_ns
+
+        importlib.reload(rdflib.plugins.sparql)
+        importlib.reload(rdflib.plugins.sparql.evaluate)
+
+        cleaners.insert(0, lambda: del_key(rdflib.plugins.sparql.CUSTOM_EVALS, ep_name))
+
+        logging.debug(
+            "rdflib.plugins.sparql.CUSTOM_EVALS = %s",
+            rdflib.plugins.sparql.CUSTOM_EVALS,
+        )
+
+        graph = Graph()
+        query_string = (
+            "SELECT ?output1 WHERE { BIND(<" + ep_ns + "function>() AS ?output1) }"
+        )
+        logging.debug("query_string = %s", query_string)
+        result = graph.query(query_string)
+        assert result.type == "SELECT"
+        rows = list(result)
+        logging.debug("rows = %s", rows)
+        assert len(rows) == 1
+        assert len(rows[0]) == 1
+        assert rows[0][0] == plugin_module.function_result
+        assert [str(msg) for msg in warnings_record] == []
+
+
+# Using no_cover as coverage freaks out and crashes because of what is happening here.
+def test_parser(tmp_path: Path, no_cover: None) -> None:
+    with ExitStack() as stack:
+        stack.enter_context(ctx_plugin(tmp_path, TEST_PLUGINS_DIR / "parser"))
+        warnings_record = stack.enter_context(warnings.catch_warnings(record=True))
+        cleaners = stack.enter_context(ctx_cleaners())
+
+        ep_name = "example.rdflib.plugin.parser"
+        ep_ns = f'{ep_name.replace(".", ":")}:'
+        plugin_module = importlib.import_module(ep_name)
+        assert plugin_module.ExampleParser.namespace() == ep_ns
+
+        importlib.reload(rdflib.plugin)
+        cleaners.insert(0, lambda: del_key(rdflib.plugin._plugins, (ep_name, Parser)))
+
+        graph = Graph()
+        assert len(graph) == 0
+        graph.parse(format=ep_name, data="")
+
+        assert len(graph) > 0
+        triples = set(graph.triples((None, None, None)))
+        logging.debug("triples = %s", triples)
+        assert triples == plugin_module.ExampleParser.constant_output()
+        assert [str(msg) for msg in warnings_record] == []
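
Note on the entry-point lookup pattern used in rdflib/plugin.py and rdflib/plugins/sparql/__init__.py above: a minimal standalone sketch of the same version handling, assuming importlib_metadata is installed on Python < 3.8. The group name "rdf.plugins.parser" is one of the groups rdflib already consumes; everything else in this sketch is illustrative only.

    import sys

    if sys.version_info < (3, 8):
        from importlib_metadata import entry_points
    else:
        from importlib.metadata import entry_points

    all_entry_points = entry_points()
    group = "rdf.plugins.parser"
    if hasattr(all_entry_points, "select"):
        # Python 3.10+ returns a selectable EntryPoints object.
        found = list(all_entry_points.select(group=group))
    else:
        # Python 3.8/3.9 return a plain dict keyed by group name.
        found = list(all_entry_points.get(group, []))
    print([ep.name for ep in found])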
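
For context on how the new test plugins are exercised, a short usage sketch: once the example parser package under test/plugins/parser is pip-installed (as ctx_plugin does in test/test_plugins.py), its entry-point name doubles as the parse format. This mirrors what test_parser asserts; the loop at the end is illustrative output, not part of the test.

    from rdflib import Graph

    graph = Graph()
    # The format string is the entry-point name declared in the plugin's setup.cfg.
    graph.parse(format="example.rdflib.plugin.parser", data="")

    # ExampleParser.constant_output() adds exactly one triple in the
    # "example:rdflib:plugin:parser:" namespace.
    for subj, pred, obj in graph:
        print(subj, pred, obj)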