Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Unify plugin loading #1694

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions .editorconfig
Original file line number Diff line number Diff line change
Expand Up @@ -33,3 +33,7 @@ max_line_length = 88
# tab indentation
[Makefile]
indent_style = tab

[*.{cfg,ini}]
indent_style = space
indent_size = 4
15 changes: 7 additions & 8 deletions rdflib/plugin.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,8 +50,10 @@
overload,
)

if TYPE_CHECKING:
from pkg_resources import EntryPoint
if sys.version_info < (3, 8):
from importlib_metadata import entry_points, EntryPoint
else:
from importlib.metadata import entry_points, EntryPoint

__all__ = ["register", "get", "plugins", "PluginException", "Plugin", "PKGPlugin"]

Expand Down Expand Up @@ -127,20 +129,17 @@ def get(name: str, kind: Type[PluginT]) -> Type[PluginT]:
return p.getClass()


if sys.version_info < (3, 8):
from importlib_metadata import entry_points
else:
from importlib.metadata import entry_points

all_entry_points = entry_points()
if hasattr(all_entry_points, "select"):
for entry_point, kind in rdflib_entry_points.items():
for ep in all_entry_points.select(group=entry_point):
_plugins[(ep.name, kind)] = PKGPlugin(ep.name, kind, ep)
else:
# Prior to Python 3.10, this returns a dict instead of the selection interface, which is slightly slower
if TYPE_CHECKING:
assert isinstance(all_entry_points, dict)
for entry_point, kind in rdflib_entry_points.items():
for ep in all_entry_points.get(entry_point, []): # type: ignore[union-attr]
for ep in all_entry_points.get(entry_point, []):
_plugins[(ep.name, kind)] = PKGPlugin(ep.name, kind, ep)


Expand Down
11 changes: 7 additions & 4 deletions rdflib/plugins/sparql/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@
PLUGIN_ENTRY_POINT = "rdf.plugins.sparqleval"

import sys
from typing import TYPE_CHECKING, Any
from . import parser
from . import operators
from . import parserutils
Expand All @@ -47,10 +48,12 @@
from importlib.metadata import entry_points

all_entry_points = entry_points()
if isinstance(all_entry_points, dict):
# Prior to Python 3.10, this returns a dict instead of the selection interface
for ep in all_entry_points.get(PLUGIN_ENTRY_POINT, []):
if hasattr(all_entry_points, "select"):
for ep in all_entry_points.select(group=PLUGIN_ENTRY_POINT):
CUSTOM_EVALS[ep.name] = ep.load()
else:
for ep in all_entry_points.select(group=PLUGIN_ENTRY_POINT):
# Prior to Python 3.10, this returns a dict instead of the selection interface
if TYPE_CHECKING:
assert isinstance(all_entry_points, dict)
for ep in all_entry_points.get(PLUGIN_ENTRY_POINT, []):
CUSTOM_EVALS[ep.name] = ep.load()
9 changes: 5 additions & 4 deletions setup.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -30,10 +30,11 @@ disallow_subclassing_any = False
warn_unreachable = True
warn_unused_ignores = True

[mypy-rdflib.plugin]
# There is one type ignore in rdflib.plugin which is not needed on python 3.10
# but is needed on earlier versions, so this does not work for that module.
warn_unused_ignores = False
# This is here to exclude the setup.py files in the test plugins because they
# confuse mypy: it thinks they are the same module.
exclude = (?x)(
^.*test/plugins/.*/setup.py$
)

[tool:pytest]
addopts =
Expand Down
28 changes: 28 additions & 0 deletions test/plugins/parser/example/rdflib/plugin/parser/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
from typing import TYPE_CHECKING, Set, Tuple
from rdflib.parser import Parser


if TYPE_CHECKING:
from rdflib.parser import InputSource
from rdflib.graph import Graph
from rdflib.namespace import Namespace
from rdflib.term import Identifier


class ExampleParser(Parser):
    """Minimal example parser plugin: ignores its input and always emits the
    same single triple, so tests can assert exactly what was parsed."""

    def __init__(self):
        super().__init__()

    def parse(self, source: "InputSource", sink: "Graph"):
        """Add the parser's constant triples to *sink*; *source* is ignored."""
        constant = self.constant_output()
        for spo in constant:
            sink.add(spo)

    @classmethod
    def namespace(cls) -> "Namespace":
        """Namespace under which this plugin mints its example terms."""
        return Namespace("example:rdflib:plugin:parser:")

    @classmethod
    def constant_output(cls) -> Set[Tuple["Identifier", "Identifier", "Identifier"]]:
        """The fixed set of triples that :meth:`parse` always produces."""
        ns = cls.namespace()
        return {(ns.subj, ns.pred, ns.obj)}

from rdflib.namespace import Namespace
17 changes: 17 additions & 0 deletions test/plugins/parser/setup.cfg
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# https://setuptools.pypa.io/en/latest/userguide/declarative_config.html
# https://setuptools.pypa.io/en/latest/references/keywords.html
[metadata]
name = example.rdflib.plugin.parser
version = 0.0.0

[options]
packages = find_namespace:

[options.packages.find]
include =
example.rdflib.plugin.parser


[options.entry_points]
rdf.plugins.parser =
example.rdflib.plugin.parser = example.rdflib.plugin.parser:ExampleParser
3 changes: 3 additions & 0 deletions test/plugins/parser/setup.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
from setuptools import setup

# Shim so the example plugin can be pip-installed during tests;
# all package metadata lives in the adjacent setup.cfg.
setup()
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
from typing import Any


def custom_eval_extended(ctx: Any, extend: Any) -> Any:
    """Evaluate a SPARQL ``Extend`` node, short-circuiting calls to this
    plugin's function URI to a constant result.

    For every solution produced by the child pattern, bind ``extend.var``
    to either ``function_result`` (when the expression is our function) or
    to the normal evaluation of the expression.  A ``SPARQLError`` during
    evaluation yields the solution unextended, mirroring standard SPARQL
    error semantics.
    """
    for solution in evalPart(ctx, extend.p):
        try:
            is_plugin_call = (
                hasattr(extend.expr, "iri") and extend.expr.iri == function_uri
            )
            if is_plugin_call:
                value = function_result
            else:
                value = _eval(extend.expr, solution.forget(ctx, _except=extend._vars))
            # _eval can return an error object instead of raising it.
            if isinstance(value, SPARQLError):
                raise value
            yield solution.merge({extend.var: value})
        except SPARQLError:
            yield solution


def custom_eval(ctx: Any, part: Any) -> Any:
    """Entry point registered via the ``rdf.plugins.sparqleval`` entry point.

    Only ``Extend`` nodes are handled; anything else raises
    ``NotImplementedError`` so rdflib falls back to its default evaluators.
    """
    if part.name != "Extend":
        raise NotImplementedError()
    return custom_eval_extended(ctx, part)


# NOTE(review): these imports sit at the bottom of the module — presumably
# deliberate for this plugin-loading test fixture (the functions above only
# resolve these names at call time); confirm before reordering.
from rdflib import Namespace
from rdflib.plugins.sparql.evaluate import evalPart
from rdflib.plugins.sparql.evalutils import _eval
from rdflib.plugins.sparql.sparql import SPARQLError

# Constants used by the custom evaluators above: the function URI the plugin
# intercepts, and the constant value it binds as that function's result.
namespace = Namespace("example:rdflib:plugin:sparqleval:")
function_uri = namespace["function"]
function_result = namespace["result"]
17 changes: 17 additions & 0 deletions test/plugins/sparqleval/setup.cfg
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# https://setuptools.pypa.io/en/latest/userguide/declarative_config.html
# https://setuptools.pypa.io/en/latest/references/keywords.html
[metadata]
name = example.rdflib.plugin.sparqleval
version = 0.0.0

[options]
packages = find_namespace:

[options.packages.find]
include =
example.rdflib.plugin.sparqleval


[options.entry_points]
rdf.plugins.sparqleval =
example.rdflib.plugin.sparqleval = example.rdflib.plugin.sparqleval:custom_eval
3 changes: 3 additions & 0 deletions test/plugins/sparqleval/setup.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
from setuptools import setup

# Shim so the example plugin can be pip-installed during tests;
# all package metadata lives in the adjacent setup.cfg.
setup()
127 changes: 127 additions & 0 deletions test/test_plugins.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,127 @@
import importlib
import logging
import shutil
import subprocess
import sys
import warnings
from contextlib import ExitStack, contextmanager
from pathlib import Path
from typing import Any, Callable, Dict, Generator, List

import rdflib.plugin
import rdflib.plugins.sparql
import rdflib.plugins.sparql.evaluate
from rdflib import Graph
from rdflib.parser import Parser

TEST_DIR = Path(__file__).parent
TEST_PLUGINS_DIR = TEST_DIR / "plugins"


def del_key(d: Dict[Any, Any], key: Any) -> None:
    """Remove *key* from *d* (raises ``KeyError`` if absent).

    Exists because ``del`` is a statement and cannot appear in a lambda.
    """
    d.pop(key)


@contextmanager
def ctx_plugin(tmp_path: Path, plugin_src: Path) -> Generator[None, None, None]:
    """Pip-install the plugin at *plugin_src* into an isolated target directory
    under *tmp_path* and put that directory on ``sys.path`` for the duration
    of the context.

    The ``sys.path`` entry is removed on exit even if the body raises
    (previously the removal was skipped on error, leaking the path entry
    into subsequent tests).
    """
    base = tmp_path / f"{hash(plugin_src)}"
    pypath = (base / "pypath").absolute()
    plugpath = (base / "plugin").absolute()
    shutil.copytree(plugin_src, plugpath)
    logging.debug("Installing %s into %s", plugin_src, pypath)
    # --no-index/--isolated keep the install hermetic: no network, no user config.
    subprocess.run(
        [
            sys.executable,
            "-m",
            "pip",
            "install",
            "--isolated",
            "--no-input",
            "--no-clean",
            "--no-index",
            "--disable-pip-version-check",
            "--target",
            f"{pypath}",
            f"{plugpath}",
        ],
        check=True,
    )

    sys.path.append(f"{pypath}")
    try:
        yield None
    finally:
        # Always undo the sys.path mutation, even when the with-body raised.
        sys.path.remove(f"{pypath}")


@contextmanager
def ctx_cleaners() -> Generator[List[Callable[[], None]], None, None]:
    """Yield a list that the caller fills with cleanup callables; run them
    all (in list order) when the context exits.

    The cleaners run in a ``finally`` block so cleanup happens even when the
    with-body raises (previously an exception skipped all cleanup, leaking
    registered plugins into subsequent tests).
    """
    cleaners: List[Callable[[], None]] = []
    try:
        yield cleaners
    finally:
        for cleaner in cleaners:
            logging.debug("running cleaner %s", cleaner)
            cleaner()


# Using no_cover as coverage freaks out and crashes because of what is happening here.
def test_sparqleval(tmp_path: Path, no_cover: None) -> None:
    """End-to-end check that a SPARQL custom-eval plugin is discovered via
    entry points: install the example plugin, reload rdflib's SPARQL plugin
    machinery, then run a query that calls the plugin's function URI.
    """
    with ExitStack() as stack:
        # Pip-install the example plugin and put it on sys.path for this test.
        stack.enter_context(ctx_plugin(tmp_path, TEST_PLUGINS_DIR / "sparqleval"))
        warnings_record = stack.enter_context(warnings.catch_warnings(record=True))
        cleaners = stack.enter_context(ctx_cleaners())

        ep_name = "example.rdflib.plugin.sparqleval"
        ep_ns = f'{ep_name.replace(".", ":")}:'
        plugin_module = importlib.import_module(ep_name)
        assert plugin_module.namespace == ep_ns

        # Reload so the entry-point scan in rdflib.plugins.sparql runs again
        # and picks up the freshly installed plugin.
        importlib.reload(rdflib.plugins.sparql)
        importlib.reload(rdflib.plugins.sparql.evaluate)

        # Unregister the plugin from CUSTOM_EVALS afterwards so later tests
        # see a clean registry.
        cleaners.insert(0, lambda: del_key(rdflib.plugins.sparql.CUSTOM_EVALS, ep_name))

        logging.debug(
            "rdflib.plugins.sparql.CUSTOM_EVALS = %s",
            rdflib.plugins.sparql.CUSTOM_EVALS,
        )

        graph = Graph()
        # BIND(<...function>() AS ?output1) invokes the plugin's function URI.
        query_string = (
            "SELECT ?output1 WHERE { BIND(<" + ep_ns + "function>() AS ?output1) }"
        )
        logging.debug("query_string = %s", query_string)
        result = graph.query(query_string)
        assert result.type == "SELECT"
        rows = list(result)
        logging.debug("rows = %s", rows)
        assert len(rows) == 1
        assert len(rows[0]) == 1
        # The plugin substitutes its constant result for the function call.
        assert rows[0][0] == plugin_module.function_result
        # No warnings should have been emitted while loading/using the plugin.
        assert [str(msg) for msg in warnings_record] == []


# Using no_cover as coverage freaks out and crashes because of what is happening here.
def test_parser(tmp_path: Path, no_cover: None) -> None:
    """End-to-end check that a parser plugin is discovered via entry points:
    install the example plugin, reload rdflib.plugin, then parse with the
    plugin's format name and compare against its known constant output.
    """
    with ExitStack() as stack:
        # Pip-install the example plugin and put it on sys.path for this test.
        stack.enter_context(ctx_plugin(tmp_path, TEST_PLUGINS_DIR / "parser"))
        warnings_record = stack.enter_context(warnings.catch_warnings(record=True))
        cleaners = stack.enter_context(ctx_cleaners())

        ep_name = "example.rdflib.plugin.parser"
        ep_ns = f'{ep_name.replace(".", ":")}:'
        plugin_module = importlib.import_module(ep_name)
        assert plugin_module.ExampleParser.namespace() == ep_ns

        # Reload so the entry-point scan in rdflib.plugin runs again and
        # registers the freshly installed parser; unregister it afterwards.
        importlib.reload(rdflib.plugin)
        cleaners.insert(0, lambda: del_key(rdflib.plugin._plugins, (ep_name, Parser)))

        graph = Graph()
        assert len(graph) == 0
        # The data is irrelevant: ExampleParser ignores its input entirely.
        graph.parse(format=ep_name, data="")

        assert len(graph) > 0
        triples = set(graph.triples((None, None, None)))
        logging.debug("triples = %s", triples)
        # The graph must contain exactly the parser's constant triples.
        assert triples == plugin_module.ExampleParser.constant_output()
        assert [str(msg) for msg in warnings_record] == []