Unify plugin loading
Plugin loading in `rdflib.plugins.sparql` now happens in the same way as in
`rdflib.plugins` (a sketch of the shared pattern follows these notes).

This also eliminates a warning that occurs in `rdflib.plugins.sparql` and
makes it possible to enable `warn_unused_ignores` without any exceptions
or workarounds.

Also:
- Removed unused `type: ignore` comments.
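
For illustration, a minimal sketch of the shared loading pattern, assuming
Python 3.8+; `load_group` is a hypothetical helper and not part of this commit:

from importlib.metadata import entry_points


def load_group(group: str) -> dict:
    # Map entry-point names in the given group to their loaded objects.
    all_entry_points = entry_points()
    if hasattr(all_entry_points, "select"):
        # Python 3.10+ exposes the selection interface.
        selected = all_entry_points.select(group=group)
    else:
        # Older versions return a plain dict keyed by group name.
        selected = all_entry_points.get(group, [])
    return {ep.name: ep.load() for ep in selected}


# e.g. CUSTOM_EVALS.update(load_group("rdf.plugins.sparqleval"))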
aucampia committed Jan 25, 2022
1 parent d3f9453 commit 4918ee7
Showing 15 changed files with 257 additions and 18 deletions.
4 changes: 4 additions & 0 deletions .editorconfig
@@ -33,3 +33,7 @@ max_line_length = 88
# tab indentation
[Makefile]
indent_style = tab

+[*.{cfg,ini}]
+indent_style = space
+indent_size = 4
4 changes: 2 additions & 2 deletions rdflib/graph.py
@@ -1223,7 +1223,7 @@ def parse(
parser = plugin.get(format, Parser)()
try:
# TODO FIXME: Parser.parse should have **kwargs argument.
-            parser.parse(source, self, **args)  # type: ignore[call-arg]
+            parser.parse(source, self, **args)
except SyntaxError as se:
if could_not_guess_format:
raise ParserError(
@@ -1624,7 +1624,7 @@ def __contains__(self, triple_or_quad):
return True
return False

-    def add(self, triple_or_quad: Union[Tuple[Node, Node, Node, Optional[Any]], Tuple[Node, Node, Node]]) -> "ConjunctiveGraph":  # type: ignore[override]
+    def add(self, triple_or_quad: Union[Tuple[Node, Node, Node, Optional[Any]], Tuple[Node, Node, Node]]) -> "ConjunctiveGraph":
"""
Add a triple or quad to the store.
15 changes: 7 additions & 8 deletions rdflib/plugin.py
Original file line number Diff line number Diff line change
@@ -50,8 +50,10 @@
overload,
)

-if TYPE_CHECKING:
-    from pkg_resources import EntryPoint
+if sys.version_info < (3, 8):
+    from importlib_metadata import entry_points, EntryPoint
+else:
+    from importlib.metadata import entry_points, EntryPoint

__all__ = ["register", "get", "plugins", "PluginException", "Plugin", "PKGPlugin"]

@@ -127,20 +129,17 @@ def get(name: str, kind: Type[PluginT]) -> Type[PluginT]:
return p.getClass()


-if sys.version_info < (3, 8):
-    from importlib_metadata import entry_points
-else:
-    from importlib.metadata import entry_points

all_entry_points = entry_points()
if hasattr(all_entry_points, "select"):
for entry_point, kind in rdflib_entry_points.items():
for ep in all_entry_points.select(group=entry_point):
_plugins[(ep.name, kind)] = PKGPlugin(ep.name, kind, ep)
else:
# Prior to Python 3.10, this returns a dict instead of the selection interface, which is slightly slower
+    if TYPE_CHECKING:
+        assert isinstance(all_entry_points, dict)
for entry_point, kind in rdflib_entry_points.items():
-        for ep in all_entry_points.get(entry_point, []):  # type: ignore[union-attr]
+        for ep in all_entry_points.get(entry_point, []):
_plugins[(ep.name, kind)] = PKGPlugin(ep.name, kind, ep)


2 changes: 1 addition & 1 deletion rdflib/plugins/parsers/rdfxml.py
@@ -474,7 +474,7 @@ def property_element_start(self, name, qname, attrs):
o = URIRef(atts[att])
else:
if datatype is not None:
-                        language = None  # type: ignore[unreachable]
+                        language = None
o = Literal(atts[att], language, datatype)

if object is None:
11 changes: 7 additions & 4 deletions rdflib/plugins/sparql/__init__.py
@@ -31,6 +31,7 @@
PLUGIN_ENTRY_POINT = "rdf.plugins.sparqleval"

import sys
+from typing import TYPE_CHECKING, Any
from . import parser
from . import operators
from . import parserutils
@@ -47,10 +48,12 @@
from importlib.metadata import entry_points

all_entry_points = entry_points()
-if isinstance(all_entry_points, dict):
-    # Prior to Python 3.10, this returns a dict instead of the selection interface
-    for ep in all_entry_points.get(PLUGIN_ENTRY_POINT, []):
+if hasattr(all_entry_points, "select"):
+    for ep in all_entry_points.select(group=PLUGIN_ENTRY_POINT):
CUSTOM_EVALS[ep.name] = ep.load()
else:
-    for ep in all_entry_points.select(group=PLUGIN_ENTRY_POINT):
+    # Prior to Python 3.10, this returns a dict instead of the selection interface
+    if TYPE_CHECKING:
+        assert isinstance(all_entry_points, dict)
+    for ep in all_entry_points.get(PLUGIN_ENTRY_POINT, []):
CUSTOM_EVALS[ep.name] = ep.load()
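
Since CUSTOM_EVALS is an ordinary dict, an evaluator can also be registered
directly at runtime rather than through an entry point. A minimal sketch (the
evaluator name and body here are illustrative only):

import rdflib.plugins.sparql


def my_custom_eval(ctx, part):
    # Handle only the algebra nodes this evaluator understands; raising
    # NotImplementedError lets rdflib fall back to its default evaluation.
    raise NotImplementedError()


rdflib.plugins.sparql.CUSTOM_EVALS["example_eval"] = my_custom_eval

The entry-point route is demonstrated end to end by the test plugins added below.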
4 changes: 2 additions & 2 deletions rdflib/plugins/stores/sparqlstore.py
@@ -37,7 +37,7 @@ def _node_to_sparql(node) -> str:
return node.n3()


-class SPARQLStore(SPARQLConnector, Store):  # type: ignore[misc]
+class SPARQLStore(SPARQLConnector, Store):
"""An RDFLib store around a SPARQL endpoint
This is context-aware and should work as expected
@@ -543,7 +543,7 @@ def open(self, configuration: Union[str, Tuple[str, str]], create=False):
Graph("SPARQLStore"), can set the required parameters
"""
if type(configuration) == str:
-            self.query_endpoint = configuration  # type: ignore[assignment]
+            self.query_endpoint = configuration
elif type(configuration) == tuple:
self.query_endpoint = configuration[0]
self.update_endpoint = configuration[1]
2 changes: 1 addition & 1 deletion rdflib/store.py
@@ -380,7 +380,7 @@ def namespaces(self):
# See https://stackoverflow.com/q/13243766 and
# https://www.python.org/dev/peps/pep-0255/#why-a-new-keyword-for-yield-why-not-a-builtin-function-instead
if False:
-            yield None  # type: ignore[unreachable]
+            yield None

# Optional Transactional methods

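The `if False: yield None` line above is the standard idiom (see the PEP 255
link in the comment) for turning a function into a generator that yields
nothing. A minimal standalone illustration, unrelated to rdflib itself:

def empty_generator():
    # The unreachable yield is enough to make this a generator function,
    # so calling it returns an empty iterator instead of None.
    if False:
        yield None


assert list(empty_generator()) == []
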
6 changes: 6 additions & 0 deletions setup.cfg
@@ -28,6 +28,12 @@ warn_unused_configs = True
ignore_missing_imports = True
disallow_subclassing_any = False
warn_unreachable = True
+warn_unused_ignores = True
+# This is here to exclude the setup.py files in test plugins because they
+# confuse mypy, as mypy thinks they are the same module.
+exclude = (?x)(
+    ^.*test/plugins/.*/setup.py$
+    )

[tool:pytest]
addopts =
28 changes: 28 additions & 0 deletions test/plugins/parser/example/rdflib/plugin/parser/__init__.py
@@ -0,0 +1,28 @@
from typing import TYPE_CHECKING, Set, Tuple
from rdflib.parser import Parser


if TYPE_CHECKING:
from rdflib.parser import InputSource
from rdflib.graph import Graph
from rdflib.namespace import Namespace
from rdflib.term import Identifier


class ExampleParser(Parser):
def __init__(self):
super().__init__()

def parse(self, source: "InputSource", sink: "Graph"):
for triple in self.constant_output():
sink.add(triple)

@classmethod
def namespace(cls) -> "Namespace":
return Namespace("example:rdflib:plugin:parser:")

@classmethod
def constant_output(cls) -> Set[Tuple["Identifier", "Identifier", "Identifier"]]:
return {(cls.namespace().subj, cls.namespace().pred, cls.namespace().obj)}

from rdflib.namespace import Namespace
17 changes: 17 additions & 0 deletions test/plugins/parser/setup.cfg
@@ -0,0 +1,17 @@
# https://setuptools.pypa.io/en/latest/userguide/declarative_config.html
# https://setuptools.pypa.io/en/latest/references/keywords.html
[metadata]
name = example.rdflib.plugin.parser
version = 0.0.0

[options]
packages = find_namespace:

[options.packages.find]
include =
example.rdflib.plugin.parser


[options.entry_points]
rdf.plugins.parser =
example.rdflib.plugin.parser = example.rdflib.plugin.parser:ExampleParser
3 changes: 3 additions & 0 deletions test/plugins/parser/setup.py
@@ -0,0 +1,3 @@
from setuptools import setup

setup()
34 changes: 34 additions & 0 deletions test/plugins/sparqleval/example/rdflib/plugin/sparqleval/__init__.py
@@ -0,0 +1,34 @@
from typing import Any


def custom_eval_extended(ctx: Any, extend: Any) -> Any:
for c in evalPart(ctx, extend.p):
try:
if hasattr(extend.expr, "iri") and extend.expr.iri == function_uri:
evaluation = function_result
else:
evaluation = _eval(extend.expr, c.forget(ctx, _except=extend._vars))
if isinstance(evaluation, SPARQLError):
raise evaluation

yield c.merge({extend.var: evaluation})

except SPARQLError:
yield c


def custom_eval(ctx: Any, part: Any) -> Any:
if part.name == "Extend":
return custom_eval_extended(ctx, part)
else:
raise NotImplementedError()


from rdflib import Namespace
from rdflib.plugins.sparql.evaluate import evalPart
from rdflib.plugins.sparql.evalutils import _eval
from rdflib.plugins.sparql.sparql import SPARQLError

namespace = Namespace("example:rdflib:plugin:sparqleval:")
function_uri = namespace["function"]
function_result = namespace["result"]
17 changes: 17 additions & 0 deletions test/plugins/sparqleval/setup.cfg
@@ -0,0 +1,17 @@
# https://setuptools.pypa.io/en/latest/userguide/declarative_config.html
# https://setuptools.pypa.io/en/latest/references/keywords.html
[metadata]
name = example.rdflib.plugin.sparqleval
version = 0.0.0

[options]
packages = find_namespace:

[options.packages.find]
include =
example.rdflib.plugin.sparqleval


[options.entry_points]
rdf.plugins.sparqleval =
example.rdflib.plugin.sparqleval = example.rdflib.plugin.sparqleval:custom_eval
3 changes: 3 additions & 0 deletions test/plugins/sparqleval/setup.py
@@ -0,0 +1,3 @@
from setuptools import setup

setup()
125 changes: 125 additions & 0 deletions test/test_plugins.py
@@ -0,0 +1,125 @@
import importlib
import logging
import shutil
import subprocess
import sys
from contextlib import ExitStack, contextmanager
from pathlib import Path
from typing import Any, Callable, Dict, Generator, List

import warnings

import rdflib.plugin
import rdflib.plugins.sparql
import rdflib.plugins.sparql.evaluate
from rdflib import Graph
from rdflib.parser import Parser

TEST_DIR = Path(__file__).parent
TEST_PLUGINS_DIR = TEST_DIR / "plugins"


def del_key(d: Dict[Any, Any], key: Any) -> None:
del d[key]


@contextmanager
def ctx_plugin(tmp_path: Path, plugin_src: Path) -> Generator[None, None, None]:
base = tmp_path / f"{hash(plugin_src)}"
pypath = (base / "pypath").absolute()
plugpath = (base / "plugin").absolute()
shutil.copytree(plugin_src, plugpath)
logging.debug("Installing %s into %s", plugin_src, pypath)
subprocess.run(
[
sys.executable,
"-m",
"pip",
"install",
"--isolated",
"--no-input",
"--no-clean",
"--no-index",
"--disable-pip-version-check",
"--target",
f"{pypath}",
f"{plugpath}",
],
check=True,
)

sys.path.append(f"{pypath}")

yield None

sys.path.remove(f"{pypath}")


@contextmanager
def ctx_cleaners() -> Generator[List[Callable[[], None]], None, None]:
cleaners: List[Callable[[], None]] = []
yield cleaners
for cleaner in cleaners:
logging.debug("running cleaner %s", cleaner)
cleaner()


def test_sparqleval(tmp_path: Path) -> None:
with ExitStack() as stack:
stack.enter_context(ctx_plugin(tmp_path, TEST_PLUGINS_DIR / "sparqleval"))
warnings_record = stack.enter_context(warnings.catch_warnings(record=True))
cleaners = stack.enter_context(ctx_cleaners())

ep_name = "example.rdflib.plugin.sparqleval"
ep_ns = f'{ep_name.replace(".", ":")}:'
plugin_module = importlib.import_module(ep_name)
assert plugin_module.namespace == ep_ns

importlib.reload(rdflib.plugins.sparql)
importlib.reload(rdflib.plugins.sparql.evaluate)

cleaners.insert(0, lambda: del_key(rdflib.plugins.sparql.CUSTOM_EVALS, ep_name))

logging.debug(
"rdflib.plugins.sparql.CUSTOM_EVALS = %s",
rdflib.plugins.sparql.CUSTOM_EVALS,
)

graph = Graph()
query_string = (
"SELECT ?output1 WHERE { BIND(<" + ep_ns + "function>() AS ?output1) }"
)
logging.debug("query_string = %s", query_string)
result = graph.query(query_string)
assert result.type == "SELECT"
rows = list(result)
logging.debug("rows = %s", rows)
assert len(rows) == 1
assert len(rows[0]) == 1
assert rows[0][0] == plugin_module.function_result
assert [str(msg) for msg in warnings_record] == []


def test_parser(tmp_path: Path) -> None:
with ExitStack() as stack:
stack.enter_context(ctx_plugin(tmp_path, TEST_PLUGINS_DIR / "parser"))
warnings_record = stack.enter_context(warnings.catch_warnings(record=True))
cleaners = stack.enter_context(ctx_cleaners())

ep_name = "example.rdflib.plugin.parser"
ep_ns = f'{ep_name.replace(".", ":")}:'
plugin_module = importlib.import_module(ep_name)
assert plugin_module.ExampleParser.namespace() == ep_ns

importlib.reload(rdflib.plugin)
cleaners.insert(0, lambda: del_key(rdflib.plugin._plugins, (ep_name, Parser)))

graph = Graph()
assert len(graph) == 0
graph.parse(format=ep_name, data="")

assert len(graph) > 0
triples = set(graph.triples((None, None, None)))
logging.debug("triples = %s", triples)
assert triples == plugin_module.ExampleParser.constant_output()
assert [str(msg) for msg in warnings_record] == []
