diff --git a/examples/secure_with_audit.py b/examples/secure_with_audit.py index 7df044cad..4a9ad2747 100644 --- a/examples/secure_with_audit.py +++ b/examples/secure_with_audit.py @@ -15,7 +15,7 @@ import logging import os import sys -from typing import Any, Optional +from typing import Any from rdflib import Graph @@ -70,7 +70,7 @@ def main() -> None: # Attempt to parse a JSON-LD document that will result in the blocked URL # being accessed. - error: Optional[PermissionError] = None + error: PermissionError | None = None try: graph.parse( data=r"""{ diff --git a/examples/secure_with_urlopen.py b/examples/secure_with_urlopen.py index 005504796..c201317f3 100644 --- a/examples/secure_with_urlopen.py +++ b/examples/secure_with_urlopen.py @@ -8,7 +8,6 @@ import logging import os import sys -from typing import Optional from urllib.request import HTTPHandler, OpenerDirector, Request, install_opener from rdflib import Graph @@ -61,7 +60,7 @@ def main() -> None: # Attempt to parse a JSON-LD document that will result in the blocked URL # being accessed. 
- error: Optional[PermissionError] = None + error: PermissionError | None = None try: graph.parse( data=r"""{ diff --git a/rdflib/collection.py b/rdflib/collection.py index 054fdb481..42e5b8922 100644 --- a/rdflib/collection.py +++ b/rdflib/collection.py @@ -93,11 +93,11 @@ def n3(self) -> str: """ return "( %s )" % (" ".join([i.n3() for i in self])) - def _get_container(self, index: int) -> Optional[IdentifiedNode]: + def _get_container(self, index: int) -> IdentifiedNode | None: """Gets the first, rest holding node at index.""" assert isinstance(index, int) graph = self.graph - container: Optional[IdentifiedNode] = self.uri + container: IdentifiedNode | None = self.uri i = 0 while i < index and container is not None: i += 1 @@ -274,7 +274,7 @@ def __iadd__(self, other: Iterable[_ObjectType]): return self def clear(self): - container: Optional[IdentifiedNode] = self.uri + container: IdentifiedNode | None = self.uri graph = self.graph while container is not None: rest = graph.value(container, RDF.rest) diff --git a/rdflib/compare.py b/rdflib/compare.py index 1ba765cd9..58644ae8f 100644 --- a/rdflib/compare.py +++ b/rdflib/compare.py @@ -222,7 +222,7 @@ def __str__(self): def key(self): return (len(self.nodes), self.hash_color()) - def hash_color(self, color: Optional[tuple[ColorItem, ...]] = None) -> str: + def hash_color(self, color: tuple[ColorItem, ...] 
| None = None) -> str: if color is None: color = self.color if color in self._hash_cache: @@ -371,7 +371,7 @@ def _refine(self, coloring: list[Color], sequence: list[Color]) -> list[Color]: return combined_colors @_runtime("to_hash_runtime") - def to_hash(self, stats: Optional[Stats] = None): + def to_hash(self, stats: Stats | None = None): result = 0 for triple in self.canonical_triples(stats=stats): result += self.hashfunc(" ".join([x.n3() for x in triple])) @@ -392,7 +392,7 @@ def _experimental_path(self, coloring: list[Color]) -> list[Color]: def _create_generator( self, colorings: list[list[Color]], - groupings: Optional[dict[Node, set[Node]]] = None, + groupings: dict[Node, set[Node]] | None = None, ) -> dict[Node, set[Node]]: if not groupings: groupings = defaultdict(set) @@ -408,7 +408,7 @@ def _create_generator( def _traces( self, coloring: list[Color], - stats: Optional[Stats] = None, + stats: Stats | None = None, depth: list[int] = [0], ) -> list[Color]: if stats is not None and "prunings" not in stats: @@ -475,7 +475,7 @@ def _traces( depth[0] = best_depth # type: ignore[assignment] return discrete[0] - def canonical_triples(self, stats: Optional[Stats] = None): + def canonical_triples(self, stats: Stats | None = None): if stats is not None: start_coloring = datetime.now() coloring = self._initial_color() @@ -569,9 +569,7 @@ def isomorphic(graph1: Graph, graph2: Graph) -> bool: return gd1 == gd2 -def to_canonical_graph( - g1: Graph, stats: Optional[Stats] = None -) -> ReadOnlyGraphAggregate: +def to_canonical_graph(g1: Graph, stats: Stats | None = None) -> ReadOnlyGraphAggregate: """Creates a canonical, read-only graph. 
Creates a canonical, read-only graph where all bnode id:s are based on diff --git a/rdflib/events.py b/rdflib/events.py index 07d6e6475..c0a6eee33 100644 --- a/rdflib/events.py +++ b/rdflib/events.py @@ -25,7 +25,7 @@ from __future__ import annotations -from typing import Any, Optional +from typing import Any __all__ = ["Event", "Dispatcher"] @@ -57,7 +57,7 @@ class Dispatcher: subscribers. """ - _dispatch_map: Optional[dict[Any, Any]] = None + _dispatch_map: dict[Any, Any] | None = None def set_map(self, amap: dict[Any, Any]): self._dispatch_map = amap diff --git a/rdflib/exceptions.py b/rdflib/exceptions.py index cbe68fb98..2f183d72e 100644 --- a/rdflib/exceptions.py +++ b/rdflib/exceptions.py @@ -11,13 +11,13 @@ ] -from typing import Any, Optional +from typing import Any class Error(Exception): """Base class for rdflib exceptions.""" - def __init__(self, msg: Optional[str] = None): + def __init__(self, msg: str | None = None): Exception.__init__(self, msg) self.msg = msg diff --git a/rdflib/extras/infixowl.py b/rdflib/extras/infixowl.py index bc2db9179..42efb6bed 100644 --- a/rdflib/extras/infixowl.py +++ b/rdflib/extras/infixowl.py @@ -117,7 +117,7 @@ import itertools import logging -from typing import TYPE_CHECKING, Optional, Union, cast +from typing import TYPE_CHECKING, Union, cast from rdflib.collection import Collection from rdflib.graph import Graph, _ObjectType @@ -380,13 +380,13 @@ class Individual: # Instance typing graph: Graph __identifier: IdentifiedNode - qname: Optional[str] + qname: str | None def serialize(self, graph): for fact in self.factoryGraph.triples((self.identifier, None, None)): graph.add(fact) - def __init__(self, identifier: Optional[IdentifiedNode] = None, graph=None): + def __init__(self, identifier: IdentifiedNode | None = None, graph=None): self.__identifier = identifier is not None and identifier or BNode() if graph is None: self.graph = self.factoryGraph @@ -605,7 +605,7 @@ class AnnotatableTerms(Individual): def __init__( 
self, - identifier: Optional[IdentifiedNode], + identifier: IdentifiedNode | None, graph=None, nameAnnotation=None, # noqa: N803 nameIsLabel=False, # noqa: N803 @@ -663,7 +663,7 @@ def _get_comment(self): def _set_comment( self, - comment: Optional[IdentifiedNode | Literal | list[IdentifiedNode | Literal]], + comment: IdentifiedNode | Literal | list[IdentifiedNode | Literal] | None, ): if not comment: return @@ -702,7 +702,7 @@ def _get_label(self): yield label def _set_label( - self, label: Optional[IdentifiedNode | Literal | list[IdentifiedNode | Literal]] + self, label: IdentifiedNode | Literal | list[IdentifiedNode | Literal] | None ): if not label: return @@ -1058,7 +1058,7 @@ def setupNounAnnotations(self, noun_annotations): # noqa: N802 def __init__( self, - identifier: Optional[IdentifiedNode] = None, + identifier: IdentifiedNode | None = None, subClassOf=None, # noqa: N803 equivalentClass=None, # noqa: N803 disjointWith=None, # noqa: N803 @@ -1762,22 +1762,22 @@ class Restriction(Class): def __init__( self, onProperty, # noqa: N803 - graph: Optional[Graph] = None, - allValuesFrom: Optional[ # noqa: N803 - IdentifiedNode | Literal | Class | bool - ] = None, - someValuesFrom: Optional[ # noqa: N803 - IdentifiedNode | Literal | Class | bool - ] = None, - value: Optional[IdentifiedNode | Literal | Class | bool] = None, - cardinality: Optional[IdentifiedNode | Literal | Class | bool] = None, - maxCardinality: Optional[ # noqa: N803 - IdentifiedNode | Literal | Class | bool - ] = None, - minCardinality: Optional[ # noqa: N803 - IdentifiedNode | Literal | Class | bool - ] = None, - identifier: Optional[IdentifiedNode] = None, + graph: Graph | None = None, + allValuesFrom: ( # noqa: N803 + IdentifiedNode | Literal | Class | bool | None + ) = None, + someValuesFrom: ( # noqa: N803 + IdentifiedNode | Literal | Class | bool | None + ) = None, + value: IdentifiedNode | Literal | Class | bool | None = None, + cardinality: IdentifiedNode | Literal | Class | bool | None 
= None, + maxCardinality: ( # noqa: N803 + IdentifiedNode | Literal | Class | bool | None + ) = None, + minCardinality: ( # noqa: N803 + IdentifiedNode | Literal | Class | bool | None + ) = None, + identifier: IdentifiedNode | None = None, ): graph = Graph() if graph is None else graph super(Restriction, self).__init__( diff --git a/rdflib/extras/shacl.py b/rdflib/extras/shacl.py index dd8ca5498..e4f873572 100644 --- a/rdflib/extras/shacl.py +++ b/rdflib/extras/shacl.py @@ -4,7 +4,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING from rdflib import Graph, Literal, URIRef, paths from rdflib.namespace import RDF, SH @@ -33,7 +33,7 @@ def parse_shacl_path( :param path_identifier: A :class:`~rdflib.term.Node` of the path :return: A :class:`~rdflib.term.URIRef` or a :class:`~rdflib.paths.Path` """ - path: Optional[URIRef | Path] = None + path: URIRef | Path | None = None # Literals are not allowed. if isinstance(path_identifier, Literal): diff --git a/rdflib/graph.py b/rdflib/graph.py index cc36a6e82..ad75d7354 100644 --- a/rdflib/graph.py +++ b/rdflib/graph.py @@ -443,14 +443,14 @@ class Graph(Node): context_aware: bool formula_aware: bool default_union: bool - base: Optional[str] + base: str | None def __init__( self, store: Store | str = "default", - identifier: Optional[_ContextIdentifierType | str] = None, - namespace_manager: Optional[NamespaceManager] = None, - base: Optional[str] = None, + identifier: _ContextIdentifierType | str | None = None, + namespace_manager: NamespaceManager | None = None, + base: str | None = None, bind_namespaces: _NamespaceSetString = "rdflib", ): super(Graph, self).__init__() @@ -533,7 +533,7 @@ def rollback(self: _GraphT) -> _GraphT: self.__store.rollback() return self - def open(self, configuration: str, create: bool = False) -> Optional[int]: + def open(self, configuration: str, create: bool = False) -> int | None: """Open the graph store Might be necessary for 
stores that require opening a connection to a @@ -822,8 +822,8 @@ def set( def subjects( self, - predicate: Optional[Path | _PredicateType] = None, - object: Optional[_ObjectType] = None, + predicate: Path | _PredicateType | None = None, + object: _ObjectType | None = None, unique: bool = False, ) -> Generator[_SubjectType, None, None]: """A generator of (optionally unique) subjects with the given @@ -846,8 +846,8 @@ def subjects( def predicates( self, - subject: Optional[_SubjectType] = None, - object: Optional[_ObjectType] = None, + subject: _SubjectType | None = None, + object: _ObjectType | None = None, unique: bool = False, ) -> Generator[_PredicateType, None, None]: """A generator of (optionally unique) predicates with the given @@ -870,8 +870,8 @@ def predicates( def objects( self, - subject: Optional[_SubjectType] = None, - predicate: Optional[Path | _PredicateType] = None, + subject: _SubjectType | None = None, + predicate: Path | _PredicateType | None = None, unique: bool = False, ) -> Generator[_ObjectType, None, None]: """A generator of (optionally unique) objects with the given @@ -893,7 +893,7 @@ def objects( raise def subject_predicates( - self, object: Optional[_ObjectType] = None, unique: bool = False + self, object: _ObjectType | None = None, unique: bool = False ) -> Generator[tuple[_SubjectType, _PredicateType], None, None]: """A generator of (optionally unique) (subject, predicate) tuples for the given object""" @@ -915,7 +915,7 @@ def subject_predicates( def subject_objects( self, - predicate: Optional[Path | _PredicateType] = None, + predicate: Path | _PredicateType | None = None, unique: bool = False, ) -> Generator[tuple[_SubjectType, _ObjectType], None, None]: """A generator of (optionally unique) (subject, object) tuples @@ -937,7 +937,7 @@ def subject_objects( raise def predicate_objects( - self, subject: Optional[_SubjectType] = None, unique: bool = False + self, subject: _SubjectType | None = None, unique: bool = False ) -> 
Generator[tuple[_PredicateType, _ObjectType], None, None]: """A generator of (optionally unique) (predicate, object) tuples for the given subject""" @@ -963,20 +963,20 @@ def triples_choices( tuple[ list[_SubjectType] | tuple[_SubjectType], _PredicateType, - Optional[_ObjectType], + _ObjectType | None, ] | tuple[ - Optional[_SubjectType], + _SubjectType | None, list[_PredicateType] | tuple[_PredicateType], - Optional[_ObjectType], + _ObjectType | None, ] | tuple[ - Optional[_SubjectType], + _SubjectType | None, _PredicateType, list[_ObjectType] | tuple[_ObjectType], ] ), - context: Optional[_ContextType] = None, + context: _ContextType | None = None, ) -> Generator[_TripleType, None, None]: subject, predicate, object_ = triple # type error: Argument 1 to "triples_choices" of "Store" has incompatible type "tuple[Union[list[Node], Node], Union[Node, list[Node]], Union[Node, list[Node]]]"; expected "Union[tuple[list[Node], Node, Node], tuple[Node, list[Node], Node], tuple[Node, Node, list[Node]]]" @@ -991,18 +991,18 @@ def value( self, subject: None = ..., predicate: None = ..., - object: Optional[_ObjectType] = ..., - default: Optional[Node] = ..., + object: _ObjectType | None = ..., + default: Node | None = ..., any: bool = ..., ) -> None: ... @overload def value( self, - subject: Optional[_SubjectType] = ..., + subject: _SubjectType | None = ..., predicate: None = ..., object: None = ..., - default: Optional[Node] = ..., + default: Node | None = ..., any: bool = ..., ) -> None: ... @@ -1010,9 +1010,9 @@ def value( def value( self, subject: None = ..., - predicate: Optional[_PredicateType] = ..., + predicate: _PredicateType | None = ..., object: None = ..., - default: Optional[Node] = ..., + default: Node | None = ..., any: bool = ..., ) -> None: ... 
@@ -1020,50 +1020,50 @@ def value( def value( self, subject: None = ..., - predicate: Optional[_PredicateType] = ..., - object: Optional[_ObjectType] = ..., - default: Optional[_SubjectType] = ..., + predicate: _PredicateType | None = ..., + object: _ObjectType | None = ..., + default: _SubjectType | None = ..., any: bool = ..., - ) -> Optional[_SubjectType]: ... + ) -> _SubjectType | None: ... @overload def value( self, - subject: Optional[_SubjectType] = ..., + subject: _SubjectType | None = ..., predicate: None = ..., - object: Optional[_ObjectType] = ..., - default: Optional[_PredicateType] = ..., + object: _ObjectType | None = ..., + default: _PredicateType | None = ..., any: bool = ..., - ) -> Optional[_PredicateType]: ... + ) -> _PredicateType | None: ... @overload def value( self, - subject: Optional[_SubjectType] = ..., - predicate: Optional[_PredicateType] = ..., + subject: _SubjectType | None = ..., + predicate: _PredicateType | None = ..., object: None = ..., - default: Optional[_ObjectType] = ..., + default: _ObjectType | None = ..., any: bool = ..., - ) -> Optional[_ObjectType]: ... + ) -> _ObjectType | None: ... @overload def value( self, - subject: Optional[_SubjectType] = ..., - predicate: Optional[_PredicateType] = ..., - object: Optional[_ObjectType] = ..., - default: Optional[Node] = ..., + subject: _SubjectType | None = ..., + predicate: _PredicateType | None = ..., + object: _ObjectType | None = ..., + default: Node | None = ..., any: bool = ..., - ) -> Optional[Node]: ... + ) -> Node | None: ... 
def value( self, - subject: Optional[_SubjectType] = None, - predicate: Optional[_PredicateType] = RDF.value, - object: Optional[_ObjectType] = None, - default: Optional[Node] = None, + subject: _SubjectType | None = None, + predicate: _PredicateType | None = RDF.value, + object: _ObjectType | None = None, + default: Node | None = None, any: bool = True, - ) -> Optional[Node]: + ) -> Node | None: """Get a value for a pair of two criteria Exactly one of subject, predicate, object must be None. Useful if one @@ -1141,7 +1141,7 @@ def transitiveClosure( # noqa: N802 self, func: Callable[[_TCArgT, Graph], Iterable[_TCArgT]], arg: _TCArgT, - seen: Optional[dict[_TCArgT, int]] = None, + seen: dict[_TCArgT, int] | None = None, ): """ Generates transitive closure of a user-defined @@ -1201,10 +1201,10 @@ def transitiveClosure( # noqa: N802 def transitive_objects( self, - subject: Optional[_SubjectType], - predicate: Optional[_PredicateType], - remember: Optional[dict[Optional[_SubjectType], int]] = None, - ) -> Generator[Optional[_SubjectType], None, None]: + subject: _SubjectType | None, + predicate: _PredicateType | None, + remember: dict[_SubjectType | None, int] | None = None, + ) -> Generator[_SubjectType | None, None, None]: """Transitively generate objects for the ``predicate`` relationship Generated objects belong to the depth first transitive closure of the @@ -1222,10 +1222,10 @@ def transitive_objects( def transitive_subjects( self, - predicate: Optional[_PredicateType], - object: Optional[_ObjectType], - remember: Optional[dict[Optional[_ObjectType], int]] = None, - ) -> Generator[Optional[_ObjectType], None, None]: + predicate: _PredicateType | None, + object: _ObjectType | None, + remember: dict[_ObjectType | None, int] | None = None, + ) -> Generator[_ObjectType | None, None, None]: """Transitively generate subjects for the ``predicate`` relationship Generated subjects belong to the depth first transitive closure of the @@ -1249,7 +1249,7 @@ def 
compute_qname(self, uri: str, generate: bool = True) -> tuple[str, URIRef, s def bind( self, - prefix: Optional[str], + prefix: str | None, namespace: Any, # noqa: F811 override: bool = True, replace: bool = False, @@ -1290,7 +1290,7 @@ def serialize( self, destination: None, format: str, - base: Optional[str], + base: str | None, encoding: str, **args: Any, ) -> bytes: ... @@ -1301,7 +1301,7 @@ def serialize( self, destination: None = ..., format: str = ..., - base: Optional[str] = ..., + base: str | None = ..., *, encoding: str, **args: Any, @@ -1313,7 +1313,7 @@ def serialize( self, destination: None = ..., format: str = ..., - base: Optional[str] = ..., + base: str | None = ..., encoding: None = ..., **args: Any, ) -> str: ... @@ -1324,8 +1324,8 @@ def serialize( self, destination: str | pathlib.PurePath | IO[bytes], format: str = ..., - base: Optional[str] = ..., - encoding: Optional[str] = ..., + base: str | None = ..., + encoding: str | None = ..., **args: Any, ) -> Graph: ... @@ -1333,19 +1333,19 @@ def serialize( @overload def serialize( self, - destination: Optional[str | pathlib.PurePath | IO[bytes]] = ..., + destination: str | pathlib.PurePath | IO[bytes] | None = ..., format: str = ..., - base: Optional[str] = ..., - encoding: Optional[str] = ..., + base: str | None = ..., + encoding: str | None = ..., **args: Any, ) -> bytes | str | Graph: ... 
def serialize( self: _GraphT, - destination: Optional[str | pathlib.PurePath | IO[bytes]] = None, + destination: str | pathlib.PurePath | IO[bytes] | None = None, format: str = "turtle", - base: Optional[str] = None, - encoding: Optional[str] = None, + base: str | None = None, + encoding: str | None = None, **args: Any, ) -> bytes | str | _GraphT: """ @@ -1419,7 +1419,7 @@ def print( self, format: str = "turtle", encoding: str = "utf-8", - out: Optional[TextIO] = None, + out: TextIO | None = None, ) -> None: print( self.serialize(None, format=format, encoding=encoding).decode(encoding), @@ -1429,14 +1429,14 @@ def print( def parse( self, - source: Optional[ - IO[bytes] | TextIO | InputSource | str | bytes | pathlib.PurePath - ] = None, - publicID: Optional[str] = None, # noqa: N803 - format: Optional[str] = None, - location: Optional[str] = None, - file: Optional[BinaryIO | TextIO] = None, - data: Optional[str | bytes] = None, + source: ( + IO[bytes] | TextIO | InputSource | str | bytes | pathlib.PurePath | None + ) = None, + publicID: str | None = None, # noqa: N803 + format: str | None = None, + location: str | None = None, + file: BinaryIO | TextIO | None = None, + data: str | bytes | None = None, **args: Any, ) -> Graph: """ @@ -1577,8 +1577,8 @@ def query( query_object: str | Query, processor: str | rdflib.query.Processor = "sparql", result: str | type[rdflib.query.Result] = "sparql", - initNs: Optional[Mapping[str, Any]] = None, # noqa: N803 - initBindings: Optional[Mapping[str, Identifier]] = None, # noqa: N803 + initNs: Mapping[str, Any] | None = None, # noqa: N803 + initBindings: Mapping[str, Identifier] | None = None, # noqa: N803 use_store_provided: bool = True, **kwargs: Any, ) -> rdflib.query.Result: @@ -1642,10 +1642,10 @@ def update( self, update_object: Update | str, processor: str | rdflib.query.UpdateProcessor = "sparql", - initNs: Optional[Mapping[str, Any]] = None, # noqa: N803 - initBindings: Optional[ # noqa: N803 - Mapping[str, 
rdflib.query.QueryBindingsValueType] - ] = None, + initNs: Mapping[str, Any] | None = None, # noqa: N803 + initBindings: ( # noqa: N803 + Mapping[str, rdflib.query.QueryBindingsValueType] | None + ) = None, use_store_provided: bool = True, **kwargs: Any, ) -> None: @@ -1692,7 +1692,7 @@ def update( return processor.update(update_object, initBindings, initNs, **kwargs) - def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: + def n3(self, namespace_manager: NamespaceManager | None = None) -> str: """Return an n3 identifier for the Graph""" return "[%s]" % self.identifier.n3(namespace_manager=namespace_manager) @@ -1816,10 +1816,10 @@ def _process_skolem_tuples( def skolemize( self, - new_graph: Optional[Graph] = None, - bnode: Optional[BNode] = None, - authority: Optional[str] = None, - basepath: Optional[str] = None, + new_graph: Graph | None = None, + bnode: BNode | None = None, + authority: str | None = None, + basepath: str | None = None, ) -> Graph: def do_skolemize(bnode: BNode, t: _TripleType) -> _TripleType: (s, p, o) = t @@ -1852,7 +1852,7 @@ def do_skolemize2(t: _TripleType) -> _TripleType: return retval def de_skolemize( - self, new_graph: Optional[Graph] = None, uriref: Optional[URIRef] = None + self, new_graph: Graph | None = None, uriref: URIRef | None = None ) -> Graph: def do_de_skolemize(uriref: URIRef, t: _TripleType) -> _TripleType: (s, p, o) = t @@ -1892,7 +1892,7 @@ def do_de_skolemize2(t: _TripleType) -> _TripleType: return retval def cbd( - self, resource: _SubjectType, *, target_graph: Optional[Graph] = None + self, resource: _SubjectType, *, target_graph: Graph | None = None ) -> Graph: """Retrieves the Concise Bounded Description of a Resource from a Graph @@ -1976,8 +1976,8 @@ class ConjunctiveGraph(Graph): def __init__( self, store: Store | str = "default", - identifier: Optional[IdentifiedNode | str] = None, - default_graph_base: Optional[str] = None, + identifier: IdentifiedNode | str | None = None, + 
default_graph_base: str | None = None, ): super(ConjunctiveGraph, self).__init__(store, identifier=identifier) @@ -2026,12 +2026,12 @@ def _spoc( self, triple_or_quad: None, default: bool = False, - ) -> tuple[None, None, None, Optional[Graph]]: ... + ) -> tuple[None, None, None, Graph | None]: ... @overload def _spoc( self, - triple_or_quad: Optional[_TripleOrQuadPatternType], + triple_or_quad: _TripleOrQuadPatternType | None, default: bool = False, ) -> _QuadPatternType: ... @@ -2045,13 +2045,13 @@ def _spoc( @overload def _spoc( self, - triple_or_quad: Optional[_TripleOrQuadSelectorType], + triple_or_quad: _TripleOrQuadSelectorType | None, default: bool = False, ) -> _QuadSelectorType: ... def _spoc( self, - triple_or_quad: Optional[_TripleOrQuadSelectorType], + triple_or_quad: _TripleOrQuadSelectorType | None, default: bool = False, ) -> _QuadSelectorType: """ @@ -2101,9 +2101,7 @@ def _graph(self, c: Graph | _ContextIdentifierType | str) -> Graph: ... @overload def _graph(self, c: None) -> None: ... - def _graph( - self, c: Optional[Graph | _ContextIdentifierType | str] - ) -> Optional[Graph]: + def _graph(self, c: Graph | _ContextIdentifierType | str | None) -> Graph | None: if c is None: return None if not isinstance(c, Graph): @@ -2140,27 +2138,27 @@ def remove(self: _ConjunctiveGraphT, triple_or_quad: _TripleOrOptionalQuadType) def triples( self, triple_or_quad: _TripleOrQuadPatternType, - context: Optional[_ContextType] = ..., + context: _ContextType | None = ..., ) -> Generator[_TripleType, None, None]: ... @overload def triples( self, triple_or_quad: _TripleOrQuadPathPatternType, - context: Optional[_ContextType] = ..., + context: _ContextType | None = ..., ) -> Generator[_TriplePathType, None, None]: ... @overload def triples( self, triple_or_quad: _TripleOrQuadSelectorType, - context: Optional[_ContextType] = ..., + context: _ContextType | None = ..., ) -> Generator[_TripleOrTriplePathType, None, None]: ... 
def triples( self, triple_or_quad: _TripleOrQuadSelectorType, - context: Optional[_ContextType] = None, + context: _ContextType | None = None, ) -> Generator[_TripleOrTriplePathType, None, None]: """ Iterate over all the triples in the entire conjunctive graph @@ -2191,7 +2189,7 @@ def triples( yield s, p, o def quads( - self, triple_or_quad: Optional[_TripleOrQuadPatternType] = None + self, triple_or_quad: _TripleOrQuadPatternType | None = None ) -> Generator[_OptionalQuadType, None, None]: """Iterate over all the quads in the entire conjunctive graph""" @@ -2207,20 +2205,20 @@ def triples_choices( tuple[ list[_SubjectType] | tuple[_SubjectType], _PredicateType, - Optional[_ObjectType], + _ObjectType | None, ] | tuple[ - Optional[_SubjectType], + _SubjectType | None, list[_PredicateType] | tuple[_PredicateType], - Optional[_ObjectType], + _ObjectType | None, ] | tuple[ - Optional[_SubjectType], + _SubjectType | None, _PredicateType, list[_ObjectType] | tuple[_ObjectType], ] ), - context: Optional[_ContextType] = None, + context: _ContextType | None = None, ) -> Generator[_TripleType, None, None]: """Iterate over all the triples in the entire conjunctive graph""" s, p, o = triple @@ -2239,7 +2237,7 @@ def __len__(self) -> int: return self.store.__len__() def contexts( - self, triple: Optional[_TripleType] = None + self, triple: _TripleType | None = None ) -> Generator[_ContextType, None, None]: """Iterate over all contexts in the graph @@ -2255,15 +2253,15 @@ def contexts( # type error: Statement is unreachable yield self.get_context(context) # type: ignore[unreachable] - def get_graph(self, identifier: _ContextIdentifierType) -> Optional[Graph]: + def get_graph(self, identifier: _ContextIdentifierType) -> Graph | None: """Returns the graph identified by given identifier""" return [x for x in self.contexts() if x.identifier == identifier][0] def get_context( self, - identifier: Optional[_ContextIdentifierType | str], + identifier: _ContextIdentifierType | str | 
None, quoted: bool = False, - base: Optional[str] = None, + base: str | None = None, ) -> Graph: """Return a context graph for the given identifier @@ -2280,7 +2278,7 @@ def remove_context(self, context: _ContextType) -> None: """Removes the given context from the graph""" self.store.remove((None, None, None), context) - def context_id(self, uri: str, context_id: Optional[str] = None) -> URIRef: + def context_id(self, uri: str, context_id: str | None = None) -> URIRef: """URI#context""" uri = uri.split("#", 1)[0] if context_id is None: @@ -2289,14 +2287,14 @@ def context_id(self, uri: str, context_id: Optional[str] = None) -> URIRef: def parse( self, - source: Optional[ - IO[bytes] | TextIO | InputSource | str | bytes | pathlib.PurePath - ] = None, - publicID: Optional[str] = None, # noqa: N803 - format: Optional[str] = None, - location: Optional[str] = None, - file: Optional[BinaryIO | TextIO] = None, - data: Optional[str | bytes] = None, + source: ( + IO[bytes] | TextIO | InputSource | str | bytes | pathlib.PurePath | None + ) = None, + publicID: str | None = None, # noqa: N803 + format: str | None = None, + location: str | None = None, + file: BinaryIO | TextIO | None = None, + data: str | bytes | None = None, **args: Any, ) -> Graph: """ @@ -2492,7 +2490,7 @@ def __init__( self, store: Store | str = "default", default_union: bool = False, - default_graph_base: Optional[str] = None, + default_graph_base: str | None = None, ): super(Dataset, self).__init__(store=store, identifier=None) @@ -2532,8 +2530,8 @@ def __setstate__( def graph( self, - identifier: Optional[_ContextIdentifierType | _ContextType | str] = None, - base: Optional[str] = None, + identifier: _ContextIdentifierType | _ContextType | str | None = None, + base: str | None = None, ) -> Graph: if identifier is None: from rdflib.term import _SKOLEM_DEFAULT_AUTHORITY, rdflib_skolem_genid @@ -2553,14 +2551,14 @@ def graph( def parse( self, - source: Optional[ - IO[bytes] | TextIO | InputSource | str | 
bytes | pathlib.PurePath - ] = None, - publicID: Optional[str] = None, # noqa: N803 - format: Optional[str] = None, - location: Optional[str] = None, - file: Optional[BinaryIO | TextIO] = None, - data: Optional[str | bytes] = None, + source: ( + IO[bytes] | TextIO | InputSource | str | bytes | pathlib.PurePath | None + ) = None, + publicID: str | None = None, # noqa: N803 + format: str | None = None, + location: str | None = None, + file: BinaryIO | TextIO | None = None, + data: str | bytes | None = None, **args: Any, ) -> Graph: """ @@ -2602,14 +2600,12 @@ def parse( self.graph(c) return c - def add_graph( - self, g: Optional[_ContextIdentifierType | _ContextType | str] - ) -> Graph: + def add_graph(self, g: _ContextIdentifierType | _ContextType | str | None) -> Graph: """alias of graph for consistency""" return self.graph(g) def remove_graph( - self: _DatasetT, g: Optional[_ContextIdentifierType | _ContextType | str] + self: _DatasetT, g: _ContextIdentifierType | _ContextType | str | None ) -> _DatasetT: if not isinstance(g, Graph): g = self.get_context(g) @@ -2622,7 +2618,7 @@ def remove_graph( return self def contexts( - self, triple: Optional[_TripleType] = None + self, triple: _TripleType | None = None ) -> Generator[_ContextType, None, None]: default = False for c in super(Dataset, self).contexts(triple): @@ -2635,7 +2631,7 @@ def contexts( # type error: Return type "Generator[tuple[Node, Node, Node, Optional[Node]], None, None]" of "quads" incompatible with return type "Generator[tuple[Node, Node, Node, Optional[Graph]], None, None]" in supertype "ConjunctiveGraph" def quads( # type: ignore[override] - self, quad: Optional[_TripleOrQuadPatternType] = None + self, quad: _TripleOrQuadPatternType | None = None ) -> Generator[_OptionalIdentifiedQuadType, None, None]: for s, p, o, c in super(Dataset, self).quads(quad): # type error: Item "None" of "Optional[Graph]" has no attribute "identifier" @@ -2664,11 +2660,11 @@ class QuotedGraph(Graph, IdentifiedNode): 
def __new__( cls, store: Store | str, - identifier: Optional[_ContextIdentifierType | str], + identifier: _ContextIdentifierType | str | None, ): return str.__new__(cls, identifier) - def __init__(self, store: Store, identifier: Optional[_ContextIdentifierType]): + def __init__(self, store: Store, identifier: _ContextIdentifierType | None): super(QuotedGraph, self).__init__(store, identifier) def add(self: _QuotedGraphT, triple: _TripleType) -> _QuotedGraphT: @@ -2695,7 +2691,7 @@ def addN( # noqa: N802 ) return self - def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: + def n3(self, namespace_manager: NamespaceManager | None = None) -> str: """Return an n3 identifier for the Graph""" return "{%s}" % self.identifier.n3(namespace_manager=namespace_manager) @@ -2963,20 +2959,20 @@ def triples_choices( tuple[ list[_SubjectType] | tuple[_SubjectType], _PredicateType, - Optional[_ObjectType], + _ObjectType | None, ] | tuple[ - Optional[_SubjectType], + _SubjectType | None, list[_PredicateType] | tuple[_PredicateType], - Optional[_ObjectType], + _ObjectType | None, ] | tuple[ - Optional[_SubjectType], + _SubjectType | None, _PredicateType, list[_ObjectType] | tuple[_ObjectType], ] ), - context: Optional[_ContextType] = None, + context: _ContextType | None = None, ) -> Generator[_TripleType, None, None]: subject, predicate, object_ = triple for graph in self.graphs: @@ -2998,7 +2994,7 @@ def compute_qname(self, uri: str, generate: bool = True) -> tuple[str, URIRef, s # type error: Signature of "bind" incompatible with supertype "Graph" def bind( # type: ignore[override] - self, prefix: Optional[str], namespace: Any, override: bool = True # noqa: F811 + self, prefix: str | None, namespace: Any, override: bool = True # noqa: F811 ) -> NoReturn: raise UnSupportedAggregateOperation() @@ -3017,16 +3013,16 @@ def absolutize(self, uri: str, defrag: int = 1) -> NoReturn: # type error: Signature of "parse" incompatible with supertype "ConjunctiveGraph" def 
parse( # type: ignore[override] self, - source: Optional[ - IO[bytes] | TextIO | InputSource | str | bytes | pathlib.PurePath - ], - publicID: Optional[str] = None, # noqa: N803 - format: Optional[str] = None, + source: ( + IO[bytes] | TextIO | InputSource | str | bytes | pathlib.PurePath | None + ), + publicID: str | None = None, # noqa: N803 + format: str | None = None, **args: Any, ) -> NoReturn: raise ModificationException() - def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> NoReturn: + def n3(self, namespace_manager: NamespaceManager | None = None) -> NoReturn: raise UnSupportedAggregateOperation() def __reduce__(self) -> NoReturn: diff --git a/rdflib/namespace/__init__.py b/rdflib/namespace/__init__.py index e6d0b1391..c0c388022 100644 --- a/rdflib/namespace/__init__.py +++ b/rdflib/namespace/__init__.py @@ -77,7 +77,7 @@ from collections.abc import Iterable from functools import lru_cache from pathlib import Path -from typing import TYPE_CHECKING, Any, Optional +from typing import TYPE_CHECKING, Any from unicodedata import category from urllib.parse import urldefrag, urljoin @@ -580,7 +580,7 @@ def normalizeUri(self, rdfTerm: str) -> str: # noqa: N802, N803 return ":".join([qNameParts[0], qNameParts[-1]]) def compute_qname(self, uri: str, generate: bool = True) -> tuple[str, URIRef, str]: - prefix: Optional[str] + prefix: str | None if uri not in self.__cache: if not _is_valid_uri(uri): raise ValueError( @@ -631,7 +631,7 @@ def compute_qname_strict( # if output needs to be strict (e.g. 
for xml) then # only the strict output should bear the overhead namespace: str - prefix: Optional[str] + prefix: str | None prefix, namespace, name = self.compute_qname(uri, generate) if is_ncname(str(name)): return prefix, namespace, name @@ -730,7 +730,7 @@ def _store_bind(self, prefix: str, namespace: URIRef, override: bool) -> None: def bind( self, - prefix: Optional[str], + prefix: str | None, namespace: Any, override: bool = True, replace: bool = False, @@ -930,7 +930,7 @@ def insert_strie(strie: dict[str, Any], trie: dict[str, Any], value: str) -> Non strie[value] = insert_trie(trie, value) -def get_longest_namespace(trie: dict[str, Any], value: str) -> Optional[str]: +def get_longest_namespace(trie: dict[str, Any], value: str) -> str | None: for key in trie: if value.startswith(key): out = get_longest_namespace(trie[key], value) diff --git a/rdflib/parser.py b/rdflib/parser.py index 462076490..ae48afb6b 100644 --- a/rdflib/parser.py +++ b/rdflib/parser.py @@ -22,7 +22,6 @@ TYPE_CHECKING, Any, BinaryIO, - Optional, TextIO, Union, cast, @@ -89,13 +88,13 @@ def __init__(self, wrapped: Union[str, StringIO, TextIOBase], encoding="utf-8"): self.wrapped = wrapped self.encoding = encoding self.encoder = codecs.getencoder(self.encoding) - self.enc_str: Optional[Union[BytesIO, BufferedIOBase]] = None - self.text_str: Optional[Union[StringIO, TextIOBase]] = None - self.has_read1: Optional[bool] = None - self.has_seek: Optional[bool] = None - self._name: Optional[str] = None - self._fileno: Optional[Union[int, BaseException]] = None - self._isatty: Optional[Union[bool, BaseException]] = None + self.enc_str: Union[BytesIO, BufferedIOBase] | None = None + self.text_str: Union[StringIO, TextIOBase] | None = None + self.has_read1: bool | None = None + self.has_seek: bool | None = None + self._name: str | None = None + self._fileno: Union[int, BaseException] | None = None + self._isatty: Union[bool, BaseException] | None = None self._leftover: bytes = b"" 
self._text_bytes_offset: int = 0 norm_encoding = encoding.lower().replace("_", "-") @@ -123,7 +122,7 @@ def __init__(self, wrapped: Union[str, StringIO, TextIOBase], encoding="utf-8"): self._bytes_per_char = 2 def _init(self): - name: Optional[str] = None + name: str | None = None if isinstance(self.wrapped, str): b, blen = self.encoder(self.wrapped) self.enc_str = BytesIO(b) @@ -200,7 +199,7 @@ def name(self) -> Any: def closed(self) -> bool: if self.enc_str is None and self.text_str is None: return False - closed: Optional[bool] = None + closed: bool | None = None if self.enc_str is not None: try: closed = self.enc_str.closed @@ -219,7 +218,7 @@ def readable(self) -> bool: def writable(self) -> bool: return False - def truncate(self, size: Optional[int] = None) -> int: + def truncate(self, size: int | None = None) -> int: raise NotImplementedError("Cannot truncate on BytesIOWrapper") def isatty(self) -> bool: @@ -255,7 +254,7 @@ def close(self): def flush(self): return # Does nothing on read-only streams - def _read_bytes_from_text_stream(self, size: Optional[int] = -1, /) -> bytes: + def _read_bytes_from_text_stream(self, size: int | None = -1, /) -> bytes: if TYPE_CHECKING: assert self.text_str is not None if size is None or size < 0: @@ -298,7 +297,7 @@ def _read_bytes_from_text_stream(self, size: Optional[int] = -1, /) -> bytes: self._text_bytes_offset += len(ret_bytes) return ret_bytes - def read(self, size: Optional[int] = -1, /) -> bytes: + def read(self, size: int | None = -1, /) -> bytes: """ Read at most size bytes, returned as a bytes object. @@ -315,7 +314,7 @@ def read(self, size: Optional[int] = -1, /) -> bytes: ret_bytes = self._read_bytes_from_text_stream(size) return ret_bytes - def read1(self, size: Optional[int] = -1, /) -> bytes: + def read1(self, size: int | None = -1, /) -> bytes: """ Read at most size bytes, with at most one call to the underlying raw stream’s read() or readinto() method. Returned as a bytes object. 
@@ -421,9 +420,9 @@ class InputSource(xmlreader.InputSource): TODO: """ - def __init__(self, system_id: Optional[str] = None): + def __init__(self, system_id: str | None = None): xmlreader.InputSource.__init__(self, system_id=system_id) - self.content_type: Optional[str] = None + self.content_type: str | None = None self.auto_close = False # see Graph.parse(), true if opened by us def close(self) -> None: @@ -457,23 +456,23 @@ class PythonInputSource(InputSource): True """ - def __init__(self, data: Any, system_id: Optional[str] = None): + def __init__(self, data: Any, system_id: str | None = None): self.content_type = None self.auto_close = False # see Graph.parse(), true if opened by us - self.public_id: Optional[str] = None - self.system_id: Optional[str] = system_id + self.public_id: str | None = None + self.system_id: str | None = system_id self.data = data - def getPublicId(self) -> Optional[str]: # noqa: N802 + def getPublicId(self) -> str | None: # noqa: N802 return self.public_id - def setPublicId(self, public_id: Optional[str]) -> None: # noqa: N802 + def setPublicId(self, public_id: str | None) -> None: # noqa: N802 self.public_id = public_id - def getSystemId(self) -> Optional[str]: # noqa: N802 + def getSystemId(self) -> str | None: # noqa: N802 return self.system_id - def setSystemId(self, system_id: Optional[str]) -> None: # noqa: N802 + def setSystemId(self, system_id: str | None) -> None: # noqa: N802 self.system_id = system_id def close(self) -> None: @@ -489,7 +488,7 @@ def __init__( self, value: str | bytes, encoding: str = "utf-8", - system_id: Optional[str] = None, + system_id: str | None = None, ): super(StringInputSource, self).__init__(system_id) stream: Union[BinaryIO, TextIO] @@ -537,8 +536,8 @@ def get_links(cls, response: addinfourl) -> list[str]: retarray.append(link) return retarray - def get_alternates(self, type_: Optional[str] = None) -> list[str]: - typestr: Optional[str] = f'type="{type_}"' if type_ else None + def 
get_alternates(self, type_: str | None = None) -> list[str]: + typestr: str | None = f'type="{type_}"' if type_ else None relstr = 'rel="alternate"' alts = [] for link in self.links: @@ -552,7 +551,7 @@ def get_alternates(self, type_: Optional[str] = None) -> list[str]: alts.append(parts[0].strip("<>")) return alts - def __init__(self, system_id: Optional[str] = None, format: Optional[str] = None): + def __init__(self, system_id: str | None = None, format: str | None = None): super(URLInputSource, self).__init__(system_id) self.url = system_id @@ -620,7 +619,7 @@ def __init__( self, file: Union[BinaryIO, TextIO, TextIOBase, RawIOBase, BufferedIOBase], /, - encoding: Optional[str] = None, + encoding: str | None = None, ): base = pathlib.Path.cwd().as_uri() system_id = URIRef(pathlib.Path(file.name).absolute().as_uri(), base=base) # type: ignore[union-attr] @@ -651,14 +650,14 @@ def __repr__(self) -> str: def create_input_source( - source: Optional[ - Union[IO[bytes], TextIO, InputSource, str, bytes, pathlib.PurePath] - ] = None, - publicID: Optional[str] = None, # noqa: N803 - location: Optional[str] = None, - file: Optional[Union[BinaryIO, TextIO]] = None, - data: Optional[Union[str, bytes, dict]] = None, - format: Optional[str] = None, + source: ( + Union[IO[bytes], TextIO, InputSource, str, bytes, pathlib.PurePath] | None + ) = None, + publicID: str | None = None, # noqa: N803 + location: str | None = None, + file: BinaryIO | TextIO | None = None, + data: str | bytes | dict | None = None, + format: str | None = None, ) -> InputSource: """ Return an appropriate InputSource instance for the given @@ -771,11 +770,11 @@ def create_input_source( def _create_input_source_from_location( - file: Optional[Union[BinaryIO, TextIO]], - format: Optional[str], - input_source: Optional[InputSource], + file: BinaryIO | TextIO | None, + format: str | None, + input_source: InputSource | None, location: str, -) -> tuple[URIRef, bool, Optional[Union[BinaryIO, TextIO]], 
Optional[InputSource]]: +) -> tuple[URIRef, bool, BinaryIO | TextIO | None, InputSource | None]: # Fix for Windows problem https://github.com/RDFLib/rdflib/issues/145 and # https://github.com/RDFLib/rdflib/issues/1430 # NOTE: using pathlib.Path.exists on a URL fails on windows as it is not a diff --git a/rdflib/paths.py b/rdflib/paths.py index 4cb1e989c..ae2edf381 100644 --- a/rdflib/paths.py +++ b/rdflib/paths.py @@ -185,7 +185,7 @@ import warnings from abc import ABC, abstractmethod from functools import total_ordering -from typing import TYPE_CHECKING, Any, Optional +from typing import TYPE_CHECKING, Any from rdflib.term import Node, URIRef @@ -210,9 +210,7 @@ ZeroOrOne = "?" -def _n3( - arg: URIRef | Path, namespace_manager: Optional[NamespaceManager] = None -) -> str: +def _n3(arg: URIRef | Path, namespace_manager: NamespaceManager | None = None) -> str: if isinstance(arg, (SequencePath, AlternativePath)) and len(arg.args) > 1: return "(%s)" % arg.n3(namespace_manager) return arg.n3(namespace_manager) @@ -230,12 +228,12 @@ class Path(ABC): def eval( self, graph: Graph, - subj: Optional[SubjectType] = None, - obj: Optional[ObjectType] = None, + subj: SubjectType | None = None, + obj: ObjectType | None = None, ) -> Iterator[tuple[SubjectType, ObjectType]]: ... @abstractmethod - def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: ... + def n3(self, namespace_manager: NamespaceManager | None = None) -> str: ... 
def __hash__(self): return hash(repr(self)) @@ -258,8 +256,8 @@ def __init__(self, arg: Path | URIRef): def eval( self, graph: Graph, - subj: Optional[SubjectType] = None, - obj: Optional[ObjectType] = None, + subj: SubjectType | None = None, + obj: ObjectType | None = None, ) -> Generator[tuple[ObjectType, SubjectType], None, None]: for s, o in eval_path(graph, (obj, self.arg, subj)): yield o, s @@ -267,7 +265,7 @@ def eval( def __repr__(self) -> str: return "Path(~%s)" % (self.arg,) - def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: + def n3(self, namespace_manager: NamespaceManager | None = None) -> str: return "^%s" % _n3(self.arg, namespace_manager) @@ -283,13 +281,13 @@ def __init__(self, *args: Path | URIRef): def eval( self, graph: Graph, - subj: Optional[SubjectType] = None, - obj: Optional[ObjectType] = None, + subj: SubjectType | None = None, + obj: ObjectType | None = None, ) -> Generator[tuple[SubjectType, ObjectType], None, None]: def _eval_seq( paths: list[Path | URIRef], - subj: Optional[SubjectType], - obj: Optional[ObjectType], + subj: SubjectType | None, + obj: ObjectType | None, ) -> Generator[tuple[SubjectType, ObjectType], None, None]: if paths[1:]: for s, o in eval_path(graph, (subj, paths[0], None)): @@ -302,7 +300,7 @@ def _eval_seq( def _eval_seq_bw( paths: list[Path | URIRef], - subj: Optional[SubjectType], + subj: SubjectType | None, obj: ObjectType, ) -> Generator[tuple[SubjectType, ObjectType], None, None]: if paths[:-1]: @@ -324,7 +322,7 @@ def _eval_seq_bw( def __repr__(self) -> str: return "Path(%s)" % " / ".join(str(x) for x in self.args) - def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: + def n3(self, namespace_manager: NamespaceManager | None = None) -> str: return "/".join(_n3(a, namespace_manager) for a in self.args) @@ -340,8 +338,8 @@ def __init__(self, *args: Path | URIRef): def eval( self, graph: Graph, - subj: Optional[SubjectType] = None, - obj: Optional[ObjectType] = 
None, + subj: SubjectType | None = None, + obj: ObjectType | None = None, ) -> Generator[tuple[SubjectType, ObjectType], None, None]: for x in self.args: for y in eval_path(graph, (subj, x, obj)): @@ -350,7 +348,7 @@ def eval( def __repr__(self) -> str: return "Path(%s)" % " | ".join(str(x) for x in self.args) - def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: + def n3(self, namespace_manager: NamespaceManager | None = None) -> str: return "|".join(_n3(a, namespace_manager) for a in self.args) @@ -374,8 +372,8 @@ def __init__(self, path: Path | URIRef, mod: _MulPathMod): def eval( self, graph: Graph, - subj: Optional[SubjectType] = None, - obj: Optional[ObjectType] = None, + subj: SubjectType | None = None, + obj: ObjectType | None = None, first: bool = True, ) -> Generator[tuple[SubjectType, ObjectType], None, None]: if self.zero and first: @@ -389,7 +387,7 @@ def eval( def _fwd( subj: SubjectType, - obj: Optional[ObjectType], + obj: ObjectType | None, seen: set[SubjectType], ) -> Generator[tuple[SubjectType, ObjectType], None, None]: seen.add(subj) @@ -404,7 +402,7 @@ def _fwd( yield s, o2 def _bwd( - subj: Optional[SubjectType], + subj: SubjectType | None, obj: ObjectType, seen: set[ObjectType], ) -> Generator[tuple[SubjectType, ObjectType], None, None]: @@ -468,7 +466,7 @@ def _all_fwd_paths() -> Generator[tuple[SubjectType, ObjectType], None, None]: def __repr__(self) -> str: return "Path(%s%s)" % (self.path, self.mod) - def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: + def n3(self, namespace_manager: NamespaceManager | None = None) -> str: return "%s%s" % (_n3(self.path, namespace_manager), self.mod) @@ -502,7 +500,7 @@ def eval(self, graph, subj=None, obj=None): def __repr__(self) -> str: return "Path(! 
%s)" % ",".join(str(x) for x in self.args) - def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: + def n3(self, namespace_manager: NamespaceManager | None = None) -> str: return "!(%s)" % ("|".join(_n3(arg, namespace_manager) for arg in self.args)) @@ -531,9 +529,9 @@ def path_sequence(self: URIRef | Path, other: URIRef | Path): def evalPath( # noqa: N802 graph: Graph, t: tuple[ - Optional[SubjectType], - Optional[Path | PredicateType], - Optional[ObjectType], + SubjectType | None, + Path | PredicateType | None, + ObjectType | None, ], ) -> Iterator[tuple[SubjectType, ObjectType]]: warnings.warn( @@ -549,9 +547,9 @@ def evalPath( # noqa: N802 def eval_path( graph: Graph, t: tuple[ - Optional[SubjectType], - Optional[Path | PredicateType], - Optional[ObjectType], + SubjectType | None, + Path | PredicateType | None, + ObjectType | None, ], ) -> Iterator[tuple[SubjectType, ObjectType]]: return ((s, o) for s, p, o in graph.triples(t)) diff --git a/rdflib/plugin.py b/rdflib/plugin.py index aa6203e40..8722df453 100644 --- a/rdflib/plugin.py +++ b/rdflib/plugin.py @@ -33,7 +33,6 @@ TYPE_CHECKING, Any, Generic, - Optional, TypeVar, overload, ) @@ -91,7 +90,7 @@ def __init__( self.kind = kind self.module_path = module_path self.class_name = class_name - self._class: Optional[type[PluginT]] = None + self._class: type[PluginT] | None = None def getClass(self) -> type[PluginT]: # noqa: N802 if self._class is None: @@ -105,7 +104,7 @@ def __init__(self, name: str, kind: type[PluginT], ep: EntryPoint): self.name = name self.kind = kind self.ep = ep - self._class: Optional[type[PluginT]] = None + self._class: type[PluginT] | None = None def getClass(self) -> type[PluginT]: # noqa: N802 if self._class is None: @@ -150,16 +149,16 @@ def get(name: str, kind: type[PluginT]) -> type[PluginT]: @overload def plugins( - name: Optional[str] = ..., kind: type[PluginT] = ... + name: str | None = ..., kind: type[PluginT] = ... ) -> Iterator[Plugin[PluginT]]: ... 
@overload -def plugins(name: Optional[str] = ..., kind: None = ...) -> Iterator[Plugin]: ... +def plugins(name: str | None = ..., kind: None = ...) -> Iterator[Plugin]: ... def plugins( - name: Optional[str] = None, kind: Optional[type[PluginT]] = None + name: str | None = None, kind: type[PluginT] | None = None ) -> Iterator[Plugin[PluginT]]: """ A generator of the plugins. diff --git a/rdflib/plugins/parsers/hext.py b/rdflib/plugins/parsers/hext.py index 9753b38db..1192338d8 100644 --- a/rdflib/plugins/parsers/hext.py +++ b/rdflib/plugins/parsers/hext.py @@ -9,7 +9,7 @@ import json import warnings from io import TextIOWrapper -from typing import TYPE_CHECKING, Any, BinaryIO, Optional, TextIO, Union +from typing import TYPE_CHECKING, Any, BinaryIO, TextIO, Union from rdflib.graph import ConjunctiveGraph, Dataset, Graph from rdflib.parser import InputSource, Parser @@ -36,7 +36,7 @@ class HextuplesParser(Parser): def __init__(self): super(HextuplesParser, self).__init__() - self.default_context: Optional[Graph] = None + self.default_context: Graph | None = None self.skolemize = False def _parse_hextuple( @@ -124,11 +124,11 @@ def parse(self, source: InputSource, graph: Graph, skolemize: bool = False, **kw ds.remove_graph(ds_default) # remove the original unused default graph try: - text_stream: Optional[TextIO] = source.getCharacterStream() + text_stream: TextIO | None = source.getCharacterStream() except (AttributeError, LookupError): text_stream = None try: - binary_stream: Optional[BinaryIO] = source.getByteStream() + binary_stream: BinaryIO | None = source.getByteStream() except (AttributeError, LookupError): binary_stream = None diff --git a/rdflib/plugins/parsers/jsonld.py b/rdflib/plugins/parsers/jsonld.py index 10ce4d04c..bc4585624 100644 --- a/rdflib/plugins/parsers/jsonld.py +++ b/rdflib/plugins/parsers/jsonld.py @@ -36,7 +36,7 @@ import warnings from collections.abc import Iterable -from typing import TYPE_CHECKING, Any, Optional, Union +from typing 
import TYPE_CHECKING, Any, Union import rdflib.parser from rdflib.graph import ConjunctiveGraph, Graph @@ -91,17 +91,11 @@ def parse( sink: Graph, version: float = 1.1, skolemize: bool = False, - encoding: Optional[str] = "utf-8", - base: Optional[str] = None, - context: Optional[ - Union[ - list[Union[dict[str, Any], str, None]], - dict[str, Any], - str, - ] - ] = None, - generalized_rdf: Optional[bool] = False, - extract_all_scripts: Optional[bool] = False, + encoding: str | None = "utf-8", + base: str | None = None, + context: list[dict[str, Any] | str | None] | dict[str, Any] | str | None = None, + generalized_rdf: bool | None = False, + extract_all_scripts: bool | None = False, **kwargs: Any, ) -> None: """Parse JSON-LD from a source document. @@ -186,17 +180,13 @@ def parse( def to_rdf( data: Any, dataset: Graph, - base: Optional[str] = None, - context_data: Optional[ - Union[ - list[Union[dict[str, Any], str, None]], - dict[str, Any], - str, - ] - ] = None, - version: Optional[float] = None, + base: str | None = None, + context_data: ( + list[dict[str, Any] | str | None] | dict[str, Any] | str | None + ) = None, + version: float | None = None, generalized_rdf: bool = False, - allow_lists_of_lists: Optional[bool] = None, + allow_lists_of_lists: bool | None = None, skolemize: bool = False, ): # TODO: docstring w. 
args and return value @@ -215,7 +205,7 @@ class Parser: def __init__( self, generalized_rdf: bool = False, - allow_lists_of_lists: Optional[bool] = None, + allow_lists_of_lists: bool | None = None, skolemize: bool = False, ): self.skolemize = skolemize @@ -262,7 +252,7 @@ def _add_to_graph( context: Context, node: Any, topcontext: bool = False, - ) -> Optional[IdentifiedNode]: + ) -> IdentifiedNode | None: if not isinstance(node, dict) or context.get_value(node): # type error: Return value expected return # type: ignore[return-value] @@ -284,7 +274,7 @@ def _add_to_graph( if nested_id is not None and len(nested_id) > 0: id_val = nested_id - subj: Optional[IdentifiedNode] + subj: IdentifiedNode | None if isinstance(id_val, str): subj = self._to_rdf_id(context, id_val) @@ -320,7 +310,7 @@ def _add_to_graph( return subj # type error: Missing return statement - def _get_nested_id(self, context: Context, node: dict[str, Any]) -> Optional[str]: # type: ignore[return] + def _get_nested_id(self, context: Context, node: dict[str, Any]) -> str | None: # type: ignore[return] for key, obj in node.items(): if context.version >= 1.1 and key in context.get_keys(NEST): term = context.terms.get(key) @@ -548,10 +538,10 @@ def _to_object( dataset: Graph, graph: Graph, context: Context, - term: Optional[Term], + term: Term | None, node: Any, inlist: bool = False, - ) -> Optional[_ObjectType]: + ) -> _ObjectType | None: if isinstance(node, tuple): value, lang = node if value is None: @@ -622,7 +612,7 @@ def _to_object( else: return self._add_to_graph(dataset, graph, context, node) - def _to_rdf_id(self, context: Context, id_val: str) -> Optional[IdentifiedNode]: + def _to_rdf_id(self, context: Context, id_val: str) -> IdentifiedNode | None: bid = self._get_bnodeid(id_val) if bid: b = BNode(bid) @@ -635,7 +625,7 @@ def _to_rdf_id(self, context: Context, id_val: str) -> Optional[IdentifiedNode]: return None return URIRef(uri) - def _get_bnodeid(self, ref: str) -> Optional[str]: + def 
_get_bnodeid(self, ref: str) -> str | None: if not ref.startswith("_:"): # type error: Return value expected return # type: ignore[return-value] @@ -647,7 +637,7 @@ def _add_list( dataset: Graph, graph: Graph, context: Context, - term: Optional[Term], + term: Term | None, node_list: Any, ) -> IdentifiedNode: if not isinstance(node_list, list): diff --git a/rdflib/plugins/parsers/notation3.py b/rdflib/plugins/parsers/notation3.py index 92b2e5548..acc56215b 100755 --- a/rdflib/plugins/parsers/notation3.py +++ b/rdflib/plugins/parsers/notation3.py @@ -40,7 +40,6 @@ TYPE_CHECKING, Any, NoReturn, - Optional, TypeVar, Union, cast, @@ -273,7 +272,7 @@ def _fixslash(s: str) -> str: N3_Empty = (SYMBOL, List_NS + "Empty") -runNamespaceValue: Optional[str] = None +runNamespaceValue: str | None = None def runNamespace() -> str: @@ -381,11 +380,11 @@ class SinkParser: def __init__( self, store: RDFSink, - openFormula: Optional[Formula] = None, + openFormula: Formula | None = None, thisDoc: str = "", - baseURI: Optional[str] = None, + baseURI: str | None = None, genPrefix: str = "", - why: Optional[Callable[[], None]] = None, + why: Callable[[], None] | None = None, turtle: bool = False, ): """note: namespace names should *not* end in # ; @@ -418,7 +417,7 @@ def __init__( # only allows double quotes. 
self.string_delimiters = ('"', "'") if turtle else ('"',) - self._reason2: Optional[Callable[..., None]] = None # Why these triples + self._reason2: Callable[..., None] | None = None # Why these triples # was: diag.tracking if tracking: # type error: "BecauseOfData" does not return a value @@ -426,7 +425,7 @@ def __init__( store.newSymbol(thisDoc), because=self._reason ) - self._baseURI: Optional[str] + self._baseURI: str | None if baseURI: self._baseURI = baseURI else: @@ -443,7 +442,7 @@ def __init__( else: self._genPrefix = uniqueURI() - self._formula: Optional[Formula] + self._formula: Formula | None if openFormula is None and not turtle: if self._thisDoc: # TODO FIXME: store.newFormula does not take any arguments @@ -453,8 +452,8 @@ def __init__( else: self._formula = openFormula - self._context: Optional[Formula] = self._formula - self._parentContext: Optional[Formula] = None + self._context: Formula | None = self._formula + self._parentContext: Formula | None = None def here(self, i: int) -> str: """String generated from position in file @@ -469,13 +468,13 @@ def here(self, i: int) -> str: return "%s_L%iC%i" % (self._genPrefix, self.lines, i - self.startOfLine + 1) - def formula(self) -> Optional[Formula]: + def formula(self) -> Formula | None: return self._formula - def loadStream(self, stream: Union[IO[str], IO[bytes]]) -> Optional[Formula]: + def loadStream(self, stream: Union[IO[str], IO[bytes]]) -> Formula | None: return self.loadBuf(stream.read()) # Not ideal - def loadBuf(self, buf: str | bytes) -> Optional[Formula]: + def loadBuf(self, buf: str | bytes) -> Formula | None: """Parses a buffer and returns its top level formula""" self.startDoc() @@ -744,7 +743,7 @@ def bind(self, qn: str, uri: bytes) -> None: else: self._store.bind(qn, uri) - def setKeywords(self, k: Optional[list[str]]) -> None: + def setKeywords(self, k: list[str] | None) -> None: """Takes a list of strings""" if k is None: self.keywordsSet = 0 @@ -756,7 +755,7 @@ def startDoc(self) 
-> None: # was: self._store.startDoc() self._store.startDoc(self._formula) - def endDoc(self) -> Optional[Formula]: + def endDoc(self) -> Formula | None: """Signal end of document and stop parsing. returns formula""" self._store.endDoc(self._formula) # don't canonicalize yet return self._formula @@ -873,7 +872,7 @@ def prop(self, argstr: str, i: int, res: MutableSequence[Any]) -> int: def item(self, argstr: str, i, res: MutableSequence[Any]) -> int: return self.path(argstr, i, res) - def blankNode(self, uri: Optional[str] = None) -> BNode: + def blankNode(self, uri: str | None = None) -> BNode: return self._store.newBlankNode(self._context, uri, why=self._reason2) def path(self, argstr: str, i: int, res: MutableSequence[Any]) -> int: @@ -911,13 +910,13 @@ def node( argstr: str, i: int, res: MutableSequence[Any], - subjectAlready: Optional[Node] = None, + subjectAlready: Node | None = None, ) -> int: """Parse the production. Space is now skipped once at the beginning instead of in multiple calls to self.skipSpace(). 
""" - subj: Optional[Node] = subjectAlready + subj: Node | None = subjectAlready j = self.skipSpace(argstr, i) if j < 0: @@ -1049,9 +1048,7 @@ def node( return j if ch == "(": - thing_type: Callable[ - [list[Any], Optional[Formula]], Union[set[Any], IdentifiedNode] - ] + thing_type: Callable[[list[Any], Formula | None], set[Any] | IdentifiedNode] thing_type = self._store.newList ch2 = argstr[i + 1] if ch2 == "$": @@ -1789,9 +1786,7 @@ def __str__(self) -> str: def id(self) -> BNode: return BNode("_:Formula%s" % self.number) - def newBlankNode( - self, uri: Optional[str] = None, why: Optional[Any] = None - ) -> BNode: + def newBlankNode(self, uri: str | None = None, why: Any | None = None) -> BNode: if uri is None: self.counter += 1 bn = BNode("f%sb%s" % (self.uuid, self.counter)) @@ -1799,7 +1794,7 @@ def newBlankNode( bn = BNode(uri.split("#").pop().replace("_", "b")) return bn - def newUniversal(self, uri: str, why: Optional[Any] = None) -> Variable: + def newUniversal(self, uri: str, why: Any | None = None) -> Variable: return Variable(uri.split("#").pop()) def declareExistential(self, x: str) -> None: @@ -1814,7 +1809,7 @@ def close(self) -> QuotedGraph: class RDFSink: def __init__(self, graph: Graph): - self.rootFormula: Optional[Formula] = None + self.rootFormula: Formula | None = None self.uuid = uuid4().hex self.counter = 0 self.graph = graph @@ -1836,9 +1831,9 @@ def newSymbol(self, *args: str) -> URIRef: def newBlankNode( self, - arg: Optional[Union[Formula, Graph, Any]] = None, - uri: Optional[str] = None, - why: Optional[Callable[[], None]] = None, + arg: Formula | Graph | Any | None = None, + uri: str | None = None, + why: Callable[[], None] | None = None, ) -> BNode: if isinstance(arg, Formula): return arg.newBlankNode(uri) @@ -1849,13 +1844,13 @@ def newBlankNode( bn = BNode(str(arg[0]).split("#").pop().replace("_", "b")) return bn - def newLiteral(self, s: str, dt: Optional[URIRef], lang: Optional[str]) -> Literal: + def newLiteral(self, s: str, dt: 
URIRef | None, lang: str | None) -> Literal: if dt: return Literal(s, datatype=dt) else: return Literal(s, lang=lang) - def newList(self, n: list[Any], f: Optional[Formula]) -> IdentifiedNode: + def newList(self, n: list[Any], f: Formula | None) -> IdentifiedNode: nil = self.newSymbol("http://www.w3.org/1999/02/22-rdf-syntax-ns#nil") if not n: return nil @@ -1881,8 +1876,8 @@ def setDefaultNamespace(self, *args: bytes) -> str: def makeStatement( self, - quadruple: tuple[Optional[Union[Formula, Graph]], Node, Node, Node], - why: Optional[Any] = None, + quadruple: tuple[Formula | Graph | None, Node, Node, Node], + why: Any | None = None, ) -> None: f, p, s, o = quadruple @@ -1906,26 +1901,26 @@ def makeStatement( # return str(quadruple) @overload - def normalise(self, f: Optional[Formula | Graph], n: tuple[int, str]) -> URIRef: ... + def normalise(self, f: Formula | Graph | None, n: tuple[int, str]) -> URIRef: ... @overload - def normalise(self, f: Optional[Formula | Graph], n: bool) -> Literal: ... + def normalise(self, f: Formula | Graph | None, n: bool) -> Literal: ... @overload - def normalise(self, f: Optional[Formula | Graph], n: int) -> Literal: ... + def normalise(self, f: Formula | Graph | None, n: int) -> Literal: ... @overload - def normalise(self, f: Optional[Formula | Graph], n: Decimal) -> Literal: ... + def normalise(self, f: Formula | Graph | None, n: Decimal) -> Literal: ... @overload - def normalise(self, f: Optional[Formula | Graph], n: float) -> Literal: ... + def normalise(self, f: Formula | Graph | None, n: float) -> Literal: ... @overload - def normalise(self, f: Optional[Formula | Graph], n: Node) -> Node: ... + def normalise(self, f: Formula | Graph | None, n: Node) -> Node: ... 
def normalise( self, - f: Optional[Formula | Graph], + f: Formula | Graph | None, n: Union[tuple[int, str], bool, int, Decimal, float, Node, _AnyT], ) -> Union[URIRef, Literal, BNode, Node, _AnyT]: if isinstance(n, tuple): @@ -1970,10 +1965,10 @@ def intern(self, something: _AnyT) -> _AnyT: def bind(self, pfx, uri) -> None: pass # print pfx, ':', uri - def startDoc(self, formula: Optional[Formula]) -> None: + def startDoc(self, formula: Formula | None) -> None: self.rootFormula = formula - def endDoc(self, formula: Optional[Formula]) -> None: + def endDoc(self, formula: Formula | None) -> None: pass @@ -2016,7 +2011,7 @@ def parse( self, source: InputSource, graph: Graph, - encoding: Optional[str] = "utf-8", + encoding: str | None = "utf-8", turtle: bool = True, ) -> None: if encoding not in [None, "utf-8"]: @@ -2051,7 +2046,7 @@ def __init__(self): # type error: Signature of "parse" incompatible with supertype "TurtleParser" def parse( # type: ignore[override] - self, source: InputSource, graph: Graph, encoding: Optional[str] = "utf-8" + self, source: InputSource, graph: Graph, encoding: str | None = "utf-8" ) -> None: # we're currently being handed a Graph, not a ConjunctiveGraph # context-aware is this implied by formula_aware diff --git a/rdflib/plugins/parsers/nquads.py b/rdflib/plugins/parsers/nquads.py index b197d3430..2ed2ab4a1 100644 --- a/rdflib/plugins/parsers/nquads.py +++ b/rdflib/plugins/parsers/nquads.py @@ -27,7 +27,7 @@ from codecs import getreader from collections.abc import MutableMapping -from typing import Any, Optional +from typing import Any from rdflib.exceptions import ParserError as ParseError from rdflib.graph import ConjunctiveGraph, Dataset, Graph @@ -49,7 +49,7 @@ def parse( # type: ignore[override] self, inputsource: InputSource, sink: Graph, - bnode_context: Optional[_BNodeContextType] = None, + bnode_context: _BNodeContextType | None = None, skolemize: bool = False, **kwargs: Any, ): @@ -107,7 +107,7 @@ def parse( # type: 
ignore[override] return self.sink - def parseline(self, bnode_context: Optional[_BNodeContextType] = None) -> None: + def parseline(self, bnode_context: _BNodeContextType | None = None) -> None: self.eat(r_wspace) if (not self.line) or self.line.startswith("#"): return # The line is empty or a comment diff --git a/rdflib/plugins/parsers/ntriples.py b/rdflib/plugins/parsers/ntriples.py index 6f73dda5d..327580ef3 100644 --- a/rdflib/plugins/parsers/ntriples.py +++ b/rdflib/plugins/parsers/ntriples.py @@ -15,7 +15,6 @@ IO, TYPE_CHECKING, Any, - Optional, TextIO, Union, ) @@ -141,8 +140,8 @@ class W3CNTriplesParser: def __init__( self, - sink: Optional[Union[DummySink, NTGraphSink]] = None, - bnode_context: Optional[_BNodeContextType] = None, + sink: DummySink | NTGraphSink | None = None, + bnode_context: _BNodeContextType | None = None, ): self.skolemize = False @@ -157,16 +156,16 @@ def __init__( else: self.sink = DummySink() - self.buffer: Optional[str] = None - self.file: Optional[Union[TextIO, codecs.StreamReader]] = None - self.line: Optional[str] = "" + self.buffer: str | None = None + self.file: TextIO | codecs.StreamReader | None = None + self.line: str | None = "" def parse( self, - f: Union[TextIO, IO[bytes], codecs.StreamReader], - bnode_context: Optional[_BNodeContextType] = None, + f: TextIO | IO[bytes] | codecs.StreamReader, + bnode_context: _BNodeContextType | None = None, skolemize: bool = False, - ) -> Union[DummySink, NTGraphSink]: + ) -> DummySink | NTGraphSink: """ Parse f as an N-Triples file. 
@@ -210,7 +209,7 @@ def parsestring(self, s: Union[bytes, bytearray, str], **kwargs) -> None: f = StringIO(s) self.parse(f, **kwargs) - def readline(self) -> Optional[str]: + def readline(self) -> str | None: """Read an N-Triples line from buffered input.""" # N-Triples lines end in either CRLF, CR, or LF # Therefore, we can't just use f.readline() @@ -236,7 +235,7 @@ def readline(self) -> Optional[str]: return None self.buffer += buffer - def parseline(self, bnode_context: Optional[_BNodeContextType] = None) -> None: + def parseline(self, bnode_context: _BNodeContextType | None = None) -> None: self.eat(r_wspace) if (not self.line) or self.line.startswith("#"): return # The line is empty or a comment @@ -280,7 +279,7 @@ def predicate(self) -> Union[bNode, URIRef]: return pred def object( - self, bnode_context: Optional[_BNodeContextType] = None + self, bnode_context: _BNodeContextType | None = None ) -> Union[URI, bNode, Literal]: objt = self.uriref() or self.nodeid(bnode_context) or self.literal() if objt is False: @@ -296,7 +295,7 @@ def uriref(self) -> Union[te.Literal[False], URI]: return False def nodeid( - self, bnode_context: Optional[_BNodeContextType] = None + self, bnode_context: _BNodeContextType | None = None ) -> Union[te.Literal[False], bNode, URI]: if self.peek("_"): if self.skolemize: diff --git a/rdflib/plugins/parsers/patch.py b/rdflib/plugins/parsers/patch.py index 0c0b87d64..b9cc8d443 100644 --- a/rdflib/plugins/parsers/patch.py +++ b/rdflib/plugins/parsers/patch.py @@ -3,7 +3,7 @@ from codecs import getreader from collections.abc import MutableMapping from enum import Enum -from typing import TYPE_CHECKING, Any, Optional, Union +from typing import TYPE_CHECKING, Any, Union from rdflib.exceptions import ParserError as ParseError from rdflib.graph import Dataset @@ -52,7 +52,7 @@ def parse( # type: ignore[override] self, inputsource: InputSource, sink: Dataset, - bnode_context: Optional[_BNodeContextType] = None, + bnode_context: 
_BNodeContextType | None = None, skolemize: bool = False, **kwargs: Any, ) -> Dataset: @@ -94,7 +94,7 @@ def parse( # type: ignore[override] raise ParseError("Invalid line (%s):\n%r" % (msg, __line)) return self.sink - def parsepatch(self, bnode_context: Optional[_BNodeContextType] = None) -> None: + def parsepatch(self, bnode_context: _BNodeContextType | None = None) -> None: self.eat(r_wspace) # From spec: "No comments should be included (comments start # and run to end # of line)." @@ -113,7 +113,7 @@ def parsepatch(self, bnode_context: Optional[_BNodeContextType] = None) -> None: self.delete_prefix() def add_or_remove_triple_or_quad( - self, operation, bnode_context: Optional[_BNodeContextType] = None + self, operation, bnode_context: _BNodeContextType | None = None ) -> None: self.eat(r_wspace) if (not self.line) or self.line.startswith("#"): @@ -170,7 +170,7 @@ def eat_op(self, op: str) -> None: self.line = self.line.lstrip(op) # type: ignore[union-attr] def nodeid( - self, bnode_context: Optional[_BNodeContextType] = None + self, bnode_context: _BNodeContextType | None = None ) -> Union[te.Literal[False], BNode, URIRef]: if self.peek("_"): return BNode(self.eat(r_nodeid).group(1)) diff --git a/rdflib/plugins/parsers/rdfxml.py b/rdflib/plugins/parsers/rdfxml.py index 72bef1dcf..6ce4dd3cb 100644 --- a/rdflib/plugins/parsers/rdfxml.py +++ b/rdflib/plugins/parsers/rdfxml.py @@ -4,7 +4,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, NoReturn, Optional +from typing import TYPE_CHECKING, Any, NoReturn from urllib.parse import urldefrag, urljoin from xml.sax import handler, make_parser, xmlreader from xml.sax.handler import ErrorHandler @@ -146,16 +146,16 @@ def reset(self) -> None: document_element = ElementHandler() document_element.start = self.document_element_start document_element.end = lambda name, qname: None - self.stack: list[Optional[ElementHandler]] = [ + self.stack: list[ElementHandler | None] = [ None, document_element, 
] self.ids: dict[str, int] = {} # remember IDs we have already seen self.bnode: dict[str, IdentifiedNode] = {} - self._ns_contexts: list[dict[str, Optional[str]]] = [ + self._ns_contexts: list[dict[str, str | None]] = [ {} ] # contains uri -> prefix dicts - self._current_context: dict[str, Optional[str]] = self._ns_contexts[-1] + self._current_context: dict[str, str | None] = self._ns_contexts[-1] # ContentHandler methods @@ -165,17 +165,17 @@ def setDocumentLocator(self, locator: Locator): def startDocument(self) -> None: pass - def startPrefixMapping(self, prefix: Optional[str], namespace: str) -> None: + def startPrefixMapping(self, prefix: str | None, namespace: str) -> None: self._ns_contexts.append(self._current_context.copy()) self._current_context[namespace] = prefix self.store.bind(prefix, namespace or "", override=False) - def endPrefixMapping(self, prefix: Optional[str]) -> None: + def endPrefixMapping(self, prefix: str | None) -> None: self._current_context = self._ns_contexts[-1] del self._ns_contexts[-1] def startElementNS( - self, name: tuple[Optional[str], str], qname, attrs: AttributesImpl + self, name: tuple[str | None, str], qname, attrs: AttributesImpl ) -> None: stack = self.stack stack.append(ElementHandler()) @@ -207,7 +207,7 @@ def startElementNS( current.language = language current.start(name, qname, attrs) - def endElementNS(self, name: tuple[Optional[str], str], qname) -> None: + def endElementNS(self, name: tuple[str | None, str], qname) -> None: self.current.end(name, qname) self.stack.pop() @@ -238,21 +238,21 @@ def error(self, message: str) -> NoReturn: ) raise ParserError(info + message) - def get_current(self) -> Optional[ElementHandler]: + def get_current(self) -> ElementHandler | None: return self.stack[-2] # Create a read only property called current so that self.current # give the current element handler. 
current = property(get_current) - def get_next(self) -> Optional[ElementHandler]: + def get_next(self) -> ElementHandler | None: return self.stack[-1] # Create a read only property that gives the element handler to be # used for the next element. next = property(get_next) - def get_parent(self) -> Optional[ElementHandler]: + def get_parent(self) -> ElementHandler | None: return self.stack[-3] # Create a read only property that gives the current parent @@ -267,7 +267,7 @@ def absolutize(self, uri: str) -> URIRef: return URIRef(result) def convert( - self, name: tuple[Optional[str], str], qname, attrs: AttributesImpl + self, name: tuple[str | None, str], qname, attrs: AttributesImpl ) -> tuple[URIRef, dict[URIRef, str]]: if name[0] is None: # type error: Incompatible types in assignment (expression has type "URIRef", variable has type "Tuple[Optional[str], str]") @@ -412,7 +412,7 @@ def property_element_start( # Cheap hack so 2to3 doesn't turn it into __next__ next = getattr(self, "next") - object: Optional[_ObjectType] = None + object: _ObjectType | None = None current.data = None current.list = None diff --git a/rdflib/plugins/parsers/trix.py b/rdflib/plugins/parsers/trix.py index 9120bc729..574dd9627 100644 --- a/rdflib/plugins/parsers/trix.py +++ b/rdflib/plugins/parsers/trix.py @@ -4,7 +4,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, NoReturn, Optional +from typing import TYPE_CHECKING, Any, NoReturn from xml.sax import handler, make_parser from xml.sax.handler import ErrorHandler @@ -29,8 +29,8 @@ class TriXHandler(handler.ContentHandler): """An Sax Handler for TriX. 
See http://sw.nokia.com/trix/""" - lang: Optional[str] - datatype: Optional[str] + lang: str | None + datatype: str | None def __init__(self, store: Store): self.store = store @@ -39,8 +39,8 @@ def __init__(self, store: Store): def reset(self) -> None: self.bnode: dict[str, BNode] = {} - self.graph: Optional[Graph] = None - self.triple: Optional[list[Identifier]] = None + self.graph: Graph | None = None + self.triple: list[Identifier] | None = None self.state = 0 self.lang = None self.datatype = None @@ -53,14 +53,14 @@ def setDocumentLocator(self, locator: Locator): def startDocument(self) -> None: pass - def startPrefixMapping(self, prefix: Optional[str], namespace: str) -> None: + def startPrefixMapping(self, prefix: str | None, namespace: str) -> None: pass - def endPrefixMapping(self, prefix: Optional[str]) -> None: + def endPrefixMapping(self, prefix: str | None) -> None: pass def startElementNS( - self, name: tuple[Optional[str], str], qname, attrs: AttributesImpl + self, name: tuple[str | None, str], qname, attrs: AttributesImpl ) -> None: if name[0] != str(TRIXNS): self.error( @@ -150,7 +150,7 @@ def startElementNS( self.chars = "" - def endElementNS(self, name: tuple[Optional[str], str], qname) -> None: + def endElementNS(self, name: tuple[str | None, str], qname) -> None: if TYPE_CHECKING: assert self.triple is not None if name[0] != str(TRIXNS): diff --git a/rdflib/plugins/serializers/hext.py b/rdflib/plugins/serializers/hext.py index 250e96cb9..17e99ce4a 100644 --- a/rdflib/plugins/serializers/hext.py +++ b/rdflib/plugins/serializers/hext.py @@ -8,7 +8,7 @@ import json import warnings from collections.abc import Callable -from typing import IO, Any, Optional, Union, cast +from typing import IO, Any, Union, cast from rdflib.graph import DATASET_DEFAULT_GRAPH_ID, ConjunctiveGraph, Dataset, Graph from rdflib.namespace import RDF, XSD @@ -31,18 +31,18 @@ class HextuplesSerializer(Serializer): Serializes RDF graphs to NTriples format. 
""" - contexts: list[Union[Graph, IdentifiedNode]] + contexts: list[Graph | IdentifiedNode] dumps: Callable - def __new__(cls, store: Union[Graph, Dataset, ConjunctiveGraph]): + def __new__(cls, store: Graph | Dataset | ConjunctiveGraph): if _HAS_ORJSON: - cls.str_local_id: Union[str, Any] = orjson.Fragment(b'"localId"') - cls.str_global_id: Union[str, Any] = orjson.Fragment(b'"globalId"') - cls.empty: Union[str, Any] = orjson.Fragment(b'""') - cls.lang_str: Union[str, Any] = orjson.Fragment( + cls.str_local_id: str | Any = orjson.Fragment(b'"localId"') + cls.str_global_id: str | Any = orjson.Fragment(b'"globalId"') + cls.empty: str | Any = orjson.Fragment(b'""') + cls.lang_str: str | Any = orjson.Fragment( b'"' + RDF.langString.encode("utf-8") + b'"' ) - cls.xsd_string: Union[str, Any] = orjson.Fragment( + cls.xsd_string: str | Any = orjson.Fragment( b'"' + XSD.string.encode("utf-8") + b'"' ) else: @@ -53,9 +53,9 @@ def __new__(cls, store: Union[Graph, Dataset, ConjunctiveGraph]): cls.xsd_string = f"{XSD.string}" return super(cls, cls).__new__(cls) - def __init__(self, store: Union[Graph, Dataset, ConjunctiveGraph]): - self.default_context: Optional[Union[Graph, IdentifiedNode]] - self.graph_type: Union[type[Graph], type[Dataset], type[ConjunctiveGraph]] + def __init__(self, store: Graph | Dataset | ConjunctiveGraph): + self.default_context: Graph | IdentifiedNode | None + self.graph_type: type[Graph] | type[Dataset] | type[ConjunctiveGraph] if isinstance(store, (Dataset, ConjunctiveGraph)): self.graph_type = ( Dataset if isinstance(store, Dataset) else ConjunctiveGraph @@ -76,8 +76,8 @@ def __init__(self, store: Union[Graph, Dataset, ConjunctiveGraph]): def serialize( self, stream: IO[bytes], - base: Optional[str] = None, - encoding: Optional[str] = "utf-8", + base: str | None = None, + encoding: str | None = "utf-8", **kwargs: Any, ) -> None: if base is not None: @@ -97,8 +97,8 @@ def serialize( raise Exception( "Hextuple serialization can't (yet) handle 
formula-aware stores" ) - context: Union[Graph, IdentifiedNode] - context_str: Union[bytes, str] + context: Graph | IdentifiedNode + context_str: bytes | str for context in self.contexts: for triple in context: # Generate context string just once, because it doesn't change @@ -119,7 +119,7 @@ def serialize( if hl is not None: stream.write(hl if _HAS_ORJSON else hl.encode()) - def _hex_line(self, triple, context_str: Union[bytes, str]): + def _hex_line(self, triple, context_str: bytes | str): if isinstance( triple[0], (URIRef, BNode) ): # exclude QuotedGraph and other objects @@ -181,7 +181,7 @@ def _iri_or_bn(self, i_): else: return None - def _context_str(self, context: Union[Graph, IdentifiedNode]) -> str: + def _context_str(self, context: Graph | IdentifiedNode) -> str: context_identifier: IdentifiedNode = ( context.identifier if isinstance(context, Graph) else context ) diff --git a/rdflib/plugins/serializers/jsonld.py b/rdflib/plugins/serializers/jsonld.py index a259b09b7..ed8c7e23f 100644 --- a/rdflib/plugins/serializers/jsonld.py +++ b/rdflib/plugins/serializers/jsonld.py @@ -38,7 +38,7 @@ from __future__ import annotations import warnings -from typing import IO, TYPE_CHECKING, Any, Optional, Union, cast +from typing import IO, TYPE_CHECKING, Any, Union, cast from rdflib.graph import DATASET_DEFAULT_GRAPH_ID, Graph from rdflib.namespace import RDF, XSD @@ -68,8 +68,8 @@ def __init__(self, store: Graph): def serialize( self, stream: IO[bytes], - base: Optional[str] = None, - encoding: Optional[str] = None, + base: str | None = None, + encoding: str | None = None, **kwargs: Any, ) -> None: # TODO: docstring w. 
args and return value @@ -276,7 +276,7 @@ def add_to_node( nodemap, ): context = self.context - term: Optional[Term] = None + term: Term | None = None if isinstance(o, Literal): _datatype = str(o.datatype) if o.datatype else None language = o.language @@ -292,7 +292,7 @@ def add_to_node( break language = None if term is None else term.language - node: Optional[str | list[Any] | dict[str, Any]] = None + node: str | list[Any] | dict[str, Any] | None = None use_set = not context.active if term is not None: @@ -368,7 +368,7 @@ def add_to_node( def type_coerce( self, o: IdentifiedNode | Literal, coerce_type: str - ) -> Optional[str | IdentifiedNode | Literal]: + ) -> str | IdentifiedNode | Literal | None: if coerce_type == ID: if isinstance(o, URIRef): return self.context.shrink_iri(o) @@ -392,7 +392,7 @@ def to_raw_value( ): context = self.context if isinstance(o, (URIRef, BNode)): - coll: Optional[list[Any]] = self.to_collection(graph, o) + coll: list[Any] | None = self.to_collection(graph, o) else: coll = None if coll is not None: @@ -436,12 +436,12 @@ def to_raw_value( def to_collection( self, graph: Graph, l_: JSONLDSubjectType - ) -> Optional[list[_ObjectType]]: + ) -> list[_ObjectType] | None: if l_ != RDF.nil and not graph.value(l_, RDF.first): return None list_nodes: list[_ObjectType] = [] chain: set[_ObjectType] = set([l_]) - list_head: Optional[_ObjectType] = l_ + list_head: _ObjectType | None = l_ while list_head: if list_head == RDF.nil: # The only way to return a real result is to reach diff --git a/rdflib/plugins/serializers/nquads.py b/rdflib/plugins/serializers/nquads.py index b74b9cab5..335148c8c 100644 --- a/rdflib/plugins/serializers/nquads.py +++ b/rdflib/plugins/serializers/nquads.py @@ -1,7 +1,7 @@ from __future__ import annotations import warnings -from typing import IO, Any, Optional +from typing import IO, Any from rdflib.graph import ConjunctiveGraph, Graph from rdflib.plugins.serializers.nt import _quoteLiteral @@ -24,8 +24,8 @@ def 
__init__(self, store: Graph): def serialize( self, stream: IO[bytes], - base: Optional[str] = None, - encoding: Optional[str] = None, + base: str | None = None, + encoding: str | None = None, **kwargs: Any, ) -> None: if base is not None: diff --git a/rdflib/plugins/serializers/nt.py b/rdflib/plugins/serializers/nt.py index 8e11fe66e..32c3891e7 100644 --- a/rdflib/plugins/serializers/nt.py +++ b/rdflib/plugins/serializers/nt.py @@ -2,7 +2,7 @@ import codecs import warnings -from typing import IO, TYPE_CHECKING, Any, Optional +from typing import IO, TYPE_CHECKING, Any from rdflib.graph import Graph from rdflib.serializer import Serializer @@ -31,8 +31,8 @@ def __init__(self, store: Graph): def serialize( self, stream: IO[bytes], - base: Optional[str] = None, - encoding: Optional[str] = "utf-8", + base: str | None = None, + encoding: str | None = "utf-8", **kwargs: Any, ) -> None: if base is not None: diff --git a/rdflib/plugins/serializers/patch.py b/rdflib/plugins/serializers/patch.py index 1bc5ff41f..58928c6a0 100644 --- a/rdflib/plugins/serializers/patch.py +++ b/rdflib/plugins/serializers/patch.py @@ -1,7 +1,7 @@ from __future__ import annotations import warnings -from typing import IO, Any, Optional +from typing import IO, Any from uuid import uuid4 from rdflib import Dataset @@ -30,8 +30,8 @@ def __init__( def serialize( self, stream: IO[bytes], - base: Optional[str] = None, - encoding: Optional[str] = None, + base: str | None = None, + encoding: str | None = None, **kwargs: Any, ) -> None: """ diff --git a/rdflib/plugins/serializers/rdfxml.py b/rdflib/plugins/serializers/rdfxml.py index e122c1398..7dbafd2fa 100644 --- a/rdflib/plugins/serializers/rdfxml.py +++ b/rdflib/plugins/serializers/rdfxml.py @@ -2,7 +2,7 @@ import xml.dom.minidom from collections.abc import Generator -from typing import IO, TYPE_CHECKING, Any, Optional +from typing import IO, TYPE_CHECKING, Any from xml.sax.saxutils import escape, quoteattr from rdflib.collection import Collection @@ 
-48,8 +48,8 @@ def __bindings(self) -> Generator[tuple[str, URIRef], None, None]: def serialize( self, stream: IO[bytes], - base: Optional[str] = None, - encoding: Optional[str] = None, + base: str | None = None, + encoding: str | None = None, **kwargs: Any, ) -> None: # if base is given here, use that, if not and a base is set for the graph use that @@ -174,8 +174,8 @@ def __init__(self, store: Graph, max_depth=3): def serialize( self, stream: IO[bytes], - base: Optional[str] = None, - encoding: Optional[str] = None, + base: str | None = None, + encoding: str | None = None, **kwargs: Any, ) -> None: self.__serialized: dict[IdentifiedNode | Literal, int] = {} diff --git a/rdflib/plugins/serializers/trig.py b/rdflib/plugins/serializers/trig.py index 38cc31591..0de71134c 100644 --- a/rdflib/plugins/serializers/trig.py +++ b/rdflib/plugins/serializers/trig.py @@ -5,7 +5,7 @@ from __future__ import annotations -from typing import IO, TYPE_CHECKING, Any, Optional, Union +from typing import IO, TYPE_CHECKING, Any, Union from rdflib.graph import ConjunctiveGraph, Graph from rdflib.plugins.serializers.turtle import TurtleSerializer @@ -22,7 +22,7 @@ class TrigSerializer(TurtleSerializer): indentString = 4 * " " def __init__(self, store: Union[Graph, ConjunctiveGraph]): - self.default_context: Optional[Node] + self.default_context: Node | None if store.context_aware: if TYPE_CHECKING: assert isinstance(store, ConjunctiveGraph) @@ -64,11 +64,11 @@ def reset(self) -> None: def serialize( self, stream: IO[bytes], - base: Optional[str] = None, - encoding: Optional[str] = None, - spacious: Optional[bool] = None, + base: str | None = None, + encoding: str | None = None, + spacious: bool | None = None, **kwargs: Any, - ) -> None: + ) -> None: self.reset() self.stream = stream # if base is given here, use that, if not and a base is set for the graph use that @@ -96,7 +96,7 @@ def serialize( if self.default_context and store.identifier == self.default_context: self.write(self.indent() + 
"\n{") else: - iri: Optional[str] + iri: str | None if isinstance(store.identifier, BNode): iri = store.identifier.n3() else: diff --git a/rdflib/plugins/serializers/trix.py b/rdflib/plugins/serializers/trix.py index 95730e8fb..7c6dab493 100644 --- a/rdflib/plugins/serializers/trix.py +++ b/rdflib/plugins/serializers/trix.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import IO, Any, Optional +from typing import IO, Any from rdflib.graph import ConjunctiveGraph, Graph from rdflib.namespace import Namespace @@ -26,10 +26,10 @@ def __init__(self, store: Graph): def serialize( self, stream: IO[bytes], - base: Optional[str] = None, - encoding: Optional[str] = None, + base: str | None = None, + encoding: str | None = None, **kwargs: Any, - ) -> None: + ): nm = self.store.namespace_manager self.writer = XMLWriter(stream, nm, encoding, extra_ns={"": TRIXNS}) diff --git a/rdflib/plugins/serializers/turtle.py b/rdflib/plugins/serializers/turtle.py index fba8600df..6165e11ee 100644 --- a/rdflib/plugins/serializers/turtle.py +++ b/rdflib/plugins/serializers/turtle.py @@ -11,7 +11,6 @@ IO, TYPE_CHECKING, Any, - Optional, ) from rdflib.exceptions import Error @@ -35,7 +34,7 @@ class RecursiveSerializer(Serializer): def __init__(self, store: Graph): super(RecursiveSerializer, self).__init__(store) - self.stream: Optional[IO[bytes]] = None + self.stream: IO[bytes] | None = None self.reset() def addNamespace(self, prefix: str, uri: URIRef) -> None: @@ -220,9 +219,9 @@ def reset(self) -> None: def serialize( self, stream: IO[bytes], - base: Optional[str] = None, - encoding: Optional[str] = None, - spacious: Optional[bool] = None, + base: str | None = None, + encoding: str | None = None, + spacious: bool | None = None, **kwargs: Any, ) -> None: self.reset() @@ -270,7 +269,7 @@ def preprocessTriple(self, triple: _TripleType) -> None: self._references[p] += 1 # TODO: Rename to get_pname - def getQName(self, uri: Node, gen_prefix: bool = True) -> Optional[str]: + 
def getQName(self, uri: Node, gen_prefix: bool = True) -> str | None: if not isinstance(uri, URIRef): return None diff --git a/rdflib/plugins/serializers/xmlwriter.py b/rdflib/plugins/serializers/xmlwriter.py index c062d4f0d..bd8402195 100644 --- a/rdflib/plugins/serializers/xmlwriter.py +++ b/rdflib/plugins/serializers/xmlwriter.py @@ -2,7 +2,7 @@ import codecs from collections.abc import Iterable -from typing import IO, TYPE_CHECKING, Optional +from typing import IO, TYPE_CHECKING from xml.sax.saxutils import escape, quoteattr from rdflib.term import URIRef @@ -21,9 +21,9 @@ def __init__( self, stream: IO[bytes], namespace_manager: NamespaceManager, - encoding: Optional[str] = None, + encoding: str | None = None, decl: int = 1, - extra_ns: Optional[dict[str, Namespace]] = None, + extra_ns: dict[str, Namespace] | None = None, ): encoding = encoding or "utf-8" encoder, decoder, stream_reader, stream_writer = codecs.lookup(encoding) @@ -58,7 +58,7 @@ def push(self, uri: str) -> None: self.closed = False self.parent = False - def pop(self, uri: Optional[str] = None) -> None: + def pop(self, uri: str | None = None) -> None: top = self.element_stack.pop() if uri: assert uri == top diff --git a/rdflib/plugins/shared/jsonld/context.py b/rdflib/plugins/shared/jsonld/context.py index 9c43b87e1..49df77a1b 100644 --- a/rdflib/plugins/shared/jsonld/context.py +++ b/rdflib/plugins/shared/jsonld/context.py @@ -79,33 +79,33 @@ class Context: def __init__( self, source: _ContextSourceType = None, - base: Optional[str] = None, - version: Optional[float] = 1.1, + base: str | None = None, + version: float | None = 1.1, ): self.version: float = version or 1.1 self.language = None - self.vocab: Optional[str] = None - self._base: Optional[str] + self.vocab: str | None = None + self._base: str | None self.base = base self.doc_base = base self.terms: dict[str, Any] = {} # _alias maps NODE_KEY to list of aliases self._alias: dict[str, list[str]] = {} - self._lookup: dict[tuple[str, Any, 
Union[Defined, str], bool], Term] = {} + self._lookup: dict[tuple[str, Any, Defined | str, bool], Term] = {} self._prefixes: dict[str, Any] = {} self.active = False - self.parent: Optional[Context] = None + self.parent: Context | None = None self.propagate = True self._context_cache: dict[str, Any] = {} if source: self.load(source) @property - def base(self) -> Optional[str]: + def base(self) -> str | None: return self._base @base.setter - def base(self, base: Optional[str]): + def base(self, base: str | None): if base: hash_index = base.find("#") if hash_index > -1: @@ -149,12 +149,12 @@ def _clear(self) -> None: self.active = False self.propagate = True - def get_context_for_term(self, term: Optional[Term]) -> Context: + def get_context_for_term(self, term: Term | None) -> Context: if term and term.context is not UNDEF: return self._subcontext(term.context, propagate=True) return self - def get_context_for_type(self, node: Any) -> Optional[Context]: + def get_context_for_type(self, node: Any) -> Context | None: if self.version >= 1.1: rtype = self.get_type(node) if isinstance(node, dict) else None if not isinstance(rtype, list): @@ -230,13 +230,13 @@ def add_term( self, name: str, idref: str, - coercion: Union[Defined, str] = UNDEF, - container: Union[Collection[Any], str, Defined] = UNDEF, - index: Optional[Union[str, Defined]] = None, - language: Optional[Union[str, Defined]] = UNDEF, + coercion: Defined | str = UNDEF, + container: Collection[Any] | str | Defined = UNDEF, + index: str | Defined | None = None, + language: str | Defined | None = UNDEF, reverse: bool = False, context: Any = UNDEF, - prefix: Optional[bool] = None, + prefix: bool | None = None, protected: bool = False, ): if self.version < 1.1 or prefix is None: @@ -272,7 +272,7 @@ def add_term( self.terms[name] = term - container_key: Union[Defined, str] + container_key: Defined | str for container_key in (LIST, LANG, SET): # , INDEX, ID, GRAPH): if container_key in container: break @@ -287,11 
+287,11 @@ def add_term( def find_term( self, idref: str, - coercion: Optional[str | Defined] = None, - container: Optional[Defined | str] = UNDEF, - language: Optional[str] = None, + coercion: str | Defined | None = None, + container: Defined | str | None = UNDEF, + language: str | None = None, reverse: bool = False, - ) -> Optional[Term]: + ) -> Term | None: lu = self._lookup if coercion is None: @@ -383,9 +383,9 @@ def shrink_iri(self, iri: str) -> str: return iri[len(self._basedomain) :] # type: ignore[arg-type] return iri - def to_symbol(self, iri: str) -> Optional[str]: + def to_symbol(self, iri: str) -> str | None: iri = str(iri) - term: Optional[Term] = self.find_term(iri) + term: Term | None = self.find_term(iri) if term is not None: return term.name ns, name = split_iri(iri) @@ -400,11 +400,11 @@ def to_symbol(self, iri: str) -> Optional[str]: def load( self, source: _ContextSourceType, - base: Optional[str] = None, + base: str | None = None, referenced_contexts: set[Any] = None, ): self.active = True - sources: list[tuple[Optional[str], Union[dict[str, Any], str, None]]] = [] + sources: list[tuple[str | None, dict[str, Any] | str | None]] = [] # "Union[List[Union[Dict[str, Any], str]], list[Dict[str, Any]], list[str]]" : expression # "Union[List[Dict[str, Any]], dict[str, Any], list[str], str]" : variable source = source if isinstance(source, list) else [source] @@ -427,11 +427,11 @@ def _accept_term(self, key: str) -> bool: def _prep_sources( self, - base: Optional[str], - inputs: Union[list[Union[dict[str, Any], str, None]], list[str]], - sources: list[tuple[Optional[str], Union[dict[str, Any], str, None]]], + base: str | None, + inputs: list[dict[str, Any] | str | None] | list[str], + sources: list[tuple[str | None, dict[str, Any] | str | None]], referenced_contexts: set[str], - in_source_url: Optional[str] = None, + in_source_url: str | None = None, ): for source in inputs: source_url = in_source_url @@ -468,7 +468,7 @@ def _prep_sources( 
sources.append((source_url, source)) def _fetch_context( - self, source: str, base: Optional[str], referenced_contexts: set[str] + self, source: str, base: str | None, referenced_contexts: set[str] ): # type error: Value of type variable "AnyStr" of "urljoin" cannot be "Optional[str]" source_url = urljoin(base, source) # type: ignore[type-var] @@ -495,8 +495,8 @@ def _fetch_context( def _read_source( self, source: dict[str, Any], - source_url: Optional[str] = None, - referenced_contexts: Optional[set[str]] = None, + source_url: str | None = None, + referenced_contexts: set[str] | None = None, ): imports = source.get(IMPORT) if imports: @@ -534,7 +534,7 @@ def _read_term( self, source: dict[str, Any], name: str, - dfn: Union[dict[str, Any], str], + dfn: dict[str, Any] | str, protected: bool = False, ) -> None: idref = None @@ -589,12 +589,12 @@ def _read_term( v.remove(name) def _rec_expand( - self, source: dict[str, Any], expr: Optional[str], prev: Optional[str] = None - ) -> Optional[str]: + self, source: dict[str, Any], expr: str | None, prev: str | None = None + ) -> str | None: if expr == prev or expr in NODE_KEYS: return expr - nxt: Optional[str] + nxt: str | None # type error: Argument 1 to "_prep_expand" of "Context" has incompatible type "Optional[str]"; expected "str" is_term, pfx, nxt = self._prep_expand(expr) # type: ignore[arg-type] if pfx: @@ -618,7 +618,7 @@ def _rec_expand( return self._rec_expand(source, nxt, expr) - def _prep_expand(self, expr: str) -> tuple[bool, Optional[str], str]: + def _prep_expand(self, expr: str) -> tuple[bool, str | None, str]: if ":" not in expr: return True, None, expr pfx, local = expr.split(":", 1) @@ -627,7 +627,7 @@ def _prep_expand(self, expr: str) -> tuple[bool, Optional[str], str]: else: return False, None, expr - def _get_source_id(self, source: dict[str, Any], key: str) -> Optional[str]: + def _get_source_id(self, source: dict[str, Any], key: str) -> str | None: # .. 
from source dict or if already defined term = source.get(key) if term is None: @@ -638,7 +638,7 @@ def _get_source_id(self, source: dict[str, Any], key: str) -> Optional[str]: term = term.get(ID) return term - def _term_dict(self, term: Term) -> Union[dict[str, Any], str]: + def _term_dict(self, term: Term) -> dict[str, Any] | str: tdict: dict[str, Any] = {} if term.type != UNDEF: tdict[TYPE] = self.shrink_iri(term.type) diff --git a/rdflib/plugins/shared/jsonld/util.py b/rdflib/plugins/shared/jsonld/util.py index 9ff111315..4ba4dbb3a 100644 --- a/rdflib/plugins/shared/jsonld/util.py +++ b/rdflib/plugins/shared/jsonld/util.py @@ -5,7 +5,7 @@ import pathlib from html.parser import HTMLParser from io import StringIO, TextIOBase, TextIOWrapper -from typing import IO, TYPE_CHECKING, Any, Optional, TextIO, Union +from typing import IO, TYPE_CHECKING, Any, TextIO, Union if TYPE_CHECKING: import json @@ -41,11 +41,9 @@ def source_to_json( - source: Optional[ - Union[IO[bytes], TextIO, InputSource, str, bytes, pathlib.PurePath] - ], - fragment_id: Optional[str] = None, - extract_all_scripts: Optional[bool] = False, + source: IO[bytes] | TextIO | InputSource | str | bytes | pathlib.PurePath | None, + fragment_id: str | None = None, + extract_all_scripts: bool | None = False, ) -> tuple[Union[dict, list[dict]], Any]: """Extract JSON from a source document. 
@@ -70,8 +68,8 @@ def source_to_json( # We can get the original string from the StringInputSource # It's hidden in the BytesIOWrapper 'wrapped' attribute b_stream = source.getByteStream() - original_string: Optional[str] = None - json_dict: Union[dict, list[dict]] + original_string: str | None = None + json_dict: dict | list[dict] if isinstance(b_stream, BytesIOWrapper): wrapped_inner = cast(Union[str, StringIO, TextIOBase], b_stream.wrapped) if isinstance(wrapped_inner, str): @@ -108,7 +106,7 @@ def source_to_json( "application/xhtml+xml", ) if is_html: - html_docparser: Optional[HTMLJSONParser] = HTMLJSONParser( + html_docparser: HTMLJSONParser | None = HTMLJSONParser( fragment_id=fragment_id, extract_all_scripts=extract_all_scripts ) else: @@ -126,10 +124,10 @@ def source_to_json( f"Source does not have a character stream or a byte stream and cannot be used {type(source)}" ) try: - b_encoding: Optional[str] = None if b_stream is None else source.getEncoding() + b_encoding: str | None = None if b_stream is None else source.getEncoding() except (AttributeError, LookupError): b_encoding = None - underlying_string: Optional[str] = None + underlying_string: str | None = None if b_stream is not None and isinstance(b_stream, BytesIOWrapper): # Try to find an underlying wrapped Unicode string to use? 
wrapped_inner = b_stream.wrapped @@ -198,7 +196,7 @@ def source_to_json( VOCAB_DELIMS = ("#", "/", ":") -def split_iri(iri: str) -> tuple[str, Optional[str]]: +def split_iri(iri: str) -> tuple[str, str | None]: for delim in VOCAB_DELIMS: at = iri.rfind(delim) if at > -1: @@ -251,7 +249,7 @@ def norm_url(base: str, url: str) -> str: # type error: Missing return statement -def context_from_urlinputsource(source: URLInputSource) -> Optional[str]: # type: ignore[return] +def context_from_urlinputsource(source: URLInputSource) -> str | None: # type: ignore[return] """ Please note that JSON-LD documents served with the application/ld+json media type MUST have all context information, including references to external contexts, @@ -288,8 +286,8 @@ def context_from_urlinputsource(source: URLInputSource) -> Optional[str]: # typ class HTMLJSONParser(HTMLParser): def __init__( self, - fragment_id: Optional[str] = None, - extract_all_scripts: Optional[bool] = False, + fragment_id: str | None = None, + extract_all_scripts: bool | None = False, ): super().__init__() self.fragment_id = fragment_id diff --git a/rdflib/plugins/sparql/aggregates.py b/rdflib/plugins/sparql/aggregates.py index 531e20d22..cac24059f 100644 --- a/rdflib/plugins/sparql/aggregates.py +++ b/rdflib/plugins/sparql/aggregates.py @@ -8,7 +8,6 @@ from decimal import Decimal from typing import ( Any, - Optional, TypeVar, overload, ) @@ -26,7 +25,7 @@ class Accumulator: """abstract base class for different aggregation functions""" def __init__(self, aggregation: CompValue): - self.get_value: Callable[[], Optional[Literal]] + self.get_value: Callable[[], Literal | None] self.update: Callable[[FrozenBindings, Aggregator], None] self.var = aggregation.res self.expr = aggregation.vars @@ -110,7 +109,7 @@ class Sum(Accumulator): def __init__(self, aggregation: CompValue): super(Sum, self).__init__(aggregation) self.value = 0 - self.datatype: Optional[str] = None + self.datatype: str | None = None def update(self, row: 
FrozenBindings, aggregator: Aggregator) -> None: try: @@ -138,7 +137,7 @@ def __init__(self, aggregation: CompValue): super(Average, self).__init__(aggregation) self.counter = 0 self.sum = 0 - self.datatype: Optional[str] = None + self.datatype: str | None = None def update(self, row: FrozenBindings, aggregator: Aggregator) -> None: try: diff --git a/rdflib/plugins/sparql/algebra.py b/rdflib/plugins/sparql/algebra.py index a0bee7656..cb5059130 100644 --- a/rdflib/plugins/sparql/algebra.py +++ b/rdflib/plugins/sparql/algebra.py @@ -15,7 +15,6 @@ from functools import reduce from typing import ( Any, - Optional, overload, ) @@ -51,7 +50,7 @@ def Union(p1: CompValue, p2: CompValue) -> CompValue: return CompValue("Union", p1=p1, p2=p2) -def Join(p1: CompValue, p2: Optional[CompValue]) -> CompValue: +def Join(p1: CompValue, p2: CompValue | None) -> CompValue: return CompValue("Join", p1=p1, p2=p2) @@ -64,7 +63,7 @@ def Graph(term: Identifier, graph: CompValue) -> CompValue: def BGP( - triples: Optional[list[tuple[Identifier, Identifier, Identifier]]] = None + triples: list[tuple[Identifier, Identifier, Identifier]] | None = None ) -> CompValue: return CompValue("BGP", triples=triples or []) @@ -91,7 +90,7 @@ def Project(p: CompValue, PV: list[Variable]) -> CompValue: return CompValue("Project", p=p, PV=PV) -def Group(p: CompValue, expr: Optional[list[Variable]] = None) -> CompValue: +def Group(p: CompValue, expr: list[Variable] | None = None) -> CompValue: return CompValue("Group", p=p, expr=expr) @@ -176,7 +175,7 @@ def triples( # type error: Missing return statement def translatePName( # type: ignore[return] p: typing.Union[CompValue, str], prologue: Prologue -) -> Optional[Identifier]: +) -> Identifier | None: """ Expand prefixed/relative URIs """ @@ -203,7 +202,7 @@ def translatePath(p: CompValue) -> Path: ... 
# type error: Missing return statement -def translatePath(p: typing.Union[CompValue, URIRef]) -> Optional[Path]: # type: ignore[return] +def translatePath(p: CompValue | URIRef) -> Path | None: # type: ignore[return] """ Translate PropertyPath expressions """ @@ -269,7 +268,7 @@ def _c(n): return e -def collectAndRemoveFilters(parts: list[CompValue]) -> Optional[Expr]: +def collectAndRemoveFilters(parts: list[CompValue]) -> Expr | None: """ FILTER expressions apply to the whole group graph pattern in which @@ -296,8 +295,8 @@ def collectAndRemoveFilters(parts: list[CompValue]) -> Optional[Expr]: return None -def translateGroupOrUnionGraphPattern(graphPattern: CompValue) -> Optional[CompValue]: - A: Optional[CompValue] = None +def translateGroupOrUnionGraphPattern(graphPattern: CompValue) -> CompValue | None: + A: CompValue | None = None for g in graphPattern.graph: g = translateGroupGraphPattern(g) @@ -452,7 +451,7 @@ def traverse( tree, visitPre: Callable[[Any], Any] = lambda n: None, visitPost: Callable[[Any], Any] = lambda n: None, - complete: Optional[bool] = None, + complete: bool | None = None, ) -> Any: """ Traverse tree, visit each node with visit function @@ -481,7 +480,7 @@ def _hasAggregate(x) -> None: # type error: Missing return statement -def _aggs(e, A) -> Optional[Variable]: # type: ignore[return] +def _aggs(e, A) -> Variable | None: # type: ignore[return] """ Collect Aggregates in A replaces aggregates with variable references @@ -497,7 +496,7 @@ def _aggs(e, A) -> Optional[Variable]: # type: ignore[return] # type error: Missing return statement -def _findVars(x, res: set[Variable]) -> Optional[CompValue]: # type: ignore[return] +def _findVars(x, res: set[Variable]) -> CompValue | None: # type: ignore[return] """ Find all variables in a tree """ @@ -548,7 +547,7 @@ def _addVars(x, children: list[set[Variable]]) -> set[Variable]: # type error: Missing return statement -def _sample(e: typing.Union[CompValue, list[Expr], Expr, list[str], Variable], v: 
Optional[Variable] = None) -> Optional[CompValue]: # type: ignore[return] +def _sample(e: typing.Union[CompValue, list[Expr], Expr, list[str], Variable], v: Variable | None = None) -> CompValue | None: # type: ignore[return] """ For each unaggregated variable V in expr Replace V with Sample(V) @@ -621,7 +620,7 @@ def translateValues( return Values(res) -def translate(q: CompValue) -> tuple[Optional[CompValue], list[Variable]]: +def translate(q: CompValue) -> tuple[CompValue | None, list[Variable]]: """ http://www.w3.org/TR/sparql11-query/#convertSolMod @@ -777,7 +776,7 @@ def _find_first_child_projections(M: CompValue) -> Iterable[CompValue]: # type error: Missing return statement -def simplify(n: Any) -> Optional[CompValue]: # type: ignore[return] +def simplify(n: Any) -> CompValue | None: # type: ignore[return] """Remove joins to empty BGPs""" if isinstance(n, CompValue): if n.name == "Join": @@ -811,9 +810,9 @@ def analyse(n: Any, children: Any) -> bool: def translatePrologue( p: ParseResults, - base: Optional[str], - initNs: Optional[Mapping[str, Any]] = None, - prologue: Optional[Prologue] = None, + base: str | None, + initNs: Mapping[str, Any] | None = None, + prologue: Prologue | None = None, ) -> Prologue: if prologue is None: prologue = Prologue() @@ -883,8 +882,8 @@ def translateUpdate1(u: CompValue, prologue: Prologue) -> CompValue: def translateUpdate( q: CompValue, - base: Optional[str] = None, - initNs: Optional[Mapping[str, Any]] = None, + base: str | None = None, + initNs: Mapping[str, Any] | None = None, ) -> Update: """ Returns a list of SPARQL Update Algebra expressions @@ -912,8 +911,8 @@ def translateUpdate( def translateQuery( q: ParseResults, - base: Optional[str] = None, - initNs: Optional[Mapping[str, Any]] = None, + base: str | None = None, + initNs: Mapping[str, Any] | None = None, ) -> Query: """ Translate a query-parsetree to a SPARQL Algebra Expression diff --git a/rdflib/plugins/sparql/datatypes.py b/rdflib/plugins/sparql/datatypes.py 
index 9943eab6e..5541cd720 100644 --- a/rdflib/plugins/sparql/datatypes.py +++ b/rdflib/plugins/sparql/datatypes.py @@ -4,7 +4,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING from rdflib.namespace import XSD @@ -86,7 +86,7 @@ } -def type_promotion(t1: URIRef, t2: Optional[URIRef]) -> URIRef: +def type_promotion(t1: URIRef, t2: URIRef | None) -> URIRef: if t2 is None: return t1 t1 = _super_types.get(t1, t1) diff --git a/rdflib/plugins/sparql/evaluate.py b/rdflib/plugins/sparql/evaluate.py index 0c95af758..9fd76f806 100644 --- a/rdflib/plugins/sparql/evaluate.py +++ b/rdflib/plugins/sparql/evaluate.py @@ -23,7 +23,6 @@ from typing import ( TYPE_CHECKING, Any, - Optional, Union, ) from urllib.parse import urlencode @@ -643,8 +642,8 @@ def evalDescribeQuery(ctx: QueryContext, query) -> dict[str, Union[str, Graph]]: def evalQuery( graph: Graph, query: Query, - initBindings: Optional[Mapping[str, Identifier]] = None, - base: Optional[str] = None, + initBindings: Mapping[str, Identifier] | None = None, + base: str | None = None, ) -> Mapping[Any, Any]: """ diff --git a/rdflib/plugins/sparql/operators.py b/rdflib/plugins/sparql/operators.py index ca8b7a904..3aa6fbc3e 100644 --- a/rdflib/plugins/sparql/operators.py +++ b/rdflib/plugins/sparql/operators.py @@ -18,7 +18,7 @@ import warnings from decimal import ROUND_HALF_DOWN, ROUND_HALF_UP, Decimal, InvalidOperation from functools import reduce -from typing import Any, Callable, NoReturn, Optional, Union, overload +from typing import Any, Callable, NoReturn, Union, overload from urllib.parse import quote from pyparsing import ParseResults @@ -550,7 +550,7 @@ def Builtin_LANG(e: Expr, ctx) -> Literal: return Literal(l_.language or "") -def Builtin_DATATYPE(e: Expr, ctx) -> Optional[str]: +def Builtin_DATATYPE(e: Expr, ctx) -> str | None: l_ = e.arg if not isinstance(l_, Literal): raise SPARQLError("Can only get datatype of literal: %r" % l_) @@ -626,7 
+626,7 @@ def decorator(func: _CustomFunction) -> _CustomFunction: def unregister_custom_function( - uri: URIRef, func: Optional[Callable[..., Any]] = None + uri: URIRef, func: Callable[..., Any] | None = None ) -> None: """ The 'func' argument is included for compatibility with existing code. diff --git a/rdflib/plugins/sparql/parserutils.py b/rdflib/plugins/sparql/parserutils.py index 2927dfc7a..79a60f6f9 100644 --- a/rdflib/plugins/sparql/parserutils.py +++ b/rdflib/plugins/sparql/parserutils.py @@ -33,7 +33,6 @@ from typing import ( TYPE_CHECKING, Any, - Optional, TypeVar, Union, ) @@ -207,7 +206,7 @@ class Expr(CompValue): def __init__( self, name: str, - evalfn: Optional[Callable[[Any, Any], Any]] = None, + evalfn: Callable[[Any, Any], Any] | None = None, **values, ): super(Expr, self).__init__(name, **values) @@ -218,7 +217,7 @@ def __init__( def eval(self, ctx: Any = {}) -> Union[SPARQLError, Any]: try: - self.ctx: Optional[Union[Mapping, FrozenBindings]] = ctx + self.ctx: Union[Mapping, FrozenBindings] | None = ctx # type error: "None" not callable return self._evalfn(ctx) # type: ignore[misc] except SPARQLError as e: @@ -239,7 +238,7 @@ def __init__(self, name: str, expr: ParserElement): self.expr = expr TokenConverter.__init__(self, expr) self.setName(name) - self.evalfn: Optional[Callable[[Any, Any], Any]] = None + self.evalfn: Callable[[Any, Any], Any] | None = None def postParse( self, instring: str, loc: int, tokenList: ParseResults diff --git a/rdflib/plugins/sparql/processor.py b/rdflib/plugins/sparql/processor.py index fe9e3fb1a..7c0adf778 100644 --- a/rdflib/plugins/sparql/processor.py +++ b/rdflib/plugins/sparql/processor.py @@ -8,7 +8,7 @@ from __future__ import annotations from collections.abc import Mapping -from typing import Any, Optional, Union +from typing import Any, Union from rdflib.graph import Graph from rdflib.plugins.sparql.algebra import translateQuery, translateUpdate @@ -22,8 +22,8 @@ def prepareQuery( queryString: str, - 
initNs: Optional[Mapping[str, Any]] = None, - base: Optional[str] = None, + initNs: Mapping[str, Any] | None = None, + base: str | None = None, ) -> Query: """ Parse and translate a SPARQL Query @@ -37,8 +37,8 @@ def prepareQuery( def prepareUpdate( updateString: str, - initNs: Optional[Mapping[str, Any]] = None, - base: Optional[str] = None, + initNs: Mapping[str, Any] | None = None, + base: str | None = None, ) -> Update: """ Parse and translate a SPARQL Update @@ -53,9 +53,9 @@ def prepareUpdate( def processUpdate( graph: Graph, updateString: str, - initBindings: Optional[Mapping[str, Identifier]] = None, - initNs: Optional[Mapping[str, Any]] = None, - base: Optional[str] = None, + initBindings: Mapping[str, Identifier] | None = None, + initNs: Mapping[str, Any] | None = None, + base: str | None = None, ) -> None: """ Process a SPARQL Update Request @@ -82,9 +82,9 @@ def __init__(self, graph): def update( self, - strOrQuery: Union[str, Update], - initBindings: Optional[Mapping[str, Identifier]] = None, - initNs: Optional[Mapping[str, Any]] = None, + strOrQuery: str | Update, + initBindings: Mapping[str, Identifier] | None = None, + initNs: Mapping[str, Any] | None = None, ) -> None: """ .. 
caution:: @@ -118,9 +118,9 @@ def __init__(self, graph): def query( # type: ignore[override] self, strOrQuery: Union[str, Query], - initBindings: Optional[Mapping[str, Identifier]] = None, - initNs: Optional[Mapping[str, Any]] = None, - base: Optional[str] = None, + initBindings: Mapping[str, Identifier] | None = None, + initNs: Mapping[str, Any] | None = None, + base: str | None = None, DEBUG: bool = False, ) -> Mapping[str, Any]: """ diff --git a/rdflib/plugins/sparql/results/csvresults.py b/rdflib/plugins/sparql/results/csvresults.py index bb80ac794..1f5658744 100644 --- a/rdflib/plugins/sparql/results/csvresults.py +++ b/rdflib/plugins/sparql/results/csvresults.py @@ -12,7 +12,7 @@ import codecs import csv from io import BufferedIOBase, TextIOBase -from typing import IO, Optional, Union, cast +from typing import IO, Union, cast from rdflib.plugins.sparql.processor import SPARQLResult from rdflib.query import Result, ResultParser, ResultSerializer @@ -24,7 +24,7 @@ def __init__(self): self.delim = "," # type error: Signature of "parse" incompatible with supertype "ResultParser" - def parse(self, source: IO, content_type: Optional[str] = None) -> Result: # type: ignore[override] + def parse(self, source: IO, content_type: str | None = None) -> Result: # type: ignore[override] r = Result("SELECT") # type error: Incompatible types in assignment (expression has type "StreamReader", variable has type "IO[Any]") @@ -51,7 +51,7 @@ def parseRow( if val is not None ) - def convertTerm(self, t: str) -> Optional[Union[BNode, URIRef, Literal]]: + def convertTerm(self, t: str) -> BNode | URIRef | Literal | None: if t == "": return None if t.startswith("_:"): @@ -94,7 +94,7 @@ def serialize(self, stream: IO, encoding: str = "utf-8", **kwargs) -> None: ) def serializeTerm( - self, term: Optional[Identifier], encoding: str + self, term: Identifier | None, encoding: str ) -> Union[str, Identifier]: if term is None: return "" diff --git 
a/rdflib/plugins/sparql/results/jsonresults.py b/rdflib/plugins/sparql/results/jsonresults.py index fa2e940d6..c855398f1 100644 --- a/rdflib/plugins/sparql/results/jsonresults.py +++ b/rdflib/plugins/sparql/results/jsonresults.py @@ -13,7 +13,7 @@ import json from collections.abc import Mapping, MutableSequence -from typing import IO, TYPE_CHECKING, Any, Optional +from typing import IO, TYPE_CHECKING, Any from rdflib.query import Result, ResultException, ResultParser, ResultSerializer from rdflib.term import BNode, Literal, URIRef, Variable @@ -33,7 +33,7 @@ class JSONResultParser(ResultParser): # type error: Signature of "parse" incompatible with supertype "ResultParser" - def parse(self, source: IO, content_type: Optional[str] = None) -> Result: # type: ignore[override] + def parse(self, source: IO, content_type: str | None = None) -> Result: # type: ignore[override] inp = source.read() if _HAS_ORJSON: try: @@ -151,8 +151,8 @@ def parseJsonTerm(d: dict[str, str]) -> IdentifiedNode | Literal: def termToJSON( - self: JSONResultSerializer, term: Optional[IdentifiedNode | Literal] -) -> Optional[dict[str, str]]: + self: JSONResultSerializer, term: IdentifiedNode | Literal | None +) -> dict[str, str] | None: if isinstance(term, URIRef): return {"type": "uri", "value": str(term)} elif isinstance(term, Literal): diff --git a/rdflib/plugins/sparql/results/rdfresults.py b/rdflib/plugins/sparql/results/rdfresults.py index fe3040e57..7dc7f1f74 100644 --- a/rdflib/plugins/sparql/results/rdfresults.py +++ b/rdflib/plugins/sparql/results/rdfresults.py @@ -38,7 +38,7 @@ def __init__(self, source: IO | Graph, **kwargs: Any): # use a new graph g = Graph() g += graph - askAnswer: Optional[Literal] = None + askAnswer: Literal | None = None else: askAnswer = cast(Optional[Literal], graph.value(rs, RS.boolean)) @@ -61,7 +61,7 @@ def __init__(self, source: IO | Graph, **kwargs: Any): for s in graph.objects(rs, RS.solution): sol: dict[Variable, IdentifiedNode | Literal] = {} for b in 
graph.objects(s, RS.binding): - var_name: Optional[_ObjectType | str] = graph.value(b, RS.variable) + var_name: _ObjectType | str | None = graph.value(b, RS.variable) if var_name is None: continue # Technically we should check for QuotedGraph here, to make MyPy happy diff --git a/rdflib/plugins/sparql/results/tsvresults.py b/rdflib/plugins/sparql/results/tsvresults.py index b5f3461af..0ae7c1767 100644 --- a/rdflib/plugins/sparql/results/tsvresults.py +++ b/rdflib/plugins/sparql/results/tsvresults.py @@ -7,7 +7,6 @@ from __future__ import annotations import codecs -import typing from typing import IO, Union from pyparsing import ( @@ -66,7 +65,7 @@ class TSVResultParser(ResultParser): # type error: Signature of "parse" incompatible with supertype "ResultParser" [override] - def parse(self, source: IO, content_type: typing.Optional[str] = None) -> Result: # type: ignore[override] + def parse(self, source: IO, content_type: str | None = None) -> Result: # type: ignore[override] if isinstance(source.read(0), bytes): # if reading from source returns bytes do utf-8 decoding # type error: Incompatible types in assignment (expression has type "StreamReader", variable has type "IO[Any]") @@ -100,7 +99,7 @@ def parse(self, source: IO, content_type: typing.Optional[str] = None) -> Result def convertTerm( self, t: Union[object, RDFLiteral, BNode, CompValue, URIRef] - ) -> typing.Optional[BNode | URIRef | RDFLiteral]: + ) -> BNode | URIRef | RDFLiteral | None: if t is NONE_VALUE: return None elif isinstance(t, CompValue): diff --git a/rdflib/plugins/sparql/results/txtresults.py b/rdflib/plugins/sparql/results/txtresults.py index 8bdf2b53d..50501e6f9 100644 --- a/rdflib/plugins/sparql/results/txtresults.py +++ b/rdflib/plugins/sparql/results/txtresults.py @@ -1,7 +1,7 @@ from __future__ import annotations from io import StringIO -from typing import IO, Optional, Union +from typing import IO from rdflib.namespace import NamespaceManager from rdflib.query import ResultSerializer 
@@ -9,8 +9,8 @@ def _termString( - t: Optional[Union[URIRef, Literal, BNode]], - namespace_manager: Optional[NamespaceManager], + t: URIRef | Literal | BNode | None, + namespace_manager: NamespaceManager | None, ) -> str: if t is None: return "-" @@ -35,7 +35,7 @@ def serialize( stream: IO, encoding: str = "utf-8", *, - namespace_manager: Optional[NamespaceManager] = None, + namespace_manager: NamespaceManager | None = None, **kwargs, ) -> None: """ diff --git a/rdflib/plugins/sparql/results/xmlresults.py b/rdflib/plugins/sparql/results/xmlresults.py index a0debafe4..1430d2522 100644 --- a/rdflib/plugins/sparql/results/xmlresults.py +++ b/rdflib/plugins/sparql/results/xmlresults.py @@ -19,7 +19,6 @@ TYPE_CHECKING, Any, BinaryIO, - Optional, TextIO, Union, cast, @@ -47,13 +46,13 @@ class XMLResultParser(ResultParser): # TODO FIXME: content_type should be a keyword only arg. - def parse(self, source: IO, content_type: Optional[str] = None) -> Result: # type: ignore[override] + def parse(self, source: IO, content_type: str | None = None) -> Result: # type: ignore[override] return XMLResult(source) class XMLResult(Result): - def __init__(self, source: IO, content_type: Optional[str] = None): - parser_encoding: Optional[str] = None + def __init__(self, source: IO, content_type: str | None = None): + parser_encoding: str | None = None if hasattr(source, "encoding"): if TYPE_CHECKING: assert isinstance(source, TextIO) @@ -241,10 +240,10 @@ def write_end_result(self) -> None: def write_binding(self, name: Variable, val: Identifier) -> None: assert self._resultStarted - attr_vals: dict[tuple[Optional[str], str], str] = { + attr_vals: dict[tuple[str | None, str], str] = { (None, "name"): str(name), } - attr_qnames: dict[tuple[Optional[str], str], str] = { + attr_qnames: dict[tuple[str | None, str], str] = { (None, "name"): "name", } self.writer.startElementNS( diff --git a/rdflib/plugins/sparql/sparql.py b/rdflib/plugins/sparql/sparql.py index dd6eff999..cdd460fd3 100644 --- 
a/rdflib/plugins/sparql/sparql.py +++ b/rdflib/plugins/sparql/sparql.py @@ -8,7 +8,6 @@ from typing import ( TYPE_CHECKING, Any, - Optional, TypeVar, Union, ) @@ -27,12 +26,12 @@ class SPARQLError(Exception): - def __init__(self, msg: Optional[str] = None): + def __init__(self, msg: str | None = None): Exception.__init__(self, msg) class NotBoundError(SPARQLError): - def __init__(self, msg: Optional[str] = None): + def __init__(self, msg: str | None = None): SPARQLError.__init__(self, msg) @@ -44,7 +43,7 @@ def __init__(self): class SPARQLTypeError(SPARQLError): - def __init__(self, msg: Optional[str]): + def __init__(self, msg: str | None): SPARQLError.__init__(self, msg) @@ -58,7 +57,7 @@ class Bindings(MutableMapping): In python 3.3 this could be a collections.ChainMap """ - def __init__(self, outer: Optional[Bindings] = None, d=[]): + def __init__(self, outer: Bindings | None = None, d=[]): self._d: dict[str, str] = dict(d) self.outer = outer @@ -85,14 +84,14 @@ def __delitem__(self, key: str) -> None: def __len__(self) -> int: i = 0 - d: Optional[Bindings] = self + d: Bindings | None = self while d is not None: i += len(d._d) d = d.outer return i def __iter__(self) -> Generator[str, None, None]: - d: Optional[Bindings] = self + d: Bindings | None = self while d is not None: yield from d._d d = d.outer @@ -115,7 +114,7 @@ class FrozenDict(Mapping): def __init__(self, *args: Any, **kwargs: Any): self._d: dict[Identifier, Identifier] = dict(*args, **kwargs) - self._hash: Optional[int] = None + self._hash: int | None = None def __iter__(self): return iter(self._d) @@ -202,11 +201,11 @@ def bnodes(self) -> t.Mapping[Identifier, BNode]: return self.ctx.bnodes @property - def prologue(self) -> Optional[Prologue]: + def prologue(self) -> Prologue | None: return self.ctx.prologue def forget( - self, before: QueryContext, _except: Optional[Container[Variable]] = None + self, before: QueryContext, _except: Container[Variable] | None = None ) -> FrozenBindings: """ return 
a frozen dict only of bindings made in self @@ -244,9 +243,9 @@ class QueryContext: def __init__( self, - graph: Optional[Graph] = None, - bindings: Optional[Union[Bindings, FrozenBindings, list[Any]]] = None, - initBindings: Optional[Mapping[str, Identifier]] = None, + graph: Graph | None = None, + bindings: Bindings | FrozenBindings | list[Any] | None = None, + initBindings: Mapping[str, Identifier] | None = None, datasetClause=None, ): self.initBindings = initBindings @@ -254,8 +253,8 @@ def __init__( if initBindings: self.bindings.update(initBindings) - self.graph: Optional[Graph] - self._dataset: Optional[Union[Dataset, ConjunctiveGraph]] + self.graph: Graph | None + self._dataset: Dataset | ConjunctiveGraph | None if isinstance(graph, (Dataset, ConjunctiveGraph)): if datasetClause: self._dataset = Dataset() @@ -284,8 +283,8 @@ def __init__( self._dataset = None self.graph = graph - self.prologue: Optional[Prologue] = None - self._now: Optional[datetime.datetime] = None + self.prologue: Prologue | None = None + self._now: datetime.datetime | None = None self.bnodes: t.MutableMapping[Identifier, BNode] = collections.defaultdict( BNode @@ -298,7 +297,7 @@ def now(self) -> datetime.datetime: return self._now def clone( - self, bindings: Optional[Union[FrozenBindings, Bindings, list[Any]]] = None + self, bindings: FrozenBindings | Bindings | list[Any] | None = None ) -> QueryContext: r = QueryContext( self._dataset if self._dataset is not None else self.graph, @@ -325,7 +324,7 @@ def load( self, source: URIRef, default: bool = False, - into: Optional[Identifier] = None, + into: Identifier | None = None, **kwargs: Any, ) -> None: """ @@ -376,7 +375,7 @@ def _load(graph, source): into = source _load(self.dataset.get_context(into), source) - def __getitem__(self, key: Union[str, Path]) -> Optional[Union[str, Path]]: + def __getitem__(self, key: str | Path) -> str | Path | None: # in SPARQL BNodes are just labels if not isinstance(key, (BNode, Variable)): return key 
@@ -385,13 +384,13 @@ def __getitem__(self, key: Union[str, Path]) -> Optional[Union[str, Path]]: except KeyError: return None - def get(self, key: str, default: Optional[Any] = None) -> Any: + def get(self, key: str, default: Any | None = None) -> Any: try: return self[key] except KeyError: return default - def solution(self, vars: Optional[Iterable[Variable]] = None) -> FrozenBindings: + def solution(self, vars: Iterable[Variable] | None = None) -> FrozenBindings: """ Return a static copy of the current variable bindings as dict """ @@ -408,7 +407,7 @@ def __setitem__(self, key: str, value: str) -> None: self.bindings[key] = value - def pushGraph(self, graph: Optional[Graph]) -> QueryContext: + def pushGraph(self, graph: Graph | None) -> QueryContext: r = self.clone() r.graph = graph return r @@ -435,21 +434,19 @@ class Prologue: """ def __init__(self) -> None: - self.base: Optional[str] = None + self.base: str | None = None self.namespace_manager = NamespaceManager(Graph()) # ns man needs a store - def resolvePName(self, prefix: Optional[str], localname: Optional[str]) -> URIRef: + def resolvePName(self, prefix: str | None, localname: str | None) -> URIRef: ns = self.namespace_manager.store.namespace(prefix or "") if ns is None: raise Exception("Unknown namespace prefix : %s" % prefix) return URIRef(ns + (localname or "")) - def bind(self, prefix: Optional[str], uri: Any) -> None: + def bind(self, prefix: str | None, uri: Any) -> None: self.namespace_manager.bind(prefix, uri, replace=True) - def absolutize( - self, iri: Optional[Union[CompValue, str]] - ) -> Optional[Union[CompValue, str]]: + def absolutize(self, iri: CompValue | str | None) -> CompValue | str | None: """ Apply BASE / PREFIXes to URIs (and to datatypes in Literals) @@ -479,7 +476,7 @@ class Query: def __init__(self, prologue: Prologue, algebra: CompValue): self.prologue = prologue self.algebra = algebra - self._original_args: tuple[str, Mapping[str, str], Optional[str]] + self._original_args: 
tuple[str, Mapping[str, str], str | None] class Update: @@ -490,4 +487,4 @@ class Update: def __init__(self, prologue: Prologue, algebra: list[CompValue]): self.prologue = prologue self.algebra = algebra - self._original_args: tuple[str, Mapping[str, str], Optional[str]] + self._original_args: tuple[str, Mapping[str, str], str | None] diff --git a/rdflib/plugins/sparql/update.py b/rdflib/plugins/sparql/update.py index 3acf03b52..75c36a123 100644 --- a/rdflib/plugins/sparql/update.py +++ b/rdflib/plugins/sparql/update.py @@ -7,7 +7,7 @@ from __future__ import annotations from collections.abc import Iterator, Mapping, Sequence -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING from rdflib.graph import Graph from rdflib.plugins.sparql.evaluate import evalBGP, evalPart @@ -17,7 +17,7 @@ from rdflib.term import Identifier, URIRef, Variable -def _graphOrDefault(ctx: QueryContext, g: str) -> Optional[Graph]: +def _graphOrDefault(ctx: QueryContext, g: str) -> Graph | None: if g == "DEFAULT": return ctx.graph else: @@ -143,7 +143,7 @@ def evalModify(ctx: QueryContext, u: CompValue) -> None: originalctx = ctx # Using replaces the dataset for evaluating the where-clause - dg: Optional[Graph] + dg: Graph | None if u.using: otherDefault = False for d in u.using: @@ -284,7 +284,7 @@ def evalCopy(ctx: QueryContext, u: CompValue) -> None: def evalUpdate( graph: Graph, update: Update, - initBindings: Optional[Mapping[str, Identifier]] = None, + initBindings: Mapping[str, Identifier] | None = None, ) -> None: """ diff --git a/rdflib/plugins/stores/auditable.py b/rdflib/plugins/stores/auditable.py index 7fe536b25..a5e51087a 100644 --- a/rdflib/plugins/stores/auditable.py +++ b/rdflib/plugins/stores/auditable.py @@ -19,7 +19,7 @@ import threading from collections.abc import Generator, Iterator -from typing import TYPE_CHECKING, Any, Optional +from typing import TYPE_CHECKING, Any from rdflib.graph import ConjunctiveGraph, Graph from rdflib.store import Store 
@@ -54,16 +54,16 @@ def __init__(self, store: Store): self.transaction_aware = True # This is only half true self.reverseOps: list[ tuple[ - Optional[_SubjectType], - Optional[_PredicateType], - Optional[_ObjectType], - Optional[_ContextIdentifierType], + _SubjectType | None, + _PredicateType | None, + _ObjectType | None, + _ContextIdentifierType | None, str, ] ] = [] self.rollbackLock = threading.RLock() - def open(self, configuration: str, create: bool = True) -> Optional[int]: + def open(self, configuration: str, create: bool = True) -> int | None: return self.store.open(configuration, create) def close(self, commit_pending_transaction: bool = False) -> None: @@ -98,7 +98,7 @@ def add( self.store.add((s, p, o), context, quoted) def remove( - self, spo: _TriplePatternType, context: Optional[_ContextType] = None + self, spo: _TriplePatternType, context: _ContextType | None = None ) -> None: subject, predicate, object_ = spo lock = destructiveOpLocks["remove"] @@ -142,8 +142,8 @@ def remove( self.store.remove((subject, predicate, object_), context) def triples( - self, triple: _TriplePatternType, context: Optional[_ContextType] = None - ) -> Iterator[tuple[_TripleType, Iterator[Optional[_ContextType]]]]: + self, triple: _TriplePatternType, context: _ContextType | None = None + ) -> Iterator[tuple[_TripleType, Iterator[_ContextType | None]]]: (su, pr, ob) = triple context = ( context.__class__(self.store, context.identifier) @@ -153,7 +153,7 @@ def triples( for (s, p, o), cg in self.store.triples((su, pr, ob), context): yield (s, p, o), cg - def __len__(self, context: Optional[_ContextType] = None): + def __len__(self, context: _ContextType | None = None): context = ( context.__class__(self.store, context.identifier) if context is not None @@ -162,7 +162,7 @@ def __len__(self, context: Optional[_ContextType] = None): return self.store.__len__(context) def contexts( - self, triple: Optional[_TripleType] = None + self, triple: _TripleType | None = None ) -> 
Generator[_ContextType, None, None]: for ctx in self.store.contexts(triple): yield ctx @@ -170,10 +170,10 @@ def contexts( def bind(self, prefix: str, namespace: URIRef, override: bool = True) -> None: self.store.bind(prefix, namespace, override=override) - def prefix(self, namespace: URIRef) -> Optional[str]: + def prefix(self, namespace: URIRef) -> str | None: return self.store.prefix(namespace) - def namespace(self, prefix: str) -> Optional[URIRef]: + def namespace(self, prefix: str) -> URIRef | None: return self.store.namespace(prefix) def namespaces(self) -> Iterator[tuple[str, URIRef]]: diff --git a/rdflib/plugins/stores/berkeleydb.py b/rdflib/plugins/stores/berkeleydb.py index 13eaa7170..c3ad61d90 100644 --- a/rdflib/plugins/stores/berkeleydb.py +++ b/rdflib/plugins/stores/berkeleydb.py @@ -91,8 +91,8 @@ class BerkeleyDB(Store): def __init__( self, - configuration: Optional[str] = None, - identifier: Optional[Identifier] = None, + configuration: str | None = None, + identifier: Identifier | None = None, ): if not has_bsddb: raise ImportError("Unable to import berkeleydb, store is unusable.") @@ -103,7 +103,7 @@ def __init__( self._dumps = self.node_pickler.dumps self.__indicies_info: list[tuple[Any, _ToKeyFunc, _FromKeyFunc]] - def __get_identifier(self) -> Optional[Identifier]: + def __get_identifier(self) -> Identifier | None: return self.__identifier identifier = property(__get_identifier) @@ -128,7 +128,7 @@ def _init_db_environment( def is_open(self) -> bool: return self.__open - def open(self, path: str, create: bool = True) -> Optional[int]: + def open(self, path: str, create: bool = True) -> int | None: if not has_bsddb: return NO_STORE homeDir = path # noqa: N806 @@ -203,7 +203,7 @@ def open(self, path: str, create: bool = True) -> Optional[int]: def get_prefix_func(start: int, end: int) -> _GetPrefixFunc: def get_prefix( - triple: tuple[str, str, str], context: Optional[str] + triple: tuple[str, str, str], context: str | None ) -> Generator[str, 
None, None]: if context is None: yield "" @@ -304,7 +304,7 @@ def add( triple: _TripleType, context: _ContextType, quoted: bool = False, - txn: Optional[Any] = None, + txn: Any | None = None, ) -> None: """\ Add a triple to the store of triples. @@ -350,7 +350,7 @@ def __remove( spo: tuple[bytes, bytes, bytes], c: bytes, quoted: bool = False, - txn: Optional[Any] = None, + txn: Any | None = None, ) -> None: s, p, o = spo cspo, cpos, cosp = self.__indicies @@ -384,8 +384,8 @@ def __remove( def remove( # type: ignore[override] self, spo: _TriplePatternType, - context: Optional[_ContextType], - txn: Optional[Any] = None, + context: _ContextType | None, + txn: Any | None = None, ) -> None: subject, predicate, object = spo assert self.__open, "The Store must be open." @@ -475,10 +475,10 @@ def remove( # type: ignore[override] def triples( self, spo: _TriplePatternType, - context: Optional[_ContextType] = None, - txn: Optional[Any] = None, + context: _ContextType | None = None, + txn: Any | None = None, ) -> Generator[ - tuple[_TripleType, Generator[Optional[_ContextType], None, None]], + tuple[_TripleType, Generator[_ContextType | None, None, None]], None, None, ]: @@ -521,7 +521,7 @@ def triples( else: break - def __len__(self, context: Optional[_ContextType] = None) -> int: + def __len__(self, context: _ContextType | None = None) -> int: assert self.__open, "The Store must be open." if context is not None: if context == self: @@ -567,7 +567,7 @@ def bind(self, prefix: str, namespace: URIRef, override: bool = True) -> None: self.__prefix[bound_namespace or namespace] = bound_prefix or prefix self.__namespace[bound_prefix or prefix] = bound_namespace or namespace - def namespace(self, prefix: str) -> Optional[URIRef]: + def namespace(self, prefix: str) -> URIRef | None: # NOTE on type error: this is because the variable is reused with # another type. 
# type error: Incompatible types in assignment (expression has type "bytes", variable has type "str") @@ -577,7 +577,7 @@ def namespace(self, prefix: str) -> Optional[URIRef]: return URIRef(ns.decode("utf-8")) return None - def prefix(self, namespace: URIRef) -> Optional[str]: + def prefix(self, namespace: URIRef) -> str | None: # NOTE on type error: this is because the variable is reused with # another type. # type error: Incompatible types in assignment (expression has type "bytes", variable has type "URIRef") @@ -601,7 +601,7 @@ def namespaces(self) -> Generator[tuple[str, URIRef], None, None]: yield prefix, URIRef(namespace) def contexts( - self, triple: Optional[_TripleType] = None + self, triple: _TripleType | None = None ) -> Generator[_ContextType, None, None]: _from_string = self._from_string _to_string = self._to_string @@ -650,7 +650,7 @@ def _from_string(self, i: bytes) -> Node: k = self.__i2k.get(int(i)) return self._loads(k) - def _to_string(self, term: Node, txn: Optional[Any] = None) -> str: + def _to_string(self, term: Node, txn: Any | None = None) -> str: k = self._dumps(term) i = self.__k2i.get(k, txn=txn) if i is None: @@ -669,18 +669,18 @@ def _to_string(self, term: Node, txn: Optional[Any] = None) -> str: def __lookup( self, spo: _TriplePatternType, - context: Optional[_ContextType], - txn: Optional[Any] = None, + context: _ContextType | None, + txn: Any | None = None, ) -> tuple[db.DB, bytes, _FromKeyFunc, _ResultsFromKeyFunc]: subject, predicate, object_ = spo _to_string = self._to_string - context_str: Optional[str] = ( + context_str: str | None = ( None if context is None else _to_string(context, txn=txn) ) i = 0 - subject_str: Optional[str] - predicate_str: Optional[str] - object_str: Optional[str] + subject_str: str | None + predicate_str: str | None + object_str: str | None if subject is not None: i += 1 subject_str = _to_string(subject, txn=txn) @@ -740,9 +740,9 @@ def results_from_key_func( ) -> _ResultsFromKeyFunc: def from_key( key: 
bytes, - subject: Optional[Node], - predicate: Optional[Node], - object: Optional[Node], + subject: Node | None, + predicate: Node | None, + object: Node | None, contexts_value: bytes, ) -> tuple[tuple[Node, Node, Node], Generator[Node, None, None]]: "Takes a key and subject, predicate, object; returns tuple for yield" diff --git a/rdflib/plugins/stores/memory.py b/rdflib/plugins/stores/memory.py index 6e6435deb..bf0051460 100644 --- a/rdflib/plugins/stores/memory.py +++ b/rdflib/plugins/stores/memory.py @@ -6,7 +6,6 @@ from typing import ( TYPE_CHECKING, Any, - Optional, Union, overload, ) @@ -46,8 +45,8 @@ class SimpleMemory(Store): def __init__( self, - configuration: Optional[str] = None, - identifier: Optional[Identifier] = None, + configuration: str | None = None, + identifier: Identifier | None = None, ): super(SimpleMemory, self).__init__(configuration) self.identifier = identifier @@ -119,7 +118,7 @@ def add( def remove( self, triple_pattern: _TriplePatternType, - context: Optional[_ContextType] = None, + context: _ContextType | None = None, ) -> None: for (subject, predicate, object), c in list(self.triples(triple_pattern)): del self.__spo[subject][predicate][object] @@ -129,8 +128,8 @@ def remove( def triples( self, triple_pattern: _TriplePatternType, - context: Optional[_ContextType] = None, - ) -> Iterator[tuple[_TripleType, Iterator[Optional[_ContextType]]]]: + context: _ContextType | None = None, + ) -> Iterator[tuple[_TripleType, Iterator[_ContextType | None]]]: """A generator over all the triples matching""" subject, predicate, object = triple_pattern if subject != ANY: # subject is given @@ -190,7 +189,7 @@ def triples( for o in subjectDictionary[p].keys(): yield (s, p, o), self.__contexts() - def __len__(self, context: Optional[_ContextType] = None) -> int: + def __len__(self, context: _ContextType | None = None) -> int: # @@ optimize i = 0 for triple in self.triples((None, None, None)): @@ -222,10 +221,10 @@ def bind(self, prefix: str, 
namespace: URIRef, override: bool = True) -> None: bound_namespace, default=namespace ) - def namespace(self, prefix: str) -> Optional[URIRef]: + def namespace(self, prefix: str) -> URIRef | None: return self.__namespace.get(prefix, None) - def prefix(self, namespace: URIRef) -> Optional[str]: + def prefix(self, namespace: URIRef) -> str | None: return self.__prefix.get(namespace, None) def namespaces(self) -> Iterator[tuple[str, URIRef]]: @@ -277,8 +276,8 @@ class Memory(Store): def __init__( self, - configuration: Optional[str] = None, - identifier: Optional[Identifier] = None, + configuration: str | None = None, + identifier: Identifier | None = None, ): super(Memory, self).__init__(configuration) self.identifier = identifier @@ -301,12 +300,12 @@ def __init__( self.__namespace: dict[str, URIRef] = {} self.__prefix: dict[URIRef, str] = {} self.__context_obj_map: dict[str, Graph] = {} - self.__tripleContexts: dict[_TripleType, dict[Optional[str], bool]] = {} - self.__contextTriples: dict[Optional[str], set[_TripleType]] = {None: set()} + self.__tripleContexts: dict[_TripleType, dict[str | None, bool]] = {} + self.__contextTriples: dict[str | None, set[_TripleType]] = {None: set()} # all contexts used in store (unencoded) self.__all_contexts: set[Graph] = set() # default context information for triples - self.__defaultContexts: Optional[dict[Optional[str], bool]] = None + self.__defaultContexts: dict[str | None, bool] | None = None def add( self, @@ -373,7 +372,7 @@ def add( def remove( self, triple_pattern: _TriplePatternType, - context: Optional[_ContextType] = None, + context: _ContextType | None = None, ) -> None: req_ctx = self.__ctx_to_str(context) for triple, c in self.triples(triple_pattern, context=context): @@ -411,9 +410,9 @@ def remove( def triples( self, triple_pattern: _TriplePatternType, - context: Optional[_ContextType] = None, + context: _ContextType | None = None, ) -> Generator[ - tuple[_TripleType, Generator[Optional[_ContextType], None, 
None]], + tuple[_TripleType, Generator[_ContextType | None, None, None]], None, None, ]: @@ -542,10 +541,10 @@ def bind(self, prefix: str, namespace: URIRef, override: bool = True) -> None: bound_namespace, default=namespace ) - def namespace(self, prefix: str) -> Optional[URIRef]: + def namespace(self, prefix: str) -> URIRef | None: return self.__namespace.get(prefix, None) - def prefix(self, namespace: URIRef) -> Optional[str]: + def prefix(self, namespace: URIRef) -> str | None: return self.__prefix.get(namespace, None) def namespaces(self) -> Iterator[tuple[str, URIRef]]: @@ -553,7 +552,7 @@ def namespaces(self) -> Iterator[tuple[str, URIRef]]: yield prefix, namespace def contexts( - self, triple: Optional[_TripleType] = None + self, triple: _TripleType | None = None ) -> Generator[_ContextType, None, None]: if triple is None or triple == (None, None, None): return (context for context in self.__all_contexts) @@ -565,7 +564,7 @@ def contexts( except KeyError: return (_ for _ in []) - def __len__(self, context: Optional[_ContextType] = None) -> int: + def __len__(self, context: _ContextType | None = None) -> int: ctx = self.__ctx_to_str(context) if ctx not in self.__contextTriples: return 0 @@ -592,7 +591,7 @@ def __add_triple_context( self, triple: _TripleType, triple_exists: bool, - context: Optional[_ContextType], + context: _ContextType | None, quoted: bool, ) -> None: """add the given context to the set of contexts for the triple""" @@ -643,7 +642,7 @@ def __add_triple_context( def __get_context_for_triple( self, triple: _TripleType, skipQuoted: bool = False # noqa: N803 - ) -> Collection[Optional[str]]: + ) -> Collection[str | None]: """return a list of contexts (str) for the triple, skipping quoted contexts if skipQuoted==True""" @@ -656,7 +655,7 @@ def __get_context_for_triple( # type error: Item "None" of "Optional[dict[Optional[str], bool]]" has no attribute "items" return [ctx for ctx, quoted in ctxs.items() if not quoted] # type: ignore[union-attr] - 
def __triple_has_context(self, triple: _TripleType, ctx: Optional[str]) -> bool: + def __triple_has_context(self, triple: _TripleType, ctx: str | None) -> bool: """return True if the triple exists in the given context""" # type error: Unsupported right operand type for in ("Optional[dict[Optional[str], bool]]") return ctx in self.__tripleContexts.get(triple, self.__defaultContexts) # type: ignore[operator] @@ -678,7 +677,7 @@ def __ctx_to_str(self, ctx: _ContextType) -> str: ... @overload def __ctx_to_str(self, ctx: None) -> None: ... - def __ctx_to_str(self, ctx: Optional[_ContextType]) -> Optional[str]: + def __ctx_to_str(self, ctx: _ContextType | None) -> str | None: if ctx is None: return None try: diff --git a/rdflib/plugins/stores/sparqlconnector.py b/rdflib/plugins/stores/sparqlconnector.py index 86f5957d1..2fe454003 100644 --- a/rdflib/plugins/stores/sparqlconnector.py +++ b/rdflib/plugins/stores/sparqlconnector.py @@ -4,7 +4,7 @@ import copy import logging from io import BytesIO -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING from urllib.error import HTTPError from urllib.parse import urlencode from urllib.request import Request, urlopen @@ -39,11 +39,11 @@ class SPARQLConnector: def __init__( self, - query_endpoint: Optional[str] = None, - update_endpoint: Optional[str] = None, + query_endpoint: str | None = None, + update_endpoint: str | None = None, returnFormat: str = "xml", # noqa: N803 method: te.Literal["GET", "POST", "POST_FORM"] = "GET", - auth: Optional[tuple[str, str]] = None, + auth: tuple[str, str] | None = None, **kwargs, ): """ @@ -84,8 +84,8 @@ def method(self, method: str) -> None: def query( self, query: str, - default_graph: Optional[str] = None, - named_graph: Optional[str] = None, + default_graph: str | None = None, + named_graph: str | None = None, ) -> Result: if not self.query_endpoint: raise SPARQLConnectorException("Query endpoint not set!") @@ -155,8 +155,8 @@ def query( def update( self, query: str, 
- default_graph: Optional[str] = None, - named_graph: Optional[str] = None, + default_graph: str | None = None, + named_graph: str | None = None, ) -> None: if not self.update_endpoint: raise SPARQLConnectorException("Query endpoint not set!") diff --git a/rdflib/plugins/stores/sparqlstore.py b/rdflib/plugins/stores/sparqlstore.py index 38727c97e..d6e5f1d77 100644 --- a/rdflib/plugins/stores/sparqlstore.py +++ b/rdflib/plugins/stores/sparqlstore.py @@ -12,7 +12,6 @@ from typing import ( TYPE_CHECKING, Any, - Optional, Union, cast, overload, @@ -127,12 +126,12 @@ class SPARQLStore(SPARQLConnector, Store): def __init__( self, - query_endpoint: Optional[str] = None, + query_endpoint: str | None = None, sparql11: bool = True, context_aware: bool = True, node_to_sparql: _NodeToSparql = _node_to_sparql, returnFormat: str = "xml", # noqa: N803 - auth: Optional[tuple[str, str]] = None, + auth: tuple[str, str] | None = None, **sparqlconnector_kwargs, ): super(SPARQLStore, self).__init__( @@ -150,7 +149,7 @@ def __init__( self._queries = 0 # type error: Missing return statement - def open(self, configuration: str, create: bool = False) -> Optional[int]: # type: ignore[return] + def open(self, configuration: str, create: bool = False) -> int | None: # type: ignore[return] """This method is included so that calls to this Store via Graph, e.g. 
Graph("SPARQLStore"), can set the required parameters """ @@ -187,7 +186,7 @@ def addN(self, quads: Iterable[_QuadType]) -> None: # noqa: N802 # type error: Signature of "remove" incompatible with supertype "Store" def remove( # type: ignore[override] - self, _: _TriplePatternType, context: Optional[_ContextType] + self, _: _TriplePatternType, context: _ContextType | None ) -> None: raise TypeError("The SPARQL store is read only") @@ -224,9 +223,9 @@ def _inject_prefixes(self, query: str, extra_bindings: Mapping[str, Any]) -> str def query( # type: ignore[override] self, query: Union[Query, str], - initNs: Optional[Mapping[str, Any]] = None, # noqa: N803 - initBindings: Optional[Mapping[str, Identifier]] = None, # noqa: N803 - queryGraph: Optional[str] = None, # noqa: N803 + initNs: Mapping[str, Any] | None = None, # noqa: N803 + initBindings: Mapping[str, Identifier] | None = None, # noqa: N803 + queryGraph: str | None = None, # noqa: N803 DEBUG: bool = False, # noqa: N803 ) -> Result: self.debug = DEBUG @@ -252,7 +251,7 @@ def query( # type: ignore[override] # type error: Return type "Iterator[tuple[tuple[Node, Node, Node], None]]" of "triples" incompatible with return type "Iterator[tuple[tuple[Node, Node, Node], Iterator[Optional[Graph]]]]" def triples( # type: ignore[override] - self, spo: _TriplePatternType, context: Optional[_ContextType] = None + self, spo: _TriplePatternType, context: _ContextType | None = None ) -> Iterator[tuple[_TripleType, None]]: """ - tuple **(s, o, p)** @@ -411,24 +410,24 @@ def triples_choices( tuple[ list[_SubjectType] | tuple[_SubjectType], _PredicateType, - Optional[_ObjectType], + _ObjectType | None, ] | tuple[ - Optional[_SubjectType], + _SubjectType | None, list[_PredicateType] | tuple[_PredicateType], - Optional[_ObjectType], + _ObjectType | None, ] | tuple[ - Optional[_SubjectType], + _SubjectType | None, _PredicateType, list[_ObjectType] | tuple[_ObjectType], ] ), - context: Optional[_ContextType] = None, + context: 
_ContextType | None = None, ) -> Generator[ tuple[ _TripleType, - Iterator[Optional[_ContextType]], + Iterator[_ContextType | None], ], None, None, @@ -442,7 +441,7 @@ def triples_choices( """ raise NotImplementedError("Triples choices currently not supported") - def __len__(self, context: Optional[_ContextType] = None) -> int: + def __len__(self, context: _ContextType | None = None) -> int: if not self.sparql11: raise NotImplementedError( "For performance reasons, this is not" @@ -465,7 +464,7 @@ def __len__(self, context: Optional[_ContextType] = None) -> int: # type error: Return type "Generator[Identifier, None, None]" of "contexts" incompatible with return type "Generator[Graph, None, None]" in supertype "Store" def contexts( # type: ignore[override] - self, triple: Optional[_TripleType] = None + self, triple: _TripleType | None = None ) -> Generator[_ContextIdentifierType, None, None]: """ Iterates over results to "SELECT ?NAME { GRAPH ?NAME { ?s ?p ?o } }" @@ -505,11 +504,11 @@ def bind(self, prefix: str, namespace: URIRef, override: bool = True) -> None: del self.nsBindings[bound_prefix] self.nsBindings[prefix] = namespace - def prefix(self, namespace: URIRef) -> Optional[str]: + def prefix(self, namespace: URIRef) -> str | None: """ """ return dict([(v, k) for k, v in self.nsBindings.items()]).get(namespace) - def namespace(self, prefix: str) -> Optional[URIRef]: + def namespace(self, prefix: str) -> URIRef | None: return self.nsBindings.get(prefix) def namespaces(self) -> Iterator[tuple[str, URIRef]]: @@ -526,9 +525,9 @@ def remove_graph(self, graph: Graph) -> None: def _is_contextual(self, graph: None) -> te.Literal[False]: ... @overload - def _is_contextual(self, graph: Optional[Union[Graph, str]]) -> bool: ... + def _is_contextual(self, graph: Graph | str | None) -> bool: ... 
- def _is_contextual(self, graph: Optional[Union[Graph, str]]) -> bool: + def _is_contextual(self, graph: Graph | str | None) -> bool: """Returns `True` if the "GRAPH" keyword must appear in the final SPARQL query sent to the endpoint. """ @@ -541,8 +540,8 @@ def _is_contextual(self, graph: Optional[Union[Graph, str]]) -> bool: def subjects( self, - predicate: Optional[_PredicateType] = None, - object: Optional[_ObjectType] = None, + predicate: _PredicateType | None = None, + object: _ObjectType | None = None, ) -> Generator[_SubjectType, None, None]: """A generator of subjects with the given predicate and object""" for t, c in self.triples((None, predicate, object)): @@ -550,8 +549,8 @@ def subjects( def predicates( self, - subject: Optional[_SubjectType] = None, - object: Optional[_ObjectType] = None, + subject: _SubjectType | None = None, + object: _ObjectType | None = None, ) -> Generator[_PredicateType, None, None]: """A generator of predicates with the given subject and object""" for t, c in self.triples((subject, None, object)): @@ -559,29 +558,29 @@ def predicates( def objects( self, - subject: Optional[_SubjectType] = None, - predicate: Optional[_PredicateType] = None, + subject: _SubjectType | None = None, + predicate: _PredicateType | None = None, ) -> Generator[_ObjectType, None, None]: """A generator of objects with the given subject and predicate""" for t, c in self.triples((subject, predicate, None)): yield t[2] def subject_predicates( - self, object: Optional[_ObjectType] = None + self, object: _ObjectType | None = None ) -> Generator[tuple[_SubjectType, _PredicateType], None, None]: """A generator of (subject, predicate) tuples for the given object""" for t, c in self.triples((None, None, object)): yield t[0], t[1] def subject_objects( - self, predicate: Optional[_PredicateType] = None + self, predicate: _PredicateType | None = None ) -> Generator[tuple[_SubjectType, _ObjectType], None, None]: """A generator of (subject, object) tuples for the 
given predicate""" for t, c in self.triples((None, predicate, None)): yield t[0], t[2] def predicate_objects( - self, subject: Optional[_SubjectType] = None + self, subject: _SubjectType | None = None ) -> Generator[tuple[_PredicateType, _ObjectType], None, None]: """A generator of (predicate, object) tuples for the given subject""" for t, c in self.triples((subject, None, None)): @@ -663,8 +662,8 @@ class SPARQLUpdateStore(SPARQLStore): def __init__( self, - query_endpoint: Optional[str] = None, - update_endpoint: Optional[str] = None, + query_endpoint: str | None = None, + update_endpoint: str | None = None, sparql11: bool = True, context_aware: bool = True, postAsEncoded: bool = True, # noqa: N803 @@ -694,7 +693,7 @@ def __init__( self.postAsEncoded = postAsEncoded self.autocommit = autocommit self.dirty_reads = dirty_reads - self._edits: Optional[list[str]] = None + self._edits: list[str] | None = None self._updates = 0 def query(self, *args: Any, **kwargs: Any) -> Result: @@ -767,7 +766,7 @@ def rollback(self) -> None: def add( self, spo: _TripleType, - context: Optional[_ContextType] = None, + context: _ContextType | None = None, quoted: bool = False, ) -> None: """Add a triple to the store of triples.""" @@ -816,7 +815,7 @@ def addN(self, quads: Iterable[_QuadType]) -> None: # noqa: N802 # type error: Signature of "remove" incompatible with supertype "Store" def remove( # type: ignore[override] - self, spo: _TriplePatternType, context: Optional[_ContextType] + self, spo: _TriplePatternType, context: _ContextType | None ) -> None: """Remove a triple from the store""" if not self.update_endpoint: @@ -868,10 +867,10 @@ def _update(self, update): # type error: Signature of "update" incompatible with supertype "Store" def update( # type: ignore[override] self, - query: Union[Update, str], + query: Update | str, initNs: dict[str, Any] = {}, # noqa: N803 initBindings: dict[str, Identifier] = {}, # noqa: N803 - queryGraph: Optional[str] = None, # noqa: N803 + 
queryGraph: str | None = None, # noqa: N803 DEBUG: bool = False, # noqa: N803 ): """ @@ -1010,8 +1009,8 @@ def remove_graph(self, graph: Graph) -> None: def subjects( self, - predicate: Optional[_PredicateType] = None, - object: Optional[_ObjectType] = None, + predicate: _PredicateType | None = None, + object: _ObjectType | None = None, ) -> Generator[_SubjectType, None, None]: """A generator of subjects with the given predicate and object""" for t, c in self.triples((None, predicate, object)): @@ -1019,8 +1018,8 @@ def subjects( def predicates( self, - subject: Optional[_SubjectType] = None, - object: Optional[_ObjectType] = None, + subject: _SubjectType | None = None, + object: _ObjectType | None = None, ) -> Generator[_PredicateType, None, None]: """A generator of predicates with the given subject and object""" for t, c in self.triples((subject, None, object)): @@ -1028,29 +1027,29 @@ def predicates( def objects( self, - subject: Optional[_SubjectType] = None, - predicate: Optional[_PredicateType] = None, + subject: _SubjectType | None = None, + predicate: _PredicateType | None = None, ) -> Generator[_ObjectType, None, None]: """A generator of objects with the given subject and predicate""" for t, c in self.triples((subject, predicate, None)): yield t[2] def subject_predicates( - self, object: Optional[_ObjectType] = None + self, object: _ObjectType | None = None ) -> Generator[tuple[_SubjectType, _PredicateType], None, None]: """A generator of (subject, predicate) tuples for the given object""" for t, c in self.triples((None, None, object)): yield t[0], t[1] def subject_objects( - self, predicate: Optional[_PredicateType] = None + self, predicate: _PredicateType | None = None ) -> Generator[tuple[_SubjectType, _ObjectType], None, None]: """A generator of (subject, object) tuples for the given predicate""" for t, c in self.triples((None, predicate, None)): yield t[0], t[2] def predicate_objects( - self, subject: Optional[_SubjectType] = None + self, subject: 
_SubjectType | None = None ) -> Generator[tuple[_PredicateType, _ObjectType], None, None]: """A generator of (predicate, object) tuples for the given subject""" for t, c in self.triples((subject, None, None)): diff --git a/rdflib/query.py b/rdflib/query.py index a1504992c..f0a3a4e9d 100644 --- a/rdflib/query.py +++ b/rdflib/query.py @@ -192,12 +192,12 @@ def get(self, name: str, default: QueryResultValueType) -> QueryResultValueType: @overload def get( - self, name: str, default: Optional[QueryResultValueType] = ... - ) -> Optional[QueryResultValueType]: ... + self, name: str, default: QueryResultValueType | None = ... + ) -> QueryResultValueType | None: ... def get( self, name: str, default: Optional[QueryResultValueType] = None - ) -> Optional[QueryResultValueType]: + ) -> QueryResultValueType | None: try: return self[name] except KeyError: @@ -232,13 +232,13 @@ def __init__(self, type_: str): self.type = type_ #: variables contained in the result. - self.vars: Optional[list[Variable]] = None + self.vars: list[Variable] | None = None self._bindings: MutableSequence[Mapping[Variable, QueryResultValueType]] = None # type: ignore[assignment] - self._genbindings: Optional[ - Iterator[Mapping[Variable, QueryResultValueType]] - ] = None - self.askAnswer: Optional[bool] = None - self.graph: Optional[Graph] = None + self._genbindings: Iterator[Mapping[Variable, QueryResultValueType]] | None = ( + None + ) + self.askAnswer: bool | None = None + self.graph: Graph | None = None @property def bindings(self) -> MutableSequence[Mapping[Variable, QueryResultValueType]]: @@ -268,9 +268,9 @@ def bindings( @staticmethod def parse( - source: Optional[IO] = None, - format: Optional[str] = None, - content_type: Optional[str] = None, + source: IO | None = None, + format: str | None = None, + content_type: str | None = None, **kwargs: Any, ) -> Result: from rdflib import plugin @@ -291,11 +291,11 @@ def parse( def serialize( self, - destination: Optional[Union[str, IO]] = None, + 
destination: str | IO | None = None, encoding: str = "utf-8", format: str = "xml", **args: Any, - ) -> Optional[bytes]: + ) -> bytes | None: """ Serialize the query result. diff --git a/rdflib/serializer.py b/rdflib/serializer.py index 6f1230d59..761a4f436 100644 --- a/rdflib/serializer.py +++ b/rdflib/serializer.py @@ -12,7 +12,7 @@ from __future__ import annotations -from typing import IO, TYPE_CHECKING, Any, Optional, TypeVar, Union +from typing import IO, TYPE_CHECKING, Any, TypeVar, Union from rdflib.term import URIRef @@ -29,13 +29,13 @@ class Serializer: def __init__(self, store: Graph): self.store: Graph = store self.encoding: str = "utf-8" - self.base: Optional[str] = None + self.base: str | None = None def serialize( self, stream: IO[bytes], - base: Optional[str] = None, - encoding: Optional[str] = None, + base: str | None = None, + encoding: str | None = None, **args: Any, ) -> None: """Abstract method""" diff --git a/rdflib/store.py b/rdflib/store.py index 2844bbd9f..9402746df 100644 --- a/rdflib/store.py +++ b/rdflib/store.py @@ -29,7 +29,7 @@ import pickle from io import BytesIO -from typing import TYPE_CHECKING, Any, Optional +from typing import TYPE_CHECKING, Any from rdflib.events import Dispatcher, Event @@ -109,7 +109,7 @@ def __init__(self) -> None: self._ids: dict[Any, str] = {} self._get_object = self._objects.__getitem__ - def _get_ids(self, key: Any) -> Optional[str]: + def _get_ids(self, key: Any) -> str | None: try: return self._ids.get(key) except TypeError: @@ -129,9 +129,7 @@ def loads(self, s: bytes) -> Node: except KeyError as e: raise UnpicklingError("Could not find Node class for %s" % e) - def dumps( - self, obj: Node, protocol: Optional[Any] = None, bin: Optional[Any] = None - ): + def dumps(self, obj: Node, protocol: Any | None = None, bin: Any | None = None): src = BytesIO() p = Pickler(src) # NOTE on type error: https://github.com/python/mypy/issues/2427 @@ -164,15 +162,15 @@ class Store: def __init__( self, - configuration: 
Optional[str] = None, - identifier: Optional[Identifier] = None, + configuration: str | None = None, + identifier: Identifier | None = None, ): """ identifier: URIRef of the Store. Defaults to CWD configuration: string containing information open can use to connect to datastore. """ - self.__node_pickler: Optional[NodePickler] = None + self.__node_pickler: NodePickler | None = None self.dispatcher = Dispatcher() if configuration: self.open(configuration) @@ -198,7 +196,7 @@ def node_pickler(self) -> NodePickler: def create(self, configuration: str) -> None: self.dispatcher.dispatch(StoreCreatedEvent(configuration=configuration)) - def open(self, configuration: str, create: bool = False) -> Optional[int]: + def open(self, configuration: str, create: bool = False) -> int | None: """ Opens the store specified by the configuration string. If create is True a store will be created if it does not already @@ -264,7 +262,7 @@ def addN(self, quads: Iterable[_QuadType]) -> None: # noqa: N802 def remove( self, triple: _TriplePatternType, - context: Optional[_ContextType] = None, + context: _ContextType | None = None, ) -> None: """Remove the set of triples matching the pattern from the store""" self.dispatcher.dispatch(TripleRemovedEvent(triple=triple, context=context)) @@ -275,24 +273,24 @@ def triples_choices( tuple[ list[_SubjectType] | tuple[_SubjectType], _PredicateType, - Optional[_ObjectType], + _ObjectType | None, ] | tuple[ - Optional[_SubjectType], + _SubjectType | None, list[_PredicateType] | tuple[_PredicateType], - Optional[_ObjectType], + _ObjectType | None, ] | tuple[ - Optional[_SubjectType], + _SubjectType | None, _PredicateType, list[_ObjectType] | tuple[_ObjectType], ] ), - context: Optional[_ContextType] = None, + context: _ContextType | None = None, ) -> Generator[ tuple[ _TripleType, - Iterator[Optional[_ContextType]], + Iterator[_ContextType | None], ], None, None, @@ -303,9 +301,9 @@ def triples_choices( time from the default 'fallback' implementation, 
which will iterate over each term in the list and dispatch to triples """ - subject: Optional[_SubjectType] | list[_SubjectType] | tuple[_SubjectType] + subject: _SubjectType | list[_SubjectType] | tuple[_SubjectType] | None predicate: _PredicateType | list[_PredicateType] | tuple[_PredicateType] - object_: Optional[_ObjectType] | list[_ObjectType] | tuple[_ObjectType] + object_: _ObjectType | list[_ObjectType] | tuple[_ObjectType] | None subject, predicate, object_ = triple if isinstance(object_, (list, tuple)): # MyPy thinks these are unreachable due to the triple pattern signature. @@ -355,8 +353,8 @@ def triples_choices( def triples( # type: ignore[return] self, triple_pattern: _TriplePatternType, - context: Optional[_ContextType] = None, - ) -> Iterator[tuple[_TripleType, Iterator[Optional[_ContextType]]]]: + context: _ContextType | None = None, + ) -> Iterator[tuple[_TripleType, Iterator[_ContextType | None]]]: """ A generator over all the triples matching the pattern. Pattern can include any objects for used for comparing against nodes in the store, @@ -372,7 +370,7 @@ def triples( # type: ignore[return] # variants of triples will be done if / when optimization is needed # type error: Missing return statement - def __len__(self, context: Optional[_ContextType] = None) -> int: # type: ignore[empty-body] + def __len__(self, context: _ContextType | None = None) -> int: # type: ignore[empty-body] """ Number of statements in the store. This should only account for non- quoted (asserted) statements if the context is not specified, @@ -384,7 +382,7 @@ def __len__(self, context: Optional[_ContextType] = None) -> int: # type: ignor # type error: Missing return statement def contexts( # type: ignore[empty-body] - self, triple: Optional[_TripleType] = None + self, triple: _TripleType | None = None ) -> Generator[_ContextType, None, None]: """ Generator over all contexts in the graph. 
If triple is specified, @@ -450,10 +448,10 @@ def bind(self, prefix: str, namespace: URIRef, override: bool = True) -> None: :param override: rebind, even if the given namespace is already bound to another prefix. """ - def prefix(self, namespace: URIRef) -> Optional[str]: + def prefix(self, namespace: URIRef) -> str | None: """""" - def namespace(self, prefix: str) -> Optional[URIRef]: + def namespace(self, prefix: str) -> URIRef | None: """ """ def namespaces(self) -> Iterator[tuple[str, URIRef]]: diff --git a/rdflib/term.py b/rdflib/term.py index aecf41d5d..ce0127afd 100644 --- a/rdflib/term.py +++ b/rdflib/term.py @@ -52,7 +52,6 @@ from typing import ( TYPE_CHECKING, Any, - Optional, TypeVar, overload, ) @@ -142,7 +141,7 @@ class Node(metaclass=ABCMeta): __slots__ = () @abstractmethod - def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: ... + def n3(self, namespace_manager: NamespaceManager | None = None) -> str: ... @abstractmethod def __getnewargs__(self) -> tuple[Any, ...]: ... @@ -242,8 +241,8 @@ def __ge__(self, other: Any) -> bool: def startswith( self, prefix: str | tuple[str, ...] 
| Any, - start: Optional[Any] = None, - end: Optional[Any] = None, + start: Any | None = None, + end: Any | None = None, ) -> bool: if isinstance(prefix, (str, tuple)): return super(Identifier, self).startswith(prefix, start, end) @@ -259,7 +258,7 @@ def startswith( def __getnewargs__(self) -> tuple[Any, ...]: return (str(self),) - def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: + def n3(self, namespace_manager: NamespaceManager | None = None) -> str: raise NotImplementedError() @property @@ -277,7 +276,7 @@ class IdentifiedNode(Identifier): __slots__ = () - def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: + def n3(self, namespace_manager: NamespaceManager | None = None) -> str: raise NotImplementedError() def toPython(self) -> str: # noqa: N802 @@ -304,7 +303,7 @@ class URIRef(IdentifiedNode): __neg__: Callable[[URIRef], NegatedPath] __truediv__: Callable[[URIRef, URIRef | Path], SequencePath] - def __new__(cls, value: str, base: Optional[str] = None) -> URIRef: + def __new__(cls, value: str, base: str | None = None) -> URIRef: if base is not None: ends_in_hash = value.endswith("#") # type error: Argument "allow_fragments" to "urljoin" has incompatible type "int"; expected "bool" @@ -325,7 +324,7 @@ def __new__(cls, value: str, base: Optional[str] = None) -> URIRef: rt = str.__new__(cls, value, "utf-8") # type: ignore[call-overload] return rt - def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: + def n3(self, namespace_manager: NamespaceManager | None = None) -> str: """ This will do a limited check for valid URIs, essentially just making sure that the string includes no illegal @@ -477,8 +476,8 @@ class BNode(IdentifiedNode): def __new__( cls, - value: Optional[str] = None, - _sn_gen: Optional[Callable[[], str] | Generator] = None, + value: str | None = None, + _sn_gen: Callable[[], str] | Generator | None = None, _prefix: str = _unique_id(), ) -> BNode: """ @@ -512,7 +511,7 @@ def 
__new__( # type error: Incompatible return value type (got "Identifier", expected "BNode") return Identifier.__new__(cls, value) # type: ignore[return-value] - def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: + def n3(self, namespace_manager: NamespaceManager | None = None) -> str: # note - for two strings, concat with + is faster than f"{x}{y}" return "_:" + self @@ -527,7 +526,7 @@ def __repr__(self) -> str: return f"{clsName}({str.__repr__(self)})" def skolemize( - self, authority: Optional[str] = None, basepath: Optional[str] = None + self, authority: str | None = None, basepath: str | None = None ) -> URIRef: """Create a URIRef "skolem" representation of the BNode, in accordance with http://www.w3.org/TR/rdf11-concepts/#section-skolemization @@ -635,18 +634,18 @@ class Literal(Identifier): """ _value: Any - _language: Optional[str] + _language: str | None # NOTE: _datatype should maybe be of type URIRef, and not optional. - _datatype: Optional[URIRef] - _ill_typed: Optional[bool] + _datatype: URIRef | None + _ill_typed: bool | None __slots__ = ("_language", "_datatype", "_value", "_ill_typed") def __new__( cls, lexical_or_value: Any, - lang: Optional[str] = None, - datatype: Optional[str] = None, - normalize: Optional[bool] = None, + lang: str | None = None, + datatype: str | None = None, + normalize: bool | None = None, ) -> Literal: if lang == "": lang = None # no empty lang-tags in RDF @@ -666,7 +665,7 @@ def __new__( datatype = URIRef(datatype) value = None - ill_typed: Optional[bool] = None + ill_typed: bool | None = None if isinstance(lexical_or_value, Literal): # create from another Literal instance @@ -748,7 +747,7 @@ def normalize(self) -> Literal: return self @property - def ill_typed(self) -> Optional[bool]: + def ill_typed(self) -> bool | None: """ For `recognized datatype IRIs `_, @@ -765,11 +764,11 @@ def value(self) -> Any: return self._value @property - def language(self) -> Optional[str]: + def language(self) -> str | 
None: return self._language @property - def datatype(self) -> Optional[URIRef]: + def datatype(self) -> URIRef | None: return self._datatype def __reduce__( @@ -1471,7 +1470,7 @@ def eq(self, other: Any) -> bool: def neq(self, other: Any) -> bool: return not self.eq(other) - def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: + def n3(self, namespace_manager: NamespaceManager | None = None) -> str: r''' Returns a representation in the N3 format. @@ -1532,7 +1531,7 @@ def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: def _literal_n3( self, use_plain: bool = False, - qname_callback: Optional[Callable[[URIRef], Optional[str]]] = None, + qname_callback: Callable[[URIRef], str | None] | None = None, ) -> str: """ Using plain literal (shorthand) output:: @@ -1996,9 +1995,9 @@ def _well_formed_negative_integer(lexical: str | bytes, value: Any) -> bool: def _py2literal( obj: Any, pType: Any, # noqa: N803 - castFunc: Optional[Callable[[Any], Any]], # noqa: N803 - dType: Optional[_StrT], # noqa: N803 -) -> tuple[Any, Optional[_StrT]]: + castFunc: Callable[[Any], Any] | None, # noqa: N803 + dType: _StrT | None, # noqa: N803 +) -> tuple[Any, _StrT | None]: if castFunc is not None: return castFunc(obj), dType elif dType is not None: @@ -2008,14 +2007,14 @@ def _py2literal( def _castPythonToLiteral( # noqa: N802 - obj: Any, datatype: Optional[str] -) -> tuple[Any, Optional[str]]: + obj: Any, datatype: str | None +) -> tuple[Any, str | None]: """ Casts a tuple of a python type and a special datatype URI to a tuple of the lexical value and a datatype URI (or None) """ - castFunc: Optional[Callable[[Any], str | bytes]] # noqa: N806 - dType: Optional[str] # noqa: N806 + castFunc: Callable[[Any], str | bytes] | None # noqa: N806 + dType: str | None # noqa: N806 for (pType, dType), castFunc in _SpecificPythonToXSDRules: # noqa: N806 if isinstance(obj, pType) and dType == datatype: return _py2literal(obj, pType, castFunc, dType) @@ 
-2038,7 +2037,7 @@ def _castPythonToLiteral( # noqa: N802 # both map to the abstract integer type, # rather than some concrete bit-limited datatype _GenericPythonToXSDRules: list[ - tuple[type[Any], tuple[Optional[Callable[[Any], str | bytes]], Optional[str]]] + tuple[type[Any], tuple[Callable[[Any], str | bytes] | None, str | None]] ] = [ (str, (None, None)), (float, (None, _XSD_DOUBLE)), @@ -2070,7 +2069,7 @@ def _castPythonToLiteral( # noqa: N802 _OriginalGenericPythonToXSDRules = list(_GenericPythonToXSDRules) _SpecificPythonToXSDRules: list[ - tuple[tuple[type[Any], str], Optional[Callable[[Any], str | bytes]]] + tuple[tuple[type[Any], str], Callable[[Any], str | bytes] | None] ] = [ ((date, _XSD_GYEAR), lambda val: val.strftime("%Y").zfill(4)), ((date, _XSD_GYEARMONTH), lambda val: val.strftime("%Y-%m").zfill(7)), @@ -2082,7 +2081,7 @@ def _castPythonToLiteral( # noqa: N802 _OriginalSpecificPythonToXSDRules = list(_SpecificPythonToXSDRules) -XSDToPython: dict[Optional[str], Optional[Callable[[str], Any]]] = { +XSDToPython: dict[str | None, Callable[[str], Any] | None] = { None: None, # plain literals map directly to value space URIRef(_XSD_PFX + "time"): parse_time, URIRef(_XSD_PFX + "date"): parse_xsd_date, @@ -2142,7 +2141,7 @@ def _castPythonToLiteral( # noqa: N802 URIRef(_XSD_PFX + "unsignedByte"): _well_formed_unsignedbyte, } -_toPythonMapping: dict[Optional[str], Optional[Callable[[str], Any]]] = {} # noqa: N816 +_toPythonMapping: dict[str | None, Callable[[str], Any] | None] = {} # noqa: N816 _toPythonMapping.update(XSDToPython) @@ -2162,7 +2161,7 @@ def _reset_bindings() -> None: def _castLexicalToPython( # noqa: N802 - lexical: str | bytes, datatype: Optional[URIRef] + lexical: str | bytes, datatype: URIRef | None ) -> Any: """ Map a lexical form to the value-space for the given datatype @@ -2226,8 +2225,8 @@ def _strip_and_collapse_whitespace(lexical_or_value: _AnyT) -> _AnyT: def bind( datatype: str, pythontype: type[Any], - constructor: 
Optional[Callable[[str], Any]] = None, - lexicalizer: Optional[Callable[[Any], str | bytes]] = None, + constructor: Callable[[str], Any] | None = None, + lexicalizer: Callable[[Any], str | bytes] | None = None, datatype_specific: bool = False, ) -> None: """ @@ -2285,7 +2284,7 @@ def __repr__(self) -> str: def toPython(self) -> str: # noqa: N802 return "?" + self - def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: + def n3(self, namespace_manager: NamespaceManager | None = None) -> str: return "?" + self def __reduce__(self) -> tuple[type[Variable], tuple[str]]: diff --git a/rdflib/tools/chunk_serializer.py b/rdflib/tools/chunk_serializer.py index 82cf8d26d..d09b5c0a3 100644 --- a/rdflib/tools/chunk_serializer.py +++ b/rdflib/tools/chunk_serializer.py @@ -11,7 +11,7 @@ from collections.abc import Generator from contextlib import ExitStack, contextmanager from pathlib import Path -from typing import TYPE_CHECKING, BinaryIO, Optional +from typing import TYPE_CHECKING, BinaryIO from rdflib.graph import Graph from rdflib.plugins.serializers.nt import _nt_row @@ -27,9 +27,9 @@ def serialize_in_chunks( g: Graph, max_triples: int = 10000, - max_file_size_kb: Optional[int] = None, + max_file_size_kb: int | None = None, file_name_stem: str = "chunk", - output_dir: Optional[Path] = None, + output_dir: Path | None = None, write_prefixes: bool = False, ) -> None: """ diff --git a/rdflib/tools/csv2rdf.py b/rdflib/tools/csv2rdf.py index 9f3d517ed..a4ef08189 100644 --- a/rdflib/tools/csv2rdf.py +++ b/rdflib/tools/csv2rdf.py @@ -19,7 +19,7 @@ import sys import time import warnings -from typing import Any, Optional, Union +from typing import Any, Union from urllib.parse import quote import rdflib @@ -91,7 +91,7 @@ """ # bah - ugly global -uris: dict[Any, tuple[URIRef, Optional[URIRef]]] = {} +uris: dict[Any, tuple[URIRef, URIRef | None]] = {} def toProperty(label: str): # noqa: N802 @@ -131,7 +131,7 @@ def csv_reader(csv_data, dialect=csv.excel, **kwargs): 
yield row -def prefixuri(x, prefix, class_: Optional[URIRef] = None): +def prefixuri(x, prefix, class_: URIRef | None = None): if prefix: r = rdflib.URIRef(prefix + quote(x.encode("utf8").replace(" ", "_"), safe="")) else: @@ -153,7 +153,7 @@ def __call__(self, x: Any): class NodeUri(NodeMaker): def __init__(self, prefix, class_): - self.class_: Optional[URIRef] = None + self.class_: URIRef | None = None self.prefix = prefix if class_: self.class_ = rdflib.URIRef(class_) diff --git a/rdflib/tools/rdfpipe.py b/rdflib/tools/rdfpipe.py index 118cd8b98..b8350c759 100644 --- a/rdflib/tools/rdfpipe.py +++ b/rdflib/tools/rdfpipe.py @@ -9,7 +9,7 @@ import logging import sys from optparse import OptionParser -from typing import BinaryIO, Optional +from typing import BinaryIO import rdflib from rdflib import plugin @@ -191,7 +191,7 @@ def main(): pfx, uri = ns_kw.split("=") ns_bindings[pfx] = uri - outfile: Optional[BinaryIO] = sys.stdout.buffer + outfile: BinaryIO | None = sys.stdout.buffer if opts.no_out: outfile = None diff --git a/rdflib/util.py b/rdflib/util.py index 7045a9472..8309541ff 100644 --- a/rdflib/util.py +++ b/rdflib/util.py @@ -31,7 +31,6 @@ from typing import ( TYPE_CHECKING, Any, - Optional, TypeVar, Union, overload, @@ -79,7 +78,7 @@ def list2set(seq: Iterable[_HashableT]) -> list[_HashableT]: return [x for x in seq if x not in seen and not seen.add(x)] # type: ignore[func-returns-value] -def first(seq: Iterable[_AnyT]) -> Optional[_AnyT]: +def first(seq: Iterable[_AnyT]) -> _AnyT | None: """ return the first element in a python sequence for graphs, use graph.value instead @@ -108,8 +107,8 @@ def more_than(sequence: Iterable[Any], number: int) -> int: def to_term( - s: Optional[str], default: Optional[rdflib.term.Identifier] = None -) -> Optional[rdflib.term.Identifier]: + s: str | None, default: rdflib.term.Identifier | None = None +) -> rdflib.term.Identifier | None: """ Creates and returns an Identifier of type corresponding to the pattern of the given 
positional argument string ``s``: @@ -138,10 +137,10 @@ def to_term( def from_n3( s: str, - default: Optional[str] = None, - backend: Optional[str] = None, - nsm: Optional[rdflib.namespace.NamespaceManager] = None, -) -> Optional[Union[rdflib.term.Node, str]]: + default: str | None = None, + backend: str | None = None, + nsm: rdflib.namespace.NamespaceManager | None = None, +) -> Union[rdflib.term.Node, str] | None: r''' Creates the Identifier corresponding to the given n3 string. @@ -346,7 +345,7 @@ def parse_date_time(val: str) -> int: } -def guess_format(fpath: str, fmap: Optional[dict[str, str]] = None) -> Optional[str]: +def guess_format(fpath: str, fmap: dict[str, str] | None = None) -> str | None: """ Guess RDF serialization based on file suffix. Uses ``SUFFIX_FORMAT_MAP`` unless ``fmap`` is provided. Examples: @@ -407,7 +406,7 @@ def _get_ext(fpath: str, lower: bool = True) -> str: def find_roots( graph: Graph, prop: rdflib.term.URIRef, - roots: Optional[set[_SubjectType | _ObjectType]] = None, + roots: set[_SubjectType | _ObjectType] | None = None, ) -> set[_SubjectType | _ObjectType]: """ Find the roots in some sort of transitive hierarchy. @@ -437,10 +436,10 @@ def get_tree( root: Union[_SubjectType, _ObjectType], prop: rdflib.term.URIRef, mapper: Callable[[rdflib.term.Node], rdflib.term.Node] = lambda x: x, - sortkey: Optional[Callable[[Any], Any]] = None, - done: Optional[set[rdflib.term.Node]] = None, + sortkey: Callable[[Any], Any] | None = None, + done: set[rdflib.term.Node] | None = None, dir: str = "down", -) -> Optional[tuple[rdflib.term.Node, list[Any]]]: +) -> tuple[rdflib.term.Node, list[Any]] | None: """ Return a nested list/tuple structure representing the tree built by the transitive property given, starting from the root given @@ -482,18 +481,14 @@ def get_tree( @overload -def _coalesce(*args: Optional[_AnyT], default: _AnyT) -> _AnyT: ... +def _coalesce(*args: _AnyT | None, default: _AnyT) -> _AnyT: ... 
@overload -def _coalesce( - *args: Optional[_AnyT], default: Optional[_AnyT] = ... -) -> Optional[_AnyT]: ... +def _coalesce(*args: _AnyT | None, default: _AnyT | None = ...) -> _AnyT | None: ... -def _coalesce( - *args: Optional[_AnyT], default: Optional[_AnyT] = None -) -> Optional[_AnyT]: +def _coalesce(*args: _AnyT | None, default: _AnyT | None = None) -> _AnyT | None: """ This is a null coalescing function, it will return the first non-`None` argument passed to it, otherwise it will return ``default`` which is `None` diff --git a/rdflib/void.py b/rdflib/void.py index 0fa6a7c48..876614974 100644 --- a/rdflib/void.py +++ b/rdflib/void.py @@ -1,7 +1,6 @@ from __future__ import annotations from collections import defaultdict -from typing import Optional from rdflib.graph import Graph, _ObjectType, _PredicateType, _SubjectType from rdflib.namespace import RDF, VOID @@ -10,8 +9,8 @@ def generateVoID( # noqa: N802 g: Graph, - dataset: Optional[IdentifiedNode] = None, - res: Optional[Graph] = None, + dataset: IdentifiedNode | None = None, + res: Graph | None = None, distinctForPartitions: bool = True, # noqa: N803 ): """ diff --git a/scratch_skolem.py b/scratch_skolem.py new file mode 100644 index 000000000..1a24e65de --- /dev/null +++ b/scratch_skolem.py @@ -0,0 +1,73 @@ +from __future__ import annotations + +import threading +from dataclasses import dataclass, field +from typing import Any, final +from uuid import uuid4 +from weakref import WeakValueDictionary + + +class InternedBlankNode(str): + _intern_cache: WeakValueDictionary[str, InternedBlankNode] = WeakValueDictionary() + _lock = threading.Lock() + + __slots__ = ("__weakref__",) + + def __new__(cls, value: str | None = None) -> InternedBlankNode: + if value is None: + value = str(uuid4()).replace("-", "0") + + with cls._lock: + if value in cls._intern_cache: + return cls._intern_cache[value] + + instance = super().__new__(cls, value) + object.__setattr__(instance, "value", value) + cls._intern_cache[value] = 
instance + return instance + + +@final +@dataclass(frozen=True, eq=False) +class BlankNode(InternedBlankNode): + """ + An RDF blank node representing an anonymous resource. + + Specification: https://www.w3.org/TR/rdf12-concepts/#section-blank-nodes + + This implementation uses object interning to ensure that blank nodes + with the same identifier reference the same object instance, optimizing + memory usage. The class is marked final to ensure the :py:meth:`BlankNode.__new__` + implementation cannot be overridden. + + :param value: + A blank node identifier. If :py:obj:`None` is provided, an identifier + will be generated. + """ + + value: str = field(default_factory=lambda: str(uuid4()).replace("-", "0")) + + def __str__(self) -> str: + return f"_:{self.value}" + + def __reduce__(self) -> str | tuple[Any, ...]: + return self.__class__, (self.value,) + + +__all__ = ["BlankNode"] + + +if __name__ == "__main__": + import timeit + + bnode = BlankNode("123") + with_time = timeit.timeit(lambda: hash(bnode), number=1000000) + print(f"BlankNode - Average time per hash: {with_time / 1000000:.9f} seconds") + + print(id(bnode), id(bnode.value)) + + bnode2 = BlankNode("123") + print(id(bnode), id(bnode2)) + print(id(bnode.value), id(bnode2.value)) + + print(bnode is bnode2) diff --git a/test/conftest.py b/test/conftest.py index a983fe7c2..16c21b308 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -11,10 +11,6 @@ from collections.abc import Collection, Generator, Iterable from pathlib import Path -from typing import ( - Optional, - Union, -) from rdflib import Graph from test.utils.audit import AuditHookDispatcher @@ -97,9 +93,7 @@ def exit_stack() -> Generator[ExitStack, None, None]: yield stack -EXTRA_MARKERS: dict[ - tuple[Optional[str], str], Collection[Union[pytest.MarkDecorator, str]] -] = { +EXTRA_MARKERS: dict[tuple[str | None, str], Collection[pytest.MarkDecorator | str]] = { ("rdflib/__init__.py", "rdflib"): [pytest.mark.webtest], ("rdflib/term.py", 
"rdflib.term.Literal.normalize"): [pytest.mark.webtest], ("rdflib/extras/infixowl.py", "rdflib.extras.infixowl"): [pytest.mark.webtest], diff --git a/test/test_graph/test_graph.py b/test/test_graph/test_graph.py index 63180be65..772f613e4 100644 --- a/test/test_graph/test_graph.py +++ b/test/test_graph/test_graph.py @@ -3,7 +3,7 @@ import logging import os from pathlib import Path -from typing import Callable, Optional +from typing import Callable from urllib.error import HTTPError, URLError import pytest @@ -63,8 +63,8 @@ def test_property_namespace_manager() -> None: assert ("test", URIRef("example:test:")) in nss -def get_store_names() -> set[Optional[str]]: - names: set[Optional[str]] = {*get_unique_plugin_names(Store)} +def get_store_names() -> set[str | None]: + names: set[str | None] = {*get_unique_plugin_names(Store)} names.difference_update( { "default", @@ -87,7 +87,7 @@ def get_store_names() -> set[Optional[str]]: @pytest.fixture(scope="function", params=get_store_names()) def make_graph(tmp_path: Path, request) -> GraphFactory: - store_name: Optional[str] = request.param + store_name: str | None = request.param def make_graph() -> Graph: if store_name is None: @@ -372,7 +372,7 @@ def test_guess_format_for_parse_http( make_graph: GraphFactory, http_file_server: HTTPFileServer, file: Path, - content_type: Optional[str], + content_type: str | None, expected_result: OutcomePrimitive[int], ) -> None: graph = make_graph() diff --git a/test/test_graph/test_graph_context.py b/test/test_graph/test_graph_context.py index 7d0a90f7c..b6c597c90 100644 --- a/test/test_graph/test_graph_context.py +++ b/test/test_graph/test_graph_context.py @@ -5,7 +5,6 @@ import sys import unittest from tempfile import mkdtemp, mkstemp -from typing import Optional import pytest @@ -16,7 +15,7 @@ class ContextTestCase(unittest.TestCase): store = "default" slow = True - tmppath: Optional[str] = None + tmppath: str | None = None def setUp(self): try: diff --git 
a/test/test_graph/test_graph_store.py b/test/test_graph/test_graph_store.py index 4fae00aa2..2a57b2ba8 100644 --- a/test/test_graph/test_graph_store.py +++ b/test/test_graph/test_graph_store.py @@ -11,7 +11,6 @@ TYPE_CHECKING, Any, Callable, - Optional, Union, ) from unittest.mock import patch @@ -88,11 +87,11 @@ def _p( graph_factory: GraphFactory, ops: GraphOperations, expected_bindings: NamespaceBindings, - expected_bindings_overrides: Optional[ - dict[tuple[type[Graph], type[Store]], NamespaceBindings] + expected_bindings_overrides: None | dict[ + tuple[type[Graph], type[Store]], NamespaceBindings ] = None, *, - id: Optional[str] = None, + id: str | None = None, ): if expected_bindings_overrides is not None: expected_bindings = expected_bindings_overrides.get( diff --git a/test/test_graph/test_variants.py b/test/test_graph/test_variants.py index 95884d9b5..de4fa246e 100644 --- a/test/test_graph/test_variants.py +++ b/test/test_graph/test_variants.py @@ -13,8 +13,6 @@ from re import Pattern from typing import ( ClassVar, - Optional, - Union, cast, ) @@ -47,8 +45,8 @@ class GraphAsserts: A specification of asserts that must be checked against a graph. """ - quad_count: Optional[int] = None - has_subject_iris: Optional[list[str]] = None + quad_count: int | None = None + has_subject_iris: list[str] | None = None def check(self, graph: Dataset) -> None: """ @@ -79,7 +77,7 @@ class GraphVariantsMeta(GraphAsserts): Meta information about a set of variants. 
""" - public_id: Optional[str] = None + public_id: str | None = None exact_match: bool = False @@ -123,9 +121,7 @@ def __post_init__(self) -> None: def pytest_param( self, - marks: Optional[ - Union[MarkDecorator, Collection[Union[MarkDecorator, Mark]]] - ] = None, + marks: MarkDecorator | Collection[MarkDecorator | Mark] | None = None, ) -> ParameterSet: if marks is None: marks = cast(tuple[MarkDecorator], tuple()) @@ -144,7 +140,7 @@ def load(self, variant_key: str, graph_type: type[_GraphT]) -> _GraphT: return variant.load(public_id=self.public_id, graph_type=graph_type) @classmethod - def _decompose_path(cls, file_path: Path, basedir: Optional[Path]): + def _decompose_path(cls, file_path: Path, basedir: Path | None): if basedir: file_path = file_path.absolute().resolve().relative_to(basedir) name_noext, ext = os.path.splitext(file_path) @@ -158,7 +154,7 @@ def _decompose_path(cls, file_path: Path, basedir: Optional[Path]): @classmethod def for_files( - cls, file_paths: Iterable[Path], basedir: Optional[Path] = None + cls, file_paths: Iterable[Path], basedir: Path | None = None ) -> dict[str, GraphVariants]: graph_sources: defaultdict[str, dict[str, GraphSource]] = defaultdict(dict) graph_meta: dict[str, GraphVariantsMeta] = {} @@ -191,7 +187,7 @@ def for_files( @classmethod def for_directory( - cls, directory: Path, basedir: Optional[Path] = None + cls, directory: Path, basedir: Path | None = None ) -> dict[str, GraphVariants]: file_paths = [] for file_path in directory.glob("*"): @@ -208,7 +204,7 @@ def for_directory( **GraphVariants.for_files(EXTRA_FILES, TEST_DIR), } -EXPECTED_FAILURES: dict[tuple[str, Optional[str]], MarkDecorator] = { +EXPECTED_FAILURES: dict[tuple[str, str | None], MarkDecorator] = { ("variants/schema_only_base", ".ttl"): pytest.mark.xfail( reason="Some issue with handling base URI that does not end with a slash", raises=ValueError, @@ -302,9 +298,7 @@ def make_variant_source_cases() -> Iterable[ParameterSet]: 
@pytest.mark.parametrize(["graph_variants", "variant_key"], make_variant_source_cases()) -def test_variant_source( - graph_variants: GraphVariants, variant_key: Optional[str] -) -> None: +def test_variant_source(graph_variants: GraphVariants, variant_key: str | None) -> None: """ All variants of a graph are isomorphic with the preferred variant, and thus eachother. diff --git a/test/test_literal/test_literal.py b/test/test_literal/test_literal.py index 33cb9ec8d..f53825772 100644 --- a/test/test_literal/test_literal.py +++ b/test/test_literal/test_literal.py @@ -5,7 +5,7 @@ import logging from collections.abc import Generator from decimal import Decimal -from typing import Any, Callable, Optional, Union +from typing import Any, Callable, Union from rdflib.xsd_datetime import Duration from test.utils import affix_tuples @@ -161,8 +161,8 @@ def test_cant_pass_invalid_lang( ) def test_ill_typed_literals( lexical: Union[bytes, str], - datatype: Optional[URIRef], - is_ill_typed: Optional[bool], + datatype: URIRef | None, + is_ill_typed: bool | None, ) -> None: """ ill_typed has the correct value. @@ -898,7 +898,7 @@ def unlexify(s: str) -> str: ], ) def test_literal_construction_value_class( - lexical: str, literal_type: URIRef, value_cls: Optional[type] + lexical: str, literal_type: URIRef, value_cls: type | None ) -> None: literal = Literal(lexical, datatype=literal_type) if value_cls is not None: diff --git a/test/test_misc/test_input_source.py b/test/test_misc/test_input_source.py index 3673d94f7..3c67ac07d 100644 --- a/test/test_misc/test_input_source.py +++ b/test/test_misc/test_input_source.py @@ -113,7 +113,7 @@ class LocationParam(enum.Enum): @contextmanager def from_path( - self, path: Optional[Path], url: Optional[str] + self, path: Path | None, url: str | None ) -> Generator[str, None, None]: """ Yields a value of the type indicated by the enum value which provides the data from the file at ``path``. 
@@ -183,13 +183,13 @@ def from_path(self, path: Path) -> Generator[Union[bytes, str, dict], None, None @contextmanager def call_create_input_source( input: Union[HTTPFileInfo, Path], - source_param: Optional[SourceParam] = None, + source_param: SourceParam | None = None, # source_slot: SourceSlot, - public_id: Optional[str] = None, - location_param: Optional[LocationParam] = None, - file_param: Optional[FileParam] = None, - data_param: Optional[DataParam] = None, - format: Optional[str] = None, + public_id: str | None = None, + location_param: LocationParam | None = None, + file_param: FileParam | None = None, + data_param: DataParam | None = None, + format: str | None = None, ) -> Generator[InputSource, None, None]: """ Calls create_input_source() with parameters of the specified types. @@ -203,10 +203,10 @@ def call_create_input_source( data_param, ) - source: Optional[SourceParamType] = None - location: Optional[str] = None - file: Optional[FileParamType] = None - data: Optional[DataParamType] = None + source: SourceParamType | None = None + location: str | None = None + file: FileParamType | None = None + data: DataParamType | None = None input_url = None if isinstance(input, HTTPFileInfo): @@ -273,9 +273,9 @@ class InputSourceChecker: type_: type[InputSource] stream_check: StreamCheck - encoding: Optional[Holder[Optional[str]]] - public_id: Optional[str] - system_id: Optional[str] + encoding: Holder[str | None] | None + public_id: str | None + system_id: str | None # extra_checks: list[Callable[[InputSource], None]] = field(factory=list) def check( @@ -386,13 +386,13 @@ class CreateInputSourceTestParams: """ input_path: Path - source_param: Optional[SourceParam] - public_id: Optional[str] - location_param: Optional[LocationParam] - file_param: Optional[FileParam] - data_param: Optional[DataParam] - format: Optional[str] - expected_result: Union[ExceptionChecker, InputSourceChecker] + source_param: SourceParam | None + public_id: str | None + location_param: 
LocationParam | None + file_param: FileParam | None + data_param: DataParam | None + format: str | None + expected_result: ExceptionChecker | InputSourceChecker def as_tuple(self) -> CreateInputSourceTestParamsTuple: return ( @@ -433,7 +433,7 @@ def as_pytest_param( marks: Union[ pytest.MarkDecorator, Collection[Union[pytest.MarkDecorator, pytest.Mark]] ] = (), - id: Optional[str] = None, + id: str | None = None, ) -> ParameterSet: if id is None: id = f"{self.input_path.as_posix()}:source_param={self.source_param}:public_id={self.public_id}:location_param={self.location_param}:file_param={self.file_param}:data_param={self.data_param}:format={self.format}:{self.expected_result}" @@ -490,10 +490,10 @@ def generate_create_input_source_cases() -> Iterable[ParameterSet]: def make_params( param: enum.Enum, stream_check: StreamCheck, - expected_encoding: Optional[Holder[Optional[str]]], - format: Optional[str] = default_format, - id: Optional[str] = None, - public_id: Optional[str] = None, + expected_encoding: Holder[str | None] | None, + format: str | None = default_format, + id: str | None = None, + public_id: str | None = None, marks: Union[ pytest.MarkDecorator, Collection[Union[pytest.MarkDecorator, pytest.Mark]] ] = (), @@ -535,7 +535,7 @@ def make_params( # These do not have working characther streams. Maybe they # should, but they don't. 
continue - expected_encoding: Optional[Holder[Optional[str]]] + expected_encoding: Holder[str | None] | None if param in ( SourceParam.PATH, SourceParam.PATH_STRING, @@ -640,8 +640,8 @@ def test_create_input_source( logging.info("expected_result = %s", test_params.expected_result) - catcher: Optional[pytest.ExceptionInfo[Exception]] = None - input_source: Optional[InputSource] = None + catcher: pytest.ExceptionInfo[Exception] | None = None + input_source: InputSource | None = None with ExitStack() as xstack: if isinstance(test_params.expected_result, ExceptionChecker): catcher = xstack.enter_context(test_params.expected_result.context()) diff --git a/test/test_misc/test_networking_redirect.py b/test/test_misc/test_networking_redirect.py index f9c8c6df7..fa18a10d0 100644 --- a/test/test_misc/test_networking_redirect.py +++ b/test/test_misc/test_networking_redirect.py @@ -3,7 +3,7 @@ from collections.abc import Iterable from contextlib import ExitStack from copy import deepcopy -from typing import Any, Optional, TypeVar, Union +from typing import Any, TypeVar, Union from urllib.error import HTTPError from urllib.request import HTTPRedirectHandler, Request @@ -196,8 +196,8 @@ def test_make_redirect_request( """ `_make_redirect_request` correctly handles redirects. 
""" - catcher: Optional[pytest.ExceptionInfo[Exception]] = None - result: Optional[Request] = None + catcher: pytest.ExceptionInfo[Exception] | None = None + result: Request | None = None with ExitStack() as stack: if isinstance(expected_result, ExceptionChecker): catcher = stack.enter_context(expected_result.context()) diff --git a/test/test_namespace/test_definednamespace.py b/test/test_namespace/test_definednamespace.py index f9a853af2..f6d20871f 100644 --- a/test/test_namespace/test_definednamespace.py +++ b/test/test_namespace/test_definednamespace.py @@ -8,7 +8,6 @@ from contextlib import ExitStack from dataclasses import dataclass from pathlib import Path -from typing import Optional import pytest @@ -260,7 +259,7 @@ class DFNSWarnFail(DefinedNamespace): @dataclass class DFNSInfo: dfns: type[DefinedNamespace] - suffix: Optional[str] + suffix: str | None has_attrs: bool = True @@ -329,7 +328,7 @@ def test_value(dfns: type[DefinedNamespace], attr_name: str, is_defined: bool) - dfns_info = get_dfns_info(dfns) if dfns_info.has_attrs is False: is_defined = False - resolved: Optional[str] = None + resolved: str | None = None with ExitStack() as xstack: warnings_record = xstack.enter_context(warnings.catch_warnings(record=True)) if dfns_info.suffix is None or (not is_defined and dfns._fail is True): @@ -390,7 +389,7 @@ def test_hasattr( logging.debug("dfns_info = %s", dfns_info) if dfns_info.has_attrs is False: is_defined = False - has_attr: Optional[bool] = None + has_attr: bool | None = None has_attr = hasattr(dfns, attr_name) if dfns_info.suffix is not None and (is_defined or dfns._fail is False): assert has_attr is True @@ -400,7 +399,7 @@ def test_hasattr( def test_dir(dfns: type[DefinedNamespace]) -> None: dfns_info = get_dfns_info(dfns) - does_contain: Optional[bool] = None + does_contain: bool | None = None with ExitStack() as xstack: # dir should work for DefinedNamespace as this is called by sphinx to # document it. 
diff --git a/test/test_namespace/test_namespace.py b/test/test_namespace/test_namespace.py index 409d703f3..36c04486b 100644 --- a/test/test_namespace/test_namespace.py +++ b/test/test_namespace/test_namespace.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Optional +from typing import Any from warnings import warn import pytest @@ -316,7 +316,7 @@ def test_expand_curie( checker = OutcomeChecker.from_primitive(expected_result) - result: Optional[URIRef] = None + result: URIRef | None = None with checker.context(): result = g.namespace_manager.expand_curie(curie) checker.check(result) diff --git a/test/test_namespace/test_namespacemanager.py b/test/test_namespace/test_namespacemanager.py index 8e0dde1ce..d3fc94951 100644 --- a/test/test_namespace/test_namespacemanager.py +++ b/test/test_namespace/test_namespacemanager.py @@ -3,7 +3,7 @@ import logging from collections.abc import Mapping from contextlib import ExitStack -from typing import TYPE_CHECKING, Any, Optional, Union +from typing import TYPE_CHECKING, Any, Union import pytest @@ -135,7 +135,7 @@ def test_invalid_selector() -> None: def check_graph_ns( graph: Graph, expected_nsmap: dict[str, Any], - check_namespaces: Optional[NamespaceSet] = None, + check_namespaces: NamespaceSet | None = None, ) -> None: expected_namespaces = { (prefix, URIRef(f"{uri}")) for prefix, uri in expected_nsmap.items() @@ -164,7 +164,7 @@ def test_graph_bind_namespaces( selector: Any, expected_result: Union[dict[str, Any], type[Exception]], ) -> None: - namespaces: Optional[NamespaceSet] = None + namespaces: NamespaceSet | None = None with ExitStack() as xstack: if not isinstance(expected_result, dict): xstack.enter_context(pytest.raises(expected_result)) @@ -284,7 +284,7 @@ def test_nman_bind_namespaces( ], ) def test_bound_namespaces_subset( - selector: Optional[Any], expected_bindings: dict[str, str] + selector: Any | None, expected_bindings: dict[str, str] ) -> None: if selector is not None: graph 
= Graph(bind_namespaces=selector) @@ -366,9 +366,9 @@ def test_compute_qname( uri: str, generate: bool, bind_namespaces: _NamespaceSetString, - manager_prefixes: Optional[Mapping[str, Namespace]], - graph_prefixes: Optional[Mapping[str, Namespace]], - store_prefixes: Optional[Mapping[str, Namespace]], + manager_prefixes: Mapping[str, Namespace] | None, + graph_prefixes: Mapping[str, Namespace] | None, + store_prefixes: Mapping[str, Namespace] | None, expected_result: OutcomePrimitive[tuple[str, URIRef, str]], ) -> None: """ @@ -434,7 +434,7 @@ def test_compute_qname_strict( uri: str, generate: bool, bind_namespaces: _NamespaceSetString, - additional_prefixes: Optional[Mapping[str, Namespace]], + additional_prefixes: Mapping[str, Namespace] | None, expected_result: OutcomePrimitive[tuple[str, str, str]], ) -> None: graph = Graph(bind_namespaces=bind_namespaces) @@ -545,7 +545,7 @@ def test_expand_curie( def test_generate_curie( test_nsm_function: NamespaceManager, uri: str, - generate: Optional[bool], + generate: bool | None, expected_result: OutcomePrimitive[str], ) -> None: """ diff --git a/test/test_roundtrip.py b/test/test_roundtrip.py index 593252689..d05bc98ef 100644 --- a/test/test_roundtrip.py +++ b/test/test_roundtrip.py @@ -27,7 +27,7 @@ import os.path from collections.abc import Iterable from pathlib import Path -from typing import Callable, Optional, Union +from typing import Callable, Union from xml.sax import SAXParseException import pytest @@ -210,7 +210,7 @@ def collect_files( - directory: Path, exclude_names: Optional[set[str]] = None, pattern: str = "**/*" + directory: Path, exclude_names: set[str] | None = None, pattern: str = "**/*" ) -> list[tuple[Path, str]]: result = [] for path in directory.glob(pattern): @@ -236,7 +236,7 @@ def roundtrip( testfmt: str, source: Path, graph_type: type[Graph] = ConjunctiveGraph, - checks: Optional[set[Check]] = None, + checks: set[Check] | None = None, same_public_id: bool = False, ) -> None: g1 = graph_type() 
@@ -298,7 +298,7 @@ def roundtrip( logger.debug("OK") -_formats: Optional[set[str]] = None +_formats: set[str] | None = None def get_formats() -> set[str]: @@ -314,9 +314,9 @@ def get_formats() -> set[str]: def make_cases( files: Iterable[tuple[Path, str]], - formats: Optional[set[str]] = None, + formats: set[str] | None = None, hext_okay: bool = False, - checks: Optional[set[Check]] = None, + checks: set[Check] | None = None, graph_type: type[Graph] = ConjunctiveGraph, same_public_id: bool = False, ) -> Iterable[ParameterSet]: diff --git a/test/test_serializers/test_serializer.py b/test/test_serializers/test_serializer.py index 61f769968..dfcba36e4 100644 --- a/test/test_serializers/test_serializer.py +++ b/test/test_serializers/test_serializer.py @@ -12,7 +12,6 @@ from pathlib import Path, PosixPath, PurePath from typing import ( IO, - Optional, TextIO, Union, cast, @@ -302,10 +301,10 @@ class GraphFormatInfo: name: GraphFormat graph_types: set[GraphType] encodings: set[str] - serializer_list: Optional[list[str]] = field( + serializer_list: list[str] | None = field( default=None, repr=False, hash=False, compare=False ) - deserializer_list: Optional[list[str]] = field( + deserializer_list: list[str] | None = field( default=None, repr=False, hash=False, compare=False ) serializers: list[str] = field(default_factory=list, init=False) @@ -370,14 +369,14 @@ def make_serialize_parse_tests() -> Generator[ParameterSet, None, None]: This function generates test parameters for test_serialize_parse. 
""" xfails: dict[ - tuple[str, GraphType, DestinationType, Optional[str]], + tuple[str, GraphType, DestinationType, str | None], Union[MarkDecorator, Mark], ] = {} for serializer_name, destination_type in itertools.product( serializer_dict.keys(), DESTINATION_TYPES ): format = serializer_dict[serializer_name] - encodings: set[Optional[str]] = {*format.info.encodings, None} + encodings: set[str | None] = {*format.info.encodings, None} for encoding, graph_type in itertools.product( encodings, format.info.graph_types ): @@ -430,7 +429,7 @@ def test_serialize_parse( tmp_path: Path, simple_graph: Graph, simple_dataset: Dataset, - args: tuple[str, GraphType, DestinationType, Optional[str]], + args: tuple[str, GraphType, DestinationType, str | None], ) -> None: """ Serialization works correctly with the given arguments and generates output @@ -489,7 +488,7 @@ def check_serialized(format: GraphFormat, graph: Graph, data: str) -> None: @dataclass class SerializeArgs: format: str - opt_dest_ref: Optional[DestRef] + opt_dest_ref: DestRef | None @property def dest_ref(self) -> DestRef: @@ -645,7 +644,7 @@ def test_serialize_to_strdest( encoding = "utf-8" def path_factory( - tmp_path: Path, type: DestinationType, encoding: Optional[str] + tmp_path: Path, type: DestinationType, encoding: str | None ) -> Path: return tmp_path / f"{name_prefix}file-{type.name}-{encoding}" diff --git a/test/test_sparql/test_result.py b/test/test_sparql/test_result.py index 8d69058a9..438aff9da 100644 --- a/test/test_sparql/test_result.py +++ b/test/test_sparql/test_result.py @@ -15,7 +15,6 @@ IO, TYPE_CHECKING, BinaryIO, - Optional, TextIO, Union, ) @@ -251,7 +250,7 @@ def serialize_select(select_result: Result, format: str, encoding: str) -> bytes def make_select_result_parse_serialized_tests() -> Iterator[ParameterSet]: - xfails: dict[tuple[str, Optional[SourceType], str], Union[MarkDecorator, Mark]] = {} + xfails: dict[tuple[str, SourceType | None, str], MarkDecorator | Mark] = {} format_infos = 
[ format_info for format_info in ResultFormat.info_set() @@ -361,7 +360,7 @@ def test_serialize_to_strdest( encoding = "utf-8" def path_factory( - tmp_path: Path, type: DestinationType, encoding: Optional[str] + tmp_path: Path, type: DestinationType, encoding: str | None ) -> Path: return tmp_path / f"{name_prefix}file-{type.name}-{encoding}" diff --git a/test/test_store/test_store_berkeleydb.py b/test/test_store/test_store_berkeleydb.py index 513877936..8b2f65679 100644 --- a/test/test_store/test_store_berkeleydb.py +++ b/test/test_store/test_store_berkeleydb.py @@ -3,7 +3,6 @@ import logging import tempfile from collections.abc import Iterable -from typing import Optional import pytest @@ -159,7 +158,7 @@ def test_multigraph(get_graph: tuple[str, ConjunctiveGraph]): def test_open_shut(get_graph: tuple[str, ConjunctiveGraph]): - g: Optional[ConjunctiveGraph] + g: ConjunctiveGraph | None path, g = get_graph assert len(g) == 3, "Initially we must have 3 triples from setUp" g.close() diff --git a/test/test_store/test_store_sparqlstore_query.py b/test/test_store/test_store_sparqlstore_query.py index d0f46bc51..d17823087 100644 --- a/test/test_store/test_store_sparqlstore_query.py +++ b/test/test_store/test_store_sparqlstore_query.py @@ -3,7 +3,6 @@ import itertools import logging from collections.abc import Iterable -from typing import Optional import pytest from _pytest.mark.structures import ParameterSet @@ -42,7 +41,7 @@ def make_test_query_construct_format_cases() -> Iterable[ParameterSet]: ), ) ] - response_format_encodings: list[tuple[str, str, set[Optional[str]]]] = [ + response_format_encodings: list[tuple[str, str, set[str | None]]] = [ ( "application/rdf+xml", "utf-8", diff --git a/test/test_store/test_store_sparqlstore_sparqlconnector.py b/test/test_store/test_store_sparqlstore_sparqlconnector.py index acc14389f..9e3e97444 100644 --- a/test/test_store/test_store_sparqlstore_sparqlconnector.py +++ b/test/test_store/test_store_sparqlstore_sparqlconnector.py 
@@ -2,7 +2,6 @@ import json import logging -from typing import Optional import pytest @@ -20,7 +19,7 @@ ], ) def test_query_url_construct_format( - function_httpmock: ServedBaseHTTPServerMock, graph_identifier: Optional[str] + function_httpmock: ServedBaseHTTPServerMock, graph_identifier: str | None ) -> None: """ This tests that query string params (foo & bar) are appended to the endpoint diff --git a/test/test_tools/test_chunk_serializer.py b/test/test_tools/test_chunk_serializer.py index c3f2ef70b..821bbbe9b 100644 --- a/test/test_tools/test_chunk_serializer.py +++ b/test/test_tools/test_chunk_serializer.py @@ -4,7 +4,7 @@ import os from contextlib import ExitStack from pathlib import Path -from typing import TYPE_CHECKING, Any, Optional, Union +from typing import TYPE_CHECKING, Any, Union import pytest @@ -91,7 +91,7 @@ def test_chuking( max_file_size_kb: Union[ellipsis, int, None], write_prefixes: bool, set_output_dir: bool, - expected_file_count: Optional[Union[int, tuple[Optional[int], Optional[int]]]], + expected_file_count: int | tuple[int | None, int | None] | None, ) -> None: test_graph = cached_graph((test_graph_path,)) kwargs: dict[str, Any] = {"write_prefixes": write_prefixes} diff --git a/test/test_util.py b/test/test_util.py index 53007f41a..1c03e3560 100644 --- a/test/test_util.py +++ b/test/test_util.py @@ -4,7 +4,7 @@ import time from contextlib import ExitStack from pathlib import Path -from typing import TYPE_CHECKING, Any, Optional, Union +from typing import TYPE_CHECKING, Any, Union import pytest @@ -385,7 +385,7 @@ def test__coalesce_typing() -> None: type checking for _coalesce behaves as expected. 
""" str_value: str - optional_str_value: Optional[str] + optional_str_value: str | None optional_str_value = _coalesce(None, "a", None) assert optional_str_value == "a" @@ -438,10 +438,10 @@ def test__coalesce_typing() -> None: def test_find_roots( graph_sources: tuple[Path, ...], prop: URIRef, - roots: Optional[set[_SubjectType | _ObjectType]], + roots: set[_SubjectType | _ObjectType] | None, expected_result: Union[set[URIRef], type[Exception]], ) -> None: - catcher: Optional[pytest.ExceptionInfo[Exception]] = None + catcher: pytest.ExceptionInfo[Exception] | None = None graph = cached_graph(graph_sources) @@ -566,7 +566,7 @@ def test_get_tree( dir: str, expected_result: Union[tuple[IdentifiedNode, list[Any]], type[Exception]], ) -> None: - catcher: Optional[pytest.ExceptionInfo[Exception]] = None + catcher: pytest.ExceptionInfo[Exception] | None = None graph = cached_graph(graph_sources) @@ -668,7 +668,7 @@ def test_iri2uri(iri: str, expected_result: Union[set[str], type[Exception]]) -> """ Tests that """ - catcher: Optional[pytest.ExceptionInfo[Exception]] = None + catcher: pytest.ExceptionInfo[Exception] | None = None with ExitStack() as xstack: if isinstance(expected_result, type) and issubclass(expected_result, Exception): diff --git a/test/test_w3c_spec/test_nquads_w3c.py b/test/test_w3c_spec/test_nquads_w3c.py index ae67213e0..86a38c0cb 100644 --- a/test/test_w3c_spec/test_nquads_w3c.py +++ b/test/test_w3c_spec/test_nquads_w3c.py @@ -5,7 +5,6 @@ import logging from contextlib import ExitStack -from typing import Optional import pytest @@ -35,7 +34,7 @@ def check_entry(entry: ManifestEntry) -> None: logger.debug( "action = %s\n%s", action_path, action_path.read_text(encoding=ENCODING) ) - catcher: Optional[pytest.ExceptionInfo[Exception]] = None + catcher: pytest.ExceptionInfo[Exception] | None = None dataset = Dataset() with ExitStack() as xstack: if entry.type_ == RDFT.TestNQuadsNegativeSyntax: diff --git a/test/test_w3c_spec/test_nt_w3c.py 
b/test/test_w3c_spec/test_nt_w3c.py index 263b08a53..d040295a4 100644 --- a/test/test_w3c_spec/test_nt_w3c.py +++ b/test/test_w3c_spec/test_nt_w3c.py @@ -5,7 +5,6 @@ import logging from contextlib import ExitStack -from typing import Optional import pytest @@ -35,7 +34,7 @@ def check_entry(entry: ManifestEntry) -> None: logger.debug( "action = %s\n%s", action_path, action_path.read_text(encoding=ENCODING) ) - catcher: Optional[pytest.ExceptionInfo[Exception]] = None + catcher: pytest.ExceptionInfo[Exception] | None = None graph = Graph() with ExitStack() as xstack: if entry.type_ == RDFT.TestNTriplesNegativeSyntax: diff --git a/test/test_w3c_spec/test_rdfxml_w3c.py b/test/test_w3c_spec/test_rdfxml_w3c.py index 560988924..76221f784 100644 --- a/test/test_w3c_spec/test_rdfxml_w3c.py +++ b/test/test_w3c_spec/test_rdfxml_w3c.py @@ -2,7 +2,6 @@ import logging from contextlib import ExitStack -from typing import Optional import pytest @@ -32,7 +31,7 @@ def check_entry(entry: ManifestEntry) -> None: logger.debug( "action = %s\n%s", action_path, action_path.read_text(encoding=ENCODING) ) - catcher: Optional[pytest.ExceptionInfo[Exception]] = None + catcher: pytest.ExceptionInfo[Exception] | None = None graph = Graph() with ExitStack() as xstack: if entry.type_ == RDFT.TestXMLNegativeSyntax: diff --git a/test/test_w3c_spec/test_trig_w3c.py b/test/test_w3c_spec/test_trig_w3c.py index faca987b0..dc73e8f2a 100644 --- a/test/test_w3c_spec/test_trig_w3c.py +++ b/test/test_w3c_spec/test_trig_w3c.py @@ -6,7 +6,6 @@ import logging from contextlib import ExitStack -from typing import Optional import pytest @@ -46,7 +45,7 @@ def check_entry(entry: ManifestEntry) -> None: logger.debug( "action = %s\n%s", action_path, action_path.read_text(encoding=ENCODING) ) - catcher: Optional[pytest.ExceptionInfo[Exception]] = None + catcher: pytest.ExceptionInfo[Exception] | None = None dataset = Dataset() with ExitStack() as xstack: if entry.type_ in (RDFT.TestTrigNegativeSyntax, 
RDFT.TestTrigNegativeEval): diff --git a/test/test_w3c_spec/test_turtle_w3c.py b/test/test_w3c_spec/test_turtle_w3c.py index 490e6a1fc..59194b0b0 100644 --- a/test/test_w3c_spec/test_turtle_w3c.py +++ b/test/test_w3c_spec/test_turtle_w3c.py @@ -5,7 +5,6 @@ import logging from contextlib import ExitStack -from typing import Optional import pytest @@ -45,7 +44,7 @@ def check_entry(entry: ManifestEntry) -> None: logger.debug( "action = %s\n%s", action_path, action_path.read_text(encoding=ENCODING) ) - catcher: Optional[pytest.ExceptionInfo[Exception]] = None + catcher: pytest.ExceptionInfo[Exception] | None = None graph = Graph() with ExitStack() as xstack: if entry.type_ in (RDFT.TestTurtleNegativeSyntax, RDFT.TestTurtleNegativeEval): diff --git a/test/utils/__init__.py b/test/utils/__init__.py index 1552f0af5..db57c92c2 100644 --- a/test/utils/__init__.py +++ b/test/utils/__init__.py @@ -317,7 +317,7 @@ def format_graph_set(cls, graph: Graph, indent: int = 1, sort: bool = False) -> @classmethod def assert_isomorphic( - cls, lhs: Graph, rhs: Graph, message: Optional[str] = None + cls, lhs: Graph, rhs: Graph, message: str | None = None ) -> None: """ This asserts that the two graphs are isomorphic, providing a nicely @@ -325,7 +325,7 @@ def assert_isomorphic( """ # TODO FIXME: This should possibly raise an error when used on a ConjunctiveGraph - def format_report(message: Optional[str] = None) -> str: + def format_report(message: str | None = None) -> str: in_both, in_lhs, in_rhs = rdflib.compare.graph_diff(lhs, rhs) preamle = "" if message is None else f"{message}\n" return ( @@ -345,7 +345,7 @@ def assert_cgraph_isomorphic( lhs: ConjunctiveGraph, rhs: ConjunctiveGraph, exclude_bnodes: bool, - message: Optional[str] = None, + message: str | None = None, ) -> None: def get_contexts(cgraph: ConjunctiveGraph) -> dict[URIRef, Graph]: result = {} @@ -459,9 +459,9 @@ def pytest_mark_filter( def affix_tuples( - prefix: Optional[tuple[Any, ...]], + prefix: tuple[Any, ...] 
| None, tuples: Iterable[tuple[Any, ...]], - suffix: Optional[tuple[Any, ...]], + suffix: tuple[Any, ...] | None, ) -> Generator[tuple[Any, ...], None, None]: if prefix is None: prefix = tuple() @@ -477,7 +477,7 @@ def ensure_suffix(value: str, suffix: str) -> str: return value -def idfns(*idfns: Callable[[Any], Optional[str]]) -> Callable[[Any], Optional[str]]: +def idfns(*idfns: Callable[[Any], str | None]) -> Callable[[Any], str | None]: """ Returns an ID function which will try each of the provided ID functions in order. @@ -487,7 +487,7 @@ def idfns(*idfns: Callable[[Any], Optional[str]]) -> Callable[[Any], Optional[st functions. """ - def _idfns(value: Any) -> Optional[str]: + def _idfns(value: Any) -> str | None: for idfn in idfns: result = idfn(value) if result is not None: diff --git a/test/utils/dawg_manifest.py b/test/utils/dawg_manifest.py index f4686021f..fdaa6bb04 100644 --- a/test/utils/dawg_manifest.py +++ b/test/utils/dawg_manifest.py @@ -40,9 +40,9 @@ class ManifestEntry: manifest: Manifest identifier: URIRef type_: IdentifiedNode = field(init=False) - action: Optional[IdentifiedNode] = field(init=False) - result: Optional[IdentifiedNode] = field(init=False) - result_cardinality: Optional[URIRef] = field(init=False) + action: IdentifiedNode | None = field(init=False) + result: IdentifiedNode | None = field(init=False) + result_cardinality: URIRef | None = field(init=False) def __post_init__(self) -> None: type_ = self.value(RDF.type, IdentifiedNode) @@ -65,8 +65,8 @@ def uri_mapper(self) -> URIMapper: def param( self, - mark_dict: Optional[MarksDictType] = None, - markers: Optional[Iterable[ManifestEntryMarkerType]] = None, + mark_dict: MarksDictType | None = None, + markers: Iterable[ManifestEntryMarkerType] | None = None, ) -> ParameterSet: id = f"{self.identifier}" marks: MarkListType = [] @@ -82,7 +82,7 @@ def param( def value( self, predicate: _PredicateType, value_type: type[IdentifierT] - ) -> Optional[IdentifierT]: + ) -> IdentifierT | 
None: value = self.graph.value(self.identifier, predicate) if value is not None: assert isinstance(value, value_type) @@ -100,14 +100,14 @@ class Manifest: uri_mapper: URIMapper graph: Graph identifier: IdentifiedNode - report_prefix: Optional[str] = None + report_prefix: str | None = None @classmethod def from_graph( cls, uri_mapper: URIMapper, graph: Graph, - report_prefix: Optional[str] = None, + report_prefix: str | None = None, ) -> Generator[Manifest, None, None]: for identifier in graph.subjects(RDF.type, MF.Manifest): assert isinstance(identifier, IdentifiedNode) @@ -125,7 +125,7 @@ def from_sources( cls, uri_mapper: URIMapper, *sources: GraphSourceType, - report_prefix: Optional[str] = None, + report_prefix: str | None = None, ) -> Generator[Manifest, None, None]: for source in sources: logging.debug("source(%s) = %r", id(source), source) @@ -161,8 +161,8 @@ def included(self) -> Generator[Manifest, None, None]: def entires( self, entry_type: type[ManifestEntryT], - exclude: Optional[POFiltersType] = None, - include: Optional[POFiltersType] = None, + exclude: POFiltersType | None = None, + include: POFiltersType | None = None, ) -> Generator[ManifestEntryT, None, None]: for entries in self.graph.objects(self.identifier, MF.entries): for entry_iri in self.graph.items(entries): @@ -177,10 +177,10 @@ def entires( def params( self, entry_type: type[ManifestEntryT], - exclude: Optional[POFiltersType] = None, - include: Optional[POFiltersType] = None, - mark_dict: Optional[MarksDictType] = None, - markers: Optional[Iterable[ManifestEntryMarkerType]] = None, + exclude: POFiltersType | None = None, + include: POFiltersType | None = None, + mark_dict: MarksDictType | None = None, + markers: Iterable[ManifestEntryMarkerType] | None = None, ) -> Generator[ParameterSet, None, None]: for entry in self.entires(entry_type, exclude, include): yield entry.param(mark_dict, markers) @@ -190,11 +190,11 @@ def params_from_sources( uri_mapper: URIMapper, entry_type: 
type[ManifestEntryT], *sources: GraphSourceType, - exclude: Optional[POFiltersType] = None, - include: Optional[POFiltersType] = None, - mark_dict: Optional[MarksDictType] = None, - markers: Optional[Iterable[ManifestEntryMarkerType]] = None, - report_prefix: Optional[str] = None, + exclude: POFiltersType | None = None, + include: POFiltersType | None = None, + mark_dict: MarksDictType | None = None, + markers: Iterable[ManifestEntryMarkerType] | None = None, + report_prefix: str | None = None, ) -> Generator[ParameterSet, None, None]: for manifest in Manifest.from_sources( uri_mapper, *sources, report_prefix=report_prefix diff --git a/test/utils/destination.py b/test/utils/destination.py index 11fe104e6..0b5dda57b 100644 --- a/test/utils/destination.py +++ b/test/utils/destination.py @@ -5,7 +5,7 @@ from contextlib import contextmanager from dataclasses import dataclass from pathlib import Path, PurePath -from typing import IO, Callable, Optional, TextIO, Union +from typing import IO, Callable, TextIO, Union DestParmType = Union[Path, PurePath, str, IO[bytes], TextIO] @@ -29,11 +29,11 @@ class DestinationType(str, enum.Enum): def make_ref( self, tmp_path: Path, - encoding: Optional[str] = None, - path_factory: Callable[[Path, DestinationType, Optional[str]], Path] = ( + encoding: str | None = None, + path_factory: Callable[[Path, DestinationType, str | None], Path] = ( lambda tmp_path, type, encoding: tmp_path / f"file-{type.name}-{encoding}" ), - ) -> Generator[Optional[DestRef], None, None]: + ) -> Generator[DestRef | None, None, None]: path = path_factory(tmp_path, self, encoding) # path = tmp_path / f"file-{self.name}" if self is DestinationType.RETURN: diff --git a/test/utils/earl.py b/test/utils/earl.py index 1375d2f52..ea11000b6 100644 --- a/test/utils/earl.py +++ b/test/utils/earl.py @@ -14,7 +14,6 @@ from typing import ( TYPE_CHECKING, Callable, - Optional, TypeVar, cast, ) @@ -62,7 +61,7 @@ class EARLReport: ) def add_test_outcome( - self, test_id: 
URIRef, outcome: URIRef, info: Optional[Literal] = None + self, test_id: URIRef, outcome: URIRef, info: Literal | None = None ): triples: set[_TripleType] = set() assertion = BNode(f"{test_id}") @@ -174,8 +173,8 @@ def pytest_addoption(parser: _pytest.config.argparsing.Parser): def convert_optional( - optional: Optional[FromT], converter: Callable[[FromT], ToT] -) -> Optional[ToT]: + optional: FromT | None, converter: Callable[[FromT], ToT] +) -> ToT | None: if optional is not None: return converter(optional) return None @@ -205,7 +204,7 @@ def pytest_configure(config: pytest.Config): def pytest_unconfigure(config: pytest.Config): - earl_reporter: Optional[EARLReporter] = config.pluginmanager.get_plugin( + earl_reporter: EARLReporter | None = config.pluginmanager.get_plugin( PYTEST_PLUGIN_NAME ) logger.debug("earl_reporter = %s", earl_reporter) @@ -225,7 +224,7 @@ class TestResult(enum.Enum): class TestReportHelper: @classmethod - def get_rdf_test_uri(cls, report: TestReport) -> Optional[URIRef]: + def get_rdf_test_uri(cls, report: TestReport) -> URIRef | None: return next( ( cast(URIRef, item[1]) @@ -236,7 +235,7 @@ def get_rdf_test_uri(cls, report: TestReport) -> Optional[URIRef]: ) @classmethod - def get_manifest_entry(cls, report: TestReport) -> Optional[ManifestEntry]: + def get_manifest_entry(cls, report: TestReport) -> ManifestEntry | None: return next( ( cast(ManifestEntry, item[1]) @@ -258,13 +257,13 @@ class EARLReporter: assertor_iri: URIRef output_dir: Path output_suffix: str - output_file: Optional[Path] = None - assertor_name: Optional[Literal] = None - assertor_homepage: Optional[URIRef] = None + output_file: Path | None = None + assertor_name: Literal | None = None + assertor_homepage: URIRef | None = None add_datetime: bool = True extra_triples: set[_TripleType] = field(default_factory=set) prefix_reports: dict[str, EARLReport] = field(init=True, default_factory=dict) - report: Optional[EARLReport] = field(init=True, default=None) + report: 
EARLReport | None = field(init=True, default=None) def __post_init__(self) -> None: if self.assertor_homepage is not None: @@ -320,7 +319,7 @@ def make_report_with_prefix(self, report_prefix: str) -> EARLReport: output_file = self.output_dir / f"{report_prefix}{self.output_suffix}.ttl" return EARLReport(self, output_file) - def get_report_for(self, entry: Optional[ManifestEntry]) -> Optional[EARLReport]: + def get_report_for(self, entry: ManifestEntry | None) -> EARLReport | None: if self.report: return self.report if entry is None: @@ -364,8 +363,8 @@ def pytest_runtest_makereport( @classmethod def get_rdf_test_uri( - cls, rdf_test_uri: Optional[URIRef], manifest_entry: Optional[ManifestEntry] - ) -> Optional[URIRef]: + cls, rdf_test_uri: URIRef | None, manifest_entry: ManifestEntry | None + ) -> URIRef | None: if rdf_test_uri is not None: return rdf_test_uri if manifest_entry is not None: diff --git a/test/utils/graph.py b/test/utils/graph.py index dda5d7dca..8533e521b 100644 --- a/test/utils/graph.py +++ b/test/utils/graph.py @@ -5,7 +5,7 @@ from functools import lru_cache from pathlib import Path from runpy import run_path -from typing import Any, Optional, Union +from typing import Any, Union import rdflib.util import test.data @@ -21,11 +21,11 @@ class GraphSource: path: Path format: str - public_id: Optional[str] = None + public_id: str | None = None @classmethod - def guess_format(cls, path: Path) -> Optional[str]: - format: Optional[str] + def guess_format(cls, path: Path) -> str | None: + format: str | None if path.suffix == ".py": format = "python" else: @@ -34,7 +34,7 @@ def guess_format(cls, path: Path) -> Optional[str]: @classmethod def from_path( - cls, path: Path, public_id: Optional[str] = None, format: Optional[str] = None + cls, path: Path, public_id: str | None = None, format: str | None = None ) -> GraphSource: if format is None: format = cls.guess_format(path) @@ -51,7 +51,7 @@ def from_paths(cls, *paths: Path) -> tuple[GraphSource, ...]: 
@classmethod def from_source( - cls, source: GraphSourceType, public_id: Optional[str] = None + cls, source: GraphSourceType, public_id: str | None = None ) -> GraphSource: logging.debug("source(%s) = %r", id(source), source) if isinstance(source, Path): @@ -65,8 +65,8 @@ def public_id_or_path_uri(self) -> str: def load( self, - graph: Optional[_GraphT] = None, - public_id: Optional[str] = None, + graph: _GraphT | None = None, + public_id: str | None = None, # type error: Incompatible default for argument "graph_type" (default has type "type[Graph]", argument has type "tpe[_GraphT]") # see https://github.com/python/mypy/issues/3737 graph_type: type[_GraphT] = Graph, # type: ignore[assignment] @@ -84,7 +84,7 @@ def load( return graph @classmethod - def idfn(cls, val: Any) -> Optional[str]: + def idfn(cls, val: Any) -> str | None: """ ID function for GraphSource objects. @@ -103,8 +103,8 @@ def idfn(cls, val: Any) -> Optional[str]: def load_sources( *sources: GraphSourceType, - graph: Optional[_GraphT] = None, - public_id: Optional[str] = None, + graph: _GraphT | None = None, + public_id: str | None = None, graph_type: type[_GraphT] = Graph, # type: ignore[assignment] ) -> _GraphT: if graph is None: @@ -117,7 +117,7 @@ def load_sources( @lru_cache(maxsize=None) def cached_graph( sources: tuple[Union[GraphSource, Path], ...], - public_id: Optional[str] = None, + public_id: str | None = None, graph_type: type[_GraphT] = Graph, # type: ignore[assignment] ) -> _GraphT: return load_sources(*sources, public_id=public_id, graph_type=graph_type) @@ -125,7 +125,7 @@ def cached_graph( def load_from_python( path: Path, - graph: Optional[_GraphT] = None, + graph: _GraphT | None = None, graph_type: type[_GraphT] = Graph, # type: ignore[assignment] ) -> _GraphT: if graph is None: diff --git a/test/utils/http.py b/test/utils/http.py index ba5b9f07d..cd97b47ff 100644 --- a/test/utils/http.py +++ b/test/utils/http.py @@ -10,7 +10,6 @@ from threading import Thread from typing import ( 
NamedTuple, - Optional, TypeVar, Union, ) @@ -58,7 +57,7 @@ class MockHTTPRequest(NamedTuple): parsed_path: ParseResult path_query: PathQueryT headers: email.message.Message - body: Optional[bytes] + body: bytes | None MOCK_HTTP_REQUEST_WILDCARD = MockHTTPRequest( @@ -76,7 +75,7 @@ class MockHTTPResponse(NamedTuple): headers: HeadersT -def get_random_ip(ip_prefix: Optional[list[str]] = None) -> str: +def get_random_ip(ip_prefix: list[str] | None = None) -> str: if ip_prefix is None: parts = ["127"] for _ in range(4 - len(parts)): @@ -86,7 +85,7 @@ def get_random_ip(ip_prefix: Optional[list[str]] = None) -> str: @contextmanager def ctx_http_handler( - handler: type[BaseHTTPRequestHandler], host: Optional[str] = "127.0.0.1" + handler: type[BaseHTTPRequestHandler], host: str | None = "127.0.0.1" ) -> Iterator[HTTPServer]: host = get_random_ip() if host is None else host server = HTTPServer((host, 0), handler) diff --git a/test/utils/httpfileserver.py b/test/utils/httpfileserver.py index f8bb32fb0..1575cc8af 100644 --- a/test/utils/httpfileserver.py +++ b/test/utils/httpfileserver.py @@ -8,7 +8,6 @@ from functools import lru_cache from http.server import BaseHTTPRequestHandler, HTTPServer from pathlib import Path -from typing import Optional from urllib.parse import parse_qs, urljoin, urlparse from uuid import uuid4 @@ -129,7 +128,7 @@ def url(self) -> str: def add_file_with_caching( self, proto_file: ProtoFileResource, - proto_redirects: Optional[Sequence[ProtoRedirectResource]] = None, + proto_redirects: Sequence[ProtoRedirectResource] | None = None, suffix: str = "", ) -> HTTPFileInfo: return self.add_file(proto_file, proto_redirects, suffix) @@ -137,7 +136,7 @@ def add_file_with_caching( def add_file( self, proto_file: ProtoFileResource, - proto_redirects: Optional[Sequence[ProtoRedirectResource]] = None, + proto_redirects: Sequence[ProtoRedirectResource] | None = None, suffix: str = "", ) -> HTTPFileInfo: url_path = f"/file/{uuid4().hex}{suffix}" diff --git 
a/test/utils/httpservermock.py b/test/utils/httpservermock.py index 90cabbf52..1b3fe6948 100644 --- a/test/utils/httpservermock.py +++ b/test/utils/httpservermock.py @@ -10,7 +10,6 @@ from typing import ( TYPE_CHECKING, Any, - Optional, TypeVar, cast, ) @@ -123,7 +122,7 @@ def call_count(self) -> int: class ServedBaseHTTPServerMock( BaseHTTPServerMock, AbstractContextManager["ServedBaseHTTPServerMock"] ): - def __init__(self, host: Optional[str] = "127.0.0.1") -> None: + def __init__(self, host: str | None = "127.0.0.1") -> None: super().__init__() host = get_random_ip() if host is None else host self.server = HTTPServer((host, 0), self.Handler) @@ -152,9 +151,9 @@ def __enter__(self) -> ServedBaseHTTPServerMock: def __exit__( self, - __exc_type: Optional[type[BaseException]], - __exc_value: Optional[BaseException], - __traceback: Optional[TracebackType], + __exc_type: type[BaseException] | None, + __exc_value: BaseException | None, + __traceback: TracebackType | None, ) -> te.Literal[False]: self.stop() return False diff --git a/test/utils/iri.py b/test/utils/iri.py index 2bc673e16..e85fa729b 100644 --- a/test/utils/iri.py +++ b/test/utils/iri.py @@ -11,7 +11,7 @@ from dataclasses import dataclass from nturl2path import url2pathname as nt_url2pathname from pathlib import Path, PurePath, PurePosixPath, PureWindowsPath -from typing import Callable, Optional, TypeVar, Union +from typing import Callable, TypeVar, Union from urllib.parse import quote, unquote, urljoin, urlparse, urlsplit, urlunsplit from urllib.request import BaseHandler, OpenerDirector, Request from urllib.response import addinfourl @@ -24,7 +24,7 @@ def file_uri_to_path( file_uri: str, path_class: type[PurePathT] = PurePath, # type: ignore[assignment] - url2pathname: Optional[Callable[[str], str]] = None, + url2pathname: Callable[[str], str] | None = None, ) -> PurePathT: """ This function returns a pathlib.PurePath object for the supplied file URI. 
@@ -127,7 +127,7 @@ def to_remote(self, local: Union[str, PurePath]) -> str: return self._map(local, to_local=False) def _map(self, value: Union[str, PurePath], to_local: bool = True) -> str: - error: Optional[ValueError] = None + error: ValueError | None = None mapping: URIMapping uri = value.as_uri() if isinstance(value, PurePath) else value for mapping in self.mappings: diff --git a/test/utils/literal.py b/test/utils/literal.py index 75792f97a..861d1bfb2 100644 --- a/test/utils/literal.py +++ b/test/utils/literal.py @@ -3,7 +3,7 @@ import builtins import logging from dataclasses import dataclass -from typing import Any, Optional, Union +from typing import Any, Union from xml.dom.minidom import DocumentFragment from rdflib.term import Literal, URIRef @@ -46,7 +46,7 @@ def check(self, actual: Literal) -> None: assert self.datatype == actual.datatype, "Literal datatype does not match" -def literal_idfn(value: Any) -> Optional[str]: +def literal_idfn(value: Any) -> str | None: if callable(value): try: literal = value() diff --git a/test/utils/manifest.py b/test/utils/manifest.py index 4b2ff2100..edb1427a4 100644 --- a/test/utils/manifest.py +++ b/test/utils/manifest.py @@ -21,9 +21,9 @@ class RDFTest(NamedTuple): name: str comment: str data: Identifier - graphdata: Optional[GraphDataType] + graphdata: GraphDataType | None action: Identifier - result: Optional[ResultType] + result: ResultType | None syntax: bool @@ -75,8 +75,8 @@ def _str(x): name = g.value(e, MF.name) comment = g.value(e, RDFS.comment) data = None - graphdata: Optional[GraphDataType] = None - res: Optional[ResultType] = None + graphdata: GraphDataType | None = None + res: ResultType | None = None syntax = True if _type in (MF.QueryEvaluationTest, MF.CSVResultFormatTest): @@ -100,8 +100,8 @@ def _str(x): ) r = g.value(e, MF.result) - resdata: Optional[Node] = g.value(r, UT.data) - resgraphdata: list[tuple[Optional[Node], Optional[Node]]] = [] + resdata: Node | None = g.value(r, UT.data) + 
resgraphdata: list[tuple[Node | None, Node | None]] = [] for gd in g.objects(r, UT.graphData): resgraphdata.append( (g.value(gd, UT.graph), g.value(gd, RDFS.label)) diff --git a/test/utils/outcome.py b/test/utils/outcome.py index 706df4b59..5ed2a5206 100644 --- a/test/utils/outcome.py +++ b/test/utils/outcome.py @@ -11,7 +11,6 @@ Any, Generic, NoReturn, - Optional, TypeVar, Union, cast, @@ -57,7 +56,7 @@ def check(self, actual: AnyT) -> None: @contextlib.contextmanager @abc.abstractmethod - def context(self) -> Generator[Optional[ExceptionInfo[Exception]], None, None]: + def context(self) -> Generator[ExceptionInfo[Exception] | None, None, None]: """ The context in which the test code should run. @@ -91,7 +90,7 @@ def _from_special( type[Exception], Exception, ], - ) -> Optional[OutcomeChecker[AnyT]]: + ) -> OutcomeChecker[AnyT] | None: if isinstance(primitive, OutcomeChecker): return primitive if isinstance(primitive, type) and issubclass(primitive, Exception): @@ -188,8 +187,8 @@ class ExceptionChecker(OutcomeChecker[AnyT]): """ type: type[Exception] - match: Optional[Union[Pattern[str], str]] = None - attributes: Optional[dict[str, Any]] = None + match: Pattern[str] | str | None = None + attributes: dict[str, Any] | None = None def check(self, actual: AnyT) -> NoReturn: raise RuntimeError("ExceptionResult.check_result should never be called") diff --git a/test/utils/sparql_checker.py b/test/utils/sparql_checker.py index 4e1c1083e..d76831b96 100644 --- a/test/utils/sparql_checker.py +++ b/test/utils/sparql_checker.py @@ -52,14 +52,14 @@ class QueryType(enum.Enum): @dataclass class TypeInfo: id: Identifier - query_type: Optional[QueryType] + query_type: QueryType | None syntax: bool = False skipped: bool = False negative: bool = False ns: Union[type[QT], type[UT], None] = field(init=False, default=None) - query_property: Optional[URIRef] = field(init=False, default=None) - graph_data_property: Optional[URIRef] = field(init=False, default=None) - 
expected_outcome_property: Optional[URIRef] = field(init=False, default=None) + query_property: URIRef | None = field(init=False, default=None) + graph_data_property: URIRef | None = field(init=False, default=None) + expected_outcome_property: URIRef | None = field(init=False, default=None) def __post_init__(self) -> None: if self.query_type is QueryType.QUERY: @@ -98,7 +98,7 @@ def make_dict(cls, *test_types: TypeInfo) -> dict[Identifier, TypeInfo]: @dataclass(frozen=True) class GraphData: graph_id: URIRef - label: Optional[Literal] = None + label: Literal | None = None @classmethod def from_graph(cls, graph: Graph, identifier: Identifier) -> GraphData: @@ -134,12 +134,12 @@ def load_into(self, manifest: Manifest, dataset: Dataset) -> None: @dataclass class SPARQLEntry(ManifestEntry): type_info: TypeInfo = field(init=False) - query: Optional[IdentifiedNode] = field(init=False, default=None) - action_data: Optional[IdentifiedNode] = field(init=False, default=None) - action_graph_data: Optional[set[GraphData]] = field(init=False, default=None) - result_data: Optional[IdentifiedNode] = field(init=False, default=None) - result_graph_data: Optional[set[GraphData]] = field(init=False, default=None) - expected_outcome: Optional[URIRef] = field(init=False, default=None) + query: IdentifiedNode | None = field(init=False, default=None) + action_data: IdentifiedNode | None = field(init=False, default=None) + action_graph_data: set[GraphData] | None = field(init=False, default=None) + result_data: IdentifiedNode | None = field(init=False, default=None) + result_graph_data: set[GraphData] | None = field(init=False, default=None) + expected_outcome: URIRef | None = field(init=False, default=None) def __post_init__(self) -> None: super().__post_init__() @@ -193,7 +193,7 @@ def __post_init__(self) -> None: self.result_graph_data.add(graph_data) def load_dataset( - self, data: Optional[IdentifiedNode], graph_data_set: Optional[set[GraphData]] + self, data: IdentifiedNode | None, 
graph_data_set: set[GraphData] | None ) -> Dataset: dataset = Dataset() if data is not None: @@ -289,7 +289,7 @@ def check_syntax(monkeypatch: MonkeyPatch, entry: SPARQLEntry) -> None: assert entry.query is not None assert entry.type_info.query_type is not None query_text = entry.query_text() - catcher: Optional[pytest.ExceptionInfo[Exception]] = None + catcher: pytest.ExceptionInfo[Exception] | None = None with ExitStack() as xstack: if entry.type_info.negative: catcher = xstack.enter_context(pytest.raises(Exception)) diff --git a/test/utils/test/test_iri.py b/test/utils/test/test_iri.py index cd09cb66c..813cbd27d 100644 --- a/test/utils/test/test_iri.py +++ b/test/utils/test/test_iri.py @@ -3,7 +3,7 @@ import logging from contextlib import ExitStack from pathlib import PurePath, PurePosixPath, PureWindowsPath -from typing import Optional, Union +from typing import Union import pytest @@ -54,7 +54,7 @@ def test_file_uri_to_path( """ Tests that """ - catcher: Optional[pytest.ExceptionInfo[Exception]] = None + catcher: pytest.ExceptionInfo[Exception] | None = None with ExitStack() as xstack: if isinstance(expected_result, type) and issubclass(expected_result, Exception): @@ -107,7 +107,7 @@ def test_rebase_url( """ Tests that """ - catcher: Optional[pytest.ExceptionInfo[Exception]] = None + catcher: pytest.ExceptionInfo[Exception] | None = None with ExitStack() as xstack: if isinstance(expected_result, type) and issubclass(expected_result, Exception): diff --git a/test/utils/test/test_outcome.py b/test/utils/test/test_outcome.py index a3666b5e2..fb1e18447 100644 --- a/test/utils/test/test_outcome.py +++ b/test/utils/test/test_outcome.py @@ -2,7 +2,7 @@ from collections.abc import Callable from contextlib import ExitStack -from typing import Any, NoReturn, Optional, Union, cast +from typing import Any, NoReturn, Union, cast import pytest @@ -58,7 +58,7 @@ def _raise( def test_checker( action: Union[Callable[[], Any], Any], checker: ExceptionChecker, - 
expected_exception: Optional[type[BaseException]], + expected_exception: type[BaseException] | None, ) -> None: """ Given the action, the checker raises the expected exception, or does diff --git a/test/utils/test/test_result.py b/test/utils/test/test_result.py index e50cb82eb..dbc05794c 100644 --- a/test/utils/test/test_result.py +++ b/test/utils/test/test_result.py @@ -1,7 +1,7 @@ from __future__ import annotations from contextlib import ExitStack -from typing import Optional, Union +from typing import Union import pytest @@ -232,7 +232,7 @@ def test_bindings_equal( rhs: BindingsCollectionType, expected_result: Union[bool, type[Exception]], ) -> None: - catcher: Optional[pytest.ExceptionInfo[Exception]] = None + catcher: pytest.ExceptionInfo[Exception] | None = None with ExitStack() as xstack: if isinstance(expected_result, type) and issubclass(expected_result, Exception): diff --git a/test/utils/test/test_testutils.py b/test/utils/test/test_testutils.py index 2a74a4056..c3a383f05 100644 --- a/test/utils/test/test_testutils.py +++ b/test/utils/test/test_testutils.py @@ -4,7 +4,7 @@ from contextlib import ExitStack from dataclasses import dataclass from pathlib import PurePosixPath, PureWindowsPath -from typing import Any, Optional, Union +from typing import Any, Union import pytest @@ -21,8 +21,8 @@ def check( file_uri: str, - expected_windows_path: Optional[str], - expected_posix_path: Optional[str], + expected_windows_path: str | None, + expected_posix_path: str | None, ) -> None: if expected_windows_path is not None: expected_windows_path_object = PureWindowsPath(expected_windows_path) @@ -98,8 +98,8 @@ def check( ) def test_paths( file_uri: str, - expected_windows_path: Optional[str], - expected_posix_path: Optional[str], + expected_windows_path: str | None, + expected_posix_path: str | None, ) -> None: check(file_uri, expected_windows_path, expected_posix_path) @@ -478,7 +478,7 @@ def test_assert_cgraph_isomorphic( lhs_graph.parse(data=lhs, format=format) 
rhs_graph = graph_type() rhs_graph.parse(data=rhs, format=format) - catcher: Optional[pytest.ExceptionInfo[Exception]] = None + catcher: pytest.ExceptionInfo[Exception] | None = None with ExitStack() as xstack: if expected_result is not None: catcher = xstack.enter_context(pytest.raises(expected_result)) diff --git a/test/utils/urlopen.py b/test/utils/urlopen.py index c82f860b9..849f7c749 100644 --- a/test/utils/urlopen.py +++ b/test/utils/urlopen.py @@ -3,13 +3,12 @@ import urllib.request from collections.abc import Generator from contextlib import contextmanager -from typing import Optional from urllib.request import OpenerDirector, install_opener @contextmanager def context_urlopener(opener: OpenerDirector) -> Generator[OpenerDirector, None, None]: - old_opener: Optional[OpenerDirector] = urllib.request._opener # type: ignore[attr-defined] + old_opener: OpenerDirector | None = urllib.request._opener # type: ignore[attr-defined] try: install_opener(opener) yield opener diff --git a/test_reports/rdflib_w3c_sparql10-HEAD.ttl b/test_reports/rdflib_w3c_sparql10-HEAD.ttl index 78997b01c..b8369a94d 100644 --- a/test_reports/rdflib_w3c_sparql10-HEAD.ttl +++ b/test_reports/rdflib_w3c_sparql10-HEAD.ttl @@ -1795,7 +1795,7 @@ earl:assertedBy ; earl:mode earl:automatic ; earl:result [ a earl:TestResult ; - earl:outcome earl:failed ] ; + earl:outcome earl:passed ] ; earl:subject ; earl:test . @@ -1859,7 +1859,7 @@ earl:assertedBy ; earl:mode earl:automatic ; earl:result [ a earl:TestResult ; - earl:outcome earl:failed ] ; + earl:outcome earl:passed ] ; earl:subject ; earl:test . @@ -1907,7 +1907,7 @@ earl:assertedBy ; earl:mode earl:automatic ; earl:result [ a earl:TestResult ; - earl:outcome earl:failed ] ; + earl:outcome earl:passed ] ; earl:subject ; earl:test . @@ -2787,7 +2787,7 @@ earl:assertedBy ; earl:mode earl:automatic ; earl:result [ a earl:TestResult ; - earl:outcome earl:failed ] ; + earl:outcome earl:passed ] ; earl:subject ; earl:test .