diff --git a/pyproject.toml b/pyproject.toml index 87b46de..637dac7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,6 +30,7 @@ dependencies = [ "idna==3.10", "importlib_metadata==8.5.0", "Jinja2==3.1.4", + "jsonpath-ng>=1.7.0", "kink==0.8.1", "markdown-it-py==3.0.0", "MarkupSafe==3.0.2", diff --git a/server/const/err_enums.py b/server/const/err_enums.py index 7f9e66d..562deb9 100644 --- a/server/const/err_enums.py +++ b/server/const/err_enums.py @@ -25,3 +25,11 @@ class ErrCodes(Enum): # DB Service DB_ERROR = 80 + + # Source Service + SOURCE_NOT_FOUND = 100 + UNSUPPORTED_FILE_TYPE = 101 + JSON_PATH_DATA_NOT_FOUND = 102 + JSON_PATH_DATA_NOT_UNIQUE = 103 + JSON_PATH_NOT_ARRAY = 104 + JSON_PATH_NOT_PROVIDED = 105 diff --git a/server/models/mapping.py b/server/models/mapping.py new file mode 100644 index 0000000..7f458ab --- /dev/null +++ b/server/models/mapping.py @@ -0,0 +1,244 @@ +from dataclasses import dataclass +from enum import StrEnum + + +class MappingNodeType(StrEnum): + ENTITY = "entity" + LITERAL = "literal" + URIRef = "uri_ref" + + +@dataclass(kw_only=True) +class MappingNode: + """ + A node in a mapping graph. 
+ + Attributes: + id (str): The ID of the node + type (MappingNodeType): The type of the node + label (str): The label of the node + uri_pattern (str): The URI pattern of the node + rdf_type (list[str]): The RDF type/s of the node + """ + + id: str + type: MappingNodeType + label: str + uri_pattern: str + rdf_type: list[str] + + def to_dict(self): + return { + "id": self.id, + "type": self.type, + "label": self.label, + "uri_pattern": self.uri_pattern, + "rdf_type": self.rdf_type, + } + + @classmethod + def from_dict(cls, data): + if "id" not in data: + raise ValueError("id is required") + if "type" not in data: + raise ValueError("type is required") + if "label" not in data: + raise ValueError("label is required") + if "uri_pattern" not in data: + raise ValueError("uri_pattern is required") + if "rdf_type" not in data: + data["rdf_type"] = [] + return cls( + id=data["id"], + type=data["type"], + label=data["label"], + uri_pattern=data["uri_pattern"], + rdf_type=data["rdf_type"], + ) + + +@dataclass(kw_only=True) +class MappingLiteral: + """ + A literal in a mapping graph. 
+ + Attributes: + id (str): The ID of the literal + type (MappingNodeType): The type of the literal + label (str): The label of the literal + value (str): The value of the literal + literal_type (str): The type of the literal + """ + + id: str + type: MappingNodeType + label: str + value: str + literal_type: str + + def to_dict(self): + return { + "id": self.id, + "type": self.type, + "label": self.label, + "value": self.value, + "literal_type": self.literal_type, + } + + @classmethod + def from_dict(cls, data): + if "id" not in data: + raise ValueError("id is required") + if "type" not in data: + raise ValueError("type is required") + if "label" not in data: + raise ValueError("label is required") + if "value" not in data: + raise ValueError("value is required") + if "literal_type" not in data: + raise ValueError("literal_type is required") + return cls( + id=data["id"], + type=data["type"], + label=data["label"], + value=data["value"], + literal_type=data["literal_type"], + ) + + +@dataclass(kw_only=True) +class MappingURIRef: + """ + A URI reference in a mapping graph. + + Attributes: + id (str): The ID of the URI reference + type (MappingNodeType): The type of the URI reference + uri_pattern (str): The URI pattern of the URI reference + """ + + id: str + type: MappingNodeType + uri_pattern: str + + def to_dict(self): + return { + "id": self.id, + "type": self.type, + "uri_pattern": self.uri_pattern, + } + + @classmethod + def from_dict(cls, data): + if "id" not in data: + raise ValueError("id is required") + if "type" not in data: + raise ValueError("type is required") + if "uri_pattern" not in data: + raise ValueError("uri_pattern is required") + return cls( + id=data["id"], + type=data["type"], + uri_pattern=data["uri_pattern"], + ) + + +@dataclass(kw_only=True) +class MappingEdge: + """ + An edge in a mapping graph. 
+ + Attributes: + id (str): The ID of the edge + source (str): The ID of the source node + target (str): The ID of the target node + predicate_uri (str): The URI of the predicate + """ + + id: str + source: str + target: str + predicate_uri: str + + def to_dict(self): + return { + "id": self.id, + "source": self.source, + "target": self.target, + "predicate_uri": self.predicate_uri, + } + + @classmethod + def from_dict(cls, data): + if "id" not in data: + raise ValueError("id is required") + if "source" not in data: + raise ValueError("source is required") + if "target" not in data: + raise ValueError("target is required") + if "predicate_uri" not in data: + raise ValueError("predicate_uri is required") + return cls( + id=data["id"], + source=data["source"], + target=data["target"], + predicate_uri=data["predicate_uri"], + ) + + +@dataclass(kw_only=True) +class MappingGraph: + """ + A mapping graph. + + Attributes: + uuid (str): The UUID of the graph + nodes (list[MappingNode]): The nodes in the graph + edges (list[MappingEdge]): The edges in the graph + """ + + uuid: str + source_id: str + nodes: list[ + MappingNode | MappingLiteral | MappingURIRef + ] + edges: list[MappingEdge] + + def to_dict(self): + return { + "uuid": self.uuid, + "source_id": self.source_id, + "nodes": [ + node.to_dict() for node in self.nodes + ], + "edges": [ + edge.to_dict() for edge in self.edges + ], + } + + @classmethod + def from_dict(cls, data): + if "uuid" not in data: + raise ValueError("uuid is required") + if "source_id" not in data: + raise ValueError("source_id is required") + if "nodes" not in data: + raise ValueError("nodes is required") + if "edges" not in data: + raise ValueError("edges is required") + return cls( + uuid=data["uuid"], + source_id=data["source_id"], + nodes=[ + MappingNode.from_dict(node) + if node["type"] == MappingNodeType.ENTITY + else MappingLiteral.from_dict(node) + if node["type"] == MappingNodeType.LITERAL + else MappingURIRef.from_dict(node) + for 
node in data["nodes"] + ], + edges=[ + MappingEdge.from_dict(edge) + for edge in data["edges"] + ], + ) diff --git a/server/models/source.py b/server/models/source.py new file mode 100644 index 0000000..b3d0e06 --- /dev/null +++ b/server/models/source.py @@ -0,0 +1,80 @@ +from dataclasses import dataclass +from enum import StrEnum + + +class SourceType(StrEnum): + """ + Enumeration of the types of sources. + """ + + CSV = "csv" + JSON = "json" + + +@dataclass +class Source: + """ + Data class representing a source. + + Attributes: + - type: SourceType + The type of the source. + - references: list[str] + The list of references that the source has. These references used for assisting user during the mapping process. + For example, if the source is a CSV file, the references can be the column names + - file_uuid: str + The UUID of the file. Depending on the type, this can point to a file or connection args to a database. + - extra: dict + Extra information that can be used for the source + """ + + uuid: str + type: SourceType + references: list[str] + file_uuid: str + extra: dict + + def to_dict(self) -> dict: + """ + Convert the object to a dictionary. + + Returns: + dict: Dictionary representation of the object + """ + return { + "uuid": self.uuid, + "type": self.type, + "references": self.references, + "file_uuid": self.file_uuid, + "extra": self.extra if self.extra else {}, + } + + @staticmethod + def from_dict(data: dict) -> "Source": + """ + Create a Source object from a dictionary. 
+ + Args: + data (dict): Dictionary containing the data + + Returns: + Source: Source object + """ + + if "uuid" not in data: + raise ValueError("Missing 'uuid' in data") + + if "type" not in data: + raise ValueError("Missing 'type' in data") + if "references" not in data: + raise ValueError("Missing 'references' in data") + if "file_uuid" not in data: + raise ValueError("Missing 'file_uuid' in data") + + return Source( + uuid=data["uuid"], + type=SourceType(data["type"]), + references=data["references"], + file_uuid=data["file_uuid"], + extra=data.get("extra", {}), + ) diff --git a/server/models/workspace.py b/server/models/workspace.py index 6e0c22e..7879199 100644 --- a/server/models/workspace.py +++ b/server/models/workspace.py @@ -17,7 +17,6 @@ class WorkspaceModel: type (WorkspaceType): type of the workspace location (str): location of the workspace enabled_features (list[str]): list of enabled features - sources (list[str]): ids of sources mappings (list[str]): ids of mappings ontologies (list[str]): ids of ontologies prefixes (dict[str, str]): prefixes @@ -30,7 +29,6 @@ class WorkspaceModel: type: WorkspaceType location: str enabled_features: list[str] - sources: list[str] mappings: list[str] prefixes: dict[str, str] ontologies: list[str] @@ -65,7 +63,6 @@ def create_with_defaults( type=type, location=location, enabled_features=[], - sources=[], mappings=[], prefixes={}, ontologies=[], @@ -86,7 +83,6 @@ def to_dict(self) -> dict: "type": self.type.value, "location": self.location, "enabled_features": self.enabled_features, - "sources": self.sources, "mappings": self.mappings, "prefixes": self.prefixes, "ontologies": self.ontologies, @@ -111,7 +107,6 @@ def from_dict(cls, data: dict) -> "WorkspaceModel": type=WorkspaceType(data["type"]), location=data["location"], enabled_features=data["enabled_features"], - sources=data["sources"], mappings=data["mappings"], prefixes=data["prefixes"], ontologies=data["ontologies"], @@ -125,7 +120,6 @@ def copy_with( type: 
WorkspaceType | None = None, location: str | None = None, enabled_features: list[str] | None = None, - sources: list[str] | None = None, mappings: list[str] | None = None, prefixes: dict[str, str] | None = None, ontologies: list[str] | None = None, @@ -140,7 +134,6 @@ def copy_with( type (WorkspaceType): type of the workspace location (str): location of the workspace enabled_features (list[str]): list of enabled features - sources (list[str]): ids of sources mappings (list[str]): ids of mappings prefixes (dict[str, str]): prefixes ontologies (list[str]): ids of ontologies @@ -163,9 +156,6 @@ def copy_with( enabled_features=enabled_features if enabled_features is not None else self.enabled_features, - sources=sources - if sources is not None - else self.sources, mappings=mappings if mappings is not None else self.mappings, diff --git a/server/service_protocols/mapping_service_protocol/__init__.py b/server/service_protocols/mapping_service_protocol/__init__.py new file mode 100644 index 0000000..95dceea --- /dev/null +++ b/server/service_protocols/mapping_service_protocol/__init__.py @@ -0,0 +1,70 @@ +from abc import ABC, abstractmethod + +from server.models.mapping import MappingGraph + + +class MappingServiceProtocol(ABC): + @abstractmethod + def get_mapping(self, mapping_id: str) -> MappingGraph: + """ + Get a mapping by ID + + Args: + mapping_id (str): ID of the mapping + + Returns: + MappingGraph: Mapping + """ + pass + + @abstractmethod + def create_mapping(self, graph: MappingGraph) -> str: + """ + Create a new mapping + + Args: + graph (MappingGraph): Mapping + + Returns: + str: ID of the mapping + """ + pass + + @abstractmethod + def update_mapping( + self, mapping_id: str, graph: MappingGraph + ) -> None: + """ + Update a mapping + + Args: + mapping_id (str): ID of the mapping + graph (MappingGraph): Mapping + + Returns: + None + """ + pass + + @abstractmethod + def delete_mapping(self, mapping_id: str) -> None: + """ + Delete a mapping + + Args: + 
mapping_id (str): ID of the mapping + + Returns: + None + """ + pass + + @abstractmethod + def list_mappings(self) -> list[str]: + """ + List all mappings + + Returns: + list[str]: List of mapping IDs + """ + pass diff --git a/server/service_protocols/source_service_protocol/__init__.py b/server/service_protocols/source_service_protocol/__init__.py new file mode 100644 index 0000000..280b756 --- /dev/null +++ b/server/service_protocols/source_service_protocol/__init__.py @@ -0,0 +1,75 @@ +from abc import ABC, abstractmethod + +from server.models.source import Source, SourceType + + +class SourceServiceProtocol(ABC): + @abstractmethod + def get_source(self, source_id: str) -> Source: + """ + Get a source by ID + + Args: + source_id (str): ID of the source + + Returns: + Source: Source + """ + pass + + @abstractmethod + def download_source(self, source_id: str) -> bytes: + """ + Download a source + + Args: + source_id (str): ID of the source + + Returns: + bytes: Content of the source + """ + + @abstractmethod + def create_source( + self, type: SourceType, content: bytes + ) -> str: + """ + Create a new source + + Args: + type (SourceType): Type of the source + content (bytes): Content of the source, depending on the type this can be a file or connection args + + Returns: + str: ID of the source + """ + pass + + @abstractmethod + def update_source( + self, source_id: str, source: Source + ) -> None: + """ + Update a source + + Args: + source_id (str): ID of the source + source (Source): Source + + Returns: + None + """ + pass + + @abstractmethod + def delete_source(self, source_id: str) -> None: + """ + Delete a source + + Args: + source_id (str): ID of the source + + Returns: + None + """ + pass diff --git a/server/services/__init__.py b/server/services/__init__.py index de7b0bf..4496fe1 100644 --- a/server/services/__init__.py +++ b/server/services/__init__.py @@ -11,11 +11,19 @@ from server.services.local.local_ontology_service import ( LocalOntologyService, ) 
+from server.services.local.local_source_service import ( + LocalSourceService, +) +from server.services.local.local_workspace_service import ( + LocalWorkspaceService, +) __all__ = [ "ConfigService", "DBService", "WorkspaceMetadataService", + "LocalWorkspaceService", "LocalFSService", "LocalOntologyService", + "LocalSourceService", ] diff --git a/server/services/local/local_source_service.py b/server/services/local/local_source_service.py new file mode 100644 index 0000000..06dc21f --- /dev/null +++ b/server/services/local/local_source_service.py @@ -0,0 +1,196 @@ +import json +import logging +from uuid import uuid4 + +import jsonpath_ng +from kink import inject + +from server.exceptions import ErrCodes +from server.facades import ServerException +from server.models.source import Source, SourceType +from server.service_protocols.source_service_protocol import ( + SourceServiceProtocol, +) +from server.services.local.local_fs_service import ( + LocalFSService, +) +from server.utils.schema_extractor import SchemaExtractor + + +@inject(alias=SourceServiceProtocol) +class LocalSourceService(SourceServiceProtocol): + def __init__( + self, + fs_service: LocalFSService, + schema_extractor: SchemaExtractor, + ) -> None: + self.schema_extractor = schema_extractor + self.fs_service = fs_service + self.logger = logging.getLogger(__name__) + + def get_source(self, source_id: str) -> Source: + self.logger.info(f"Getting source {source_id}") + + try: + source_file_raw = ( + self.fs_service.download_file_with_uuid( + source_id + ) + ) + except ServerException as e: + if e.code == ErrCodes.FILE_NOT_FOUND: + self.logger.error( + f"Source {source_id} not found" + ) + raise ServerException( + f"Source {source_id} not found", + code=ErrCodes.SOURCE_NOT_FOUND, + ) + self.logger.error( + f"Failed to get source {source_id}" + ) + raise e + except Exception as e: + self.logger.error( + f"Unexpected error while getting source {source_id}", + exc_info=e, + ) + raise ServerException( + 
"Unexpected error", + ErrCodes.UNKNOWN_ERROR, + ) + + return Source.from_dict( + json.loads(source_file_raw.decode("utf-8")) + ) + + def download_source(self, source_id: str) -> bytes: + self.logger.info(f"Getting source {source_id}") + source = self.get_source(source_id) + source_file_raw = ( + self.fs_service.download_file_with_uuid( + source.file_uuid + ) + ) + return source_file_raw + + def adjust_json_source( + self, content: bytes, json_path: str + ) -> bytes: + try: + json_path_exp = jsonpath_ng.parse(json_path) + json_data = json.loads(content.decode("utf-8")) + # This should match to single array element + matches = json_path_exp.find(json_data) + if len(matches) == 0: + raise ServerException( + "No matches found for the given json path", + ErrCodes.JSON_PATH_DATA_NOT_FOUND, + ) + if len(matches) > 1: + raise ServerException( + "Multiple matches found for the given json path", + ErrCodes.JSON_PATH_DATA_NOT_UNIQUE, + ) + if not isinstance(matches[0].value, list): + raise ServerException( + "The json path does not point to an array", + ErrCodes.JSON_PATH_NOT_ARRAY, + ) + return json.dumps(matches[0].value).encode( + "utf-8" + ) + except json.JSONDecodeError as e: + self.logger.error( + "Failed to decode the json content", + exc_info=e, + ) + raise ServerException( + "Failed to decode the json content", + ErrCodes.FILE_CORRUPTED, + ) + + def create_source( + self, + type: SourceType, + content: bytes, + extra: dict = {}, + ) -> str: + self.logger.info(f"Creating source of type {type}") + + if type == SourceType.JSON: + if "json_path" not in extra: + raise ServerException( + "JSON path is required for JSON source", + ErrCodes.JSON_PATH_NOT_PROVIDED, + ) + content = self.adjust_json_source( + content, extra["json_path"] + ) + + try: + references = ( + self.schema_extractor.extract_schema( + content, type + ) + ) + except KeyError: + self.logger.error( + f"Unsupported file type {type}", + ) + raise ServerException( + "Unsupported file type", + 
ErrCodes.UNSUPPORTED_FILE_TYPE, + ) + except Exception as e: + self.logger.error( + "Unexpected error while extracting schema", + exc_info=e, + ) + raise ServerException( + "Unexpected error", + ErrCodes.UNKNOWN_ERROR, + ) + self.logger.info( + f"Extracted references: {references}" + ) + source_uuid = str(uuid4()) + file_metadata = self.fs_service.upload_file( + f"{source_uuid}_file", + content, + ) + self.logger.info( + f"Uploaded file with uuid {file_metadata.uuid}" + ) + + source = Source( + uuid=source_uuid, + type=type, + references=references, + file_uuid=file_metadata.uuid, + extra={}, + ) + + self.fs_service.upload_file( + source_uuid, + json.dumps(source.to_dict()).encode("utf-8"), + uuid=source_uuid, + ) + + self.logger.info(f"Created source {source_uuid}") + + return source_uuid + + def update_source( + self, source_id: str, source: Source + ) -> None: + raise NotImplementedError() + + def delete_source(self, source_id: str) -> None: + self.logger.info(f"Deleting source {source_id}") + source = self.get_source(source_id) + self.fs_service.delete_file_with_uuid( + source.file_uuid + ) + self.fs_service.delete_file_with_uuid(source_id) + self.logger.info(f"Deleted source {source_id}") diff --git a/server/utils/schema_extractor/__init__.py b/server/utils/schema_extractor/__init__.py new file mode 100644 index 0000000..5bf9005 --- /dev/null +++ b/server/utils/schema_extractor/__init__.py @@ -0,0 +1,45 @@ +from kink import inject + +from utils.schema_extractor.i_schema_extractor import ( + ISchemaExtractor, +) +from utils.schema_extractor.json_schema_extractor import ( + JSONSchemaExtractor, # noqa: F401 +) +from utils.schema_extractor.tabular_schema_extractor import ( + TabularSchemaExtractor, # noqa: F401 +) + + +@inject +class SchemaExtractor: + def __init__( + self, schema_extractors: list[ISchemaExtractor] + ) -> None: + self.type_mapping: dict[str, ISchemaExtractor] = {} + for schema_extractor in schema_extractors: + for file_type in 
schema_extractor.file_types: + if file_type in self.type_mapping: + raise KeyError( + f"File type {file_type} is already registered to" + f" {self.type_mapping[file_type].name} / Can't register" + f" to {schema_extractor.name} as well" + ) + self.type_mapping[file_type] = ( + schema_extractor + ) + + def extract_schema( + self, + file: bytes, + file_extension: str, + ): + if file_extension not in self.type_mapping: + raise KeyError( + f"File type {file_extension} is not registered to any schema" + " extractor" + ) + + return self.type_mapping[ + file_extension + ].extract_schema(file, file_extension, "") diff --git a/server/utils/schema_extractor/i_schema_extractor.py b/server/utils/schema_extractor/i_schema_extractor.py new file mode 100644 index 0000000..2f06613 --- /dev/null +++ b/server/utils/schema_extractor/i_schema_extractor.py @@ -0,0 +1,20 @@ +from abc import ABC, abstractmethod + + +class ISchemaExtractor(ABC): + def __init__( + self, + name: str, + file_types: list[str], + ): + self.name = name + self.file_types = file_types + + @abstractmethod + def extract_schema( + self, + file: bytes, + file_extension: str, + name_prefix: str, + ) -> list[str]: + pass diff --git a/server/utils/schema_extractor/json_schema_extractor.py b/server/utils/schema_extractor/json_schema_extractor.py new file mode 100644 index 0000000..ad8ed69 --- /dev/null +++ b/server/utils/schema_extractor/json_schema_extractor.py @@ -0,0 +1,65 @@ +import json +from io import BytesIO + +from kink import inject +from utils.schema_extractor.i_schema_extractor import ( + ISchemaExtractor, +) + + +@inject(alias=ISchemaExtractor) +class JSONSchemaExtractor(ISchemaExtractor): + def __init__( + self, + temp_storage: str, + ): + super().__init__("JSON Schema Extractor", ["json"]) + + def read_file(self, file: bytes) -> list[dict]: + with BytesIO(file) as f: + data = json.load(f) + + if isinstance(data, dict): + raise ValueError( + "The root path you provided does not return an array of" + " 
objects" + ) + + return data + + def getPaths(self, obj, parent="") -> set: + result = set() + if isinstance(obj, dict): + for key, value in obj.items(): + result.update( + self.getPaths( + value, parent + "." + str(key) + ) + ) + elif isinstance(obj, list): + for i, value in enumerate(obj): + result.update( + self.getPaths( + value, parent + "[" + str(i) + "]" + ) + ) + else: + result.add(parent) + return result + + def extract_schema( + self, + file: bytes, + file_extension: str, + name_prefix: str, + ): + data = self.read_file(file) + + sets = [self.getPaths(obj, "") for obj in data] + + return list( + map( + lambda x: "{}{}".format(name_prefix, x[1:]), + set.union(*sets), + ) + ) diff --git a/server/utils/schema_extractor/tabular_schema_extractor.py b/server/utils/schema_extractor/tabular_schema_extractor.py new file mode 100644 index 0000000..9038d10 --- /dev/null +++ b/server/utils/schema_extractor/tabular_schema_extractor.py @@ -0,0 +1,69 @@ +from io import BytesIO +from kink import inject + +import pandas as pd + +from utils.schema_extractor.i_schema_extractor import ( + ISchemaExtractor, +) + + +@inject(alias=ISchemaExtractor) +class TabularSchemaExtractor(ISchemaExtractor): + def __init__( + self, + temp_storage: str, + ): + super().__init__( + "Tabular Schema Extractor", + [ + "csv", + "tsv", + "xls", + "xlsx", + ], + ) + + def read_file( + self, file: bytes, file_extension: str + ) -> pd.DataFrame: + if file_extension == "csv": + return pd.read_csv( + BytesIO(file), + ) + + if file_extension == "tsv": + return pd.read_csv( + BytesIO(file), + sep="\t", + ) + + if ( + file_extension == "xls" + or file_extension == "xlsx" + ): + df = pd.read_excel( + BytesIO(file), + ) + + if len(df.sheet_names) > 1: + raise ValueError( + "Multiple sheets are not supported yet" + ) + + return df + + raise KeyError("File extension not supported") + + def extract_schema( + self, + file: bytes, + file_extension: str, + name_prefix: str, + ): + df = self.read_file(file, 
file_extension) + + return [ + "{}{}".format(name_prefix, column_name) + for column_name in df.columns + ] diff --git a/uv.lock b/uv.lock index 1cd26a0..b4a89b3 100644 --- a/uv.lock +++ b/uv.lock @@ -346,6 +346,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/31/80/3a54838c3fb461f6fec263ebf3a3a41771bd05190238de3486aae8540c36/jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d", size = 133271 }, ] +[[package]] +name = "jsonpath-ng" +version = "1.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ply" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6d/86/08646239a313f895186ff0a4573452038eed8c86f54380b3ebac34d32fb2/jsonpath-ng-1.7.0.tar.gz", hash = "sha256:f6f5f7fd4e5ff79c785f1573b394043b39849fb2bb47bcead935d12b00beab3c", size = 37838 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/35/5a/73ecb3d82f8615f32ccdadeb9356726d6cae3a4bbc840b437ceb95708063/jsonpath_ng-1.7.0-py3-none-any.whl", hash = "sha256:f3d7f9e848cba1b6da28c55b1c26ff915dc9e0b1ba7e752a53d6da8d5cbd00b6", size = 30105 }, +] + [[package]] name = "kink" version = "0.8.1" @@ -477,6 +489,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ed/8c/87ddf1fcb55d11f9f847e3c69bb1c6f8e46e2f40ab1a2d2abadb2401b007/pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5", size = 11617166 }, ] +[[package]] +name = "ply" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e5/69/882ee5c9d017149285cab114ebeab373308ef0f874fcdac9beb90e0ac4da/ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3", size = 159130 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a3/58/35da89ee790598a0700ea49b2a66594140f44dec458c07e8e3d4979137fc/ply-3.11-py2.py3-none-any.whl", hash = 
"sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce", size = 49567 }, +] + [[package]] name = "protobuf" version = "5.28.3" @@ -752,6 +773,7 @@ dependencies = [ { name = "idna" }, { name = "importlib-metadata" }, { name = "jinja2" }, + { name = "jsonpath-ng" }, { name = "kink" }, { name = "markdown-it-py" }, { name = "markupsafe" }, @@ -829,6 +851,7 @@ requires-dist = [ { name = "idna", specifier = "==3.10" }, { name = "importlib-metadata", specifier = "==8.5.0" }, { name = "jinja2", specifier = "==3.1.4" }, + { name = "jsonpath-ng", specifier = ">=1.7.0" }, { name = "kink", specifier = "==0.8.1" }, { name = "markdown-it-py", specifier = "==3.0.0" }, { name = "markupsafe", specifier = "==3.0.2" },