From 92d0fe10f3ad5380d3786e5170aced1200aa65c7 Mon Sep 17 00:00:00 2001
From: andrewelamb <andrew.e.lamb@gmail.com>
Date: Thu, 11 May 2023 07:20:14 -0700
Subject: [PATCH 001/135] first commit

---
 schematic/configuration/__init__.py      |   0
 schematic/configuration/configuration.py | 155 ++++++++++++++++++++
 schematic/configuration/dataclasses.py   | 172 +++++++++++++++++++++
 3 files changed, 327 insertions(+)
 create mode 100644 schematic/configuration/__init__.py
 create mode 100644 schematic/configuration/configuration.py
 create mode 100644 schematic/configuration/dataclasses.py

diff --git a/schematic/configuration/__init__.py b/schematic/configuration/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/schematic/configuration/configuration.py b/schematic/configuration/configuration.py
new file mode 100644
index 000000000..bed338130
--- /dev/null
+++ b/schematic/configuration/configuration.py
@@ -0,0 +1,155 @@
+"""Configuration singleton for the Schematic Package"""
+
+from typing import Any, Optional
+import os
+import logging
+import yaml
+from .dataclasses import (
+    FileNameConfig,
+    SynapseConfig,
+    ManifestConfig,
+    ModelConfig,
+    GoogleConfig,
+)
+
+# Create a logger for the configuration class
+logger = logging.getLogger(__name__)
+
+
+class Configuration:
+    """
+    This class is used as a singleton by the rest of the package.
+    It is instantiated only once at the bottom of this file, and that
+    instance is imported by other modules
+    """
+
+    def __init__(self) -> None:
+        self.config_path: Optional[str] = None
+        self._file_name_config = FileNameConfig()
+        self._synapse_config = SynapseConfig()
+        self._manifest_config = ManifestConfig()
+        self._model_config = ModelConfig()
+        self._google_config = GoogleConfig()
+
+    def load_config(self, config_path: str) -> None:
+        """Loads a user created config file and overwrites any defaults listed in the file
+
+        Args:
+            config_path (str): The path to the config file
+        """
+        config_path = os.path.expanduser(config_path)
+        config_path = os.path.abspath(config_path)
+        self.config_path = config_path
+        with open(config_path, "r", encoding="utf-8") as file:
+            data = yaml.safe_load(file)
+        self._file_name_config = FileNameConfig(**data.get("definitions", {}))
+        self._synapse_config = SynapseConfig(**data.get("synapse", {}))
+        self._manifest_config = ManifestConfig(**data.get("manifest", {}))
+        self._model_config = ModelConfig(**data.get("model", {}))
+        self._google_config = GoogleConfig(**data.get("google", {}))
+
+    def _normalize_path(self, path: str) -> str:
+        """
+
+        Args:
+            path (str): The path to normalize
+
+        Returns:
+            str: The normalized path
+        """
+
+        if self.config_path:
+            # Retrieve parent directory of the config to decode relative paths
+            parent_dir = os.path.dirname(self.config_path)
+        else:
+            # assume the parent dir would be the current work dir
+            parent_dir = os.getcwd()
+
+        # Ensure absolute file paths
+        if not os.path.isabs(path):
+            path = os.path.join(parent_dir, path)
+        # And lastly, normalize file paths
+        return os.path.normpath(path)
+
+    def _log_config_value_access(
+        self, value_name: str, config_value: Any
+    ) -> None:
+        """Logs when a configuration value is being accessed
+
+        Args:
+            value_name (str): The name of the value to log
+            config_value (Any): The value from the configuration
+        """
+        logger.info(
+            "The '%s' value is being taken from the user specified configuration file: '%s'.",
+            value_name,
+            config_value,
+        )
+
+    @property
+    def service_account_credentials_path(self) -> str:
+        """
+        Returns:
+            str:
+        """
+        value = self._normalize_path(self._file_name_config.service_acct_creds)
+        self._log_config_value_access("service_account_credentials_path", value)
+        return value
+
+    @property
+    def synapse_configuration_path(self) -> str:
+        """
+        Returns:
+            str: The path to the synapse configuration file
+        """
+        value = self._normalize_path(self._file_name_config.synapse_config)
+        self._log_config_value_access("synapse_configuration_path", value)
+        return value
+
+    @property
+    def google_required_background_color(self) -> dict[str, float]:
+        """
+        Returns:
+            dict[str, float]:
+        """
+        value = {
+            "red": 0.9215,
+            "green": 0.9725,
+            "blue": 0.9803,
+        }
+        self._log_config_value_access("google_required_background_color", value)
+        return value
+
+    @property
+    def google_optional_background_color(self) -> dict[str, float]:
+        """
+        Returns:
+            dict[str, float]:
+        """
+        value = {
+            "red": 1.0,
+            "green": 1.0,
+            "blue": 0.9019,
+        }
+        self._log_config_value_access("google_required_background_color", value)
+        return value
+
+    @property
+    def synapse_master_file_view_id(self) -> str:
+        """
+        Returns:
+            str: The Synapse ID of the master file view
+        """
+        value = self._synapse_config.master_fileview
+        self._log_config_value_access("synapse_master_fileview", value)
+        return value
+
+    @property
+    def synapse_service_account_credentials_id(self) -> str:
+        """
+        Returns:
+            str:
+        """
+        value = self._synapse_config.service_acct_creds
+        self._log_config_value_access("synapse_service_account_credentials_id", value)
+        return value
diff --git a/schematic/configuration/dataclasses.py b/schematic/configuration/dataclasses.py
new file mode 100644
index 000000000..ac4d7c6f6
--- /dev/null
+++ b/schematic/configuration/dataclasses.py
@@ -0,0 +1,172 @@
+"""Pydantic dataclasses"""
+
+import re
+from pydantic.dataclasses import dataclass
+from pydantic import validator
+
+
+@dataclass()
+class FileNameConfig:
+    """
+    synapse_config: the name of the synapse config file
+    service_acct_creds:
+    """
+
+    synapse_config: str = ".synapseConfig"
+    service_acct_creds: str = "schematic_service_account_creds.json"
+
+    @validator("synapse_config", "service_acct_creds")
+    @classmethod
+    def validate_string_is_not_empty(cls, value: str) -> str:
+        """Check if string is not empty(has at least one char)
+
+        Args:
+            value (str): A string
+
+        Raises:
+            ValueError: If the value is zero characters long
+
+        Returns:
+            (str): The input value
+        """
+        if len(value) == 0:
+            raise ValueError(f"{value} is an empty string")
+        return value
+
+
+@dataclass()
+class SynapseConfig:
+    """
+    master_fileview: Synapse id for the master file view
+    service_acct_creds:
+    manifest_folder: name of the folder manifests will be saved to locally
+    manifest_basename: the name of downloaded manifest files
+    """
+
+    master_fileview: str = "syn23643253"
+    service_acct_creds: str = "syn25171627"
+    manifest_folder: str = "manifests"
+    manifest_basename: str = "synapse_storage_manifest"
+
+    @validator("master_fileview", "service_acct_creds")
+    @classmethod
+    def validate_synapse_id(cls, value: str) -> str:
+        """Check if string is a valid synapse id
+
+        Args:
+            value (str): A string
+
+        Raises:
+            ValueError: If the value isn't a valid Synapse id
+
+        Returns:
+            (str): The input value
+        """
+        if not re.search("^syn[0-9]+", value):
+            raise ValueError(f"{value} is not a valid Synapse id")
+        return value
+
+    @validator("manifest_folder", "manifest_basename")
+    @classmethod
+    def validate_string_is_not_empty(cls, value: str) -> str:
+        """Check if string is not empty(has at least one char)
+
+        Args:
+            value (str): A string
+
+        Raises:
+            ValueError: If the value is zero characters long
+
+        Returns:
+            (str): The input value
+        """
+        if len(value) == 0:
+            raise ValueError(f"{value} is an empty string")
+        return value
+
+
+@dataclass()
+class ManifestConfig:
+    """
+    title:
+    data_type:
+    """
+
+    title: str = "example"
+    data_type: list[str] = ["Biospecimen", "Patient"]
+
+    @validator("title")
+    @classmethod
+    def validate_string_is_not_empty(cls, value: str) -> str:
+        """Check if string is not empty(has at least one char)
+
+        Args:
+            value (str): A string
+
+        Raises:
+            ValueError: If the value is zero characters long
+
+        Returns:
+            (str): The input value
+        """
+        if len(value) == 0:
+            raise ValueError(f"{value} is an empty string")
+        return value
+
+
+@dataclass()
+class ModelConfig:
+    """
+    location: location of the schema jsonld, either a path, ro url
+    file_type: one of ["local"]
+    """
+
+    location: str = "tests/data/example.model.jsonld"
+    file_type: str = "local"
+
+    @validator("title", "file_type")
+    @classmethod
+    def validate_string_is_not_empty(cls, value: str) -> str:
+        """Check if string is not empty(has at least one char)
+
+        Args:
+            value (str): A string
+
+        Raises:
+            ValueError: If the value is zero characters long
+
+        Returns:
+            (str): The input value
+        """
+        if len(value) == 0:
+            raise ValueError(f"{value} is an empty string")
+        return value
+
+
+@dataclass()
+class GoogleConfig:
+    """
+    master_template_id:
+    strict_validation:
+    """
+
+    master_template_id: str = "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU"
+    strict_validation: bool = True
+
+    @validator("master_template_id")
+    @classmethod
+    def validate_string_is_not_empty(cls, value: str) -> str:
+        """Check if string is not empty(has at least one char)
+
+        Args:
+            value (str): A string
+
+        Raises:
+            ValueError: If the value is zero characters long
+
+        Returns:
+            (str): The input value
+        """
+        if len(value) == 0:
+            raise ValueError(f"{value} is an empty string")
+        return value

From 1e3762c8ecb81488b3998c86122cca07cd2e7e66 Mon Sep 17 00:00:00 2001
From: andrewelamb <andrew.e.lamb@gmail.com>
Date: Thu, 11 May 2023 08:18:26 -0700
Subject: [PATCH 002/135] temp commit

---
 schematic/configuration2/__init__.py      |   0
 schematic/configuration2/configuration.py | 201 ++++++++++++++++++++
 schematic/configuration2/dataclasses.py   | 165 ++++++++++++++++++
 3 files changed, 366 insertions(+)
 create mode 100644 schematic/configuration2/__init__.py
 create mode 100644 schematic/configuration2/configuration.py
 create mode 100644 schematic/configuration2/dataclasses.py

diff --git a/schematic/configuration2/__init__.py b/schematic/configuration2/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/schematic/configuration2/configuration.py b/schematic/configuration2/configuration.py
new file mode 100644
index 000000000..263cde3fc
--- /dev/null
+++ b/schematic/configuration2/configuration.py
@@ -0,0 +1,201 @@
+"""Configuration singleton for the Schematic Package"""
+
+from typing import Optional
+import os
+import yaml
+from .dataclasses import (
+    SynapseConfig,
+    ManifestConfig,
+    ModelConfig,
+    GoogleSheetsConfig,
+)
+
+
+class Configuration:
+    """
+    This class is used as a singleton by the rest of the package.
+    It is instantiated only once at the bottom of this file, and that
+    instance is imported by other modules
+    """
+
+    def __init__(self) -> None:
+        self.config_path: Optional[str] = None
+        self._synapse_config = SynapseConfig()
+        self._manifest_config = ManifestConfig()
+        self._model_config = ModelConfig()
+        self._google_sheets_config = GoogleSheetsConfig()
+
+    def load_config(self, config_path: str) -> None:
+        """Loads a user created config file and overwrites any defaults listed in the file
+
+        Args:
+            config_path (str): The path to the config file
+        """
+        config_path = os.path.expanduser(config_path)
+        config_path = os.path.abspath(config_path)
+        self.config_path = config_path
+        with open(config_path, "r", encoding="utf-8") as file:
+            data = yaml.safe_load(file)
+        self._synapse_config = SynapseConfig(
+            **data.get("asset_store", {}).get("synapse", {})
+        )
+        self._manifest_config = ManifestConfig(**data.get("manifest", {}))
+        self._model_config = ModelConfig(**data.get("model", {}))
+        self._google_sheets_config = GoogleSheetsConfig(**data.get("google_sheets", {}))
+
+    def _normalize_path(self, path: str) -> str:
+        """
+
+        Args:
+            path (str): The path to normalize
+
+        Returns:
+            str: The normalized path
+        """
+
+        if self.config_path:
+            # Retrieve parent directory of the config to decode relative paths
+            parent_dir = os.path.dirname(self.config_path)
+        else:
+            # assume the parent dir would be the current work dir
+            parent_dir = os.getcwd()
+
+        # Ensure absolute file paths
+        if not os.path.isabs(path):
+            path = os.path.join(parent_dir, path)
+        # And lastly, normalize file paths
+        return os.path.normpath(path)
+
+    @property
+    def synapse_configuration_path(self) -> str:
+        """
+        Returns:
+            str: The path to the synapse configuration file
+        """
+        return self._normalize_path(self._synapse_config.config_basename)
+
+    @property
+    def synapse_manifest_basename(self) -> str:
+        """
+        Returns:
+            str:
+        """
+        return self._synapse_config.manifest_basename
+
+    @property
+    def synapse_master_fileview_id(self) -> str:
+        """
+        Returns:
+            str:
+        """
+        return self._synapse_config.master_fileview_id
+
+    @synapse_master_fileview_id.setter
+    def synapse_master_fileview_id(self, synapse_id: str) -> None:
+        """Sets the synapse_master_fileview_id
+
+        Args:
+            synapse_id (str): The synapse id to set
+        """
+        self._synapse_config.master_fileview_id = synapse_id
+
+    @property
+    def synapse_manifest_folder(self) -> str:
+        """
+        Returns:
+            str:
+        """
+        return self._synapse_config.manifest_folder
+
+    @property
+    def manifest_title(self) -> str:
+        """
+        Returns:
+            str:
+        """
+        return self._manifest_config.title
+
+    @property
+    def manifest_data_type(self) -> list[str]:
+        """
+        Returns:
+            list[str]:
+        """
+        return self._manifest_config.data_type
+
+    @property
+    def model_location(self) -> str:
+        """
+        Returns:
+            str:
+        """
+        return self._model_config.location
+
+    @property
+    def model_file_type(self) -> str:
+        """
+        Returns:
+            str:
+        """
+        return self._model_config.file_type
+
+    @property
+    def service_account_credentials_synapse_id(self) -> str:
+        """
+        Returns:
+            str:
+        """
+        return self._google_sheets_config.service_acct_creds_synapse_id
+
+    @property
+    def service_account_credentials_path(self) -> str:
+        """
+        Returns:
+            str:
+        """
+        return self._normalize_path(
+            self._google_sheets_config.service_acct_creds_basename
+        )
+
+    @property
+    def google_sheets_master_template_id(self) -> str:
+        """
+        Returns:
+            str:
+        """
+        return self._google_sheets_config.master_template_id
+
+    @property
+    def google_sheets_strict_validation(self) -> bool:
+        """
+        Returns:
+            bool:
+        """
+        return self._google_sheets_config.strict_validation
+
+    @property
+    def google_required_background_color(self) -> dict[str, float]:
+        """
+        Returns:
+            dict[str, float]:
+        """
+        return {
+            "red": 0.9215,
+            "green": 0.9725,
+            "blue": 0.9803,
+        }
+
+    @property
+    def google_optional_background_color(self) -> dict[str, float]:
+        """
+        Returns:
+            dict[str, float]:
+        """
+        return {
+            "red": 1.0,
+            "green": 1.0,
+            "blue": 0.9019,
+        }
+
+# This instantiates the singleton for the rest of the package
+CONFIG = Configuration()
diff --git a/schematic/configuration2/dataclasses.py b/schematic/configuration2/dataclasses.py
new file mode 100644
index 000000000..772639de6
--- /dev/null
+++ b/schematic/configuration2/dataclasses.py
@@ -0,0 +1,165 @@
+"""Pydantic dataclasses"""
+
+import re
+from pydantic.dataclasses import dataclass
+from pydantic import validator
+
+
+@dataclass()
+class SynapseConfig:
+    """
+    config_basename: the basename of the synapse config file
+    manifest_basename: the name of downloaded manifest files
+    master_fileview_id: Synapse id for the master file view
+    manifest_folder: name of the folder manifests will be saved to locally
+    """
+
+    config_basename: str = "config_basename"
+    manifest_basename: str = "synapse_storage_manifest"
+    master_fileview_id: str = "syn23643253"
+    manifest_folder: str = "manifests"
+
+    @validator("master_fileview_id")
+    @classmethod
+    def validate_synapse_id(cls, value: str) -> str:
+        """Check if string is a valid synapse id
+
+        Args:
+            value (str): A string
+
+        Raises:
+            ValueError: If the value isn't a valid Synapse id
+
+        Returns:
+            (str): The input value
+        """
+        if not re.search("^syn[0-9]+", value):
+            raise ValueError(f"{value} is not a valid Synapse id")
+        return value
+
+    @validator("config_basename", "manifest_basename", "manifest_folder")
+    @classmethod
+    def validate_string_is_not_empty(cls, value: str) -> str:
+        """Check if string is not empty(has at least one char)
+
+        Args:
+            value (str): A string
+
+        Raises:
+            ValueError: If the value is zero characters long
+
+        Returns:
+            (str): The input value
+        """
+        if len(value) == 0:
+            raise ValueError(f"{value} is an empty string")
+        return value
+
+
+@dataclass()
+class ManifestConfig:
+    """
+    title:
+    data_type:
+    """
+
+    title: str = "example"
+    data_type: list[str] = ["Biospecimen", "Patient"]
+
+    @validator("title")
+    @classmethod
+    def validate_string_is_not_empty(cls, value: str) -> str:
+        """Check if string is not empty(has at least one char)
+
+        Args:
+            value (str): A string
+
+        Raises:
+            ValueError: If the value is zero characters long
+
+        Returns:
+            (str): The input value
+        """
+        if len(value) == 0:
+            raise ValueError(f"{value} is an empty string")
+        return value
+
+
+@dataclass()
+class ModelConfig:
+    """
+    location: location of the schema jsonld, either a path, or a url
+    file_type: one of ["local"]
+    """
+
+    location: str = "tests/data/example.model.jsonld"
+    file_type: str = "local"
+
+    @validator("title", "file_type")
+    @classmethod
+    def validate_string_is_not_empty(cls, value: str) -> str:
+        """Check if string is not empty(has at least one char)
+
+        Args:
+            value (str): A string
+
+        Raises:
+            ValueError: If the value is zero characters long
+
+        Returns:
+            (str): The input value
+        """
+        if len(value) == 0:
+            raise ValueError(f"{value} is an empty string")
+        return value
+
+
+@dataclass()
+class GoogleSheetsConfig:
+    """
+    master_template_id:
+    strict_validation:
+    service_acct_creds_synapse_id:
+    service_acct_creds_basename:
+    """
+
+    service_acct_creds_synapse_id: str = "syn25171627"
+    service_acct_creds_basename: str = "schematic_service_account_creds.json"
+    master_template_id: str = "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU"
+    strict_validation: bool = True
+
+    @validator("master_template_id", "service_acct_creds_basename")
+    @classmethod
+    def validate_string_is_not_empty(cls, value: str) -> str:
+        """Check if string is not empty(has at least one char)
+
+        Args:
+            value (str): A string
+
+        Raises:
+            ValueError: If the value is zero characters long
+
+        Returns:
+            (str): The input value
+        """
+        if len(value) == 0:
+            raise ValueError(f"{value} is an empty string")
+        return value
+
+    @validator("service_acct_creds_synapse_id")
+    @classmethod
+    def validate_synapse_id(cls, value: str) -> str:
+        """Check if string is a valid synapse id
+
+        Args:
+            value (str): A string
+
+        Raises:
+            ValueError: If the value isn't a valid Synapse id
+
+        Returns:
+            (str): The input value
+        """
+        if not re.search("^syn[0-9]+", value):
+            raise ValueError(f"{value} is not a valid Synapse id")
+        return value
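The commit above wires the rest of the package around one shared instance. A minimal usage sketch, assuming schematic is installed with the configuration2 package from this patch; the "config.yml" path is a hypothetical placeholder:

    from schematic.configuration2.configuration import CONFIG

    # Defaults are usable before any file is loaded
    print(CONFIG.synapse_master_fileview_id)  # "syn23643253"

    # Loading a user file overwrites the defaults; relative paths in it are
    # resolved against the config file's parent directory by _normalize_path
    CONFIG.load_config("config.yml")  # hypothetical path
    print(CONFIG.synapse_configuration_path)
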
From bab5b186039ac58f2f9a701cd9c7e85d545776c6 Mon Sep 17 00:00:00 2001
From: andrewelamb <andrew.e.lamb@gmail.com>
Date: Thu, 11 May 2023 08:18:53 -0700
Subject: [PATCH 003/135] temp move files

---
 schematic/configuration/__init__.py      |   0
 schematic/configuration/configuration.py | 155 --------------------
 schematic/configuration/dataclasses.py   | 172 -----------------------
 3 files changed, 327 deletions(-)
 delete mode 100644 schematic/configuration/__init__.py
 delete mode 100644 schematic/configuration/configuration.py
 delete mode 100644 schematic/configuration/dataclasses.py

diff --git a/schematic/configuration/__init__.py b/schematic/configuration/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/schematic/configuration/configuration.py b/schematic/configuration/configuration.py
deleted file mode 100644
index bed338130..000000000
--- a/schematic/configuration/configuration.py
+++ /dev/null
@@ -1,155 +0,0 @@
-"""Configuration singleton for the Schematic Package"""
-
-from typing import Any, Optional
-import os
-import logging
-import yaml
-from .dataclasses import (
-    FileNameConfig,
-    SynapseConfig,
-    ManifestConfig,
-    ModelConfig,
-    GoogleConfig,
-)
-
-# Create a logger for the configuration class
-logger = logging.getLogger(__name__)
-
-
-class Configuration:
-    """
-    This class is used as a singleton by the rest of the package.
-    It is instantiated only once at the bottom of this file, and that
-    instance is imported by other modules
-    """
-
-    def __init__(self) -> None:
-        self.config_path: Optional[str] = None
-        self._file_name_config = FileNameConfig()
-        self._synapse_config = SynapseConfig()
-        self._manifest_config = ManifestConfig()
-        self._model_config = ModelConfig()
-        self._google_config = GoogleConfig()
-
-    def load_config(self, config_path: str) -> None:
-        """Loads a user created config file and overwrites any defaults listed in the file
-
-        Args:
-            config_path (str): The path to the config file
-        """
-        config_path = os.path.expanduser(config_path)
-        config_path = os.path.abspath(config_path)
-        self.config_path = config_path
-        with open(config_path, "r", encoding="utf-8") as file:
-            data = yaml.safe_load(file)
-        self._file_name_config = FileNameConfig(**data.get("definitions", {}))
-        self._synapse_config = SynapseConfig(**data.get("synapse", {}))
-        self._manifest_config = ManifestConfig(**data.get("manifest", {}))
-        self._model_config = ModelConfig(**data.get("model", {}))
-        self._google_config = GoogleConfig(**data.get("google", {}))
-
-    def _normalize_path(self, path: str) -> str:
-        """
-
-        Args:
-            path (str): The path to normalize
-
-        Returns:
-            str: The normalized path
-        """
-
-        if self.config_path:
-            # Retrieve parent directory of the config to decode relative paths
-            parent_dir = os.path.dirname(self.config_path)
-        else:
-            # assume the parent dir would be the current work dir
-            parent_dir = os.getcwd()
-
-        # Ensure absolute file paths
-        if not os.path.isabs(path):
-            path = os.path.join(parent_dir, path)
-        # And lastly, normalize file paths
-        return os.path.normpath(path)
-
-    def _log_config_value_access(
-        self, value_name: str, config_value: Any
-    ) -> None:
-        """Logs when a configuration value is being accessed
-
-        Args:
-            value_name (str): The name of the value to log
-            config_value (Any): The value from the configuration
-        """
-        logger.info(
-            "The '%s' value is being taken from the user specified configuration file: '%s'.",
-            value_name,
-            config_value,
-        )
-
-    @property
-    def service_account_credentials_path(self) -> str:
-        """
-        Returns:
-            str:
-        """
-        value = self._normalize_path(self._file_name_config.service_acct_creds)
-        self._log_config_value_access("service_account_credentials_path", value)
-        return value
-
-    @property
-    def synapse_configuration_path(self) -> str:
-        """
-        Returns:
-            str: The path to the synapse configuration file
-        """
-        value = self._normalize_path(self._file_name_config.synapse_config)
-        self._log_config_value_access("synapse_configuration_path", value)
-        return value
-
-    @property
-    def google_required_background_color(self) -> dict[str, float]:
-        """
-        Returns:
-            dict[str, float]:
-        """
-        value = {
-            "red": 0.9215,
-            "green": 0.9725,
-            "blue": 0.9803,
-        }
-        self._log_config_value_access("google_required_background_color", value)
-        return value
-
-    @property
-    def google_optional_background_color(self) -> dict[str, float]:
-        """
-        Returns:
-            dict[str, float]:
-        """
-        value = {
-            "red": 1.0,
-            "green": 1.0,
-            "blue": 0.9019,
-        }
-        self._log_config_value_access("google_required_background_color", value)
-        return value
-
-    @property
-    def synapse_master_file_view_id(self) -> str:
-        """
-        Returns:
-            str: The Synapse ID of the master file view
-        """
-        value = self._synapse_config.master_fileview
-        self._log_config_value_access("synapse_master_fileview", value)
-        return value
-
-    @property
-    def synapse_service_account_credentials_id(self) -> str:
-        """
-        Returns:
-            str:
-        """
-        value = self._synapse_config.service_acct_creds
-        self._log_config_value_access("synapse_service_account_credentials_id", value)
-        return value
diff --git a/schematic/configuration/dataclasses.py b/schematic/configuration/dataclasses.py
deleted file mode 100644
index ac4d7c6f6..000000000
--- a/schematic/configuration/dataclasses.py
+++ /dev/null
@@ -1,172 +0,0 @@
-"""Pydantic dataclasses"""
-
-import re
-from pydantic.dataclasses import dataclass
-from pydantic import validator
-
-
-@dataclass()
-class FileNameConfig:
-    """
-    synapse_config: the name of the synapse config file
-    service_acct_creds:
-    """
-
-    synapse_config: str = ".synapseConfig"
-    service_acct_creds: str = "schematic_service_account_creds.json"
-
-    @validator("synapse_config", "service_acct_creds")
-    @classmethod
-    def validate_string_is_not_empty(cls, value: str) -> str:
-        """Check if string is not empty(has at least one char)
-
-        Args:
-            value (str): A string
-
-        Raises:
-            ValueError: If the value is zero characters long
-
-        Returns:
-            (str): The input value
-        """
-        if len(value) == 0:
-            raise ValueError(f"{value} is an empty string")
-        return value
-
-
-@dataclass()
-class SynapseConfig:
-    """
-    master_fileview: Synapse id for the master file view
-    service_acct_creds:
-    manifest_folder: name of the folder manifests will be saved to locally
-    manifest_basename: the name of downloaded manifest files
-    """
-
-    master_fileview: str = "syn23643253"
-    service_acct_creds: str = "syn25171627"
-    manifest_folder: str = "manifests"
-    manifest_basename: str = "synapse_storage_manifest"
-
-    @validator("master_fileview", "service_acct_creds")
-    @classmethod
-    def validate_synapse_id(cls, value: str) -> str:
-        """Check if string is a valid synapse id
-
-        Args:
-            value (str): A string
-
-        Raises:
-            ValueError: If the value isn't a valid Synapse id
-
-        Returns:
-            (str): The input value
-        """
-        if not re.search("^syn[0-9]+", value):
-            raise ValueError(f"{value} is not a valid Synapse id")
-        return value
-
-    @validator("manifest_folder", "manifest_basename")
-    @classmethod
-    def validate_string_is_not_empty(cls, value: str) -> str:
-        """Check if string is not empty(has at least one char)
-
-        Args:
-            value (str): A string
-
-        Raises:
-            ValueError: If the value is zero characters long
-
-        Returns:
-            (str): The input value
-        """
-        if len(value) == 0:
-            raise ValueError(f"{value} is an empty string")
-        return value
-
-
-@dataclass()
-class ManifestConfig:
-    """
-    title:
-    data_type:
-    """
-
-    title: str = "example"
-    data_type: list[str] = ["Biospecimen", "Patient"]
-
-    @validator("title")
-    @classmethod
-    def validate_string_is_not_empty(cls, value: str) -> str:
-        """Check if string is not empty(has at least one char)
-
-        Args:
-            value (str): A string
-
-        Raises:
-            ValueError: If the value is zero characters long
-
-        Returns:
-            (str): The input value
-        """
-        if len(value) == 0:
-            raise ValueError(f"{value} is an empty string")
-        return value
-
-
-@dataclass()
-class ModelConfig:
-    """
-    location: location of the schema jsonld, either a path, ro url
-    file_type: one of ["local"]
-    """
-
-    location: str = "tests/data/example.model.jsonld"
-    file_type: str = "local"
-
-    @validator("title", "file_type")
-    @classmethod
-    def validate_string_is_not_empty(cls, value: str) -> str:
-        """Check if string is not empty(has at least one char)
-
-        Args:
-            value (str): A string
-
-        Raises:
-            ValueError: If the value is zero characters long
-
-        Returns:
-            (str): The input value
-        """
-        if len(value) == 0:
-            raise ValueError(f"{value} is an empty string")
-        return value
-
-
-@dataclass()
-class GoogleConfig:
-    """
-    master_template_id:
-    strict_validation:
-    """
-
-    master_template_id: str = "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU"
-    strict_validation: bool = True
-
-    @validator("master_template_id")
-    @classmethod
-    def validate_string_is_not_empty(cls, value: str) -> str:
-        """Check if string is not empty(has at least one char)
-
-        Args:
-            value (str): A string
-
-        Raises:
-            ValueError: If the value is zero characters long
-
-        Returns:
-            (str): The input value
-        """
-        if len(value) == 0:
-            raise ValueError(f"{value} is an empty string")
-        return value

From 6ef3f4a4d9d157f01769bffb89981b72f6cada90 Mon Sep 17 00:00:00 2001
From: andrewelamb <andrew.e.lamb@gmail.com>
Date: Thu, 11 May 2023 08:19:23 -0700
Subject: [PATCH 004/135] first commit

---
 config_example.yml | 22 ++++++++++++++++++++++
 1 file changed, 22 insertions(+)
 create mode 100644 config_example.yml

diff --git a/config_example.yml b/config_example.yml
new file mode 100644
index 000000000..95f6b6ad8
--- /dev/null
+++ b/config_example.yml
@@ -0,0 +1,22 @@
+asset_store:
+  synapse:
+    config_basename: ".synapseConfig"
+    manifest_basename: 'synapse_storage_manifest'
+    master_fileview_id: 'syn23643253'
+    manifest_folder: 'manifests'
+
+manifest:
+  title: 'example'
+  data_type:
+    - 'Biospecimen'
+    - 'Patient'
+
+model:
+  location: 'tests/data/example.model.jsonld'
+  file_type: 'local'
+
+google_sheets:
+  service_acct_creds_synapse_id: 'syn25171627'
+  service_acct_creds_basename: "schematic_service_account_creds.json"
+  master_template_id: '1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU'
+  strict_validation: true
manifest_basename="file_name", + master_fileview_id="syn1", + manifest_folder="folder_name", + ) From e7c11649ed056562c21c1a1853e670ed064b5815 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Thu, 11 May 2023 09:27:22 -0700 Subject: [PATCH 006/135] all tests working, 100% coverage --- schematic/configuration2/dataclasses.py | 21 ++-- tests/data/test_config2.yml | 21 ++++ tests/test_configuration.py | 151 +++++++++++++++++++++++- 3 files changed, 182 insertions(+), 11 deletions(-) create mode 100644 tests/data/test_config2.yml diff --git a/schematic/configuration2/dataclasses.py b/schematic/configuration2/dataclasses.py index 772639de6..83c0a0365 100644 --- a/schematic/configuration2/dataclasses.py +++ b/schematic/configuration2/dataclasses.py @@ -1,11 +1,14 @@ """Pydantic dataclasses""" import re +from dataclasses import field from pydantic.dataclasses import dataclass -from pydantic import validator +from pydantic import validator, ConfigDict +# This turns on validation for value assignments after creation +pydantic_config = ConfigDict(validate_assignment=True) -@dataclass() +@dataclass(config=pydantic_config) class SynapseConfig: """ config_basename: teh basename of the synapse config file @@ -13,8 +16,8 @@ class SynapseConfig: master_fileview_id: Synapse id for the master file view manifest_folder: name of the folder manifests will be saved to locally """ - - config_basename: str = "config_basename" + validate_assignment = True + config_basename: str = ".synapseConfig" manifest_basename: str = "synapse_storage_manifest" master_fileview_id: str = "syn23643253" manifest_folder: str = "manifests" @@ -56,7 +59,7 @@ def validate_string_is_not_empty(cls, value: str) -> str: return value -@dataclass() +@dataclass(config=pydantic_config) class ManifestConfig: """ title: @@ -64,7 +67,7 @@ class ManifestConfig: """ title: str = "example" - data_type: list[str] = ["Biospecimen", "Patient"] + data_type: list[str] = field(default_factory= lambda: ["Biospecimen", "Patient"]) @validator("title") @classmethod @@ -85,7 +88,7 @@ def validate_string_is_not_empty(cls, value: str) -> str: return value -@dataclass() +@dataclass(config=pydantic_config) class ModelConfig: """ location: location of the schema jsonld, either a path, ro url @@ -95,7 +98,7 @@ class ModelConfig: location: str = "tests/data/example.model.jsonld" file_type: str = "local" - @validator("title", "file_type") + @validator("location", "file_type") @classmethod def validate_string_is_not_empty(cls, value: str) -> str: """Check if string is not empty(has at least one char) @@ -114,7 +117,7 @@ def validate_string_is_not_empty(cls, value: str) -> str: return value -@dataclass() +@dataclass(config=pydantic_config) class GoogleSheetsConfig: """ master_template_id: diff --git a/tests/data/test_config2.yml b/tests/data/test_config2.yml new file mode 100644 index 000000000..99b2d4546 --- /dev/null +++ b/tests/data/test_config2.yml @@ -0,0 +1,21 @@ +asset_store: + synapse: + config_basename: "file_name" + manifest_basename: 'file_name' + master_fileview_id: 'syn1' + manifest_folder: 'folder_name' + +manifest: + title: 'title' + data_type: + - 'data_type' + +model: + location: 'model.jsonld' + file_type: 'not_local' + +google_sheets: + service_acct_creds_synapse_id: 'syn1' + service_acct_creds_basename: "creds.json" + master_template_id: 'id' + strict_validation: false diff --git a/tests/test_configuration.py b/tests/test_configuration.py index 549d9d91e..5f84ee9b0 100644 --- a/tests/test_configuration.py +++ b/tests/test_configuration.py @@ -1,18 
+1,22 @@ """Testing for Configuration module""" + +import os import pytest from pydantic import ValidationError -from schematic.configuration.dataclasses import ( + +from schematic.configuration2.dataclasses import ( SynapseConfig, ManifestConfig, ModelConfig, GoogleSheetsConfig, ) +from schematic.configuration2.configuration import Configuration class TestDataclasses: """Testing for pydantic dataclasses""" - def test_synapse_config(self): + def test_synapse_config(self) -> None: """Testing for SynapseConfig""" assert isinstance(SynapseConfig(), SynapseConfig) assert isinstance( @@ -32,3 +36,146 @@ def test_synapse_config(self): master_fileview_id="syn1", manifest_folder="folder_name", ) + + with pytest.raises(ValidationError): + SynapseConfig( + config_basename="file_name", + manifest_basename="file_name", + master_fileview_id="syn", + manifest_folder="folder_name", + ) + + with pytest.raises(ValidationError): + SynapseConfig( + config_basename="", + manifest_basename="file_name", + master_fileview_id="syn", + manifest_folder="folder_name", + ) + + def test_manifest_config(self) -> None: + """Testing for ManifestConfig""" + assert isinstance(ManifestConfig(), ManifestConfig) + assert isinstance( + ManifestConfig(title="title", data_type=[]), + ManifestConfig, + ) + with pytest.raises(ValidationError): + ManifestConfig(title="title", data_type="type") + with pytest.raises(ValidationError): + ManifestConfig(title="", data_type="type") + + def test_model_config(self) -> None: + """Testing for ModelConfig""" + assert isinstance(ModelConfig(), ModelConfig) + assert isinstance( + ModelConfig(location="url", file_type="local"), + ModelConfig, + ) + with pytest.raises(ValidationError): + ModelConfig(location="", file_type="local") + + def test_google_sheets_config(self) -> None: + """Testing for ModelConfig""" + assert isinstance(GoogleSheetsConfig(), GoogleSheetsConfig) + assert isinstance( + GoogleSheetsConfig( + service_acct_creds_basename="file_name", + service_acct_creds_synapse_id="syn1", + master_template_id="id", + strict_validation=True, + ), + GoogleSheetsConfig, + ) + with pytest.raises(ValidationError): + GoogleSheetsConfig( + service_acct_creds_basename="file_name", + service_acct_creds_synapse_id="syn1", + master_template_id="id", + strict_validation="tru", + ) + with pytest.raises(ValidationError): + GoogleSheetsConfig( + service_acct_creds_basename="", + service_acct_creds_synapse_id="syn1", + master_template_id="id", + strict_validation=True, + ) + with pytest.raises(ValidationError): + GoogleSheetsConfig( + service_acct_creds_basename="file_name", + service_acct_creds_synapse_id="syn", + master_template_id="id", + strict_validation=True, + ) + + +class TestConfiguration: + """Testing Configuration class""" + + def test_init(self) -> None: + """Testing for Configuration.__init__""" + config = Configuration() + assert config.config_path is None + assert config.synapse_configuration_path != ".synapseConfig" + assert os.path.basename(config.synapse_configuration_path) == ".synapseConfig" + assert config.synapse_manifest_basename == "synapse_storage_manifest" + assert config.synapse_master_fileview_id == "syn23643253" + assert config.synapse_manifest_folder == "manifests" + assert config.manifest_title == "example" + assert config.manifest_data_type == ["Biospecimen", "Patient"] + assert config.model_location == "tests/data/example.model.jsonld" + assert config.model_file_type == "local" + assert config.service_account_credentials_synapse_id + assert ( + 
config.service_account_credentials_path + != "schematic_service_account_creds.json" + ) + assert ( + os.path.basename(config.service_account_credentials_path) + == "schematic_service_account_creds.json" + ) + assert ( + config.google_sheets_master_template_id + == "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU" + ) + assert config.google_sheets_strict_validation + assert config.google_required_background_color == { + "red": 0.9215, + "green": 0.9725, + "blue": 0.9803, + } + assert config.google_optional_background_color == { + "red": 1.0, + "green": 1.0, + "blue": 0.9019, + } + + def test_load_config(self) -> None: + """Testing for Configuration.load_config""" + config = Configuration() + config.load_config("tests/data/test_config2.yml") + assert os.path.basename(config.config_path) == "test_config2.yml" + assert os.path.basename(config.synapse_configuration_path) == "file_name" + assert config.synapse_manifest_basename == "file_name" + assert config.synapse_master_fileview_id == "syn1" + assert config.synapse_manifest_folder == "folder_name" + assert config.manifest_title == "title" + assert config.manifest_data_type == ["data_type"] + assert config.model_location == "model.jsonld" + assert config.model_file_type == "not_local" + assert config.service_account_credentials_synapse_id + assert ( + os.path.basename(config.service_account_credentials_path) == "creds.json" + ) + assert config.google_sheets_master_template_id == "id" + assert not config.google_sheets_strict_validation + + def test_set_synapse_master_fileview_id(self) -> None: + """Testing for Configuration synapse_master_fileview_id setter""" + config = Configuration() + assert config.synapse_master_fileview_id == "syn23643253" + config.synapse_master_fileview_id = "syn1" + assert config.synapse_master_fileview_id == "syn1" + with pytest.raises(ValidationError): + config.synapse_master_fileview_id = "syn" From b9f63831f58d159875a0b726930b9c4bff4f9dca Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Thu, 11 May 2023 09:30:10 -0700 Subject: [PATCH 007/135] removed old configuration files, renamed new ones --- schematic/configuration.py | 132 ------------------ .../__init__.py | 0 .../configuration.py | 0 .../dataclasses.py | 0 tests/data/test_config.yml | 32 +++-- tests/data/test_config2.yml | 21 --- tests/test_configuration.py | 8 +- 7 files changed, 21 insertions(+), 172 deletions(-) delete mode 100644 schematic/configuration.py rename schematic/{configuration2 => configuration}/__init__.py (100%) rename schematic/{configuration2 => configuration}/configuration.py (100%) rename schematic/{configuration2 => configuration}/dataclasses.py (100%) delete mode 100644 tests/data/test_config2.yml diff --git a/schematic/configuration.py b/schematic/configuration.py deleted file mode 100644 index b6ff6da02..000000000 --- a/schematic/configuration.py +++ /dev/null @@ -1,132 +0,0 @@ -from typing import Optional -import os -import yaml - - -class Configuration(object): - def __init__(self): - # path to config.yml file - self.CONFIG_PATH = None - # entire configuration data - self.DATA = None - - def __getattribute__(self, name): - value = super().__getattribute__(name) - if value is None and "SCHEMATIC_CONFIG_CONTENT" in os.environ: - self.load_config_content_from_env() - value = super().__getattribute__(name) - elif value is None and "SCHEMATIC_CONFIG" in os.environ: - self.load_config_from_env() - value = super().__getattribute__(name) - elif ( - value is None - and "SCHEMATIC_CONFIG" not in os.environ - and "SCHEMATIC_CONFIG_CONTENT" not 
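With ConfigDict(validate_assignment=True) from the patch above, the validators now also run on attribute assignment, not just at construction, which is what test_set_synapse_master_fileview_id exercises. A minimal sketch of that behavior, assuming the configuration2 package as committed here:

    from pydantic import ValidationError

    from schematic.configuration2.configuration import Configuration

    config = Configuration()
    config.synapse_master_fileview_id = "syn1"  # passes the ^syn[0-9]+ check

    try:
        config.synapse_master_fileview_id = "syn"  # no digits, rejected
    except ValidationError as error:
        print(error)

Note that validate_synapse_id uses re.search without an end anchor, so a value such as "syn1x" would still be accepted.
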
From b9f63831f58d159875a0b726930b9c4bff4f9dca Mon Sep 17 00:00:00 2001
From: andrewelamb <andrew.e.lamb@gmail.com>
Date: Thu, 11 May 2023 09:30:10 -0700
Subject: [PATCH 007/135] removed old configuration files, renamed new ones

---
 schematic/configuration.py                                    | 132 ----------
 schematic/{configuration2 => configuration}/__init__.py      |   0
 schematic/{configuration2 => configuration}/configuration.py |   0
 schematic/{configuration2 => configuration}/dataclasses.py   |   0
 tests/data/test_config.yml                                    |  32 +++--
 tests/data/test_config2.yml                                   |  21 ---
 tests/test_configuration.py                                   |   8 +-
 7 files changed, 21 insertions(+), 172 deletions(-)
 delete mode 100644 schematic/configuration.py
 rename schematic/{configuration2 => configuration}/__init__.py (100%)
 rename schematic/{configuration2 => configuration}/configuration.py (100%)
 rename schematic/{configuration2 => configuration}/dataclasses.py (100%)
 delete mode 100644 tests/data/test_config2.yml

diff --git a/schematic/configuration.py b/schematic/configuration.py
deleted file mode 100644
index b6ff6da02..000000000
--- a/schematic/configuration.py
+++ /dev/null
@@ -1,132 +0,0 @@
-from typing import Optional
-import os
-import yaml
-
-
-class Configuration(object):
-    def __init__(self):
-        # path to config.yml file
-        self.CONFIG_PATH = None
-        # entire configuration data
-        self.DATA = None
-
-    def __getattribute__(self, name):
-        value = super().__getattribute__(name)
-        if value is None and "SCHEMATIC_CONFIG_CONTENT" in os.environ:
-            self.load_config_content_from_env()
-            value = super().__getattribute__(name)
-        elif value is None and "SCHEMATIC_CONFIG" in os.environ:
-            self.load_config_from_env()
-            value = super().__getattribute__(name)
-        elif (
-            value is None
-            and "SCHEMATIC_CONFIG" not in os.environ
-            and "SCHEMATIC_CONFIG_CONTENT" not in os.environ
-        ):
-            raise AttributeError(
-                "The '%s' configuration field was accessed, but it hasn't been "
-                "set yet, presumably because the schematic.CONFIG.load_config() "
-                "method hasn't been run yet. Alternatively, you can re-run this "
-                "code with the 'SCHEMATIC_CONFIG' environment variable set to "
-                "the config.yml file, which will be automatically loaded." % name
-            )
-        return value
-
-    def __getitem__(self, key):
-        return self.DATA[key]
-
-    def get(self, key, default):
-        try:
-            value = self[key]
-        except AttributeError or KeyError:
-            value = default
-        return value
-
-    def load_config_content(self, str_yaml: str) -> Optional[dict]:
-        try:
-            config_data = yaml.safe_load(str_yaml)
-        except yaml.YAMLError as exc:
-            print(exc)
-            return None
-        return config_data
-
-    @staticmethod
-    def load_yaml(file_path: str) -> Optional[dict]:
-        with open(file_path, "r") as stream:
-            try:
-                config_data = yaml.safe_load(stream)
-            except yaml.YAMLError as exc:
-                print(exc)
-                return None
-        return config_data
-
-    def normalize_path(self, path):
-
-        if self.CONFIG_PATH:
-            # Retrieve parent directory of the config to decode relative paths
-            parent_dir = os.path.dirname(self.CONFIG_PATH)
-        else:
-            # assume the parent dir would be the current work dir
-            parent_dir = os.getcwd()
-
-        # Ensure absolute file paths
-        if not os.path.isabs(path):
-            path = os.path.join(parent_dir, path)
-        # And lastly, normalize file paths
-        return os.path.normpath(path)
-
-    def load_config_from_env(self):
-        schematic_config = os.environ["SCHEMATIC_CONFIG"]
-        print(
-            "Loading config YAML file specified in 'SCHEMATIC_CONFIG' "
-            "environment variable: %s" % schematic_config
-        )
-        return self.load_config(schematic_config)
-
-    def load_config_content_from_env(self):
-        schematic_config_content = os.environ["SCHEMATIC_CONFIG_CONTENT"]
-
-        print("Loading content of config file: %s" % schematic_config_content)
-
-        config_content_yaml = self.load_config_content(schematic_config_content)
-        self.DATA = config_content_yaml
-
-        return self.DATA
-
-    def load_config(self, config_path=None, asset_view=None):
-        # If config_path is None, try loading from environment
-        if config_path is None and "SCHEMATIC_CONFIG" in os.environ:
-            return self.load_config_from_env()
-        # Otherwise, raise an error
-        elif config_path is None and "SCHEMATIC_CONFIG" not in os.environ:
-            raise ValueError(
-                "No configuration file provided to the `config_path` argument "
-                "in `load_config`()`, nor was one specified in the "
-                "'SCHEMATIC_CONFIG' environment variable. Quitting now..."
-            )
-        # Load configuration YAML file
-        config_path = os.path.expanduser(config_path)
-        config_path = os.path.abspath(config_path)
-        self.DATA = self.load_yaml(config_path)
-        self.CONFIG_PATH = config_path
-        # handle user input (for API endpoints)
-        if asset_view:
-            self.DATA["synapse"]["master_fileview"] = asset_view
-
-        # Return self.DATA as a side-effect
-        return self.DATA
-
-    @property
-    def SERVICE_ACCT_CREDS(self):
-        self._SERVICE_ACCT_CREDS = self.DATA["definitions"]["service_acct_creds"]
-        self._SERVICE_ACCT_CREDS = self.normalize_path(self._SERVICE_ACCT_CREDS)
-        return self._SERVICE_ACCT_CREDS
-
-    @property
-    def SYNAPSE_CONFIG_PATH(self):
-        self._SYNAPSE_CONFIG_PATH = self.DATA["definitions"]["synapse_config"]
-        self._SYNAPSE_CONFIG_PATH = self.normalize_path(self._SYNAPSE_CONFIG_PATH)
-        return self._SYNAPSE_CONFIG_PATH
-
-
-CONFIG = Configuration()
diff --git a/tests/data/test_config.yml b/tests/data/test_config.yml
index 72c1dbf9c..99b2d4546 100644
--- a/tests/data/test_config.yml
+++ b/tests/data/test_config.yml
@@ -1,19 +1,21 @@
-definitions:
-  creds_path: "../../credentials.json"
-  token_pickle: "token.pickle"
-  synapse_config: "../../.synapseConfig" ### Note: this key is required for people who use Synapse token authentication approach.
-  service_acct_creds: "../../schematic_service_account_creds.json" ## Note: this key is required for google drive services
+asset_store:
+  synapse:
+    config_basename: "file_name"
+    manifest_basename: 'file_name'
+    master_fileview_id: 'syn1'
+    manifest_folder: 'folder_name'
 
-synapse:
-  master_fileview: "syn23643253"
-  manifest_basename: "synapse_storage_manifest"
-  manifest_folder: 'manifests'
+manifest:
+  title: 'title'
+  data_type:
+    - 'data_type'
 
 model:
-  input:
-    location: 'example.model.jsonld'
-    file_type: 'local'
+  location: 'model.jsonld'
+  file_type: 'not_local'
 
-style:
-  google_manifest:
-    master_template_id: '1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU'
+google_sheets:
+  service_acct_creds_synapse_id: 'syn1'
+  service_acct_creds_basename: "creds.json"
+  master_template_id: 'id'
+  strict_validation: false
diff --git a/tests/data/test_config2.yml b/tests/data/test_config2.yml
deleted file mode 100644
index 99b2d4546..000000000
--- a/tests/data/test_config2.yml
+++ /dev/null
@@ -1,21 +0,0 @@
-asset_store:
-  synapse:
-    config_basename: "file_name"
-    manifest_basename: 'file_name'
-    master_fileview_id: 'syn1'
-    manifest_folder: 'folder_name'
-
-manifest:
-  title: 'title'
-  data_type:
-    - 'data_type'
-
-model:
-  location: 'model.jsonld'
-  file_type: 'not_local'
-
-google_sheets:
-  service_acct_creds_synapse_id: 'syn1'
-  service_acct_creds_basename: "creds.json"
-  master_template_id: 'id'
-  strict_validation: false
diff --git a/tests/test_configuration.py b/tests/test_configuration.py
index 5f84ee9b0..6a65e3091 100644
--- a/tests/test_configuration.py
+++ b/tests/test_configuration.py
@@ -4,13 +4,13 @@
 import pytest
 from pydantic import ValidationError
 
-from schematic.configuration2.dataclasses import (
+from schematic.configuration.dataclasses import (
     SynapseConfig,
     ManifestConfig,
     ModelConfig,
     GoogleSheetsConfig,
 )
-from schematic.configuration2.configuration import Configuration
+from schematic.configuration.configuration import Configuration
 
 
 class TestDataclasses:
@@ -154,8 +154,8 @@ def test_load_config(self) -> None:
         """Testing for Configuration.load_config"""
         config = Configuration()
-        config.load_config("tests/data/test_config2.yml")
-        assert os.path.basename(config.config_path) == "test_config2.yml"
+        config.load_config("tests/data/test_config.yml")
+        assert os.path.basename(config.config_path) == "test_config.yml"
         assert os.path.basename(config.synapse_configuration_path) == "file_name"
         assert config.synapse_manifest_basename == "file_name"
         assert config.synapse_master_fileview_id == "syn1"

From 148af01ba2bf5f62c7a303205f85f3c79ec57fdd Mon Sep 17 00:00:00 2001
From: andrewelamb <andrew.e.lamb@gmail.com>
Date: Thu, 11 May 2023 09:33:41 -0700
Subject: [PATCH 008/135] fixed main init and conftest

---
 schematic/__init__.py | 8 ++++----
 tests/conftest.py     | 2 +-
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/schematic/__init__.py b/schematic/__init__.py
index cca9173b5..eb9597f23 100644
--- a/schematic/__init__.py
+++ b/schematic/__init__.py
@@ -4,7 +4,7 @@
 import click
 import click_log
 
-from schematic.configuration import CONFIG
+from schematic.configuration.configuration import CONFIG
 from schematic.loader import LOADER
 from schematic.utils.google_api_utils import download_creds_file
 from schematic.utils.cli_utils import query_dict
@@ -35,10 +35,10 @@ def init(config):
     """Initialize authentication for schematic."""
     try:
         logger.debug(f"Loading config file contents in '{config}'")
-        obj = CONFIG.load_config(config)
+        CONFIG.load_config(config)
-    except ValueError as e:
+    except ValueError as exc:
         logger.error("'--config' not provided or environment variable not set.")
-        logger.exception(e)
+        logger.exception(exc)
         sys.exit(1)
 
     # download credentials file based on selected mode of authentication
diff --git a/tests/conftest.py b/tests/conftest.py
index 7af6eb9a5..7128c9e53 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -9,7 +9,7 @@
 from dotenv import load_dotenv, find_dotenv
 
 from schematic.schemas.explorer import SchemaExplorer
-from schematic.configuration import CONFIG
+from schematic.configuration.configuration import CONFIG
 from schematic.utils.df_utils import load_df
 
 load_dotenv()
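After the rename in patch 007, the import path changes from schematic.configuration2 to schematic.configuration, and patch 008 updates the entry point and the test fixtures accordingly. A sketch of the pattern the CLI now follows, mirroring the try/except in the patched schematic/__init__.py (the config path here is just the example file from patch 004):

    import sys

    from schematic.configuration.configuration import CONFIG

    try:
        CONFIG.load_config("config_example.yml")
    except ValueError as exc:
        print(exc)
        sys.exit(1)
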
From 2a5cc67661a3f966d0f43715da6dcc35b492bf93 Mon Sep 17 00:00:00 2001
From: andrewelamb <andrew.e.lamb@gmail.com>
Date: Fri, 12 May 2023 07:25:53 -0700
Subject: [PATCH 009/135] temp commit

---
 .gitignore                                |   6 +-
 great_expectations/great_expectations.yml | 113 ++++++++--------
 schematic/configuration/configuration.py  |  17 ++--
 schematic/manifest/commands.py            |  38 ++++----
 schematic/manifest/generator.py           |  41 ++------
 schematic/models/commands.py              |  40 ++++----
 schematic/schemas/generator.py            |  23 +----
 schematic/schemas/validator.py            |   2 -
 schematic/store/synapse.py                |  56 ++++------
 schematic/utils/cli_utils.py              |  90 ++---------------
 schematic/utils/google_api_utils.py       |  38 ++------
 schematic/utils/io_utils.py               |   2 +-
 schematic/utils/validate_utils.py         |   2 +-
 schematic/visualization/commands.py       |  16 +--
 schematic_api/api/routes.py               |  25 ++---
 tests/conftest.py                         |   6 --
 tests/data/test_config.yml                |  26 ++---
 tests/data/test_config2.yml               |  21 ++++
 tests/test_api.py                         |  10 +-
 tests/test_cli.py                         |  14 +--
 tests/test_configuration.py               |   4 +-
 tests/test_manifest.py                    |   3 +-
 tests/test_store.py                       |  21 ++--
 tests/test_utils.py                       |  35 -------
 24 files changed, 220 insertions(+), 429 deletions(-)
 create mode 100644 tests/data/test_config2.yml

diff --git a/.gitignore b/.gitignore
index f5b3beb50..915c987fe 100644
--- a/.gitignore
+++ b/.gitignore
@@ -172,4 +172,8 @@ tests/data/mock_manifests/valid_test_manifest_censored.csv
 tests/data/mock_manifests/Rule_Combo_Manifest_censored.csv
 
 # Pickle file
-tests/data/schema.gpickle
\ No newline at end of file
+tests/data/schema.gpickle
+
+# Created during testing
+Example*
+manifests/*
\ No newline at end of file
diff --git a/great_expectations/great_expectations.yml b/great_expectations/great_expectations.yml
index 60b73a8f3..023149ed8 100644
data_docs_sites:
  local_site:
    class_name: SiteBuilder
    show_how_to_buttons: true
    store_backend:
      class_name: TupleFilesystemStoreBackend
      base_directory: uncommitted/data_docs/local_site/
      root_directory: /home/alamb/repos/schematic/great_expectations
    site_index_builder:
      class_name: DefaultSiteIndexBuilder
anonymous_usage_statistics:
  data_context_id: 1130ca8d-9731-45d9-bc05-2032535250c8
  enabled: true
include_rendered_content:
  expectation_suite: false
  expectation_validation_result: false
  globally: false
datasources:
  pandas:
    class_name: Datasource
    module_name: great_expectations.datasource
    data_connectors:
      default_runtime_data_connector_name:
        batch_identifiers:
          - default_identifier_name
        class_name: RuntimeDataConnector
    execution_engine:
      class_name: PandasExecutionEngine
  example_datasource:
    class_name: Datasource
    module_name: great_expectations.datasource
    data_connectors:
      default_runtime_data_connector_name:
        batch_identifiers:
          - default_identifier_name
        class_name: RuntimeDataConnector
        module_name: great_expectations.datasource.data_connector
    execution_engine:
      class_name: PandasExecutionEngine
      module_name: great_expectations.execution_engine
notebooks:
evaluation_parameter_store_name: evaluation_parameter_store
expectations_store_name: expectations_store
config_variables_file_path:
validations_store_name: validations_store
profiler_store_name: profiler_store
stores:
  expectations_store:
    class_name: ExpectationsStore
    store_backend:
      class_name: TupleFilesystemStoreBackend
      base_directory: expectations/
      root_directory: /home/alamb/repos/schematic/great_expectations
  validations_store:
    class_name: ValidationsStore
    store_backend:
      class_name: TupleFilesystemStoreBackend
      base_directory: uncommitted/validations/
      root_directory: /home/alamb/repos/schematic/great_expectations
  evaluation_parameter_store:
    class_name: EvaluationParameterStore
  checkpoint_store:
    class_name: CheckpointStore
    store_backend:
      class_name: TupleFilesystemStoreBackend
      suppress_store_backend_id: true
      base_directory: checkpoints/
      root_directory: /home/alamb/repos/schematic/great_expectations
  profiler_store:
    class_name: ProfilerStore
    store_backend:
      class_name: TupleFilesystemStoreBackend
      suppress_store_backend_id: true
      base_directory: profilers/
      root_directory: /home/alamb/repos/schematic/great_expectations
plugins_directory:
config_version: 3.0
checkpoint_store_name: checkpoint_store
# Welcome to Great Expectations! Always know what to expect from your data.
#
# Here you can define datasources, batch kwargs generators, integrations and
# more. This file is intended to be committed to your repo. For help with
# configuration please:
#   - Read our docs: https://docs.greatexpectations.io/en/latest/reference/spare_parts/data_context_reference.html#configuration
#   - Join our slack channel: http://greatexpectations.io/slack

# config_version refers to the syntactic version of this config file, and is used in maintaining backwards compatibility
# It is auto-generated and usually does not need to be changed.
config_version: 3.0

# Datasources tell Great Expectations where your data lives and how to get it.
# You can use the CLI command `great_expectations datasource new` to help you
# add a new datasource. Read more at https://docs.greatexpectations.io/en/latest/reference/core_concepts/datasource.html
datasources:
  Manifest:
    module_name: great_expectations.datasource
    execution_engine:
      module_name: great_expectations.execution_engine
      class_name: PandasExecutionEngine
    data_connectors:
      default_runtime_data_connector_name:
        module_name: great_expectations.datasource.data_connector
        class_name: RuntimeDataConnector
        batch_identifiers:
          - default_identifier
    class_name: Datasource
  example_datasource:
    module_name: great_expectations.datasource
    execution_engine:
      module_name: great_expectations.execution_engine
      class_name: PandasExecutionEngine
    data_connectors:
      default_runtime_data_connector_name:
        module_name: great_expectations.datasource.data_connector
        class_name: RuntimeDataConnector
        batch_identifiers:
          - default_identifier_name
    class_name: Datasource
config_variables_file_path: uncommitted/config_variables.yml

# The plugins_directory will be added to your python path for custom modules
# used to override and extend Great Expectations.
plugins_directory: plugins/

stores:
# Stores are configurable places to store things like Expectations, Validations
# Data Docs, and more. These are for advanced users only - most users can simply
# leave this section alone.
#
# Three stores are required: expectations, validations, and
# evaluation_parameters, and must exist with a valid store entry. Additional
# stores can be configured for uses such as data_docs, etc.
  expectations_store:
    class_name: ExpectationsStore
    store_backend:
      class_name: TupleFilesystemStoreBackend
      base_directory: expectations/

  validations_store:
    class_name: ValidationsStore
    store_backend:
      class_name: TupleFilesystemStoreBackend
      base_directory: uncommitted/validations/

  evaluation_parameter_store:
    # Evaluation Parameters enable dynamic expectations. Read more here:
    # https://docs.greatexpectations.io/en/latest/reference/core_concepts/evaluation_parameters.html
    class_name: EvaluationParameterStore

  checkpoint_store:
    class_name: CheckpointStore
    store_backend:
      class_name: TupleFilesystemStoreBackend
      suppress_store_backend_id: true
      base_directory: checkpoints/

  profiler_store:
    class_name: ProfilerStore
    store_backend:
      class_name: TupleFilesystemStoreBackend
      suppress_store_backend_id: true
      base_directory: profilers/

expectations_store_name: expectations_store
validations_store_name: validations_store
evaluation_parameter_store_name: evaluation_parameter_store
checkpoint_store_name: checkpoint_store

data_docs_sites:
  # Data Docs make it simple to visualize data quality in your project. These
  # include Expectations, Validations & Profiles. The are built for all
  # Datasources from JSON artifacts in the local repo including validations &
  # profiles from the uncommitted directory. Read more at https://docs.greatexpectations.io/en/latest/reference/core_concepts/data_docs.html
  local_site:
    class_name: SiteBuilder
    # set to false to hide how-to buttons in Data Docs
    show_how_to_buttons: true
    store_backend:
      class_name: TupleFilesystemStoreBackend
      base_directory: uncommitted/data_docs/local_site/
    site_index_builder:
      class_name: DefaultSiteIndexBuilder

anonymous_usage_statistics:
  data_context_id: 1130ca8d-9731-45d9-bc05-2032535250c8
  enabled: true
notebooks:
diff --git a/schematic/configuration/configuration.py b/schematic/configuration/configuration.py
index 263cde3fc..c3e11cd40 100644
--- a/schematic/configuration/configuration.py
+++ b/schematic/configuration/configuration.py
@@ -20,11 +20,13 @@ class Configuration:
     def __init__(self) -> None:
         self.config_path: Optional[str] = None
+        self._parent_directory = os.getcwd()
         self._synapse_config = SynapseConfig()
         self._manifest_config = ManifestConfig()
         self._model_config = ModelConfig()
         self._google_sheets_config = GoogleSheetsConfig()
 
+
     def load_config(self, config_path: str) -> None:
         """Loads a user created config file and overwrites any defaults listed in the file
 
         Args:
             config_path (str): The path to the config file
         """
         config_path = os.path.expanduser(config_path)
         config_path = os.path.abspath(config_path)
         self.config_path = config_path
+
+        self._parent_directory = os.path.dirname(config_path)
+
         with open(config_path, "r", encoding="utf-8") as file:
             data = yaml.safe_load(file)
         self._synapse_config = SynapseConfig(
 
     def _normalize_path(self, path: str) -> str:
         """
 
         Args:
             path (str): The path to normalize
 
         Returns:
             str: The normalized path
         """
-
-        if self.config_path:
-            # Retrieve parent directory of the config to decode relative paths
-            parent_dir = os.path.dirname(self.config_path)
-        else:
-            # assume the parent dir would be the current work dir
-            parent_dir = os.getcwd()
-
-        # Ensure absolute file paths
         if not os.path.isabs(path):
-            path = os.path.join(parent_dir, path)
-        # And lastly, normalize file paths
+            path = os.path.join(self._parent_directory, path)
         return os.path.normpath(path)
 
     @property
diff --git a/schematic/manifest/commands.py b/schematic/manifest/commands.py
index 5332abdab..13b0f8db0 100644
--- a/schematic/manifest/commands.py
+++ b/schematic/manifest/commands.py
@@ -8,9 +8,8 @@
 from typing import List
 
 from schematic.manifest.generator import ManifestGenerator
-from schematic.utils.cli_utils import fill_in_from_config, query_dict, parse_synIDs
+from schematic.utils.cli_utils import log_value_from_config, query_dict, parse_synIDs
 from schematic.help import manifest_commands
-from schematic import CONFIG
 from schematic.schemas.generator import SchemaGenerator
 from schematic.utils.google_api_utils import export_manifest_csv, export_manifest_excel, export_manifest_drive_service
 from schematic.store.synapse import SynapseStorage
@@ -19,7 +18,7 @@
 click_log.basic_config(logger)
 
 CONTEXT_SETTINGS = dict(help_option_names=["--help", "-h"])  # help options
-
+from schematic.configuration.configuration import CONFIG
 
 # invoke_without_command=True -> forces the application not to show aids before losing them with a --h
 @click.group(context_settings=CONTEXT_SETTINGS, invoke_without_command=True)
@@ -37,7 +36,8 @@ def manifest(ctx, config):  # use as `schematic manifest ...`
     """
     try:
         logger.debug(f"Loading config file contents in '{config}'")
-        ctx.obj = CONFIG.load_config(config)
+        CONFIG.load_config(config)
+        ctx.obj = CONFIG
     except ValueError as e:
         logger.error("'--config' not provided or environment variable not set.")
         logger.exception(e)
@@ -117,17 +117,18 @@ def get_manifest(
     """
     Running CLI with manifest generation options.
     """
-    # optional parameters that need to be passed to ManifestGenerator()
-    # can be read from config.yml as well
-    data_type = fill_in_from_config("data_type", data_type, ("manifest", "data_type"))
-    jsonld = fill_in_from_config("jsonld", jsonld, ("model", "input", "location"))
-    title = fill_in_from_config("title", title, ("manifest", "title"), allow_none=True)
-    json_schema = fill_in_from_config(
-        "json_schema",
-        json_schema,
-        ("model", "input", "validation_schema"),
-        allow_none=True,
-    )
+    # Optional parameters that need to be passed to ManifestGenerator()
+    # If CLI parameters are None they are gotten from the CONFIG object and logged
+    if data_type is None:
+        data_type = CONFIG.manifest_data_type
+        log_value_from_config("data_type", data_type)
+    if jsonld is None:
+        jsonld = CONFIG.model_location
+        log_value_from_config("jsonld", jsonld)
+    if title is None:
+        title = CONFIG.manifest_title
+        log_value_from_config("title", title)
+
     def create_single_manifest(data_type, output_csv=None, output_xlsx=None):
         # create object of type ManifestGenerator
         manifest_generator = ManifestGenerator(
 
             output_path = None
 
         result = manifest_generator.get_manifest(
-            dataset_id=dataset_id, sheet_url=sheet_url, json_schema=json_schema, output_format = output_format, output_path = output_path
+            dataset_id=dataset_id, sheet_url=sheet_url, json_schema=None, output_format = output_format, output_path = output_path
         )
 
         if sheet_url:
@@ -253,9 +254,10 @@ def migrate_manifests(
    """
    Running CLI with manifest migration options.
    """
-    jsonld = fill_in_from_config("jsonld", jsonld, ("model", "input", "location"))
+    if jsonld is None:
+        jsonld = CONFIG.model_location
+        log_value_from_config("jsonld", jsonld)
 
-
     full_scope = project_scope + [archive_project]
     synStore = SynapseStorage(project_scope = full_scope)
diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py
index 280a8da15..e8e6faf36 100644
--- a/schematic/manifest/generator.py
+++ b/schematic/manifest/generator.py
 
 # we shouldn't need to expose Synapse functionality explicitly
 from schematic.store.synapse import SynapseStorage
 
-from schematic import CONFIG
+from schematic.configuration.configuration import CONFIG
 from schematic.utils.google_api_utils import export_manifest_drive_service
 from openpyxl import load_workbook
 from pathlib import Path
@@ -127,13 +127,9 @@ def _column_to_cond_format_eq_rule(
         col_letter = self._column_to_letter(column_idx)
 
         if not required:
-            bg_color = CONFIG["style"]["google_manifest"].get(
-                "opt_bg_color", {"red": 1.0, "green": 1.0, "blue": 0.9019,},
-            )
+            bg_color = CONFIG.google_optional_background_color
         else:
-            bg_color = CONFIG["style"]["google_manifest"].get(
-                "req_bg_color", {"red": 0.9215, "green": 0.9725, "blue": 0.9803,},
-            )
+            bg_color = CONFIG.google_required_background_color
 
         boolean_rule = {
             "condition": {
@@ -173,22 +169,9 @@ def _gdrive_copy_file(self, origin_file_id, copy_title):
         )
 
     def _create_empty_manifest_spreadsheet(self, title):
-        if CONFIG["style"]["google_manifest"]["master_template_id"]:
-
-            # if provided with a template manifest google sheet, use it
-            spreadsheet_id = self._gdrive_copy_file(
-                CONFIG["style"]["google_manifest"]["master_template_id"], title
-            )
-
-        else:
-            spreadsheet_body = {
-                'properties': {
-                    'title': title
-                }}
-
-            # if no template, create an empty spreadsheet
-            spreadsheet_id = self.sheet_service.spreadsheets().create(body=spreadsheet_body, fields="spreadsheetId").execute().get("spreadsheetId")
-
+        spreadsheet_id = self._gdrive_copy_file(
+            CONFIG.google_sheets_master_template_id, title
+        )
         return spreadsheet_id
 
     def _get_cell_borders(self, cell_range):
@@ -287,8 +270,7 @@ def _get_column_data_validation_values(
         # set validation strictness to config file default if None indicated.
if strict == None: - strict = CONFIG["style"]["google_manifest"].get("strict_validation", True) - + strict = CONFIG.google_sheets_strict_validation #store valid values explicitly in workbook at the provided range to use as validation values if validation_type == "ONE_OF_RANGE": valid_values=self._store_valid_values_as_data_dictionary(column_id, valid_values, spreadsheet_id) @@ -893,14 +875,7 @@ def _request_notes_comments(self, i, req, json_schema): """ # check if attribute is required and set a corresponding color if req in json_schema["required"]: - bg_color = CONFIG["style"]["google_manifest"].get( - "req_bg_color", - { - "red": 0.9215, - "green": 0.9725, - "blue": 0.9803, - }, - ) + bg_color = CONFIG.google_required_background_color req_format_body = { "requests": [ diff --git a/schematic/models/commands.py b/schematic/models/commands.py index aba15decb..c3f8d53e9 100644 --- a/schematic/models/commands.py +++ b/schematic/models/commands.py @@ -11,10 +11,10 @@ from jsonschema import ValidationError from schematic.models.metadata import MetadataModel -from schematic.utils.cli_utils import get_from_config, fill_in_from_config, query_dict, parse_synIDs, parse_comma_str_to_list +from schematic.utils.cli_utils import log_value_from_config, query_dict, parse_synIDs, parse_comma_str_to_list from schematic.help import model_commands from schematic.exceptions import MissingConfigValueError -from schematic import CONFIG +from schematic.configuration.configuration import CONFIG logger = logging.getLogger('schematic') click_log.basic_config(logger) @@ -38,7 +38,8 @@ def model(ctx, config): # use as `schematic model ...` """ try: logger.debug(f"Loading config file contents in '{config}'") - ctx.obj = CONFIG.load_config(config) + CONFIG.load_config(config) + ctx.obj = CONFIG except ValueError as e: logger.error("'--config' not provided or environment variable not set.") logger.exception(e) @@ -109,13 +110,15 @@ def submit_manifest( """ Running CLI with manifest validation (optional) and submission options. """ - - jsonld = get_from_config(CONFIG.DATA, ("model", "input", "location")) + if jsonld is None: + jsonld = CONFIG.model_location + log_value_from_config("jsonld", jsonld) - model_file_type = get_from_config(CONFIG.DATA, ("model", "input", "file_type")) + file_type = CONFIG.model_file_type + log_value_from_config("file_type", file_type) metadata_model = MetadataModel( - inputMModelLocation=jsonld, inputMModelLocationType=model_file_type + inputMModelLocation=jsonld, inputMModelLocationType=file_type ) @@ -181,9 +184,10 @@ def validate_manifest(ctx, manifest_path, data_type, json_schema, restrict_rules """ Running CLI for manifest validation. 
""" - if not data_type: - data_type = fill_in_from_config("data_type", data_type, ("manifest", "data_type")) - + if data_type is None: + data_type = CONFIG.manifest_data_type + log_value_from_config("data_type", data_type) + try: len(data_type) == 1 except: @@ -193,19 +197,17 @@ def validate_manifest(ctx, manifest_path, data_type, json_schema, restrict_rules data_type = data_type[0] - json_schema = fill_in_from_config( - "json_schema", - json_schema, - ("model", "input", "validation_schema"), - allow_none=True, - ) t_validate = perf_counter() - jsonld = get_from_config(CONFIG.DATA, ("model", "input", "location")) - model_file_type = get_from_config(CONFIG.DATA, ("model", "input", "file_type")) + if jsonld is None: + jsonld = CONFIG.model_location + log_value_from_config("jsonld", jsonld) + + file_type = CONFIG.model_file_type + log_value_from_config("file_type", file_type) metadata_model = MetadataModel( - inputMModelLocation=jsonld, inputMModelLocationType=model_file_type + inputMModelLocation=jsonld, inputMModelLocationType=file_type ) errors, warnings = metadata_model.validateModelManifest( diff --git a/schematic/schemas/generator.py b/schematic/schemas/generator.py index e1f4b436c..8cb392470 100644 --- a/schematic/schemas/generator.py +++ b/schematic/schemas/generator.py @@ -12,7 +12,6 @@ from schematic.utils.schema_utils import load_schema_into_networkx from schematic.utils.validate_utils import validate_schema, rule_in_rule_list -from schematic import CONFIG logger = logging.getLogger(__name__) @@ -689,31 +688,19 @@ def get_json_schema_requirements(self, source_node: str, schema_name: str) -> Di if not json_schema["allOf"]: del json_schema["allOf"] - # Check if config value is provided; otherwise, set to None - json_schema_log_file = query_dict( - CONFIG.DATA, ("model", "input", "log_location") - ) - # If no config value and SchemaGenerator was initialized with # a JSON-LD path, construct - if json_schema_log_file is None and self.jsonld_path is not None: + if self.jsonld_path is not None: prefix = self.jsonld_path_root prefix_root, prefix_ext = os.path.splitext(prefix) if prefix_ext == ".model": prefix = prefix_root json_schema_log_file = f"{prefix}.{source_node}.schema.json" - if json_schema_log_file is None: - logger.info( - "The JSON schema file can be inspected by setting the following " - "nested key in the configuration: (model > input > log_location)." - ) - else: - json_schema_dirname = os.path.dirname(json_schema_log_file) - if json_schema_dirname != '': - os.makedirs(json_schema_dirname, exist_ok=True) - with open(json_schema_log_file, "w") as js_f: - json.dump(json_schema, js_f, indent=2) + logger.info( + "The JSON schema file can be inspected by setting the following " + "nested key in the configuration: (model > input > log_location)." 
+ ) logger.info(f"JSON schema file log stored as {json_schema_log_file}") diff --git a/schematic/schemas/validator.py b/schematic/schemas/validator.py index f88613a3f..301dcbfe3 100644 --- a/schematic/schemas/validator.py +++ b/schematic/schemas/validator.py @@ -13,8 +13,6 @@ validate_schema, ) -from schematic import CONFIG - class SchemaValidator: """Validate Schema against SchemaOrg standard diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index 969114415..5de6a3cf1 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -51,7 +51,7 @@ from schematic.store.base import BaseStorage from schematic.exceptions import MissingConfigValueError, AccessCredentialsError -from schematic import CONFIG +from schematic.configuration.configuration import CONFIG logger = logging.getLogger("Synapse storage") @@ -68,21 +68,15 @@ def _download_manifest_to_folder(self): """ try downloading a manifest to local cache or a given folder manifest - Return: + Return: manifest_data: A new Synapse Entity object of the appropriate type """ - # TO DO: potentially deprecate the if else statement because "manifest_folder" key always exist in config - if CONFIG["synapse"]["manifest_folder"]: - manifest_data = self.syn.get( - self.manifest_id, - downloadLocation=CONFIG["synapse"]["manifest_folder"], - ifcollision="overwrite.local", - ) - else: - manifest_data = self.syn.get( - self.manifest_id, - ) - return manifest_data + manifest_data = self.syn.get( + self.manifest_id, + downloadLocation=CONFIG.synapse_manifest_folder, + ifcollision="overwrite.local", + ) + return manifest_data def _entity_type_checking(self): """ @@ -176,20 +170,8 @@ def __init__( self.syn = self.login(token, access_token) self.project_scope = project_scope - - - # check if "master_fileview" has been set - try: - self.storageFileview = CONFIG["synapse"]["master_fileview"] - except KeyError: - raise MissingConfigValueError(("synapse", "master_fileview")) - - # check if "manifest_basename" has been set - try: - self.manifest = CONFIG["synapse"]["manifest_basename"] - except KeyError: - raise MissingConfigValueError(("synapse", "manifest_basename")) - + self.storageFileview = CONFIG.synapse_master_fileview_id + self.manifest = CONFIG.synapse_manifest_basename self._query_fileview() def _purge_synapse_cache(self, root_dir: str = "/var/www/.synapseCache/", maximum_storage_allowed_cache_gb=7): @@ -224,8 +206,8 @@ def _purge_synapse_cache(self, root_dir: str = "/var/www/.synapseCache/", maximu def _query_fileview(self): self._purge_synapse_cache() try: - self.storageFileview = CONFIG["synapse"]["master_fileview"] - self.manifest = CONFIG["synapse"]["manifest_basename"] + self.storageFileview = CONFIG.synapse_master_fileview_id + self.manifest = CONFIG.synapse_manifest_basename if self.project_scope: self.storageFileviewTable = self.syn.tableQuery( f"SELECT * FROM {self.storageFileview} WHERE projectId IN {tuple(self.project_scope + [''])}" @@ -235,9 +217,6 @@ def _query_fileview(self): self.storageFileviewTable = self.syn.tableQuery( "SELECT * FROM " + self.storageFileview ).asDataFrame() - - except AttributeError: - raise AttributeError("storageFileview attribute has not been set.") except SynapseHTTPError: raise AccessCredentialsError(self.storageFileview) @@ -263,9 +242,8 @@ def login(token=None, access_token=None): raise ValueError("No access to resources. 
Please make sure that your token is correct") else: # login using synapse credentials provided by user in .synapseConfig (default) file - syn = synapseclient.Synapse(configPath=CONFIG.SYNAPSE_CONFIG_PATH) + syn = synapseclient.Synapse(configPath=CONFIG.synapse_configuration_path) syn.login(silent=True) - return syn def missing_entity_handler(method): @@ -1013,9 +991,9 @@ def upload_manifest_file(self, manifest, metadataManifestPath, datasetId, restri # Differentiate "censored" and "uncensored" manifest if "censored" in file_name_full: - file_name_new = os.path.basename(CONFIG["synapse"]["manifest_basename"]) + "_" + component_name + "_censored" + '.' + file_extension + file_name_new = os.path.basename(CONFIG.synapse_manifest_basename) + "_" + component_name + "_censored" + '.' + file_extension else: - file_name_new = os.path.basename(CONFIG["synapse"]["manifest_basename"]) + "_" + component_name + '.' + file_extension + file_name_new = os.path.basename(CONFIG.synapse_manifest_basename) + "_" + component_name + '.' + file_extension manifestSynapseFile = File( metadataManifestPath, @@ -2070,8 +2048,8 @@ def _get_schematic_db_creds(synStore): # Try getting creds from .synapseConfig file if it exists # Primarily useful for local users. Seems to correlate with credentials stored in synaspe object when logged in - if os.path.exists(CONFIG.SYNAPSE_CONFIG_PATH): - config = synStore.syn.getConfigFile(CONFIG.SYNAPSE_CONFIG_PATH) + if os.path.exists(CONFIG.synapse_configuration_path): + config = synStore.syn.getConfigFile(CONFIG.synapse_configuration_path) # check which credentials are provided in file if config.has_option('authentication', 'username'): diff --git a/schematic/utils/cli_utils.py b/schematic/utils/cli_utils.py index 8a1f27ad5..57f1a368c 100644 --- a/schematic/utils/cli_utils.py +++ b/schematic/utils/cli_utils.py @@ -7,12 +7,6 @@ from functools import reduce import re -from schematic import CONFIG -from schematic.exceptions import ( - MissingConfigValueError, - MissingConfigAndArgumentValueError, -) - logger = logging.getLogger(__name__) @@ -39,88 +33,20 @@ def extract(dictionary: Any, key: Any) -> Union[Any, None]: return reduce(extract, keys, dictionary) -def get_from_config( - dictionary: Mapping[Any, Any], keys: Sequence[Any] -) -> Union[Any, None]: - """Access a nested configuration value from a yaml - configuration file. - - Args: - dictionary: A dictionary containing anything. - keys: A sequence of values corresponding to keys - in `dictionary`. - - Returns: - The nested value corresponding to the given series. - - Raises: - MissingConfigValueError: When configuration value not - found in config.yml file for given key. - """ - # get configuration value from config file - config_value = query_dict(dictionary, keys) - - # if configuration value not present then raise Exception - if config_value is None: - raise MissingConfigValueError(keys) - - config_keys_str = " > ".join(keys) - - logger.info( - f"The ({config_keys_str}) argument with value " - f"'{config_value}' is being read from the config file." - ) - - return config_value - - -def fill_in_from_config( - arg_name: str, arg_value: Any, config_keys: Sequence[Any], allow_none: bool = False -) -> Any: - """Fill in a missing value from a configuration object. +def log_value_from_config(arg_name: str, config_value: Any): + """Logs when getting a value from the config Args: - arg_name: Name of the argument. Used for logging. - config_keys: List of keys used to access a nested - value in `config` corresponding to `arg_name`. 
-        arg_value: Value of the argument provided at the
-            command line.
-        allow_none: Return None if argument value and
-            configuration value are both None (rather
-            than raising an error).
-
-    Returns:
-        The argument value, either from the calling context
-        or the corresponding field in the configuration.
-
-    Raises:
-        AssertionError: If both the argument value and the
-            configuration object are `None`.
+        arg_name (str): Name of the argument. Used for logging.
+        config_value (Any): The value in the config
     """
-
-    # Avoid accessing config if argument value is provided
-    if arg_value is not None:
-        return arg_value
-
-    # raise Exception if both, configuration value not present
-    # in config file and CLI argument value is missing
-    try:
-        config_value = get_from_config(CONFIG.DATA, config_keys)
-    except MissingConfigValueError:
-        if allow_none:
-            return None
-        raise MissingConfigAndArgumentValueError(arg_name, config_keys)
-
-    # Make sure argument value and
-    config_keys_str = " > ".join(config_keys)
-
     logger.info(
-        f"The '--{arg_name}' argument is being taken from configuration "
-        f"file ({config_keys_str}), i.e., '{config_value}'."
+        "The '--%s' argument is being taken from configuration file,"
+        " i.e., '%s'.", arg_name, config_value
     )
-
-    return config_value
-

 def parse_synIDs(
     ctx, param, synIDs,
 ) -> List[str]:
diff --git a/schematic/utils/google_api_utils.py b/schematic/utils/google_api_utils.py
index 8844c3da0..ac3def72f 100644
--- a/schematic/utils/google_api_utils.py
+++ b/schematic/utils/google_api_utils.py
@@ -11,7 +11,7 @@
 from google.auth.transport.requests import Request
 from google.oauth2 import service_account
 from google.oauth2.credentials import Credentials
-from schematic import CONFIG
+from schematic.configuration.configuration import CONFIG
 from schematic.store.synapse import SynapseStorage
 import pandas as pd

@@ -24,30 +24,6 @@
     "https://www.googleapis.com/auth/drive",
 ]

-
-# it will create 'token.pickle' based on credentials.json
-def generate_token() -> Credentials:
-    creds = None
-    # The file token.pickle stores the user's access and refresh tokens,
-    # and is created automatically when the authorization flow completes for the first time.
-    if os.path.exists(CONFIG.TOKEN_PICKLE):
-        with open(CONFIG.TOKEN_PICKLE, "rb") as token:
-            creds = pickle.load(token)
-
-    # If there are no (valid) credentials available, let the user log in.
-    if not creds or not creds.valid:
-        if creds and creds.expired and creds.refresh_token:
-            creds.refresh(Request())
-        else:
-            flow = InstalledAppFlow.from_client_secrets_file(CONFIG.CREDS_PATH, SCOPES)
-            creds = flow.run_console() ### don't have to deal with ports
-        # Save the credentials for the next run
-        with open(CONFIG.TOKEN_PICKLE, "wb") as token:
-            pickle.dump(creds, token)
-
-    return creds
-
-
 # TODO: replace by pygsheets calls?
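With generate_token removed, credential loading below (build_service_account_creds) goes through google.oauth2's standard service-account loader. A minimal standalone sketch of that call, where the key-file path and scope list are illustrative stand-ins rather than schematic's actual defaults:

from google.oauth2 import service_account

# Illustrative stand-ins: schematic resolves the real path via
# CONFIG.service_account_credentials_path and defines its own SCOPES list
SCOPES = ["https://www.googleapis.com/auth/spreadsheets"]
creds_path = "schematic_service_account_creds.json"

# Read the JSON key file and bind the requested scopes to the credentials
credentials = service_account.Credentials.from_service_account_file(
    creds_path, scopes=SCOPES
)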
def build_credentials() -> Dict[str, Any]: creds = generate_token() @@ -76,7 +52,7 @@ def build_service_account_creds() -> Dict[str, Any]: credentials = service_account.Credentials.from_service_account_info(dict_creds, scopes=SCOPES) else: credentials = service_account.Credentials.from_service_account_file( - CONFIG.SERVICE_ACCT_CREDS, scopes=SCOPES + CONFIG.service_account_credentials_path, scopes=SCOPES ) # get a Google Sheet API service @@ -97,21 +73,21 @@ def download_creds_file() -> None: # if file path of service_account does not exist # and if an environment variable related to service account is not found # regenerate service_account credentials - if not os.path.exists(CONFIG.SERVICE_ACCT_CREDS) and "SERVICE_ACCOUNT_CREDS" not in os.environ: + if not os.path.exists(CONFIG.service_account_credentials_path) and "SERVICE_ACCOUNT_CREDS" not in os.environ: # synapse ID of the 'schematic_service_account_creds.json' file - API_CREDS = CONFIG["synapse"]["service_acct_creds"] + API_CREDS = CONFIG.service_account_credentials_synapse_id # Download in parent directory of SERVICE_ACCT_CREDS to # ensure same file system for os.rename() - creds_dir = os.path.dirname(CONFIG.SERVICE_ACCT_CREDS) + creds_dir = os.path.dirname(CONFIG.service_account_credentials_path) creds_file = syn.get(API_CREDS, downloadLocation=creds_dir) - os.rename(creds_file.path, CONFIG.SERVICE_ACCT_CREDS) + os.rename(creds_file.path, CONFIG.service_account_credentials_path) logger.info( "The credentials file has been downloaded " - f"to '{CONFIG.SERVICE_ACCT_CREDS}'" + f"to '{CONFIG.service_account_credentials_path}'" ) elif "SERVICE_ACCOUNT_CREDS" in os.environ: diff --git a/schematic/utils/io_utils.py b/schematic/utils/io_utils.py index d6e4d3fcc..016ea5dcd 100644 --- a/schematic/utils/io_utils.py +++ b/schematic/utils/io_utils.py @@ -2,7 +2,7 @@ import json import urllib.request -from schematic import CONFIG, LOADER +from schematic import LOADER def load_json(file_path): diff --git a/schematic/utils/validate_utils.py b/schematic/utils/validate_utils.py index 5264d73da..3e694bce8 100644 --- a/schematic/utils/validate_utils.py +++ b/schematic/utils/validate_utils.py @@ -3,7 +3,7 @@ from jsonschema import validate from re import compile, search, IGNORECASE from schematic.utils.io_utils import load_json -from schematic import CONFIG, LOADER +from schematic import LOADER from typing import List import numpy as np diff --git a/schematic/visualization/commands.py b/schematic/visualization/commands.py index 0930ba8ea..ad9670e2b 100644 --- a/schematic/visualization/commands.py +++ b/schematic/visualization/commands.py @@ -8,10 +8,10 @@ from schematic.visualization.attributes_explorer import AttributesExplorer from schematic.visualization.tangled_tree import TangledTree -from schematic.utils.cli_utils import get_from_config, fill_in_from_config, query_dict +from schematic.utils.cli_utils import log_value_from_config, query_dict from schematic.help import viz_commands from schematic.help import model_commands -from schematic import CONFIG +from schematic.configuration.configuration import CONFIG logger = logging.getLogger(__name__) click_log.basic_config(logger) @@ -35,7 +35,8 @@ def viz(ctx, config): # use as `schematic model ...` """ try: logger.debug(f"Loading config file contents in '{config}'") - ctx.obj = CONFIG.load_config(config) + CONFIG.load_config(config) + ctx.obj = CONFIG except ValueError as e: logger.error("'--config' not provided or environment variable not set.") logger.exception(e) @@ -52,7 +53,8 @@ def 
get_attributes(ctx): """ # Get JSONLD file path - path_to_jsonld = get_from_config(CONFIG.DATA, ("model", "input", "location")) + path_to_jsonld = CONFIG.model_location + log_value_from_config("jsonld", path_to_jsonld) # Run attributes explorer AttributesExplorer(path_to_jsonld).parse_attributes(save_file=True) return @@ -79,7 +81,8 @@ def get_tangled_tree_text(ctx, figure_type, text_format): """ Get text to be placed on the tangled tree visualization. """ # Get JSONLD file path - path_to_jsonld = get_from_config(CONFIG.DATA, ("model", "input", "location")) + path_to_jsonld = CONFIG.model_location + log_value_from_config("jsonld", path_to_jsonld) # Initialize TangledTree tangled_tree = TangledTree(path_to_jsonld, figure_type) @@ -104,7 +107,8 @@ def get_tangled_tree_component_layers(ctx, figure_type): ''' Get the components that belong in each layer of the tangled tree visualization. ''' # Get JSONLD file path - path_to_jsonld = get_from_config(CONFIG.DATA, ("model", "input", "location")) + path_to_jsonld = CONFIG.model_location + log_value_from_config("jsonld", path_to_jsonld) # Initialize Tangled Tree tangled_tree = TangledTree(path_to_jsonld, figure_type) diff --git a/schematic_api/api/routes.py b/schematic_api/api/routes.py index 8d8f66367..410c5817d 100644 --- a/schematic_api/api/routes.py +++ b/schematic_api/api/routes.py @@ -18,7 +18,7 @@ import pandas as pd import json -from schematic import CONFIG +from schematic.configuration.configuration import CONFIG from schematic.visualization.attributes_explorer import AttributesExplorer from schematic.visualization.tangled_tree import TangledTree from schematic.manifest.generator import ManifestGenerator @@ -32,24 +32,13 @@ logger = logging.getLogger(__name__) logging.basicConfig(level=logging.DEBUG) -def config_handler(asset_view=None): - path_to_config = app.config["SCHEMATIC_CONFIG"] - - # if content of the config file is provided: - content_of_config = app.config["SCHEMATIC_CONFIG_CONTENT"] - - # if the environment variable exists - if content_of_config: - CONFIG.load_config_content_from_env() - +def config_handler(asset_view: str=None): # check if path to config is provided - if os.path.isfile(path_to_config): - CONFIG.load_config(path_to_config, asset_view = asset_view) - - else: - raise FileNotFoundError( - f"No configuration file was found at this path: {path_to_config}" - ) + path_to_config = app.config["SCHEMATIC_CONFIG"] + if path_to_config is not None and os.path.isfile(path_to_config): + CONFIG.load_config(path_to_config) + if asset_view is not None: + CONFIG.synapse_master_fileview_id = asset_view class JsonConverter: ''' diff --git a/tests/conftest.py b/tests/conftest.py index 7128c9e53..f19b4a9dc 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -27,8 +27,6 @@ TESTS_DIR = os.path.dirname(os.path.abspath(__file__)) DATA_DIR = os.path.join(TESTS_DIR, "data") -CONFIG_PATH = os.path.join(DATA_DIR, "test_config.yml") -CONFIG.load_config(CONFIG_PATH) @pytest.fixture(scope="session") def dataset_id(): @@ -92,7 +90,3 @@ def helpers(): @pytest.fixture(scope="session") def config(): yield CONFIG - -@pytest.fixture(scope="session") -def config_path(): - yield CONFIG_PATH diff --git a/tests/data/test_config.yml b/tests/data/test_config.yml index 99b2d4546..f11771c40 100644 --- a/tests/data/test_config.yml +++ b/tests/data/test_config.yml @@ -1,21 +1,23 @@ +# This config has the same default values as schematic itself has asset_store: synapse: - config_basename: "file_name" - manifest_basename: 'file_name' - master_fileview_id: 
'syn1' - manifest_folder: 'folder_name' + config_basename: ".synapseConfig" + manifest_basename: 'synapse_storage_manifest' + master_fileview_id: 'syn23643253' + manifest_folder: 'manifests' manifest: - title: 'title' + title: 'example' data_type: - - 'data_type' + - 'Biospecimen' + - 'Patient' model: - location: 'model.jsonld' - file_type: 'not_local' + location: 'tests/data/example.model.jsonld' + file_type: 'local' google_sheets: - service_acct_creds_synapse_id: 'syn1' - service_acct_creds_basename: "creds.json" - master_template_id: 'id' - strict_validation: false + service_acct_creds_synapse_id: 'syn25171627' + service_acct_creds_basename: "schematic_service_account_creds.json" + master_template_id: '1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU' + strict_validation: true diff --git a/tests/data/test_config2.yml b/tests/data/test_config2.yml new file mode 100644 index 000000000..99b2d4546 --- /dev/null +++ b/tests/data/test_config2.yml @@ -0,0 +1,21 @@ +asset_store: + synapse: + config_basename: "file_name" + manifest_basename: 'file_name' + master_fileview_id: 'syn1' + manifest_folder: 'folder_name' + +manifest: + title: 'title' + data_type: + - 'data_type' + +model: + location: 'model.jsonld' + file_type: 'not_local' + +google_sheets: + service_acct_creds_synapse_id: 'syn1' + service_acct_creds_basename: "creds.json" + master_template_id: 'id' + strict_validation: false diff --git a/tests/test_api.py b/tests/test_api.py index fa8dc83f0..f43b13977 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -10,6 +10,8 @@ from time import perf_counter import pandas as pd # third party library import from schematic.schemas.generator import SchemaGenerator #Local application/library specific imports. +from schematic.configuration.configuration import Configuration +from schematic.configuration.configuration import CONFIG logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) @@ -21,8 +23,8 @@ def app(): yield app @pytest.fixture(scope="class") -def client(app, config_path): - app.config['SCHEMATIC_CONFIG'] = config_path +def client(app): + app.config['SCHEMATIC_CONFIG'] = None with app.test_client() as client: yield client @@ -70,8 +72,8 @@ def get_MockComponent_attribute(): yield MockComponent_attribute @pytest.fixture(scope="class") -def syn_token(config): - synapse_config_path = config.SYNAPSE_CONFIG_PATH +def syn_token(config:Configuration): + synapse_config_path = config.synapse_configuration_path config_parser = configparser.ConfigParser() config_parser.read(synapse_config_path) # try using synapse access token diff --git a/tests/test_cli.py b/tests/test_cli.py index d5f5e2e5c..0e1e37799 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -6,8 +6,8 @@ # from schematic import init from schematic.schemas.commands import schema -from schematic.utils.google_api_utils import download_creds_file from schematic.manifest.commands import manifest +from schematic.configuration.configuration import Configuration @pytest.fixture def runner() -> CliRunner: @@ -59,11 +59,11 @@ def test_schema_convert_cli(self, runner, config_path, helpers): # get manifest by default # by default this should download the manifest as a CSV file @pytest.mark.google_credentials_needed - def test_get_example_manifest_default(self, runner, helpers, config, data_model_jsonld): + def test_get_example_manifest_default(self, runner, helpers, config: Configuration, data_model_jsonld): output_path = helpers.get_data_path("example.Patient.manifest.csv") result = runner.invoke( - manifest, 
["--config", config.CONFIG_PATH, "get", "--data_type", "Patient", "--jsonld", data_model_jsonld] + manifest, ["--config", config.config_path, "get", "--data_type", "Patient", "--jsonld", data_model_jsonld] ) @@ -73,22 +73,22 @@ def test_get_example_manifest_default(self, runner, helpers, config, data_model_ # get manifest as a csv # use google drive to export @pytest.mark.google_credentials_needed - def test_get_example_manifest_csv(self, runner, helpers, config, data_model_jsonld): + def test_get_example_manifest_csv(self, runner, helpers, config: Configuration, data_model_jsonld): output_path = helpers.get_data_path("test.csv") result = runner.invoke( - manifest, ["--config", config.CONFIG_PATH, "get", "--data_type", "Patient", "--jsonld", data_model_jsonld, "--output_csv", output_path] + manifest, ["--config", config.config_path, "get", "--data_type", "Patient", "--jsonld", data_model_jsonld, "--output_csv", output_path] ) assert result.exit_code == 0 self.assert_expected_file(result, output_path) # get manifest as an excel spreadsheet @pytest.mark.google_credentials_needed - def test_get_example_manifest_excel(self, runner, helpers, config, data_model_jsonld): + def test_get_example_manifest_excel(self, runner, helpers, config: Configuration, data_model_jsonld): output_path = helpers.get_data_path("test.xlsx") result = runner.invoke( - manifest, ["--config", config.CONFIG_PATH, "get", "--data_type", "Patient", "--jsonld", data_model_jsonld, "--output_xlsx", output_path] + manifest, ["--config", config.config_path, "get", "--data_type", "Patient", "--jsonld", data_model_jsonld, "--output_xlsx", output_path] ) assert result.exit_code == 0 diff --git a/tests/test_configuration.py b/tests/test_configuration.py index 6a65e3091..949fa24a0 100644 --- a/tests/test_configuration.py +++ b/tests/test_configuration.py @@ -154,8 +154,8 @@ def test_init(self) -> None: def test_load_config(self) -> None: """Testing for Configuration.load_config""" config = Configuration() - config.load_config("tests/data/test_config.yml") - assert os.path.basename(config.config_path) == "test_config.yml" + config.load_config("tests/data/test_config2.yml") + assert os.path.basename(config.config_path) == "test_config2.yml" assert os.path.basename(config.synapse_configuration_path) == "file_name" assert config.synapse_manifest_basename == "file_name" assert config.synapse_master_fileview_id == "syn1" diff --git a/tests/test_manifest.py b/tests/test_manifest.py index e665e40a0..c65b4676a 100644 --- a/tests/test_manifest.py +++ b/tests/test_manifest.py @@ -8,6 +8,7 @@ from unittest.mock import MagicMock from schematic.manifest.generator import ManifestGenerator from schematic.schemas.generator import SchemaGenerator +from schematic.configuration.configuration import Configuration from schematic.utils.google_api_utils import execute_google_api_requests @@ -200,7 +201,7 @@ def test_get_manifest_excel(self, helpers, sheet_url, output_format, dataset_id) # test all the functions used under get_manifest @pytest.mark.parametrize("template_id", [["provided", "not provided"]]) - def test_create_empty_manifest_spreadsheet(self, config, simple_manifest_generator, template_id): + def test_create_empty_manifest_spreadsheet(self, config: Configuration, simple_manifest_generator, template_id): ''' Create an empty manifest spreadsheet regardless if master_template_id is provided Note: _create_empty_manifest_spreadsheet calls _gdrive_copy_file. 
If there's no template id provided in config, this function will create a new manifest diff --git a/tests/test_store.py b/tests/test_store.py index 0fa25edce..720ed4720 100644 --- a/tests/test_store.py +++ b/tests/test_store.py @@ -12,10 +12,11 @@ from schematic.models.metadata import MetadataModel from schematic.store.base import BaseStorage from schematic.store.synapse import SynapseStorage, DatasetFileView, ManifestDownload -from schematic.utils.cli_utils import get_from_config from schematic.schemas.generator import SchemaGenerator from synapseclient.core.exceptions import SynapseHTTPError +from schematic.configuration.configuration import Configuration + logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger(__name__) @@ -113,11 +114,11 @@ def test_getFileAnnotations(self, synapse_store): assert expected_dict == actual_dict - def test_annotation_submission(self, synapse_store, helpers, config): + def test_annotation_submission(self, synapse_store, helpers, config: Configuration): manifest_path = "mock_manifests/annotations_test_manifest.csv" # Upload dataset annotations - inputModelLocaiton = helpers.get_data_path(get_from_config(config.DATA, ("model", "input", "location"))) + inputModelLocaiton = helpers.get_data_path(config.model_location) sg = SchemaGenerator(inputModelLocaiton) try: @@ -291,7 +292,7 @@ def test_tidy_table(self, dataset_fileview_table_tidy): @pytest.mark.table_operations class TestTableOperations: - def test_createTable(self, helpers, synapse_store, config, projectId, datasetId): + def test_createTable(self, helpers, synapse_store, config: Configuration, projectId, datasetId): table_manipulation = None # Check if FollowUp table exists if so delete @@ -307,7 +308,7 @@ def test_createTable(self, helpers, synapse_store, config, projectId, datasetId) # associate metadata with files manifest_path = "mock_manifests/table_manifest.csv" - inputModelLocaiton = helpers.get_data_path(get_from_config(config.DATA, ("model", "input", "location"))) + inputModelLocaiton = helpers.get_data_path(config.model_location) sg = SchemaGenerator(inputModelLocaiton) # updating file view on synapse takes a long time @@ -328,7 +329,7 @@ def test_createTable(self, helpers, synapse_store, config, projectId, datasetId) # assert table exists assert table_name in existing_tables.keys() - def test_replaceTable(self, helpers, synapse_store, config, projectId, datasetId): + def test_replaceTable(self, helpers, synapse_store, config: Configuration, projectId, datasetId): table_manipulation = 'replace' table_name='followup_synapse_storage_manifest_table' @@ -346,7 +347,7 @@ def test_replaceTable(self, helpers, synapse_store, config, projectId, datasetId assert table_name not in synapse_store.get_table_info(projectId = projectId).keys() # associate org FollowUp metadata with files - inputModelLocaiton = helpers.get_data_path(get_from_config(config.DATA, ("model", "input", "location"))) + inputModelLocaiton = helpers.get_data_path(config.model_location) sg = SchemaGenerator(inputModelLocaiton) # updating file view on synapse takes a long time @@ -395,7 +396,7 @@ def test_replaceTable(self, helpers, synapse_store, config, projectId, datasetId # delete table synapse_store.syn.delete(tableId) - def test_upsertTable(self, helpers, synapse_store, config, projectId, datasetId): + def test_upsertTable(self, helpers, synapse_store, config:Configuration, projectId, datasetId): table_manipulation = "upsert" table_name="MockRDB_synapse_storage_manifest_table".lower() @@ -413,7 +414,7 @@ def 
test_upsertTable(self, helpers, synapse_store, config, projectId, datasetId) assert table_name not in synapse_store.get_table_info(projectId = projectId).keys() # associate org FollowUp metadata with files - inputModelLocaiton = helpers.get_data_path(get_from_config(config.DATA, ("model", "input", "location"))) + inputModelLocaiton = helpers.get_data_path(config.model_location) sg = SchemaGenerator(inputModelLocaiton) # updating file view on synapse takes a long time @@ -493,7 +494,7 @@ def test_get_manifest_id(self, synapse_store, datasetFileView): assert manifest_syn_id == censored_manifest_id @pytest.mark.parametrize("newManifestName",["", "Example"]) - def test_download_manifest(self, config, mock_manifest_download, newManifestName): + def test_download_manifest(self, mock_manifest_download, newManifestName): # test the download function by downloading a manifest manifest_data = mock_manifest_download.download_manifest(mock_manifest_download, newManifestName) assert os.path.exists(manifest_data['path']) diff --git a/tests/test_utils.py b/tests/test_utils.py index 157b975a4..c185fa815 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -90,41 +90,6 @@ def test_query_dict(self): assert test_result_valid == "foobar" assert test_result_invalid is None - def test_get_from_config(self): - - mock_dict = {"k1": {"k2": {"k3": "foobar"}}} - mock_keys_valid = ["k1", "k2", "k3"] - mock_keys_invalid = ["k1", "k2", "k4"] - - test_result_valid = cli_utils.get_from_config(mock_dict, mock_keys_valid) - - assert test_result_valid == "foobar" - - with pytest.raises(MissingConfigValueError): - cli_utils.get_from_config(mock_dict, mock_keys_invalid) - - def test_fill_in_from_config(self, mocker): - - jsonld = "/path/to/one" - jsonld_none = None - - mock_config = {"model": {"path": "/path/to/two"}} - mock_keys = ["model", "path"] - mock_keys_invalid = ["model", "file"] - - mocker.patch("schematic.CONFIG.DATA", mock_config) - - result1 = cli_utils.fill_in_from_config("jsonld", jsonld, mock_keys) - result2 = cli_utils.fill_in_from_config("jsonld", jsonld, mock_keys) - result3 = cli_utils.fill_in_from_config("jsonld_none", jsonld_none, mock_keys) - - assert result1 == "/path/to/one" - assert result2 == "/path/to/one" - assert result3 == "/path/to/two" - - with pytest.raises(MissingConfigAndArgumentValueError): - cli_utils.fill_in_from_config("jsonld_none", jsonld_none, mock_keys_invalid) - class FakeResponse: status: int From d37b9bb0a20a3d83c55b19327c6ecc9a4d183664 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Fri, 12 May 2023 08:21:58 -0700 Subject: [PATCH 010/135] change back to version in schematic --- great_expectations/great_expectations.yml | 113 ++++++++++++++-------- 1 file changed, 72 insertions(+), 41 deletions(-) diff --git a/great_expectations/great_expectations.yml b/great_expectations/great_expectations.yml index 023149ed8..f4976cef6 100644 --- a/great_expectations/great_expectations.yml +++ b/great_expectations/great_expectations.yml @@ -1,78 +1,109 @@ -data_docs_sites: - local_site: - class_name: SiteBuilder - show_how_to_buttons: true - store_backend: - class_name: TupleFilesystemStoreBackend - base_directory: uncommitted/data_docs/local_site/ - root_directory: /home/alamb/repos/schematic/great_expectations - site_index_builder: - class_name: DefaultSiteIndexBuilder -anonymous_usage_statistics: - data_context_id: 1130ca8d-9731-45d9-bc05-2032535250c8 - enabled: true -include_rendered_content: - expectation_suite: false - expectation_validation_result: false - globally: false +# 
Welcome to Great Expectations! Always know what to expect from your data. +# +# Here you can define datasources, batch kwargs generators, integrations and +# more. This file is intended to be committed to your repo. For help with +# configuration please: +# - Read our docs: https://docs.greatexpectations.io/en/latest/reference/spare_parts/data_context_reference.html#configuration +# - Join our slack channel: http://greatexpectations.io/slack + +# config_version refers to the syntactic version of this config file, and is used in maintaining backwards compatibility +# It is auto-generated and usually does not need to be changed. +config_version: 3.0 + +# Datasources tell Great Expectations where your data lives and how to get it. +# You can use the CLI command `great_expectations datasource new` to help you +# add a new datasource. Read more at https://docs.greatexpectations.io/en/latest/reference/core_concepts/datasource.html datasources: - pandas: - class_name: Datasource + Manifest: module_name: great_expectations.datasource + execution_engine: + module_name: great_expectations.execution_engine + class_name: PandasExecutionEngine data_connectors: default_runtime_data_connector_name: - batch_identifiers: - - default_identifier_name + module_name: great_expectations.datasource.data_connector class_name: RuntimeDataConnector - execution_engine: - class_name: PandasExecutionEngine - example_datasource: + batch_identifiers: + - default_identifier class_name: Datasource + example_datasource: module_name: great_expectations.datasource + execution_engine: + module_name: great_expectations.execution_engine + class_name: PandasExecutionEngine data_connectors: default_runtime_data_connector_name: + module_name: great_expectations.datasource.data_connector + class_name: RuntimeDataConnector batch_identifiers: - default_identifier_name - class_name: RuntimeDataConnector - module_name: great_expectations.datasource.data_connector - execution_engine: - class_name: PandasExecutionEngine - module_name: great_expectations.execution_engine -notebooks: -evaluation_parameter_store_name: evaluation_parameter_store -expectations_store_name: expectations_store -config_variables_file_path: -validations_store_name: validations_store -profiler_store_name: profiler_store + class_name: Datasource +config_variables_file_path: uncommitted/config_variables.yml + +# The plugins_directory will be added to your python path for custom modules +# used to override and extend Great Expectations. +plugins_directory: plugins/ + stores: +# Stores are configurable places to store things like Expectations, Validations +# Data Docs, and more. These are for advanced users only - most users can simply +# leave this section alone. +# +# Three stores are required: expectations, validations, and +# evaluation_parameters, and must exist with a valid store entry. Additional +# stores can be configured for uses such as data_docs, etc. expectations_store: class_name: ExpectationsStore store_backend: class_name: TupleFilesystemStoreBackend base_directory: expectations/ - root_directory: /home/alamb/repos/schematic/great_expectations + validations_store: class_name: ValidationsStore store_backend: class_name: TupleFilesystemStoreBackend base_directory: uncommitted/validations/ - root_directory: /home/alamb/repos/schematic/great_expectations + evaluation_parameter_store: + # Evaluation Parameters enable dynamic expectations. 
Read more here: + # https://docs.greatexpectations.io/en/latest/reference/core_concepts/evaluation_parameters.html class_name: EvaluationParameterStore + checkpoint_store: class_name: CheckpointStore store_backend: class_name: TupleFilesystemStoreBackend suppress_store_backend_id: true base_directory: checkpoints/ - root_directory: /home/alamb/repos/schematic/great_expectations + profiler_store: class_name: ProfilerStore store_backend: class_name: TupleFilesystemStoreBackend suppress_store_backend_id: true base_directory: profilers/ - root_directory: /home/alamb/repos/schematic/great_expectations -plugins_directory: -config_version: 3.0 + +expectations_store_name: expectations_store +validations_store_name: validations_store +evaluation_parameter_store_name: evaluation_parameter_store checkpoint_store_name: checkpoint_store + +data_docs_sites: + # Data Docs make it simple to visualize data quality in your project. These + # include Expectations, Validations & Profiles. The are built for all + # Datasources from JSON artifacts in the local repo including validations & + # profiles from the uncommitted directory. Read more at https://docs.greatexpectations.io/en/latest/reference/core_concepts/data_docs.html + local_site: + class_name: SiteBuilder + # set to false to hide how-to buttons in Data Docs + show_how_to_buttons: true + store_backend: + class_name: TupleFilesystemStoreBackend + base_directory: uncommitted/data_docs/local_site/ + site_index_builder: + class_name: DefaultSiteIndexBuilder + +anonymous_usage_statistics: + data_context_id: 1130ca8d-9731-45d9-bc05-2032535250c8 + enabled: true +notebooks: \ No newline at end of file From 7a7702d1d7387e7440d20c3b4e7bedea743a14cd Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Fri, 12 May 2023 08:22:20 -0700 Subject: [PATCH 011/135] update testing workflow --- .github/workflows/test.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 44b2477fa..dc1ca29be 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -96,7 +96,7 @@ jobs: run: | # ran only on certain files for now # add here when checked - poetry run black schematic/configuration.py schematic/exceptions.py schematic/help.py schematic/loader.py schematic/version.py --check + poetry run black schematic/configuration/*.py schematic/exceptions.py schematic/help.py schematic/loader.py schematic/version.py --check #---------------------------------------------- # type checking/enforcement @@ -105,9 +105,9 @@ jobs: run: | # ran only on certain files for now # add here when checked - poetry run mypy --install-types --non-interactive schematic/configuration.py + # poetry run mypy --install-types --non-interactive # add here when enforced - poetry run mypy --disallow-untyped-defs --install-types --non-interactive schematic/exceptions.py schematic/help.py schematic/loader.py schematic/version.py + poetry run mypy --disallow-untyped-defs --install-types --non-interactive schematic/configuration/*.py schematic/exceptions.py schematic/help.py schematic/loader.py schematic/version.py #---------------------------------------------- # linting @@ -116,7 +116,7 @@ jobs: run: | # ran only on certain files for now # add here when checked - poetry run pylint schematic/exceptions.py schematic/help.py schematic/loader.py schematic/version.py + poetry run pylint schematic/configuration/*.py schematic/exceptions.py schematic/help.py schematic/loader.py schematic/version.py 
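The black, mypy (--disallow-untyped-defs), and pylint steps above now gate schematic/configuration/*.py. A minimal sketch of the fully typed pydantic-v1 pattern those modules follow and that these checks enforce; the ExampleConfig class and its field are hypothetical, for illustration only:

import re
from pydantic.dataclasses import dataclass
from pydantic import validator


@dataclass()
class ExampleConfig:
    """synapse_id: a Synapse id validated on creation"""

    synapse_id: str = "syn1"

    @validator("synapse_id")
    @classmethod
    def validate_synapse_id(cls, value: str) -> str:
        """Raise if the value does not look like a Synapse id"""
        if not re.search("^syn[0-9]+", value):
            raise ValueError(f"{value} is not a valid Synapse id")
        return value


# Construction runs the validator; ExampleConfig(synapse_id="foo") raises ValueError
config = ExampleConfig(synapse_id="syn123")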
#---------------------------------------------- # run test suite From 9d196e0a709738f49e84d8ca34286192282e8532 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Fri, 12 May 2023 08:22:39 -0700 Subject: [PATCH 012/135] handle merge conflict --- schematic/configuration/configuration.py | 2 +- schematic/configuration/dataclasses.py | 6 +++-- schematic/store/synapse.py | 30 +++++++----------------- 3 files changed, 14 insertions(+), 24 deletions(-) diff --git a/schematic/configuration/configuration.py b/schematic/configuration/configuration.py index c3e11cd40..48f2e5c09 100644 --- a/schematic/configuration/configuration.py +++ b/schematic/configuration/configuration.py @@ -26,7 +26,6 @@ def __init__(self) -> None: self._model_config = ModelConfig() self._google_sheets_config = GoogleSheetsConfig() - def load_config(self, config_path: str) -> None: """Loads a user created config file and overwrites any defaults listed in the file @@ -192,5 +191,6 @@ def google_optional_background_color(self) -> dict[str, float]: "blue": 0.9019, } + # This instantiates the singleton for the rest of the package CONFIG = Configuration() diff --git a/schematic/configuration/dataclasses.py b/schematic/configuration/dataclasses.py index 83c0a0365..716bda6e6 100644 --- a/schematic/configuration/dataclasses.py +++ b/schematic/configuration/dataclasses.py @@ -8,6 +8,7 @@ # This turns on validation for value assignments after creation pydantic_config = ConfigDict(validate_assignment=True) + @dataclass(config=pydantic_config) class SynapseConfig: """ @@ -16,8 +17,9 @@ class SynapseConfig: master_fileview_id: Synapse id for the master file view manifest_folder: name of the folder manifests will be saved to locally """ + validate_assignment = True - config_basename: str = ".synapseConfig" + config_basename: str = ".synapseConfig" manifest_basename: str = "synapse_storage_manifest" master_fileview_id: str = "syn23643253" manifest_folder: str = "manifests" @@ -67,7 +69,7 @@ class ManifestConfig: """ title: str = "example" - data_type: list[str] = field(default_factory= lambda: ["Biospecimen", "Patient"]) + data_type: list[str] = field(default_factory=lambda: ["Biospecimen", "Patient"]) @validator("title") @classmethod diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index bb5ea367e..2fb21a9ee 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -72,35 +72,23 @@ def _download_manifest_to_folder(self) -> File: """ try downloading a manifest to local cache or a given folder manifest - Return: + Return: manifest_data: A Synapse file entity of the downloaded manifest """ - # TO DO: potentially deprecate the if else statement because "manifest_folder" key always exist in config (See issue FDS-349 in Jira) - # on AWS, to avoid overriding manifest, we download the manifest to a temporary folder if "SECRETS_MANAGER_SECRETS" in os.environ: temporary_manifest_storage = "/var/tmp/temp_manifest_download" if not os.path.exists(temporary_manifest_storage): os.mkdir("/var/tmp/temp_manifest_download") download_location = create_temp_folder(temporary_manifest_storage) - - elif CONFIG["synapse"]["manifest_folder"]: - download_location=CONFIG["synapse"]["manifest_folder"] - - else: - download_location=None - - if not download_location: - manifest_data = self.syn.get( - self.manifest_id, - ) - # if download_location is provided and it is not an empty string else: - manifest_data = self.syn.get( - self.manifest_id, - downloadLocation=download_location, - ifcollision="overwrite.local", - ) - return 
manifest_data

From 01be62dee70facc81bad8d6c23472c7f76b99b94 Mon Sep 17 00:00:00 2001
From: andrewelamb
Date: Fri, 12 May 2023 12:29:51 -0700
Subject: [PATCH 013/135] various fixes

---
 tests/test_api.py | 5 ++---
 tests/test_cli.py | 4 ++--
 2 files changed, 4 insertions(+), 5 deletions(-)

diff --git a/tests/test_api.py b/tests/test_api.py
index f43b13977..62cc35a63 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -11,7 +11,6 @@
 import pandas as pd # third party library import
 from schematic.schemas.generator import SchemaGenerator #Local application/library specific imports.
 from schematic.configuration.configuration import Configuration
-from schematic.configuration.configuration import CONFIG

 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
@@ -580,7 +579,7 @@ def test_get_datatype_manifest(self, client, syn_token):

     @pytest.mark.parametrize("manifest_id, expected_component, expected_file_name", [("syn51078535", "BulkRNA-seqAssay", "synapse_storage_manifest.csv"), ("syn51156998", "Biospecimen", "synapse_storage_manifest_biospecimen.csv")])
     @pytest.mark.parametrize("new_manifest_name",[None,"Example.csv"])
     @pytest.mark.parametrize("as_json",[None,True,False])
-    def test_manifest_download(self, config, client, syn_token, manifest_id, new_manifest_name, as_json, expected_component, expected_file_name):
+    def test_manifest_download(self, config: Configuration, client, syn_token, manifest_id, new_manifest_name, as_json, expected_component, expected_file_name):
         params = {
             "access_token": syn_token,
             "manifest_id": manifest_id,
@@ -600,7 +599,7 @@ def test_manifest_download(self, config, client, syn_token, manifest_id, new_man
             assert response_dta[0]["Component"] == expected_component

         current_work_dir = os.getcwd()
-        folder_test_manifests = config["synapse"]["manifest_folder"]
+        folder_test_manifests = config.synapse_manifest_folder
         folder_dir = os.path.join(current_work_dir, folder_test_manifests)

         # if a manifest gets renamed, get new manifest file path
diff --git a/tests/test_cli.py b/tests/test_cli.py
index 0e1e37799..2f9eb224d 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -38,7 +38,7 @@ def assert_expected_file(self, result, output_path):
         except:
             pass

-    def test_schema_convert_cli(self, runner, config_path, helpers):
+    def test_schema_convert_cli(self, runner, helpers):

         data_model_csv_path = helpers.get_data_path("example.model.csv")

@@ -88,7 +88,7 @@ def test_get_example_manifest_excel(self, runner, helpers, config: Configuration
         output_path = helpers.get_data_path("test.xlsx")

         result = runner.invoke(
-            manifest, ["--config", config.config_path, "get", "--data_type", "Patient", "--jsonld", data_model_jsonld, "--output_xlsx", output_path]
+            manifest, ["--config", "/home/alamb/repos/schematic/tests/data/test_config.yml", "get", "--data_type", "Patient", "--jsonld", data_model_jsonld, "--output_xlsx", output_path]
         )

         assert result.exit_code == 0

From 9c29962b2fa0dcb5b6c27a73682038252ec45966 Mon Sep 17 00:00:00 2001
From: andrewelamb
Date: Sat, 13 May 2023 08:09:55 -0700
Subject: [PATCH 014/135] google master template can now be None

---
 schematic/configuration/configuration.py | 4 +--
 schematic/configuration/dataclasses.py | 25 +++++++++++++++---
 schematic/manifest/generator.py | 32 
+++++++++++++++++++++--- tests/data/test_config2.yml | 2 +- 4 files changed, 53 insertions(+), 10 deletions(-) diff --git a/schematic/configuration/configuration.py b/schematic/configuration/configuration.py index 48f2e5c09..976f5d5d1 100644 --- a/schematic/configuration/configuration.py +++ b/schematic/configuration/configuration.py @@ -152,10 +152,10 @@ def service_account_credentials_path(self) -> str: ) @property - def google_sheets_master_template_id(self) -> str: + def google_sheets_master_template_id(self) -> Optional[str]: """ Returns: - str: + Optional[str]: """ return self._google_sheets_config.master_template_id diff --git a/schematic/configuration/dataclasses.py b/schematic/configuration/dataclasses.py index 716bda6e6..a082f7514 100644 --- a/schematic/configuration/dataclasses.py +++ b/schematic/configuration/dataclasses.py @@ -1,6 +1,7 @@ """Pydantic dataclasses""" import re +from typing import Optional from dataclasses import field from pydantic.dataclasses import dataclass from pydantic import validator, ConfigDict @@ -130,13 +131,13 @@ class GoogleSheetsConfig: service_acct_creds_synapse_id: str = "syn25171627" service_acct_creds_basename: str = "schematic_service_account_creds.json" - master_template_id: str = "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU" + master_template_id: Optional[str] = "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU" strict_validation: bool = True - @validator("master_template_id", "service_acct_creds_basename") + @validator("service_acct_creds_basename") @classmethod def validate_string_is_not_empty(cls, value: str) -> str: - """Check if string is not empty(has at least one char) + """Check if string is not empty(has at least one char) Args: value (str): A string @@ -151,6 +152,24 @@ def validate_string_is_not_empty(cls, value: str) -> str: raise ValueError(f"{value} is an empty string") return value + @validator("master_template_id") + @classmethod + def validate_optional_string_is_not_empty(cls, value: str) -> str: + """Check if string is not empty(has at least one char) + + Args: + value (Optional[str]): A string + + Raises: + ValueError: If the value is zero characters long + + Returns: + (str): The input value + """ + if value is not None and len(value) == 0: + raise ValueError(f"{value} is an empty string") + return value + @validator("service_acct_creds_synapse_id") @classmethod def validate_synapse_id(cls, value: str) -> str: diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index e8e6faf36..593e0a256 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -168,10 +168,34 @@ def _gdrive_copy_file(self, origin_file_id, copy_title): .execute()["id"] ) - def _create_empty_manifest_spreadsheet(self, title): - spreadsheet_id = self._gdrive_copy_file( - CONFIG.google_sheets_master_template_id, title - ) + def _create_empty_manifest_spreadsheet(self, title:str) -> str: + """ + Creates an empty google spreadsheet returning the id. 
+ If the configuration has a template id it will be used + + Args: + title (str): The title of the spreadsheet + + Returns: + str: The id of the created spreadsheet + """ + template_id = CONFIG.google_sheets_master_template_id + + if template_id is not None: + spreadsheet_id = self._gdrive_copy_file(template_id, title) + + else: + spreadsheet_body = { + 'properties': { + 'title': title + } + } + + spreadsheet_id = self.sheet_service.spreadsheets().create( + body=spreadsheet_body, + fields="spreadsheetId").execute().get("spreadsheetId" + ) + return spreadsheet_id def _get_cell_borders(self, cell_range): diff --git a/tests/data/test_config2.yml b/tests/data/test_config2.yml index 99b2d4546..5128ec0d2 100644 --- a/tests/data/test_config2.yml +++ b/tests/data/test_config2.yml @@ -17,5 +17,5 @@ model: google_sheets: service_acct_creds_synapse_id: 'syn1' service_acct_creds_basename: "creds.json" - master_template_id: 'id' + master_template_id: null strict_validation: false From 58b73894eec9fcee7c6aba0a3ca2fa6724a53c08 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Sat, 13 May 2023 08:45:03 -0700 Subject: [PATCH 015/135] fixed test --- schematic/configuration/configuration.py | 11 ++++++++++- tests/test_manifest.py | 8 ++++++-- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/schematic/configuration/configuration.py b/schematic/configuration/configuration.py index 976f5d5d1..1c5098cea 100644 --- a/schematic/configuration/configuration.py +++ b/schematic/configuration/configuration.py @@ -86,7 +86,7 @@ def synapse_master_fileview_id(self) -> str: @synapse_master_fileview_id.setter def synapse_master_fileview_id(self, synapse_id: str) -> None: - """Sets the synapse_master_fileview_id + """Sets the Synapse master fileview ID Args: synapse_id (str): The synapse id to set @@ -159,6 +159,15 @@ def google_sheets_master_template_id(self) -> Optional[str]: """ return self._google_sheets_config.master_template_id + @google_sheets_master_template_id.setter + def google_sheets_master_template_id(self, template_id: str) -> None: + """Sets the Google sheets master template ID + + Args: + template_id (str): The template id to set + """ + self._google_sheets_config.master_template_id = template_id + @property def google_sheets_strict_validation(self) -> bool: """ diff --git a/tests/test_manifest.py b/tests/test_manifest.py index c65b4676a..90aac7c18 100644 --- a/tests/test_manifest.py +++ b/tests/test_manifest.py @@ -222,8 +222,9 @@ def test_create_empty_manifest_spreadsheet(self, config: Configuration, simple_m assert spreadsheet_id == "mock google sheet id" else: - # overwrite test config so that we could test the case when manifest_template_id is not provided - config["style"]["google_manifest"]["master_template_id"] = "" + # Temporarily set master template id to None so that we could test that + template_id = config.google_sheets_master_template_id + config.google_sheets_master_template_id = None mock_spreadsheet = Mock() mock_execute = Mock() @@ -240,6 +241,9 @@ def test_create_empty_manifest_spreadsheet(self, config: Configuration, simple_m spreadsheet_id = generator._create_empty_manifest_spreadsheet(title) assert spreadsheet_id == "mock id" + # Reset config template id + config.google_sheets_master_template_id = config.google_sheets_master_template_id + @pytest.mark.parametrize("schema_path_provided", [True, False]) def test_get_json_schema(self, simple_manifest_generator, helpers, schema_path_provided): ''' From 411ad63d95155405e9778c41851023300494ee3d Mon Sep 17 00:00:00 2001 From: 
andrewelamb Date: Mon, 15 May 2023 08:34:07 -0700 Subject: [PATCH 016/135] google sheet template id defaults to None --- great_expectations/great_expectations.yml | 91 ++++++++--------------- schematic/configuration/dataclasses.py | 2 +- tests/data/test_config.yml | 2 +- tests/data/test_config2.yml | 2 +- tests/test_configuration.py | 34 +++++++-- 5 files changed, 61 insertions(+), 70 deletions(-) diff --git a/great_expectations/great_expectations.yml b/great_expectations/great_expectations.yml index f4976cef6..04809b24d 100644 --- a/great_expectations/great_expectations.yml +++ b/great_expectations/great_expectations.yml @@ -1,109 +1,78 @@ -# Welcome to Great Expectations! Always know what to expect from your data. -# -# Here you can define datasources, batch kwargs generators, integrations and -# more. This file is intended to be committed to your repo. For help with -# configuration please: -# - Read our docs: https://docs.greatexpectations.io/en/latest/reference/spare_parts/data_context_reference.html#configuration -# - Join our slack channel: http://greatexpectations.io/slack - -# config_version refers to the syntactic version of this config file, and is used in maintaining backwards compatibility -# It is auto-generated and usually does not need to be changed. -config_version: 3.0 - -# Datasources tell Great Expectations where your data lives and how to get it. -# You can use the CLI command `great_expectations datasource new` to help you -# add a new datasource. Read more at https://docs.greatexpectations.io/en/latest/reference/core_concepts/datasource.html +expectations_store_name: expectations_store +checkpoint_store_name: checkpoint_store +anonymous_usage_statistics: + enabled: true + data_context_id: 1130ca8d-9731-45d9-bc05-2032535250c8 +validations_store_name: validations_store datasources: - Manifest: - module_name: great_expectations.datasource + pandas: + class_name: Datasource execution_engine: - module_name: great_expectations.execution_engine class_name: PandasExecutionEngine + module_name: great_expectations.datasource data_connectors: default_runtime_data_connector_name: - module_name: great_expectations.datasource.data_connector - class_name: RuntimeDataConnector batch_identifiers: - - default_identifier - class_name: Datasource + - default_identifier_name + class_name: RuntimeDataConnector example_datasource: - module_name: great_expectations.datasource + class_name: Datasource execution_engine: module_name: great_expectations.execution_engine class_name: PandasExecutionEngine + module_name: great_expectations.datasource data_connectors: default_runtime_data_connector_name: - module_name: great_expectations.datasource.data_connector - class_name: RuntimeDataConnector batch_identifiers: - default_identifier_name - class_name: Datasource -config_variables_file_path: uncommitted/config_variables.yml - -# The plugins_directory will be added to your python path for custom modules -# used to override and extend Great Expectations. -plugins_directory: plugins/ - + class_name: RuntimeDataConnector + module_name: great_expectations.datasource.data_connector +profiler_store_name: profiler_store +notebooks: +config_variables_file_path: +include_rendered_content: + expectation_suite: false + expectation_validation_result: false + globally: false stores: -# Stores are configurable places to store things like Expectations, Validations -# Data Docs, and more. These are for advanced users only - most users can simply -# leave this section alone. 
-# -# Three stores are required: expectations, validations, and -# evaluation_parameters, and must exist with a valid store entry. Additional -# stores can be configured for uses such as data_docs, etc. expectations_store: class_name: ExpectationsStore store_backend: class_name: TupleFilesystemStoreBackend base_directory: expectations/ - + root_directory: /home/alamb/repos/schematic/great_expectations validations_store: class_name: ValidationsStore store_backend: class_name: TupleFilesystemStoreBackend base_directory: uncommitted/validations/ - + root_directory: /home/alamb/repos/schematic/great_expectations evaluation_parameter_store: - # Evaluation Parameters enable dynamic expectations. Read more here: - # https://docs.greatexpectations.io/en/latest/reference/core_concepts/evaluation_parameters.html class_name: EvaluationParameterStore - checkpoint_store: class_name: CheckpointStore store_backend: class_name: TupleFilesystemStoreBackend suppress_store_backend_id: true base_directory: checkpoints/ - + root_directory: /home/alamb/repos/schematic/great_expectations profiler_store: class_name: ProfilerStore store_backend: class_name: TupleFilesystemStoreBackend suppress_store_backend_id: true base_directory: profilers/ - -expectations_store_name: expectations_store -validations_store_name: validations_store -evaluation_parameter_store_name: evaluation_parameter_store -checkpoint_store_name: checkpoint_store - + root_directory: /home/alamb/repos/schematic/great_expectations data_docs_sites: - # Data Docs make it simple to visualize data quality in your project. These - # include Expectations, Validations & Profiles. The are built for all - # Datasources from JSON artifacts in the local repo including validations & - # profiles from the uncommitted directory. 
Read more at https://docs.greatexpectations.io/en/latest/reference/core_concepts/data_docs.html local_site: class_name: SiteBuilder - # set to false to hide how-to buttons in Data Docs show_how_to_buttons: true store_backend: class_name: TupleFilesystemStoreBackend base_directory: uncommitted/data_docs/local_site/ + root_directory: /home/alamb/repos/schematic/great_expectations site_index_builder: class_name: DefaultSiteIndexBuilder - -anonymous_usage_statistics: - data_context_id: 1130ca8d-9731-45d9-bc05-2032535250c8 - enabled: true -notebooks: \ No newline at end of file +plugins_directory: +evaluation_parameter_store_name: evaluation_parameter_store +config_version: 3.0 diff --git a/schematic/configuration/dataclasses.py b/schematic/configuration/dataclasses.py index a082f7514..a0e11f7af 100644 --- a/schematic/configuration/dataclasses.py +++ b/schematic/configuration/dataclasses.py @@ -131,7 +131,7 @@ class GoogleSheetsConfig: service_acct_creds_synapse_id: str = "syn25171627" service_acct_creds_basename: str = "schematic_service_account_creds.json" - master_template_id: Optional[str] = "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU" + master_template_id: Optional[str] = None strict_validation: bool = True @validator("service_acct_creds_basename") diff --git a/tests/data/test_config.yml b/tests/data/test_config.yml index f11771c40..79b7dab73 100644 --- a/tests/data/test_config.yml +++ b/tests/data/test_config.yml @@ -19,5 +19,5 @@ model: google_sheets: service_acct_creds_synapse_id: 'syn25171627' service_acct_creds_basename: "schematic_service_account_creds.json" - master_template_id: '1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU' + master_template_id: null strict_validation: true diff --git a/tests/data/test_config2.yml b/tests/data/test_config2.yml index 5128ec0d2..056a12957 100644 --- a/tests/data/test_config2.yml +++ b/tests/data/test_config2.yml @@ -17,5 +17,5 @@ model: google_sheets: service_acct_creds_synapse_id: 'syn1' service_acct_creds_basename: "creds.json" - master_template_id: null + master_template_id: '1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU' strict_validation: false diff --git a/tests/test_configuration.py b/tests/test_configuration.py index 949fa24a0..0845c15fc 100644 --- a/tests/test_configuration.py +++ b/tests/test_configuration.py @@ -135,10 +135,7 @@ def test_init(self) -> None: os.path.basename(config.service_account_credentials_path) == "schematic_service_account_creds.json" ) - assert ( - config.google_sheets_master_template_id - == "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU" - ) + assert config.google_sheets_master_template_id is None assert config.google_sheets_strict_validation assert config.google_required_background_color == { "red": 0.9215, @@ -154,6 +151,30 @@ def test_init(self) -> None: def test_load_config(self) -> None: """Testing for Configuration.load_config""" config = Configuration() + + config.load_config("tests/data/test_config.yml") + assert os.path.basename(config.config_path) == "test_config.yml" + assert config.synapse_configuration_path != ".synapseConfig" + assert os.path.basename(config.synapse_configuration_path) == ".synapseConfig" + assert config.synapse_manifest_basename == "synapse_storage_manifest" + assert config.synapse_master_fileview_id == "syn23643253" + assert config.synapse_manifest_folder == "manifests" + assert config.manifest_title == "example" + assert config.manifest_data_type == ["Biospecimen", "Patient"] + assert config.model_location == "tests/data/example.model.jsonld" + assert config.model_file_type == 
"local" + assert config.service_account_credentials_synapse_id + assert ( + config.service_account_credentials_path + != "schematic_service_account_creds.json" + ) + assert ( + os.path.basename(config.service_account_credentials_path) + == "schematic_service_account_creds.json" + ) + assert config.google_sheets_master_template_id is None + assert config.google_sheets_strict_validation + config.load_config("tests/data/test_config2.yml") assert os.path.basename(config.config_path) == "test_config2.yml" assert os.path.basename(config.synapse_configuration_path) == "file_name" @@ -165,10 +186,11 @@ def test_load_config(self) -> None: assert config.model_location == "model.jsonld" assert config.model_file_type == "not_local" assert config.service_account_credentials_synapse_id + assert os.path.basename(config.service_account_credentials_path) == "creds.json" assert ( - os.path.basename(config.service_account_credentials_path) == "creds.json" + config.google_sheets_master_template_id + == "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU" ) - assert config.google_sheets_master_template_id == "id" assert not config.google_sheets_strict_validation def test_set_synapse_master_fileview_id(self) -> None: From 80431d395d96deecfb74f24c0ce0e6df9d7382ea Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Mon, 15 May 2023 09:48:41 -0700 Subject: [PATCH 017/135] fix test where path abs path was wrong --- tests/test_store.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/test_store.py b/tests/test_store.py index 8136ea388..40cf1f2b4 100644 --- a/tests/test_store.py +++ b/tests/test_store.py @@ -118,8 +118,7 @@ def test_annotation_submission(self, synapse_store, helpers, config: Configurati manifest_path = "mock_manifests/annotations_test_manifest.csv" # Upload dataset annotations - inputModelLocaiton = helpers.get_data_path(config.model_location) - sg = SchemaGenerator(inputModelLocaiton) + sg = SchemaGenerator(config.model_location) try: for attempt in Retrying( From ca549ba1795b44db5ee91c7c4364daad6893a0e3 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Wed, 17 May 2023 08:19:15 -0700 Subject: [PATCH 018/135] Changed how config.yml works --- .gitignore | 5 ++++- README.md | 40 +++------------------------------------- config.yml | 36 ------------------------------------ config_example.yml | 2 +- 4 files changed, 8 insertions(+), 75 deletions(-) delete mode 100644 config.yml diff --git a/.gitignore b/.gitignore index 915c987fe..fa0de2078 100644 --- a/.gitignore +++ b/.gitignore @@ -176,4 +176,7 @@ tests/data/schema.gpickle # Created during testting Example* -manifests/* \ No newline at end of file +manifests/* + +# schematic config file +config.yml \ No newline at end of file diff --git a/README.md b/README.md index 2f2c00b4a..8b35a4a24 100644 --- a/README.md +++ b/README.md @@ -88,43 +88,9 @@ editor of your choice and edit the `username` and `authtoken` attribute under th Configure config.yml File -*Note*: Below is only a brief explanation of some attributes in `config.yml`. Please use the link [here](https://github.com/Sage-Bionetworks/schematic/blob/develop/config.yml) to get the latest version of `config.yml` in `develop` branch. 
- -Description of `config.yml` attributes - - definitions: - synapse_config: "~/path/to/.synapseConfig" - service_acct_creds: "~/path/to/service_account_creds.json" - - synapse: - master_fileview: "syn23643253" # fileview of project with datasets on Synapse - manifest_folder: "~/path/to/manifest_folder/" # manifests will be downloaded to this folder - manifest_basename: "filename" # base name of the manifest file in the project dataset, without extension - service_acct_creds: "syn25171627" # synapse ID of service_account_creds.json file - - manifest: - title: "example" # title of metadata manifest file - # to make all manifests enter only 'all manifests' - data_type: - - "Biospecimen" - - "Patient" - - model: - input: - location: "data/schema_org_schemas/example.jsonld" # path to JSON-LD data model - file_type: "local" # only type "local" is supported currently - style: # configuration of google sheet - google_manifest: - req_bg_color: - red: 0.9215 - green: 0.9725 - blue: 0.9803 - opt_bg_color: - red: 1.0 - green: 1.0 - blue: 0.9019 - master_template_id: '1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU' - strict_validation: true +There are some defaults in schematic that cna be configured. These fields are in ``config_example.yml``. If you want to change any of these copy ``config_example.yml`` to ``config.yml``, change any fields you want to, and remove any fields you don't. + +*Note*: `config.yml` is ignored by git. *Note*: Paths can be specified relative to the `config.yml` file or as absolute paths. diff --git a/config.yml b/config.yml deleted file mode 100644 index b71839aeb..000000000 --- a/config.yml +++ /dev/null @@ -1,36 +0,0 @@ -# Do not change the 'definitions' section unless you know what you're doing -definitions: - synapse_config: ".synapseConfig" - service_acct_creds: "schematic_service_account_creds.json" - -synapse: - master_fileview: 'syn23643253' - manifest_folder: 'manifests' - manifest_basename: 'synapse_storage_manifest' - service_acct_creds: 'syn25171627' - -manifest: - # if making many manifests, just include name prefix - title: 'example' - # to make all manifests enter only 'all manifests' - data_type: - - 'Biospecimen' - - 'Patient' - -model: - input: - location: 'tests/data/example.model.jsonld' - file_type: 'local' - -style: - google_manifest: - req_bg_color: - red: 0.9215 - green: 0.9725 - blue: 0.9803 - opt_bg_color: - red: 1.0 - green: 1.0 - blue: 0.9019 - master_template_id: '1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU' - strict_validation: true diff --git a/config_example.yml b/config_example.yml index 95f6b6ad8..fa755379f 100644 --- a/config_example.yml +++ b/config_example.yml @@ -18,5 +18,5 @@ model: google_sheets: service_acct_creds_synapse_id: 'syn25171627' service_acct_creds_basename: "schematic_service_account_creds.json" - master_template_id: '1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU' + master_template_id: null strict_validation: true From d06301695031bd559e30ab63d13bfc5c21e96e8b Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Wed, 17 May 2023 08:19:36 -0700 Subject: [PATCH 019/135] fixed some typos --- schematic/configuration/dataclasses.py | 2 +- tests/data/test_config.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/schematic/configuration/dataclasses.py b/schematic/configuration/dataclasses.py index a0e11f7af..d2febf38e 100644 --- a/schematic/configuration/dataclasses.py +++ b/schematic/configuration/dataclasses.py @@ -13,7 +13,7 @@ @dataclass(config=pydantic_config) class SynapseConfig: """ - config_basename: teh 
basename of the synapse config file + config_basename: the basename of the synapse config file manifest_basename: the name of downloaded manifest files master_fileview_id: Synapse id for the master file view manifest_folder: name of the folder manifests will be saved to locally diff --git a/tests/data/test_config.yml b/tests/data/test_config.yml index 79b7dab73..2c8618d38 100644 --- a/tests/data/test_config.yml +++ b/tests/data/test_config.yml @@ -2,7 +2,7 @@ asset_store: synapse: config_basename: ".synapseConfig" - manifest_basename: 'synapse_storage_manifest' + manifest_basename: 'synapse_storage_manifest' master_fileview_id: 'syn23643253' manifest_folder: 'manifests' From ebe37fcfcc668c7f64729056ffa157bc7d219216 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Wed, 17 May 2023 08:23:58 -0700 Subject: [PATCH 020/135] revert back to dev version --- great_expectations/great_expectations.yml | 91 +++++++++++++++-------- 1 file changed, 61 insertions(+), 30 deletions(-) diff --git a/great_expectations/great_expectations.yml b/great_expectations/great_expectations.yml index 04809b24d..60b73a8f3 100644 --- a/great_expectations/great_expectations.yml +++ b/great_expectations/great_expectations.yml @@ -1,78 +1,109 @@ -expectations_store_name: expectations_store -checkpoint_store_name: checkpoint_store -anonymous_usage_statistics: - enabled: true - data_context_id: 1130ca8d-9731-45d9-bc05-2032535250c8 -validations_store_name: validations_store +# Welcome to Great Expectations! Always know what to expect from your data. +# +# Here you can define datasources, batch kwargs generators, integrations and +# more. This file is intended to be committed to your repo. For help with +# configuration please: +# - Read our docs: https://docs.greatexpectations.io/en/latest/reference/spare_parts/data_context_reference.html#configuration +# - Join our slack channel: http://greatexpectations.io/slack + +# config_version refers to the syntactic version of this config file, and is used in maintaining backwards compatibility +# It is auto-generated and usually does not need to be changed. +config_version: 3.0 + +# Datasources tell Great Expectations where your data lives and how to get it. +# You can use the CLI command `great_expectations datasource new` to help you +# add a new datasource. 
Read more at https://docs.greatexpectations.io/en/latest/reference/core_concepts/datasource.html datasources: - pandas: - class_name: Datasource + Manifest: + module_name: great_expectations.datasource execution_engine: + module_name: great_expectations.execution_engine class_name: PandasExecutionEngine - module_name: great_expectations.datasource data_connectors: default_runtime_data_connector_name: - batch_identifiers: - - default_identifier_name + module_name: great_expectations.datasource.data_connector class_name: RuntimeDataConnector - example_datasource: + batch_identifiers: + - default_identifier class_name: Datasource + example_datasource: + module_name: great_expectations.datasource execution_engine: module_name: great_expectations.execution_engine class_name: PandasExecutionEngine - module_name: great_expectations.datasource data_connectors: default_runtime_data_connector_name: + module_name: great_expectations.datasource.data_connector + class_name: RuntimeDataConnector batch_identifiers: - default_identifier_name - class_name: RuntimeDataConnector - module_name: great_expectations.datasource.data_connector -profiler_store_name: profiler_store -notebooks: -config_variables_file_path: -include_rendered_content: - expectation_suite: false - expectation_validation_result: false - globally: false + class_name: Datasource +config_variables_file_path: uncommitted/config_variables.yml + +# The plugins_directory will be added to your python path for custom modules +# used to override and extend Great Expectations. +plugins_directory: plugins/ + stores: +# Stores are configurable places to store things like Expectations, Validations +# Data Docs, and more. These are for advanced users only - most users can simply +# leave this section alone. +# +# Three stores are required: expectations, validations, and +# evaluation_parameters, and must exist with a valid store entry. Additional +# stores can be configured for uses such as data_docs, etc. expectations_store: class_name: ExpectationsStore store_backend: class_name: TupleFilesystemStoreBackend base_directory: expectations/ - root_directory: /home/alamb/repos/schematic/great_expectations + validations_store: class_name: ValidationsStore store_backend: class_name: TupleFilesystemStoreBackend base_directory: uncommitted/validations/ - root_directory: /home/alamb/repos/schematic/great_expectations + evaluation_parameter_store: + # Evaluation Parameters enable dynamic expectations. Read more here: + # https://docs.greatexpectations.io/en/latest/reference/core_concepts/evaluation_parameters.html class_name: EvaluationParameterStore + checkpoint_store: class_name: CheckpointStore store_backend: class_name: TupleFilesystemStoreBackend suppress_store_backend_id: true base_directory: checkpoints/ - root_directory: /home/alamb/repos/schematic/great_expectations + profiler_store: class_name: ProfilerStore store_backend: class_name: TupleFilesystemStoreBackend suppress_store_backend_id: true base_directory: profilers/ - root_directory: /home/alamb/repos/schematic/great_expectations + +expectations_store_name: expectations_store +validations_store_name: validations_store +evaluation_parameter_store_name: evaluation_parameter_store +checkpoint_store_name: checkpoint_store + data_docs_sites: + # Data Docs make it simple to visualize data quality in your project. These + # include Expectations, Validations & Profiles. 
The are built for all + # Datasources from JSON artifacts in the local repo including validations & + # profiles from the uncommitted directory. Read more at https://docs.greatexpectations.io/en/latest/reference/core_concepts/data_docs.html local_site: class_name: SiteBuilder + # set to false to hide how-to buttons in Data Docs show_how_to_buttons: true store_backend: class_name: TupleFilesystemStoreBackend base_directory: uncommitted/data_docs/local_site/ - root_directory: /home/alamb/repos/schematic/great_expectations site_index_builder: class_name: DefaultSiteIndexBuilder -plugins_directory: -evaluation_parameter_store_name: evaluation_parameter_store -config_version: 3.0 + +anonymous_usage_statistics: + data_context_id: 1130ca8d-9731-45d9-bc05-2032535250c8 + enabled: true +notebooks: From 5dee8ca31ea5e11d33dff7124b5cdaa1fe179937 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Wed, 17 May 2023 08:25:19 -0700 Subject: [PATCH 021/135] fix typo --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 8b35a4a24..42b5855d6 100644 --- a/README.md +++ b/README.md @@ -88,7 +88,7 @@ editor of your choice and edit the `username` and `authtoken` attribute under th Configure config.yml File -There are some defaults in schematic that cna be configured. These fields are in ``config_example.yml``. If you want to change any of these copy ``config_example.yml`` to ``config.yml``, change any fields you want to, and remove any fields you don't. +There are some defaults in schematic that can be configured. These fields are in ``config_example.yml``. If you want to change any of these copy ``config_example.yml`` to ``config.yml``, change any fields you want to, and remove any fields you don't. *Note*: `config.yml` is ignored by git. 
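
A minimal sketch of the override workflow the README change above describes, assuming the Configuration singleton and the load_config method shown in these patches; the "config.yml" path is a hypothetical user-created copy of config_example.yml:

    # Sketch only: assumes schematic is installed and a user-created config.yml
    # (copied from config_example.yml) sits in the working directory.
    from schematic.configuration.configuration import Configuration

    config = Configuration()
    config.load_config("config.yml")  # overrides only the defaults listed in the file

    # Fields left out of config.yml keep their packaged default values,
    # e.g. the local folder manifests are downloaded to.
    print(config.synapse_manifest_folder)
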
From 9f62c5499061393d1eb81e7564afcbda23d712c7 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Wed, 17 May 2023 11:22:21 -0700 Subject: [PATCH 022/135] fixes and suggestions from Linglin --- schematic/configuration/dataclasses.py | 12 ++++++------ schematic/manifest/commands.py | 2 +- schematic/manifest/generator.py | 3 +++ schematic/models/commands.py | 5 ++--- schematic/utils/cli_utils.py | 8 ++++---- 5 files changed, 16 insertions(+), 14 deletions(-) diff --git a/schematic/configuration/dataclasses.py b/schematic/configuration/dataclasses.py index d2febf38e..46d2b479d 100644 --- a/schematic/configuration/dataclasses.py +++ b/schematic/configuration/dataclasses.py @@ -57,7 +57,7 @@ def validate_string_is_not_empty(cls, value: str) -> str: Returns: (str): The input value """ - if len(value) == 0: + if not value: raise ValueError(f"{value} is an empty string") return value @@ -86,7 +86,7 @@ def validate_string_is_not_empty(cls, value: str) -> str: Returns: (str): The input value """ - if len(value) == 0: + if not value: raise ValueError(f"{value} is an empty string") return value @@ -115,7 +115,7 @@ def validate_string_is_not_empty(cls, value: str) -> str: Returns: (str): The input value """ - if len(value) == 0: + if not value: raise ValueError(f"{value} is an empty string") return value @@ -134,7 +134,7 @@ class GoogleSheetsConfig: master_template_id: Optional[str] = None strict_validation: bool = True - @validator("service_acct_creds_basename") + @validator("service_acct_creds_basename", ) @classmethod def validate_string_is_not_empty(cls, value: str) -> str: """Check if string is not empty(has at least one char) @@ -148,11 +148,11 @@ def validate_string_is_not_empty(cls, value: str) -> str: Returns: (str): The input value """ - if len(value) == 0: + if not value: raise ValueError(f"{value} is an empty string") return value - @validator("master_template_id") + @validator() @classmethod def validate_optional_string_is_not_empty(cls, value: str) -> str: """Check if string is not empty(has at least one char) diff --git a/schematic/manifest/commands.py b/schematic/manifest/commands.py index 9ad9a8be6..232973b57 100644 --- a/schematic/manifest/commands.py +++ b/schematic/manifest/commands.py @@ -169,7 +169,7 @@ def create_single_manifest(data_type, output_csv=None, output_xlsx=None): output_path = None result = manifest_generator.get_manifest( - dataset_id=dataset_id, sheet_url=sheet_url, json_schema=None, output_format = output_format, output_path = output_path + dataset_id=dataset_id, sheet_url=sheet_url, json_schema=json_schema, output_format = output_format, output_path = output_path ) if sheet_url: diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index 768619b44..05142b036 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -535,6 +535,9 @@ def _gs_add_and_format_columns(self, required_metadata_fields, spreadsheet_id): # adding columns to 2nd sheet that can be used for storing data validation ranges (this avoids limitations on number of dropdown items in excel and openoffice) range = "Sheet2!A1:" + str(end_col_letter) + "1" + print(spreadsheet_id) + print(range) + print(body) self.sheet_service.spreadsheets().values().update( spreadsheetId=spreadsheet_id, range=range, valueInputOption="RAW", body=body ).execute() diff --git a/schematic/models/commands.py b/schematic/models/commands.py index c3f8d53e9..bc9909fc7 100644 --- a/schematic/models/commands.py +++ b/schematic/models/commands.py @@ -199,9 +199,8 @@ def 
validate_manifest(ctx, manifest_path, data_type, json_schema, restrict_rules
 
     t_validate = perf_counter()
 
-    if jsonld is None:
-        jsonld = CONFIG.model_location
-        log_value_from_config("jsonld", jsonld)
+    jsonld = CONFIG.model_location
+    log_value_from_config("jsonld", jsonld)
 
     file_type = CONFIG.model_file_type
     log_value_from_config("file_type", file_type)
diff --git a/schematic/utils/cli_utils.py b/schematic/utils/cli_utils.py
index 57f1a368c..c68fe46f6 100644
--- a/schematic/utils/cli_utils.py
+++ b/schematic/utils/cli_utils.py
@@ -9,6 +9,9 @@
 
 logger = logging.getLogger(__name__)
 
+# We are using fstrings in logger methods
+# pylint: disable=logging-fstring-interpolation
+
 
 def query_dict(dictionary: Mapping[Any, Any], keys: Sequence[Any]) -> Union[Any, None]:
     """Access a nested value in a dictionary corresponding
@@ -41,10 +44,7 @@ def log_value_from_config(arg_name: str, config_value: Any):
         config_value (Any): The value in the config
     """
     logger.info(
-        (
-            "The '--%s' argument is being taken from configuration file,"
-            " i.e., '%s'.", arg_name, config_value
-        )
+        f"The {arg_name} argument is being taken from configuration file, i.e., {config_value}."
     )
 
 def parse_synIDs(

From 2f85c5747c830c824ed553da44c61a580867a150 Mon Sep 17 00:00:00 2001
From: andrewelamb
Date: Wed, 17 May 2023 11:23:32 -0700
Subject: [PATCH 023/135] fix validator with no field

---
 schematic/configuration/dataclasses.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/schematic/configuration/dataclasses.py b/schematic/configuration/dataclasses.py
index 46d2b479d..15a8cfa0c 100644
--- a/schematic/configuration/dataclasses.py
+++ b/schematic/configuration/dataclasses.py
@@ -134,7 +134,7 @@ class GoogleSheetsConfig:
     master_template_id: Optional[str] = None
     strict_validation: bool = True
 
-    @validator("service_acct_creds_basename", )
+    @validator("service_acct_creds_basename")
     @classmethod
     def validate_string_is_not_empty(cls, value: str) -> str:
         """Check if string is not empty(has at least one char)
@@ -152,7 +152,7 @@ def validate_string_is_not_empty(cls, value: str) -> str:
             raise ValueError(f"{value} is an empty string")
         return value
 
-    @validator()
+    @validator("master_template_id")
     @classmethod
     def validate_optional_string_is_not_empty(cls, value: str) -> str:
         """Check if string is not empty(has at least one char)

From 602cff70f07968a6a38942693f02dbf4acaa95a1 Mon Sep 17 00:00:00 2001
From: andrewelamb
Date: Wed, 17 May 2023 11:56:49 -0700
Subject: [PATCH 024/135] change master template id back to having a default,
 not allowing None

---
 config_example.yml                        |  20 ++---
 great_expectations/great_expectations.yml | 101 ++++++++--------------
 schematic/configuration/configuration.py  |   4 +-
 schematic/configuration/dataclasses.py    |  22 +----
 schematic/manifest/generator.py           |   2 +-
 tests/data/test_config.yml                |   2 +-
 tests/data/test_config2.yml               |  18 ++--
 tests/test_configuration.py               |   9 +-
 tests/test_manifest.py                    |   2 +-
 9 files changed, 63 insertions(+), 117 deletions(-)

diff --git a/config_example.yml b/config_example.yml
index fa755379f..70f579e7f 100644
--- a/config_example.yml
+++ b/config_example.yml
@@ -1,22 +1,22 @@
 asset_store:
   synapse:
     config_basename: ".synapseConfig"
-    manifest_basename: 'synapse_storage_manifest'
-    master_fileview_id: 'syn23643253'
-    manifest_folder: 'manifests'
+    manifest_basename: "synapse_storage_manifest"
+    master_fileview_id: "syn23643253"
+    manifest_folder: "manifests"
 
 manifest:
-  title: 'example'
+  title: "example"
   data_type:
-    - 'Biospecimen'
-    - 'Patient'
+    - "Biospecimen"
+    - 
"Patient" model: - location: 'tests/data/example.model.jsonld' - file_type: 'local' + location: "tests/data/example.model.jsonld" + file_type: "local" google_sheets: - service_acct_creds_synapse_id: 'syn25171627' + service_acct_creds_synapse_id: "syn25171627" service_acct_creds_basename: "schematic_service_account_creds.json" - master_template_id: null + master_template_id: "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU" strict_validation: true diff --git a/great_expectations/great_expectations.yml b/great_expectations/great_expectations.yml index 60b73a8f3..5ece2e0ee 100644 --- a/great_expectations/great_expectations.yml +++ b/great_expectations/great_expectations.yml @@ -1,109 +1,78 @@ -# Welcome to Great Expectations! Always know what to expect from your data. -# -# Here you can define datasources, batch kwargs generators, integrations and -# more. This file is intended to be committed to your repo. For help with -# configuration please: -# - Read our docs: https://docs.greatexpectations.io/en/latest/reference/spare_parts/data_context_reference.html#configuration -# - Join our slack channel: http://greatexpectations.io/slack - -# config_version refers to the syntactic version of this config file, and is used in maintaining backwards compatibility -# It is auto-generated and usually does not need to be changed. -config_version: 3.0 - -# Datasources tell Great Expectations where your data lives and how to get it. -# You can use the CLI command `great_expectations datasource new` to help you -# add a new datasource. Read more at https://docs.greatexpectations.io/en/latest/reference/core_concepts/datasource.html +expectations_store_name: expectations_store +evaluation_parameter_store_name: evaluation_parameter_store +validations_store_name: validations_store +data_docs_sites: + local_site: + class_name: SiteBuilder + show_how_to_buttons: true + store_backend: + class_name: TupleFilesystemStoreBackend + base_directory: uncommitted/data_docs/local_site/ + root_directory: /home/alamb/repos/schematic/great_expectations + site_index_builder: + class_name: DefaultSiteIndexBuilder +config_variables_file_path: +include_rendered_content: + expectation_suite: false + expectation_validation_result: false + globally: false datasources: - Manifest: - module_name: great_expectations.datasource + pandas: execution_engine: - module_name: great_expectations.execution_engine class_name: PandasExecutionEngine + class_name: Datasource + module_name: great_expectations.datasource data_connectors: default_runtime_data_connector_name: - module_name: great_expectations.datasource.data_connector class_name: RuntimeDataConnector batch_identifiers: - - default_identifier - class_name: Datasource + - default_identifier_name example_datasource: - module_name: great_expectations.datasource execution_engine: - module_name: great_expectations.execution_engine class_name: PandasExecutionEngine + module_name: great_expectations.execution_engine + class_name: Datasource + module_name: great_expectations.datasource data_connectors: default_runtime_data_connector_name: - module_name: great_expectations.datasource.data_connector class_name: RuntimeDataConnector + module_name: great_expectations.datasource.data_connector batch_identifiers: - default_identifier_name - class_name: Datasource -config_variables_file_path: uncommitted/config_variables.yml - -# The plugins_directory will be added to your python path for custom modules -# used to override and extend Great Expectations. 
-plugins_directory: plugins/ - +plugins_directory: +checkpoint_store_name: checkpoint_store stores: -# Stores are configurable places to store things like Expectations, Validations -# Data Docs, and more. These are for advanced users only - most users can simply -# leave this section alone. -# -# Three stores are required: expectations, validations, and -# evaluation_parameters, and must exist with a valid store entry. Additional -# stores can be configured for uses such as data_docs, etc. expectations_store: class_name: ExpectationsStore store_backend: class_name: TupleFilesystemStoreBackend base_directory: expectations/ - + root_directory: /home/alamb/repos/schematic/great_expectations validations_store: class_name: ValidationsStore store_backend: class_name: TupleFilesystemStoreBackend base_directory: uncommitted/validations/ - + root_directory: /home/alamb/repos/schematic/great_expectations evaluation_parameter_store: - # Evaluation Parameters enable dynamic expectations. Read more here: - # https://docs.greatexpectations.io/en/latest/reference/core_concepts/evaluation_parameters.html class_name: EvaluationParameterStore - checkpoint_store: class_name: CheckpointStore store_backend: class_name: TupleFilesystemStoreBackend suppress_store_backend_id: true base_directory: checkpoints/ - + root_directory: /home/alamb/repos/schematic/great_expectations profiler_store: class_name: ProfilerStore store_backend: class_name: TupleFilesystemStoreBackend suppress_store_backend_id: true base_directory: profilers/ - -expectations_store_name: expectations_store -validations_store_name: validations_store -evaluation_parameter_store_name: evaluation_parameter_store -checkpoint_store_name: checkpoint_store - -data_docs_sites: - # Data Docs make it simple to visualize data quality in your project. These - # include Expectations, Validations & Profiles. The are built for all - # Datasources from JSON artifacts in the local repo including validations & - # profiles from the uncommitted directory. 
Read more at https://docs.greatexpectations.io/en/latest/reference/core_concepts/data_docs.html - local_site: - class_name: SiteBuilder - # set to false to hide how-to buttons in Data Docs - show_how_to_buttons: true - store_backend: - class_name: TupleFilesystemStoreBackend - base_directory: uncommitted/data_docs/local_site/ - site_index_builder: - class_name: DefaultSiteIndexBuilder - + root_directory: /home/alamb/repos/schematic/great_expectations +profiler_store_name: profiler_store +config_version: 3.0 anonymous_usage_statistics: - data_context_id: 1130ca8d-9731-45d9-bc05-2032535250c8 enabled: true + data_context_id: 1130ca8d-9731-45d9-bc05-2032535250c8 notebooks: diff --git a/schematic/configuration/configuration.py b/schematic/configuration/configuration.py index 1c5098cea..f06847bd6 100644 --- a/schematic/configuration/configuration.py +++ b/schematic/configuration/configuration.py @@ -152,10 +152,10 @@ def service_account_credentials_path(self) -> str: ) @property - def google_sheets_master_template_id(self) -> Optional[str]: + def google_sheets_master_template_id(self) -> str: """ Returns: - Optional[str]: + str: """ return self._google_sheets_config.master_template_id diff --git a/schematic/configuration/dataclasses.py b/schematic/configuration/dataclasses.py index 15a8cfa0c..4ce6fff59 100644 --- a/schematic/configuration/dataclasses.py +++ b/schematic/configuration/dataclasses.py @@ -1,7 +1,6 @@ """Pydantic dataclasses""" import re -from typing import Optional from dataclasses import field from pydantic.dataclasses import dataclass from pydantic import validator, ConfigDict @@ -19,7 +18,6 @@ class SynapseConfig: manifest_folder: name of the folder manifests will be saved to locally """ - validate_assignment = True config_basename: str = ".synapseConfig" manifest_basename: str = "synapse_storage_manifest" master_fileview_id: str = "syn23643253" @@ -131,7 +129,7 @@ class GoogleSheetsConfig: service_acct_creds_synapse_id: str = "syn25171627" service_acct_creds_basename: str = "schematic_service_account_creds.json" - master_template_id: Optional[str] = None + master_template_id: str = "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU" strict_validation: bool = True @validator("service_acct_creds_basename") @@ -152,24 +150,6 @@ def validate_string_is_not_empty(cls, value: str) -> str: raise ValueError(f"{value} is an empty string") return value - @validator("master_template_id") - @classmethod - def validate_optional_string_is_not_empty(cls, value: str) -> str: - """Check if string is not empty(has at least one char) - - Args: - value (Optional[str]): A string - - Raises: - ValueError: If the value is zero characters long - - Returns: - (str): The input value - """ - if value is not None and len(value) == 0: - raise ValueError(f"{value} is an empty string") - return value - @validator("service_acct_creds_synapse_id") @classmethod def validate_synapse_id(cls, value: str) -> str: diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index 05142b036..79a133fde 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -181,7 +181,7 @@ def _create_empty_manifest_spreadsheet(self, title:str) -> str: """ template_id = CONFIG.google_sheets_master_template_id - if template_id is not None: + if template_id: spreadsheet_id = self._gdrive_copy_file(template_id, title) else: diff --git a/tests/data/test_config.yml b/tests/data/test_config.yml index 2c8618d38..c23c6c10d 100644 --- a/tests/data/test_config.yml +++ b/tests/data/test_config.yml @@ 
-19,5 +19,5 @@ model: google_sheets: service_acct_creds_synapse_id: 'syn25171627' service_acct_creds_basename: "schematic_service_account_creds.json" - master_template_id: null + master_template_id: "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU" strict_validation: true diff --git a/tests/data/test_config2.yml b/tests/data/test_config2.yml index 056a12957..6a4d8e495 100644 --- a/tests/data/test_config2.yml +++ b/tests/data/test_config2.yml @@ -1,21 +1,21 @@ asset_store: synapse: config_basename: "file_name" - manifest_basename: 'file_name' - master_fileview_id: 'syn1' - manifest_folder: 'folder_name' + manifest_basename: "file_name" + master_fileview_id: "syn1" + manifest_folder: "folder_name" manifest: - title: 'title' + title: "title" data_type: - - 'data_type' + - "data_type" model: - location: 'model.jsonld' - file_type: 'not_local' + location: "model.jsonld" + file_type: "not_local" google_sheets: - service_acct_creds_synapse_id: 'syn1' + service_acct_creds_synapse_id: "syn1" service_acct_creds_basename: "creds.json" - master_template_id: '1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU' + master_template_id: "" strict_validation: false diff --git a/tests/test_configuration.py b/tests/test_configuration.py index 0845c15fc..410f4870d 100644 --- a/tests/test_configuration.py +++ b/tests/test_configuration.py @@ -135,7 +135,7 @@ def test_init(self) -> None: os.path.basename(config.service_account_credentials_path) == "schematic_service_account_creds.json" ) - assert config.google_sheets_master_template_id is None + assert config.google_sheets_master_template_id == "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU" assert config.google_sheets_strict_validation assert config.google_required_background_color == { "red": 0.9215, @@ -172,7 +172,7 @@ def test_load_config(self) -> None: os.path.basename(config.service_account_credentials_path) == "schematic_service_account_creds.json" ) - assert config.google_sheets_master_template_id is None + assert config.google_sheets_master_template_id == "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU" assert config.google_sheets_strict_validation config.load_config("tests/data/test_config2.yml") @@ -187,10 +187,7 @@ def test_load_config(self) -> None: assert config.model_file_type == "not_local" assert config.service_account_credentials_synapse_id assert os.path.basename(config.service_account_credentials_path) == "creds.json" - assert ( - config.google_sheets_master_template_id - == "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU" - ) + assert config.google_sheets_master_template_id == "" assert not config.google_sheets_strict_validation def test_set_synapse_master_fileview_id(self) -> None: diff --git a/tests/test_manifest.py b/tests/test_manifest.py index bb9f3c2db..f38f32e4d 100644 --- a/tests/test_manifest.py +++ b/tests/test_manifest.py @@ -218,7 +218,7 @@ def test_create_empty_manifest_spreadsheet(self, config: Configuration, simple_m else: # Temporarily set master template id to None so that we could test that template_id = config.google_sheets_master_template_id - config.google_sheets_master_template_id = None + config.google_sheets_master_template_id = "" mock_spreadsheet = Mock() mock_execute = Mock() From f1f9a5a1e66494c1ee8404a2e955fb12d3fac3d6 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Wed, 17 May 2023 12:21:56 -0700 Subject: [PATCH 025/135] revert yaml --- great_expectations/great_expectations.yml | 101 ++++++++++++++-------- 1 file changed, 66 insertions(+), 35 deletions(-) diff --git a/great_expectations/great_expectations.yml 
b/great_expectations/great_expectations.yml index 5ece2e0ee..60b73a8f3 100644 --- a/great_expectations/great_expectations.yml +++ b/great_expectations/great_expectations.yml @@ -1,78 +1,109 @@ -expectations_store_name: expectations_store -evaluation_parameter_store_name: evaluation_parameter_store -validations_store_name: validations_store -data_docs_sites: - local_site: - class_name: SiteBuilder - show_how_to_buttons: true - store_backend: - class_name: TupleFilesystemStoreBackend - base_directory: uncommitted/data_docs/local_site/ - root_directory: /home/alamb/repos/schematic/great_expectations - site_index_builder: - class_name: DefaultSiteIndexBuilder -config_variables_file_path: -include_rendered_content: - expectation_suite: false - expectation_validation_result: false - globally: false +# Welcome to Great Expectations! Always know what to expect from your data. +# +# Here you can define datasources, batch kwargs generators, integrations and +# more. This file is intended to be committed to your repo. For help with +# configuration please: +# - Read our docs: https://docs.greatexpectations.io/en/latest/reference/spare_parts/data_context_reference.html#configuration +# - Join our slack channel: http://greatexpectations.io/slack + +# config_version refers to the syntactic version of this config file, and is used in maintaining backwards compatibility +# It is auto-generated and usually does not need to be changed. +config_version: 3.0 + +# Datasources tell Great Expectations where your data lives and how to get it. +# You can use the CLI command `great_expectations datasource new` to help you +# add a new datasource. Read more at https://docs.greatexpectations.io/en/latest/reference/core_concepts/datasource.html datasources: - pandas: + Manifest: + module_name: great_expectations.datasource execution_engine: + module_name: great_expectations.execution_engine class_name: PandasExecutionEngine - class_name: Datasource - module_name: great_expectations.datasource data_connectors: default_runtime_data_connector_name: + module_name: great_expectations.datasource.data_connector class_name: RuntimeDataConnector batch_identifiers: - - default_identifier_name + - default_identifier + class_name: Datasource example_datasource: + module_name: great_expectations.datasource execution_engine: - class_name: PandasExecutionEngine module_name: great_expectations.execution_engine - class_name: Datasource - module_name: great_expectations.datasource + class_name: PandasExecutionEngine data_connectors: default_runtime_data_connector_name: - class_name: RuntimeDataConnector module_name: great_expectations.datasource.data_connector + class_name: RuntimeDataConnector batch_identifiers: - default_identifier_name -plugins_directory: -checkpoint_store_name: checkpoint_store + class_name: Datasource +config_variables_file_path: uncommitted/config_variables.yml + +# The plugins_directory will be added to your python path for custom modules +# used to override and extend Great Expectations. +plugins_directory: plugins/ + stores: +# Stores are configurable places to store things like Expectations, Validations +# Data Docs, and more. These are for advanced users only - most users can simply +# leave this section alone. +# +# Three stores are required: expectations, validations, and +# evaluation_parameters, and must exist with a valid store entry. Additional +# stores can be configured for uses such as data_docs, etc. 
  expectations_store:
     class_name: ExpectationsStore
     store_backend:
       class_name: TupleFilesystemStoreBackend
       base_directory: expectations/
-      root_directory: /home/alamb/repos/schematic/great_expectations
+
   validations_store:
     class_name: ValidationsStore
     store_backend:
       class_name: TupleFilesystemStoreBackend
       base_directory: uncommitted/validations/
-      root_directory: /home/alamb/repos/schematic/great_expectations
+
   evaluation_parameter_store:
+    # Evaluation Parameters enable dynamic expectations. Read more here:
+    # https://docs.greatexpectations.io/en/latest/reference/core_concepts/evaluation_parameters.html
     class_name: EvaluationParameterStore
+
   checkpoint_store:
     class_name: CheckpointStore
     store_backend:
       class_name: TupleFilesystemStoreBackend
       suppress_store_backend_id: true
       base_directory: checkpoints/
-      root_directory: /home/alamb/repos/schematic/great_expectations
+
   profiler_store:
     class_name: ProfilerStore
     store_backend:
       class_name: TupleFilesystemStoreBackend
       suppress_store_backend_id: true
       base_directory: profilers/
-      root_directory: /home/alamb/repos/schematic/great_expectations
-profiler_store_name: profiler_store
-config_version: 3.0
+
+expectations_store_name: expectations_store
+validations_store_name: validations_store
+evaluation_parameter_store_name: evaluation_parameter_store
+checkpoint_store_name: checkpoint_store
+
+data_docs_sites:
+  # Data Docs make it simple to visualize data quality in your project. These
+  # include Expectations, Validations & Profiles. The are built for all
+  # Datasources from JSON artifacts in the local repo including validations &
+  # profiles from the uncommitted directory. Read more at https://docs.greatexpectations.io/en/latest/reference/core_concepts/data_docs.html
+  local_site:
+    class_name: SiteBuilder
+    # set to false to hide how-to buttons in Data Docs
+    show_how_to_buttons: true
+    store_backend:
+      class_name: TupleFilesystemStoreBackend
+      base_directory: uncommitted/data_docs/local_site/
+    site_index_builder:
+      class_name: DefaultSiteIndexBuilder
+
 anonymous_usage_statistics:
-  enabled: true
   data_context_id: 1130ca8d-9731-45d9-bc05-2032535250c8
+  enabled: true
 notebooks:

From f7532ca295a6baaf5c5788ad8d60ba4e54d7d465 Mon Sep 17 00:00:00 2001
From: andrewelamb
Date: Thu, 18 May 2023 13:02:17 -0700
Subject: [PATCH 026/135] improved readme

---
 README.md                              | 6 ++++++
 schematic/configuration/dataclasses.py | 2 +-
 2 files changed, 7 insertions(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 42b5855d6..b288729e6 100644
--- a/README.md
+++ b/README.md
@@ -90,6 +90,12 @@ editor of your choice and edit the `username` and `authtoken` attribute under th
 
 There are some defaults in schematic that can be configured. These fields are in ``config_example.yml``. If you want to change any of these copy ``config_example.yml`` to ``config.yml``, change any fields you want to, and remove any fields you don't.
 
+For example, if you wanted to change the folder where manifests are downloaded, your config should look like:
+
+    asset_store:
+      synapse:
+        manifest_folder: "manifest_folder"
+
 *Note*: `config.yml` is ignored by git.
 
 *Note*: Paths can be specified relative to the `config.yml` file or as absolute paths.
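
The configuration dataclasses touched throughout this series are pydantic dataclasses with validate_assignment turned on, so a bad value is rejected both at construction time and on later assignment. A minimal sketch of that behaviour, assuming pydantic v1 semantics (where ValidationError subclasses ValueError) and that master_fileview_id is covered by the validate_synapse_id validator shown in these patches:

    # Sketch only: assumes pydantic v1-style validators as used in dataclasses.py.
    from schematic.configuration.dataclasses import SynapseConfig

    config = SynapseConfig(master_fileview_id="syn23643253")  # passes validation

    try:
        # validate_assignment re-runs the validator on assignment
        config.master_fileview_id = "not-a-synapse-id"
    except ValueError as error:
        print(error)  # reports that the value is not a valid Synapse id
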
diff --git a/schematic/configuration/dataclasses.py b/schematic/configuration/dataclasses.py
index 4ce6fff59..207619dcf 100644
--- a/schematic/configuration/dataclasses.py
+++ b/schematic/configuration/dataclasses.py
@@ -14,7 +14,7 @@ class SynapseConfig:
     """
     config_basename: the basename of the synapse config file
     manifest_basename: the name of downloaded manifest files
-    master_fileview_id: Synapse id for the master file view
+    master_fileview_id: Synapse ID of the file view listing all project data assets.
     manifest_folder: name of the folder manifests will be saved to locally
     """
 

From 5c68a417747bf690c28e4cd02b5e61edef1d266c Mon Sep 17 00:00:00 2001
From: andrewelamb
Date: Thu, 25 May 2023 08:33:47 -0700
Subject: [PATCH 027/135] moved all test configs to new folder and added some

---
 tests/data/test_configs/default_config.yml  | 23 +++++++++++++++++++++
 tests/data/test_configs/invalid_config1.yml |  5 +++++
 tests/data/test_configs/invalid_config2.yml |  3 +++
 tests/data/test_configs/invalid_config3.yml |  5 +++++
 tests/data/test_configs/valid_config.yml    | 23 +++++++++++++++++++++
 5 files changed, 59 insertions(+)
 create mode 100644 tests/data/test_configs/default_config.yml
 create mode 100644 tests/data/test_configs/invalid_config1.yml
 create mode 100644 tests/data/test_configs/invalid_config2.yml
 create mode 100644 tests/data/test_configs/invalid_config3.yml
 create mode 100644 tests/data/test_configs/valid_config.yml

diff --git a/tests/data/test_configs/default_config.yml b/tests/data/test_configs/default_config.yml
new file mode 100644
index 000000000..c23c6c10d
--- /dev/null
+++ b/tests/data/test_configs/default_config.yml
@@ -0,0 +1,23 @@
+# This config has the same default values as schematic itself has
+asset_store:
+  synapse:
+    config_basename: ".synapseConfig"
+    manifest_basename: 'synapse_storage_manifest'
+    master_fileview_id: 'syn23643253'
+    manifest_folder: 'manifests'
+
+manifest:
+  title: 'example'
+  data_type:
+    - 'Biospecimen'
+    - 'Patient'
+
+model:
+  location: 'tests/data/example.model.jsonld'
+  file_type: 'local'
+
+google_sheets:
+  service_acct_creds_synapse_id: 'syn25171627'
+  service_acct_creds_basename: "schematic_service_account_creds.json"
+  master_template_id: "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU"
+  strict_validation: true
diff --git a/tests/data/test_configs/invalid_config1.yml b/tests/data/test_configs/invalid_config1.yml
new file mode 100644
index 000000000..071047b05
--- /dev/null
+++ b/tests/data/test_configs/invalid_config1.yml
@@ -0,0 +1,5 @@
+# This is an invalid config, but has fields that a previous version included
+
+definitions:
+  synapse_config: ".synapseConfig"
+  service_acct_creds: "schematic_service_account_creds.json"
diff --git a/tests/data/test_configs/invalid_config2.yml b/tests/data/test_configs/invalid_config2.yml
new file mode 100644
index 000000000..e2f710b48
--- /dev/null
+++ b/tests/data/test_configs/invalid_config2.yml
@@ -0,0 +1,3 @@
+# This is an invalid config, but has fields in asset store that are not supported
+asset_store:
+  invalid_field: "xxx"
\ No newline at end of file
diff --git a/tests/data/test_configs/invalid_config3.yml b/tests/data/test_configs/invalid_config3.yml
new file mode 100644
index 000000000..7cb2bc0f8
--- /dev/null
+++ b/tests/data/test_configs/invalid_config3.yml
@@ -0,0 +1,5 @@
+# This is an invalid config, but has fields in the synapse section that are not supported
+asset_store:
+  synapse:
+    invalid_field: "xxx"
+
diff --git a/tests/data/test_configs/valid_config.yml 
b/tests/data/test_configs/valid_config.yml
new file mode 100644
index 000000000..e1c115ef1
--- /dev/null
+++ b/tests/data/test_configs/valid_config.yml
@@ -0,0 +1,23 @@
+# This is a valid config, but all values are different from defaults
+
+asset_store:
+  synapse:
+    config_basename: "file_name"
+    manifest_basename: "file_name"
+    master_fileview_id: "syn1"
+    manifest_folder: "folder_name"
+
+manifest:
+  title: "title"
+  data_type:
+    - "data_type"
+
+model:
+  location: "model.jsonld"
+  file_type: "not_local"
+
+google_sheets:
+  service_acct_creds_synapse_id: "syn1"
+  service_acct_creds_basename: "creds.json"
+  master_template_id: ""
+  strict_validation: false

From b01f091999496e6ac1b230718692e3c854f13c16 Mon Sep 17 00:00:00 2001
From: andrewelamb
Date: Thu, 25 May 2023 08:34:11 -0700
Subject: [PATCH 028/135] added checking for correct fields for config file

---
 schematic/configuration/configuration.py | 60 ++++++++++++++++++++----
 schematic/configuration/dataclasses.py   |  2 +-
 tests/data/test_config.yml               | 23 ---------
 tests/data/test_config2.yml              | 21 ---------
 tests/test_configuration.py              | 42 +++++++++++++----
 5 files changed, 86 insertions(+), 62 deletions(-)
 delete mode 100644 tests/data/test_config.yml
 delete mode 100644 tests/data/test_config2.yml

diff --git a/schematic/configuration/configuration.py b/schematic/configuration/configuration.py
index f06847bd6..c8c4474ae 100644
--- a/schematic/configuration/configuration.py
+++ b/schematic/configuration/configuration.py
@@ -1,6 +1,6 @@
 """Configuration singleton for the Schematic Package"""
 
-from typing import Optional
+from typing import Optional, Any
 import os
 import yaml
 from .dataclasses import (
@@ -10,6 +10,29 @@
     GoogleSheetsConfig,
 )
 
+class ConfigNonAllowedFieldError(Exception):
+    """Raised when a user-submitted config file contains non allowed fields"""
+
+    def __init__(self, message: str, fields: list[str], allowed_fields: list[str]) -> None:
+        """
+        Args:
+            message (str): A message describing the error
+            fields (list[str]): The fields in the config
+            allowed_fields (list[str]): The allowed fields in the config
+        """
+        self.message = message
+        self.fields = fields
+        self.allowed_fields = allowed_fields
+        super().__init__(self.message)
+
+    def __str__(self) -> str:
+        """String representation"""
+        return (
+            f"{self.message}; "
+            f"config contains fields: {self.fields}; "
+            f"allowed fields: {self.allowed_fields}"
+        )
+
 
 class Configuration:
     """
@@ -31,7 +54,11 @@ def load_config(self, config_path: str) -> None:
 
         Args:
            config_path (str): The path to the config file
+
+        Raises:
+            ConfigNonAllowedFieldError: If there are non allowed fields in the config file
         """
+        allowed_config_fields = {"asset_store", "manifest", "model", "google_sheets"}
         config_path = os.path.expanduser(config_path)
         config_path = os.path.abspath(config_path)
         self.config_path = config_path
@@ -39,13 +66,30 @@ def load_config(self, config_path: str) -> None:
         self._parent_directory = os.path.dirname(config_path)
 
         with open(config_path, "r", encoding="utf-8") as file:
-            data = yaml.safe_load(file)
-        self._synapse_config = SynapseConfig(
-            **data.get("asset_store", {}).get("synapse", {})
-        )
-        self._manifest_config = ManifestConfig(**data.get("manifest", {}))
-        self._model_config = ModelConfig(**data.get("model", {}))
-        self._google_sheets_config = GoogleSheetsConfig(**data.get("google_sheets", {}))
+            config: dict[str, Any] = yaml.safe_load(file)
+        if not set(config.keys()).issubset(allowed_config_fields):
+            raise ConfigNonAllowedFieldError(
+                "Non allowed fields in top 
level of configuration file.", + config.keys(), + allowed_config_fields + ) + + self._manifest_config = ManifestConfig(**config.get("manifest", {})) + self._model_config = ModelConfig(**config.get("model", {})) + self._google_sheets_config = GoogleSheetsConfig(**config.get("google_sheets", {})) + self._set_asset_store(config.get("asset_store", {})) + + def _set_asset_store(self, config: dict[str, Any]) -> None: + allowed_config_fields = {"synapse"} + if not config: + pass + if not set(config.keys()).issubset(allowed_config_fields): + raise ConfigNonAllowedFieldError( + "Non allowed fields in asset_store of configuration file.", + config.keys(), + allowed_config_fields + ) + self._synapse_config = SynapseConfig(**config["synapse"]) def _normalize_path(self, path: str) -> str: """ diff --git a/schematic/configuration/dataclasses.py b/schematic/configuration/dataclasses.py index 207619dcf..824b71e46 100644 --- a/schematic/configuration/dataclasses.py +++ b/schematic/configuration/dataclasses.py @@ -6,7 +6,7 @@ from pydantic import validator, ConfigDict # This turns on validation for value assignments after creation -pydantic_config = ConfigDict(validate_assignment=True) +pydantic_config = ConfigDict(validate_assignment=True, extra="forbid") @dataclass(config=pydantic_config) diff --git a/tests/data/test_config.yml b/tests/data/test_config.yml deleted file mode 100644 index c23c6c10d..000000000 --- a/tests/data/test_config.yml +++ /dev/null @@ -1,23 +0,0 @@ -# This config has the same default values as schematic itself has -asset_store: - synapse: - config_basename: ".synapseConfig" - manifest_basename: 'synapse_storage_manifest' - master_fileview_id: 'syn23643253' - manifest_folder: 'manifests' - -manifest: - title: 'example' - data_type: - - 'Biospecimen' - - 'Patient' - -model: - location: 'tests/data/example.model.jsonld' - file_type: 'local' - -google_sheets: - service_acct_creds_synapse_id: 'syn25171627' - service_acct_creds_basename: "schematic_service_account_creds.json" - master_template_id: "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU" - strict_validation: true diff --git a/tests/data/test_config2.yml b/tests/data/test_config2.yml deleted file mode 100644 index 6a4d8e495..000000000 --- a/tests/data/test_config2.yml +++ /dev/null @@ -1,21 +0,0 @@ -asset_store: - synapse: - config_basename: "file_name" - manifest_basename: "file_name" - master_fileview_id: "syn1" - manifest_folder: "folder_name" - -manifest: - title: "title" - data_type: - - "data_type" - -model: - location: "model.jsonld" - file_type: "not_local" - -google_sheets: - service_acct_creds_synapse_id: "syn1" - service_acct_creds_basename: "creds.json" - master_template_id: "" - strict_validation: false diff --git a/tests/test_configuration.py b/tests/test_configuration.py index 410f4870d..b3f729ad9 100644 --- a/tests/test_configuration.py +++ b/tests/test_configuration.py @@ -10,7 +10,7 @@ ModelConfig, GoogleSheetsConfig, ) -from schematic.configuration.configuration import Configuration +from schematic.configuration.configuration import Configuration, ConfigNonAllowedFieldError class TestDataclasses: @@ -135,7 +135,9 @@ def test_init(self) -> None: os.path.basename(config.service_account_credentials_path) == "schematic_service_account_creds.json" ) - assert config.google_sheets_master_template_id == "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU" + assert config.google_sheets_master_template_id == ( + "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU" + ) assert config.google_sheets_strict_validation assert 
config.google_required_background_color == { "red": 0.9215, @@ -148,12 +150,12 @@ def test_init(self) -> None: "blue": 0.9019, } - def test_load_config(self) -> None: - """Testing for Configuration.load_config""" + def test_load_config1(self) -> None: + """Testing for Configuration.load_config where config file contains default values""" config = Configuration() - config.load_config("tests/data/test_config.yml") - assert os.path.basename(config.config_path) == "test_config.yml" + config.load_config("tests/data/test_configs/default_config.yml") + assert os.path.basename(config.config_path) == "default_config.yml" assert config.synapse_configuration_path != ".synapseConfig" assert os.path.basename(config.synapse_configuration_path) == ".synapseConfig" assert config.synapse_manifest_basename == "synapse_storage_manifest" @@ -172,11 +174,20 @@ def test_load_config(self) -> None: os.path.basename(config.service_account_credentials_path) == "schematic_service_account_creds.json" ) - assert config.google_sheets_master_template_id == "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU" + assert config.google_sheets_master_template_id == ( + "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU" + ) assert config.google_sheets_strict_validation - config.load_config("tests/data/test_config2.yml") - assert os.path.basename(config.config_path) == "test_config2.yml" + def test_load_config2(self) -> None: + """ + Testing for Configuration.load_config where config file + contains values different from the default + """ + config = Configuration() + + config.load_config("tests/data/test_configs/valid_config.yml") + assert os.path.basename(config.config_path) == "valid_config.yml" assert os.path.basename(config.synapse_configuration_path) == "file_name" assert config.synapse_manifest_basename == "file_name" assert config.synapse_master_fileview_id == "syn1" @@ -190,6 +201,19 @@ def test_load_config(self) -> None: assert config.google_sheets_master_template_id == "" assert not config.google_sheets_strict_validation + def test_load_config3(self) -> None: + """ + Testing for Configuration.load_config where config file + is not valid + """ + config = Configuration() + with pytest.raises(ConfigNonAllowedFieldError): + config.load_config("tests/data/test_configs/invalid_config1.yml") + with pytest.raises(ConfigNonAllowedFieldError): + config.load_config("tests/data/test_configs/invalid_config2.yml") + with pytest.raises(TypeError): + config.load_config("tests/data/test_configs/invalid_config3.yml") + def test_set_synapse_master_fileview_id(self) -> None: """Testing for Configuration synapse_master_fileview_id setter""" config = Configuration() From b7fafea4ed7c763da173c7da5e0c12ce7535d028 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Thu, 25 May 2023 08:36:13 -0700 Subject: [PATCH 029/135] remove print statements --- schematic/manifest/generator.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index 79a133fde..488dda151 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -535,9 +535,6 @@ def _gs_add_and_format_columns(self, required_metadata_fields, spreadsheet_id): # adding columns to 2nd sheet that can be used for storing data validation ranges (this avoids limitations on number of dropdown items in excel and openoffice) range = "Sheet2!A1:" + str(end_col_letter) + "1" - print(spreadsheet_id) - print(range) - print(body) self.sheet_service.spreadsheets().values().update( spreadsheetId=spreadsheet_id, range=range, 
valueInputOption="RAW", body=body ).execute() From df5e7d0619421c6df27255074a5eadd9d9ff0c5b Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Thu, 25 May 2023 08:49:48 -0700 Subject: [PATCH 030/135] fixed mypy and black formatting issues --- schematic/configuration/configuration.py | 17 +++++++++++------ schematic/configuration/dataclasses.py | 4 ++-- 2 files changed, 13 insertions(+), 8 deletions(-) diff --git a/schematic/configuration/configuration.py b/schematic/configuration/configuration.py index c8c4474ae..29ead5a40 100644 --- a/schematic/configuration/configuration.py +++ b/schematic/configuration/configuration.py @@ -10,10 +10,13 @@ GoogleSheetsConfig, ) + class ConfigNonAllowedFieldError(Exception): """Raised when a user submitted config file contains non allowed fields""" - def __init__(self, message: str, fields: list[str], allowed_fields: list[str]) -> None: + def __init__( + self, message: str, fields: list[str], allowed_fields: list[str] + ) -> None: """ Args: message (str): A message describing the error @@ -70,13 +73,15 @@ def load_config(self, config_path: str) -> None: if not set(config.keys()).issubset(allowed_config_fields): raise ConfigNonAllowedFieldError( "Non allowed fields in top level of configuration file.", - config.keys(), - allowed_config_fields + list(config.keys()), + list(allowed_config_fields), ) self._manifest_config = ManifestConfig(**config.get("manifest", {})) self._model_config = ModelConfig(**config.get("model", {})) - self._google_sheets_config = GoogleSheetsConfig(**config.get("google_sheets", {})) + self._google_sheets_config = GoogleSheetsConfig( + **config.get("google_sheets", {}) + ) self._set_asset_store(config.get("asset_store", {})) def _set_asset_store(self, config: dict[str, Any]) -> None: @@ -86,8 +91,8 @@ def _set_asset_store(self, config: dict[str, Any]) -> None: if not set(config.keys()).issubset(allowed_config_fields): raise ConfigNonAllowedFieldError( "Non allowed fields in asset_store of configuration file.", - config.keys(), - allowed_config_fields + list(config.keys()), + list(allowed_config_fields), ) self._synapse_config = SynapseConfig(**config["synapse"]) diff --git a/schematic/configuration/dataclasses.py b/schematic/configuration/dataclasses.py index 824b71e46..5681a6283 100644 --- a/schematic/configuration/dataclasses.py +++ b/schematic/configuration/dataclasses.py @@ -3,10 +3,10 @@ import re from dataclasses import field from pydantic.dataclasses import dataclass -from pydantic import validator, ConfigDict +from pydantic import validator, ConfigDict, Extra # This turns on validation for value assignments after creation -pydantic_config = ConfigDict(validate_assignment=True, extra="forbid") +pydantic_config = ConfigDict(validate_assignment=True, extra=Extra.forbid) @dataclass(config=pydantic_config) From f0d9f5c747c2a6cbaccd191ea38223606f9b0e1b Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Thu, 25 May 2023 10:15:09 -0700 Subject: [PATCH 031/135] added basename to tests to get them to pass --- tests/test_store.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_store.py b/tests/test_store.py index 40cf1f2b4..1e9102df6 100644 --- a/tests/test_store.py +++ b/tests/test_store.py @@ -307,7 +307,7 @@ def test_createTable(self, helpers, synapse_store, config: Configuration, projec # associate metadata with files manifest_path = "mock_manifests/table_manifest.csv" - inputModelLocaiton = helpers.get_data_path(config.model_location) + inputModelLocaiton = 
helpers.get_data_path(os.path.basename(config.model_location))
         sg = SchemaGenerator(inputModelLocaiton)

         # updating file view on synapse takes a long time
@@ -346,7 +346,7 @@ def test_replaceTable(self, helpers, synapse_store, config: Configuration, proje
         assert table_name not in synapse_store.get_table_info(projectId = projectId).keys()

         # associate org FollowUp metadata with files
-        inputModelLocaiton = helpers.get_data_path(config.model_location)
+        inputModelLocaiton = helpers.get_data_path(os.path.basename(config.model_location))
         sg = SchemaGenerator(inputModelLocaiton)

         # updating file view on synapse takes a long time
@@ -413,7 +413,7 @@ def test_upsertTable(self, helpers, synapse_store, config:Configuration, project
         assert table_name not in synapse_store.get_table_info(projectId = projectId).keys()

         # associate org FollowUp metadata with files
-        inputModelLocaiton = helpers.get_data_path(config.model_location)
+        inputModelLocaiton = helpers.get_data_path(os.path.basename(config.model_location))
         sg = SchemaGenerator(inputModelLocaiton)

         # updating file view on synapse takes a long time

From 0b705d9fb6b9335d2342dbba6b377a7d4dd63d91 Mon Sep 17 00:00:00 2001
From: andrewelamb
Date: Thu, 25 May 2023 10:48:38 -0700
Subject: [PATCH 032/135] fixed missing docstrings and comments

---
 config_example.yml                         | 32 +++++++++++++++++++---
 schematic/configuration/configuration.py   | 29 ++++++++------------
 schematic/configuration/dataclasses.py     | 21 +++++++-------
 tests/data/test_configs/default_config.yml |  1 -
 tests/data/test_configs/valid_config.yml   |  1 -
 5 files changed, 50 insertions(+), 34 deletions(-)

diff --git a/config_example.yml b/config_example.yml
index 70f579e7f..7080ee800 100644
--- a/config_example.yml
+++ b/config_example.yml
@@ -1,22 +1,46 @@
+# This is an example config for Schematic.
+# All listed values are those that are the default if a config is not used.
+# Save this as config.yml; it will be gitignored.
+# Remove any fields in the config you don't want to change
+# Change the values of any fields you do want to change
+
+
+# This describes where assets such as manifests are stored
 asset_store:
+  # This is when assets are stored in a synapse project
   synapse:
-    config_basename: ".synapseConfig"
-    manifest_basename: "synapse_storage_manifest"
+    # Synapse ID of the file view listing all project data assets.
     master_fileview_id: "syn23643253"
+    # Path to the synapse config file, either absolute or relative to this file
+    config: ".synapseConfig"
+    # Base name that manifest files will be saved as
+    manifest_basename: "synapse_storage_manifest"
+    # Location where manifests will be saved to
     manifest_folder: "manifests"

+# This describes information about manifests as it relates to generation and validation
 manifest:
+  # Title or title prefix given to generated manifest(s)
   title: "example"
+  # Data types of manifests to be generated or data type (singular) to validate manifest against
   data_type:
     - "Biospecimen"
     - "Patient"

+# Describes the location of your schema
 model:
+  # Location of your schema jsonld; it must be a path relative to this file or absolute
   location: "tests/data/example.model.jsonld"
-  file_type: "local"

+# This section is for validation via google sheets
 google_sheets:
+  # The Synapse id of the Google service account credentials.
   service_acct_creds_synapse_id: "syn25171627"
-  service_acct_creds_basename: "schematic_service_account_creds.json"
+  # The basename of the Google service account credentials.
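To make the intent of this annotated example config concrete, here is a minimal usage sketch, assuming the `Configuration` singleton and the property names introduced earlier in this series; the file name `config.yml` stands in for the user's own copy of config_example.yml, so treat this as illustrative rather than documented API.

    # Illustrative sketch only -- not part of the patch series.
    from schematic.configuration.configuration import Configuration

    config = Configuration()          # starts from the package defaults
    config.load_config("config.yml")  # user values override those defaults

    # Values are read back through properties rather than raw dict access
    print(config.synapse_master_fileview_id)  # "syn23643253" unless overridden
    print(config.manifest_title)              # "example" unless overridden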
+ service_acct_creds: "schematic_service_account_creds.json" + # The template id of the google sheet. master_template_id: "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU" + # When doing google sheet validation (regex match) with the validation rules. + # True is alerting the user and not allowing entry of bad values. + # False is warning but allowing the entry on to the sheet. strict_validation: true diff --git a/schematic/configuration/configuration.py b/schematic/configuration/configuration.py index 29ead5a40..728de292a 100644 --- a/schematic/configuration/configuration.py +++ b/schematic/configuration/configuration.py @@ -146,7 +146,7 @@ def synapse_master_fileview_id(self, synapse_id: str) -> None: def synapse_manifest_folder(self) -> str: """ Returns: - str: + str: Location where manifests will saved to """ return self._synapse_config.manifest_folder @@ -154,7 +154,7 @@ def synapse_manifest_folder(self) -> str: def manifest_title(self) -> str: """ Returns: - str: + str: Title or title prefix given to generated manifest(s) """ return self._manifest_config.title @@ -162,7 +162,8 @@ def manifest_title(self) -> str: def manifest_data_type(self) -> list[str]: """ Returns: - list[str]: + list[str]: Data types of manifests to be generated or data type (singular) to validate + manifest against """ return self._manifest_config.data_type @@ -170,23 +171,15 @@ def manifest_data_type(self) -> list[str]: def model_location(self) -> str: """ Returns: - str: + str: The path to the model.jsonld """ return self._model_config.location - @property - def model_file_type(self) -> str: - """ - Returns: - str: - """ - return self._model_config.file_type - @property def service_account_credentials_synapse_id(self) -> str: """ Returns: - str: + str: The Synapse id of the Google service account credentials. """ return self._google_sheets_config.service_acct_creds_synapse_id @@ -194,7 +187,7 @@ def service_account_credentials_synapse_id(self) -> str: def service_account_credentials_path(self) -> str: """ Returns: - str: + str: The path of the Google service account credentials. """ return self._normalize_path( self._google_sheets_config.service_acct_creds_basename @@ -204,7 +197,7 @@ def service_account_credentials_path(self) -> str: def google_sheets_master_template_id(self) -> str: """ Returns: - str: + str: The template id of the google sheet. 
""" return self._google_sheets_config.master_template_id @@ -221,7 +214,7 @@ def google_sheets_master_template_id(self, template_id: str) -> None: def google_sheets_strict_validation(self) -> bool: """ Returns: - bool: + bool: Weather or not to disallow bad values in the google sheet """ return self._google_sheets_config.strict_validation @@ -229,7 +222,7 @@ def google_sheets_strict_validation(self) -> bool: def google_required_background_color(self) -> dict[str, float]: """ Returns: - dict[str, float]: + dict[str, float]: Background color for google sheet """ return { "red": 0.9215, @@ -241,7 +234,7 @@ def google_required_background_color(self) -> dict[str, float]: def google_optional_background_color(self) -> dict[str, float]: """ Returns: - dict[str, float]: + dict[str, float]: Background color for google sheet """ return { "red": 1.0, diff --git a/schematic/configuration/dataclasses.py b/schematic/configuration/dataclasses.py index 5681a6283..ff8f0f333 100644 --- a/schematic/configuration/dataclasses.py +++ b/schematic/configuration/dataclasses.py @@ -63,8 +63,9 @@ def validate_string_is_not_empty(cls, value: str) -> str: @dataclass(config=pydantic_config) class ManifestConfig: """ - title: - data_type: + title: Title or title prefix given to generated manifest(s) + data_type: Data types of manifests to be generated or data type (singular) to validate + manifest against """ title: str = "example" @@ -92,14 +93,12 @@ def validate_string_is_not_empty(cls, value: str) -> str: @dataclass(config=pydantic_config) class ModelConfig: """ - location: location of the schema jsonld, either a path, ro url - file_type: one of ["local"] + location: location of the schema jsonld """ location: str = "tests/data/example.model.jsonld" - file_type: str = "local" - @validator("location", "file_type") + @validator("location") @classmethod def validate_string_is_not_empty(cls, value: str) -> str: """Check if string is not empty(has at least one char) @@ -121,10 +120,12 @@ def validate_string_is_not_empty(cls, value: str) -> str: @dataclass(config=pydantic_config) class GoogleSheetsConfig: """ - master_template_id: - strict_validation: - service_acct_creds_synapse_id: - service_acct_creds_basename: + master_template_id: The template id of the google sheet. + strict_validation: When doing google sheet validation (regex match) with the validation rules. + True is alerting the user and not allowing entry of bad values. + False is warning but allowing the entry on to the sheet. + service_acct_creds_synapse_id: The Synapse id of the Google service account credentials. + service_acct_creds_basename: The basename of the Google service account credentials. 
""" service_acct_creds_synapse_id: str = "syn25171627" diff --git a/tests/data/test_configs/default_config.yml b/tests/data/test_configs/default_config.yml index c23c6c10d..9a14a0759 100644 --- a/tests/data/test_configs/default_config.yml +++ b/tests/data/test_configs/default_config.yml @@ -14,7 +14,6 @@ manifest: model: location: 'tests/data/example.model.jsonld' - file_type: 'local' google_sheets: service_acct_creds_synapse_id: 'syn25171627' diff --git a/tests/data/test_configs/valid_config.yml b/tests/data/test_configs/valid_config.yml index e1c115ef1..9f2fd6a96 100644 --- a/tests/data/test_configs/valid_config.yml +++ b/tests/data/test_configs/valid_config.yml @@ -14,7 +14,6 @@ manifest: model: location: "model.jsonld" - file_type: "not_local" google_sheets: service_acct_creds_synapse_id: "syn1" From b6016296e8e6fc6def63daf89840fd52d077fcce Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Thu, 25 May 2023 10:48:56 -0700 Subject: [PATCH 033/135] removed file_type form configuration --- schematic/models/commands.py | 10 ++-------- tests/test_configuration.py | 7 ++----- 2 files changed, 4 insertions(+), 13 deletions(-) diff --git a/schematic/models/commands.py b/schematic/models/commands.py index bc9909fc7..f37c730f4 100644 --- a/schematic/models/commands.py +++ b/schematic/models/commands.py @@ -114,11 +114,8 @@ def submit_manifest( jsonld = CONFIG.model_location log_value_from_config("jsonld", jsonld) - file_type = CONFIG.model_file_type - log_value_from_config("file_type", file_type) - metadata_model = MetadataModel( - inputMModelLocation=jsonld, inputMModelLocationType=file_type + inputMModelLocation=jsonld, inputMModelLocationType="local" ) @@ -202,11 +199,8 @@ def validate_manifest(ctx, manifest_path, data_type, json_schema, restrict_rules jsonld = CONFIG.model_location log_value_from_config("jsonld", jsonld) - file_type = CONFIG.model_file_type - log_value_from_config("file_type", file_type) - metadata_model = MetadataModel( - inputMModelLocation=jsonld, inputMModelLocationType=file_type + inputMModelLocation=jsonld, inputMModelLocationType="local" ) errors, warnings = metadata_model.validateModelManifest( diff --git a/tests/test_configuration.py b/tests/test_configuration.py index b3f729ad9..408cd94ca 100644 --- a/tests/test_configuration.py +++ b/tests/test_configuration.py @@ -69,11 +69,11 @@ def test_model_config(self) -> None: """Testing for ModelConfig""" assert isinstance(ModelConfig(), ModelConfig) assert isinstance( - ModelConfig(location="url", file_type="local"), + ModelConfig(location="url"), ModelConfig, ) with pytest.raises(ValidationError): - ModelConfig(location="", file_type="local") + ModelConfig(location="") def test_google_sheets_config(self) -> None: """Testing for ModelConfig""" @@ -125,7 +125,6 @@ def test_init(self) -> None: assert config.manifest_title == "example" assert config.manifest_data_type == ["Biospecimen", "Patient"] assert config.model_location == "tests/data/example.model.jsonld" - assert config.model_file_type == "local" assert config.service_account_credentials_synapse_id assert ( config.service_account_credentials_path @@ -164,7 +163,6 @@ def test_load_config1(self) -> None: assert config.manifest_title == "example" assert config.manifest_data_type == ["Biospecimen", "Patient"] assert config.model_location == "tests/data/example.model.jsonld" - assert config.model_file_type == "local" assert config.service_account_credentials_synapse_id assert ( config.service_account_credentials_path @@ -195,7 +193,6 @@ def test_load_config2(self) -> None: 
assert config.manifest_title == "title" assert config.manifest_data_type == ["data_type"] assert config.model_location == "model.jsonld" - assert config.model_file_type == "not_local" assert config.service_account_credentials_synapse_id assert os.path.basename(config.service_account_credentials_path) == "creds.json" assert config.google_sheets_master_template_id == "" From 72e885ab04835544f3f51e93a5009b17b9dea3b5 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Fri, 26 May 2023 10:02:28 -0700 Subject: [PATCH 034/135] scratch old renaming method, add comments outlining new process --- schematic/store/synapse.py | 23 +++++++++-------------- 1 file changed, 9 insertions(+), 14 deletions(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index e7e9a07c9..86310b6ff 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -2192,21 +2192,16 @@ def _update_table_uuid_column(synStore: SynapseStorage, table_id: str, sg: Schem schema = synStore.syn.store(schema) # If there is not, then use the old `Uuid` column as a basis for the new `Id` column else: - # Create a new `Id` column based off of the old `Uuid` column, and store (column is empty) - new_col = deepcopy(col) - new_col['name'] = 'Id' - schema.addColumn(new_col) - schema = synStore.syn.store(schema) - - - # Recently stored column is empty, so populated with uuid values - TableOperations._populate_new_id_column(synStore, table_id, schema) + pass + # Build ColumnModel that will be used for new column - # get the up-to-date table, remove old `Uuid` column, and store - sleep(1) - schema = synStore.syn.get(table_id) - schema.removeColumn(col) - schema = synStore.syn.store(schema) + # Send POST /column request to define new column and get new column ID + + # Define columnChange body + + # Build body for POST request + + # Send POST request to change column name # Exit iteration; only concerned with `Uuid` column break From c45baf48f6ed9368d7cb2b2a9203bd722ac7a819 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Fri, 26 May 2023 11:03:48 -0700 Subject: [PATCH 035/135] allow TableOperations instances to reference themselves --- schematic/store/synapse.py | 146 ++++++++++++++++++++++--------------- 1 file changed, 87 insertions(+), 59 deletions(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index 86310b6ff..630d847e3 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -996,17 +996,31 @@ def buildDB(self, # Put table manifest onto synapse schema = Schema(name=table_name, columns=col_schema, parent=self.getDatasetProject(datasetId)) - + if table_name in table_info: + existingTableId = table_info[table_name] + else: + existingTableId = None + + + tableOps = TableOperations( + synStore = self, + tableToLoad = table_manifest, + tableName = table_name, + datasetId = datasetId, + existingTableId = existingTableId, + restrict = restrict, + ) + if not table_manipulation or table_name not in table_info.keys(): - manifest_table_id = TableOperations.createTable(self, tableToLoad=table_manifest, tableName=table_name, datasetId=datasetId, columnTypeDict=col_schema, specifySchema=True, restrict=restrict) + manifest_table_id = tableOps.createTable(columnTypeDict=col_schema, specifySchema=True,) elif table_name in table_info.keys() and table_info[table_name]: if table_manipulation.lower() == 'replace': - manifest_table_id = TableOperations.replaceTable(self, tableToLoad=table_manifest, 
tableName=table_name, existingTableId=table_info[table_name], specifySchema = True, datasetId = datasetId, columnTypeDict=col_schema, restrict=restrict) + manifest_table_id = tableOps.replaceTable(specifySchema = True, columnTypeDict=col_schema,) elif table_manipulation.lower() == 'upsert': - manifest_table_id = TableOperations.upsertTable(self, sg=sg, tableToLoad = table_manifest, tableName=table_name, existingTableId=table_info[table_name], datasetId=datasetId) + manifest_table_id = tableOps.upsertTable(sg=sg,) elif table_manipulation.lower() == 'update': - manifest_table_id = TableOperations.updateTable(self, tableToLoad=table_manifest, existingTableId=table_info[table_name], restrict=restrict) + manifest_table_id = tableOps.updateTable() @@ -1929,9 +1943,23 @@ class TableOperations: Operations currently in development are: upsertTable: add metadata from a manifest to an existing table that contains metadata from another manifest """ + def __init__(self, + synStore: SynapseStorage, + tableToLoad: pd.DataFrame = None, + tableName: str = None, + datasetId: str = None, + existingTableId: str = None, + restrict: bool = False + ): + self.synStore = synStore + self.tableToLoad = tableToLoad + self.tableName = tableName + self.datasetId = datasetId + self.existingTableId = existingTableId + self.restrict = restrict - def createTable(synStore: SynapseStorage, tableToLoad: pd.DataFrame = None, tableName: str = None, datasetId: str = None, columnTypeDict: dict = None, specifySchema: bool = True, restrict: bool = False): + def createTable(self, columnTypeDict: dict = None, specifySchema: bool = True,): """ Method to create a table from a metadata manifest and upload it to synapse @@ -1948,19 +1976,19 @@ def createTable(synStore: SynapseStorage, tableToLoad: pd.DataFrame = None, tabl table.schema.id: synID of the newly created table """ - datasetEntity = synStore.syn.get(datasetId, downloadFile = False) + datasetEntity = self.synStore.syn.get(self.datasetId, downloadFile = False) datasetName = datasetEntity.name - table_schema_by_cname = synStore._get_table_schema_by_cname(columnTypeDict) + table_schema_by_cname = self.synStore._get_table_schema_by_cname(columnTypeDict) - if not tableName: - tableName = datasetName + 'table' - datasetParentProject = synStore.getDatasetProject(datasetId) + if not self.tableName: + self.tableName = datasetName + 'table' + datasetParentProject = self.synStore.getDatasetProject(self.datasetId) if specifySchema: if columnTypeDict == {}: logger.error("Did not provide a columnTypeDict.") #create list of columns: cols = [] - for col in tableToLoad.columns: + for col in self.tableToLoad.columns: if col in table_schema_by_cname: col_type = table_schema_by_cname[col]['columnType'] max_size = table_schema_by_cname[col]['maximumSize'] if 'maximumSize' in table_schema_by_cname[col].keys() else 100 @@ -1976,18 +2004,18 @@ def createTable(synStore: SynapseStorage, tableToLoad: pd.DataFrame = None, tabl else: #TODO add warning that the given col was not found and it's max size is set to 100 cols.append(Column(name=col, columnType='STRING', maximumSize=100)) - schema = Schema(name=tableName, columns=cols, parent=datasetParentProject) - table = Table(schema, tableToLoad) - table = synStore.syn.store(table, isRestricted = restrict) + schema = Schema(name=self.tableName, columns=cols, parent=datasetParentProject) + table = Table(schema, self.tableToLoad) + table = self.synStore.syn.store(table, isRestricted = self.restrict) return table.schema.id else: # For just uploading the tables 
to synapse using default # column types. - table = build_table(tableName, datasetParentProject, tableToLoad) - table = synStore.syn.store(table, isRestricted = restrict) + table = build_table(self.tableName, datasetParentProject, self.tableToLoad) + table = self.synStore.syn.store(table, isRestricted = self.restrict) return table.schema.id - def replaceTable(synStore: SynapseStorage, tableToLoad: pd.DataFrame = None, tableName: str = None, existingTableId: str = None, specifySchema: bool = True, datasetId: str = None, columnTypeDict: dict = None, restrict: bool = False): + def replaceTable(self, specifySchema: bool = True, columnTypeDict: dict = None,): """ Method to replace an existing table on synapse with metadata from a new manifest @@ -2004,34 +2032,34 @@ def replaceTable(synStore: SynapseStorage, tableToLoad: pd.DataFrame = None, tab Returns: existingTableId: synID of the already existing table that had its metadata replaced """ - datasetEntity = synStore.syn.get(datasetId, downloadFile = False) + datasetEntity = self.synStore.syn.get(self.datasetId, downloadFile = False) datasetName = datasetEntity.name - table_schema_by_cname = synStore._get_table_schema_by_cname(columnTypeDict) - existing_table, existing_results = synStore.get_synapse_table(existingTableId) + table_schema_by_cname = self.synStore._get_table_schema_by_cname(columnTypeDict) + existing_table, existing_results = self.synStore.get_synapse_table(self.existingTableId) # remove rows - synStore.syn.delete(existing_results) + self.synStore.syn.delete(existing_results) # wait for row deletion to finish on synapse before getting empty table sleep(10) # removes all current columns - current_table = synStore.syn.get(existingTableId) - current_columns = synStore.syn.getTableColumns(current_table) + current_table = self.synStore.syn.get(self.existingTableId) + current_columns = self.synStore.syn.getTableColumns(current_table) for col in current_columns: current_table.removeColumn(col) - if not tableName: - tableName = datasetName + 'table' + if not self.tableName: + self.tableName = datasetName + 'table' # Process columns according to manifest entries - table_schema_by_cname = synStore._get_table_schema_by_cname(columnTypeDict) - datasetParentProject = synStore.getDatasetProject(datasetId) + table_schema_by_cname = self.synStore._get_table_schema_by_cname(columnTypeDict) + datasetParentProject = self.synStore.getDatasetProject(self.datasetId) if specifySchema: if columnTypeDict == {}: logger.error("Did not provide a columnTypeDict.") #create list of columns: cols = [] - for col in tableToLoad.columns: + for col in self.tableToLoad.columns: if col in table_schema_by_cname: col_type = table_schema_by_cname[col]['columnType'] @@ -2053,25 +2081,25 @@ def replaceTable(synStore: SynapseStorage, tableToLoad: pd.DataFrame = None, tab # adds new columns to schema for col in cols: current_table.addColumn(col) - synStore.syn.store(current_table, isRestricted = restrict) + self.synStore.syn.store(current_table, isRestricted = self.restrict) # wait for synapse store to finish sleep(1) # build schema and table from columns and store with necessary restrictions - schema = Schema(name=tableName, columns=cols, parent=datasetParentProject) - schema.id = existingTableId - table = Table(schema, tableToLoad, etag = existing_results.etag) - table = synStore.syn.store(table, isRestricted = restrict) + schema = Schema(name=self.tableName, columns=cols, parent=datasetParentProject) + schema.id = self.existingTableId + table = Table(schema, 
self.tableToLoad, etag = existing_results.etag) + table = self.synStore.syn.store(table, isRestricted = self.restrict) else: logging.error("Must specify a schema for table replacements") # remove system metadata from manifest existing_table.drop(columns = ['ROW_ID', 'ROW_VERSION'], inplace = True) - return existingTableId + return self.existingTableId - def _get_schematic_db_creds(synStore: SynapseStorage): + def _get_schematic_db_creds(self,): username = None authtoken = None @@ -2085,13 +2113,13 @@ def _get_schematic_db_creds(synStore: SynapseStorage): # Get token from authorization header # Primarily useful for API endpoint functionality - if 'Authorization' in synStore.syn.default_headers: - authtoken = synStore.syn.default_headers['Authorization'].split('Bearer ')[-1] + if 'Authorization' in self.synStore.syn.default_headers: + authtoken = self.synStore.syn.default_headers['Authorization'].split('Bearer ')[-1] return username, authtoken # retrive credentials from synapse object # Primarily useful for local users, could only be stored here when a .synapseConfig file is used, but including to be safe - synapse_object_creds = synStore.syn.credentials + synapse_object_creds = self.synStore.syn.credentials if hasattr(synapse_object_creds, 'username'): username = synapse_object_creds.username if hasattr(synapse_object_creds, '_token'): @@ -2100,7 +2128,7 @@ def _get_schematic_db_creds(synStore: SynapseStorage): # Try getting creds from .synapseConfig file if it exists # Primarily useful for local users. Seems to correlate with credentials stored in synaspe object when logged in if os.path.exists(CONFIG.SYNAPSE_CONFIG_PATH): - config = synStore.syn.getConfigFile(CONFIG.SYNAPSE_CONFIG_PATH) + config = self.synStore.syn.getConfigFile(CONFIG.SYNAPSE_CONFIG_PATH) # check which credentials are provided in file if config.has_option('authentication', 'username'): @@ -2122,7 +2150,7 @@ def _get_schematic_db_creds(synStore: SynapseStorage): return username, authtoken - def upsertTable(synStore: SynapseStorage, sg: SchemaGenerator, tableToLoad: pd.DataFrame = None, tableName: str = None, existingTableId: str = None, datasetId: str = None): + def upsertTable(self, sg: SchemaGenerator,): """ Method to upsert rows from a new manifest into an existing table on synapse For upsert functionality to work, primary keys must follow the naming convention of _id @@ -2142,26 +2170,26 @@ def upsertTable(synStore: SynapseStorage, sg: SchemaGenerator, tableToLoad: pd.D existingTableId: synID of the already existing table that had its metadata replaced """ - username, authtoken = TableOperations._get_schematic_db_creds(synStore) + username, authtoken = self._get_schematic_db_creds() - synConfig = SynapseConfig(username, authtoken, synStore.getDatasetProject(datasetId)) + synConfig = SynapseConfig(username, authtoken, self.synStore.getDatasetProject(self.datasetId)) synapseDB = SynapseDatabase(synConfig) try: # Try performing upsert - synapseDB.upsert_table_rows(table_name=tableName, data=tableToLoad) + synapseDB.upsert_table_rows(table_name=self.tableName, data=self.tableToLoad) except(SynapseHTTPError) as ex: # If error is raised because Table has old `Uuid` column and not new `Id` column, then handle and re-attempt upload if 'Id is not a valid column name or id' in str(ex): - TableOperations._update_table_uuid_column(synStore, existingTableId, sg) - synapseDB.upsert_table_rows(table_name=tableName, data=tableToLoad) + self._update_table_uuid_column(sg) + synapseDB.upsert_table_rows(table_name=self.tableName, 
data=self.tableToLoad) # Raise if other error else: raise ex - return existingTableId + return self.existingTableId - def _update_table_uuid_column(synStore: SynapseStorage, table_id: str, sg: SchemaGenerator,) -> None: + def _update_table_uuid_column(self, sg: SchemaGenerator,) -> None: """Removes the `Uuid` column when present, and relpaces with an `Id` column Used to enable backwards compatability for manifests using the old `Uuid` convention @@ -2173,8 +2201,8 @@ def _update_table_uuid_column(synStore: SynapseStorage, table_id: str, sg: Schem """ # Get the columns of the schema - schema = synStore.syn.get(table_id) - cols = synStore.syn.getTableColumns(schema) + schema = self.synStore.syn.get(self.existingTableId) + cols = self.synStore.syn.getTableColumns(schema) # Iterate through columns until `Uuid` column is found for col in cols: @@ -2189,7 +2217,7 @@ def _update_table_uuid_column(synStore: SynapseStorage, table_id: str, sg: Schem if uuid_col_in_schema: new_col = Column(columnType = "STRING", maximumSize = 64, name = "Id") schema.addColumn(new_col) - schema = synStore.syn.store(schema) + schema = self.synStore.syn.store(schema) # If there is not, then use the old `Uuid` column as a basis for the new `Id` column else: pass @@ -2208,7 +2236,7 @@ def _update_table_uuid_column(synStore: SynapseStorage, table_id: str, sg: Schem return - def _populate_new_id_column(synStore: SynapseStorage, table_id: str, schema: Schema) -> None: + def _populate_new_id_column(self, table_id: str, schema: Schema) -> None: """Copies the uuid values that were present in the column named `Uuid` to the new column named `Id` Args: @@ -2219,15 +2247,15 @@ def _populate_new_id_column(synStore: SynapseStorage, table_id: str, schema: Sch None """ # Query the table for the old `Uuid` column and new `Id` column - results = synStore.syn.tableQuery(f"select Uuid,Id from {table_id}") + results = self.synStore.syn.tableQuery(f"select Uuid,Id from {table_id}") results_df = results.asDataFrame() # Copy uuid values to new column, and store in table results_df = populate_df_col_with_another_col(results_df, 'Uuid', 'Id') - table = synStore.syn.store(Table(schema, results_df, etag=results.etag)) + table = self.synStore.syn.store(Table(schema, results_df, etag=results.etag)) return - def updateTable(synStore: SynapseStorage, tableToLoad: pd.DataFrame = None, existingTableId: str = None, update_col: str = 'Id', restrict: bool = False): + def updateTable(self, update_col: str = 'Id',): """ Method to update an existing table with a new column @@ -2241,13 +2269,13 @@ def updateTable(synStore: SynapseStorage, tableToLoad: pd.DataFrame = None, exis Returns: existingTableId: synID of the already existing table that had its metadata replaced """ - existing_table, existing_results = synStore.get_synapse_table(existingTableId) + existing_table, existing_results = self.synStore.get_synapse_table(self.existingTableId) - tableToLoad = update_df(existing_table, tableToLoad, update_col) + self.tableToLoad = update_df(existing_table, self.tableToLoad, update_col) # store table with existing etag data and impose restrictions as appropriate - synStore.syn.store(Table(existingTableId, tableToLoad, etag = existing_results.etag), isRestricted = restrict) + self.synStore.syn.store(Table(self.existingTableId, self.tableToLoad, etag = existing_results.etag), isRestricted = self.restrict) - return existingTableId + return self.existingTableId class DatasetFileView: From fa6cfdc79681ce92308e78627db15f61ab6a85a3 Mon Sep 17 00:00:00 2001 From: Gianna 
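The control flow introduced above, pulled out of context: the upsert is attempted once, and only the specific legacy-schema failure triggers a column rename and a single retry. A condensed sketch follows; the helper name and its arguments are hypothetical, not the patch's actual signature.

    # Illustrative sketch only -- condenses the upsertTable fallback above.
    from synapseclient.core.exceptions import SynapseHTTPError

    def upsert_with_uuid_fallback(synapse_db, table_ops, table_name, rows, sg):
        """Try an upsert; if the table still has the old `Uuid` column,
        swap it for an `Id` column and retry once."""
        try:
            synapse_db.upsert_table_rows(table_name=table_name, data=rows)
        except SynapseHTTPError as ex:
            if "Id is not a valid column name or id" in str(ex):
                table_ops._update_table_uuid_column(sg)  # Uuid -> Id
                synapse_db.upsert_table_rows(table_name=table_name, data=rows)
            else:
                raise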
Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Fri, 26 May 2023 11:07:25 -0700 Subject: [PATCH 036/135] update docstrings --- schematic/store/synapse.py | 41 +++++++++++++++----------------------- 1 file changed, 16 insertions(+), 25 deletions(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index 630d847e3..879caa91b 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -1951,6 +1951,17 @@ def __init__(self, existingTableId: str = None, restrict: bool = False ): + + """ + Class governing table operations (creation, replacement, upserts, updates) in schematic + + tableToLoad: manifest formatted appropriately for the table + tableName: name of the table to be uploaded + datasetId: synID of the dataset for the manifest + existingTableId: synId of the table currently exising on synapse (if there is one) + restrict: bool, whether or not the manifest contains sensitive data that will need additional access restrictions + + """ self.synStore = synStore self.tableToLoad = tableToLoad self.tableName = tableName @@ -1964,13 +1975,8 @@ def createTable(self, columnTypeDict: dict = None, specifySchema: bool = True,): Method to create a table from a metadata manifest and upload it to synapse Args: - tableToLoad: manifest formatted appropriately for the table - tableName: name of the table to be uploaded - datasetId: synID of the dataset for the manifest columnTypeDict: dictionary schema for table columns: type, size, etc - specifySchema: to specify a specific schema for the table format - restrict: bool, whether or not the manifest contains sensitive data that will need additional access restrictions - + specifySchema: to specify a specific schema for the table format Returns: table.schema.id: synID of the newly created table @@ -2020,14 +2026,8 @@ def replaceTable(self, specifySchema: bool = True, columnTypeDict: dict = None,) Method to replace an existing table on synapse with metadata from a new manifest Args: - tableToLoad: manifest formatted appropriately for the table - tableName: name of the table to be uploaded - existingTableId: synId of the existing table to be replaced specifySchema: to infer a schema for the table format - datasetId: synID of the dataset for the manifest - columnTypeDict: dictionary schema for table columns: type, size, etc - restrict: bool, whether or not the manifest contains sensitive data that will need additional access restrictions - + columnTypeDict: dictionary schema for table columns: type, size, etc Returns: existingTableId: synID of the already existing table that had its metadata replaced @@ -2159,13 +2159,8 @@ def upsertTable(self, sg: SchemaGenerator,): Args: - tableToLoad: manifest formatted appropriately for the table - tableName: name of the table to be uploaded - existingTableId: synId of the existing table to be replaced - datasetId: synID of the dataset for the manifest - columnTypeDict: dictionary schema for table columns: type, size, etc + sg: SchemaGenerator instance - Returns: existingTableId: synID of the already existing table that had its metadata replaced """ @@ -2194,7 +2189,7 @@ def _update_table_uuid_column(self, sg: SchemaGenerator,) -> None: Used to enable backwards compatability for manifests using the old `Uuid` convention Args: - table_id (str): The Synapse id of the table to be upserted into, that needs columns updated + sg: SchemaGenerator instance Returns: None @@ -2260,11 +2255,7 @@ def updateTable(self, update_col: str = 'Id',): Method to update an existing table with a new 
column Args: - tableToLoad: manifest formatted appropriately for the table, that contains the new column - existingTableId: synId of the existing table to be replaced - updateCol: column to index the old and new tables on - restrict: bool, whether or not the manifest contains sensitive data that will need additional access restrictions - + updateCol: column to index the old and new tables on Returns: existingTableId: synID of the already existing table that had its metadata replaced From 0c0a4f29453ab1a408e3879b87b5d5fc3c70620f Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 31 May 2023 15:16:30 -0700 Subject: [PATCH 037/135] add method to make synapse api calls --- schematic/store/synapse.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index 879caa91b..1e8777708 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -297,6 +297,15 @@ def wrapper(*args, **kwargs): raise ex return wrapper + def _send_api_request(self, request_type: str, uri: str, headers: Dict, body: Union[Dict, str], endpoint: str = None, **kwargs): + response = None + body = json.dumps(body) + + request = getattr(self.syn, request_type) + + response = request(uri, body, endpoint, headers) + + return response def getStorageFileviewTable(self): """ Returns the storageFileviewTable obtained during initialization. From 39c8b77c3c4af79660cf46ea4237e588179fea36 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 31 May 2023 15:18:40 -0700 Subject: [PATCH 038/135] rename column with api calls --- schematic/store/synapse.py | 41 ++++++++++++++++++++++++++++++++++---- 1 file changed, 37 insertions(+), 4 deletions(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index 1e8777708..0cce5b0f6 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -2224,17 +2224,50 @@ def _update_table_uuid_column(self, sg: SchemaGenerator,) -> None: schema = self.synStore.syn.store(schema) # If there is not, then use the old `Uuid` column as a basis for the new `Id` column else: - pass + # Build ColumnModel that will be used for new column + columnModelDict = { + "id": None, + "name": "Id", + "defaultValue": None, + "columnType": "STRING", + "maximumSize": 64, + "maximumListLength": 1, + } # Send POST /column request to define new column and get new column ID - + newColResponse = self.synStore._send_api_request( + request_type = "restPOST", + uri = "https://repo-prod.prod.sagebase.org/repo/v1/column", + body = columnModelDict, + headers = self.synStore.syn.default_headers + ) + # Define columnChange body - + columnChangeDict = { + "concreteType": "org.sagebionetworks.repo.model.table.TableSchemaChangeRequest", + "entityId": self.existingTableId, + "changes": [ + { + "oldColumnId": col['id'], + "newColumnId": newColResponse['id'], + } + ] + } + # Build body for POST request + schemaChangeBody = { + "entityId": self.existingTableId, + "changes": [columnChangeDict], + } # Send POST request to change column name - + schemaChangeResponse = self.synStore._send_api_request( + request_type = "restPOST", + uri = f"https://repo-prod.prod.sagebase.org/repo/v1/entity/{self.existingTableId}/table/transaction/async/start", + body = schemaChangeBody, + headers = self.synStore.syn.default_headers + ) # Exit iteration; only concerned with `Uuid` column break From bef3a8cd49c6e31dfe935cf41913d48f5e1cb5d2 Mon Sep 17 00:00:00 2001 From: 
Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Thu, 1 Jun 2023 09:11:22 -0700 Subject: [PATCH 039/135] update method name --- schematic/store/synapse.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index 0cce5b0f6..d167013ca 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -297,7 +297,7 @@ def wrapper(*args, **kwargs): raise ex return wrapper - def _send_api_request(self, request_type: str, uri: str, headers: Dict, body: Union[Dict, str], endpoint: str = None, **kwargs): + def send_api_request(self, request_type: str, uri: str, headers: Dict, body: Union[Dict, str], endpoint: str = None, **kwargs): response = None body = json.dumps(body) @@ -2236,7 +2236,7 @@ def _update_table_uuid_column(self, sg: SchemaGenerator,) -> None: } # Send POST /column request to define new column and get new column ID - newColResponse = self.synStore._send_api_request( + newColResponse = self.synStore.send_api_request( request_type = "restPOST", uri = "https://repo-prod.prod.sagebase.org/repo/v1/column", body = columnModelDict, @@ -2260,9 +2260,9 @@ def _update_table_uuid_column(self, sg: SchemaGenerator,) -> None: "entityId": self.existingTableId, "changes": [columnChangeDict], } - + # Send POST request to change column name - schemaChangeResponse = self.synStore._send_api_request( + schemaChangeResponse = self.synStore.send_api_request( request_type = "restPOST", uri = f"https://repo-prod.prod.sagebase.org/repo/v1/entity/{self.existingTableId}/table/transaction/async/start", body = schemaChangeBody, From 8d8a227be927c277c0cec559d414180bd297e570 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Thu, 1 Jun 2023 10:05:10 -0700 Subject: [PATCH 040/135] only convert to string if Dict is passed in --- schematic/store/synapse.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index 29edb21e9..5370e4498 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -298,7 +298,9 @@ def wrapper(*args, **kwargs): def send_api_request(self, request_type: str, uri: str, headers: Dict, body: Union[Dict, str], endpoint: str = None, **kwargs): response = None - body = json.dumps(body) + + if isinstance(body, Dict): + body = json.dumps(body) request = getattr(self.syn, request_type) From aa1d410cfe6929edbc6d1c78c5e6ac30487194b9 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Thu, 1 Jun 2023 10:15:38 -0700 Subject: [PATCH 041/135] check passed in `request_type` string --- schematic/store/synapse.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index 5370e4498..4d864d7ad 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -297,12 +297,25 @@ def wrapper(*args, **kwargs): return wrapper def send_api_request(self, request_type: str, uri: str, headers: Dict, body: Union[Dict, str], endpoint: str = None, **kwargs): + request_types = { + 'restget': 'restGET', + 'restpost': 'restPOST', + 'restput': 'restPUT', + 'restdelete': 'restDELETE', + + } + response = None if isinstance(body, Dict): body = json.dumps(body) - request = getattr(self.syn, request_type) + if request_type.lower() in request_types.keys(): + request = getattr(self.syn, request_types[request_type]) + else: + raise NotImplementedError( + "The 
selected request is currently not exposed in the synapsePythonClient and cannot be used."
+            )

         response = request(uri, body, endpoint, headers)

From 5ba2e625ae14cfade265b94cc713935975c62143 Mon Sep 17 00:00:00 2001
From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com>
Date: Thu, 1 Jun 2023 10:18:08 -0700
Subject: [PATCH 042/135] update error message

---
 schematic/store/synapse.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py
index 4d864d7ad..0a13aaaf5 100644
--- a/schematic/store/synapse.py
+++ b/schematic/store/synapse.py
@@ -314,7 +314,7 @@ def send_api_request(self, request_type: str, uri: str, headers: Dict, body: Uni
             request = getattr(self.syn, request_types[request_type])
         else:
             raise NotImplementedError(
-                "The selected request is currently not exposed in the synapsePythonClient and cannot be used."
+                f"The selected request: {request_type} is currently not exposed in the synapsePythonClient and cannot be used."
             )

         response = request(uri, body, endpoint, headers)

From 0d93ed04603a1fded100a31bcc3dd826ae22f233 Mon Sep 17 00:00:00 2001
From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com>
Date: Thu, 1 Jun 2023 10:30:27 -0700
Subject: [PATCH 043/135] add comments and docstring

---
 schematic/store/synapse.py | 20 ++++++++++++++++++++
 1 file changed, 20 insertions(+)

diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py
index 0a13aaaf5..47d0af540 100644
--- a/schematic/store/synapse.py
+++ b/schematic/store/synapse.py
@@ -297,6 +297,21 @@ def wrapper(*args, **kwargs):
         return wrapper

     def send_api_request(self, request_type: str, uri: str, headers: Dict, body: Union[Dict, str], endpoint: str = None, **kwargs):
+        """
+        Method to send API request to synapse via the python client
+
+        Args:
+            request_type: type of request to send (restGET, restPOST, restPUT, or restDELETE)
+            uri: uri on which request is performed
+            headers: dictionary of headers to use rather than the API-key-signed default set of headers
+            body: body of the request, can be dictionary or JSON formatted string
+            endpoint: name of the endpoint to use, defaults to none which is evaluated by the client as self.repoEndpoint
+            **kwargs: other keyword arguments to pass to the request method
+        """
+
+        # Make a dictionary for two purposes:
+        # to be used for checking that the value entered is a valid request type
+        # and to map from any capitalization of the string to the format required by the python client
         request_types = {
             'restget': 'restGET',
             'restpost': 'restPOST',
             'restput': 'restPUT',
             'restdelete': 'restDELETE',

         }

+        # initialize response variable to None
         response = None

+        # If the user passed in a dictionary, convert to JSON string,
+        # if they passed in a string, assume it's formatted appropriately
         if isinstance(body, Dict):
             body = json.dumps(body)

+        # Validate that entered request type is valid, and get the appropriate method from the python client
         if request_type.lower() in request_types.keys():
             request = getattr(self.syn, request_types[request_type])
         else:
             raise NotImplementedError(
                 f"The selected request: {request_type} is currently not exposed in the synapsePythonClient and cannot be used."
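A usage sketch for the wrapper as documented above, assuming a `SynapseStorage` instance named `store`; the lowercase lookup key shown here works both before and after the KeyError fix in the next patch, and a dict body is JSON-encoded automatically.

    # Illustrative sketch only -- calling the send_api_request wrapper.
    new_column = store.send_api_request(
        request_type="restpost",  # mapped to syn.restPOST via request_types
        uri="https://repo-prod.prod.sagebase.org/repo/v1/column",
        headers=store.syn.default_headers,
        body={"name": "Id", "columnType": "STRING", "maximumSize": 64},
    )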
            )

+        # Store request and return
         response = request(uri, body, endpoint, headers)

         return response

From 584c15bc16d672495624d33946e3d50e6a761b6d Mon Sep 17 00:00:00 2001
From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com>
Date: Thu, 1 Jun 2023 10:36:09 -0700
Subject: [PATCH 044/135] fix KeyError

---
 schematic/store/synapse.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py
index 47d0af540..8ba1ea626 100644
--- a/schematic/store/synapse.py
+++ b/schematic/store/synapse.py
@@ -330,7 +330,7 @@ def send_api_request(self, request_type: str, uri: str, headers: Dict, body: Uni

         # Validate that entered request type is valid, and get the appropriate method from the python client
         if request_type.lower() in request_types.keys():
-            request = getattr(self.syn, request_types[request_type])
+            request = getattr(self.syn, request_types[request_type.lower()])
         else:
             raise NotImplementedError(
                 f"The selected request: {request_type} is currently not exposed in the synapsePythonClient and cannot be used."

From b87376780a31d9464f0f70b5b24ccccb7deb5dd5 Mon Sep 17 00:00:00 2001
From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com>
Date: Thu, 1 Jun 2023 10:37:53 -0700
Subject: [PATCH 045/135] make header param optional

---
 schematic/store/synapse.py | 13 ++++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)

diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py
index 8ba1ea626..3360b54fa 100644
--- a/schematic/store/synapse.py
+++ b/schematic/store/synapse.py
@@ -296,16 +296,17 @@ def wrapper(*args, **kwargs):
             raise ex
         return wrapper

-    def send_api_request(self, request_type: str, uri: str, headers: Dict, body: Union[Dict, str], endpoint: str = None, **kwargs):
+    def send_api_request(self, request_type: str, uri: str, body: Union[Dict, str], headers: Dict = None, endpoint: str = None, **kwargs):
         """
         Method to send API request to synapse via the python client

         Args:
             request_type: type of request to send (restGET, restPOST, restPUT, or restDELETE)
             uri: uri on which request is performed
-            headers: dictionary of headers to use rather than the API-key-signed default set of headers
+            headers: Optional, dictionary of headers to use rather than the API-key-signed default set of headers
+                If none passed in, use headers from synapse store object
             body: body of the request, can be dictionary or JSON formatted string
-            endpoint: name of the endpoint to use, defaults to none which is evaluated by the client as self.repoEndpoint
+            endpoint: Optional, name of the endpoint to use, defaults to none which is evaluated by the client as self.repoEndpoint
             **kwargs: other keyword arguments to pass to the request method
         """

@@ -322,6 +323,10 @@ def send_api_request(self, request_type: str, uri: str, headers: Dict, body: Uni

         # initialize response variable to None
         response = None
+
+        # Use existing headers from synapse store object if none passed in
+        if not headers:
+            headers = self.syn.default_headers

         # If the user passed in a dictionary, convert to JSON string,
         # if they passed in a string, assume it's formatted appropriately
         if isinstance(body, Dict):
             body = json.dumps(body)
@@ -2274,7 +2279,6 @@ def _update_table_uuid_column(self, sg: SchemaGenerator,) -> None:
             request_type = "restPOST",
             uri = "https://repo-prod.prod.sagebase.org/repo/v1/column",
             body = columnModelDict,
-            headers = self.synStore.syn.default_headers
         )

         # Define columnChange body
@@ -2300,7 +2304,6 @@ def _update_table_uuid_column(self, sg: SchemaGenerator,) -> None:
             request_type = "restPOST",
             uri = f"https://repo-prod.prod.sagebase.org/repo/v1/entity/{self.existingTableId}/table/transaction/async/start",
             body = schemaChangeBody,
-            headers = self.synStore.syn.default_headers
f"https://repo-prod.prod.sagebase.org/repo/v1/entity/{self.existingTableId}/table/transaction/async/start", body = schemaChangeBody, - headers = self.synStore.syn.default_headers ) # Exit iteration; only concerned with `Uuid` column break From bc2445ff78a15a0a9781e1427efe965c606181cd Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Thu, 1 Jun 2023 13:53:42 -0700 Subject: [PATCH 046/135] pass along kwargs to request --- schematic/store/synapse.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index 3360b54fa..e0af6b566 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -342,7 +342,7 @@ def send_api_request(self, request_type: str, uri: str, body: Union[Dict, str], ) # Store request and return - response = request(uri, body, endpoint, headers) + response = request(uri, body, endpoint, headers, **kwargs) return response From d2f46b8ced69fb212f027f2289684e6b78282ceb Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Thu, 1 Jun 2023 14:43:14 -0700 Subject: [PATCH 047/135] remove basename from config fields that shouldnt have them --- config_example.yml | 4 ++-- schematic/configuration/configuration.py | 4 ++-- schematic/configuration/dataclasses.py | 13 +++++++------ tests/data/test_configs/default_config.yml | 4 ++-- tests/data/test_configs/valid_config.yml | 4 ++-- tests/test_configuration.py | 16 ++++++++-------- 6 files changed, 23 insertions(+), 22 deletions(-) diff --git a/config_example.yml b/config_example.yml index 7080ee800..ef699a067 100644 --- a/config_example.yml +++ b/config_example.yml @@ -18,7 +18,7 @@ asset_store: # Location where manifests will saved to manifest_folder: "manifests" -# This describes information about manifests as it relates to generation and validation +# This describes information about manifests as it relates to generation and validation manifest: # Title or title prefix given to generated manifest(s) title: "example" @@ -36,7 +36,7 @@ model: google_sheets: # The Synapse id of the Google service account credentials. service_acct_creds_synapse_id: "syn25171627" - # The basename of the Google service account credentials. + # Path to the synapse config file, either absolute or relative to this file service_acct_creds: "schematic_service_account_creds.json" # The template id of the google sheet. master_template_id: "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU" diff --git a/schematic/configuration/configuration.py b/schematic/configuration/configuration.py index 728de292a..7fdce8b10 100644 --- a/schematic/configuration/configuration.py +++ b/schematic/configuration/configuration.py @@ -115,7 +115,7 @@ def synapse_configuration_path(self) -> str: Returns: str: The path to the synapse configuration file """ - return self._normalize_path(self._synapse_config.config_basename) + return self._normalize_path(self._synapse_config.config) @property def synapse_manifest_basename(self) -> str: @@ -190,7 +190,7 @@ def service_account_credentials_path(self) -> str: str: The path of the Google service account credentials. 
""" return self._normalize_path( - self._google_sheets_config.service_acct_creds_basename + self._google_sheets_config.service_acct_creds ) @property diff --git a/schematic/configuration/dataclasses.py b/schematic/configuration/dataclasses.py index ff8f0f333..8fa64a33b 100644 --- a/schematic/configuration/dataclasses.py +++ b/schematic/configuration/dataclasses.py @@ -12,13 +12,13 @@ @dataclass(config=pydantic_config) class SynapseConfig: """ - config_basename: the basename of the synapse config file + config_basename: Path to the synapse config file, either absolute or relative to this file manifest_basename: the name of downloaded manifest files master_fileview_id: Synapse ID of the file view listing all project data assets. manifest_folder: name of the folder manifests will be saved to locally """ - config_basename: str = ".synapseConfig" + config: str = ".synapseConfig" manifest_basename: str = "synapse_storage_manifest" master_fileview_id: str = "syn23643253" manifest_folder: str = "manifests" @@ -41,7 +41,7 @@ def validate_synapse_id(cls, value: str) -> str: raise ValueError(f"{value} is not a valid Synapse id") return value - @validator("config_basename", "manifest_basename", "manifest_folder") + @validator("config", "manifest_basename", "manifest_folder") @classmethod def validate_string_is_not_empty(cls, value: str) -> str: """Check if string is not empty(has at least one char) @@ -125,15 +125,16 @@ class GoogleSheetsConfig: True is alerting the user and not allowing entry of bad values. False is warning but allowing the entry on to the sheet. service_acct_creds_synapse_id: The Synapse id of the Google service account credentials. - service_acct_creds_basename: The basename of the Google service account credentials. + service_acct_creds: Path to the Google service account credentials, + either absolute or relative to this file """ service_acct_creds_synapse_id: str = "syn25171627" - service_acct_creds_basename: str = "schematic_service_account_creds.json" + service_acct_creds: str = "schematic_service_account_creds.json" master_template_id: str = "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU" strict_validation: bool = True - @validator("service_acct_creds_basename") + @validator("service_acct_creds") @classmethod def validate_string_is_not_empty(cls, value: str) -> str: """Check if string is not empty(has at least one char) diff --git a/tests/data/test_configs/default_config.yml b/tests/data/test_configs/default_config.yml index 9a14a0759..be3ddac97 100644 --- a/tests/data/test_configs/default_config.yml +++ b/tests/data/test_configs/default_config.yml @@ -1,7 +1,7 @@ # This config has the same default values as schematic itself has asset_store: synapse: - config_basename: ".synapseConfig" + config: ".synapseConfig" manifest_basename: 'synapse_storage_manifest' master_fileview_id: 'syn23643253' manifest_folder: 'manifests' @@ -17,6 +17,6 @@ model: google_sheets: service_acct_creds_synapse_id: 'syn25171627' - service_acct_creds_basename: "schematic_service_account_creds.json" + service_acct_creds: "schematic_service_account_creds.json" master_template_id: "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU" strict_validation: true diff --git a/tests/data/test_configs/valid_config.yml b/tests/data/test_configs/valid_config.yml index 9f2fd6a96..de40acb39 100644 --- a/tests/data/test_configs/valid_config.yml +++ b/tests/data/test_configs/valid_config.yml @@ -2,7 +2,7 @@ asset_store: synapse: - config_basename: "file_name" + config: "file_name" manifest_basename: "file_name" 
master_fileview_id: "syn1" manifest_folder: "folder_name" @@ -17,6 +17,6 @@ model: google_sheets: service_acct_creds_synapse_id: "syn1" - service_acct_creds_basename: "creds.json" + service_acct_creds: "creds.json" master_template_id: "" strict_validation: false diff --git a/tests/test_configuration.py b/tests/test_configuration.py index 408cd94ca..bcc33fee1 100644 --- a/tests/test_configuration.py +++ b/tests/test_configuration.py @@ -21,7 +21,7 @@ def test_synapse_config(self) -> None: assert isinstance(SynapseConfig(), SynapseConfig) assert isinstance( SynapseConfig( - config_basename="file_name", + config="file_name", manifest_basename="file_name", master_fileview_id="syn1", manifest_folder="folder_name", @@ -31,7 +31,7 @@ def test_synapse_config(self) -> None: with pytest.raises(ValidationError): SynapseConfig( - config_basename=None, + config=None, manifest_basename="file_name", master_fileview_id="syn1", manifest_folder="folder_name", @@ -39,7 +39,7 @@ def test_synapse_config(self) -> None: with pytest.raises(ValidationError): SynapseConfig( - config_basename="file_name", + config="file_name", manifest_basename="file_name", master_fileview_id="syn", manifest_folder="folder_name", @@ -47,7 +47,7 @@ def test_synapse_config(self) -> None: with pytest.raises(ValidationError): SynapseConfig( - config_basename="", + config="", manifest_basename="file_name", master_fileview_id="syn", manifest_folder="folder_name", @@ -80,7 +80,7 @@ def test_google_sheets_config(self) -> None: assert isinstance(GoogleSheetsConfig(), GoogleSheetsConfig) assert isinstance( GoogleSheetsConfig( - service_acct_creds_basename="file_name", + service_acct_creds="file_name", service_acct_creds_synapse_id="syn1", master_template_id="id", strict_validation=True, @@ -89,21 +89,21 @@ def test_google_sheets_config(self) -> None: ) with pytest.raises(ValidationError): GoogleSheetsConfig( - service_acct_creds_basename="file_name", + service_acct_creds="file_name", service_acct_creds_synapse_id="syn1", master_template_id="id", strict_validation="tru", ) with pytest.raises(ValidationError): GoogleSheetsConfig( - service_acct_creds_basename="", + service_acct_creds="", service_acct_creds_synapse_id="syn1", master_template_id="id", strict_validation=True, ) with pytest.raises(ValidationError): GoogleSheetsConfig( - service_acct_creds_basename="file_name", + service_acct_creds="file_name", service_acct_creds_synapse_id="syn", master_template_id="id", strict_validation=True, From 45a68f3fd5ceee6fe7efabc1bbd3a9c3858c3444 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Thu, 1 Jun 2023 14:50:13 -0700 Subject: [PATCH 048/135] fix file nor formatted with black --- schematic/configuration/configuration.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/schematic/configuration/configuration.py b/schematic/configuration/configuration.py index 7fdce8b10..63785b448 100644 --- a/schematic/configuration/configuration.py +++ b/schematic/configuration/configuration.py @@ -189,9 +189,7 @@ def service_account_credentials_path(self) -> str: Returns: str: The path of the Google service account credentials. 
""" - return self._normalize_path( - self._google_sheets_config.service_acct_creds - ) + return self._normalize_path(self._google_sheets_config.service_acct_creds) @property def google_sheets_master_template_id(self) -> str: From 7ceb4e74c4da0efb548f8bfdb1fc7f36b07584b0 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Thu, 1 Jun 2023 15:05:00 -0700 Subject: [PATCH 049/135] update docstring --- schematic/store/synapse.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index e0af6b566..0e00e49d4 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -308,6 +308,10 @@ def send_api_request(self, request_type: str, uri: str, body: Union[Dict, str], body: body of the request, can be dictionary or JSON formatted string endpoint: Optional, name of the endpoint to use, defaults to none which is evaluated by the client as self.repoEndpoint **kwargs: other keyword arguments to pass to the request method + + Returns: + response: response from the request sent + """ # Make a dictionary for two purposes: From d762313b8d92ae2354f0dd067a174a79ce1a2fd0 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Thu, 1 Jun 2023 15:05:13 -0700 Subject: [PATCH 050/135] add todo list --- schematic/store/synapse.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index 0e00e49d4..54a58c4d3 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -312,6 +312,9 @@ def send_api_request(self, request_type: str, uri: str, body: Union[Dict, str], Returns: response: response from the request sent + TODO: + Allow asynchronous calls to operate asynchronously on the schematic side + Further generalize this function for other calls, ie. 
GET calls without a body
+        """
 
         # Make a dictionary for two purposes:
         # to be used for checking that the value entered is a valid request type
         # and to map from any capitalization of the string to the format required by the python client

From 8e9e690e4d91fdee6d6f123957a6cac8e183b339 Mon Sep 17 00:00:00 2001
From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com>
Date: Thu, 1 Jun 2023 15:10:43 -0700
Subject: [PATCH 051/135] remove old method

---
 schematic/store/synapse.py | 19 -------------------
 1 file changed, 19 deletions(-)

diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py
index 54a58c4d3..3c6f99516 100644
--- a/schematic/store/synapse.py
+++ b/schematic/store/synapse.py
@@ -2317,25 +2317,6 @@ def _update_table_uuid_column(self, sg: SchemaGenerator,) -> None:
 
         return
 
-    def _populate_new_id_column(self, table_id: str, schema: Schema) -> None:
-        """Copies the uuid values that were present in the column named `Uuid` to the new column named `Id`
-
-        Args:
-            table_id (str): The Synapse id of the table to be upserted into, that needs columns updated
-            schema (synapseclient.table.Schema): Schema of the table columns
-
-        Returns:
-            None
-        """
-        # Query the table for the old `Uuid` column and new `Id` column
-        results = self.synStore.syn.tableQuery(f"select Uuid,Id from {table_id}")
-        results_df = results.asDataFrame()
-
-        # Copy uuid values to new column, and store in table
-        results_df = populate_df_col_with_another_col(results_df, 'Uuid', 'Id')
-        table = self.synStore.syn.store(Table(schema, results_df, etag=results.etag))
-        return
-
     def updateTable(self, update_col: str = 'Id',):
         """
         Method to update an existing table with a new column

From 028a52f5b2ff3a877b4175baa78080356ccee29b Mon Sep 17 00:00:00 2001
From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com>
Date: Thu, 1 Jun 2023 15:18:43 -0700
Subject: [PATCH 052/135] add unit test

---
 tests/test_store.py | 26 ++++++++++++++++++++++++++
 1 file changed, 26 insertions(+)

diff --git a/tests/test_store.py b/tests/test_store.py
index 626d1e23d..dfa8cd076 100644
--- a/tests/test_store.py
+++ b/tests/test_store.py
@@ -218,6 +218,32 @@ def test_getDatasetManifest(self, synapse_store, downloadFile):
         # return manifest id
         assert manifest_data == "syn51204513"
 
+    def test_synapse_rest_request(self, synapse_store):
+        """
+        Test to ensure that we can successfully send a request to the Synapse API
+        TODO:
+            Add more test cases as more use cases for request method arise
+        """
+
+        # Build a dictionary to send to the API to create a new column
+        testColumnDict = {
+            "id": None,
+            "name": "TestColumn",
+            "defaultValue": None,
+            "columnType": "STRING",
+            "maximumSize": 64,
+            "maximumListLength": 1,
+            }
+
+        # Send POST /column request to define new column
+        newColResponse = synapse_store.send_api_request(
+            request_type = "restPOST",
+            uri = "https://repo-prod.prod.sagebase.org/repo/v1/column",
+            body = testColumnDict,
+            )
+
+        assert newColResponse is not None
+
 
 class TestDatasetFileView:
     def test_init(self, dataset_id, dataset_fileview, synapse_store):

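An aside, purely for illustration and not part of the patch series: the unit test above exercises the live Synapse API. Under the assumption that only the plumbing of `send_api_request` is under test, an offline variant could mock the underlying client method instead; the fixture usage and the mocked return value below are hypothetical.

```python
# Hypothetical offline variant of test_synapse_rest_request (illustration only):
# the python client's restPOST is mocked so that no network call is made.
from unittest.mock import MagicMock

def test_synapse_rest_request_mocked(synapse_store):
    fake_column = {"id": "123", "name": "TestColumn", "columnType": "STRING"}
    # send_api_request looks the method up via getattr(self.syn, "restPOST"),
    # so assigning a mock onto that client attribute intercepts the call
    synapse_store.syn.restPOST = MagicMock(return_value=fake_column)

    response = synapse_store.send_api_request(
        request_type="restPOST",  # any capitalization is accepted after PATCH 044
        uri="https://repo-prod.prod.sagebase.org/repo/v1/column",
        body={"name": "TestColumn", "columnType": "STRING"},  # dumped to JSON internally
    )

    assert response == fake_column
    synapse_store.syn.restPOST.assert_called_once()
```
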
From d3ea6d43d020022e96f3eba6700473f6f43dd2 Mon Sep 17 00:00:00 2001
From: andrewelamb
Date: Thu, 1 Jun 2023 16:38:07 -0700
Subject: [PATCH 053/135] remove google master sheet id from config

---
 config_example.yml                         |  2 --
 schematic/configuration/configuration.py   | 11 +----------
 schematic/configuration/dataclasses.py     |  1 -
 tests/data/test_configs/default_config.yml |  1 -
 tests/data/test_configs/valid_config.yml   |  1 -
 tests/test_configuration.py                |  8 +++-----
 6 files changed, 4 insertions(+), 20 deletions(-)

diff --git a/config_example.yml b/config_example.yml
index ef699a067..b035fb626 100644
--- a/config_example.yml
+++ b/config_example.yml
@@ -38,8 +38,6 @@ google_sheets:
   service_acct_creds_synapse_id: "syn25171627"
   # Path to the synapse config file, either absolute or relative to this file
   service_acct_creds: "schematic_service_account_creds.json"
-  # The template id of the google sheet.
-  master_template_id: "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU"
   # When doing google sheet validation (regex match) with the validation rules.
   # True is alerting the user and not allowing entry of bad values.
   # False is warning but allowing the entry on to the sheet.
diff --git a/schematic/configuration/configuration.py b/schematic/configuration/configuration.py
index 63785b448..a2348128d 100644
--- a/schematic/configuration/configuration.py
+++ b/schematic/configuration/configuration.py
@@ -197,16 +197,7 @@ def google_sheets_master_template_id(self) -> str:
         Returns:
             str: The template id of the google sheet.
         """
-        return self._google_sheets_config.master_template_id
-
-    @google_sheets_master_template_id.setter
-    def google_sheets_master_template_id(self, template_id: str) -> None:
-        """Sets the Google sheets master template ID
-
-        Args:
-            template_id (str): The template id to set
-        """
-        self._google_sheets_config.master_template_id = template_id
+        return "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU"
 
     @property
     def google_sheets_strict_validation(self) -> bool:
diff --git a/schematic/configuration/dataclasses.py b/schematic/configuration/dataclasses.py
index 8fa64a33b..3e1a31fe1 100644
--- a/schematic/configuration/dataclasses.py
+++ b/schematic/configuration/dataclasses.py
@@ -131,7 +131,6 @@ class GoogleSheetsConfig:
 
     service_acct_creds_synapse_id: str = "syn25171627"
     service_acct_creds: str = "schematic_service_account_creds.json"
-    master_template_id: str = "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU"
     strict_validation: bool = True
 
     @validator("service_acct_creds")
diff --git a/tests/data/test_configs/default_config.yml b/tests/data/test_configs/default_config.yml
index be3ddac97..e85127b1a 100644
--- a/tests/data/test_configs/default_config.yml
+++ b/tests/data/test_configs/default_config.yml
@@ -18,5 +18,4 @@ model:
 google_sheets:
   service_acct_creds_synapse_id: 'syn25171627'
   service_acct_creds: "schematic_service_account_creds.json"
-  master_template_id: "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU"
   strict_validation: true
diff --git a/tests/data/test_configs/valid_config.yml b/tests/data/test_configs/valid_config.yml
index de40acb39..7cab11352 100644
--- a/tests/data/test_configs/valid_config.yml
+++ b/tests/data/test_configs/valid_config.yml
@@ -18,5 +18,4 @@ model:
 google_sheets:
   service_acct_creds_synapse_id: "syn1"
   service_acct_creds: "creds.json"
-  master_template_id: ""
   strict_validation: false
diff --git a/tests/test_configuration.py b/tests/test_configuration.py
index bcc33fee1..03f22ad56 100644
--- a/tests/test_configuration.py
+++ b/tests/test_configuration.py
@@ -82,7 +82,6 @@ def test_google_sheets_config(self) -> None:
             GoogleSheetsConfig(
                 service_acct_creds="file_name",
                 service_acct_creds_synapse_id="syn1",
-                master_template_id="id",
                 strict_validation=True,
             ),
             GoogleSheetsConfig,
         )
         with pytest.raises(ValidationError):
             GoogleSheetsConfig(
                 service_acct_creds="file_name",
                 service_acct_creds_synapse_id="syn1",
-                master_template_id="id",
                 strict_validation="tru",
             )
         with pytest.raises(ValidationError):
             GoogleSheetsConfig(
                 service_acct_creds="",
                 service_acct_creds_synapse_id="syn1",
-                master_template_id="id",
                 strict_validation=True,
             )
         with pytest.raises(ValidationError):
GoogleSheetsConfig( service_acct_creds="file_name", service_acct_creds_synapse_id="syn", - master_template_id="id", strict_validation=True, ) @@ -195,7 +191,9 @@ def test_load_config2(self) -> None: assert config.model_location == "model.jsonld" assert config.service_account_credentials_synapse_id assert os.path.basename(config.service_account_credentials_path) == "creds.json" - assert config.google_sheets_master_template_id == "" + assert config.google_sheets_master_template_id == ( + "1LYS5qE4nV9jzcYw5sXwCza25slDfRA1CIg3cs-hCdpU" + ) assert not config.google_sheets_strict_validation def test_load_config3(self) -> None: From e91c9082cd3862fcf8850d305e16d6b13c2de48a Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Fri, 2 Jun 2023 08:34:39 -0700 Subject: [PATCH 054/135] removed uneeded part of test --- tests/test_manifest.py | 29 ++--------------------------- 1 file changed, 2 insertions(+), 27 deletions(-) diff --git a/tests/test_manifest.py b/tests/test_manifest.py index f38f32e4d..1fcb8b6ab 100644 --- a/tests/test_manifest.py +++ b/tests/test_manifest.py @@ -195,19 +195,16 @@ def test_get_manifest_excel(self, helpers, sheet_url, output_format, dataset_id) # test all the functions used under get_manifest @pytest.mark.parametrize("template_id", [["provided", "not provided"]]) - def test_create_empty_manifest_spreadsheet(self, config: Configuration, simple_manifest_generator, template_id): + def test_create_empty_manifest_spreadsheet(self, simple_manifest_generator, template_id): ''' Create an empty manifest spreadsheet regardless if master_template_id is provided Note: _create_empty_manifest_spreadsheet calls _gdrive_copy_file. If there's no template id provided in config, this function will create a new manifest ''' generator = simple_manifest_generator - - mock_spreadsheet = MagicMock() - title="Example" if template_id == "provided": - # mock _gdrive_copy_file function + # mock _gdrive_copy_file function with patch('schematic.manifest.generator.ManifestGenerator._gdrive_copy_file') as MockClass: instance = MockClass.return_value instance.method.return_value = 'mock google sheet id' @@ -215,28 +212,6 @@ def test_create_empty_manifest_spreadsheet(self, config: Configuration, simple_m spreadsheet_id = generator._create_empty_manifest_spreadsheet(title=title) assert spreadsheet_id == "mock google sheet id" - else: - # Temporarily set master template id to None so that we could test that - template_id = config.google_sheets_master_template_id - config.google_sheets_master_template_id = "" - - mock_spreadsheet = Mock() - mock_execute = Mock() - - - # Chain the mocks together - mock_spreadsheet.create.return_value = mock_spreadsheet - mock_spreadsheet.execute.return_value = mock_execute - mock_execute.get.return_value = "mock id" - mock_create = Mock(return_value=mock_spreadsheet) - - with patch.object(generator.sheet_service, "spreadsheets", mock_create): - - spreadsheet_id = generator._create_empty_manifest_spreadsheet(title) - assert spreadsheet_id == "mock id" - - # Reset config template id - config.google_sheets_master_template_id = config.google_sheets_master_template_id @pytest.mark.parametrize("schema_path_provided", [True, False]) def test_get_json_schema(self, simple_manifest_generator, helpers, schema_path_provided): From f5ce48d5dc6844501b81bcab80fc0424ec0f69a8 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Mon, 5 Jun 2023 13:05:27 -0700 Subject: [PATCH 055/135] mialys suggestions --- config_example.yml | 10 +++++----- schematic/__init__.py | 2 +- 
schematic/configuration/configuration.py | 4 ++-- schematic/configuration/dataclasses.py | 9 ++++----- schematic/manifest/commands.py | 9 ++++----- schematic/store/synapse.py | 2 +- tests/data/test_configs/default_config.yml | 2 +- tests/data/test_configs/valid_config.yml | 2 +- tests/test_api.py | 2 +- tests/test_cli.py | 2 +- tests/test_configuration.py | 10 +++------- tests/test_store.py | 1 - 12 files changed, 24 insertions(+), 31 deletions(-) diff --git a/config_example.yml b/config_example.yml index b035fb626..b08cc4a90 100644 --- a/config_example.yml +++ b/config_example.yml @@ -15,11 +15,11 @@ asset_store: config: ".synapseConfig" # Base name that manifest files will be saved as manifest_basename: "synapse_storage_manifest" - # Location where manifests will saved to - manifest_folder: "manifests" # This describes information about manifests as it relates to generation and validation manifest: + # Location where manifests will saved to + manifest_folder: "manifests" # Title or title prefix given to generated manifest(s) title: "example" # Data types of manifests to be generated or data type (singular) to validate manifest against @@ -32,13 +32,13 @@ model: # Location of your schema jsonld, it must be a path relative to this file or absolute location: "tests/data/example.model.jsonld" -# This section is for validation via google sheets +# This section is for using google sheets with Schematic google_sheets: # The Synapse id of the Google service account credentials. service_acct_creds_synapse_id: "syn25171627" # Path to the synapse config file, either absolute or relative to this file service_acct_creds: "schematic_service_account_creds.json" # When doing google sheet validation (regex match) with the validation rules. - # True is alerting the user and not allowing entry of bad values. - # False is warning but allowing the entry on to the sheet. + # true is alerting the user and not allowing entry of bad values. + # false is warning but allowing the entry on to the sheet. 
strict_validation: true diff --git a/schematic/__init__.py b/schematic/__init__.py index eb9597f23..de46c4fe4 100644 --- a/schematic/__init__.py +++ b/schematic/__init__.py @@ -41,5 +41,5 @@ def init(config): logger.exception(exc) sys.exit(1) - # download crdentials file based on selected mode of authentication + # download credentials file based on selected mode of authentication download_creds_file() diff --git a/schematic/configuration/configuration.py b/schematic/configuration/configuration.py index a2348128d..e48cd1483 100644 --- a/schematic/configuration/configuration.py +++ b/schematic/configuration/configuration.py @@ -143,12 +143,12 @@ def synapse_master_fileview_id(self, synapse_id: str) -> None: self._synapse_config.master_fileview_id = synapse_id @property - def synapse_manifest_folder(self) -> str: + def manifest_folder(self) -> str: """ Returns: str: Location where manifests will saved to """ - return self._synapse_config.manifest_folder + return self._manifest_config.manifest_folder @property def manifest_title(self) -> str: diff --git a/schematic/configuration/dataclasses.py b/schematic/configuration/dataclasses.py index 3e1a31fe1..5f06d1c1d 100644 --- a/schematic/configuration/dataclasses.py +++ b/schematic/configuration/dataclasses.py @@ -15,13 +15,11 @@ class SynapseConfig: config_basename: Path to the synapse config file, either absolute or relative to this file manifest_basename: the name of downloaded manifest files master_fileview_id: Synapse ID of the file view listing all project data assets. - manifest_folder: name of the folder manifests will be saved to locally """ config: str = ".synapseConfig" manifest_basename: str = "synapse_storage_manifest" master_fileview_id: str = "syn23643253" - manifest_folder: str = "manifests" @validator("master_fileview_id") @classmethod @@ -41,7 +39,7 @@ def validate_synapse_id(cls, value: str) -> str: raise ValueError(f"{value} is not a valid Synapse id") return value - @validator("config", "manifest_basename", "manifest_folder") + @validator("config", "manifest_basename") @classmethod def validate_string_is_not_empty(cls, value: str) -> str: """Check if string is not empty(has at least one char) @@ -63,15 +61,16 @@ def validate_string_is_not_empty(cls, value: str) -> str: @dataclass(config=pydantic_config) class ManifestConfig: """ + manifest_folder: name of the folder manifests will be saved to locally title: Title or title prefix given to generated manifest(s) data_type: Data types of manifests to be generated or data type (singular) to validate manifest against """ - + manifest_folder: str = "manifests" title: str = "example" data_type: list[str] = field(default_factory=lambda: ["Biospecimen", "Patient"]) - @validator("title") + @validator("title", "manifest_folder") @classmethod def validate_string_is_not_empty(cls, value: str) -> str: """Check if string is not empty(has at least one char) diff --git a/schematic/manifest/commands.py b/schematic/manifest/commands.py index 232973b57..2bdcab158 100644 --- a/schematic/manifest/commands.py +++ b/schematic/manifest/commands.py @@ -1,24 +1,23 @@ import os import logging from pathlib import Path -import click -import click_log -import logging import sys from typing import List +import click +import click_log from schematic.manifest.generator import ManifestGenerator from schematic.utils.cli_utils import log_value_from_config, query_dict, parse_synIDs from schematic.help import manifest_commands from schematic.schemas.generator import SchemaGenerator -from 
schematic.utils.google_api_utils import export_manifest_csv, export_manifest_excel, export_manifest_drive_service +from schematic.utils.google_api_utils import export_manifest_csv from schematic.store.synapse import SynapseStorage +from schematic.configuration.configuration import CONFIG logger = logging.getLogger('schematic') click_log.basic_config(logger) CONTEXT_SETTINGS = dict(help_option_names=["--help", "-h"]) # help options -from schematic.configuration.configuration import CONFIG # invoke_without_command=True -> forces the application not to show aids before losing them with a --h @click.group(context_settings=CONTEXT_SETTINGS, invoke_without_command=True) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index 2fb21a9ee..2b586b2aa 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -81,7 +81,7 @@ def _download_manifest_to_folder(self) -> File: os.mkdir("/var/tmp/temp_manifest_download") download_location = create_temp_folder(temporary_manifest_storage) else: - download_location=CONFIG.synapse_manifest_folder + download_location=CONFIG.manifest_folder manifest_data = self.syn.get( self.manifest_id, diff --git a/tests/data/test_configs/default_config.yml b/tests/data/test_configs/default_config.yml index e85127b1a..6775b569a 100644 --- a/tests/data/test_configs/default_config.yml +++ b/tests/data/test_configs/default_config.yml @@ -4,9 +4,9 @@ asset_store: config: ".synapseConfig" manifest_basename: 'synapse_storage_manifest' master_fileview_id: 'syn23643253' - manifest_folder: 'manifests' manifest: + manifest_folder: 'manifests' title: 'example' data_type: - 'Biospecimen' diff --git a/tests/data/test_configs/valid_config.yml b/tests/data/test_configs/valid_config.yml index 7cab11352..3e340721c 100644 --- a/tests/data/test_configs/valid_config.yml +++ b/tests/data/test_configs/valid_config.yml @@ -5,9 +5,9 @@ asset_store: config: "file_name" manifest_basename: "file_name" master_fileview_id: "syn1" - manifest_folder: "folder_name" manifest: + manifest_folder: "folder_name" title: "title" data_type: - "data_type" diff --git a/tests/test_api.py b/tests/test_api.py index 62cc35a63..012450a77 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -599,7 +599,7 @@ def test_manifest_download(self, config: Configuration, client, syn_token, manif assert response_dta[0]["Component"] == expected_component current_work_dir = os.getcwd() - folder_test_manifests = config.synapse_manifest_folder + folder_test_manifests = config.manifest_folder folder_dir = os.path.join(current_work_dir, folder_test_manifests) # if a manfiest gets renamed, get new manifest file path diff --git a/tests/test_cli.py b/tests/test_cli.py index 2f9eb224d..e9885585f 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -88,7 +88,7 @@ def test_get_example_manifest_excel(self, runner, helpers, config: Configuration output_path = helpers.get_data_path("test.xlsx") result = runner.invoke( - manifest, ["--config", "/home/alamb/repos/schematic/tests/data/test_config.yml", "get", "--data_type", "Patient", "--jsonld", data_model_jsonld, "--output_xlsx", output_path] + manifest, ["--config", config.config_path, "get", "--data_type", "Patient", "--jsonld", data_model_jsonld, "--output_xlsx", output_path] ) assert result.exit_code == 0 diff --git a/tests/test_configuration.py b/tests/test_configuration.py index 03f22ad56..b2fd59a7a 100644 --- a/tests/test_configuration.py +++ b/tests/test_configuration.py @@ -24,7 +24,6 @@ def test_synapse_config(self) -> None: config="file_name", 
manifest_basename="file_name", master_fileview_id="syn1", - manifest_folder="folder_name", ), SynapseConfig, ) @@ -34,7 +33,6 @@ def test_synapse_config(self) -> None: config=None, manifest_basename="file_name", master_fileview_id="syn1", - manifest_folder="folder_name", ) with pytest.raises(ValidationError): @@ -42,7 +40,6 @@ def test_synapse_config(self) -> None: config="file_name", manifest_basename="file_name", master_fileview_id="syn", - manifest_folder="folder_name", ) with pytest.raises(ValidationError): @@ -50,7 +47,6 @@ def test_synapse_config(self) -> None: config="", manifest_basename="file_name", master_fileview_id="syn", - manifest_folder="folder_name", ) def test_manifest_config(self) -> None: @@ -117,7 +113,7 @@ def test_init(self) -> None: assert os.path.basename(config.synapse_configuration_path) == ".synapseConfig" assert config.synapse_manifest_basename == "synapse_storage_manifest" assert config.synapse_master_fileview_id == "syn23643253" - assert config.synapse_manifest_folder == "manifests" + assert config.manifest_folder == "manifests" assert config.manifest_title == "example" assert config.manifest_data_type == ["Biospecimen", "Patient"] assert config.model_location == "tests/data/example.model.jsonld" @@ -155,7 +151,7 @@ def test_load_config1(self) -> None: assert os.path.basename(config.synapse_configuration_path) == ".synapseConfig" assert config.synapse_manifest_basename == "synapse_storage_manifest" assert config.synapse_master_fileview_id == "syn23643253" - assert config.synapse_manifest_folder == "manifests" + assert config.manifest_folder == "manifests" assert config.manifest_title == "example" assert config.manifest_data_type == ["Biospecimen", "Patient"] assert config.model_location == "tests/data/example.model.jsonld" @@ -185,7 +181,7 @@ def test_load_config2(self) -> None: assert os.path.basename(config.synapse_configuration_path) == "file_name" assert config.synapse_manifest_basename == "file_name" assert config.synapse_master_fileview_id == "syn1" - assert config.synapse_manifest_folder == "folder_name" + assert config.manifest_folder == "folder_name" assert config.manifest_title == "title" assert config.manifest_data_type == ["data_type"] assert config.model_location == "model.jsonld" diff --git a/tests/test_store.py b/tests/test_store.py index 1e9102df6..0040822a6 100644 --- a/tests/test_store.py +++ b/tests/test_store.py @@ -14,7 +14,6 @@ from schematic.store.synapse import SynapseStorage, DatasetFileView, ManifestDownload from schematic.schemas.generator import SchemaGenerator from synapseclient.core.exceptions import SynapseHTTPError - from schematic.configuration.configuration import Configuration logging.basicConfig(level=logging.DEBUG) From b859e564e9b29fb8f4b46b63f7b9a4d1000824b4 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Thu, 8 Jun 2023 08:08:02 -0700 Subject: [PATCH 056/135] fix test so it works with configuration.py --- tests/test_manifest.py | 35 +++++++---------------------------- 1 file changed, 7 insertions(+), 28 deletions(-) diff --git a/tests/test_manifest.py b/tests/test_manifest.py index 204f922c0..a145f6426 100644 --- a/tests/test_manifest.py +++ b/tests/test_manifest.py @@ -194,40 +194,19 @@ def test_get_manifest_excel(self, helpers, sheet_url, output_format, dataset_id) os.remove(manifest) # test all the functions used under get_manifest - @pytest.mark.parametrize("master_template_id", [None, "mock_master_template_id"]) - def test_create_empty_manifest_spreadsheet(self, config, simple_manifest_generator, 
master_template_id): + def test_create_empty_manifest_spreadsheet(self, simple_manifest_generator): ''' - Create an empty manifest spreadsheet regardless if master_template_id is provided - Note: _create_empty_manifest_spreadsheet calls _gdrive_copy_file. If there's no template id provided in config, this function will create a new manifest + Create an empty manifest spreadsheet. + Note: _create_empty_manifest_spreadsheet calls _gdrive_copy_file. ''' generator = simple_manifest_generator title="Example" - if master_template_id: - # mock _gdrive_copy_file function - config["style"]["google_manifest"]["master_template_id"] = master_template_id - with patch('schematic.manifest.generator.ManifestGenerator._gdrive_copy_file', return_value="mock google sheet id") as MockClass: + # mock _gdrive_copy_file function + with patch('schematic.manifest.generator.ManifestGenerator._gdrive_copy_file', return_value="mock google sheet id"): + spreadsheet_id = generator._create_empty_manifest_spreadsheet(title=title) + assert spreadsheet_id == "mock google sheet id" - spreadsheet_id = generator._create_empty_manifest_spreadsheet(title=title) - assert spreadsheet_id == "mock google sheet id" - - else: - config["style"]["google_manifest"]["master_template_id"] = "" - - mock_spreadsheet = Mock() - mock_execute = Mock() - - - # Chain the mocks together - mock_spreadsheet.create.return_value = mock_spreadsheet - mock_spreadsheet.execute.return_value = mock_execute - mock_execute.get.return_value = "mock id" - mock_create = Mock(return_value=mock_spreadsheet) - - with patch.object(generator.sheet_service, "spreadsheets", mock_create): - - spreadsheet_id = generator._create_empty_manifest_spreadsheet(title) - assert spreadsheet_id == "mock id" @pytest.mark.parametrize("schema_path_provided", [True, False]) def test_get_json_schema(self, simple_manifest_generator, helpers, schema_path_provided): From ba7620c23b77b9a754fb40d6947b79f8a7f21790 Mon Sep 17 00:00:00 2001 From: linglp Date: Thu, 8 Jun 2023 11:48:29 -0400 Subject: [PATCH 057/135] profile function --- schematic/store/synapse.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index 3c6f99516..e49ba1cc3 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -54,6 +54,8 @@ from schematic import CONFIG +from schematic.utils.general import Profile + logger = logging.getLogger("Synapse storage") @dataclass @@ -2231,6 +2233,7 @@ def upsertTable(self, sg: SchemaGenerator,): except(SynapseHTTPError) as ex: # If error is raised because Table has old `Uuid` column and not new `Id` column, then handle and re-attempt upload if 'Id is not a valid column name or id' in str(ex): + print('triggering this function call that I want to test') self._update_table_uuid_column(sg) synapseDB.upsert_table_rows(table_name=self.tableName, data=self.tableToLoad) # Raise if other error @@ -2238,7 +2241,8 @@ def upsertTable(self, sg: SchemaGenerator,): raise ex return self.existingTableId - + + @Profile def _update_table_uuid_column(self, sg: SchemaGenerator,) -> None: """Removes the `Uuid` column when present, and relpaces with an `Id` column Used to enable backwards compatability for manifests using the old `Uuid` convention From 80bf85301878920dff8d6fc4ff02b19557b9ec77 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Thu, 8 Jun 2023 08:49:40 -0700 Subject: [PATCH 058/135] fix readme --- README.md | 64 +++++++++++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 58 
insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index b288729e6..3eb9e1d3d 100644 --- a/README.md +++ b/README.md @@ -88,17 +88,69 @@ editor of your choice and edit the `username` and `authtoken` attribute under th Configure config.yml File -There are some defaults in schematic that can be configured. These fields are in ``config_example.yml``. If you want to change any of these copy ``config_example.yml`` to ``config.yml``, change any fields you want to, and remove any fields you don't. +There are some defaults in schematic that can be configured. These fields are in ``config_example.yml``: + +```text + +# This is an example config for Schematic. +# All listed values are those that are the default if a config is not used. +# Save this as config.yml, this will be gitignored. +# Remove any fields in the config you don't want to change +# Change the values of any fields you do want to change + + +# This describes where assets such as manifests are stored +asset_store: + # This is when assets are stored in a synapse project + synapse: + # Synapse ID of the file view listing all project data assets. + master_fileview_id: "syn23643253" + # Path to the synapse config file, either absolute or relative to this file + config: ".synapseConfig" + # Base name that manifest files will be saved as + manifest_basename: "synapse_storage_manifest" + +# This describes information about manifests as it relates to generation and validation +manifest: + # Location where manifests will saved to + manifest_folder: "manifests" + # Title or title prefix given to generated manifest(s) + title: "example" + # Data types of manifests to be generated or data type (singular) to validate manifest against + data_type: + - "Biospecimen" + - "Patient" + +# Describes the location of your schema +model: + # Location of your schema jsonld, it must be a path relative to this file or absolute + location: "tests/data/example.model.jsonld" + +# This section is for using google sheets with Schematic +google_sheets: + # The Synapse id of the Google service account credentials. + service_acct_creds_synapse_id: "syn25171627" + # Path to the synapse config file, either absolute or relative to this file + service_acct_creds: "schematic_service_account_creds.json" + # When doing google sheet validation (regex match) with the validation rules. + # true is alerting the user and not allowing entry of bad values. + # false is warning but allowing the entry on to the sheet. + strict_validation: true +``` + +If you want to change any of these copy ``config_example.yml`` to ``config.yml``, change any fields you want to, and remove any fields you don't. For example if you wanted to change the folder where manifests are downloaded your config should look like: - asset_store: - synapse: - manifest_folder: "manifest_folder" +```text + +manifest: + manifest_folder: "my_manifest_folder_path" +``` -*Note*: `config.yml` is ignored by git. +_Note_: `config.yml` is ignored by git. -*Note*: Paths can be specified relative to the `config.yml` file or as absolute paths. +_Note_: Paths can be specified relative to the `config.yml` file or as absolute paths. 6. 
Login to Synapse by using the command line On the CLI in your virtual environment, run the following command: From 90d59a305ad54da8a294e32f639341492db5a68a Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Thu, 8 Jun 2023 08:53:40 -0700 Subject: [PATCH 059/135] ran balck --- schematic/configuration/dataclasses.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/schematic/configuration/dataclasses.py b/schematic/configuration/dataclasses.py index 5f06d1c1d..7fbc7df57 100644 --- a/schematic/configuration/dataclasses.py +++ b/schematic/configuration/dataclasses.py @@ -66,11 +66,12 @@ class ManifestConfig: data_type: Data types of manifests to be generated or data type (singular) to validate manifest against """ + manifest_folder: str = "manifests" title: str = "example" data_type: list[str] = field(default_factory=lambda: ["Biospecimen", "Patient"]) - @validator("title", "manifest_folder") + @validator("title", "manifest_folder") @classmethod def validate_string_is_not_empty(cls, value: str) -> str: """Check if string is not empty(has at least one char) From b6488a3410e368d1fdffc6aada5a46ba56355551 Mon Sep 17 00:00:00 2001 From: linglp Date: Thu, 8 Jun 2023 12:57:19 -0400 Subject: [PATCH 060/135] save triggering profile --- schematic/store/synapse.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index e49ba1cc3..820e8d6f2 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -54,7 +54,7 @@ from schematic import CONFIG -from schematic.utils.general import Profile +from schematic.utils.general import profile logger = logging.getLogger("Synapse storage") @@ -2242,7 +2242,7 @@ def upsertTable(self, sg: SchemaGenerator,): return self.existingTableId - @Profile + @profile(sort_by='cumulative', strip_dirs=True) def _update_table_uuid_column(self, sg: SchemaGenerator,) -> None: """Removes the `Uuid` column when present, and relpaces with an `Id` column Used to enable backwards compatability for manifests using the old `Uuid` convention From d3b29a4d3f9300e68c4e8bc909553a672271bc88 Mon Sep 17 00:00:00 2001 From: linglp Date: Thu, 8 Jun 2023 13:23:48 -0400 Subject: [PATCH 061/135] replace synapse api call with python client --- schematic/store/synapse.py | 23 ++++------------------- 1 file changed, 4 insertions(+), 19 deletions(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index 820e8d6f2..0d379020a 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -2285,12 +2285,9 @@ def _update_table_uuid_column(self, sg: SchemaGenerator,) -> None: "maximumListLength": 1, } - # Send POST /column request to define new column and get new column ID - newColResponse = self.synStore.send_api_request( - request_type = "restPOST", - uri = "https://repo-prod.prod.sagebase.org/repo/v1/column", - body = columnModelDict, - ) + new_col = Column(name='Id', columnType='STRING', maximumSize=64, maximumListLength=1) + newColResponse = self.synStore.syn.store(new_col) + # Define columnChange body columnChangeDict = { @@ -2304,19 +2301,7 @@ def _update_table_uuid_column(self, sg: SchemaGenerator,) -> None: ] } - # Build body for POST request - schemaChangeBody = { - "entityId": self.existingTableId, - "changes": [columnChangeDict], - } - - # Send POST request to change column name - schemaChangeResponse = self.synStore.send_api_request( - request_type = "restPOST", - uri = 
f"https://repo-prod.prod.sagebase.org/repo/v1/entity/{self.existingTableId}/table/transaction/async/start", - body = schemaChangeBody, - ) - # Exit iteration; only concerned with `Uuid` column + self.synStore.syn._async_table_update(table=self.existingTableId, changes=[columnChangeDict], wait=False) break return From 36d983e685e59e11b554dc680e76e508b858acfd Mon Sep 17 00:00:00 2001 From: linglp Date: Thu, 8 Jun 2023 13:50:33 -0400 Subject: [PATCH 062/135] remove print --- schematic/store/synapse.py | 1 - 1 file changed, 1 deletion(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index 0d379020a..b0bd5dd67 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -2233,7 +2233,6 @@ def upsertTable(self, sg: SchemaGenerator,): except(SynapseHTTPError) as ex: # If error is raised because Table has old `Uuid` column and not new `Id` column, then handle and re-attempt upload if 'Id is not a valid column name or id' in str(ex): - print('triggering this function call that I want to test') self._update_table_uuid_column(sg) synapseDB.upsert_table_rows(table_name=self.tableName, data=self.tableToLoad) # Raise if other error From a015e849617b1d69d380bc05b9d8d068fafe4e77 Mon Sep 17 00:00:00 2001 From: linglp Date: Thu, 8 Jun 2023 14:59:24 -0400 Subject: [PATCH 063/135] rename column --- schematic/store/synapse.py | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index b0bd5dd67..4e5e5d577 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -2275,17 +2275,8 @@ def _update_table_uuid_column(self, sg: SchemaGenerator,) -> None: else: # Build ColumnModel that will be used for new column - columnModelDict = { - "id": None, - "name": "Id", - "defaultValue": None, - "columnType": "STRING", - "maximumSize": 64, - "maximumListLength": 1, - } - - new_col = Column(name='Id', columnType='STRING', maximumSize=64, maximumListLength=1) - newColResponse = self.synStore.syn.store(new_col) + id_column = Column(name='Id', columnType='STRING', maximumSize=64, defaultValue=None, maximumListLength=1) + new_col_response = self.synStore.syn.store(id_column) # Define columnChange body @@ -2295,7 +2286,7 @@ def _update_table_uuid_column(self, sg: SchemaGenerator,) -> None: "changes": [ { "oldColumnId": col['id'], - "newColumnId": newColResponse['id'], + "newColumnId": new_col_response['id'], } ] } From 59a55ddc5a9e771ec9d417a3e482748347eea7d7 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Wed, 14 Jun 2023 08:43:02 -0700 Subject: [PATCH 064/135] moved normalize path from configuration to utils --- schematic/configuration/configuration.py | 21 ++++++------------ schematic/utils/__init__.py | 27 ------------------------ schematic/utils/general.py | 16 ++++++++++++++ 3 files changed, 22 insertions(+), 42 deletions(-) diff --git a/schematic/configuration/configuration.py b/schematic/configuration/configuration.py index e48cd1483..ac0a32166 100644 --- a/schematic/configuration/configuration.py +++ b/schematic/configuration/configuration.py @@ -3,6 +3,7 @@ from typing import Optional, Any import os import yaml +from schematic.utils.general import normalize_path from .dataclasses import ( SynapseConfig, ManifestConfig, @@ -11,6 +12,7 @@ ) + class ConfigNonAllowedFieldError(Exception): """Raised when a user submitted config file contains non allowed fields""" @@ -96,26 +98,13 @@ def _set_asset_store(self, config: dict[str, Any]) -> None: ) self._synapse_config = 
SynapseConfig(**config["synapse"]) - def _normalize_path(self, path: str) -> str: - """ - - Args: - path (str): The path to normalize - - Returns: - str: The normalized path - """ - if not os.path.isabs(path): - path = os.path.join(self._parent_directory, path) - return os.path.normpath(path) - @property def synapse_configuration_path(self) -> str: """ Returns: str: The path to the synapse configuration file """ - return self._normalize_path(self._synapse_config.config) + return normalize_path(self._synapse_config.config, self._parent_directory) @property def synapse_manifest_basename(self) -> str: @@ -189,7 +178,9 @@ def service_account_credentials_path(self) -> str: Returns: str: The path of the Google service account credentials. """ - return self._normalize_path(self._google_sheets_config.service_acct_creds) + return normalize_path( + self._google_sheets_config.service_acct_creds, self._parent_directory + ) @property def google_sheets_master_template_id(self) -> str: diff --git a/schematic/utils/__init__.py b/schematic/utils/__init__.py index 73e77e0d8..e69de29bb 100644 --- a/schematic/utils/__init__.py +++ b/schematic/utils/__init__.py @@ -1,27 +0,0 @@ -from schematic.utils.curie_utils import ( - expand_curie_to_uri, - expand_curies_in_schema, - extract_name_from_uri_or_curie, - uri2label, -) -from schematic.utils.df_utils import update_df -from schematic.utils.general import dict2list, find_duplicates, str2list, unlist -from schematic.utils.google_api_utils import ( - download_creds_file, - execute_google_api_requests, - export_manifest_csv, - export_manifest_excel, -) -from schematic.utils.io_utils import ( - export_json, - load_default, - load_json, - load_schemaorg, -) -from schematic.utils.schema_utils import load_schema_into_networkx -from schematic.utils.validate_utils import ( - validate_class_schema, - validate_property_schema, - validate_schema, -) -from schematic.utils.viz_utils import visualize diff --git a/schematic/utils/general.py b/schematic/utils/general.py index 6c5d27b73..59edf4243 100644 --- a/schematic/utils/general.py +++ b/schematic/utils/general.py @@ -183,3 +183,19 @@ def wrapper(*args, **kwargs): return wrapper return inner + +def normalize_path(path: str, parent_folder: str) -> str: + """ + Normalizes a path. + If the path is relative, the parent_folder is added to make it an absolute path. + + Args: + path (str): The path to the file to normalize. + parent_folder (str): The folder the file is in. + + Returns: + str: The normalized path. + """ + if not os.path.isabs(path): + path = os.path.join(parent_folder, path) + return os.path.normpath(path) From 605ee109969e038ab6739fcb18dd90db0605c198 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Wed, 14 Jun 2023 08:47:00 -0700 Subject: [PATCH 065/135] add comment in example config to clear up removing headers --- config_example.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/config_example.yml b/config_example.yml index b08cc4a90..245b8fefe 100644 --- a/config_example.yml +++ b/config_example.yml @@ -2,6 +2,7 @@ # All listed values are those that are the default if a config is not used. # Save this as config.yml, this will be gitignored. # Remove any fields in the config you don't want to change +# If you remove all fields from a section, the entire section should be removed including the header. 
# Change the values of any fields you do want to change From ca4b215bcea17d3b8d02faa800bb889a88f2f158 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Wed, 14 Jun 2023 08:48:54 -0700 Subject: [PATCH 066/135] ran black --- schematic/configuration/configuration.py | 1 - 1 file changed, 1 deletion(-) diff --git a/schematic/configuration/configuration.py b/schematic/configuration/configuration.py index ac0a32166..c31007672 100644 --- a/schematic/configuration/configuration.py +++ b/schematic/configuration/configuration.py @@ -12,7 +12,6 @@ ) - class ConfigNonAllowedFieldError(Exception): """Raised when a user submitted config file contains non allowed fields""" From 4f79b0ab3133dc2c4c08f0172fb68f2d337a9536 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 15 Jun 2023 15:49:20 -0700 Subject: [PATCH 067/135] fix load_df so it more accurately captures intergers --- schematic/utils/df_utils.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/schematic/utils/df_utils.py b/schematic/utils/df_utils.py index 93325ced9..789fb4881 100644 --- a/schematic/utils/df_utils.py +++ b/schematic/utils/df_utils.py @@ -60,6 +60,9 @@ def load_df(file_path, preserve_raw_input=True, data_model=False, **load_args): pandarallel.initialize(verbose = 1) ints = org_df.parallel_applymap(lambda x: np.int64(x) if str.isdigit(x) else False, na_action='ignore').fillna(False) + # Identify cells converted to intergers + ints_tf_df = ints.applymap(pd.api.types.is_integer) + # convert strings to numerical dtype (float) if possible, preserve non-numerical strings for col in org_df.columns: float_df[col]=pd.to_numeric(float_df[col], errors='coerce') @@ -68,9 +71,9 @@ def load_df(file_path, preserve_raw_input=True, data_model=False, **load_args): # Trim nans and empty rows and columns processed_df = trim_commas_df(float_df) - + # Store values that were converted to type int in the final dataframe - processed_df=processed_df.mask(ints != False, other = ints) + processed_df=processed_df.mask(ints_tf_df, other = ints) # log manifest load and processing time logger.debug(f"Load Elapsed time {perf_counter()-t_load_df}") From be57506fdb3387a84b52131505dae1c8cba587b0 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 16 Jun 2023 10:23:48 -0700 Subject: [PATCH 068/135] update model, test manifest, and test_validation.py to check that 0s are converted properly --- tests/data/example.model.csv | 87 ++++++++++--------- tests/data/example.model.jsonld | 22 +++++ .../mock_manifests/Invalid_Test_Manifest.csv | 8 +- .../mock_manifests/Valid_Test_Manifest.csv | 10 +-- tests/test_validation.py | 12 ++- 5 files changed, 86 insertions(+), 53 deletions(-) diff --git a/tests/data/example.model.csv b/tests/data/example.model.csv index 1f6c9589c..f15db469c 100644 --- a/tests/data/example.model.csv +++ b/tests/data/example.model.csv @@ -1,43 +1,44 @@ -Attribute,Description,Valid Values,DependsOn,Properties,Required,Parent,DependsOn Component,Source,Validation Rules -Patient,,,"Patient ID, Sex, Year of Birth, Diagnosis, Component",,FALSE,DataType,,, -Patient ID,,,,,TRUE,DataProperty,,, -Sex,,"Female, Male, Other",,,TRUE,DataProperty,,, -Year of Birth,,,,,FALSE,DataProperty,,, -Diagnosis,,"Healthy, Cancer",,,TRUE,DataProperty,,, -Cancer,,,"Cancer Type, Family History",,FALSE,ValidValue,,, -Cancer Type,,"Breast, Colorectal, Lung, Prostate, Skin",,,TRUE,DataProperty,,, -Family History,,"Breast, Colorectal, Lung, Prostate, Skin",,,TRUE,DataProperty,,,list strict -Biospecimen,,,"Sample ID, Patient ID, Tissue Status, 
Component",,FALSE,DataType,Patient,, -Sample ID,,,,,TRUE,DataProperty,,, -Tissue Status,,"Healthy, Malignant",,,TRUE,DataProperty,,, -Bulk RNA-seq Assay,,,"Filename, Sample ID, File Format, Component",,FALSE,DataType,Biospecimen,, -Filename,,,,,TRUE,DataProperty,,, -File Format,,"FASTQ, BAM, CRAM, CSV/TSV",,,TRUE,DataProperty,,, -BAM,,,Genome Build,,FALSE,ValidValue,,, -CRAM,,,"Genome Build, Genome FASTA",,FALSE,ValidValue,,, -CSV/TSV,,,Genome Build,,FALSE,ValidValue,,, -Genome Build,,"GRCh37, GRCh38, GRCm38, GRCm39",,,TRUE,DataProperty,,, -Genome FASTA,,,,,TRUE,DataProperty,,, -MockComponent,,,"Component, Check List, Check Regex List, Check Regex Single, Check Regex Format, Check Num, Check Float, Check Int, Check String, Check URL,Check Match at Least, Check Match at Least values, Check Match Exactly, Check Match Exactly values, Check Recommended, Check Ages, Check Unique, Check Range, Check Date, Check NA",,FALSE,DataType,,, -Check List,,"ab, cd, ef, gh",,,TRUE,DataProperty,,,list strict -Check Regex List,,,,,TRUE,DataProperty,,,list strict::regex match [a-f] -Check Regex Single,,,,,TRUE,DataProperty,,,regex search [a-f] -Check Regex Format,,,,,TRUE,DataProperty,,,regex match [a-f] -Check Num,,,,,TRUE,DataProperty,,,num -Check Float,,,,,TRUE,DataProperty,,,float -Check Int,,,,,TRUE,DataProperty,,,int -Check String,,,,,TRUE,DataProperty,,,str -Check URL,,,,,TRUE,DataProperty,,,url -Check Match at Least,,,,,TRUE,DataProperty,,,matchAtLeastOne Patient.PatientID set -Check Match Exactly,,,,,TRUE,DataProperty,,,matchExactlyOne MockComponent.checkMatchExactly set -Check Match at Least values,,,,,TRUE,DataProperty,,,matchAtLeastOne MockComponent.checkMatchatLeastvalues value -Check Match Exactly values,,,,,TRUE,DataProperty,,,matchExactlyOne MockComponent.checkMatchExactlyvalues value -Check Recommended,,,,,FALSE,DataProperty,,,recommended -Check Ages,,,,,TRUE,DataProperty,,,protectAges -Check Unique,,,,,TRUE,DataProperty,,,unique error -Check Range,,,,,TRUE,DataProperty,,,inRange 50 100 error -Check Date,,,,,TRUE,DataProperty,,,date -Check NA,,,,,TRUE,DataProperty,,,int::IsNA -MockRDB,,,"Component, MockRDB_id, SourceManifest",,FALSE,DataType,,, -MockRDB_id,,,,,TRUE,DataProperty,,,int -SourceManifest,,,,,TRUE,DataProperty,,, +Attribute,Description,Valid Values,DependsOn,Properties,Required,Parent,DependsOn Component,Source,Validation Rules +Patient,,,"Patient ID, Sex, Year of Birth, Diagnosis, Component",,FALSE,DataType,,, +Patient ID,,,,,TRUE,DataProperty,,, +Sex,,"Female, Male, Other",,,TRUE,DataProperty,,, +Year of Birth,,,,,FALSE,DataProperty,,, +Diagnosis,,"Healthy, Cancer",,,TRUE,DataProperty,,, +Cancer,,,"Cancer Type, Family History",,FALSE,ValidValue,,, +Cancer Type,,"Breast, Colorectal, Lung, Prostate, Skin",,,TRUE,DataProperty,,, +Family History,,"Breast, Colorectal, Lung, Prostate, Skin",,,TRUE,DataProperty,,,list strict +Biospecimen,,,"Sample ID, Patient ID, Tissue Status, Component",,FALSE,DataType,Patient,, +Sample ID,,,,,TRUE,DataProperty,,, +Tissue Status,,"Healthy, Malignant",,,TRUE,DataProperty,,, +Bulk RNA-seq Assay,,,"Filename, Sample ID, File Format, Component",,FALSE,DataType,Biospecimen,, +Filename,,,,,TRUE,DataProperty,,, +File Format,,"FASTQ, BAM, CRAM, CSV/TSV",,,TRUE,DataProperty,,, +BAM,,,Genome Build,,FALSE,ValidValue,,, +CRAM,,,"Genome Build, Genome FASTA",,FALSE,ValidValue,,, +CSV/TSV,,,Genome Build,,FALSE,ValidValue,,, +Genome Build,,"GRCh37, GRCh38, GRCm38, GRCm39",,,TRUE,DataProperty,,, +Genome FASTA,,,,,TRUE,DataProperty,,, +MockComponent,,,"Component, Check 
List, Check Regex List, Check Regex Single, Check Regex Format, Check Regex Integer, Check Num, Check Float, Check Int, Check String, Check URL,Check Match at Least, Check Match at Least values, Check Match Exactly, Check Match Exactly values, Check Recommended, Check Ages, Check Unique, Check Range, Check Date, Check NA",,FALSE,DataType,,, +Check List,,"ab, cd, ef, gh",,,TRUE,DataProperty,,,list strict +Check Regex List,,,,,TRUE,DataProperty,,,list strict::regex match [a-f] +Check Regex Single,,,,,TRUE,DataProperty,,,regex search [a-f] +Check Regex Format,,,,,TRUE,DataProperty,,,regex match [a-f] +Check Regex Integer,,,,,TRUE,DataProperty,,,regex search ^\d+$ +Check Num,,,,,TRUE,DataProperty,,,num +Check Float,,,,,TRUE,DataProperty,,,float +Check Int,,,,,TRUE,DataProperty,,,int +Check String,,,,,TRUE,DataProperty,,,str +Check URL,,,,,TRUE,DataProperty,,,url +Check Match at Least,,,,,TRUE,DataProperty,,,matchAtLeastOne Patient.PatientID set +Check Match Exactly,,,,,TRUE,DataProperty,,,matchExactlyOne MockComponent.checkMatchExactly set +Check Match at Least values,,,,,TRUE,DataProperty,,,matchAtLeastOne MockComponent.checkMatchatLeastvalues value +Check Match Exactly values,,,,,TRUE,DataProperty,,,matchExactlyOne MockComponent.checkMatchExactlyvalues value +Check Recommended,,,,,FALSE,DataProperty,,,recommended +Check Ages,,,,,TRUE,DataProperty,,,protectAges +Check Unique,,,,,TRUE,DataProperty,,,unique error +Check Range,,,,,TRUE,DataProperty,,,inRange 50 100 error +Check Date,,,,,TRUE,DataProperty,,,date +Check NA,,,,,TRUE,DataProperty,,,int::IsNA +MockRDB,,,"Component, MockRDB_id, SourceManifest",,FALSE,DataType,,, +MockRDB_id,,,,,TRUE,DataProperty,,,int +SourceManifest,,,,,TRUE,DataProperty,,, \ No newline at end of file diff --git a/tests/data/example.model.jsonld b/tests/data/example.model.jsonld index 2d65b72bf..6f29cbf7b 100644 --- a/tests/data/example.model.jsonld +++ b/tests/data/example.model.jsonld @@ -2498,6 +2498,9 @@ { "@id": "bts:CheckRegexFormat" }, + { + "@id": "bts:CheckRegexInteger" + }, { "@id": "bts:CheckNum" }, @@ -2637,6 +2640,25 @@ "regex match [a-f]" ] }, + { + "@id": "bts:CheckRegexInteger", + "@type": "rdfs:Class", + "rdfs:comment": "TBD", + "rdfs:label": "CheckRegexInteger", + "rdfs:subClassOf": [ + { + "@id": "bts:DataProperty" + } + ], + "schema:isPartOf": { + "@id": "http://schema.biothings.io" + }, + "sms:displayName": "Check Regex Integer", + "sms:required": "sms:true", + "sms:validationRules": [ + "regex search ^\\d+$" + ] + }, { "@id": "bts:CheckNum", "@type": "rdfs:Class", diff --git a/tests/data/mock_manifests/Invalid_Test_Manifest.csv b/tests/data/mock_manifests/Invalid_Test_Manifest.csv index fcd84fa7f..ea0e7685c 100644 --- a/tests/data/mock_manifests/Invalid_Test_Manifest.csv +++ b/tests/data/mock_manifests/Invalid_Test_Manifest.csv @@ -1,4 +1,4 @@ -Component,Check List,Check Regex List,Check Regex Format,Check Regex Single,Check Num,Check Float,Check Int,Check String,Check URL,Check Match at Least,Check Match at Least values,Check Match Exactly,Check Match Exactly values,Check Recommended,Check Ages,Check Unique,Check Range,Check Date,Check NA -MockComponent,"ab,cd","ab,cd,ef",a,a,6,99.65,7,valid,https://www.google.com/,1738,1738,8085,98085,,6549,str1,70,32-984,7 -MockComponent,invalid list values,ab cd ef,m,q,c,99,5.63,94,http://googlef.com/,7163,51100,9965,71738,,32851,str1,30,notADate,9.5 -MockComponent,"ab,cd","ab,cd,ef",b,b,6.5,62.3,2,valid,https://github.com/Sage-Bionetworks/schematic,8085,8085,1738,210065,,6550,str1,90,84-43-094,Not 
Applicable +Component,Check List,Check Regex List,Check Regex Single,Check Regex Format,Check Regex Integer,Check Num,Check Float,Check Int,Check String,Check URL,Check Match at Least,Check Match at Least values,Check Match Exactly,Check Match Exactly values,Check Recommended,Check Ages,Check Unique,Check Range,Check Date,Check NA +MockComponent,"ab,cd","ab,cd,ef",a,a,5.4,6,99.65,7,valid,https://www.google.com/,1738,1738,8085,98085,,6549,str1,70,32-984,7 +MockComponent,invalid list values,ab cd ef,q,m,0,c,99,5.63,94,http://googlef.com/,7163,51100,9965,71738,,32851,str1,30,notADate,9.5 +MockComponent,"ab,cd","ab,cd,ef",b,b,683902,6.5,62.3,2,valid,https://github.com/Sage-Bionetworks/schematic,8085,8085,1738,210065,,6550,str1,90,84-43-094,Not Applicable \ No newline at end of file diff --git a/tests/data/mock_manifests/Valid_Test_Manifest.csv b/tests/data/mock_manifests/Valid_Test_Manifest.csv index a3d061026..c4b6fb01f 100644 --- a/tests/data/mock_manifests/Valid_Test_Manifest.csv +++ b/tests/data/mock_manifests/Valid_Test_Manifest.csv @@ -1,5 +1,5 @@ -Component,Check List,Check Regex List,Check Regex Single,Check Regex Format,Check Num,Check Float,Check Int,Check String,Check URL,Check Match at Least,Check Match at Least values,Check Match Exactly,Check Match Exactly values,Check Recommended,Check Ages,Check Unique,Check Range,Check Date,Check NA -MockComponent,"ab,cd","a,c,f",a,a,6,99.65,7,valid,https://www.google.com/,1738,1738,8085,8085,,6571,str1,75,10/21/2022,Not Applicable -MockComponent,"ab,cd","a,c,f",e,b,71,58.4,3,valid,https://www.google.com/,9965,9965,9965,9965,,6571,str2,80,October 21 2022,8 -MockComponent,"ab,cd","b,d,f",b,c,6.5,62.3,2,valid,https://www.google.com/,8085,8085,1738,1738,present,32849,str3,95,10/21/2022,Not Applicable -MockComponent,"ab,cd","b,d,f",b,c,6.5,62.3,2,valid,https://www.google.com/,79,79,7,7,,32849,str4,55,21/10/2022,695 +Component,Check List,Check Regex List,Check Regex Single,Check Regex Format,Check Regex Integer,Check Num,Check Float,Check Int,Check String,Check URL,Check Match at Least,Check Match at Least values,Check Match Exactly,Check Match Exactly values,Check Recommended,Check Ages,Check Unique,Check Range,Check Date,Check NA +MockComponent,"ab,cd","a,c,f",a,a,0,6,99.65,7,valid,https://www.google.com/,1738,1738,8085,8085,,6571,str1,75,10/21/2022,Not Applicable +MockComponent,"ab,cd","a,c,f",e,b,1234,71,58.4,3,valid,https://www.google.com/,9965,9965,9965,9965,,6571,str2,80,October 21 2022,8 +MockComponent,"ab,cd","b,d,f",b,c,683902,6.5,62.3,2,valid,https://www.google.com/,8085,8085,1738,1738,present,32849,str3,95,10/21/2022,Not Applicable +MockComponent,"ab,cd","b,d,f",b,c,0,6.5,62.3,2,valid,https://www.google.com/,79,79,7,7,,32849,str4,55,21/10/2022,695 \ No newline at end of file diff --git a/tests/test_validation.py b/tests/test_validation.py index 1f9e68e69..22b64199b 100644 --- a/tests/test_validation.py +++ b/tests/test_validation.py @@ -128,7 +128,17 @@ def test_invalid_manifest(self,helpers,sg,metadataModel): module_to_call = 'search', invalid_entry = 'q', sg = sg, - )[0] in errors + )[0] in errors + + assert GenerateError.generate_regex_error( + val_rule = 'regex', + reg_expression = '^\d+$', + row_num = '2', + attribute_name = 'Check Regex Integer', + module_to_call = 'search', + invalid_entry = '5.4', + sg = sg, + )[0] in errors assert GenerateError.generate_url_error( val_rule = 'url', From 66f61d2397d6074a7a7b821bc17d9cb1c19e8262 Mon Sep 17 00:00:00 2001 From: andrewelamb Date: Wed, 21 Jun 2023 12:19:04 -0700 Subject: [PATCH 
069/135] add missing import to test

---
 tests/test_api.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/tests/test_api.py b/tests/test_api.py
index f9e353766..63f14ccb0 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -12,10 +12,12 @@ import pandas as pd  # third party library import
 import pytest
+from schematic.configuration.configuration import Configuration
 from schematic.schemas.generator import \
     SchemaGenerator  # Local application/library specific imports.
 from schematic_api.api import create_app
+
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)

From 1680b3dd6e539f01a3daadf1d2de451c05c853c8 Mon Sep 17 00:00:00 2001
From: linglp
Date: Mon, 26 Jun 2023 10:12:34 -0400
Subject: [PATCH 070/135] modify tag

---
 .github/workflows/docker_build.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/docker_build.yml b/.github/workflows/docker_build.yml
index d2864fbcd..2ee27ec1f 100644
--- a/.github/workflows/docker_build.yml
+++ b/.github/workflows/docker_build.yml
@@ -8,6 +8,7 @@ on:
   push:
     tags:
       - 'v[0-9]+.[0-9]+.[0-9]+'
+      - '[0-9]+.[0-9]+.[0-9]+-[A-Za-z0-9]+'

 env:
   REGISTRY: ghcr.io

From bf489f0fa01f341b5f4fa4b6049515fbf81c0b1a Mon Sep 17 00:00:00 2001
From: linglp
Date: Mon, 26 Jun 2023 10:17:31 -0400
Subject: [PATCH 071/135] add v in tags regex

---
 .github/workflows/docker_build.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/docker_build.yml b/.github/workflows/docker_build.yml
index 2ee27ec1f..ec990ed4b 100644
--- a/.github/workflows/docker_build.yml
+++ b/.github/workflows/docker_build.yml
@@ -8,7 +8,7 @@ on:
   push:
     tags:
       - 'v[0-9]+.[0-9]+.[0-9]+'
-      - '[0-9]+.[0-9]+.[0-9]+-[A-Za-z0-9]+'
+      - 'v[0-9]+.[0-9]+.[0-9]+-[A-Za-z0-9]+'

 env:
   REGISTRY: ghcr.io

From 05179c307d74e83d7f3c66141b29c57000cf30db Mon Sep 17 00:00:00 2001
From: andrewelamb
Date: Tue, 27 Jun 2023 12:07:42 -0700
Subject: [PATCH 072/135] fix cli tests by using example config

---
 tests/test_cli.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/tests/test_cli.py b/tests/test_cli.py
index e9885585f..f3cd19a59 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -61,6 +61,7 @@ def test_schema_convert_cli(self, runner, helpers):
     @pytest.mark.google_credentials_needed
     def test_get_example_manifest_default(self, runner, helpers, config: Configuration, data_model_jsonld):
         output_path = helpers.get_data_path("example.Patient.manifest.csv")
+        config.load_config("config_example.yml")

         result = runner.invoke(
             manifest, ["--config", config.config_path, "get", "--data_type", "Patient", "--jsonld", data_model_jsonld]
@@ -75,6 +76,7 @@ def test_get_example_manifest_default(self, runner, helpers, config: Configurati
     @pytest.mark.google_credentials_needed
     def test_get_example_manifest_csv(self, runner, helpers, config: Configuration, data_model_jsonld):
         output_path = helpers.get_data_path("test.csv")
+        config.load_config("config_example.yml")

         result = runner.invoke(
             manifest, ["--config", config.config_path, "get", "--data_type", "Patient", "--jsonld", data_model_jsonld, "--output_csv", output_path]
@@ -86,6 +88,7 @@ def test_get_example_manifest_excel(self, runner, helpers, config: Configuration, data_model_jsonld):
         output_path = helpers.get_data_path("test.xlsx")
+        config.load_config("config_example.yml")

         result = runner.invoke(
             manifest, ["--config", config.config_path, "get", "--data_type", "Patient", "--jsonld",
data_model_jsonld, "--output_xlsx", output_path] From cfa59348dfa50fc5055fce741e31853be645122b Mon Sep 17 00:00:00 2001 From: linglp Date: Thu, 29 Jun 2023 12:29:34 -0400 Subject: [PATCH 073/135] modify parameters to prevent timeout --- certificate.conf | 4 +++- uwsgi-nginx-entrypoint.sh | 3 +++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/certificate.conf b/certificate.conf index a6d515072..eb5bf94d9 100644 --- a/certificate.conf +++ b/certificate.conf @@ -4,7 +4,9 @@ server { include /etc/nginx/conf.d/self-signed.conf; include /etc/nginx/conf.d/ssl-params.conf; server_name 127.0.0.1; - + proxy_read_timeout 300; + proxy_connect_timeout 300; + proxy_send_timeout 300; location / { try_files $uri @app; } diff --git a/uwsgi-nginx-entrypoint.sh b/uwsgi-nginx-entrypoint.sh index 6c568dd31..0fa2e6188 100644 --- a/uwsgi-nginx-entrypoint.sh +++ b/uwsgi-nginx-entrypoint.sh @@ -19,6 +19,9 @@ else content_server=$content_server" listen ${USE_LISTEN_PORT} default_server;\n" content_server=$content_server" listen [::]:${USE_LISTEN_PORT} default_server;\n" content_server=$content_server' server_name 127.0.0.1;\n' + content_server=$content_server' proxy_read_timeout 300;\n' + content_server=$content_server' proxy_connect_timeout 300;\n' + content_server=$content_server' proxy_send_timeout 300;\n' content_server=$content_server' location / {\n' content_server=$content_server' try_files $uri @app;\n' content_server=$content_server' }\n' From 003a29076dc793a6cb928ff130a6c5519f2e91cb Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 29 Jun 2023 12:02:43 -0700 Subject: [PATCH 074/135] only add validation rules to google sheets --- schematic/manifest/generator.py | 28 +++++++++++++++++----------- 1 file changed, 17 insertions(+), 11 deletions(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index a244bcd1e..8ef6abd99 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -1081,6 +1081,7 @@ def _create_requests_body( ordered_metadata_fields, json_schema, spreadsheet_id, + sheet_url, ): """Create and store all formatting changes for the google sheet to execute at once. @@ -1103,9 +1104,13 @@ def _create_requests_body( requests_body = {} requests_body["requests"] = [] for i, req in enumerate(ordered_metadata_fields[0]): - # Gather validation rules and valid values for attribute - validation_rules = self.sg.get_node_validation_rules(req) + # Gather validation rules and valid values for attribute, if using google sheets. + if sheet_url: + validation_rules = self.sg.get_node_validation_rules(req) + else: + validation_rules = "" + if validation_rules: requests_body =self._request_regex_match_vr_formatting( validation_rules, i, spreadsheet_id, requests_body @@ -1164,7 +1169,7 @@ def _create_requests_body( requests_body["requests"].append(borders_formatting) return requests_body - def _create_empty_gs(self, required_metadata_fields, json_schema, spreadsheet_id): + def _create_empty_gs(self, required_metadata_fields, json_schema, spreadsheet_id, sheet_url): """Generate requests to add columns and format the google sheet. 
Args: required_metadata_fields(dict): @@ -1194,6 +1199,7 @@ def _create_empty_gs(self, required_metadata_fields, json_schema, spreadsheet_id ordered_metadata_fields, json_schema, spreadsheet_id, + sheet_url, ) # Execute requests @@ -1236,7 +1242,7 @@ def _gather_all_fields(self, fields, json_schema): ) return required_metadata_fields - def get_empty_manifest(self, json_schema_filepath=None): + def get_empty_manifest(self, json_schema_filepath=None, sheet_url=None): """Create an empty manifest using specifications from the json schema. Args: @@ -1254,7 +1260,7 @@ def get_empty_manifest(self, json_schema_filepath=None): ) manifest_url = self._create_empty_gs( - required_metadata_fields, json_schema, spreadsheet_id + required_metadata_fields, json_schema, spreadsheet_id, sheet_url=sheet_url, ) return manifest_url @@ -1353,7 +1359,7 @@ def map_annotation_names_to_display_names( return annotations.rename(columns=label_map) def get_manifest_with_annotations( - self, annotations: pd.DataFrame + self, annotations: pd.DataFrame, sheet_url:bool, ) -> Tuple[ps.Spreadsheet, pd.DataFrame]: """Generate manifest, optionally with annotations (if requested). @@ -1378,7 +1384,7 @@ def get_manifest_with_annotations( self.additional_metadata = annotations_dict # Generate empty manifest using `additional_metadata` - manifest_url = self.get_empty_manifest() + manifest_url = self.get_empty_manifest(sheet_url) manifest_df = self.get_dataframe_by_url(manifest_url) # Annotations clashing with manifest attributes are skipped @@ -1489,7 +1495,7 @@ def get_manifest( # Handle case when no dataset ID is provided if not dataset_id: - manifest_url = self.get_empty_manifest(json_schema_filepath=json_schema) + manifest_url = self.get_empty_manifest(json_schema_filepath=json_schema, sheet_url=sheet_url) # if output_form parameter is set to "excel", return an excel spreadsheet if output_format == "excel": @@ -1514,7 +1520,7 @@ def get_manifest( manifest_record = store.updateDatasetManifestFiles(self.sg, datasetId = dataset_id, store = False) # get URL of an empty manifest file created based on schema component - empty_manifest_url = self.get_empty_manifest() + empty_manifest_url = self.get_empty_manifest(sheet_url=sheet_url) # Populate empty template with existing manifest if manifest_record: @@ -1545,7 +1551,7 @@ def get_manifest( # if there are no files with annotations just generate an empty manifest if annotations.empty: - manifest_url = self.get_empty_manifest() + manifest_url = self.get_empty_manifest(sheet_url=sheet_url) manifest_df = self.get_dataframe_by_url(manifest_url) else: # Subset columns if no interested in user-defined annotations and there are files present @@ -1553,7 +1559,7 @@ def get_manifest( annotations = annotations[["Filename", "eTag", "entityId"]] # Update `additional_metadata` and generate manifest - manifest_url, manifest_df = self.get_manifest_with_annotations(annotations) + manifest_url, manifest_df = self.get_manifest_with_annotations(annotations, sheet_url=sheet_url) # Update df with existing manifest. 
Agnostic to output format updated_df, out_of_schema_columns = self._update_dataframe_with_existing_df(empty_manifest_url=empty_manifest_url, existing_df=manifest_df) From 5b82bc82ae987817d2d226bf8c41d0543afc8d64 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 29 Jun 2023 14:08:40 -0700 Subject: [PATCH 075/135] expose strict_validation for manifest/generate endpoint --- schematic/manifest/generator.py | 25 +++++++++++++------------ schematic_api/api/openapi/api.yaml | 7 +++++++ schematic_api/api/routes.py | 9 +++++---- 3 files changed, 25 insertions(+), 16 deletions(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index a244bcd1e..acecb013e 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -700,7 +700,7 @@ def _request_regex_vr(self, gs_formula, i:int, text_color={"red": 1}): return requests_vr def _request_regex_match_vr_formatting(self, validation_rules: List[str], i: int, - spreadsheet_id: str, requests_body: dict, + spreadsheet_id: str, requests_body: dict, strict: bool = None, ): """ Purpose: @@ -740,7 +740,6 @@ def _request_regex_match_vr_formatting(self, validation_rules: List[str], i: int } ] ## Set validaiton strictness based on user specifications. - strict = None if split_rules[-1].lower() == "strict": strict = True @@ -1081,6 +1080,7 @@ def _create_requests_body( ordered_metadata_fields, json_schema, spreadsheet_id, + strict=None, ): """Create and store all formatting changes for the google sheet to execute at once. @@ -1108,7 +1108,7 @@ def _create_requests_body( if validation_rules: requests_body =self._request_regex_match_vr_formatting( - validation_rules, i, spreadsheet_id, requests_body + validation_rules, i, spreadsheet_id, requests_body, strict ) if req in json_schema["properties"].keys(): @@ -1164,7 +1164,7 @@ def _create_requests_body( requests_body["requests"].append(borders_formatting) return requests_body - def _create_empty_gs(self, required_metadata_fields, json_schema, spreadsheet_id): + def _create_empty_gs(self, required_metadata_fields, json_schema, spreadsheet_id, strict=None): """Generate requests to add columns and format the google sheet. Args: required_metadata_fields(dict): @@ -1194,6 +1194,7 @@ def _create_empty_gs(self, required_metadata_fields, json_schema, spreadsheet_id ordered_metadata_fields, json_schema, spreadsheet_id, + strict, ) # Execute requests @@ -1236,7 +1237,7 @@ def _gather_all_fields(self, fields, json_schema): ) return required_metadata_fields - def get_empty_manifest(self, json_schema_filepath=None): + def get_empty_manifest(self, json_schema_filepath=None, strict=None): """Create an empty manifest using specifications from the json schema. Args: @@ -1254,7 +1255,7 @@ def get_empty_manifest(self, json_schema_filepath=None): ) manifest_url = self._create_empty_gs( - required_metadata_fields, json_schema, spreadsheet_id + required_metadata_fields, json_schema, spreadsheet_id, strict ) return manifest_url @@ -1353,7 +1354,7 @@ def map_annotation_names_to_display_names( return annotations.rename(columns=label_map) def get_manifest_with_annotations( - self, annotations: pd.DataFrame + self, annotations: pd.DataFrame, strict: bool=None, ) -> Tuple[ps.Spreadsheet, pd.DataFrame]: """Generate manifest, optionally with annotations (if requested). 
@@ -1378,7 +1379,7 @@ def get_manifest_with_annotations( self.additional_metadata = annotations_dict # Generate empty manifest using `additional_metadata` - manifest_url = self.get_empty_manifest() + manifest_url = self.get_empty_manifest(strict) manifest_df = self.get_dataframe_by_url(manifest_url) # Annotations clashing with manifest attributes are skipped @@ -1471,7 +1472,7 @@ def _handle_output_format_logic(self, output_format: str = None, output_path: st return dataframe def get_manifest( - self, dataset_id: str = None, sheet_url: bool = None, json_schema: str = None, output_format: str = None, output_path: str = None, access_token: str = None + self, dataset_id: str = None, sheet_url: bool = None, json_schema: str = None, output_format: str = None, output_path: str = None, access_token: str = None, strict: bool = None, ) -> Union[str, pd.DataFrame]: """Gets manifest for a given dataset on Synapse. TODO: move this function to class MetadatModel (after MetadataModel is refactored) @@ -1489,7 +1490,7 @@ def get_manifest( # Handle case when no dataset ID is provided if not dataset_id: - manifest_url = self.get_empty_manifest(json_schema_filepath=json_schema) + manifest_url = self.get_empty_manifest(json_schema_filepath=json_schema, strict=strict) # if output_form parameter is set to "excel", return an excel spreadsheet if output_format == "excel": @@ -1514,7 +1515,7 @@ def get_manifest( manifest_record = store.updateDatasetManifestFiles(self.sg, datasetId = dataset_id, store = False) # get URL of an empty manifest file created based on schema component - empty_manifest_url = self.get_empty_manifest() + empty_manifest_url = self.get_empty_manifest(strict) # Populate empty template with existing manifest if manifest_record: @@ -1545,7 +1546,7 @@ def get_manifest( # if there are no files with annotations just generate an empty manifest if annotations.empty: - manifest_url = self.get_empty_manifest() + manifest_url = self.get_empty_manifest(strict) manifest_df = self.get_dataframe_by_url(manifest_url) else: # Subset columns if no interested in user-defined annotations and there are files present diff --git a/schematic_api/api/openapi/api.yaml b/schematic_api/api/openapi/api.yaml index 66332f3b1..d6788c5aa 100644 --- a/schematic_api/api/openapi/api.yaml +++ b/schematic_api/api/openapi/api.yaml @@ -86,6 +86,13 @@ paths: enum: ["excel", "google_sheet", "dataframe (only if getting existing manifests)"] description: If "excel" gets selected, this approach would avoid sending metadata to Google sheet APIs; if "google_sheet" gets selected, this would return a Google sheet URL. This parameter could potentially override sheet_url parameter. required: false + - in: query + name: strict_validation + schema: + type: boolean + default: True + description: If using Google Sheets, can set the strictness of Google Sheets regex match validation. True (default) will block users from entering incorrect values, False will throw a warning to users. 
+ required: false operationId: schematic_api.api.routes.get_manifest_route responses: "200": diff --git a/schematic_api/api/routes.py b/schematic_api/api/routes.py index 1246fae21..9703b0743 100644 --- a/schematic_api/api/routes.py +++ b/schematic_api/api/routes.py @@ -207,7 +207,7 @@ def get_temp_jsonld(schema_url): return tmp_file.name # @before_request -def get_manifest_route(schema_url: str, use_annotations: bool, dataset_ids=None, asset_view = None, output_format=None, title=None, access_token=None): +def get_manifest_route(schema_url: str, use_annotations: bool, dataset_ids=None, asset_view = None, output_format=None, title=None, access_token=None, strict_validation:bool=True): """Get the immediate dependencies that are related to a given source node. Args: schema_url: link to data model in json ld format @@ -217,6 +217,7 @@ def get_manifest_route(schema_url: str, use_annotations: bool, dataset_ids=None, use_annotations: Whether to use existing annotations during manifest generation asset_view: ID of view listing all project data assets. For example, for Synapse this would be the Synapse ID of the fileview listing all data assets for a given project. access_token: Token + strict: bool, strictness with which to apply validation rules to google sheets. Returns: Googlesheet URL (if sheet_url is True), or pandas dataframe (if sheet_url is False). """ @@ -231,7 +232,7 @@ def get_manifest_route(schema_url: str, use_annotations: bool, dataset_ids=None, all_args = connexion.request.args args_dict = dict(all_args.lists()) data_type = args_dict['data_type'] - + # Gather all dataset_ids try: dataset_ids = args_dict['dataset_id'] @@ -262,7 +263,7 @@ def get_manifest_route(schema_url: str, use_annotations: bool, dataset_ids=None, ) - def create_single_manifest(data_type, title, dataset_id=None, output_format=None, access_token=None): + def create_single_manifest(data_type, title, dataset_id=None, output_format=None, access_token=None, strict=strict_validation): # create object of type ManifestGenerator manifest_generator = ManifestGenerator( path_to_json_ld=jsonld, @@ -278,7 +279,7 @@ def create_single_manifest(data_type, title, dataset_id=None, output_format=None output_format = "dataframe" result = manifest_generator.get_manifest( - dataset_id=dataset_id, sheet_url=True, output_format=output_format, access_token=access_token + dataset_id=dataset_id, sheet_url=True, output_format=output_format, access_token=access_token, strict=strict, ) # return an excel file if output_format is set to "excel" From 79eb2e550b3fac910aa415409ece93b6f74caf19 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Mon, 3 Jul 2023 10:29:53 -0700 Subject: [PATCH 076/135] remove depreciated method and test --- schematic/store/synapse.py | 57 -------------------------------------- tests/test_store.py | 27 ------------------ 2 files changed, 84 deletions(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index 4e5e5d577..1e70c4847 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -298,63 +298,6 @@ def wrapper(*args, **kwargs): raise ex return wrapper - def send_api_request(self, request_type: str, uri: str, body: Union[Dict, str], headers: Dict = None, endpoint: str = None, **kwargs): - """ - Method to send API request to synapse via the python client - - Args: - request_type: type of request to send (restGET, restPOST, restPUT, or restDELETE) - uri: uri on which request is performed - headers: Optional, dictionary of headers to use 
rather than the API-key-signed default set of headers - If none passed in, use headers from synapse store object - body: body of the request, can be dictionary or JSON formatted string - endpoint: Optional, name of the endpoint to use, defaults to none which is evaluated by the client as self.repoEndpoint - **kwargs: other keyword arguments to pass to the request method - - Returns: - response: response from the request sent - - TODO: - Allow asynchronous calls to operate asynchronously on the schematic side - Further generalize this function for other calls, ie. GET calls without a body - """ - - # Make a dictionary for two purposes: - # to be used for chekcing that the value entered is a valid request type - # and to map from any capitalization of the string to the format required by the python client - request_types = { - 'restget': 'restGET', - 'restpost': 'restPOST', - 'restput': 'restPUT', - 'restdelete': 'restDELETE', - - } - - # intialize response variable to None - response = None - - # Use existing headers from synapse store object if none passed in - if not headers: - headers = self.syn.default_headers - - # If the user passed in a dictionary, convert to JSON string, - # if they passed in a string, assume it's formatted appropriately - if isinstance(body, Dict): - body = json.dumps(body) - - # Validate that entered request type is valid, and get the appropriate method from the python client - if request_type.lower() in request_types.keys(): - request = getattr(self.syn, request_types[request_type.lower()]) - else: - raise NotImplementedError( - f"The selected request: {request_type} is currenlty not exposed in the synaspePythonClient and cannot be used." - ) - - # Store request and return - response = request(uri, body, endpoint, headers, **kwargs) - - return response - def getStorageFileviewTable(self): """ Returns the storageFileviewTable obtained during initialization. 
""" diff --git a/tests/test_store.py b/tests/test_store.py index dfa8cd076..0e12d5b31 100644 --- a/tests/test_store.py +++ b/tests/test_store.py @@ -218,33 +218,6 @@ def test_getDatasetManifest(self, synapse_store, downloadFile): # return manifest id assert manifest_data == "syn51204513" - def test_synapse_rest_request(self, synapse_store): - """ - Test to ensure that we can send successfully send a request to the Synaspe API - TODO: - Add more test cases as more use cases for request method arise - """ - - # Build a dictionary to send to the API to create a new column - testColumnDict = { - "id": None, - "name": "TestColumn", - "defaultValue": None, - "columnType": "STRING", - "maximumSize": 64, - "maximumListLength": 1, - } - - # Send POST /column request to define new column - newColResponse = synapse_store.send_api_request( - request_type = "restPOST", - uri = "https://repo-prod.prod.sagebase.org/repo/v1/column", - body = testColumnDict, - ) - - assert newColResponse is not None - - class TestDatasetFileView: def test_init(self, dataset_id, dataset_fileview, synapse_store): From 17b511e6f41e487e7783a8db2def8c9d1b11e590 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 5 Jul 2023 11:07:07 -0700 Subject: [PATCH 077/135] pull file curation into its own method --- schematic/store/synapse.py | 35 +++++++++++++++++++----- tests/data/mock_manifests/BulkRNAseq.csv | 3 ++ 2 files changed, 31 insertions(+), 7 deletions(-) create mode 100644 tests/data/mock_manifests/BulkRNAseq.csv diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index 1f77715b0..e5cb1565d 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -601,13 +601,7 @@ def updateDatasetManifestFiles(self, sg: SchemaGenerator, datasetId: str, store: # the columns Filename and entityId are assumed to be present in manifest schema # TODO: use idiomatic panda syntax if dataset_files: - new_files = {"Filename": [], "entityId": []} - - # find new files if any - for file_id, file_name in dataset_files: - if not file_id in manifest["entityId"].values: - new_files["Filename"].append(file_name) - new_files["entityId"].append(file_id) + new_files = self._get_file_entityIDs(manifest, dataset_files, True) # update manifest so that it contain new files new_files = pd.DataFrame(new_files) @@ -627,6 +621,33 @@ def updateDatasetManifestFiles(self, sg: SchemaGenerator, datasetId: str, store: manifest = manifest.fillna("") return manifest_id, manifest + + def _get_file_entityIDs(self, manifest: pd.DataFrame, dataset_files: List, only_new_files: bool = False): + """ + Get a dictionary of files in a dataset. 
Either files that are not in the current manifest or all files
+
+        Args:
+            manifest: metadata manifest
+            dataset_files: List of all files in a dataset
+            only_new_files: boolean to control whether only new files are returned or all files in the dataset
+        Returns:
+            files: dictionary of file names and entityIDs, with scope as specified by `only_new_files`
+        """
+        files = {"Filename": [], "entityId": []}
+
+        if only_new_files:
+            # find new files if any
+            for file_id, file_name in dataset_files:
+                if not file_id in manifest["entityId"].values:
+                    files["Filename"].append(file_name)
+                    files["entityId"].append(file_id)
+        else:
+            # get all files
+            for file_id, file_name in dataset_files:
+                files["Filename"].append(file_name)
+                files["entityId"].append(file_id)
+
+        return files

     def getProjectManifests(self, projectId: str) -> List[str]:
         """Gets all metadata manifest files across all datasets in a specified project.

diff --git a/tests/data/mock_manifests/BulkRNAseq.csv b/tests/data/mock_manifests/BulkRNAseq.csv
new file mode 100644
index 000000000..facfa3f6a
--- /dev/null
+++ b/tests/data/mock_manifests/BulkRNAseq.csv
@@ -0,0 +1,3 @@
+Filename,Sample ID,File Format,Component,Genome Build,Genome FASTA
+TestRNA-seqDataset1/TestRNA-seq-dummy-dataset.rtf,ABCD,BAM,BulkRNA-seqAssay,GRCh38,
+TestRNA-seqDataset1/TestRNA-seq-dummy-dataset2.rtf,EFGH,CRAM,BulkRNA-seqAssay,GRCm39,

From 1ed0ee80a0177664c0dd98f907ac9bf0a0026136 Mon Sep 17 00:00:00 2001
From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com>
Date: Wed, 5 Jul 2023 13:47:10 -0700
Subject: [PATCH 078/135] get file entityIds, split behavior for file based metadata

---
 schematic/store/synapse.py | 34 ++++++++++++++++++++++++++--------
 1 file changed, 26 insertions(+), 8 deletions(-)

diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py
index e5cb1565d..9e0beba2a 100644
--- a/schematic/store/synapse.py
+++ b/schematic/store/synapse.py
@@ -1377,15 +1377,33 @@ def add_entities(
             manifest (pd.DataFrame): modified to add entitiyId as appropriate.
         '''
+
+        # Expected behavior is to annotate files if `Filename` is present regardless of `-mrt` setting
+        if 'filename' in [col.lower() for col in manifest.columns]:
+            # get current list of files and store as dataframe
+            dataset_files = self.getFilesInStorageDataset(datasetId)
+            files = self._get_file_entityIDs(manifest, dataset_files, False)
+            file_df = pd.DataFrame(files)
+
+            # TODO: Adjust merge params according to expected behavior ie.
inner vs outer join, dropping of entityId col + # Add the file entityIDs to the manifest + manifest = manifest.merge(file_df, how = 'left', on='Filename', suffixes=['_x',None]).drop('entityId_x',axis=1) + + for idx, row in manifest.iterrows(): - if not row["entityId"] and (manifest_record_type == 'file_and_entities' or - manifest_record_type == 'table_file_and_entities'): - manifest, entityId = self._create_entity_id(idx, row, manifest, datasetId) - elif not row["entityId"] and manifest_record_type == 'table_and_file': - # If not using entityIds, fill with manifest_table_id so - row["entityId"] = manifest_synapse_table_id - manifest.loc[idx, "entityId"] = manifest_synapse_table_id - entityId = '' + if 'filename' not in [col.lower() for col in manifest.columns]: + if not row["entityId"] and (manifest_record_type == 'file_and_entities' or + manifest_record_type == 'table_file_and_entities'): + manifest, entityId = self._create_entity_id(idx, row, manifest, datasetId) + elif not row["entityId"] and manifest_record_type == 'table_and_file': + # If not using entityIds, fill with manifest_table_id so + row["entityId"] = manifest_synapse_table_id + manifest.loc[idx, "entityId"] = manifest_synapse_table_id + entityId = '' + else: + # get the entity id corresponding to this row + entityId = row["entityId"] + # If entityIds were gathered from files, just read what was stored else: # get the entity id corresponding to this row entityId = row["entityId"] From 1d1cb612067e0545ec7829c14765a91e8d450548 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 5 Jul 2023 13:55:50 -0700 Subject: [PATCH 079/135] use kwargs --- schematic/store/synapse.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index 9e0beba2a..6d3e518ed 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -601,7 +601,7 @@ def updateDatasetManifestFiles(self, sg: SchemaGenerator, datasetId: str, store: # the columns Filename and entityId are assumed to be present in manifest schema # TODO: use idiomatic panda syntax if dataset_files: - new_files = self._get_file_entityIDs(manifest, dataset_files, True) + new_files = self._get_file_entityIDs(manifest=manifest, dataset_files=dataset_files, only_new_files=True) # update manifest so that it contain new files new_files = pd.DataFrame(new_files) @@ -1382,7 +1382,7 @@ def add_entities( if 'filename' in [col.lower() for col in manifest.columns]: # get current list of files and store as dataframe dataset_files = self.getFilesInStorageDataset(datasetId) - files = self._get_file_entityIDs(manifest, dataset_files, False) + files = self._get_file_entityIDs(manifest=manifest,dataset_files=dataset_files, only_new_files=False) file_df = pd.DataFrame(files) # TODO: Adjust merge params according to expected behavior ie. 
inner vs outer join, dropping of entityId col From 14c94c7b2e9c93d59fde08dd4437bb014f531764 Mon Sep 17 00:00:00 2001 From: linglp Date: Thu, 6 Jul 2023 10:47:43 -0400 Subject: [PATCH 080/135] remove profile decorator --- schematic/store/synapse.py | 1 - 1 file changed, 1 deletion(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index 2a38aa92c..1a7f2d8a8 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -2156,7 +2156,6 @@ def upsertTable(self, sg: SchemaGenerator,): return self.existingTableId - @profile(sort_by='cumulative', strip_dirs=True) def _update_table_uuid_column(self, sg: SchemaGenerator,) -> None: """Removes the `Uuid` column when present, and relpaces with an `Id` column Used to enable backwards compatability for manifests using the old `Uuid` convention From 104d420866084b7646ce785cd7665f3f3e7b7d87 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Thu, 6 Jul 2023 11:42:59 -0700 Subject: [PATCH 081/135] fix ref before assignment error --- schematic/models/commands.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/schematic/models/commands.py b/schematic/models/commands.py index f37c730f4..eeeb7c809 100644 --- a/schematic/models/commands.py +++ b/schematic/models/commands.py @@ -110,9 +110,9 @@ def submit_manifest( """ Running CLI with manifest validation (optional) and submission options. """ - if jsonld is None: - jsonld = CONFIG.model_location - log_value_from_config("jsonld", jsonld) + + jsonld = CONFIG.model_location + log_value_from_config("jsonld", jsonld) metadata_model = MetadataModel( inputMModelLocation=jsonld, inputMModelLocationType="local" From dd959d8eb4ca9293117752596340455b3d347fee Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Thu, 6 Jul 2023 11:43:38 -0700 Subject: [PATCH 082/135] remove testing manifest --- tests/data/mock_manifests/BulkRNAseq.csv | 3 --- 1 file changed, 3 deletions(-) delete mode 100644 tests/data/mock_manifests/BulkRNAseq.csv diff --git a/tests/data/mock_manifests/BulkRNAseq.csv b/tests/data/mock_manifests/BulkRNAseq.csv deleted file mode 100644 index facfa3f6a..000000000 --- a/tests/data/mock_manifests/BulkRNAseq.csv +++ /dev/null @@ -1,3 +0,0 @@ -Filename,Sample ID,File Format,Component,Genome Build,Genome FASTA -TestRNA-seqDataset1/TestRNA-seq-dummy-dataset.rtf,ABCD,BAM,BulkRNA-seqAssay,GRCh38, -TestRNA-seqDataset1/TestRNA-seq-dummy-dataset2.rtf,EFGH,CRAM,BulkRNA-seqAssay,GRCm39, From b9807e9a114055427280db408ca150ed114eabd2 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Thu, 6 Jul 2023 14:00:01 -0700 Subject: [PATCH 083/135] update comments --- schematic/store/synapse.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index 7e8ba4efe..fb167d632 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -1372,8 +1372,7 @@ def add_entities( files = self._get_file_entityIDs(manifest=manifest,dataset_files=dataset_files, only_new_files=False) file_df = pd.DataFrame(files) - # TODO: Adjust merge params according to expected behavior ie. 
inner vs outer join, dropping of entityId col - # Add the file entityIDs to the manifest + # Merge dataframes to add entityIds manifest = manifest.merge(file_df, how = 'left', on='Filename', suffixes=['_x',None]).drop('entityId_x',axis=1) From 6c716c2a5d3c305d20bc46dec6419334c92600c1 Mon Sep 17 00:00:00 2001 From: Anthony Williams Date: Fri, 7 Jul 2023 07:07:03 -0700 Subject: [PATCH 084/135] use service desk link in README.md Instead of prompting users to create issues in the github repo, link them to the D&T service desk. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 3eb9e1d3d..533885802 100644 --- a/README.md +++ b/README.md @@ -288,7 +288,7 @@ docker run -v %cd%:/schematic \ If you install external libraries by using `poetry add `, please make sure that you include `pyproject.toml` and `poetry.lock` file in your commit. ## Reporting bugs or feature requests -You can use the [`Issues`](https://github.com/Sage-Bionetworks/schematic/issues) tab to **create bug and feature requests**. Providing enough details to the developers to verify and troubleshoot your issue is paramount: +You can **create bug and feature requests** through [Sage Bionetwork's FAIR Data service desk](https://sagebionetworks.jira.com/servicedesk/customer/portal/5/group/8). Providing enough details to the developers to verify and troubleshoot your issue is paramount: - **Provide a clear and descriptive title as well as a concise summary** of the issue to identify the problem. - **Describe the exact steps which reproduce the problem** in as many details as possible. - **Describe the behavior you observed after following the steps** and point out what exactly is the problem with that behavior. From 8533b15d05fabba8c336663f4dd3b01ecb939494 Mon Sep 17 00:00:00 2001 From: Anthony Williams Date: Fri, 7 Jul 2023 07:11:12 -0700 Subject: [PATCH 085/135] Update CONTRIBUTION.md Change issues link to FAIR Data service desk instead of github issues. --- CONTRIBUTION.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTION.md b/CONTRIBUTION.md index 52762c9a1..a9876d4df 100644 --- a/CONTRIBUTION.md +++ b/CONTRIBUTION.md @@ -8,7 +8,7 @@ Please note we have a [code of conduct](CODE_OF_CONDUCT.md), please follow it in ### Reporting bugs or feature requests -You can use the [`Issues`](https://github.com/Sage-Bionetworks/schematic/issues) tab to **create bug and feature requests**. Providing enough details to the developers to verify and troubleshoot your issue is paramount: +You can use [Sage Bionetwork's FAIR Data service desk](https://sagebionetworks.jira.com/servicedesk/customer/portal/5/group/8) to **create bug and feature requests**. Providing enough details to the developers to verify and troubleshoot your issue is paramount: - **Provide a clear and descriptive title as well as a concise summary** of the issue to identify the problem. - **Describe the exact steps which reproduce the problem** in as many details as possible. - **Describe the behavior you observed after following the steps** and point out what exactly is the problem with that behavior. 
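The generator patches in this series — 074 and 075 above, and 086 immediately below — thread two Google-Sheets-only switches through manifest generation: `sheet_url` gates whether regex-rule formatting is written into the generated sheet at all, and `strict` (surfaced to API callers as the `strict_validation` query parameter) decides whether that formatting blocks invalid entries or only warns about them. A minimal usage sketch under the signatures shown in those diffs; the model path and values below are placeholders, and the `root` and `title` argument names are assumptions beyond the `path_to_json_ld` argument visible in routes.py:

    from schematic.manifest.generator import ManifestGenerator

    # Sketch only: values are hypothetical, not taken from these patches.
    generator = ManifestGenerator(
        path_to_json_ld="tests/data/example.model.jsonld",  # placeholder data model
        root="Patient",             # assumed argument name for the component/data type
        title="Example - Patient",  # assumed argument name for the sheet title
    )

    # sheet_url=True requests a Google Sheet, so regex-match formatting is applied;
    # strict=False downgrades that formatting from blocking bad entries to warning,
    # mirroring strict_validation=False on the manifest/generate endpoint.
    manifest_url = generator.get_manifest(sheet_url=True, strict=False)

For non-sheet outputs (`output_format="excel"` or a dataframe), the same call skips the sheet-only formatting entirely, which is what the `sheet_url` check refined in the next patch guards.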
From 5a3cf01ac547d807194d461677cca628d0194a30 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Mon, 10 Jul 2023 11:35:43 -0700 Subject: [PATCH 086/135] move to checking for sheet_url only when creating regex formatting --- schematic/manifest/generator.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index 8ef6abd99..f75101944 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -1105,13 +1105,10 @@ def _create_requests_body( requests_body["requests"] = [] for i, req in enumerate(ordered_metadata_fields[0]): # Gather validation rules and valid values for attribute, if using google sheets. - if sheet_url: - validation_rules = self.sg.get_node_validation_rules(req) - else: - validation_rules = "" + validation_rules = self.sg.get_node_validation_rules(req) - if validation_rules: + if validation_rules and sheet_url: requests_body =self._request_regex_match_vr_formatting( validation_rules, i, spreadsheet_id, requests_body ) From 2d0bb77afb29418e4fae6104596d6cd5d6e455d8 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Mon, 10 Jul 2023 14:05:13 -0700 Subject: [PATCH 087/135] rm 2nd `filename` check --- schematic/store/synapse.py | 21 ++++++++------------- 1 file changed, 8 insertions(+), 13 deletions(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index fb167d632..d821fc2e9 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -1377,19 +1377,14 @@ def add_entities( for idx, row in manifest.iterrows(): - if 'filename' not in [col.lower() for col in manifest.columns]: - if not row["entityId"] and (manifest_record_type == 'file_and_entities' or - manifest_record_type == 'table_file_and_entities'): - manifest, entityId = self._create_entity_id(idx, row, manifest, datasetId) - elif not row["entityId"] and manifest_record_type == 'table_and_file': - # If not using entityIds, fill with manifest_table_id so - row["entityId"] = manifest_synapse_table_id - manifest.loc[idx, "entityId"] = manifest_synapse_table_id - entityId = '' - else: - # get the entity id corresponding to this row - entityId = row["entityId"] - # If entityIds were gathered from files, just read what was stored + if not row["entityId"] and (manifest_record_type == 'file_and_entities' or + manifest_record_type == 'table_file_and_entities'): + manifest, entityId = self._create_entity_id(idx, row, manifest, datasetId) + elif not row["entityId"] and manifest_record_type == 'table_and_file': + # If not using entityIds, fill with manifest_table_id so + row["entityId"] = manifest_synapse_table_id + manifest.loc[idx, "entityId"] = manifest_synapse_table_id + entityId = '' else: # get the entity id corresponding to this row entityId = row["entityId"] From 192a1eaa0d252a0a0b896c12adb1c73783a2efbd Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Mon, 10 Jul 2023 14:06:45 -0700 Subject: [PATCH 088/135] add comment --- schematic/store/synapse.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index d821fc2e9..e2cd974f7 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -1375,7 +1375,7 @@ def add_entities( # Merge dataframes to add entityIds manifest = manifest.merge(file_df, how = 'left', on='Filename', suffixes=['_x',None]).drop('entityId_x',axis=1) - + # Fill `entityId` for each row if 
missing and annotate entity as appropriate for idx, row in manifest.iterrows(): if not row["entityId"] and (manifest_record_type == 'file_and_entities' or manifest_record_type == 'table_file_and_entities'): From 8a7d6d672c576bd33139c0ed91c3ae7096298ce8 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Mon, 10 Jul 2023 14:11:18 -0700 Subject: [PATCH 089/135] change param order --- schematic/store/synapse.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index e2cd974f7..d41d2ea01 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -574,7 +574,7 @@ def updateDatasetManifestFiles(self, sg: SchemaGenerator, datasetId: str, store: # the columns Filename and entityId are assumed to be present in manifest schema # TODO: use idiomatic panda syntax if dataset_files: - new_files = self._get_file_entityIDs(manifest=manifest, dataset_files=dataset_files, only_new_files=True) + new_files = self._get_file_entityIDs(dataset_files=dataset_files, manifest=manifest, only_new_files=True) # update manifest so that it contain new files new_files = pd.DataFrame(new_files) @@ -595,7 +595,7 @@ def updateDatasetManifestFiles(self, sg: SchemaGenerator, datasetId: str, store: return manifest_id, manifest - def _get_file_entityIDs(self, manifest: pd.DataFrame, dataset_files: List, only_new_files: bool = False): + def _get_file_entityIDs(self, dataset_files: List, manifest: pd.DataFrame = None, only_new_files: bool = False): """ Get a dictionary of files in a dataset. Either files that are not in the current manifest or all files @@ -1369,7 +1369,7 @@ def add_entities( if 'filename' in [col.lower() for col in manifest.columns]: # get current list of files and store as dataframe dataset_files = self.getFilesInStorageDataset(datasetId) - files = self._get_file_entityIDs(manifest=manifest,dataset_files=dataset_files, only_new_files=False) + files = self._get_file_entityIDs(dataset_files=dataset_files, manifest=manifest, only_new_files=False) file_df = pd.DataFrame(files) # Merge dataframes to add entityIds From 3e2262e69a967bcd8da1b4743052b2ab69c73e7e Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Mon, 10 Jul 2023 14:15:41 -0700 Subject: [PATCH 090/135] make `manifest` param optional --- schematic/store/synapse.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index d41d2ea01..03cef320c 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -574,7 +574,7 @@ def updateDatasetManifestFiles(self, sg: SchemaGenerator, datasetId: str, store: # the columns Filename and entityId are assumed to be present in manifest schema # TODO: use idiomatic panda syntax if dataset_files: - new_files = self._get_file_entityIDs(dataset_files=dataset_files, manifest=manifest, only_new_files=True) + new_files = self._get_file_entityIDs(dataset_files=dataset_files, only_new_files=True, manifest=manifest) # update manifest so that it contain new files new_files = pd.DataFrame(new_files) @@ -595,7 +595,7 @@ def updateDatasetManifestFiles(self, sg: SchemaGenerator, datasetId: str, store: return manifest_id, manifest - def _get_file_entityIDs(self, dataset_files: List, manifest: pd.DataFrame = None, only_new_files: bool = False): + def _get_file_entityIDs(self, dataset_files: List, only_new_files: bool = False, manifest: pd.DataFrame = None): 
""" Get a dictionary of files in a dataset. Either files that are not in the current manifest or all files @@ -609,6 +609,11 @@ def _get_file_entityIDs(self, dataset_files: List, manifest: pd.DataFrame = None files = {"Filename": [], "entityId": []} if only_new_files: + if not manifest: + raise UnboundLocalError( + "No manifest was passed in, a manifest is required when `only_new_files` is True." + ) + # find new files if any for file_id, file_name in dataset_files: if not file_id in manifest["entityId"].values: @@ -1369,7 +1374,7 @@ def add_entities( if 'filename' in [col.lower() for col in manifest.columns]: # get current list of files and store as dataframe dataset_files = self.getFilesInStorageDataset(datasetId) - files = self._get_file_entityIDs(dataset_files=dataset_files, manifest=manifest, only_new_files=False) + files = self._get_file_entityIDs(dataset_files=dataset_files, only_new_files=False) file_df = pd.DataFrame(files) # Merge dataframes to add entityIds From 7bd2323af65d1c0b6d45b5e4eb4b759e60414977 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Mon, 10 Jul 2023 14:18:22 -0700 Subject: [PATCH 091/135] add test manfiest for file based annotations --- tests/data/mock_manifests/test_BulkRNAseq.csv | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 tests/data/mock_manifests/test_BulkRNAseq.csv diff --git a/tests/data/mock_manifests/test_BulkRNAseq.csv b/tests/data/mock_manifests/test_BulkRNAseq.csv new file mode 100644 index 000000000..facfa3f6a --- /dev/null +++ b/tests/data/mock_manifests/test_BulkRNAseq.csv @@ -0,0 +1,3 @@ +Filename,Sample ID,File Format,Component,Genome Build,Genome FASTA +TestRNA-seqDataset1/TestRNA-seq-dummy-dataset.rtf,ABCD,BAM,BulkRNA-seqAssay,GRCh38, +TestRNA-seqDataset1/TestRNA-seq-dummy-dataset2.rtf,EFGH,CRAM,BulkRNA-seqAssay,GRCm39, From ccf1709b64d712bb14a9218243194f5de17e2131 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Mon, 10 Jul 2023 14:25:36 -0700 Subject: [PATCH 092/135] update annotations test manifest to new `Id` col --- tests/data/mock_manifests/annotations_test_manifest.csv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/data/mock_manifests/annotations_test_manifest.csv b/tests/data/mock_manifests/annotations_test_manifest.csv index 8cb700f55..ba15606bc 100644 --- a/tests/data/mock_manifests/annotations_test_manifest.csv +++ b/tests/data/mock_manifests/annotations_test_manifest.csv @@ -1,3 +1,3 @@ -Component,CheckList,CheckRegexList,CheckRegexSingle,CheckNum,CheckFloat,CheckInt,CheckString,CheckURL,CheckMatchatLeast,CheckMatchatLeastvalues,CheckMatchExactly,CheckMatchExactlyvalues,CheckRecommended,CheckAges,CheckUnique,Uuid,entityId +Component,CheckList,CheckRegexList,CheckRegexSingle,CheckNum,CheckFloat,CheckInt,CheckString,CheckURL,CheckMatchatLeast,CheckMatchatLeastvalues,CheckMatchExactly,CheckMatchExactlyvalues,CheckRecommended,CheckAges,CheckUnique,Id,entityId MockComponent,"valid,list,values","a,c,f",a,6,99.65,7,valid,https://www.google.com/,1985,4891,23487492,24323472834,,6571,str1,0f7812cc-8a0e-4f54-b8c4-e497cb7b34d0,syn35367245 MockComponent,"valid,list,values","a,c,f",a,6,99.65,8.52,valid,https://www.google.com/,1985,4891,23487492,24323472834,,6571,str1,da82f8e2-c7b0-428f-8f9d-677252ef5f68,syn35367246 From 323f660299ac04dfbb13ff5c0036ab2bfb30eb89 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Mon, 10 Jul 2023 14:25:54 -0700 Subject: [PATCH 093/135] 
parametrize annotations test

---
 tests/test_store.py | 17 +++++++++++------
 1 file changed, 11 insertions(+), 6 deletions(-)

diff --git a/tests/test_store.py b/tests/test_store.py
index bcd31cb99..075e4ecce 100644
--- a/tests/test_store.py
+++ b/tests/test_store.py
@@ -113,9 +113,11 @@ def test_getFileAnnotations(self, synapse_store):
         assert expected_dict == actual_dict
 
-    def test_annotation_submission(self, synapse_store, helpers, config: Configuration):
-        manifest_path = "mock_manifests/annotations_test_manifest.csv"
-
+    @pytest.mark.parametrize('manifest_path, test_annotations',
+        [
+            ("mock_manifests/annotations_test_manifest.csv", {'CheckInt': '7', 'CheckList': 'valid, list, values'}),
+        ])
+    def test_annotation_submission(self, synapse_store, helpers, manifest_path, test_annotations, config: Configuration):
         # Upload dataset annotations
 
         sg = SchemaGenerator(config.model_location)
@@ -143,9 +145,12 @@ def test_annotation_submission(self, synapse_store, helpers, manifest_path, test
         annotations = synapse_store.getFileAnnotations(entity_id)
 
         # Check annotations of interest
-        assert annotations['CheckInt'] == '7'
-        assert annotations['CheckList'] == 'valid, list, values'
-        assert 'CheckRecommended' not in annotations.keys()
+        for key in test_annotations.keys():
+            assert key in annotations.keys()
+            assert annotations[key] == test_annotations[key]
+
+        if manifest_path.endswith('annotations_test_manifest.csv'):
+            assert 'CheckRecommended' not in annotations.keys()

From d553a31f73df9ff1bf3d0e8549267e750eb10600 Mon Sep 17 00:00:00 2001
From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com>
Date: Mon, 10 Jul 2023 15:00:07 -0700
Subject: [PATCH 094/135] rename var

---
 schematic/store/synapse.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py
index 03cef320c..713b23a03 100644
--- a/schematic/store/synapse.py
+++ b/schematic/store/synapse.py
@@ -1374,8 +1374,8 @@ def add_entities(
         if 'filename' in [col.lower() for col in manifest.columns]:
             # get current list of files and store as dataframe
             dataset_files = self.getFilesInStorageDataset(datasetId)
-            files = self._get_file_entityIDs(dataset_files=dataset_files, only_new_files=False)
-            file_df = pd.DataFrame(files)
+            files_and_entityIds = self._get_file_entityIDs(dataset_files=dataset_files, only_new_files=False)
+            file_df = pd.DataFrame(files_and_entityIds)
 
             # Merge dataframes to add entityIds
             manifest = manifest.merge(file_df, how = 'left', on='Filename', suffixes=['_x',None]).drop('entityId_x',axis=1)

From 85f0bb592ca28a6cec41e69a82909cc40eaa750a Mon Sep 17 00:00:00 2001
From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com>
Date: Mon, 10 Jul 2023 16:08:04 -0700
Subject: [PATCH 095/135] update annotations test for file annos

---
 tests/test_store.py | 21 +++++++++++++--------
 1 file changed, 13 insertions(+), 8 deletions(-)

diff --git a/tests/test_store.py b/tests/test_store.py
index 075e4ecce..2c6a83732 100644
--- a/tests/test_store.py
+++ b/tests/test_store.py
@@ -14,6 +14,7 @@
 from schematic.store.synapse import SynapseStorage, DatasetFileView, ManifestDownload
 from schematic.schemas.generator import SchemaGenerator
 from synapseclient.core.exceptions import SynapseHTTPError
+from synapseclient.entity import File
 from schematic.configuration.configuration import Configuration
 
 logging.basicConfig(level=logging.DEBUG)
@@ -113,11 +114,12 @@ def test_getFileAnnotations(self, synapse_store):
         assert expected_dict == actual_dict
 
-
@pytest.mark.parametrize('manifest_path, test_annotations', - [ - ("mock_manifests/annotations_test_manifest.csv", {'CheckInt': '7', 'CheckList': 'valid, list, values'}), - ]) - def test_annotation_submission(self, synapse_store, helpers, manifest_path, test_annotations, config: Configuration): + @pytest.mark.parametrize('manifest_path, test_annotations, datasetId, manifest_record_type', + [ ("mock_manifests/annotations_test_manifest.csv", {'CheckInt': '7', 'CheckList': 'valid, list, values'}, 'syn34295552', 'file_and_entities'), + ("mock_manifests/test_BulkRNAseq.csv", {'FileFormat': 'BAM', 'GenomeBuild': 'GRCh38'}, 'syn39241199', 'table_and_file')], + ids = ['non file-based', + 'file-based']) + def test_annotation_submission(self, synapse_store, helpers, manifest_path, test_annotations, datasetId, manifest_record_type, config: Configuration): # Upload dataset annotations sg = SchemaGenerator(config.model_location) @@ -131,8 +133,8 @@ def test_annotation_submission(self, synapse_store, helpers, manifest_path, test manifest_id = synapse_store.associateMetadataWithFiles( schemaGenerator = sg, metadataManifestPath = helpers.get_data_path(manifest_path), - datasetId = 'syn34295552', - manifest_record_type = 'file_and_entities', + datasetId = datasetId, + manifest_record_type = manifest_record_type, useSchemaLabel = True, hideBlanks = True, restrict_manifest = False, @@ -141,7 +143,7 @@ def test_annotation_submission(self, synapse_store, helpers, manifest_path, test pass # Retrive annotations - entity_id, entity_id_spare = helpers.get_data_frame(manifest_path)["entityId"][0:2] + entity_id = helpers.get_data_frame(manifest_path)["entityId"][0] annotations = synapse_store.getFileAnnotations(entity_id) # Check annotations of interest @@ -151,6 +153,9 @@ def test_annotation_submission(self, synapse_store, helpers, manifest_path, test if manifest_path.endswith('annoations_tset_manifest.csv'): assert 'CheckRecommended' not in annotations.keys() + elif manifest_path.endswith('test_BulkRNAseq.csv'): + entity = synapse_store.syn.get(entity_id) + assert type(entity) == File From 467c3b9da765600ef2aa74d74ba4e8d97d0aed92 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Tue, 11 Jul 2023 10:02:49 -0700 Subject: [PATCH 096/135] update manifest checking logic --- schematic/store/synapse.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index 713b23a03..ef27ac744 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -609,7 +609,7 @@ def _get_file_entityIDs(self, dataset_files: List, only_new_files: bool = False files = {"Filename": [], "entityId": []} if only_new_files: - if not manifest: + if manifest is None: raise UnboundLocalError( "No manifest was passed in, a manifest is required when `only_new_files` is True." 
From 9df3a87afadc3ba61dac647b560675c44d11ac72 Mon Sep 17 00:00:00 2001
From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com>
Date: Tue, 11 Jul 2023 10:03:02 -0700
Subject: [PATCH 097/135] fix spacing

---
 tests/test_store.py | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/tests/test_store.py b/tests/test_store.py
index 2c6a83732..203d7aa95 100644
--- a/tests/test_store.py
+++ b/tests/test_store.py
@@ -157,10 +157,6 @@ def test_annotation_submission(self, synapse_store, helpers, manifest_path, test
             entity = synapse_store.syn.get(entity_id)
             assert type(entity) == File

-
-
-
-
     @pytest.mark.parametrize("force_batch", [True, False], ids=["batch", "non_batch"])
     def test_getDatasetAnnotations(self, dataset_id, synapse_store, force_batch):
         expected_df = pd.DataFrame.from_records(

From dbcc604e7b5f73d71d035545029b5ec858e19ed3 Mon Sep 17 00:00:00 2001
From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com>
Date: Tue, 11 Jul 2023 10:03:27 -0700
Subject: [PATCH 098/135] also spacing

---
 tests/test_store.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/tests/test_store.py b/tests/test_store.py
index 203d7aa95..bcd28c105 100644
--- a/tests/test_store.py
+++ b/tests/test_store.py
@@ -472,7 +472,6 @@ def test_upsertTable(self, helpers, synapse_store, config:Configuration, project
         # delete table
         synapse_store.syn.delete(tableId)

-
 class TestDownloadManifest:
     @pytest.mark.parametrize("datasetFileView", [{"id": ["syn51203973", "syn51203943"], "name": ["synapse_storage_manifest.csv", "synapse_storage_manifest_censored.csv"]}, {"id": ["syn51203973"], "name": ["synapse_storage_manifest.csv"]}, {"id": ["syn51203943"], "name": ["synapse_storage_manifest_censored.csv"]}])
     def test_get_manifest_id(self, synapse_store, datasetFileView):

From d61dd916bcd02903d1a777f06284e768a067b34b Mon Sep 17 00:00:00 2001
From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com>
Date: Tue, 11 Jul 2023 10:03:38 -0700
Subject: [PATCH 099/135] add test for getting file entityIds

---
 tests/test_store.py | 25 +++++++++++++++++++++++++
 1 file changed, 25 insertions(+)

diff --git a/tests/test_store.py b/tests/test_store.py
index bcd28c105..28508aa6f 100644
--- a/tests/test_store.py
+++ b/tests/test_store.py
@@ -223,6 +223,31 @@ def test_getDatasetManifest(self, synapse_store, downloadFile):

         # return manifest id
         assert manifest_data == "syn51204513"

+    @pytest.mark.parametrize('only_new_files',[True, False])
+    def test_file_entityIds(self, helpers, synapse_store, only_new_files):
+        manifest_path = "mock_manifests/test_BulkRNAseq.csv"
+        dataset_files = synapse_store.getFilesInStorageDataset('syn39241199')
+
+        if only_new_files:
+            # Prepare manifest as if getting Ids for new files only
+            manifest = helpers.get_data_frame(manifest_path)
+            entityIds = pd.DataFrame({'entityId': ['syn39242580', 'syn51900502']})
+            manifest = manifest.join(entityIds)
+
+            # get entityIds for new files
+            files_and_Ids = synapse_store._get_file_entityIDs(dataset_files=dataset_files, only_new_files=only_new_files, manifest=manifest)
+
+            # Assert that there are no new files
+            for value in files_and_Ids.values():
+                assert value == []
+
+        else:
+            # get entityIds for all files
+            files_and_Ids = synapse_store._get_file_entityIDs(dataset_files=dataset_files, only_new_files=only_new_files)
+
+            # assert that the correct number of files were found
+            assert len(files_and_Ids['entityId']) == 2
+
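For orientation, the assertions in PATCH 099 pin down the shape `_get_file_entityIDs` returns: a dict of two parallel lists keyed by the manifest columns, ready to feed to `pd.DataFrame(...)`. A sketch of the expected value for the two-file test dataset (illustrative only; the filenames come from test_BulkRNAseq.csv and the ids from the test's join):

    # Hypothetical return value for the two-file dataset, not captured from a real run.
    files_and_Ids = {
        "Filename": [
            "TestRNA-seqDataset1/TestRNA-seq-dummy-dataset.rtf",
            "TestRNA-seqDataset1/TestRNA-seq-dummy-dataset2.rtf",
        ],
        "entityId": ["syn39242580", "syn51900502"],
    }
    assert len(files_and_Ids["entityId"]) == 2

With `only_new_files=True` and every file already present in the manifest, both lists come back empty instead, which is exactly what the first branch of the test asserts.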
From 6fcbf63a26613ca3265b3a7ee0b09adc5ee48c42 Mon Sep 17 00:00:00 2001
From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com>
Date: Tue, 11 Jul 2023 10:04:15 -0700
Subject: [PATCH 100/135] update method name

---
 schematic/store/synapse.py | 6 +++---
 tests/test_store.py | 4 ++--
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py
index ef27ac744..6f7caa3e5 100644
--- a/schematic/store/synapse.py
+++ b/schematic/store/synapse.py
@@ -574,7 +574,7 @@ def updateDatasetManifestFiles(self, sg: SchemaGenerator, datasetId: str, store:
         # the columns Filename and entityId are assumed to be present in manifest schema
         # TODO: use idiomatic pandas syntax
         if dataset_files:
-            new_files = self._get_file_entityIDs(dataset_files=dataset_files, only_new_files=True, manifest=manifest)
+            new_files = self._get_file_entityIds(dataset_files=dataset_files, only_new_files=True, manifest=manifest)

             # update manifest so that it contains new files
             new_files = pd.DataFrame(new_files)
@@ -595,7 +595,7 @@ def updateDatasetManifestFiles(self, sg: SchemaGenerator, datasetId: str, store:

         return manifest_id, manifest

-    def _get_file_entityIDs(self, dataset_files: List, only_new_files: bool = False, manifest: pd.DataFrame = None):
+    def _get_file_entityIds(self, dataset_files: List, only_new_files: bool = False, manifest: pd.DataFrame = None):
         """
         Get a dictionary of files in a dataset. Either files that are not in the current manifest or all files
@@ -1374,7 +1374,7 @@ def add_entities(
         if 'filename' in [col.lower() for col in manifest.columns]:
             # get current list of files and store as dataframe
             dataset_files = self.getFilesInStorageDataset(datasetId)
-            files_and_entityIds = self._get_file_entityIDs(dataset_files=dataset_files, only_new_files=False)
+            files_and_entityIds = self._get_file_entityIds(dataset_files=dataset_files, only_new_files=False)
             file_df = pd.DataFrame(files_and_entityIds)

diff --git a/tests/test_store.py b/tests/test_store.py
index 28508aa6f..58c3de2ec 100644
--- a/tests/test_store.py
+++ b/tests/test_store.py
@@ -235,7 +235,7 @@ def test_file_entityIds(self, helpers, synapse_store, only_new_files):
             manifest = manifest.join(entityIds)

             # get entityIds for new files
-            files_and_Ids = synapse_store._get_file_entityIDs(dataset_files=dataset_files, only_new_files=only_new_files, manifest=manifest)
+            files_and_Ids = synapse_store._get_file_entityIds(dataset_files=dataset_files, only_new_files=only_new_files, manifest=manifest)

             # Assert that there are no new files
             for value in files_and_Ids.values():
@@ -243,7 +243,7 @@ def test_file_entityIds(self, helpers, synapse_store, only_new_files):

         else:
             # get entityIds for all files
-            files_and_Ids = synapse_store._get_file_entityIDs(dataset_files=dataset_files, only_new_files=only_new_files)
+            files_and_Ids = synapse_store._get_file_entityIds(dataset_files=dataset_files, only_new_files=only_new_files)

             # assert that the correct number of files were found
             assert len(files_and_Ids['entityId']) == 2

From 87e09d13b53d52d7f3823338685e9aee55d50910 Mon Sep 17 00:00:00 2001
From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com>
Date: Tue, 11 Jul 2023 11:06:37 -0700
Subject: [PATCH 101/135] change location of test

---
 tests/test_store.py | 50 ++++++++++++++++++++++-----------------------
 1 file changed, 25 insertions(+), 25 deletions(-)

diff --git a/tests/test_store.py b/tests/test_store.py
index 58c3de2ec..6408ae195 100644
--- a/tests/test_store.py
+++ b/tests/test_store.py
@@ -114,6 +114,31 @@ def test_getFileAnnotations(self, synapse_store):
         assert expected_dict == actual_dict

+    @pytest.mark.parametrize('only_new_files',[True, False])
+    def test_file_entityIds(self, helpers, synapse_store, only_new_files):
+        manifest_path = "mock_manifests/test_BulkRNAseq.csv"
+        dataset_files = synapse_store.getFilesInStorageDataset('syn39241199')
+
+        if only_new_files:
+            # Prepare manifest as if getting Ids for new files only
+            manifest = helpers.get_data_frame(manifest_path)
+            entityIds = pd.DataFrame({'entityId': ['syn39242580', 'syn51900502']})
+            manifest = manifest.join(entityIds)
+
+            # get entityIds for new files
+            files_and_Ids = synapse_store._get_file_entityIds(dataset_files=dataset_files, only_new_files=only_new_files, manifest=manifest)
+
+            # Assert that there are no new files
+            for value in files_and_Ids.values():
+                assert value == []
+
+        else:
+            # get entityIds for all files
+            files_and_Ids = synapse_store._get_file_entityIds(dataset_files=dataset_files, only_new_files=only_new_files)
+
+            # assert that the correct number of files were found
+            assert len(files_and_Ids['entityId']) == 2
+
     @pytest.mark.parametrize('manifest_path, test_annotations, datasetId, manifest_record_type',
         [   ("mock_manifests/annotations_test_manifest.csv", {'CheckInt': '7', 'CheckList': 'valid, list, values'}, 'syn34295552', 'file_and_entities'),
             ("mock_manifests/test_BulkRNAseq.csv", {'FileFormat': 'BAM', 'GenomeBuild': 'GRCh38'}, 'syn39241199', 'table_and_file')],
         ids = ['non file-based',
             'file-based'])
     def test_annotation_submission(self, synapse_store, helpers, manifest_path, test_annotations, datasetId, manifest_record_type, config: Configuration):
@@ -223,31 +248,6 @@ def test_getDatasetManifest(self, synapse_store, downloadFile):

         # return manifest id
         assert manifest_data == "syn51204513"

-    @pytest.mark.parametrize('only_new_files',[True, False])
-    def test_file_entityIds(self, helpers, synapse_store, only_new_files):
-        manifest_path = "mock_manifests/test_BulkRNAseq.csv"
-        dataset_files = synapse_store.getFilesInStorageDataset('syn39241199')
-
-        if only_new_files:
-            # Prepare manifest as if getting Ids for new files only
-            manifest = helpers.get_data_frame(manifest_path)
-            entityIds = pd.DataFrame({'entityId': ['syn39242580', 'syn51900502']})
-            manifest = manifest.join(entityIds)
-
-            # get entityIds for new files
-            files_and_Ids = synapse_store._get_file_entityIds(dataset_files=dataset_files, only_new_files=only_new_files, manifest=manifest)
-
-            # Assert that there are no new files
-            for value in files_and_Ids.values():
-                assert value == []
-
-        else:
-            # get entityIds for all files
-            files_and_Ids = synapse_store._get_file_entityIds(dataset_files=dataset_files, only_new_files=only_new_files)
-
-            # assert that the correct number of files were found
-            assert len(files_and_Ids['entityId']) == 2
-
 class TestDatasetFileView:
     def test_init(self, dataset_id, dataset_fileview, synapse_store):

From 684cd1d11a232e43400a3ed4452d775c625ad222 Mon Sep 17 00:00:00 2001
From: linglp
Date: Tue, 11 Jul 2023 15:47:00 -0400
Subject: [PATCH 102/135] update param in test_api

---
 tests/test_api.py | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/tests/test_api.py b/tests/test_api.py
index 03ff06e58..564479a7c 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -401,15 +401,16 @@ def ifPandasDataframe(self, response_dt):

     #@pytest.mark.parametrize("output_format", [None, "excel", "google_sheet", "dataframe (only if getting existing manifests)"])
     @pytest.mark.parametrize("output_format", ["excel"])
-    @pytest.mark.parametrize("data_type", ["Biospecimen", "Patient", "all manifests", ["Biospecimen", "Patient"]])
+    @pytest.mark.parametrize("data_type", ["Biospecimen"])
+    # 
@pytest.mark.parametrize("data_type", ["Biospecimen", "Patient", "all manifests", ["Biospecimen", "Patient"]]) def test_generate_existing_manifest(self, client, data_model_jsonld, data_type, output_format, caplog): # set dataset if data_type == "Patient": - dataset_id = ["syn42171373"] #Mock Patient Manifest folder on synapse + dataset_id = ["syn51730545"] #Mock Patient Manifest folder on synapse elif data_type == "Biospecimen": - dataset_id = ["syn42171508"] #Mock biospecimen manifest folder + dataset_id = ["syn51730547"] #Mock biospecimen manifest folder elif data_type == ["Biospecimen", "Patient"]: - dataset_id = ["syn42171508", "syn42171373"] + dataset_id = ["syn51730547", "syn51730545"] else: dataset_id = None #if "all manifests", dataset id is None From 191ab7e50f9cda036d6c77e0b60f9fbd9468cd9b Mon Sep 17 00:00:00 2001 From: linglp Date: Tue, 11 Jul 2023 15:53:38 -0400 Subject: [PATCH 103/135] deprecate json_schema_filepath param in _get_json_schema --- schematic/manifest/generator.py | 28 +++++++++------------------- 1 file changed, 9 insertions(+), 19 deletions(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index 478d3f16d..bf65def3c 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -346,22 +346,12 @@ def _get_valid_values_from_jsonschema_property(self, prop: dict) -> List[str]: return [] - def _get_json_schema(self, json_schema_filepath: str) -> Dict: + def _get_json_schema(self) -> Dict: """Open json schema as a dictionary. - Args: - json_schema_filepath(str): path to json schema file Returns: Dictionary, containing portions of the json schema """ - if not json_schema_filepath: - # if no json schema is provided; there must be - # schema explorer defined for schema.org schema - # o.w. this will throw an error - # TODO: catch error - json_schema = self.sg.get_json_schema_requirements(self.root, self.title) - else: - with open(json_schema_filepath) as jsonfile: - json_schema = json.load(jsonfile) + json_schema = self.sg.get_json_schema_requirements(self.root, self.title) return json_schema def _get_required_metadata_fields(self, json_schema, fields): @@ -1236,18 +1226,18 @@ def _gather_all_fields(self, fields, json_schema): ) return required_metadata_fields - def get_empty_manifest(self, json_schema_filepath=None, strict=None): + def get_empty_manifest(self, strict: bool=None): """Create an empty manifest using specifications from the json schema. Args: - json_schema_filepath (str): path to json schema file + strict (bool): strictness with which to apply validation rules to google sheets. Returns: manifest_url (str): url of the google sheet manifest. TODO: Refactor to not be dependent on GS. 
""" spreadsheet_id = self._create_empty_manifest_spreadsheet(self.title) - json_schema = self._get_json_schema(json_schema_filepath) + json_schema = self._get_json_schema() required_metadata_fields = self._gather_all_fields( json_schema["properties"].keys(), json_schema @@ -1378,7 +1368,7 @@ def get_manifest_with_annotations( self.additional_metadata = annotations_dict # Generate empty manifest using `additional_metadata` - manifest_url = self.get_empty_manifest(strict) + manifest_url = self.get_empty_manifest(strict=strict) manifest_df = self.get_dataframe_by_url(manifest_url) # Annotations clashing with manifest attributes are skipped @@ -1489,7 +1479,7 @@ def get_manifest( # Handle case when no dataset ID is provided if not dataset_id: - manifest_url = self.get_empty_manifest(json_schema_filepath=json_schema, strict=strict) + manifest_url = self.get_empty_manifest(strict=strict) # if output_form parameter is set to "excel", return an excel spreadsheet if output_format == "excel": @@ -1514,7 +1504,7 @@ def get_manifest( manifest_record = store.updateDatasetManifestFiles(self.sg, datasetId = dataset_id, store = False) # get URL of an empty manifest file created based on schema component - empty_manifest_url = self.get_empty_manifest(strict) + empty_manifest_url = self.get_empty_manifest(strict=strict) # Populate empty template with existing manifest if manifest_record: @@ -1545,7 +1535,7 @@ def get_manifest( # if there are no files with annotations just generate an empty manifest if annotations.empty: - manifest_url = self.get_empty_manifest(strict) + manifest_url = self.get_empty_manifest(strict=strict) manifest_df = self.get_dataframe_by_url(manifest_url) else: # Subset columns if no interested in user-defined annotations and there are files present From 14940103b4907bbb608aa9455fbdc1d78bf2078a Mon Sep 17 00:00:00 2001 From: linglp Date: Tue, 11 Jul 2023 16:38:28 -0400 Subject: [PATCH 104/135] remove changes related to get_empty_manifest --- schematic/manifest/generator.py | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index bf65def3c..c7d7f9fbe 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -346,12 +346,22 @@ def _get_valid_values_from_jsonschema_property(self, prop: dict) -> List[str]: return [] - def _get_json_schema(self) -> Dict: + def _get_json_schema(self, json_schema_filepath: str) -> Dict: """Open json schema as a dictionary. + Args: + json_schema_filepath(str): path to json schema file Returns: Dictionary, containing portions of the json schema """ - json_schema = self.sg.get_json_schema_requirements(self.root, self.title) + if not json_schema_filepath: + # if no json schema is provided; there must be + # schema explorer defined for schema.org schema + # o.w. this will throw an error + # TODO: catch error + json_schema = self.sg.get_json_schema_requirements(self.root, self.title) + else: + with open(json_schema_filepath) as jsonfile: + json_schema = json.load(jsonfile) return json_schema def _get_required_metadata_fields(self, json_schema, fields): @@ -1226,18 +1236,19 @@ def _gather_all_fields(self, fields, json_schema): ) return required_metadata_fields - def get_empty_manifest(self, strict: bool=None): + def get_empty_manifest(self, strict: bool=None, json_schema_filepath: str=None,): """Create an empty manifest using specifications from the json schema. 
Args: strict (bool): strictness with which to apply validation rules to google sheets. + json_schema_filepath (str): path to json schema file Returns: manifest_url (str): url of the google sheet manifest. TODO: Refactor to not be dependent on GS. """ spreadsheet_id = self._create_empty_manifest_spreadsheet(self.title) - json_schema = self._get_json_schema() + json_schema = self._get_json_schema(json_schema_filepath=json_schema_filepath) required_metadata_fields = self._gather_all_fields( json_schema["properties"].keys(), json_schema From 88d31e55e972e37c1e58fbce66e8bdaab3ec3cd3 Mon Sep 17 00:00:00 2001 From: linglp Date: Tue, 11 Jul 2023 16:52:53 -0400 Subject: [PATCH 105/135] revert --- schematic/manifest/generator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index c7d7f9fbe..13ebc0681 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -1490,7 +1490,7 @@ def get_manifest( # Handle case when no dataset ID is provided if not dataset_id: - manifest_url = self.get_empty_manifest(strict=strict) + manifest_url = self.get_empty_manifest(json_schema_filepath=json_schema, strict=strict) # if output_form parameter is set to "excel", return an excel spreadsheet if output_format == "excel": From 9bd9ad1c92bf451545d8da265c9cebec491c256e Mon Sep 17 00:00:00 2001 From: linglp Date: Tue, 11 Jul 2023 17:01:40 -0400 Subject: [PATCH 106/135] use all parameters --- tests/test_api.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/test_api.py b/tests/test_api.py index 564479a7c..ea597a9ce 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -401,8 +401,7 @@ def ifPandasDataframe(self, response_dt): #@pytest.mark.parametrize("output_format", [None, "excel", "google_sheet", "dataframe (only if getting existing manifests)"]) @pytest.mark.parametrize("output_format", ["excel"]) - @pytest.mark.parametrize("data_type", ["Biospecimen"]) - # @pytest.mark.parametrize("data_type", ["Biospecimen", "Patient", "all manifests", ["Biospecimen", "Patient"]]) + @pytest.mark.parametrize("data_type", ["Biospecimen", "Patient", "all manifests", ["Biospecimen", "Patient"]]) def test_generate_existing_manifest(self, client, data_model_jsonld, data_type, output_format, caplog): # set dataset if data_type == "Patient": From 4a5f928e0e9e6c604e984e01c44aa1612e315ec1 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Tue, 11 Jul 2023 14:17:30 -0700 Subject: [PATCH 107/135] update Black to `23.7.0` --- poetry.lock | 1167 ++++++++++++++++++++++++++---------------------- pyproject.toml | 2 +- 2 files changed, 640 insertions(+), 529 deletions(-) diff --git a/poetry.lock b/poetry.lock index b52c2d473..f7a0cbd50 100644 --- a/poetry.lock +++ b/poetry.lock @@ -27,7 +27,7 @@ dev = ["black", "docutils", "flake8", "ipython", "m2r", "mistune (<2.0.0)", "pyt [[package]] name = "anyio" -version = "3.7.0" +version = "3.7.1" description = "High level compatibility layer for multiple asynchronous event loop implementations" category = "main" optional = false @@ -39,7 +39,7 @@ idna = ">=2.8" sniffio = ">=1.1" [package.extras] -doc = ["Sphinx (>=6.1.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme", "sphinxcontrib-jquery"] +doc = ["Sphinx", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-jquery"] test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", 
"psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] trio = ["trio (<0.22)"] @@ -95,7 +95,7 @@ python-dateutil = ">=2.7.0" [[package]] name = "astroid" -version = "2.15.5" +version = "2.15.6" description = "An abstract syntax tree for Python with inference support." category = "dev" optional = false @@ -168,18 +168,19 @@ lxml = ["lxml"] [[package]] name = "black" -version = "22.12.0" +version = "23.7.0" description = "The uncompromising code formatter." category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" [package.dependencies] click = ">=8.0.0" mypy-extensions = ">=0.4.3" +packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} [package.extras] @@ -232,7 +233,7 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.1.0" +version = "3.2.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false @@ -240,7 +241,7 @@ python-versions = ">=3.7.0" [[package]] name = "click" -version = "8.1.3" +version = "8.1.4" description = "Composable command line interface toolkit" category = "main" optional = false @@ -339,7 +340,7 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "41.0.0" +version = "41.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." category = "main" optional = false @@ -452,7 +453,7 @@ python-versions = ">=3.6" [[package]] name = "exceptiongroup" -version = "1.1.1" +version = "1.1.2" description = "Backport of PEP 654 (exception groups)" category = "main" optional = false @@ -537,41 +538,41 @@ python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" [[package]] name = "google-api-core" -version = "2.11.0" +version = "2.11.1" description = "Google API client core library" category = "main" optional = false python-versions = ">=3.7" [package.dependencies] -google-auth = ">=2.14.1,<3.0dev" -googleapis-common-protos = ">=1.56.2,<2.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" -requests = ">=2.18.0,<3.0.0dev" +google-auth = ">=2.14.1,<3.0.dev0" +googleapis-common-protos = ">=1.56.2,<2.0.dev0" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" +requests = ">=2.18.0,<3.0.0.dev0" [package.extras] -grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0dev)", "grpcio-status (>=1.49.1,<2.0dev)"] -grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"] -grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] +grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -version = "2.88.0" +version = "2.93.0" description = "Google API Client Library for Python" category = "main" optional = false python-versions = ">=3.7" [package.dependencies] -google-api-core = 
">=1.31.5,<2.0.0 || >2.3.0,<3.0.0dev" -google-auth = ">=1.19.0,<3.0.0dev" +google-api-core = ">=1.31.5,<2.0.0 || >2.3.0,<3.0.0.dev0" +google-auth = ">=1.19.0,<3.0.0.dev0" google-auth-httplib2 = ">=0.1.0" -httplib2 = ">=0.15.0,<1dev" +httplib2 = ">=0.15.0,<1.dev0" uritemplate = ">=3.0.1,<5" [[package]] name = "google-auth" -version = "2.19.0" +version = "2.22.0" description = "Google Authentication Library" category = "main" optional = false @@ -585,11 +586,11 @@ six = ">=1.9.0" urllib3 = "<2.0" [package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "requests (>=2.20.0,<3.0.0dev)"] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] -requests = ["requests (>=2.20.0,<3.0.0dev)"] +requests = ["requests (>=2.20.0,<3.0.0.dev0)"] [[package]] name = "google-auth-httplib2" @@ -621,17 +622,17 @@ tool = ["click (>=6.0.0)"] [[package]] name = "googleapis-common-protos" -version = "1.59.0" +version = "1.59.1" description = "Common protobufs used in Google APIs" category = "main" optional = false python-versions = ">=3.7" [package.dependencies] -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" [package.extras] -grpc = ["grpcio (>=1.44.0,<2.0.0dev)"] +grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] [[package]] name = "graphviz" @@ -812,7 +813,7 @@ tests = ["pytest", "pytest-cov", "pytest-mock"] [[package]] name = "ipykernel" -version = "6.23.1" +version = "6.24.0" description = "IPython Kernel for Jupyter" category = "main" optional = false @@ -842,7 +843,7 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio" [[package]] name = "ipython" -version = "8.13.2" +version = "8.14.0" description = "IPython: Productive Interactive Computing" category = "main" optional = false @@ -886,7 +887,7 @@ python-versions = "*" [[package]] name = "ipywidgets" -version = "8.0.6" +version = "8.0.7" description = "Jupyter interactive widgets" category = "main" optional = false @@ -990,40 +991,42 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonpatch" -version = "1.32" +version = "1.33" description = "Apply JSON-Patches (RFC 6902)" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" [package.dependencies] jsonpointer = ">=1.9" [[package]] name = "jsonpointer" -version = "2.3" +version = "2.4" description = "Identify specific nodes in a JSON document (RFC 6901)" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" [[package]] name = "jsonschema" -version = "4.17.3" +version = "4.18.0" description = "An implementation of JSON Schema validation for Python" category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" [package.dependencies] -attrs = ">=17.4.0" +attrs = ">=22.2.0" fqdn = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} idna = {version = "*", optional = true, markers = "extra == 
\"format-nongpl\""} isoduration = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format-nongpl\""} -pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" +jsonschema-specifications = ">=2023.03.6" +referencing = ">=0.28.4" rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""} +rpds-py = ">=0.7.1" uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} webcolors = {version = ">=1.11", optional = true, markers = "extra == \"format-nongpl\""} @@ -1031,9 +1034,20 @@ webcolors = {version = ">=1.11", optional = true, markers = "extra == \"format-n format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] +[[package]] +name = "jsonschema-specifications" +version = "2023.6.1" +description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +category = "main" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +referencing = ">=0.28.0" + [[package]] name = "jupyter-client" -version = "8.2.0" +version = "8.3.0" description = "Jupyter protocol implementation and client libraries" category = "main" optional = false @@ -1053,7 +1067,7 @@ test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pyt [[package]] name = "jupyter-core" -version = "5.3.0" +version = "5.3.1" description = "Jupyter core package. A base package on which Jupyter projects rely." category = "main" optional = false @@ -1091,7 +1105,7 @@ test = ["click", "coverage", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>= [[package]] name = "jupyter-server" -version = "2.6.0" +version = "2.7.0" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." 
category = "main" optional = false @@ -1120,7 +1134,7 @@ websocket-client = "*" [package.extras] docs = ["ipykernel", "jinja2", "jupyter-client", "jupyter-server", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi (>=0.8.0)", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"] -test = ["ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.4)", "pytest-timeout", "requests"] +test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.4)", "pytest-timeout", "requests"] [[package]] name = "jupyter-server-terminals" @@ -1148,7 +1162,7 @@ python-versions = ">=3.7" [[package]] name = "jupyterlab-widgets" -version = "3.0.7" +version = "3.0.8" description = "Jupyter interactive widgets for JupyterLab" category = "main" optional = false @@ -1249,11 +1263,11 @@ python-versions = ">=3.6" [[package]] name = "mistune" -version = "2.0.5" -description = "A sane Markdown parser with useful plugins and renderers" +version = "3.0.1" +description = "A sane and fast Markdown parser with useful plugins and renderers" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.7" [[package]] name = "mypy" @@ -1334,7 +1348,7 @@ test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>= [[package]] name = "nbconvert" -version = "7.4.0" +version = "7.6.0" description = "Converting Jupyter Notebooks" category = "main" optional = false @@ -1342,21 +1356,21 @@ python-versions = ">=3.7" [package.dependencies] beautifulsoup4 = "*" -bleach = "*" +bleach = "!=5.0.0" defusedxml = "*" importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} jinja2 = ">=3.0" jupyter-core = ">=4.7" jupyterlab-pygments = "*" markupsafe = ">=2.0" -mistune = ">=2.0.3,<3" +mistune = ">=2.0.3,<4" nbclient = ">=0.5.0" -nbformat = ">=5.1" +nbformat = ">=5.7" packaging = "*" pandocfilters = ">=1.4.1" pygments = ">=2.4.1" tinycss2 = "*" -traitlets = ">=5.0" +traitlets = ">=5.1" [package.extras] all = ["nbconvert[docs,qtpdf,serve,test,webpdf]"] @@ -1369,7 +1383,7 @@ webpdf = ["pyppeteer (>=1,<1.1)"] [[package]] name = "nbformat" -version = "5.9.0" +version = "5.9.1" description = "The Jupyter Notebook format" category = "main" optional = false @@ -1455,11 +1469,11 @@ test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync" [[package]] name = "numpy" -version = "1.24.3" +version = "1.25.1" description = "Fundamental package for array computing in Python" category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" [[package]] name = "oauth2client" @@ -1617,23 +1631,23 @@ python-versions = "*" [[package]] name = "platformdirs" -version = "3.5.1" +version = "3.8.1" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.2.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)"] [[package]] name = "pluggy" -version = "1.0.0" +version = "1.2.0" description = "plugin and hook calling mechanisms for python" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.extras] dev = ["pre-commit", "tox"] @@ -1641,7 +1655,7 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "prometheus-client" -version = "0.17.0" +version = "0.17.1" description = "Python client for the Prometheus monitoring system." category = "main" optional = false @@ -1652,7 +1666,7 @@ twisted = ["twisted"] [[package]] name = "prompt-toolkit" -version = "3.0.38" +version = "3.0.39" description = "Library for building powerful interactive command lines in Python" category = "main" optional = false @@ -1663,7 +1677,7 @@ wcwidth = "*" [[package]] name = "protobuf" -version = "4.23.2" +version = "4.23.4" description = "" category = "main" optional = false @@ -1744,7 +1758,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pydantic" -version = "1.10.8" +version = "1.10.11" description = "Data validation and settings management using python type hints" category = "main" optional = false @@ -1831,7 +1845,7 @@ test = ["flaky", "pretend", "pytest (>=3.0.1)"] [[package]] name = "pyparsing" -version = "3.0.9" +version = "3.1.0" description = "pyparsing module - Classes and methods to define and execute parsing grammars" category = "main" optional = false @@ -1840,17 +1854,9 @@ python-versions = ">=3.6.8" [package.extras] diagrams = ["jinja2", "railroad-diagrams"] -[[package]] -name = "pyrsistent" -version = "0.19.3" -description = "Persistent/Functional/Immutable data structures" -category = "main" -optional = false -python-versions = ">=3.7" - [[package]] name = "pytest" -version = "7.3.1" +version = "7.4.0" description = "pytest: simple powerful testing with Python" category = "dev" optional = false @@ -1865,7 +1871,7 @@ pluggy = ">=0.12,<2.0" tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-cov" @@ -1884,7 +1890,7 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale [[package]] name = "pytest-mock" -version = "3.10.0" +version = "3.11.1" description = "Thin-wrapper around the mock package for easier use with pytest" category = "dev" optional = false @@ -1944,11 +1950,11 @@ python-versions = "*" [[package]] name = "pywin32-ctypes" -version = "0.2.0" -description = "" +version = "0.2.2" +description = "A (partial) reimplementation of pywin32 using ctypes/cffi" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6" [[package]] name = "pywinpty" @@ -1995,9 +2001,21 @@ html = ["html5lib 
(>=1.0,<2.0)"] lxml = ["lxml (>=4.3.0,<5.0.0)"] networkx = ["networkx (>=2.0.0,<3.0.0)"] +[[package]] +name = "referencing" +version = "0.29.1" +description = "JSON Referencing + Python" +category = "main" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" + [[package]] name = "regex" -version = "2023.5.5" +version = "2023.6.3" description = "Alternative regular expression module, to replace re." category = "main" optional = false @@ -2055,6 +2073,14 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "rpds-py" +version = "0.8.10" +description = "Python bindings to Rust's persistent data structures (rpds)" +category = "main" +optional = false +python-versions = ">=3.8" + [[package]] name = "rsa" version = "4.9" @@ -2117,18 +2143,18 @@ synapse = ["synapseclient (>=2.7.0,<3.0.0)"] [[package]] name = "scipy" -version = "1.10.1" +version = "1.11.1" description = "Fundamental algorithms for scientific computing in Python" category = "main" optional = false -python-versions = "<3.12,>=3.8" +python-versions = "<3.13,>=3.9" [package.dependencies] -numpy = ">=1.19.5,<1.27.0" +numpy = ">=1.21.6,<1.28.0" [package.extras] -dev = ["click", "doit (>=0.36.0)", "flake8", "mypy", "pycodestyle", "pydevtool", "rich-click", "typing_extensions"] -doc = ["matplotlib (>2)", "numpydoc", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-design (>=0.2.0)"] +dev = ["click", "cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", "rich-click", "ruff", "types-psutil", "typing_extensions"] +doc = ["jupytext", "matplotlib (>2)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-design (>=0.2.0)"] test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] [[package]] @@ -2319,7 +2345,7 @@ test = ["pytest"] [[package]] name = "sqlalchemy" -version = "1.4.48" +version = "1.4.49" description = "Database Abstraction Library" category = "main" optional = false @@ -2403,7 +2429,7 @@ Jinja2 = ">=2.0" [[package]] name = "synapseclient" -version = "2.7.1" +version = "2.7.2" description = "A client for Synapse, a collaborative compute space that allows scientists to share and analyze data together." 
category = "main" optional = false @@ -2415,10 +2441,11 @@ importlib-metadata = "<5.0" keyring = ">=15,<23.5" "keyrings.alt" = {version = "3.1", markers = "sys_platform == \"linux\""} requests = ">=2.22.0,<3.0" +urllib3 = "<2" [package.extras] boto3 = ["boto3 (>=1.7.0,<2.0)"] -docs = ["sphinx (>=3.0,<4.0)", "sphinx-argparse (>=0.2,<0.3)"] +docs = ["sphinx (>=4.0,<5.0)", "sphinx-argparse (>=0.2,<0.3)"] pandas = ["pandas (>=0.25.0,<1.5)"] pysftp = ["pysftp (>=0.2.8,<0.3)"] tests = ["flake8 (>=3.7.0,<4.0)", "pytest (>=5.0.0,<7.0)", "pytest-mock (>=3.0,<4.0)", "pytest-xdist[psutil] (>=2.2,<3.0.0)"] @@ -2578,14 +2605,14 @@ devenv = ["black", "check-manifest", "flake8", "pyroma", "pytest (>=4.3)", "pyte [[package]] name = "uri-template" -version = "1.2.0" +version = "1.3.0" description = "RFC 6570 URI Template Processor" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.extras] -dev = ["flake8 (<4.0.0)", "flake8-annotations", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-noqa", "flake8-requirements", "flake8-type-annotations", "flake8-use-fstring", "mypy", "pep8-naming"] +dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-modern-annotations", "flake8-noqa", "flake8-pyproject", "flake8-requirements", "flake8-typechecking-import", "flake8-use-fstring", "mypy", "pep8-naming", "types-PyYAML"] [[package]] name = "uritemplate" @@ -2660,7 +2687,7 @@ python-versions = "*" [[package]] name = "websocket-client" -version = "1.5.2" +version = "1.6.1" description = "WebSocket client for Python with low level API options" category = "main" optional = false @@ -2684,7 +2711,7 @@ watchdog = ["watchdog"] [[package]] name = "widgetsnbextension" -version = "4.0.7" +version = "4.0.8" description = "Jupyter interactive widgets for Jupyter Notebook" category = "main" optional = false @@ -2700,20 +2727,20 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [[package]] name = "zipp" -version = "3.15.0" +version = "3.16.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] [metadata] lock-version = "1.1" python-versions = ">=3.9.0,<3.11" -content-hash = "b20d2b7dcf507cfe6397de135cbf25eca23c9c6241a5900199e59ae88dfc00d4" +content-hash = "e86937006793a1cc256c37c77eaa05e1fd6d1db6bac6a5b22ceb161e2cf2b578" [metadata.files] alabaster = [ @@ -2725,8 +2752,8 @@ altair = [ {file = "altair-4.2.0.tar.gz", hash = 
"sha256:d87d9372e63b48cd96b2a6415f0cf9457f50162ab79dc7a31cd7e024dd840026"}, ] anyio = [ - {file = "anyio-3.7.0-py3-none-any.whl", hash = "sha256:eddca883c4175f14df8aedce21054bfca3adb70ffe76a9f607aef9d7fa2ea7f0"}, - {file = "anyio-3.7.0.tar.gz", hash = "sha256:275d9973793619a5374e1c89a4f4ad3f4b0a5510a2b5b939444bee8f4c4d37ce"}, + {file = "anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"}, + {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"}, ] appnope = [ {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, @@ -2764,8 +2791,8 @@ arrow = [ {file = "arrow-1.2.3.tar.gz", hash = "sha256:3934b30ca1b9f292376d9db15b19446088d12ec58629bc3f0da28fd55fb633a1"}, ] astroid = [ - {file = "astroid-2.15.5-py3-none-any.whl", hash = "sha256:078e5212f9885fa85fbb0cf0101978a336190aadea6e13305409d099f71b2324"}, - {file = "astroid-2.15.5.tar.gz", hash = "sha256:1039262575027b441137ab4a62a793a9b43defb42c32d5670f38686207cd780f"}, + {file = "astroid-2.15.6-py3-none-any.whl", hash = "sha256:389656ca57b6108f939cf5d2f9a2a825a3be50ba9d589670f393236e0a03b91c"}, + {file = "astroid-2.15.6.tar.gz", hash = "sha256:903f024859b7c7687d7a7f3a3f73b17301f8e42dfd9cc9df9d4418172d3e2dbd"}, ] asttokens = [ {file = "asttokens-2.2.1-py2.py3-none-any.whl", hash = "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"}, @@ -2788,18 +2815,28 @@ beautifulsoup4 = [ {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, ] black = [ - {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, - {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, - {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, - {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, - {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, - {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, - {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, - {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, - {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, - {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, - {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, - {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, + {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = 
"sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"}, + {file = "black-23.7.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f"}, + {file = "black-23.7.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be"}, + {file = "black-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc"}, + {file = "black-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"}, + {file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"}, + {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"}, + {file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3"}, + {file = "black-23.7.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6"}, + {file = "black-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a"}, + {file = "black-23.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"}, + {file = "black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"}, + {file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"}, + {file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"}, + {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"}, ] bleach = [ {file = "bleach-6.0.0-py3-none-any.whl", hash = "sha256:33c16e3353dbd13028ab4799a0f89a83f113405c766e9c122df8a06f5b85b3f4"}, @@ -2880,85 +2917,85 @@ cffi = [ {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, ] charset-normalizer = [ - {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, 
- {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, - {file = 
"charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = 
"sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, - {file = 
"charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, - {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, + {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, + {file = 
"charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, + {file = 
"charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, + {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, ] click = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = 
"sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, + {file = "click-8.1.4-py3-none-any.whl", hash = "sha256:2739815aaa5d2c986a88f1e9230c55e17f0caad3d958a5e13ad0797c166db9e3"}, + {file = "click-8.1.4.tar.gz", hash = "sha256:b97d0c74955da062a7d4ef92fadb583806a585b2ea81958a81bd72726cbb8e37"}, ] click-log = [ {file = "click-log-0.4.0.tar.gz", hash = "sha256:3970f8570ac54491237bcdb3d8ab5e3eef6c057df29f8c3d1151a51a9c23b975"}, @@ -3043,25 +3080,29 @@ coverage = [ {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, ] cryptography = [ - {file = "cryptography-41.0.0-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c5ef25d060c80d6d9f7f9892e1d41bb1c79b78ce74805b8cb4aa373cb7d5ec8"}, - {file = "cryptography-41.0.0-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:8362565b3835ceacf4dc8f3b56471a2289cf51ac80946f9087e66dc283a810e0"}, - {file = "cryptography-41.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3680248309d340fda9611498a5319b0193a8dbdb73586a1acf8109d06f25b92d"}, - {file = "cryptography-41.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84a165379cb9d411d58ed739e4af3396e544eac190805a54ba2e0322feb55c46"}, - {file = "cryptography-41.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:4ab14d567f7bbe7f1cdff1c53d5324ed4d3fc8bd17c481b395db224fb405c237"}, - {file = "cryptography-41.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9f65e842cb02550fac96536edb1d17f24c0a338fd84eaf582be25926e993dde4"}, - {file = "cryptography-41.0.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:b7f2f5c525a642cecad24ee8670443ba27ac1fab81bba4cc24c7b6b41f2d0c75"}, - {file = "cryptography-41.0.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:7d92f0248d38faa411d17f4107fc0bce0c42cae0b0ba5415505df72d751bf62d"}, - {file = "cryptography-41.0.0-cp37-abi3-win32.whl", hash = "sha256:34d405ea69a8b34566ba3dfb0521379b210ea5d560fafedf9f800a9a94a41928"}, - {file = "cryptography-41.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:344c6de9f8bda3c425b3a41b319522ba3208551b70c2ae00099c205f0d9fd3be"}, - {file = "cryptography-41.0.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:88ff107f211ea696455ea8d911389f6d2b276aabf3231bf72c8853d22db755c5"}, - {file = "cryptography-41.0.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b846d59a8d5a9ba87e2c3d757ca019fa576793e8758174d3868aecb88d6fc8eb"}, - {file = "cryptography-41.0.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f5d0bf9b252f30a31664b6f64432b4730bb7038339bd18b1fafe129cfc2be9be"}, - {file = "cryptography-41.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5c1f7293c31ebc72163a9a0df246f890d65f66b4a40d9ec80081969ba8c78cc9"}, - {file = "cryptography-41.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bf8fc66012ca857d62f6a347007e166ed59c0bc150cefa49f28376ebe7d992a2"}, - {file = "cryptography-41.0.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a4fc68d1c5b951cfb72dfd54702afdbbf0fb7acdc9b7dc4301bbf2225a27714d"}, - {file = "cryptography-41.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:14754bcdae909d66ff24b7b5f166d69340ccc6cb15731670435efd5719294895"}, - {file = "cryptography-41.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0ddaee209d1cf1f180f1efa338a68c4621154de0afaef92b89486f5f96047c55"}, - {file = "cryptography-41.0.0.tar.gz", hash = "sha256:6b71f64beeea341c9b4f963b48ee3b62d62d57ba93eb120e1196b31dc1025e78"}, + {file = 
"cryptography-41.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711"}, + {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7"}, + {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d"}, + {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f"}, + {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182"}, + {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83"}, + {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5"}, + {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58"}, + {file = "cryptography-41.0.2-cp37-abi3-win32.whl", hash = "sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76"}, + {file = "cryptography-41.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee"}, + {file = "cryptography-41.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831"}, + {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b"}, + {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa"}, + {file = "cryptography-41.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e"}, + {file = "cryptography-41.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14"}, + {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2"}, + {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f"}, + {file = "cryptography-41.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0"}, + {file = "cryptography-41.0.2.tar.gz", hash = "sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c"}, ] dateparser = [ {file = 
"dateparser-1.1.8-py2.py3-none-any.whl", hash = "sha256:070b29b5bbf4b1ec2cd51c96ea040dc68a614de703910a91ad1abba18f9f379f"}, @@ -3116,8 +3157,8 @@ et-xmlfile = [ {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, ] exceptiongroup = [ - {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"}, - {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"}, + {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, + {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, ] executing = [ {file = "executing-1.2.0-py2.py3-none-any.whl", hash = "sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc"}, @@ -3144,16 +3185,16 @@ fqdn = [ {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, ] google-api-core = [ - {file = "google-api-core-2.11.0.tar.gz", hash = "sha256:4b9bb5d5a380a0befa0573b302651b8a9a89262c1730e37bf423cec511804c22"}, - {file = "google_api_core-2.11.0-py3-none-any.whl", hash = "sha256:ce222e27b0de0d7bc63eb043b956996d6dccab14cc3b690aaea91c9cc99dc16e"}, + {file = "google-api-core-2.11.1.tar.gz", hash = "sha256:25d29e05a0058ed5f19c61c0a78b1b53adea4d9364b464d014fbda941f6d1c9a"}, + {file = "google_api_core-2.11.1-py3-none-any.whl", hash = "sha256:d92a5a92dc36dd4f4b9ee4e55528a90e432b059f93aee6ad857f9de8cc7ae94a"}, ] google-api-python-client = [ - {file = "google-api-python-client-2.88.0.tar.gz", hash = "sha256:37068453f79ea28e5394a8fe20a4ba620594e7f8541068bea2e844dacdcc9d33"}, - {file = "google_api_python_client-2.88.0-py2.py3-none-any.whl", hash = "sha256:d003008400a779524ea21b5a3ddc6fc59327d401fb8c37c466d413694c279cae"}, + {file = "google-api-python-client-2.93.0.tar.gz", hash = "sha256:62ee28e96031a10a1c341f226a75ac6a4f16bdb1d888dc8222b2cdca133d0031"}, + {file = "google_api_python_client-2.93.0-py2.py3-none-any.whl", hash = "sha256:f34abb671afd488bd19d30721ea20fb30d3796ddd825d6f91f26d8c718a9f07d"}, ] google-auth = [ - {file = "google-auth-2.19.0.tar.gz", hash = "sha256:f39d528077ac540793dd3c22a8706178f157642a67d874db25c640b7fead277e"}, - {file = "google_auth-2.19.0-py2.py3-none-any.whl", hash = "sha256:be617bfaf77774008e9d177573f782e109188c8a64ae6e744285df5cea3e7df6"}, + {file = "google-auth-2.22.0.tar.gz", hash = "sha256:164cba9af4e6e4e40c3a4f90a1a6c12ee56f14c0b4868d1ca91b32826ab334ce"}, + {file = "google_auth-2.22.0-py2.py3-none-any.whl", hash = "sha256:d61d1b40897407b574da67da1a833bdc10d5a11642566e506565d1b1a46ba873"}, ] google-auth-httplib2 = [ {file = "google-auth-httplib2-0.1.0.tar.gz", hash = "sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac"}, @@ -3164,8 +3205,8 @@ google-auth-oauthlib = [ {file = "google_auth_oauthlib-0.8.0-py2.py3-none-any.whl", hash = "sha256:40cc612a13c3336d5433e94e2adb42a0c88f6feb6c55769e44500fc70043a576"}, ] googleapis-common-protos = [ - {file = "googleapis-common-protos-1.59.0.tar.gz", hash = "sha256:4168fcb568a826a52f23510412da405abd93f4d23ba544bb68d943b14ba3cb44"}, - {file = "googleapis_common_protos-1.59.0-py2.py3-none-any.whl", hash = "sha256:b287dc48449d1d41af0c69f4ea26242b5ae4c3d7249a38b0984c86a4caffff1f"}, + {file = "googleapis-common-protos-1.59.1.tar.gz", hash = 
"sha256:b35d530fe825fb4227857bc47ad84c33c809ac96f312e13182bdeaa2abe1178a"}, + {file = "googleapis_common_protos-1.59.1-py2.py3-none-any.whl", hash = "sha256:0cbedb6fb68f1c07e18eb4c48256320777707e7d0c55063ae56c15db3224a61e"}, ] graphviz = [ {file = "graphviz-0.20.1-py3-none-any.whl", hash = "sha256:587c58a223b51611c0cf461132da386edd896a029524ca61a1462b880bf97977"}, @@ -3266,20 +3307,20 @@ interrogate = [ {file = "interrogate-1.5.0.tar.gz", hash = "sha256:b6f325f0aa84ac3ac6779d8708264d366102226c5af7d69058cecffcff7a6d6c"}, ] ipykernel = [ - {file = "ipykernel-6.23.1-py3-none-any.whl", hash = "sha256:77aeffab056c21d16f1edccdc9e5ccbf7d96eb401bd6703610a21be8b068aadc"}, - {file = "ipykernel-6.23.1.tar.gz", hash = "sha256:1aba0ae8453e15e9bc6b24e497ef6840114afcdb832ae597f32137fa19d42a6f"}, + {file = "ipykernel-6.24.0-py3-none-any.whl", hash = "sha256:2f5fffc7ad8f1fd5aadb4e171ba9129d9668dbafa374732cf9511ada52d6547f"}, + {file = "ipykernel-6.24.0.tar.gz", hash = "sha256:29cea0a716b1176d002a61d0b0c851f34536495bc4ef7dd0222c88b41b816123"}, ] ipython = [ - {file = "ipython-8.13.2-py3-none-any.whl", hash = "sha256:ffca270240fbd21b06b2974e14a86494d6d29290184e788275f55e0b55914926"}, - {file = "ipython-8.13.2.tar.gz", hash = "sha256:7dff3fad32b97f6488e02f87b970f309d082f758d7b7fc252e3b19ee0e432dbb"}, + {file = "ipython-8.14.0-py3-none-any.whl", hash = "sha256:248aca623f5c99a6635bc3857677b7320b9b8039f99f070ee0d20a5ca5a8e6bf"}, + {file = "ipython-8.14.0.tar.gz", hash = "sha256:1d197b907b6ba441b692c48cf2a3a2de280dc0ac91a3405b39349a50272ca0a1"}, ] ipython-genutils = [ {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, ] ipywidgets = [ - {file = "ipywidgets-8.0.6-py3-none-any.whl", hash = "sha256:a60bf8d2528997e05ac83fd19ea2fbe65f2e79fbe1b2b35779bdfc46c2941dcc"}, - {file = "ipywidgets-8.0.6.tar.gz", hash = "sha256:de7d779f2045d60de9f6c25f653fdae2dba57898e6a1284494b3ba20b6893bb8"}, + {file = "ipywidgets-8.0.7-py3-none-any.whl", hash = "sha256:e0aed0c95a1e55b6a123f64305245578bdc09e52965a34941c2b6a578b8c64a0"}, + {file = "ipywidgets-8.0.7.tar.gz", hash = "sha256:50ace0a8886e9a0d68b980db82f94c25d55d21ff2340ed36f802dd9365e94acf"}, ] isodate = [ {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, @@ -3310,32 +3351,36 @@ jinja2 = [ {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, ] jsonpatch = [ - {file = "jsonpatch-1.32-py2.py3-none-any.whl", hash = "sha256:26ac385719ac9f54df8a2f0827bb8253aa3ea8ab7b3368457bcdb8c14595a397"}, - {file = "jsonpatch-1.32.tar.gz", hash = "sha256:b6ddfe6c3db30d81a96aaeceb6baf916094ffa23d7dd5fa2c13e13f8b6e600c2"}, + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, ] jsonpointer = [ - {file = "jsonpointer-2.3-py2.py3-none-any.whl", hash = "sha256:51801e558539b4e9cd268638c078c6c5746c9ac96bc38152d443400e4f3793e9"}, - {file = "jsonpointer-2.3.tar.gz", hash = "sha256:97cba51526c829282218feb99dab1b1e6bdf8efd1c43dc9d57be093c0d69c99a"}, + {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = 
"sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, ] jsonschema = [ - {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, - {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, + {file = "jsonschema-4.18.0-py3-none-any.whl", hash = "sha256:b508dd6142bd03f4c3670534c80af68cd7bbff9ea830b9cf2625d4a3c49ddf60"}, + {file = "jsonschema-4.18.0.tar.gz", hash = "sha256:8caf5b57a990a98e9b39832ef3cb35c176fe331414252b6e1b26fd5866f891a4"}, +] +jsonschema-specifications = [ + {file = "jsonschema_specifications-2023.6.1-py3-none-any.whl", hash = "sha256:3d2b82663aff01815f744bb5c7887e2121a63399b49b104a3c96145474d091d7"}, + {file = "jsonschema_specifications-2023.6.1.tar.gz", hash = "sha256:ca1c4dd059a9e7b34101cf5b3ab7ff1d18b139f35950d598d629837ef66e8f28"}, ] jupyter-client = [ - {file = "jupyter_client-8.2.0-py3-none-any.whl", hash = "sha256:b18219aa695d39e2ad570533e0d71fb7881d35a873051054a84ee2a17c4b7389"}, - {file = "jupyter_client-8.2.0.tar.gz", hash = "sha256:9fe233834edd0e6c0aa5f05ca2ab4bdea1842bfd2d8a932878212fc5301ddaf0"}, + {file = "jupyter_client-8.3.0-py3-none-any.whl", hash = "sha256:7441af0c0672edc5d28035e92ba5e32fadcfa8a4e608a434c228836a89df6158"}, + {file = "jupyter_client-8.3.0.tar.gz", hash = "sha256:3af69921fe99617be1670399a0b857ad67275eefcfa291e2c81a160b7b650f5f"}, ] jupyter-core = [ - {file = "jupyter_core-5.3.0-py3-none-any.whl", hash = "sha256:d4201af84559bc8c70cead287e1ab94aeef3c512848dde077b7684b54d67730d"}, - {file = "jupyter_core-5.3.0.tar.gz", hash = "sha256:6db75be0c83edbf1b7c9f91ec266a9a24ef945da630f3120e1a0046dc13713fc"}, + {file = "jupyter_core-5.3.1-py3-none-any.whl", hash = "sha256:ae9036db959a71ec1cac33081eeb040a79e681f08ab68b0883e9a676c7a90dce"}, + {file = "jupyter_core-5.3.1.tar.gz", hash = "sha256:5ba5c7938a7f97a6b0481463f7ff0dbac7c15ba48cf46fa4035ca6e838aa1aba"}, ] jupyter-events = [ {file = "jupyter_events-0.6.3-py3-none-any.whl", hash = "sha256:57a2749f87ba387cd1bfd9b22a0875b889237dbf2edc2121ebb22bde47036c17"}, {file = "jupyter_events-0.6.3.tar.gz", hash = "sha256:9a6e9995f75d1b7146b436ea24d696ce3a35bfa8bfe45e0c33c334c79464d0b3"}, ] jupyter-server = [ - {file = "jupyter_server-2.6.0-py3-none-any.whl", hash = "sha256:19525a1515b5999618a91b3e99ec9f6869aa8c5ba73e0b6279fcda918b54ba36"}, - {file = "jupyter_server-2.6.0.tar.gz", hash = "sha256:ae4af349f030ed08dd78cb7ac1a03a92d886000380c9ea6283f3c542a81f4b06"}, + {file = "jupyter_server-2.7.0-py3-none-any.whl", hash = "sha256:6a77912aff643e53fa14bdb2634884b52b784a4be77ce8e93f7283faed0f0849"}, + {file = "jupyter_server-2.7.0.tar.gz", hash = "sha256:36da0a266d31a41ac335a366c88933c17dfa5bb817a48f5c02c16d303bc9477f"}, ] jupyter-server-terminals = [ {file = "jupyter_server_terminals-0.4.4-py3-none-any.whl", hash = "sha256:75779164661cec02a8758a5311e18bb8eb70c4e86c6b699403100f1585a12a36"}, @@ -3346,8 +3391,8 @@ jupyterlab-pygments = [ {file = "jupyterlab_pygments-0.2.2.tar.gz", hash = "sha256:7405d7fde60819d905a9fa8ce89e4cd830e318cdad22a0030f7a901da705585d"}, ] jupyterlab-widgets = [ - {file = "jupyterlab_widgets-3.0.7-py3-none-any.whl", hash = "sha256:c73f8370338ec19f1bec47254752d6505b03601cbd5a67e6a0b184532f73a459"}, - {file = "jupyterlab_widgets-3.0.7.tar.gz", hash = "sha256:c3a50ed5bf528a0c7a869096503af54702f86dda1db469aee1c92dc0c01b43ca"}, + 
{file = "jupyterlab_widgets-3.0.8-py3-none-any.whl", hash = "sha256:4715912d6ceab839c9db35953c764b3214ebbc9161c809f6e0510168845dfdf5"}, + {file = "jupyterlab_widgets-3.0.8.tar.gz", hash = "sha256:d428ab97b8d87cc7c54cbf37644d6e0f0e662f23876e05fa460a73ec3257252a"}, ] keyring = [ {file = "keyring-23.4.1-py3-none-any.whl", hash = "sha256:17e49fb0d6883c2b4445359434dba95aad84aabb29bbff044ad0ed7100232eca"}, @@ -3454,8 +3499,8 @@ mccabe = [ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] mistune = [ - {file = "mistune-2.0.5-py2.py3-none-any.whl", hash = "sha256:bad7f5d431886fcbaf5f758118ecff70d31f75231b34024a1341120340a65ce8"}, - {file = "mistune-2.0.5.tar.gz", hash = "sha256:0246113cb2492db875c6be56974a7c893333bf26cd92891c85f63151cee09d34"}, + {file = "mistune-3.0.1-py3-none-any.whl", hash = "sha256:b9b3e438efbb57c62b5beb5e134dab664800bdf1284a7ee09e8b12b13eb1aac6"}, + {file = "mistune-3.0.1.tar.gz", hash = "sha256:e912116c13aa0944f9dc530db38eb88f6a77087ab128f49f84a48f4c05ea163c"}, ] mypy = [ {file = "mypy-0.982-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5085e6f442003fa915aeb0a46d4da58128da69325d8213b4b35cc7054090aed5"}, @@ -3496,12 +3541,12 @@ nbclient = [ {file = "nbclient-0.8.0.tar.gz", hash = "sha256:f9b179cd4b2d7bca965f900a2ebf0db4a12ebff2f36a711cb66861e4ae158e55"}, ] nbconvert = [ - {file = "nbconvert-7.4.0-py3-none-any.whl", hash = "sha256:af5064a9db524f9f12f4e8be7f0799524bd5b14c1adea37e34e83c95127cc818"}, - {file = "nbconvert-7.4.0.tar.gz", hash = "sha256:51b6c77b507b177b73f6729dba15676e42c4e92bcb00edc8cc982ee72e7d89d7"}, + {file = "nbconvert-7.6.0-py3-none-any.whl", hash = "sha256:5a445c6794b0791984bc5436608fe2c066cb43c83920c7bc91bde3b765e9a264"}, + {file = "nbconvert-7.6.0.tar.gz", hash = "sha256:24fcf27efdef2b51d7f090cc5ce5a9b178766a55be513c4ebab08c91899ab550"}, ] nbformat = [ - {file = "nbformat-5.9.0-py3-none-any.whl", hash = "sha256:8c8fa16d6d05062c26177754bfbfac22de644888e2ef69d27ad2a334cf2576e5"}, - {file = "nbformat-5.9.0.tar.gz", hash = "sha256:e98ebb6120c3efbafdee2a40af2a140cadee90bb06dd69a2a63d9551fcc7f976"}, + {file = "nbformat-5.9.1-py3-none-any.whl", hash = "sha256:b7968ebf4811178a4108ee837eae1442e3f054132100f0359219e9ed1ce3ca45"}, + {file = "nbformat-5.9.1.tar.gz", hash = "sha256:3a7f52d040639cbd8a3890218c8b0ffb93211588c57446c90095e32ba5881b5d"}, ] nest-asyncio = [ {file = "nest_asyncio-1.5.6-py3-none-any.whl", hash = "sha256:b9a953fb40dceaa587d109609098db21900182b16440652454a146cffb06e8b8"}, @@ -3520,34 +3565,31 @@ notebook-shim = [ {file = "notebook_shim-0.2.3.tar.gz", hash = "sha256:f69388ac283ae008cd506dda10d0288b09a017d822d5e8c7129a152cbd3ce7e9"}, ] numpy = [ - {file = "numpy-1.24.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3c1104d3c036fb81ab923f507536daedc718d0ad5a8707c6061cdfd6d184e570"}, - {file = "numpy-1.24.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:202de8f38fc4a45a3eea4b63e2f376e5f2dc64ef0fa692838e31a808520efaf7"}, - {file = "numpy-1.24.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8535303847b89aa6b0f00aa1dc62867b5a32923e4d1681a35b5eef2d9591a463"}, - {file = "numpy-1.24.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d926b52ba1367f9acb76b0df6ed21f0b16a1ad87c6720a1121674e5cf63e2b6"}, - {file = "numpy-1.24.3-cp310-cp310-win32.whl", hash = "sha256:f21c442fdd2805e91799fbe044a7b999b8571bb0ab0f7850d0cb9641a687092b"}, - {file = "numpy-1.24.3-cp310-cp310-win_amd64.whl", hash = 
"sha256:ab5f23af8c16022663a652d3b25dcdc272ac3f83c3af4c02eb8b824e6b3ab9d7"}, - {file = "numpy-1.24.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9a7721ec204d3a237225db3e194c25268faf92e19338a35f3a224469cb6039a3"}, - {file = "numpy-1.24.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d6cc757de514c00b24ae8cf5c876af2a7c3df189028d68c0cb4eaa9cd5afc2bf"}, - {file = "numpy-1.24.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76e3f4e85fc5d4fd311f6e9b794d0c00e7002ec122be271f2019d63376f1d385"}, - {file = "numpy-1.24.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1d3c026f57ceaad42f8231305d4653d5f05dc6332a730ae5c0bea3513de0950"}, - {file = "numpy-1.24.3-cp311-cp311-win32.whl", hash = "sha256:c91c4afd8abc3908e00a44b2672718905b8611503f7ff87390cc0ac3423fb096"}, - {file = "numpy-1.24.3-cp311-cp311-win_amd64.whl", hash = "sha256:5342cf6aad47943286afa6f1609cad9b4266a05e7f2ec408e2cf7aea7ff69d80"}, - {file = "numpy-1.24.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7776ea65423ca6a15255ba1872d82d207bd1e09f6d0894ee4a64678dd2204078"}, - {file = "numpy-1.24.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ae8d0be48d1b6ed82588934aaaa179875e7dc4f3d84da18d7eae6eb3f06c242c"}, - {file = "numpy-1.24.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecde0f8adef7dfdec993fd54b0f78183051b6580f606111a6d789cd14c61ea0c"}, - {file = "numpy-1.24.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4749e053a29364d3452c034827102ee100986903263e89884922ef01a0a6fd2f"}, - {file = "numpy-1.24.3-cp38-cp38-win32.whl", hash = "sha256:d933fabd8f6a319e8530d0de4fcc2e6a61917e0b0c271fded460032db42a0fe4"}, - {file = "numpy-1.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:56e48aec79ae238f6e4395886b5eaed058abb7231fb3361ddd7bfdf4eed54289"}, - {file = "numpy-1.24.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4719d5aefb5189f50887773699eaf94e7d1e02bf36c1a9d353d9f46703758ca4"}, - {file = "numpy-1.24.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ec87a7084caa559c36e0a2309e4ecb1baa03b687201d0a847c8b0ed476a7187"}, - {file = "numpy-1.24.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea8282b9bcfe2b5e7d491d0bf7f3e2da29700cec05b49e64d6246923329f2b02"}, - {file = "numpy-1.24.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210461d87fb02a84ef243cac5e814aad2b7f4be953b32cb53327bb49fd77fbb4"}, - {file = "numpy-1.24.3-cp39-cp39-win32.whl", hash = "sha256:784c6da1a07818491b0ffd63c6bbe5a33deaa0e25a20e1b3ea20cf0e43f8046c"}, - {file = "numpy-1.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:d5036197ecae68d7f491fcdb4df90082b0d4960ca6599ba2659957aafced7c17"}, - {file = "numpy-1.24.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:352ee00c7f8387b44d19f4cada524586f07379c0d49270f87233983bc5087ca0"}, - {file = "numpy-1.24.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7d6acc2e7524c9955e5c903160aa4ea083736fde7e91276b0e5d98e6332812"}, - {file = "numpy-1.24.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:35400e6a8d102fd07c71ed7dcadd9eb62ee9a6e84ec159bd48c28235bbb0f8e4"}, - {file = "numpy-1.24.3.tar.gz", hash = "sha256:ab344f1bf21f140adab8e47fdbc7c35a477dc01408791f8ba00d018dd0bc5155"}, + {file = "numpy-1.25.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:77d339465dff3eb33c701430bcb9c325b60354698340229e1dff97745e6b3efa"}, + {file = "numpy-1.25.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:d736b75c3f2cb96843a5c7f8d8ccc414768d34b0a75f466c05f3a739b406f10b"}, + {file = "numpy-1.25.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a90725800caeaa160732d6b31f3f843ebd45d6b5f3eec9e8cc287e30f2805bf"}, + {file = "numpy-1.25.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c6c9261d21e617c6dc5eacba35cb68ec36bb72adcff0dee63f8fbc899362588"}, + {file = "numpy-1.25.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0def91f8af6ec4bb94c370e38c575855bf1d0be8a8fbfba42ef9c073faf2cf19"}, + {file = "numpy-1.25.1-cp310-cp310-win32.whl", hash = "sha256:fd67b306320dcadea700a8f79b9e671e607f8696e98ec255915c0c6d6b818503"}, + {file = "numpy-1.25.1-cp310-cp310-win_amd64.whl", hash = "sha256:c1516db588987450b85595586605742879e50dcce923e8973f79529651545b57"}, + {file = "numpy-1.25.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6b82655dd8efeea69dbf85d00fca40013d7f503212bc5259056244961268b66e"}, + {file = "numpy-1.25.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e8f6049c4878cb16960fbbfb22105e49d13d752d4d8371b55110941fb3b17800"}, + {file = "numpy-1.25.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41a56b70e8139884eccb2f733c2f7378af06c82304959e174f8e7370af112e09"}, + {file = "numpy-1.25.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5154b1a25ec796b1aee12ac1b22f414f94752c5f94832f14d8d6c9ac40bcca6"}, + {file = "numpy-1.25.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38eb6548bb91c421261b4805dc44def9ca1a6eef6444ce35ad1669c0f1a3fc5d"}, + {file = "numpy-1.25.1-cp311-cp311-win32.whl", hash = "sha256:791f409064d0a69dd20579345d852c59822c6aa087f23b07b1b4e28ff5880fcb"}, + {file = "numpy-1.25.1-cp311-cp311-win_amd64.whl", hash = "sha256:c40571fe966393b212689aa17e32ed905924120737194b5d5c1b20b9ed0fb171"}, + {file = "numpy-1.25.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3d7abcdd85aea3e6cdddb59af2350c7ab1ed764397f8eec97a038ad244d2d105"}, + {file = "numpy-1.25.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1a180429394f81c7933634ae49b37b472d343cccb5bb0c4a575ac8bbc433722f"}, + {file = "numpy-1.25.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d412c1697c3853c6fc3cb9751b4915859c7afe6a277c2bf00acf287d56c4e625"}, + {file = "numpy-1.25.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20e1266411120a4f16fad8efa8e0454d21d00b8c7cee5b5ccad7565d95eb42dd"}, + {file = "numpy-1.25.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f76aebc3358ade9eacf9bc2bb8ae589863a4f911611694103af05346637df1b7"}, + {file = "numpy-1.25.1-cp39-cp39-win32.whl", hash = "sha256:247d3ffdd7775bdf191f848be8d49100495114c82c2bd134e8d5d075fb386a1c"}, + {file = "numpy-1.25.1-cp39-cp39-win_amd64.whl", hash = "sha256:1d5d3c68e443c90b38fdf8ef40e60e2538a27548b39b12b73132456847f4b631"}, + {file = "numpy-1.25.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:35a9527c977b924042170a0887de727cd84ff179e478481404c5dc66b4170009"}, + {file = "numpy-1.25.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d3fe3dd0506a28493d82dc3cf254be8cd0d26f4008a417385cbf1ae95b54004"}, + {file = "numpy-1.25.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:012097b5b0d00a11070e8f2e261128c44157a8689f7dedcf35576e525893f4fe"}, + {file = "numpy-1.25.1.tar.gz", hash = "sha256:9a3a9f3a61480cc086117b426a8bd86869c213fc4072e606f01c4e4b66eb92bf"}, ] oauth2client = [ {file = "oauth2client-4.1.3-py2.py3-none-any.whl", hash = 
"sha256:b8a81cc5d60e2d364f0b1b98f958dbd472887acaf1a5b05e21c28c31a2d6d3ac"}, @@ -3626,35 +3668,35 @@ pickleshare = [ {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, ] platformdirs = [ - {file = "platformdirs-3.5.1-py3-none-any.whl", hash = "sha256:e2378146f1964972c03c085bb5662ae80b2b8c06226c54b2ff4aa9483e8a13a5"}, - {file = "platformdirs-3.5.1.tar.gz", hash = "sha256:412dae91f52a6f84830f39a8078cecd0e866cb72294a5c66808e74d5e88d251f"}, + {file = "platformdirs-3.8.1-py3-none-any.whl", hash = "sha256:cec7b889196b9144d088e4c57d9ceef7374f6c39694ad1577a0aab50d27ea28c"}, + {file = "platformdirs-3.8.1.tar.gz", hash = "sha256:f87ca4fcff7d2b0f81c6a748a77973d7af0f4d526f98f308477c3c436c74d528"}, ] pluggy = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, + {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, + {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, ] prometheus-client = [ - {file = "prometheus_client-0.17.0-py3-none-any.whl", hash = "sha256:a77b708cf083f4d1a3fb3ce5c95b4afa32b9c521ae363354a4a910204ea095ce"}, - {file = "prometheus_client-0.17.0.tar.gz", hash = "sha256:9c3b26f1535945e85b8934fb374678d263137b78ef85f305b1156c7c881cd11b"}, + {file = "prometheus_client-0.17.1-py3-none-any.whl", hash = "sha256:e537f37160f6807b8202a6fc4764cdd19bac5480ddd3e0d463c3002b34462101"}, + {file = "prometheus_client-0.17.1.tar.gz", hash = "sha256:21e674f39831ae3f8acde238afd9a27a37d0d2fb5a28ea094f0ce25d2cbf2091"}, ] prompt-toolkit = [ - {file = "prompt_toolkit-3.0.38-py3-none-any.whl", hash = "sha256:45ea77a2f7c60418850331366c81cf6b5b9cf4c7fd34616f733c5427e6abbb1f"}, - {file = "prompt_toolkit-3.0.38.tar.gz", hash = "sha256:23ac5d50538a9a38c8bde05fecb47d0b403ecd0662857a86f886f798563d5b9b"}, + {file = "prompt_toolkit-3.0.39-py3-none-any.whl", hash = "sha256:9dffbe1d8acf91e3de75f3b544e4842382fc06c6babe903ac9acb74dc6e08d88"}, + {file = "prompt_toolkit-3.0.39.tar.gz", hash = "sha256:04505ade687dc26dc4284b1ad19a83be2f2afe83e7a828ace0c72f3a1df72aac"}, ] protobuf = [ - {file = "protobuf-4.23.2-cp310-abi3-win32.whl", hash = "sha256:384dd44cb4c43f2ccddd3645389a23ae61aeb8cfa15ca3a0f60e7c3ea09b28b3"}, - {file = "protobuf-4.23.2-cp310-abi3-win_amd64.whl", hash = "sha256:09310bce43353b46d73ba7e3bca78273b9bc50349509b9698e64d288c6372c2a"}, - {file = "protobuf-4.23.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:b2cfab63a230b39ae603834718db74ac11e52bccaaf19bf20f5cce1a84cf76df"}, - {file = "protobuf-4.23.2-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:c52cfcbfba8eb791255edd675c1fe6056f723bf832fa67f0442218f8817c076e"}, - {file = "protobuf-4.23.2-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:86df87016d290143c7ce3be3ad52d055714ebaebb57cc659c387e76cfacd81aa"}, - {file = "protobuf-4.23.2-cp37-cp37m-win32.whl", hash = "sha256:281342ea5eb631c86697e1e048cb7e73b8a4e85f3299a128c116f05f5c668f8f"}, - {file = "protobuf-4.23.2-cp37-cp37m-win_amd64.whl", hash = "sha256:ce744938406de1e64b91410f473736e815f28c3b71201302612a68bf01517fea"}, - {file = "protobuf-4.23.2-cp38-cp38-win32.whl", hash = "sha256:6c081863c379bb1741be8f8193e893511312b1d7329b4a75445d1ea9955be69e"}, - {file = "protobuf-4.23.2-cp38-cp38-win_amd64.whl", hash = 
"sha256:25e3370eda26469b58b602e29dff069cfaae8eaa0ef4550039cc5ef8dc004511"}, - {file = "protobuf-4.23.2-cp39-cp39-win32.whl", hash = "sha256:efabbbbac1ab519a514579ba9ec52f006c28ae19d97915951f69fa70da2c9e91"}, - {file = "protobuf-4.23.2-cp39-cp39-win_amd64.whl", hash = "sha256:54a533b971288af3b9926e53850c7eb186886c0c84e61daa8444385a4720297f"}, - {file = "protobuf-4.23.2-py3-none-any.whl", hash = "sha256:8da6070310d634c99c0db7df48f10da495cc283fd9e9234877f0cd182d43ab7f"}, - {file = "protobuf-4.23.2.tar.gz", hash = "sha256:20874e7ca4436f683b64ebdbee2129a5a2c301579a67d1a7dda2cdf62fb7f5f7"}, + {file = "protobuf-4.23.4-cp310-abi3-win32.whl", hash = "sha256:5fea3c64d41ea5ecf5697b83e41d09b9589e6f20b677ab3c48e5f242d9b7897b"}, + {file = "protobuf-4.23.4-cp310-abi3-win_amd64.whl", hash = "sha256:7b19b6266d92ca6a2a87effa88ecc4af73ebc5cfde194dc737cf8ef23a9a3b12"}, + {file = "protobuf-4.23.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8547bf44fe8cec3c69e3042f5c4fb3e36eb2a7a013bb0a44c018fc1e427aafbd"}, + {file = "protobuf-4.23.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:fee88269a090ada09ca63551bf2f573eb2424035bcf2cb1b121895b01a46594a"}, + {file = "protobuf-4.23.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:effeac51ab79332d44fba74660d40ae79985901ac21bca408f8dc335a81aa597"}, + {file = "protobuf-4.23.4-cp37-cp37m-win32.whl", hash = "sha256:c3e0939433c40796ca4cfc0fac08af50b00eb66a40bbbc5dee711998fb0bbc1e"}, + {file = "protobuf-4.23.4-cp37-cp37m-win_amd64.whl", hash = "sha256:9053df6df8e5a76c84339ee4a9f5a2661ceee4a0dab019e8663c50ba324208b0"}, + {file = "protobuf-4.23.4-cp38-cp38-win32.whl", hash = "sha256:e1c915778d8ced71e26fcf43c0866d7499891bca14c4368448a82edc61fdbc70"}, + {file = "protobuf-4.23.4-cp38-cp38-win_amd64.whl", hash = "sha256:351cc90f7d10839c480aeb9b870a211e322bf05f6ab3f55fcb2f51331f80a7d2"}, + {file = "protobuf-4.23.4-cp39-cp39-win32.whl", hash = "sha256:6dd9b9940e3f17077e820b75851126615ee38643c2c5332aa7a359988820c720"}, + {file = "protobuf-4.23.4-cp39-cp39-win_amd64.whl", hash = "sha256:0a5759f5696895de8cc913f084e27fd4125e8fb0914bb729a17816a33819f474"}, + {file = "protobuf-4.23.4-py3-none-any.whl", hash = "sha256:e9d0be5bf34b275b9f87ba7407796556abeeba635455d036c7351f7c183ef8ff"}, + {file = "protobuf-4.23.4.tar.gz", hash = "sha256:ccd9430c0719dce806b93f89c91de7977304729e55377f872a92465d548329a9"}, ] psutil = [ {file = "psutil-5.9.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f"}, @@ -3701,42 +3743,42 @@ pycparser = [ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] pydantic = [ - {file = "pydantic-1.10.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1243d28e9b05003a89d72e7915fdb26ffd1d39bdd39b00b7dbe4afae4b557f9d"}, - {file = "pydantic-1.10.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0ab53b609c11dfc0c060d94335993cc2b95b2150e25583bec37a49b2d6c6c3f"}, - {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9613fadad06b4f3bc5db2653ce2f22e0de84a7c6c293909b48f6ed37b83c61f"}, - {file = "pydantic-1.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df7800cb1984d8f6e249351139667a8c50a379009271ee6236138a22a0c0f319"}, - {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0c6fafa0965b539d7aab0a673a046466d23b86e4b0e8019d25fd53f4df62c277"}, - {file = 
"pydantic-1.10.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e82d4566fcd527eae8b244fa952d99f2ca3172b7e97add0b43e2d97ee77f81ab"}, - {file = "pydantic-1.10.8-cp310-cp310-win_amd64.whl", hash = "sha256:ab523c31e22943713d80d8d342d23b6f6ac4b792a1e54064a8d0cf78fd64e800"}, - {file = "pydantic-1.10.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:666bdf6066bf6dbc107b30d034615d2627e2121506c555f73f90b54a463d1f33"}, - {file = "pydantic-1.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:35db5301b82e8661fa9c505c800d0990bc14e9f36f98932bb1d248c0ac5cada5"}, - {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90c1e29f447557e9e26afb1c4dbf8768a10cc676e3781b6a577841ade126b85"}, - {file = "pydantic-1.10.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93e766b4a8226e0708ef243e843105bf124e21331694367f95f4e3b4a92bbb3f"}, - {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:88f195f582851e8db960b4a94c3e3ad25692c1c1539e2552f3df7a9e972ef60e"}, - {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:34d327c81e68a1ecb52fe9c8d50c8a9b3e90d3c8ad991bfc8f953fb477d42fb4"}, - {file = "pydantic-1.10.8-cp311-cp311-win_amd64.whl", hash = "sha256:d532bf00f381bd6bc62cabc7d1372096b75a33bc197a312b03f5838b4fb84edd"}, - {file = "pydantic-1.10.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7d5b8641c24886d764a74ec541d2fc2c7fb19f6da2a4001e6d580ba4a38f7878"}, - {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b1f6cb446470b7ddf86c2e57cd119a24959af2b01e552f60705910663af09a4"}, - {file = "pydantic-1.10.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c33b60054b2136aef8cf190cd4c52a3daa20b2263917c49adad20eaf381e823b"}, - {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1952526ba40b220b912cdc43c1c32bcf4a58e3f192fa313ee665916b26befb68"}, - {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bb14388ec45a7a0dc429e87def6396f9e73c8c77818c927b6a60706603d5f2ea"}, - {file = "pydantic-1.10.8-cp37-cp37m-win_amd64.whl", hash = "sha256:16f8c3e33af1e9bb16c7a91fc7d5fa9fe27298e9f299cff6cb744d89d573d62c"}, - {file = "pydantic-1.10.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ced8375969673929809d7f36ad322934c35de4af3b5e5b09ec967c21f9f7887"}, - {file = "pydantic-1.10.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:93e6bcfccbd831894a6a434b0aeb1947f9e70b7468f274154d03d71fabb1d7c6"}, - {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:191ba419b605f897ede9892f6c56fb182f40a15d309ef0142212200a10af4c18"}, - {file = "pydantic-1.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:052d8654cb65174d6f9490cc9b9a200083a82cf5c3c5d3985db765757eb3b375"}, - {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ceb6a23bf1ba4b837d0cfe378329ad3f351b5897c8d4914ce95b85fba96da5a1"}, - {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f2e754d5566f050954727c77f094e01793bcb5725b663bf628fa6743a5a9108"}, - {file = "pydantic-1.10.8-cp38-cp38-win_amd64.whl", hash = "sha256:6a82d6cda82258efca32b40040228ecf43a548671cb174a1e81477195ed3ed56"}, - {file = "pydantic-1.10.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:3e59417ba8a17265e632af99cc5f35ec309de5980c440c255ab1ca3ae96a3e0e"}, - {file = "pydantic-1.10.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:84d80219c3f8d4cad44575e18404099c76851bc924ce5ab1c4c8bb5e2a2227d0"}, - {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e4148e635994d57d834be1182a44bdb07dd867fa3c2d1b37002000646cc5459"}, - {file = "pydantic-1.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12f7b0bf8553e310e530e9f3a2f5734c68699f42218bf3568ef49cd9b0e44df4"}, - {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42aa0c4b5c3025483240a25b09f3c09a189481ddda2ea3a831a9d25f444e03c1"}, - {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17aef11cc1b997f9d574b91909fed40761e13fac438d72b81f902226a69dac01"}, - {file = "pydantic-1.10.8-cp39-cp39-win_amd64.whl", hash = "sha256:66a703d1983c675a6e0fed8953b0971c44dba48a929a2000a493c3772eb61a5a"}, - {file = "pydantic-1.10.8-py3-none-any.whl", hash = "sha256:7456eb22ed9aaa24ff3e7b4757da20d9e5ce2a81018c1b3ebd81a0b88a18f3b2"}, - {file = "pydantic-1.10.8.tar.gz", hash = "sha256:1410275520dfa70effadf4c21811d755e7ef9bb1f1d077a21958153a92c8d9ca"}, + {file = "pydantic-1.10.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ff44c5e89315b15ff1f7fdaf9853770b810936d6b01a7bcecaa227d2f8fe444f"}, + {file = "pydantic-1.10.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a6c098d4ab5e2d5b3984d3cb2527e2d6099d3de85630c8934efcfdc348a9760e"}, + {file = "pydantic-1.10.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16928fdc9cb273c6af00d9d5045434c39afba5f42325fb990add2c241402d151"}, + {file = "pydantic-1.10.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0588788a9a85f3e5e9ebca14211a496409cb3deca5b6971ff37c556d581854e7"}, + {file = "pydantic-1.10.11-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e9baf78b31da2dc3d3f346ef18e58ec5f12f5aaa17ac517e2ffd026a92a87588"}, + {file = "pydantic-1.10.11-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:373c0840f5c2b5b1ccadd9286782852b901055998136287828731868027a724f"}, + {file = "pydantic-1.10.11-cp310-cp310-win_amd64.whl", hash = "sha256:c3339a46bbe6013ef7bdd2844679bfe500347ac5742cd4019a88312aa58a9847"}, + {file = "pydantic-1.10.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:08a6c32e1c3809fbc49debb96bf833164f3438b3696abf0fbeceb417d123e6eb"}, + {file = "pydantic-1.10.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a451ccab49971af043ec4e0d207cbc8cbe53dbf148ef9f19599024076fe9c25b"}, + {file = "pydantic-1.10.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b02d24f7b2b365fed586ed73582c20f353a4c50e4be9ba2c57ab96f8091ddae"}, + {file = "pydantic-1.10.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f34739a89260dfa420aa3cbd069fbcc794b25bbe5c0a214f8fb29e363484b66"}, + {file = "pydantic-1.10.11-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e297897eb4bebde985f72a46a7552a7556a3dd11e7f76acda0c1093e3dbcf216"}, + {file = "pydantic-1.10.11-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d185819a7a059550ecb85d5134e7d40f2565f3dd94cfd870132c5f91a89cf58c"}, + {file = "pydantic-1.10.11-cp311-cp311-win_amd64.whl", hash = "sha256:4400015f15c9b464c9db2d5d951b6a780102cfa5870f2c036d37c23b56f7fc1b"}, + {file = "pydantic-1.10.11-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:2417de68290434461a266271fc57274a138510dca19982336639484c73a07af6"}, + {file = "pydantic-1.10.11-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:331c031ba1554b974c98679bd0780d89670d6fd6f53f5d70b10bdc9addee1713"}, + {file = "pydantic-1.10.11-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8268a735a14c308923e8958363e3a3404f6834bb98c11f5ab43251a4e410170c"}, + {file = "pydantic-1.10.11-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:44e51ba599c3ef227e168424e220cd3e544288c57829520dc90ea9cb190c3248"}, + {file = "pydantic-1.10.11-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d7781f1d13b19700b7949c5a639c764a077cbbdd4322ed505b449d3ca8edcb36"}, + {file = "pydantic-1.10.11-cp37-cp37m-win_amd64.whl", hash = "sha256:7522a7666157aa22b812ce14c827574ddccc94f361237ca6ea8bb0d5c38f1629"}, + {file = "pydantic-1.10.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc64eab9b19cd794a380179ac0e6752335e9555d214cfcb755820333c0784cb3"}, + {file = "pydantic-1.10.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8dc77064471780262b6a68fe67e013298d130414d5aaf9b562c33987dbd2cf4f"}, + {file = "pydantic-1.10.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe429898f2c9dd209bd0632a606bddc06f8bce081bbd03d1c775a45886e2c1cb"}, + {file = "pydantic-1.10.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:192c608ad002a748e4a0bed2ddbcd98f9b56df50a7c24d9a931a8c5dd053bd3d"}, + {file = "pydantic-1.10.11-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ef55392ec4bb5721f4ded1096241e4b7151ba6d50a50a80a2526c854f42e6a2f"}, + {file = "pydantic-1.10.11-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41e0bb6efe86281623abbeeb0be64eab740c865388ee934cd3e6a358784aca6e"}, + {file = "pydantic-1.10.11-cp38-cp38-win_amd64.whl", hash = "sha256:265a60da42f9f27e0b1014eab8acd3e53bd0bad5c5b4884e98a55f8f596b2c19"}, + {file = "pydantic-1.10.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:469adf96c8e2c2bbfa655fc7735a2a82f4c543d9fee97bd113a7fb509bf5e622"}, + {file = "pydantic-1.10.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e6cbfbd010b14c8a905a7b10f9fe090068d1744d46f9e0c021db28daeb8b6de1"}, + {file = "pydantic-1.10.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abade85268cc92dff86d6effcd917893130f0ff516f3d637f50dadc22ae93999"}, + {file = "pydantic-1.10.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9738b0f2e6c70f44ee0de53f2089d6002b10c33264abee07bdb5c7f03038303"}, + {file = "pydantic-1.10.11-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:787cf23e5a0cde753f2eabac1b2e73ae3844eb873fd1f5bdbff3048d8dbb7604"}, + {file = "pydantic-1.10.11-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:174899023337b9fc685ac8adaa7b047050616136ccd30e9070627c1aaab53a13"}, + {file = "pydantic-1.10.11-cp39-cp39-win_amd64.whl", hash = "sha256:1954f8778489a04b245a1e7b8b22a9d3ea8ef49337285693cf6959e4b757535e"}, + {file = "pydantic-1.10.11-py3-none-any.whl", hash = "sha256:008c5e266c8aada206d0627a011504e14268a62091450210eda7c07fabe6963e"}, + {file = "pydantic-1.10.11.tar.gz", hash = "sha256:f66d479cf7eb331372c470614be6511eae96f1f120344c25f3f9bb59fb1b5528"}, ] pyflakes = [ {file = "pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"}, @@ -3759,49 +3801,20 @@ pyopenssl = [ {file = "pyOpenSSL-23.2.0.tar.gz", hash = 
"sha256:276f931f55a452e7dea69c7173e984eb2a4407ce413c918aa34b55f82f9b8bac"}, ] pyparsing = [ - {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, - {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, -] -pyrsistent = [ - {file = "pyrsistent-0.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a"}, - {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64"}, - {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf"}, - {file = "pyrsistent-0.19.3-cp310-cp310-win32.whl", hash = "sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a"}, - {file = "pyrsistent-0.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da"}, - {file = "pyrsistent-0.19.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9"}, - {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393"}, - {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19"}, - {file = "pyrsistent-0.19.3-cp311-cp311-win32.whl", hash = "sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3"}, - {file = "pyrsistent-0.19.3-cp311-cp311-win_amd64.whl", hash = "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-win32.whl", hash = "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b"}, - {file = "pyrsistent-0.19.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8"}, - {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a"}, - {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c"}, - {file = "pyrsistent-0.19.3-cp38-cp38-win32.whl", hash = "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c"}, - {file = "pyrsistent-0.19.3-cp38-cp38-win_amd64.whl", hash = 
"sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7"}, - {file = "pyrsistent-0.19.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc"}, - {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2"}, - {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3"}, - {file = "pyrsistent-0.19.3-cp39-cp39-win32.whl", hash = "sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2"}, - {file = "pyrsistent-0.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98"}, - {file = "pyrsistent-0.19.3-py3-none-any.whl", hash = "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64"}, - {file = "pyrsistent-0.19.3.tar.gz", hash = "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440"}, + {file = "pyparsing-3.1.0-py3-none-any.whl", hash = "sha256:d554a96d1a7d3ddaf7183104485bc19fd80543ad6ac5bdb6426719d766fb06c1"}, + {file = "pyparsing-3.1.0.tar.gz", hash = "sha256:edb662d6fe322d6e990b1594b5feaeadf806803359e3d4d42f11e295e588f0ea"}, ] pytest = [ - {file = "pytest-7.3.1-py3-none-any.whl", hash = "sha256:3799fa815351fea3a5e96ac7e503a96fa51cc9942c3753cda7651b93c1cfa362"}, - {file = "pytest-7.3.1.tar.gz", hash = "sha256:434afafd78b1d78ed0addf160ad2b77a30d35d4bdf8af234fe621919d9ed15e3"}, + {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, + {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, ] pytest-cov = [ {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, ] pytest-mock = [ - {file = "pytest-mock-3.10.0.tar.gz", hash = "sha256:fbbdb085ef7c252a326fd8cdcac0aa3b1333d8811f131bdcc701002e1be7ed4f"}, - {file = "pytest_mock-3.10.0-py3-none-any.whl", hash = "sha256:f4c973eeae0282963eb293eb173ce91b091a79c1334455acfac9ddee8a1c784b"}, + {file = "pytest-mock-3.11.1.tar.gz", hash = "sha256:7f6b125602ac6d743e523ae0bfa71e1a697a2f5534064528c6ff84c2f7c2fc7f"}, + {file = "pytest_mock-3.11.1-py3-none-any.whl", hash = "sha256:21c279fff83d70763b05f8874cc9cfb3fcacd6d354247a976f9529d19f9acf39"}, ] python-dateutil = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, @@ -3836,8 +3849,8 @@ pywin32 = [ {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, ] pywin32-ctypes = [ - {file = "pywin32-ctypes-0.2.0.tar.gz", hash = "sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942"}, - {file = "pywin32_ctypes-0.2.0-py2.py3-none-any.whl", hash = "sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98"}, + {file = "pywin32-ctypes-0.2.2.tar.gz", hash = "sha256:3426e063bdd5fd4df74a14fa3cf80a0b42845a87e1d1e81f6549f9daec593a60"}, + {file = "pywin32_ctypes-0.2.2-py3-none-any.whl", hash = "sha256:bf490a1a709baf35d688fe0ecf980ed4de11d2b3e37b51e5442587a75d9957e7"}, ] pywinpty = [ {file = 
"pywinpty-2.0.10-cp310-none-win_amd64.whl", hash = "sha256:4c7d06ad10f6e92bc850a467f26d98f4f30e73d2fe5926536308c6ae0566bc16"}, @@ -3972,95 +3985,99 @@ rdflib = [ {file = "rdflib-6.3.2-py3-none-any.whl", hash = "sha256:36b4e74a32aa1e4fa7b8719876fb192f19ecd45ff932ea5ebbd2e417a0247e63"}, {file = "rdflib-6.3.2.tar.gz", hash = "sha256:72af591ff704f4caacea7ecc0c5a9056b8553e0489dd4f35a9bc52dbd41522e0"}, ] +referencing = [ + {file = "referencing-0.29.1-py3-none-any.whl", hash = "sha256:d3c8f323ee1480095da44d55917cfb8278d73d6b4d5f677e3e40eb21314ac67f"}, + {file = "referencing-0.29.1.tar.gz", hash = "sha256:90cb53782d550ba28d2166ef3f55731f38397def8832baac5d45235f1995e35e"}, +] regex = [ - {file = "regex-2023.5.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:48c9ec56579d4ba1c88f42302194b8ae2350265cb60c64b7b9a88dcb7fbde309"}, - {file = "regex-2023.5.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02f4541550459c08fdd6f97aa4e24c6f1932eec780d58a2faa2068253df7d6ff"}, - {file = "regex-2023.5.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53e22e4460f0245b468ee645156a4f84d0fc35a12d9ba79bd7d79bdcd2f9629d"}, - {file = "regex-2023.5.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b870b6f632fc74941cadc2a0f3064ed8409e6f8ee226cdfd2a85ae50473aa94"}, - {file = "regex-2023.5.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:171c52e320fe29260da550d81c6b99f6f8402450dc7777ef5ced2e848f3b6f8f"}, - {file = "regex-2023.5.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad5524c2aedaf9aa14ef1bc9327f8abd915699dea457d339bebbe2f0d218f86"}, - {file = "regex-2023.5.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a0f874ee8c0bc820e649c900243c6d1e6dc435b81da1492046716f14f1a2a96"}, - {file = "regex-2023.5.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e645c757183ee0e13f0bbe56508598e2d9cd42b8abc6c0599d53b0d0b8dd1479"}, - {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a4c5da39bca4f7979eefcbb36efea04471cd68db2d38fcbb4ee2c6d440699833"}, - {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5e3f4468b8c6fd2fd33c218bbd0a1559e6a6fcf185af8bb0cc43f3b5bfb7d636"}, - {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:59e4b729eae1a0919f9e4c0fc635fbcc9db59c74ad98d684f4877be3d2607dd6"}, - {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ba73a14e9c8f9ac409863543cde3290dba39098fc261f717dc337ea72d3ebad2"}, - {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0bbd5dcb19603ab8d2781fac60114fb89aee8494f4505ae7ad141a3314abb1f9"}, - {file = "regex-2023.5.5-cp310-cp310-win32.whl", hash = "sha256:40005cbd383438aecf715a7b47fe1e3dcbc889a36461ed416bdec07e0ef1db66"}, - {file = "regex-2023.5.5-cp310-cp310-win_amd64.whl", hash = "sha256:59597cd6315d3439ed4b074febe84a439c33928dd34396941b4d377692eca810"}, - {file = "regex-2023.5.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8f08276466fedb9e36e5193a96cb944928301152879ec20c2d723d1031cd4ddd"}, - {file = "regex-2023.5.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cd46f30e758629c3ee91713529cfbe107ac50d27110fdcc326a42ce2acf4dafc"}, - {file = "regex-2023.5.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2910502f718828cecc8beff004917dcf577fc5f8f5dd40ffb1ea7612124547b"}, - {file = 
"regex-2023.5.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:445d6f4fc3bd9fc2bf0416164454f90acab8858cd5a041403d7a11e3356980e8"}, - {file = "regex-2023.5.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18196c16a584619c7c1d843497c069955d7629ad4a3fdee240eb347f4a2c9dbe"}, - {file = "regex-2023.5.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33d430a23b661629661f1fe8395be2004006bc792bb9fc7c53911d661b69dd7e"}, - {file = "regex-2023.5.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72a28979cc667e5f82ef433db009184e7ac277844eea0f7f4d254b789517941d"}, - {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f764e4dfafa288e2eba21231f455d209f4709436baeebb05bdecfb5d8ddc3d35"}, - {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:23d86ad2121b3c4fc78c58f95e19173790e22ac05996df69b84e12da5816cb17"}, - {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:690a17db524ee6ac4a27efc5406530dd90e7a7a69d8360235323d0e5dafb8f5b"}, - {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:1ecf3dcff71f0c0fe3e555201cbe749fa66aae8d18f80d2cc4de8e66df37390a"}, - {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:811040d7f3dd9c55eb0d8b00b5dcb7fd9ae1761c454f444fd9f37fe5ec57143a"}, - {file = "regex-2023.5.5-cp311-cp311-win32.whl", hash = "sha256:c8c143a65ce3ca42e54d8e6fcaf465b6b672ed1c6c90022794a802fb93105d22"}, - {file = "regex-2023.5.5-cp311-cp311-win_amd64.whl", hash = "sha256:586a011f77f8a2da4b888774174cd266e69e917a67ba072c7fc0e91878178a80"}, - {file = "regex-2023.5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b6365703e8cf1644b82104cdd05270d1a9f043119a168d66c55684b1b557d008"}, - {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a56c18f21ac98209da9c54ae3ebb3b6f6e772038681d6cb43b8d53da3b09ee81"}, - {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8b942d8b3ce765dbc3b1dad0a944712a89b5de290ce8f72681e22b3c55f3cc8"}, - {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:844671c9c1150fcdac46d43198364034b961bd520f2c4fdaabfc7c7d7138a2dd"}, - {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2ce65bdeaf0a386bb3b533a28de3994e8e13b464ac15e1e67e4603dd88787fa"}, - {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fee0016cc35a8a91e8cc9312ab26a6fe638d484131a7afa79e1ce6165328a135"}, - {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:18f05d14f14a812fe9723f13afafefe6b74ca042d99f8884e62dbd34dcccf3e2"}, - {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:941b3f1b2392f0bcd6abf1bc7a322787d6db4e7457be6d1ffd3a693426a755f2"}, - {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:921473a93bcea4d00295799ab929522fc650e85c6b9f27ae1e6bb32a790ea7d3"}, - {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:e2205a81f815b5bb17e46e74cc946c575b484e5f0acfcb805fb252d67e22938d"}, - {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:385992d5ecf1a93cb85adff2f73e0402dd9ac29b71b7006d342cc920816e6f32"}, - {file = 
"regex-2023.5.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:890a09cb0a62198bff92eda98b2b507305dd3abf974778bae3287f98b48907d3"}, - {file = "regex-2023.5.5-cp36-cp36m-win32.whl", hash = "sha256:821a88b878b6589c5068f4cc2cfeb2c64e343a196bc9d7ac68ea8c2a776acd46"}, - {file = "regex-2023.5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:7918a1b83dd70dc04ab5ed24c78ae833ae8ea228cef84e08597c408286edc926"}, - {file = "regex-2023.5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:338994d3d4ca4cf12f09822e025731a5bdd3a37aaa571fa52659e85ca793fb67"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a69cf0c00c4d4a929c6c7717fd918414cab0d6132a49a6d8fc3ded1988ed2ea"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f5e06df94fff8c4c85f98c6487f6636848e1dc85ce17ab7d1931df4a081f657"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8906669b03c63266b6a7693d1f487b02647beb12adea20f8840c1a087e2dfb5"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fda3e50abad8d0f48df621cf75adc73c63f7243cbe0e3b2171392b445401550"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ac2b7d341dc1bd102be849d6dd33b09701223a851105b2754339e390be0627a"}, - {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fb2b495dd94b02de8215625948132cc2ea360ae84fe6634cd19b6567709c8ae2"}, - {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:aa7d032c1d84726aa9edeb6accf079b4caa87151ca9fabacef31fa028186c66d"}, - {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3d45864693351c15531f7e76f545ec35000d50848daa833cead96edae1665559"}, - {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21e90a288e6ba4bf44c25c6a946cb9b0f00b73044d74308b5e0afd190338297c"}, - {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:10250a093741ec7bf74bcd2039e697f519b028518f605ff2aa7ac1e9c9f97423"}, - {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6b8d0c153f07a953636b9cdb3011b733cadd4178123ef728ccc4d5969e67f3c2"}, - {file = "regex-2023.5.5-cp37-cp37m-win32.whl", hash = "sha256:10374c84ee58c44575b667310d5bbfa89fb2e64e52349720a0182c0017512f6c"}, - {file = "regex-2023.5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:9b320677521aabf666cdd6e99baee4fb5ac3996349c3b7f8e7c4eee1c00dfe3a"}, - {file = "regex-2023.5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:afb1c70ec1e594a547f38ad6bf5e3d60304ce7539e677c1429eebab115bce56e"}, - {file = "regex-2023.5.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cf123225945aa58b3057d0fba67e8061c62d14cc8a4202630f8057df70189051"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a99757ad7fe5c8a2bb44829fc57ced11253e10f462233c1255fe03888e06bc19"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a623564d810e7a953ff1357f7799c14bc9beeab699aacc8b7ab7822da1e952b8"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ced02e3bd55e16e89c08bbc8128cff0884d96e7f7a5633d3dc366b6d95fcd1d6"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d1cbe6b5be3b9b698d8cc4ee4dee7e017ad655e83361cd0ea8e653d65e469468"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a6e4b0e0531223f53bad07ddf733af490ba2b8367f62342b92b39b29f72735a"}, - {file = "regex-2023.5.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2e9c4f778514a560a9c9aa8e5538bee759b55f6c1dcd35613ad72523fd9175b8"}, - {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:256f7f4c6ba145f62f7a441a003c94b8b1af78cee2cccacfc1e835f93bc09426"}, - {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bd7b68fd2e79d59d86dcbc1ccd6e2ca09c505343445daaa4e07f43c8a9cc34da"}, - {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4a5059bd585e9e9504ef9c07e4bc15b0a621ba20504388875d66b8b30a5c4d18"}, - {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:6893544e06bae009916a5658ce7207e26ed17385149f35a3125f5259951f1bbe"}, - {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c64d5abe91a3dfe5ff250c6bb267ef00dbc01501518225b45a5f9def458f31fb"}, - {file = "regex-2023.5.5-cp38-cp38-win32.whl", hash = "sha256:7923470d6056a9590247ff729c05e8e0f06bbd4efa6569c916943cb2d9b68b91"}, - {file = "regex-2023.5.5-cp38-cp38-win_amd64.whl", hash = "sha256:4035d6945cb961c90c3e1c1ca2feb526175bcfed44dfb1cc77db4fdced060d3e"}, - {file = "regex-2023.5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:50fd2d9b36938d4dcecbd684777dd12a407add4f9f934f235c66372e630772b0"}, - {file = "regex-2023.5.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d19e57f888b00cd04fc38f5e18d0efbd91ccba2d45039453ab2236e6eec48d4d"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd966475e963122ee0a7118ec9024388c602d12ac72860f6eea119a3928be053"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db09e6c18977a33fea26fe67b7a842f706c67cf8bda1450974d0ae0dd63570df"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6164d4e2a82f9ebd7752a06bd6c504791bedc6418c0196cd0a23afb7f3e12b2d"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84397d3f750d153ebd7f958efaa92b45fea170200e2df5e0e1fd4d85b7e3f58a"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c3efee9bb53cbe7b285760c81f28ac80dc15fa48b5fe7e58b52752e642553f1"}, - {file = "regex-2023.5.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:144b5b017646b5a9392a5554a1e5db0000ae637be4971c9747566775fc96e1b2"}, - {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1189fbbb21e2c117fda5303653b61905aeeeea23de4a94d400b0487eb16d2d60"}, - {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f83fe9e10f9d0b6cf580564d4d23845b9d692e4c91bd8be57733958e4c602956"}, - {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:72aa4746993a28c841e05889f3f1b1e5d14df8d3daa157d6001a34c98102b393"}, - {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:de2f780c3242ea114dd01f84848655356af4dd561501896c751d7b885ea6d3a1"}, - {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:290fd35219486dfbc00b0de72f455ecdd63e59b528991a6aec9fdfc0ce85672e"}, - {file = "regex-2023.5.5-cp39-cp39-win32.whl", hash = "sha256:732176f5427e72fa2325b05c58ad0b45af341c459910d766f814b0584ac1f9ac"}, - {file = "regex-2023.5.5-cp39-cp39-win_amd64.whl", hash = "sha256:1307aa4daa1cbb23823d8238e1f61292fd07e4e5d8d38a6efff00b67a7cdb764"}, - {file = "regex-2023.5.5.tar.gz", hash = "sha256:7d76a8a1fc9da08296462a18f16620ba73bcbf5909e42383b253ef34d9d5141e"}, + {file = "regex-2023.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:824bf3ac11001849aec3fa1d69abcb67aac3e150a933963fb12bda5151fe1bfd"}, + {file = "regex-2023.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:05ed27acdf4465c95826962528f9e8d41dbf9b1aa8531a387dee6ed215a3e9ef"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b49c764f88a79160fa64f9a7b425620e87c9f46095ef9c9920542ab2495c8bc"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8e3f1316c2293e5469f8f09dc2d76efb6c3982d3da91ba95061a7e69489a14ef"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43e1dd9d12df9004246bacb79a0e5886b3b6071b32e41f83b0acbf293f820ee8"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4959e8bcbfda5146477d21c3a8ad81b185cd252f3d0d6e4724a5ef11c012fb06"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af4dd387354dc83a3bff67127a124c21116feb0d2ef536805c454721c5d7993d"}, + {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2239d95d8e243658b8dbb36b12bd10c33ad6e6933a54d36ff053713f129aa536"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:890e5a11c97cf0d0c550eb661b937a1e45431ffa79803b942a057c4fb12a2da2"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a8105e9af3b029f243ab11ad47c19b566482c150c754e4c717900a798806b222"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:25be746a8ec7bc7b082783216de8e9473803706723b3f6bef34b3d0ed03d57e2"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:3676f1dd082be28b1266c93f618ee07741b704ab7b68501a173ce7d8d0d0ca18"}, + {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:10cb847aeb1728412c666ab2e2000ba6f174f25b2bdc7292e7dd71b16db07568"}, + {file = "regex-2023.6.3-cp310-cp310-win32.whl", hash = "sha256:dbbbfce33cd98f97f6bffb17801b0576e653f4fdb1d399b2ea89638bc8d08ae1"}, + {file = "regex-2023.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:c5f8037000eb21e4823aa485149f2299eb589f8d1fe4b448036d230c3f4e68e0"}, + {file = "regex-2023.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c123f662be8ec5ab4ea72ea300359023a5d1df095b7ead76fedcd8babbedf969"}, + {file = "regex-2023.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9edcbad1f8a407e450fbac88d89e04e0b99a08473f666a3f3de0fd292badb6aa"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcba6dae7de533c876255317c11f3abe4907ba7d9aa15d13e3d9710d4315ec0e"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29cdd471ebf9e0f2fb3cac165efedc3c58db841d83a518b082077e612d3ee5df"}, + {file = 
"regex-2023.6.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12b74fbbf6cbbf9dbce20eb9b5879469e97aeeaa874145517563cca4029db65c"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c29ca1bd61b16b67be247be87390ef1d1ef702800f91fbd1991f5c4421ebae8"}, + {file = "regex-2023.6.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77f09bc4b55d4bf7cc5eba785d87001d6757b7c9eec237fe2af57aba1a071d9"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ea353ecb6ab5f7e7d2f4372b1e779796ebd7b37352d290096978fea83c4dba0c"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:10590510780b7541969287512d1b43f19f965c2ece6c9b1c00fc367b29d8dce7"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e2fbd6236aae3b7f9d514312cdb58e6494ee1c76a9948adde6eba33eb1c4264f"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:6b2675068c8b56f6bfd5a2bda55b8accbb96c02fd563704732fd1c95e2083461"}, + {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74419d2b50ecb98360cfaa2974da8689cb3b45b9deff0dcf489c0d333bcc1477"}, + {file = "regex-2023.6.3-cp311-cp311-win32.whl", hash = "sha256:fb5ec16523dc573a4b277663a2b5a364e2099902d3944c9419a40ebd56a118f9"}, + {file = "regex-2023.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:09e4a1a6acc39294a36b7338819b10baceb227f7f7dbbea0506d419b5a1dd8af"}, + {file = "regex-2023.6.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0654bca0cdf28a5956c83839162692725159f4cda8d63e0911a2c0dc76166525"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:463b6a3ceb5ca952e66550a4532cef94c9a0c80dc156c4cc343041951aec1697"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87b2a5bb5e78ee0ad1de71c664d6eb536dc3947a46a69182a90f4410f5e3f7dd"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6343c6928282c1f6a9db41f5fd551662310e8774c0e5ebccb767002fcf663ca9"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6192d5af2ccd2a38877bfef086d35e6659566a335b1492786ff254c168b1693"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74390d18c75054947e4194019077e243c06fbb62e541d8817a0fa822ea310c14"}, + {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:742e19a90d9bb2f4a6cf2862b8b06dea5e09b96c9f2df1779e53432d7275331f"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8abbc5d54ea0ee80e37fef009e3cec5dafd722ed3c829126253d3e22f3846f1e"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:c2b867c17a7a7ae44c43ebbeb1b5ff406b3e8d5b3e14662683e5e66e6cc868d3"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d831c2f8ff278179705ca59f7e8524069c1a989e716a1874d6d1aab6119d91d1"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ee2d1a9a253b1729bb2de27d41f696ae893507c7db224436abe83ee25356f5c1"}, + {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:61474f0b41fe1a80e8dfa70f70ea1e047387b7cd01c85ec88fa44f5d7561d787"}, + {file = "regex-2023.6.3-cp36-cp36m-win32.whl", 
hash = "sha256:0b71e63226e393b534105fcbdd8740410dc6b0854c2bfa39bbda6b0d40e59a54"}, + {file = "regex-2023.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bbb02fd4462f37060122e5acacec78e49c0fbb303c30dd49c7f493cf21fc5b27"}, + {file = "regex-2023.6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b862c2b9d5ae38a68b92e215b93f98d4c5e9454fa36aae4450f61dd33ff48487"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:976d7a304b59ede34ca2921305b57356694f9e6879db323fd90a80f865d355a3"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:83320a09188e0e6c39088355d423aa9d056ad57a0b6c6381b300ec1a04ec3d16"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9427a399501818a7564f8c90eced1e9e20709ece36be701f394ada99890ea4b3"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178bbc1b2ec40eaca599d13c092079bf529679bf0371c602edaa555e10b41c3"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:837328d14cde912af625d5f303ec29f7e28cdab588674897baafaf505341f2fc"}, + {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2d44dc13229905ae96dd2ae2dd7cebf824ee92bc52e8cf03dcead37d926da019"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d54af539295392611e7efbe94e827311eb8b29668e2b3f4cadcfe6f46df9c777"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7117d10690c38a622e54c432dfbbd3cbd92f09401d622902c32f6d377e2300ee"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bb60b503ec8a6e4e3e03a681072fa3a5adcbfa5479fa2d898ae2b4a8e24c4591"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:65ba8603753cec91c71de423a943ba506363b0e5c3fdb913ef8f9caa14b2c7e0"}, + {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:271f0bdba3c70b58e6f500b205d10a36fb4b58bd06ac61381b68de66442efddb"}, + {file = "regex-2023.6.3-cp37-cp37m-win32.whl", hash = "sha256:9beb322958aaca059f34975b0df135181f2e5d7a13b84d3e0e45434749cb20f7"}, + {file = "regex-2023.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fea75c3710d4f31389eed3c02f62d0b66a9da282521075061ce875eb5300cf23"}, + {file = "regex-2023.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8f56fcb7ff7bf7404becdfc60b1e81a6d0561807051fd2f1860b0d0348156a07"}, + {file = "regex-2023.6.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d2da3abc88711bce7557412310dfa50327d5769a31d1c894b58eb256459dc289"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a99b50300df5add73d307cf66abea093304a07eb017bce94f01e795090dea87c"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5708089ed5b40a7b2dc561e0c8baa9535b77771b64a8330b684823cfd5116036"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:687ea9d78a4b1cf82f8479cab23678aff723108df3edeac098e5b2498879f4a7"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d3850beab9f527f06ccc94b446c864059c57651b3f911fddb8d9d3ec1d1b25d"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:e8915cc96abeb8983cea1df3c939e3c6e1ac778340c17732eb63bb96247b91d2"}, + {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:841d6e0e5663d4c7b4c8099c9997be748677d46cbf43f9f471150e560791f7ff"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9edce5281f965cf135e19840f4d93d55b3835122aa76ccacfd389e880ba4cf82"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b956231ebdc45f5b7a2e1f90f66a12be9610ce775fe1b1d50414aac1e9206c06"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:36efeba71c6539d23c4643be88295ce8c82c88bbd7c65e8a24081d2ca123da3f"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:cf67ca618b4fd34aee78740bea954d7c69fdda419eb208c2c0c7060bb822d747"}, + {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b4598b1897837067a57b08147a68ac026c1e73b31ef6e36deeeb1fa60b2933c9"}, + {file = "regex-2023.6.3-cp38-cp38-win32.whl", hash = "sha256:f415f802fbcafed5dcc694c13b1292f07fe0befdb94aa8a52905bd115ff41e88"}, + {file = "regex-2023.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:d4f03bb71d482f979bda92e1427f3ec9b220e62a7dd337af0aa6b47bf4498f72"}, + {file = "regex-2023.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccf91346b7bd20c790310c4147eee6ed495a54ddb6737162a36ce9dbef3e4751"}, + {file = "regex-2023.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b28f5024a3a041009eb4c333863d7894d191215b39576535c6734cd88b0fcb68"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0bb18053dfcfed432cc3ac632b5e5e5c5b7e55fb3f8090e867bfd9b054dbcbf"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a5bfb3004f2144a084a16ce19ca56b8ac46e6fd0651f54269fc9e230edb5e4a"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c6b48d0fa50d8f4df3daf451be7f9689c2bde1a52b1225c5926e3f54b6a9ed1"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051da80e6eeb6e239e394ae60704d2b566aa6a7aed6f2890a7967307267a5dc6"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4c3b7fa4cdaa69268748665a1a6ff70c014d39bb69c50fda64b396c9116cf77"}, + {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:457b6cce21bee41ac292d6753d5e94dcbc5c9e3e3a834da285b0bde7aa4a11e9"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aad51907d74fc183033ad796dd4c2e080d1adcc4fd3c0fd4fd499f30c03011cd"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0385e73da22363778ef2324950e08b689abdf0b108a7d8decb403ad7f5191938"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c6a57b742133830eec44d9b2290daf5cbe0a2f1d6acee1b3c7b1c7b2f3606df7"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3e5219bf9e75993d73ab3d25985c857c77e614525fac9ae02b1bebd92f7cecac"}, + {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e5087a3c59eef624a4591ef9eaa6e9a8d8a94c779dade95d27c0bc24650261cd"}, + {file = "regex-2023.6.3-cp39-cp39-win32.whl", hash = "sha256:20326216cc2afe69b6e98528160b225d72f85ab080cbdf0b11528cbbaba2248f"}, + {file = 
"regex-2023.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:bdff5eab10e59cf26bc479f565e25ed71a7d041d1ded04ccf9aee1d9f208487a"}, + {file = "regex-2023.6.3.tar.gz", hash = "sha256:72d1a25bf36d2050ceb35b517afe13864865268dfb45910e2e17a84be6cbfeb0"}, ] requests = [ {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, @@ -4078,6 +4095,105 @@ rfc3986-validator = [ {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, ] +rpds-py = [ + {file = "rpds_py-0.8.10-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:93d06cccae15b3836247319eee7b6f1fdcd6c10dabb4e6d350d27bd0bdca2711"}, + {file = "rpds_py-0.8.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3816a890a6a9e9f1de250afa12ca71c9a7a62f2b715a29af6aaee3aea112c181"}, + {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7c6304b894546b5a6bdc0fe15761fa53fe87d28527a7142dae8de3c663853e1"}, + {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ad3bfb44c8840fb4be719dc58e229f435e227fbfbe133dc33f34981ff622a8f8"}, + {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14f1c356712f66653b777ecd8819804781b23dbbac4eade4366b94944c9e78ad"}, + {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82bb361cae4d0a627006dadd69dc2f36b7ad5dc1367af9d02e296ec565248b5b"}, + {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2e3c4f2a8e3da47f850d7ea0d7d56720f0f091d66add889056098c4b2fd576c"}, + {file = "rpds_py-0.8.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15a90d0ac11b4499171067ae40a220d1ca3cb685ec0acc356d8f3800e07e4cb8"}, + {file = "rpds_py-0.8.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:70bb9c8004b97b4ef7ae56a2aa56dfaa74734a0987c78e7e85f00004ab9bf2d0"}, + {file = "rpds_py-0.8.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d64f9f88d5203274a002b54442cafc9c7a1abff2a238f3e767b70aadf919b451"}, + {file = "rpds_py-0.8.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ccbbd276642788c4376fbe8d4e6c50f0fb4972ce09ecb051509062915891cbf0"}, + {file = "rpds_py-0.8.10-cp310-none-win32.whl", hash = "sha256:fafc0049add8043ad07ab5382ee80d80ed7e3699847f26c9a5cf4d3714d96a84"}, + {file = "rpds_py-0.8.10-cp310-none-win_amd64.whl", hash = "sha256:915031002c86a5add7c6fd4beb601b2415e8a1c956590a5f91d825858e92fe6e"}, + {file = "rpds_py-0.8.10-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:84eb541a44f7a18f07a6bfc48b95240739e93defe1fdfb4f2a295f37837945d7"}, + {file = "rpds_py-0.8.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f59996d0550894affaad8743e97b9b9c98f638b221fac12909210ec3d9294786"}, + {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9adb5664b78fcfcd830000416c8cc69853ef43cb084d645b3f1f0296edd9bae"}, + {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f96f3f98fbff7af29e9edf9a6584f3c1382e7788783d07ba3721790625caa43e"}, + {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:376b8de737401050bd12810003d207e824380be58810c031f10ec563ff6aef3d"}, + {file = 
"rpds_py-0.8.10-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d1c2bc319428d50b3e0fa6b673ab8cc7fa2755a92898db3a594cbc4eeb6d1f7"}, + {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73a1e48430f418f0ac3dfd87860e4cc0d33ad6c0f589099a298cb53724db1169"}, + {file = "rpds_py-0.8.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:134ec8f14ca7dbc6d9ae34dac632cdd60939fe3734b5d287a69683c037c51acb"}, + {file = "rpds_py-0.8.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4b519bac7c09444dd85280fd60f28c6dde4389c88dddf4279ba9b630aca3bbbe"}, + {file = "rpds_py-0.8.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9cd57981d9fab04fc74438d82460f057a2419974d69a96b06a440822d693b3c0"}, + {file = "rpds_py-0.8.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:69d089c026f6a8b9d64a06ff67dc3be196707b699d7f6ca930c25f00cf5e30d8"}, + {file = "rpds_py-0.8.10-cp311-none-win32.whl", hash = "sha256:220bdcad2d2936f674650d304e20ac480a3ce88a40fe56cd084b5780f1d104d9"}, + {file = "rpds_py-0.8.10-cp311-none-win_amd64.whl", hash = "sha256:6c6a0225b8501d881b32ebf3f5807a08ad3685b5eb5f0a6bfffd3a6e039b2055"}, + {file = "rpds_py-0.8.10-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:e3d0cd3dff0e7638a7b5390f3a53057c4e347f4ef122ee84ed93fc2fb7ea4aa2"}, + {file = "rpds_py-0.8.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d77dff3a5aa5eedcc3da0ebd10ff8e4969bc9541aa3333a8d41715b429e99f47"}, + {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41c89a366eae49ad9e65ed443a8f94aee762931a1e3723749d72aeac80f5ef2f"}, + {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3793c21494bad1373da517001d0849eea322e9a049a0e4789e50d8d1329df8e7"}, + {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:805a5f3f05d186c5d50de2e26f765ba7896d0cc1ac5b14ffc36fae36df5d2f10"}, + {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b01b39ad5411563031ea3977bbbc7324d82b088e802339e6296f082f78f6115c"}, + {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3f1e860be21f3e83011116a65e7310486300e08d9a3028e73e8d13bb6c77292"}, + {file = "rpds_py-0.8.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a13c8e56c46474cd5958d525ce6a9996727a83d9335684e41f5192c83deb6c58"}, + {file = "rpds_py-0.8.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:93d99f957a300d7a4ced41615c45aeb0343bb8f067c42b770b505de67a132346"}, + {file = "rpds_py-0.8.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:148b0b38d719c0760e31ce9285a9872972bdd7774969a4154f40c980e5beaca7"}, + {file = "rpds_py-0.8.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3cc5e5b5514796f45f03a568981971b12a3570f3de2e76114f7dc18d4b60a3c4"}, + {file = "rpds_py-0.8.10-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:e8e24b210a4deb5a7744971f8f77393005bae7f873568e37dfd9effe808be7f7"}, + {file = "rpds_py-0.8.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b41941583adce4242af003d2a8337b066ba6148ca435f295f31ac6d9e4ea2722"}, + {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c490204e16bca4f835dba8467869fe7295cdeaa096e4c5a7af97f3454a97991"}, + {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:1ee45cd1d84beed6cbebc839fd85c2e70a3a1325c8cfd16b62c96e2ffb565eca"}, + {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a8ca409f1252e1220bf09c57290b76cae2f14723746215a1e0506472ebd7bdf"}, + {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96b293c0498c70162effb13100624c5863797d99df75f2f647438bd10cbf73e4"}, + {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4627520a02fccbd324b33c7a83e5d7906ec746e1083a9ac93c41ac7d15548c7"}, + {file = "rpds_py-0.8.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e39d7ab0c18ac99955b36cd19f43926450baba21e3250f053e0704d6ffd76873"}, + {file = "rpds_py-0.8.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ba9f1d1ebe4b63801977cec7401f2d41e888128ae40b5441270d43140efcad52"}, + {file = "rpds_py-0.8.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:802f42200d8caf7f25bbb2a6464cbd83e69d600151b7e3b49f49a47fa56b0a38"}, + {file = "rpds_py-0.8.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d19db6ba816e7f59fc806c690918da80a7d186f00247048cd833acdab9b4847b"}, + {file = "rpds_py-0.8.10-cp38-none-win32.whl", hash = "sha256:7947e6e2c2ad68b1c12ee797d15e5f8d0db36331200b0346871492784083b0c6"}, + {file = "rpds_py-0.8.10-cp38-none-win_amd64.whl", hash = "sha256:fa326b3505d5784436d9433b7980171ab2375535d93dd63fbcd20af2b5ca1bb6"}, + {file = "rpds_py-0.8.10-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7b38a9ac96eeb6613e7f312cd0014de64c3f07000e8bf0004ad6ec153bac46f8"}, + {file = "rpds_py-0.8.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c4d42e83ddbf3445e6514f0aff96dca511421ed0392d9977d3990d9f1ba6753c"}, + {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b21575031478609db6dbd1f0465e739fe0e7f424a8e7e87610a6c7f68b4eb16"}, + {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:574868858a7ff6011192c023a5289158ed20e3f3b94b54f97210a773f2f22921"}, + {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae40f4a70a1f40939d66ecbaf8e7edc144fded190c4a45898a8cfe19d8fc85ea"}, + {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37f7ee4dc86db7af3bac6d2a2cedbecb8e57ce4ed081f6464510e537589f8b1e"}, + {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:695f642a3a5dbd4ad2ffbbacf784716ecd87f1b7a460843b9ddf965ccaeafff4"}, + {file = "rpds_py-0.8.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f43ab4cb04bde6109eb2555528a64dfd8a265cc6a9920a67dcbde13ef53a46c8"}, + {file = "rpds_py-0.8.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a11ab0d97be374efd04f640c04fe5c2d3dabc6dfb998954ea946ee3aec97056d"}, + {file = "rpds_py-0.8.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:92cf5b3ee60eef41f41e1a2cabca466846fb22f37fc580ffbcb934d1bcab225a"}, + {file = "rpds_py-0.8.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ceaac0c603bf5ac2f505a78b2dcab78d3e6b706be6596c8364b64cc613d208d2"}, + {file = "rpds_py-0.8.10-cp39-none-win32.whl", hash = "sha256:dd4f16e57c12c0ae17606c53d1b57d8d1c8792efe3f065a37cb3341340599d49"}, + {file = "rpds_py-0.8.10-cp39-none-win_amd64.whl", hash = "sha256:c03a435d26c3999c2a8642cecad5d1c4d10c961817536af52035f6f4ee2f5dd0"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = 
"sha256:0da53292edafecba5e1d8c1218f99babf2ed0bf1c791d83c0ab5c29b57223068"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:7d20a8ed227683401cc508e7be58cba90cc97f784ea8b039c8cd01111e6043e0"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97cab733d303252f7c2f7052bf021a3469d764fc2b65e6dbef5af3cbf89d4892"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8c398fda6df361a30935ab4c4bccb7f7a3daef2964ca237f607c90e9f3fdf66f"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2eb4b08c45f8f8d8254cdbfacd3fc5d6b415d64487fb30d7380b0d0569837bf1"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7dfb1cbb895810fa2b892b68153c17716c6abaa22c7dc2b2f6dcf3364932a1c"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89c92b74e8bf6f53a6f4995fd52f4bd510c12f103ee62c99e22bc9e05d45583c"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e9c0683cb35a9b5881b41bc01d5568ffc667910d9dbc632a1fba4e7d59e98773"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:0eeb2731708207d0fe2619afe6c4dc8cb9798f7de052da891de5f19c0006c315"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:7495010b658ec5b52835f21d8c8b1a7e52e194c50f095d4223c0b96c3da704b1"}, + {file = "rpds_py-0.8.10-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c72ebc22e70e04126158c46ba56b85372bc4d54d00d296be060b0db1671638a4"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:2cd3045e7f6375dda64ed7db1c5136826facb0159ea982f77d9cf6125025bd34"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:2418cf17d653d24ffb8b75e81f9f60b7ba1b009a23298a433a4720b2a0a17017"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a2edf8173ac0c7a19da21bc68818be1321998528b5e3f748d6ee90c0ba2a1fd"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f29b8c55fd3a2bc48e485e37c4e2df3317f43b5cc6c4b6631c33726f52ffbb3"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a7d20c1cf8d7b3960c5072c265ec47b3f72a0c608a9a6ee0103189b4f28d531"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:521fc8861a86ae54359edf53a15a05fabc10593cea7b3357574132f8427a5e5a"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5c191713e98e7c28800233f039a32a42c1a4f9a001a8a0f2448b07391881036"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:083df0fafe199371206111583c686c985dddaf95ab3ee8e7b24f1fda54515d09"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:ed41f3f49507936a6fe7003985ea2574daccfef999775525d79eb67344e23767"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:2614c2732bf45de5c7f9e9e54e18bc78693fa2f635ae58d2895b7965e470378c"}, + {file = "rpds_py-0.8.10-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c60528671d9d467009a6ec284582179f6b88651e83367d0ab54cb739021cd7de"}, + 
{file = "rpds_py-0.8.10-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ee744fca8d1ea822480a2a4e7c5f2e1950745477143668f0b523769426060f29"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a38b9f526d0d6cbdaa37808c400e3d9f9473ac4ff64d33d9163fd05d243dbd9b"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60e0e86e870350e03b3e25f9b1dd2c6cc72d2b5f24e070249418320a6f9097b7"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f53f55a8852f0e49b0fc76f2412045d6ad9d5772251dea8f55ea45021616e7d5"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c493365d3fad241d52f096e4995475a60a80f4eba4d3ff89b713bc65c2ca9615"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:300eb606e6b94a7a26f11c8cc8ee59e295c6649bd927f91e1dbd37a4c89430b6"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a665f6f1a87614d1c3039baf44109094926dedf785e346d8b0a728e9cabd27a"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:927d784648211447201d4c6f1babddb7971abad922b32257ab74de2f2750fad0"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:c200b30dd573afa83847bed7e3041aa36a8145221bf0cfdfaa62d974d720805c"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:08166467258fd0240a1256fce272f689f2360227ee41c72aeea103e9e4f63d2b"}, + {file = "rpds_py-0.8.10-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:996cc95830de9bc22b183661d95559ec6b3cd900ad7bc9154c4cbf5be0c9b734"}, + {file = "rpds_py-0.8.10.tar.gz", hash = "sha256:13e643ce8ad502a0263397362fb887594b49cf84bf518d6038c16f235f2bcea4"}, +] rsa = [ {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, @@ -4126,27 +4242,25 @@ schematic-db = [ {file = "schematic_db-0.0.20.tar.gz", hash = "sha256:577cdb32004b6ab5d383a3411e7c812410ae56d46d5a7065af57b488ffe5fe0a"}, ] scipy = [ - {file = "scipy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7354fd7527a4b0377ce55f286805b34e8c54b91be865bac273f527e1b839019"}, - {file = "scipy-1.10.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:4b3f429188c66603a1a5c549fb414e4d3bdc2a24792e061ffbd607d3d75fd84e"}, - {file = "scipy-1.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1553b5dcddd64ba9a0d95355e63fe6c3fc303a8fd77c7bc91e77d61363f7433f"}, - {file = "scipy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c0ff64b06b10e35215abce517252b375e580a6125fd5fdf6421b98efbefb2d2"}, - {file = "scipy-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:fae8a7b898c42dffe3f7361c40d5952b6bf32d10c4569098d276b4c547905ee1"}, - {file = "scipy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f1564ea217e82c1bbe75ddf7285ba0709ecd503f048cb1236ae9995f64217bd"}, - {file = "scipy-1.10.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:d925fa1c81b772882aa55bcc10bf88324dadb66ff85d548c71515f6689c6dac5"}, - {file = "scipy-1.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaea0a6be54462ec027de54fca511540980d1e9eea68b2d5c1dbfe084797be35"}, - 
{file = "scipy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15a35c4242ec5f292c3dd364a7c71a61be87a3d4ddcc693372813c0b73c9af1d"}, - {file = "scipy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:43b8e0bcb877faf0abfb613d51026cd5cc78918e9530e375727bf0625c82788f"}, - {file = "scipy-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5678f88c68ea866ed9ebe3a989091088553ba12c6090244fdae3e467b1139c35"}, - {file = "scipy-1.10.1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:39becb03541f9e58243f4197584286e339029e8908c46f7221abeea4b749fa88"}, - {file = "scipy-1.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bce5869c8d68cf383ce240e44c1d9ae7c06078a9396df68ce88a1230f93a30c1"}, - {file = "scipy-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07c3457ce0b3ad5124f98a86533106b643dd811dd61b548e78cf4c8786652f6f"}, - {file = "scipy-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:049a8bbf0ad95277ffba9b3b7d23e5369cc39e66406d60422c8cfef40ccc8415"}, - {file = "scipy-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cd9f1027ff30d90618914a64ca9b1a77a431159df0e2a195d8a9e8a04c78abf9"}, - {file = "scipy-1.10.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:79c8e5a6c6ffaf3a2262ef1be1e108a035cf4f05c14df56057b64acc5bebffb6"}, - {file = "scipy-1.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51af417a000d2dbe1ec6c372dfe688e041a7084da4fdd350aeb139bd3fb55353"}, - {file = "scipy-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b4735d6c28aad3cdcf52117e0e91d6b39acd4272f3f5cd9907c24ee931ad601"}, - {file = "scipy-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ff7f37b1bf4417baca958d254e8e2875d0cc23aaadbe65b3d5b3077b0eb23ea"}, - {file = "scipy-1.10.1.tar.gz", hash = "sha256:2cf9dfb80a7b4589ba4c40ce7588986d6d5cebc5457cad2c2880f6bc2d42f3a5"}, + {file = "scipy-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:aec8c62fbe52914f9cf28d846cf0401dd80ab80788bbab909434eb336ed07c04"}, + {file = "scipy-1.11.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:3b9963798df1d8a52db41a6fc0e6fa65b1c60e85d73da27ae8bb754de4792481"}, + {file = "scipy-1.11.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e8eb42db36526b130dfbc417609498a6192381abc1975b91e3eb238e0b41c1a"}, + {file = "scipy-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:366a6a937110d80dca4f63b3f5b00cc89d36f678b2d124a01067b154e692bab1"}, + {file = "scipy-1.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:08d957ca82d3535b3b9ba6c8ff355d78fe975271874e2af267cb5add5bd78625"}, + {file = "scipy-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:e866514bc2d660608447b6ba95c8900d591f2865c07cca0aa4f7ff3c4ca70f30"}, + {file = "scipy-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba94eeef3c9caa4cea7b402a35bb02a5714ee1ee77eb98aca1eed4543beb0f4c"}, + {file = "scipy-1.11.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:512fdc18c65f76dadaca139348e525646d440220d8d05f6d21965b8d4466bccd"}, + {file = "scipy-1.11.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cce154372f0ebe88556ed06d7b196e9c2e0c13080ecb58d0f35062dc7cc28b47"}, + {file = "scipy-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4bb943010203465ac81efa392e4645265077b4d9e99b66cf3ed33ae12254173"}, + {file = "scipy-1.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:249cfa465c379c9bb2c20123001e151ff5e29b351cbb7f9c91587260602c58d0"}, + {file = "scipy-1.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:ffb28e3fa31b9c376d0fb1f74c1f13911c8c154a760312fbee87a21eb21efe31"}, + {file = "scipy-1.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:39154437654260a52871dfde852adf1b93b1d1bc5dc0ffa70068f16ec0be2624"}, + {file = "scipy-1.11.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:b588311875c58d1acd4ef17c983b9f1ab5391755a47c3d70b6bd503a45bfaf71"}, + {file = "scipy-1.11.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d51565560565a0307ed06fa0ec4c6f21ff094947d4844d6068ed04400c72d0c3"}, + {file = "scipy-1.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b41a0f322b4eb51b078cb3441e950ad661ede490c3aca66edef66f4b37ab1877"}, + {file = "scipy-1.11.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:396fae3f8c12ad14c5f3eb40499fd06a6fef8393a6baa352a652ecd51e74e029"}, + {file = "scipy-1.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:be8c962a821957fdde8c4044efdab7a140c13294997a407eaee777acf63cbf0c"}, + {file = "scipy-1.11.1.tar.gz", hash = "sha256:fb5b492fa035334fd249f0973cc79ecad8b09c604b42a127a677b45a9a3d4289"}, ] secretstorage = [ {file = "SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99"}, @@ -4209,47 +4323,44 @@ sphinxcontrib-serializinghtml = [ {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, ] sqlalchemy = [ - {file = "SQLAlchemy-1.4.48-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:4bac3aa3c3d8bc7408097e6fe8bf983caa6e9491c5d2e2488cfcfd8106f13b6a"}, - {file = "SQLAlchemy-1.4.48-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:dbcae0e528d755f4522cad5842f0942e54b578d79f21a692c44d91352ea6d64e"}, - {file = "SQLAlchemy-1.4.48-cp27-cp27m-win32.whl", hash = "sha256:cbbe8b8bffb199b225d2fe3804421b7b43a0d49983f81dc654d0431d2f855543"}, - {file = "SQLAlchemy-1.4.48-cp27-cp27m-win_amd64.whl", hash = "sha256:627e04a5d54bd50628fc8734d5fc6df2a1aa5962f219c44aad50b00a6cdcf965"}, - {file = "SQLAlchemy-1.4.48-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9af1db7a287ef86e0f5cd990b38da6bd9328de739d17e8864f1817710da2d217"}, - {file = "SQLAlchemy-1.4.48-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:ce7915eecc9c14a93b73f4e1c9d779ca43e955b43ddf1e21df154184f39748e5"}, - {file = "SQLAlchemy-1.4.48-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5381ddd09a99638f429f4cbe1b71b025bed318f6a7b23e11d65f3eed5e181c33"}, - {file = "SQLAlchemy-1.4.48-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:87609f6d4e81a941a17e61a4c19fee57f795e96f834c4f0a30cee725fc3f81d9"}, - {file = "SQLAlchemy-1.4.48-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb0808ad34167f394fea21bd4587fc62f3bd81bba232a1e7fbdfa17e6cfa7cd7"}, - {file = "SQLAlchemy-1.4.48-cp310-cp310-win32.whl", hash = "sha256:d53cd8bc582da5c1c8c86b6acc4ef42e20985c57d0ebc906445989df566c5603"}, - {file = "SQLAlchemy-1.4.48-cp310-cp310-win_amd64.whl", hash = "sha256:4355e5915844afdc5cf22ec29fba1010166e35dd94a21305f49020022167556b"}, - {file = "SQLAlchemy-1.4.48-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:066c2b0413e8cb980e6d46bf9d35ca83be81c20af688fedaef01450b06e4aa5e"}, - {file = 
"SQLAlchemy-1.4.48-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c99bf13e07140601d111a7c6f1fc1519914dd4e5228315bbda255e08412f61a4"}, - {file = "SQLAlchemy-1.4.48-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ee26276f12614d47cc07bc85490a70f559cba965fb178b1c45d46ffa8d73fda"}, - {file = "SQLAlchemy-1.4.48-cp311-cp311-win32.whl", hash = "sha256:49c312bcff4728bffc6fb5e5318b8020ed5c8b958a06800f91859fe9633ca20e"}, - {file = "SQLAlchemy-1.4.48-cp311-cp311-win_amd64.whl", hash = "sha256:cef2e2abc06eab187a533ec3e1067a71d7bbec69e582401afdf6d8cad4ba3515"}, - {file = "SQLAlchemy-1.4.48-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:3509159e050bd6d24189ec7af373359f07aed690db91909c131e5068176c5a5d"}, - {file = "SQLAlchemy-1.4.48-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fc2ab4d9f6d9218a5caa4121bdcf1125303482a1cdcfcdbd8567be8518969c0"}, - {file = "SQLAlchemy-1.4.48-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e1ddbbcef9bcedaa370c03771ebec7e39e3944782bef49e69430383c376a250b"}, - {file = "SQLAlchemy-1.4.48-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f82d8efea1ca92b24f51d3aea1a82897ed2409868a0af04247c8c1e4fef5890"}, - {file = "SQLAlchemy-1.4.48-cp36-cp36m-win32.whl", hash = "sha256:e3e98d4907805b07743b583a99ecc58bf8807ecb6985576d82d5e8ae103b5272"}, - {file = "SQLAlchemy-1.4.48-cp36-cp36m-win_amd64.whl", hash = "sha256:25887b4f716e085a1c5162f130b852f84e18d2633942c8ca40dfb8519367c14f"}, - {file = "SQLAlchemy-1.4.48-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:0817c181271b0ce5df1aa20949f0a9e2426830fed5ecdcc8db449618f12c2730"}, - {file = "SQLAlchemy-1.4.48-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe1dd2562313dd9fe1778ed56739ad5d9aae10f9f43d9f4cf81d65b0c85168bb"}, - {file = "SQLAlchemy-1.4.48-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:68413aead943883b341b2b77acd7a7fe2377c34d82e64d1840860247cec7ff7c"}, - {file = "SQLAlchemy-1.4.48-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbde5642104ac6e95f96e8ad6d18d9382aa20672008cf26068fe36f3004491df"}, - {file = "SQLAlchemy-1.4.48-cp37-cp37m-win32.whl", hash = "sha256:11c6b1de720f816c22d6ad3bbfa2f026f89c7b78a5c4ffafb220e0183956a92a"}, - {file = "SQLAlchemy-1.4.48-cp37-cp37m-win_amd64.whl", hash = "sha256:eb5464ee8d4bb6549d368b578e9529d3c43265007193597ddca71c1bae6174e6"}, - {file = "SQLAlchemy-1.4.48-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:92e6133cf337c42bfee03ca08c62ba0f2d9695618c8abc14a564f47503157be9"}, - {file = "SQLAlchemy-1.4.48-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44d29a3fc6d9c45962476b470a81983dd8add6ad26fdbfae6d463b509d5adcda"}, - {file = "SQLAlchemy-1.4.48-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:005e942b451cad5285015481ae4e557ff4154dde327840ba91b9ac379be3b6ce"}, - {file = "SQLAlchemy-1.4.48-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c8cfe951ed074ba5e708ed29c45397a95c4143255b0d022c7c8331a75ae61f3"}, - {file = "SQLAlchemy-1.4.48-cp38-cp38-win32.whl", hash = 
"sha256:2b9af65cc58726129d8414fc1a1a650dcdd594ba12e9c97909f1f57d48e393d3"}, - {file = "SQLAlchemy-1.4.48-cp38-cp38-win_amd64.whl", hash = "sha256:2b562e9d1e59be7833edf28b0968f156683d57cabd2137d8121806f38a9d58f4"}, - {file = "SQLAlchemy-1.4.48-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:a1fc046756cf2a37d7277c93278566ddf8be135c6a58397b4c940abf837011f4"}, - {file = "SQLAlchemy-1.4.48-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d9b55252d2ca42a09bcd10a697fa041e696def9dfab0b78c0aaea1485551a08"}, - {file = "SQLAlchemy-1.4.48-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6dab89874e72a9ab5462997846d4c760cdb957958be27b03b49cf0de5e5c327c"}, - {file = "SQLAlchemy-1.4.48-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1fd8b5ee5a3acc4371f820934b36f8109ce604ee73cc668c724abb054cebcb6e"}, - {file = "SQLAlchemy-1.4.48-cp39-cp39-win32.whl", hash = "sha256:eee09350fd538e29cfe3a496ec6f148504d2da40dbf52adefb0d2f8e4d38ccc4"}, - {file = "SQLAlchemy-1.4.48-cp39-cp39-win_amd64.whl", hash = "sha256:7ad2b0f6520ed5038e795cc2852eb5c1f20fa6831d73301ced4aafbe3a10e1f6"}, - {file = "SQLAlchemy-1.4.48.tar.gz", hash = "sha256:b47bc287096d989a0838ce96f7d8e966914a24da877ed41a7531d44b55cdb8df"}, + {file = "SQLAlchemy-1.4.49-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2e126cf98b7fd38f1e33c64484406b78e937b1a280e078ef558b95bf5b6895f6"}, + {file = "SQLAlchemy-1.4.49-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:03db81b89fe7ef3857b4a00b63dedd632d6183d4ea5a31c5d8a92e000a41fc71"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:95b9df9afd680b7a3b13b38adf6e3a38995da5e162cc7524ef08e3be4e5ed3e1"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a63e43bf3f668c11bb0444ce6e809c1227b8f067ca1068898f3008a273f52b09"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f835c050ebaa4e48b18403bed2c0fda986525896efd76c245bdd4db995e51a4c"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c21b172dfb22e0db303ff6419451f0cac891d2e911bb9fbf8003d717f1bcf91"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-win32.whl", hash = "sha256:5fb1ebdfc8373b5a291485757bd6431de8d7ed42c27439f543c81f6c8febd729"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-win_amd64.whl", hash = "sha256:f8a65990c9c490f4651b5c02abccc9f113a7f56fa482031ac8cb88b70bc8ccaa"}, + {file = "SQLAlchemy-1.4.49-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8923dfdf24d5aa8a3adb59723f54118dd4fe62cf59ed0d0d65d940579c1170a4"}, + {file = "SQLAlchemy-1.4.49-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9ab2c507a7a439f13ca4499db6d3f50423d1d65dc9b5ed897e70941d9e135b0"}, + {file = "SQLAlchemy-1.4.49-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5debe7d49b8acf1f3035317e63d9ec8d5e4d904c6e75a2a9246a119f5f2fdf3d"}, + {file = "SQLAlchemy-1.4.49-cp311-cp311-win32.whl", hash = "sha256:82b08e82da3756765c2e75f327b9bf6b0f043c9c3925fb95fb51e1567fa4ee87"}, + {file = "SQLAlchemy-1.4.49-cp311-cp311-win_amd64.whl", hash = "sha256:171e04eeb5d1c0d96a544caf982621a1711d078dbc5c96f11d6469169bd003f1"}, + {file = 
"SQLAlchemy-1.4.49-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:36e58f8c4fe43984384e3fbe6341ac99b6b4e083de2fe838f0fdb91cebe9e9cb"}, + {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b31e67ff419013f99ad6f8fc73ee19ea31585e1e9fe773744c0f3ce58c039c30"}, + {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c14b29d9e1529f99efd550cd04dbb6db6ba5d690abb96d52de2bff4ed518bc95"}, + {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c40f3470e084d31247aea228aa1c39bbc0904c2b9ccbf5d3cfa2ea2dac06f26d"}, + {file = "SQLAlchemy-1.4.49-cp36-cp36m-win32.whl", hash = "sha256:706bfa02157b97c136547c406f263e4c6274a7b061b3eb9742915dd774bbc264"}, + {file = "SQLAlchemy-1.4.49-cp36-cp36m-win_amd64.whl", hash = "sha256:a7f7b5c07ae5c0cfd24c2db86071fb2a3d947da7bd487e359cc91e67ac1c6d2e"}, + {file = "SQLAlchemy-1.4.49-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:4afbbf5ef41ac18e02c8dc1f86c04b22b7a2125f2a030e25bbb4aff31abb224b"}, + {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24e300c0c2147484a002b175f4e1361f102e82c345bf263242f0449672a4bccf"}, + {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:201de072b818f8ad55c80d18d1a788729cccf9be6d9dc3b9d8613b053cd4836d"}, + {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653ed6817c710d0c95558232aba799307d14ae084cc9b1f4c389157ec50df5c"}, + {file = "SQLAlchemy-1.4.49-cp37-cp37m-win32.whl", hash = "sha256:647e0b309cb4512b1f1b78471fdaf72921b6fa6e750b9f891e09c6e2f0e5326f"}, + {file = "SQLAlchemy-1.4.49-cp37-cp37m-win_amd64.whl", hash = "sha256:ab73ed1a05ff539afc4a7f8cf371764cdf79768ecb7d2ec691e3ff89abbc541e"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:37ce517c011560d68f1ffb28af65d7e06f873f191eb3a73af5671e9c3fada08a"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1878ce508edea4a879015ab5215546c444233881301e97ca16fe251e89f1c55"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0e8e608983e6f85d0852ca61f97e521b62e67969e6e640fe6c6b575d4db68557"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccf956da45290df6e809ea12c54c02ace7f8ff4d765d6d3dfb3655ee876ce58d"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-win32.whl", hash = "sha256:f167c8175ab908ce48bd6550679cc6ea20ae169379e73c7720a28f89e53aa532"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-win_amd64.whl", hash = "sha256:45806315aae81a0c202752558f0df52b42d11dd7ba0097bf71e253b4215f34f4"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:b6d0c4b15d65087738a6e22e0ff461b407533ff65a73b818089efc8eb2b3e1de"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a843e34abfd4c797018fd8d00ffffa99fd5184c421f190b6ca99def4087689bd"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:1c890421651b45a681181301b3497e4d57c0d01dc001e10438a40e9a9c25ee77"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d26f280b8f0a8f497bc10573849ad6dc62e671d2468826e5c748d04ed9e670d5"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-win32.whl", hash = "sha256:ec2268de67f73b43320383947e74700e95c6770d0c68c4e615e9897e46296294"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-win_amd64.whl", hash = "sha256:bbdf16372859b8ed3f4d05f925a984771cd2abd18bd187042f24be4886c2a15f"}, + {file = "SQLAlchemy-1.4.49.tar.gz", hash = "sha256:06ff25cbae30c396c4b7737464f2a7fc37a67b7da409993b182b024cec80aed9"}, ] sqlalchemy-utils = [ {file = "SQLAlchemy-Utils-0.38.3.tar.gz", hash = "sha256:9f9afba607a40455cf703adfa9846584bf26168a0c5a60a70063b70d65051f4d"}, @@ -4264,8 +4375,8 @@ swagger-ui-bundle = [ {file = "swagger_ui_bundle-0.0.9.tar.gz", hash = "sha256:b462aa1460261796ab78fd4663961a7f6f347ce01760f1303bbbdf630f11f516"}, ] synapseclient = [ - {file = "synapseclient-2.7.1-py3-none-any.whl", hash = "sha256:c15efaec148dda18faa5a1736846f427713ceaa656178d5e7044fcd87fa8aa05"}, - {file = "synapseclient-2.7.1.tar.gz", hash = "sha256:c6a7d5ff834c825390a0514f3f0020876ea4fb8c863889894b9a636458278d69"}, + {file = "synapseclient-2.7.2-py3-none-any.whl", hash = "sha256:dd8b1a1b4667d08311bb651469431f43fe2eeab83c0ef1fe5a03c2929aeb26cd"}, + {file = "synapseclient-2.7.2.tar.gz", hash = "sha256:dc5a61f9f495109a0c89aa7d42b641b6ff278280d7961fb450dd5015704fe15b"}, ] tabulate = [ {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, @@ -4333,8 +4444,8 @@ tzlocal = [ {file = "tzlocal-5.0.1.tar.gz", hash = "sha256:46eb99ad4bdb71f3f72b7d24f4267753e240944ecfc16f25d2719ba89827a803"}, ] uri-template = [ - {file = "uri_template-1.2.0-py3-none-any.whl", hash = "sha256:f1699c77b73b925cf4937eae31ab282a86dc885c333f2e942513f08f691fc7db"}, - {file = "uri_template-1.2.0.tar.gz", hash = "sha256:934e4d09d108b70eb8a24410af8615294d09d279ce0e7cbcdaef1bd21f932b06"}, + {file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"}, + {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"}, ] uritemplate = [ {file = "uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e"}, @@ -4363,16 +4474,16 @@ webencodings = [ {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, ] websocket-client = [ - {file = "websocket-client-1.5.2.tar.gz", hash = "sha256:c7d67c13b928645f259d9b847ab5b57fd2d127213ca41ebd880de1f553b7c23b"}, - {file = "websocket_client-1.5.2-py3-none-any.whl", hash = "sha256:f8c64e28cd700e7ba1f04350d66422b6833b82a796b525a51e740b8cc8dab4b1"}, + {file = "websocket-client-1.6.1.tar.gz", hash = "sha256:c951af98631d24f8df89ab1019fc365f2227c0892f12fd150e935607c79dd0dd"}, + {file = "websocket_client-1.6.1-py3-none-any.whl", hash = "sha256:f1f9f2ad5291f0225a49efad77abf9e700b6fef553900623060dad6e26503b9d"}, ] werkzeug = [ {file = "Werkzeug-2.1.2-py3-none-any.whl", hash = "sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255"}, {file = "Werkzeug-2.1.2.tar.gz", hash = "sha256:1ce08e8093ed67d638d63879fd1ba3735817f7a80de3674d293f5984f25fb6e6"}, ] widgetsnbextension = [ - {file = "widgetsnbextension-4.0.7-py3-none-any.whl", hash = 
"sha256:be3228a73bbab189a16be2d4a3cd89ecbd4e31948bfdc64edac17dcdee3cd99c"}, - {file = "widgetsnbextension-4.0.7.tar.gz", hash = "sha256:ea67c17a7cd4ae358f8f46c3b304c40698bc0423732e3f273321ee141232c8be"}, + {file = "widgetsnbextension-4.0.8-py3-none-any.whl", hash = "sha256:2e37f0ce9da11651056280c7efe96f2db052fe8fc269508e3724f5cbd6c93018"}, + {file = "widgetsnbextension-4.0.8.tar.gz", hash = "sha256:9ec291ba87c2dfad42c3d5b6f68713fa18be1acd7476569516b2431682315c17"}, ] wrapt = [ {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"}, @@ -4452,6 +4563,6 @@ wrapt = [ {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"}, ] zipp = [ - {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, - {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, + {file = "zipp-3.16.0-py3-none-any.whl", hash = "sha256:5dadc3ad0a1f825fe42ce1bce0f2fc5a13af2e6b2d386af5b0ff295bc0a287d3"}, + {file = "zipp-3.16.0.tar.gz", hash = "sha256:1876cb065531855bbe83b6c489dcf69ecc28f1068d8e95959fe8bbc77774c941"}, ] diff --git a/pyproject.toml b/pyproject.toml index 3ae1b4cd5..8e52ee12b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -80,7 +80,7 @@ pytest-cov = "^4.0.0" pytest-mock = "^3.5.1" flake8 = "^6.0.0" python-dotenv = "^0.21.0" -black = "^22.6.0" +black = "^23.7.0" mypy = "^0.982" pylint = "^2.16.1" From 4be4609c05d51bd85dd45381f779457cb05e01bc Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Tue, 11 Jul 2023 14:54:21 -0700 Subject: [PATCH 108/135] update mypy to `1.4.1` --- poetry.lock | 59 ++++++++++++++++++++++++++------------------------ pyproject.toml | 2 +- 2 files changed, 32 insertions(+), 29 deletions(-) diff --git a/poetry.lock b/poetry.lock index f7a0cbd50..c6333b094 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1271,19 +1271,20 @@ python-versions = ">=3.7" [[package]] name = "mypy" -version = "0.982" +version = "1.4.1" description = "Optional static typing for Python" category = "dev" optional = false python-versions = ">=3.7" [package.dependencies] -mypy-extensions = ">=0.4.3" +mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=3.10" +typing-extensions = ">=4.1.0" [package.extras] dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] python2 = ["typed-ast (>=1.4.0,<2)"] reports = ["lxml"] @@ -2740,7 +2741,7 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "1.1" python-versions = ">=3.9.0,<3.11" -content-hash = "e86937006793a1cc256c37c77eaa05e1fd6d1db6bac6a5b22ceb161e2cf2b578" +content-hash = "57be0c9d055c5c8b02e320d6a3f966f0ae1856f8fb782ef90a7c25d3c6ed466d" [metadata.files] alabaster = [ @@ -3503,30 +3504,32 @@ mistune = [ {file = "mistune-3.0.1.tar.gz", hash = "sha256:e912116c13aa0944f9dc530db38eb88f6a77087ab128f49f84a48f4c05ea163c"}, ] mypy = [ - {file = "mypy-0.982-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5085e6f442003fa915aeb0a46d4da58128da69325d8213b4b35cc7054090aed5"}, - {file = "mypy-0.982-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:41fd1cf9bc0e1c19b9af13a6580ccb66c381a5ee2cf63ee5ebab747a4badeba3"}, - {file = "mypy-0.982-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:f793e3dd95e166b66d50e7b63e69e58e88643d80a3dcc3bcd81368e0478b089c"}, - {file = "mypy-0.982-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86ebe67adf4d021b28c3f547da6aa2cce660b57f0432617af2cca932d4d378a6"}, - {file = "mypy-0.982-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:175f292f649a3af7082fe36620369ffc4661a71005aa9f8297ea473df5772046"}, - {file = "mypy-0.982-cp310-cp310-win_amd64.whl", hash = "sha256:8ee8c2472e96beb1045e9081de8e92f295b89ac10c4109afdf3a23ad6e644f3e"}, - {file = "mypy-0.982-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58f27ebafe726a8e5ccb58d896451dd9a662a511a3188ff6a8a6a919142ecc20"}, - {file = "mypy-0.982-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6af646bd46f10d53834a8e8983e130e47d8ab2d4b7a97363e35b24e1d588947"}, - {file = "mypy-0.982-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7aeaa763c7ab86d5b66ff27f68493d672e44c8099af636d433a7f3fa5596d40"}, - {file = "mypy-0.982-cp37-cp37m-win_amd64.whl", hash = "sha256:724d36be56444f569c20a629d1d4ee0cb0ad666078d59bb84f8f887952511ca1"}, - {file = "mypy-0.982-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14d53cdd4cf93765aa747a7399f0961a365bcddf7855d9cef6306fa41de01c24"}, - {file = "mypy-0.982-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:26ae64555d480ad4b32a267d10cab7aec92ff44de35a7cd95b2b7cb8e64ebe3e"}, - {file = "mypy-0.982-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6389af3e204975d6658de4fb8ac16f58c14e1bacc6142fee86d1b5b26aa52bda"}, - {file = "mypy-0.982-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b35ce03a289480d6544aac85fa3674f493f323d80ea7226410ed065cd46f206"}, - {file = "mypy-0.982-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c6e564f035d25c99fd2b863e13049744d96bd1947e3d3d2f16f5828864506763"}, - {file = "mypy-0.982-cp38-cp38-win_amd64.whl", hash = "sha256:cebca7fd333f90b61b3ef7f217ff75ce2e287482206ef4a8b18f32b49927b1a2"}, - {file = "mypy-0.982-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a705a93670c8b74769496280d2fe6cd59961506c64f329bb179970ff1d24f9f8"}, - {file = "mypy-0.982-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75838c649290d83a2b83a88288c1eb60fe7a05b36d46cbea9d22efc790002146"}, - {file = "mypy-0.982-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:91781eff1f3f2607519c8b0e8518aad8498af1419e8442d5d0afb108059881fc"}, - {file = "mypy-0.982-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaa97b9ddd1dd9901a22a879491dbb951b5dec75c3b90032e2baa7336777363b"}, - {file = "mypy-0.982-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a692a8e7d07abe5f4b2dd32d731812a0175626a90a223d4b58f10f458747dd8a"}, - {file = "mypy-0.982-cp39-cp39-win_amd64.whl", hash = "sha256:eb7a068e503be3543c4bd329c994103874fa543c1727ba5288393c21d912d795"}, - {file = "mypy-0.982-py3-none-any.whl", hash = "sha256:1021c241e8b6e1ca5a47e4d52601274ac078a89845cfde66c6d5f769819ffa1d"}, - {file = "mypy-0.982.tar.gz", hash = "sha256:85f7a343542dc8b1ed0a888cdd34dca56462654ef23aa673907305b260b3d746"}, + {file = "mypy-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8"}, + {file = "mypy-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878"}, + {file = "mypy-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd"}, + {file = 
"mypy-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc"}, + {file = "mypy-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1"}, + {file = "mypy-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462"}, + {file = "mypy-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258"}, + {file = "mypy-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2"}, + {file = "mypy-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7"}, + {file = "mypy-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01"}, + {file = "mypy-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b"}, + {file = "mypy-1.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b"}, + {file = "mypy-1.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7"}, + {file = "mypy-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9"}, + {file = "mypy-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042"}, + {file = "mypy-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3"}, + {file = "mypy-1.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6"}, + {file = "mypy-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f"}, + {file = "mypy-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc"}, + {file = "mypy-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828"}, + {file = "mypy-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3"}, + {file = "mypy-1.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816"}, + {file = "mypy-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c"}, + {file = "mypy-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f"}, + {file = "mypy-1.4.1-py3-none-any.whl", hash = "sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4"}, + {file = "mypy-1.4.1.tar.gz", hash = "sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b"}, ] mypy-extensions = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, diff --git a/pyproject.toml b/pyproject.toml index 8e52ee12b..cba5a9f22 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -81,7 +81,7 @@ pytest-mock = 
"^3.5.1" flake8 = "^6.0.0" python-dotenv = "^0.21.0" black = "^23.7.0" -mypy = "^0.982" +mypy = "^1.4.1" pylint = "^2.16.1" [tool.poetry.group.aws] From a72dc377d371c25ff75ab463db0466b6344fa59c Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Tue, 11 Jul 2023 15:07:19 -0700 Subject: [PATCH 109/135] fix mypy issues in `exceptions.py` No longer allowing implicit Optional --- schematic/exceptions.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/schematic/exceptions.py b/schematic/exceptions.py index 321cb4282..b89a7f9dc 100644 --- a/schematic/exceptions.py +++ b/schematic/exceptions.py @@ -1,5 +1,5 @@ """Schematic Exceptions""" -from typing import Any, Sequence +from typing import Optional, Any, Sequence class MissingConfigValueError(Exception): @@ -13,7 +13,7 @@ class MissingConfigValueError(Exception): message. """ - def __init__(self, config_keys: Sequence[Any], message: str = None) -> None: + def __init__(self, config_keys: Sequence[Any], message: Optional[str] = None) -> None: config_keys_str = " > ".join(config_keys) self.message = ( "The configuration value corresponding to the argument " @@ -41,7 +41,7 @@ class WrongEntityTypeError(Exception): message. """ - def __init__(self, syn_id: str, message: str = None) -> None: + def __init__(self, syn_id: str, message: Optional[str] = None) -> None: self.message = ( f"'{syn_id}'' is not a desired entity type" "Please ensure that you put in the right syn_id" @@ -69,7 +69,7 @@ class MissingConfigAndArgumentValueError(Exception): """ def __init__( - self, arg_name: str, config_keys: Sequence[Any], message: str = None + self, arg_name: str, config_keys: Sequence[Any], message: Optional[str] = None ) -> None: config_keys_str = " > ".join(config_keys) self.message = ( @@ -99,7 +99,7 @@ class AccessCredentialsError(Exception): message. """ - def __init__(self, project: str, message: str = None) -> None: + def __init__(self, project: str, message: Optional[str] = None) -> None: self.message = ( f"Your access to '{project}'' could not be resolved. " "Please check your credentials and try again." From 2a20733b8f5b468abddd40226cccb0710fc0e7c3 Mon Sep 17 00:00:00 2001 From: linglp Date: Wed, 12 Jul 2023 11:33:02 -0400 Subject: [PATCH 110/135] update documentation for cli --- schematic/help.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/help.py b/schematic/help.py index 94fd06a08..c738df1bc 100644 --- a/schematic/help.py +++ b/schematic/help.py @@ -48,7 +48,7 @@ "json_schema": ( "Specify the path to the JSON Validation Schema for this argument. " "You can either explicitly pass the `.json` file here or provide it in the `config.yml` file " - "as a value for the `(model > input > validation_schema)` key." + "as a value for the `(model > location)` key." ), "alphabetize_valid_values": ( "Specify to alphabetize valid attribute values either ascending (a) or descending (d)." From 4e852a4ff65fefdb086b3c1e8d186806aa236125 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 12 Jul 2023 10:04:35 -0700 Subject: [PATCH 111/135] run black on `exceptions.py` --- schematic/exceptions.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/schematic/exceptions.py b/schematic/exceptions.py index b89a7f9dc..200ffe7bc 100644 --- a/schematic/exceptions.py +++ b/schematic/exceptions.py @@ -13,7 +13,9 @@ class MissingConfigValueError(Exception): message. 
""" - def __init__(self, config_keys: Sequence[Any], message: Optional[str] = None) -> None: + def __init__( + self, config_keys: Sequence[Any], message: Optional[str] = None + ) -> None: config_keys_str = " > ".join(config_keys) self.message = ( "The configuration value corresponding to the argument " From a3274bf2999449e1af9e7adcdb98ece3ce83bf73 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 12 Jul 2023 11:27:35 -0700 Subject: [PATCH 112/135] supply out of schema columns to handle_output_format_logic --- schematic/manifest/generator.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index 478d3f16d..5f080e8a2 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -1462,7 +1462,7 @@ def _handle_output_format_logic(self, output_format: str = None, output_path: st return output_file_path # Return google sheet if sheet_url flag is raised. - elif sheet_url: + elif sheet_url: manifest_sh = self.set_dataframe_by_url(manifest_url=empty_manifest_url, manifest_df=dataframe, out_of_schema_columns=out_of_schema_columns) return manifest_sh.url @@ -1520,7 +1520,7 @@ def get_manifest( if manifest_record: # TODO: Update or remove the warning in self.__init__() if # you change the behavior here based on self.use_annotations - + breakpoint() # Update df with existing manifest. Agnostic to output format updated_df, out_of_schema_columns = self._update_dataframe_with_existing_df(empty_manifest_url=empty_manifest_url, existing_df=manifest_record[1]) @@ -1554,7 +1554,7 @@ def get_manifest( # Update `additional_metadata` and generate manifest manifest_url, manifest_df = self.get_manifest_with_annotations(annotations) - + breakpoint() # Update df with existing manifest. 
Agnostic to output format updated_df, out_of_schema_columns = self._update_dataframe_with_existing_df(empty_manifest_url=empty_manifest_url, existing_df=manifest_df) @@ -1564,6 +1564,7 @@ def get_manifest( sheet_url = sheet_url, empty_manifest_url=empty_manifest_url, dataframe = manifest_df, + out_of_schema_columns = out_of_schema_columns, ) return result From 694f915fcd1aee5510987341bdc6deac768a6977 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 12 Jul 2023 13:54:24 -0700 Subject: [PATCH 113/135] fix path typo --- tests/test_store.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_store.py b/tests/test_store.py index 6408ae195..c9ffa8903 100644 --- a/tests/test_store.py +++ b/tests/test_store.py @@ -176,7 +176,7 @@ def test_annotation_submission(self, synapse_store, helpers, manifest_path, test assert key in annotations.keys() assert annotations[key] == test_annotations[key] - if manifest_path.endswith('annoations_tset_manifest.csv'): + if manifest_path.endswith('annotations_test_manifest.csv'): assert 'CheckRecommended' not in annotations.keys() elif manifest_path.endswith('test_BulkRNAseq.csv'): entity = synapse_store.syn.get(entity_id) From ec4ca0897a1030fa694ee9d02164c380ab744b9e Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 12 Jul 2023 14:01:31 -0700 Subject: [PATCH 114/135] change test name --- tests/test_store.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_store.py b/tests/test_store.py index c9ffa8903..4005069b2 100644 --- a/tests/test_store.py +++ b/tests/test_store.py @@ -115,7 +115,7 @@ def test_getFileAnnotations(self, synapse_store): assert expected_dict == actual_dict @pytest.mark.parametrize('only_new_files',[True, False]) - def test_file_entityIds(self, helpers, synapse_store, only_new_files): + def test_get_file_entityIds(self, helpers, synapse_store, only_new_files): manifest_path = "mock_manifests/test_BulkRNAseq.csv" dataset_files = synapse_store.getFilesInStorageDataset('syn39241199') From 3ed79bdd27d7a69df0711c905e86006f4b0ff074 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Wed, 12 Jul 2023 14:06:19 -0700 Subject: [PATCH 115/135] update comment --- schematic/store/synapse.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index 6f7caa3e5..c7b81fec6 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -614,7 +614,7 @@ def _get_file_entityIds(self, dataset_files: List, only_new_files: bool = False "No manifest was passed in, a manifest is required when `only_new_files` is True." 
                )

-            # find new files if any
+            # find new files (that are not in the current manifest) if any
             for file_id, file_name in dataset_files:
                 if not file_id in manifest["entityId"].values:
                     files["Filename"].append(file_name)

From 79abef4711b7052d842d6c9fe992a4c2a459235a Mon Sep 17 00:00:00 2001
From: Mialy DeFelice
Date: Wed, 12 Jul 2023 15:47:03 -0700
Subject: [PATCH 116/135] Fix manifest passed, clean code, and ensure entityId is at end of manifest

---
 schematic/manifest/generator.py | 41 ++++++++++++++++++++++++++++-----
 1 file changed, 35 insertions(+), 6 deletions(-)

diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py
index 5f080e8a2..5d3c168d5 100644
--- a/schematic/manifest/generator.py
+++ b/schematic/manifest/generator.py
@@ -1298,6 +1298,8 @@ def set_dataframe_by_url(
         start_col = self._column_to_letter(len(manifest_df.columns) - num_out_of_schema_columns) # find start of out of schema columns
         end_col = self._column_to_letter(len(manifest_df.columns) + 1) # find end of out of schema columns
         wb.set_data_validation(start = start_col, end = end_col, condition_type = None)
+
+
         # set permissions so that anyone with the link can edit
         sh.share("", role="writer", type="anyone")
@@ -1554,7 +1556,6 @@ def get_manifest(
         # Update `additional_metadata` and generate manifest
         manifest_url, manifest_df = self.get_manifest_with_annotations(annotations)
-        breakpoint()
         # Update df with existing manifest. Agnostic to output format
         updated_df, out_of_schema_columns = self._update_dataframe_with_existing_df(empty_manifest_url=empty_manifest_url, existing_df=manifest_df)

@@ -1563,11 +1564,34 @@ def get_manifest(
             output_path = output_path,
             sheet_url = sheet_url,
             empty_manifest_url=empty_manifest_url,
-            dataframe = manifest_df,
+            dataframe = updated_df,
+            out_of_schema_columns = out_of_schema_columns,
         )
         return result

+    def _get_end_columns(self, current_schema_headers, existing_manifest_headers, out_of_schema_columns):
+        """
+        Gather columns to be added to the end of the manifest, and ensure entityId is at the end.
+        Args:
+            current_schema_headers: list, columns in the current manifest schema
+            existing_manifest_headers: list, columns in the existing manifest
+            out_of_schema_columns: set, columns that are in the existing manifest, but not the current schema
+        Returns:
+            end_columns: list of columns to be added to the end of the manifest.
+        """
+        # Identify columns to add to the end of the manifest
+        end_columns = list(out_of_schema_columns)
+
+        # Make sure entityId is at the end of the list
+        if 'entityId' in end_columns and end_columns[-1] != 'entityId':
+            end_columns.remove('entityId')
+            end_columns.append('entityId')
+
+        # Add entityId to the end columns if it should be there but isn't
+        elif ('entityId' in current_schema_headers or 'entityId' in existing_manifest_headers) and 'entityId' not in end_columns:
+            end_columns.append('entityId')
+        return end_columns
+
     def _update_dataframe_with_existing_df(self, empty_manifest_url: str, existing_df: pd.DataFrame) -> pd.DataFrame:
         """
         Handle scenario when existing manifest does not match new manifest template due to changes in the data model:
@@ -1585,13 +1609,13 @@ def get_manifest(

         # Get headers for the current schema and existing manifest df.
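As a quick aside on the reordering idiom PATCH 116 introduces above: the snippet below is a minimal, self-contained sketch. The frame and the "Legacy" column name are invented for illustration; only the selection idiom itself comes from the patch.

    import pandas as pd

    # Toy manifest: "Legacy" is an out-of-schema column, and entityId must land last.
    df = pd.DataFrame(
        {"entityId": ["syn1", "syn2"], "Filename": ["a.txt", "b.txt"], "Legacy": [1, 2]}
    )
    end_columns = ["Legacy", "entityId"]

    # Keep the remaining columns in their current order, then append end_columns.
    df = df[[c for c in df if c not in end_columns] + end_columns]
    print(list(df.columns))  # ['Filename', 'Legacy', 'entityId']

The hunk context resumes below.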
current_schema_headers = list(self.get_dataframe_by_url(empty_manifest_url).columns) - existing_manfiest_headers = list(existing_df.columns) + existing_manifest_headers = list(existing_df.columns) # Find columns that exist in the current schema, but are not in the manifest being downloaded. - new_columns = self._get_missing_columns(current_schema_headers, existing_manfiest_headers) + new_columns = self._get_missing_columns(current_schema_headers, existing_manifest_headers) # Find columns that exist in the manifest being downloaded, but not in the current schema. - out_of_schema_columns = self._get_missing_columns(existing_manfiest_headers, current_schema_headers) + out_of_schema_columns = self._get_missing_columns(existing_manifest_headers, current_schema_headers) # clean empty columns if any are present (there should be none) # TODO: Remove this line once we start preventing empty column names @@ -1607,12 +1631,17 @@ def _update_dataframe_with_existing_df(self, empty_manifest_url: str, existing_d **dict(zip(new_columns, len(new_columns) * [""])) ) + end_columns = self._get_end_columns(current_schema_headers=current_schema_headers, + existing_manifest_headers=existing_manifest_headers, + out_of_schema_columns=out_of_schema_columns) + # sort columns in the updated manifest: # match latest schema order # move obsolete columns at the end updated_df = updated_df[self.sort_manifest_fields(updated_df.columns)] - updated_df = updated_df[[c for c in updated_df if c not in out_of_schema_columns] + list(out_of_schema_columns)] + # move obsolete columns at the end with entityId at the very end + updated_df = updated_df[[c for c in updated_df if c not in end_columns] + list(end_columns)] return updated_df, out_of_schema_columns def _format_new_excel_column(self, worksheet, new_column_index: int, col: str): From a4dff58ca69bf0f97d24abfa3670ad31debdf0c8 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Wed, 12 Jul 2023 15:47:46 -0700 Subject: [PATCH 117/135] remove breakpoint --- schematic/manifest/generator.py | 1 - 1 file changed, 1 deletion(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index 5d3c168d5..74fd1e739 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -1522,7 +1522,6 @@ def get_manifest( if manifest_record: # TODO: Update or remove the warning in self.__init__() if # you change the behavior here based on self.use_annotations - breakpoint() # Update df with existing manifest. 
Agnostic to output format updated_df, out_of_schema_columns = self._update_dataframe_with_existing_df(empty_manifest_url=empty_manifest_url, existing_df=manifest_record[1]) From fbe83caf38a9c0f75b9704e7f6a21cee401f0d82 Mon Sep 17 00:00:00 2001 From: linglp Date: Thu, 13 Jul 2023 11:49:45 -0400 Subject: [PATCH 118/135] change to optional bool --- schematic/manifest/generator.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index 13ebc0681..21d75f6bd 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -9,7 +9,7 @@ from pathlib import Path import pygsheets as ps from tempfile import NamedTemporaryFile -from typing import Dict, List, Tuple, Union +from typing import Dict, List, Tuple, Union, Optional from schematic.schemas.generator import SchemaGenerator from schematic.utils.google_api_utils import ( @@ -1236,7 +1236,7 @@ def _gather_all_fields(self, fields, json_schema): ) return required_metadata_fields - def get_empty_manifest(self, strict: bool=None, json_schema_filepath: str=None,): + def get_empty_manifest(self, strict: Optional[bool], json_schema_filepath: str=None): """Create an empty manifest using specifications from the json schema. Args: From 32a03c4176f5f315c9504eab08f017b61e5268a0 Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Thu, 13 Jul 2023 12:12:34 -0700 Subject: [PATCH 119/135] add optional[bool] for strict param --- schematic/manifest/generator.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index b5850fe79..eaea3d2ed 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -9,7 +9,7 @@ from pathlib import Path import pygsheets as ps from tempfile import NamedTemporaryFile -from typing import Dict, List, Tuple, Union +from typing import Dict, List, Optional, Tuple, Union from schematic.schemas.generator import SchemaGenerator from schematic.utils.google_api_utils import ( @@ -706,7 +706,7 @@ def _request_regex_vr(self, gs_formula, i:int, text_color={"red": 1}): return requests_vr def _request_regex_match_vr_formatting(self, validation_rules: List[str], i: int, - spreadsheet_id: str, requests_body: dict, strict: bool = None, + spreadsheet_id: str, requests_body: dict, strict: Optional[bool], ): """ Purpose: @@ -1356,7 +1356,7 @@ def map_annotation_names_to_display_names( return annotations.rename(columns=label_map) def get_manifest_with_annotations( - self, annotations: pd.DataFrame, strict: bool=None, sheet_url:bool=None, + self, annotations: pd.DataFrame, strict: Optional[bool], sheet_url:bool=None, ) -> Tuple[ps.Spreadsheet, pd.DataFrame]: """Generate manifest, optionally with annotations (if requested). @@ -1475,7 +1475,7 @@ def _handle_output_format_logic(self, output_format: str = None, output_path: st return dataframe def get_manifest( - self, dataset_id: str = None, sheet_url: bool = None, json_schema: str = None, output_format: str = None, output_path: str = None, access_token: str = None, strict: bool = None, + self, dataset_id: str = None, sheet_url: bool = None, json_schema: str = None, output_format: str = None, output_path: str = None, access_token: str = None, strict: Optional[bool], ) -> Union[str, pd.DataFrame]: """Gets manifest for a given dataset on Synapse. 
        TODO: move this function to class MetadatModel (after MetadataModel is refactored)

From f8d970a8b1c64c23f075318cca3c0d192b465c3e Mon Sep 17 00:00:00 2001
From: Mialy DeFelice
Date: Thu, 13 Jul 2023 12:16:15 -0700
Subject: [PATCH 120/135] add docstring for create_empty_gs

---
 schematic/manifest/generator.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py
index eaea3d2ed..fe382848b 100644
--- a/schematic/manifest/generator.py
+++ b/schematic/manifest/generator.py
@@ -1165,7 +1165,7 @@ def _create_requests_body(
             requests_body["requests"].append(borders_formatting)
         return requests_body

-    def _create_empty_gs(self, required_metadata_fields, json_schema, spreadsheet_id, sheet_url, strict=None):
+    def _create_empty_gs(self, required_metadata_fields, json_schema, spreadsheet_id, sheet_url, strict: Optional[bool]):
         """Generate requests to add columns and format the google sheet.
         Args:
             required_metadata_fields(dict):
@@ -1176,6 +1176,8 @@ def _create_empty_gs(self, required_metadata_fields, json_schema, spreadsheet_id
                 representing the data model, including: '$schema', '$id', 'title',
                 'type', 'properties', 'required'
             spreadsheet_id: str, of the id for the google sheet
+            sheet_url (str): google sheet url of template manifest
+            strict (Optional Bool): strictness with which to apply validation rules to google sheets. If True, blocks incorrect entries; if False, raises a warning
         Returns:
             manifest_url (str): url of the google sheet manifest.
         """

From 3dedebcaed9ee4b9c2cf823dd4fd66f55df9a394 Mon Sep 17 00:00:00 2001
From: Mialy DeFelice
Date: Thu, 13 Jul 2023 12:17:41 -0700
Subject: [PATCH 121/135] add optional[bool] for strict param in more pos

---
 schematic/manifest/generator.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py
index fe382848b..0023d2a3c 100644
--- a/schematic/manifest/generator.py
+++ b/schematic/manifest/generator.py
@@ -1080,7 +1080,7 @@ def _create_requests_body(
         json_schema,
         spreadsheet_id,
         sheet_url,
-        strict=None,
+        strict: Optional[bool],
     ):
         """Create and store all formatting changes for the google sheet to
         execute at once.

From a729b52087ed7fe75b4d8c516c9ecf76764d4dce Mon Sep 17 00:00:00 2001
From: Mialy DeFelice
Date: Thu, 13 Jul 2023 12:20:03 -0700
Subject: [PATCH 122/135] add inline documentation

---
 schematic/manifest/generator.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py
index 0023d2a3c..1f1775b78 100644
--- a/schematic/manifest/generator.py
+++ b/schematic/manifest/generator.py
@@ -1103,10 +1103,10 @@ def _create_requests_body(
         requests_body = {}
         requests_body["requests"] = []
         for i, req in enumerate(ordered_metadata_fields[0]):
-            # Gather validation rules and valid values for attribute, if using google sheets.
-
+            # Gather validation rules and valid values for attribute.
             validation_rules = self.sg.get_node_validation_rules(req)

+            # Add regex match validation rule to Google Sheets.
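For orientation, the request that `_request_regex_match_vr_formatting` ultimately assembles is, roughly, a Google Sheets API v4 setDataValidation entry like the sketch below. The exact fields schematic fills in are not shown in this patch, so the concrete values here are illustrative assumptions; the point is that `strict` maps onto the API's `rule.strict` flag, where True rejects a non-matching entry outright and False only flags it.

    # Illustrative batchUpdate request body (all values are example assumptions).
    request = {
        "setDataValidation": {
            "range": {
                "sheetId": 0,
                "startRowIndex": 1,  # skip the header row
                "startColumnIndex": 0,
                "endColumnIndex": 1,
            },
            "rule": {
                "condition": {
                    "type": "CUSTOM_FORMULA",
                    "values": [
                        {"userEnteredValue": '=REGEXMATCH(TO_TEXT(A2), "^[0-9]+$")'}
                    ],
                },
                "strict": True,  # True blocks bad input; False shows a warning
                "inputMessage": "Value must match the regex ^[0-9]+$",
            },
        }
    }

The hunk continues below with the conditional that gates this formatting.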
            if validation_rules and sheet_url:
                requests_body =self._request_regex_match_vr_formatting(
                    validation_rules, i, spreadsheet_id, requests_body, strict

From a48da8fcf9334f2979469cb645fe6d7afad4adc7 Mon Sep 17 00:00:00 2001
From: Mialy DeFelice
Date: Thu, 13 Jul 2023 12:21:43 -0700
Subject: [PATCH 123/135] remove strict parameter from get_dataframe_by_url

---
 schematic/manifest/generator.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py
index 1f1775b78..29d507b99 100644
--- a/schematic/manifest/generator.py
+++ b/schematic/manifest/generator.py
@@ -1308,7 +1308,7 @@ def set_dataframe_by_url(

         return sh

-    def get_dataframe_by_url(self, manifest_url: str, strict=None) -> pd.DataFrame:
+    def get_dataframe_by_url(self, manifest_url: str) -> pd.DataFrame:
         """Retrieve pandas DataFrame from table in Google Sheets.

         Args:

From 8bc4c3a15029dfa5754ccce538b7502925b2f2d7 Mon Sep 17 00:00:00 2001
From: Mialy DeFelice
Date: Thu, 13 Jul 2023 12:39:19 -0700
Subject: [PATCH 124/135] remove additional parameter being passed to get_dataframe_by_url

---
 schematic/manifest/generator.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py
index 29d507b99..6d1c91130 100644
--- a/schematic/manifest/generator.py
+++ b/schematic/manifest/generator.py
@@ -1241,7 +1241,7 @@ def _gather_all_fields(self, fields, json_schema):
         )
         return required_metadata_fields

-    def get_empty_manifest(self, json_schema_filepath=None, sheet_url=None, strict=None):
+    def get_empty_manifest(self, json_schema_filepath=None, sheet_url=None, strict: Optional[bool]):
         """Create an empty manifest using specifications from the
         json schema.

         Args:
@@ -1551,7 +1551,7 @@ def get_manifest(

         # if there are no files with annotations just generate an empty manifest
         if annotations.empty:
-            manifest_df = self.get_dataframe_by_url(manifest_url=manifest_url, sheet_url=sheet_url)
+            manifest_df = self.get_dataframe_by_url(manifest_url=manifest_url)
         else:
             # Subset columns if no interested in user-defined annotations and there are files present
             if self.is_file_based and not self.use_annotations:

From 0fd54d3d93293a0d9d003ae9feffe26bf4ada4ec Mon Sep 17 00:00:00 2001
From: Mialy DeFelice
Date: Thu, 13 Jul 2023 12:48:14 -0700
Subject: [PATCH 125/135] add additional docstrings

---
 schematic/manifest/generator.py | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)

diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py
index 6d1c91130..8d3f8234c 100644
--- a/schematic/manifest/generator.py
+++ b/schematic/manifest/generator.py
@@ -288,7 +288,7 @@ def _get_column_data_validation_values(
         spreadsheet_id,
         valid_values,
         column_id,
-        strict,
+        strict: Optional[bool],
         validation_type="ONE_OF_LIST",
         custom_ui=True,
         input_message="Choose one from dropdown",
@@ -1095,6 +1095,8 @@ def _create_requests_body(
                 representing the data model, including: '$schema', '$id', 'title',
                 'type', 'properties', 'required'
             spreadsheet_id: str, of the id for the google sheet
+            sheet_url (Will be deprecated): a boolean; determines whether a pandas dataframe or a google sheet url gets returned
+            strict (Optional Bool): strictness with which to apply validation rules to google sheets. 
If True, blocks incorrect entries; if False, raises a warning

        Return:
            requests_body(dict): containing all the update requests to add to the gs
@@ -1246,6 +1248,8 @@ def get_empty_manifest(self, json_schema_filepath=None, sheet_url=None, strict:
         json schema.

         Args:
             json_schema_filepath (str): path to json schema file
+            sheet_url (Will be deprecated): a boolean; determines whether a pandas dataframe or a google sheet url gets returned
+            strict (Optional Bool): strictness with which to apply validation rules to google sheets. If True, blocks incorrect entries; if False, raises a warning
         Returns:
             manifest_url (str): url of the google sheet manifest.
         TODO:
@@ -1364,7 +1368,8 @@ def get_manifest_with_annotations(

         Args:
             annotations (pd.DataFrame): Annotations table (can be empty).
-
+            strict (Optional Bool): strictness with which to apply validation rules to google sheets. If True, blocks incorrect entries; if False, raises a warning
+            sheet_url (Will be deprecated): a boolean; determines whether a pandas dataframe or a google sheet url gets returned
         Returns:
             Tuple[ps.Spreadsheet, pd.DataFrame]: Both the Google Sheet URL and the
                 corresponding data frame is returned.
@@ -1484,7 +1489,7 @@ def get_manifest(

         Args:
             dataset_id: Synapse ID of the "dataset" entity on Synapse (for a given center/project).
-            sheet_url: Determines if googlesheet URL or pandas dataframe should be returned.
+            sheet_url (Will be deprecated): a boolean; determines whether a pandas dataframe or a google sheet url gets returned
             output_format: Determines if Google sheet URL, pandas dataframe, or Excel spreadsheet gets returned.
             output_path: Determines the output path of the exported manifest
             access_token: Token in .synapseConfig. Since we could not pre-load access_token as an environment variable on AWS, we have to add this variable.

From 47d92470e13c43449664d7cd0c692d5af4ef83e4 Mon Sep 17 00:00:00 2001
From: Mialy DeFelice
Date: Thu, 13 Jul 2023 13:57:25 -0700
Subject: [PATCH 126/135] add defaults and update value passed to get manifest_df when no annotations

---
 schematic/manifest/generator.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py
index 8d3f8234c..ae33e7a09 100644
--- a/schematic/manifest/generator.py
+++ b/schematic/manifest/generator.py
@@ -1243,7 +1243,7 @@ def _gather_all_fields(self, fields, json_schema):
         )
         return required_metadata_fields

-    def get_empty_manifest(self, json_schema_filepath=None, sheet_url=None, strict: Optional[bool]):
+    def get_empty_manifest(self, json_schema_filepath=None, sheet_url=None, strict: Optional[bool]=None):
         """Create an empty manifest using specifications from the
         json schema.

         Args:
@@ -1362,7 +1362,7 @@ def map_annotation_names_to_display_names(
         return annotations.rename(columns=label_map)

     def get_manifest_with_annotations(
-        self, annotations: pd.DataFrame, strict: Optional[bool], sheet_url:bool=None,
+        self, annotations: pd.DataFrame, sheet_url:bool=None, strict: Optional[bool]=None,
     ) -> Tuple[ps.Spreadsheet, pd.DataFrame]:
         """Generate manifest, optionally with annotations (if requested).
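A note on why the `=None` defaults added in this patch matter beyond style: in Python, a parameter without a default may not follow parameters that have one, so intermediate signatures in this series such as `get_manifest(..., access_token: str = None, strict: Optional[bool])` raise `SyntaxError: non-default argument follows default argument` as soon as the module is imported. The explicit `Optional[...]` spelling also satisfies newer mypy releases, which reject implicit Optional defaults; that is the same motivation behind the earlier `exceptions.py` changes. A minimal sketch with stub names (not the real schematic API):

    from typing import Optional

    # SyntaxError if written as: def fetch(dataset_id: str = None, strict: Optional[bool]): ...
    def fetch(dataset_id: Optional[str] = None, strict: Optional[bool] = None) -> str:
        # None means "caller did not choose", distinct from an explicit True/False.
        if strict is None:
            strict = True
        return f"dataset={dataset_id}, strict={strict}"

    print(fetch(strict=False))  # dataset=None, strict=False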
@@ -1482,7 +1482,7 @@ def _handle_output_format_logic(self, output_format: str = None, output_path: st return dataframe def get_manifest( - self, dataset_id: str = None, sheet_url: bool = None, json_schema: str = None, output_format: str = None, output_path: str = None, access_token: str = None, strict: Optional[bool], + self, dataset_id: str = None, sheet_url: bool = None, json_schema: str = None, output_format: str = None, output_path: str = None, access_token: str = None, strict: Optional[bool]=None, ) -> Union[str, pd.DataFrame]: """Gets manifest for a given dataset on Synapse. TODO: move this function to class MetadatModel (after MetadataModel is refactored) @@ -1556,14 +1556,14 @@ def get_manifest( # if there are no files with annotations just generate an empty manifest if annotations.empty: - manifest_df = self.get_dataframe_by_url(manifest_url=manifest_url) + manifest_df = self.get_dataframe_by_url(manifest_url=empty_manifest_url) else: # Subset columns if no interested in user-defined annotations and there are files present if self.is_file_based and not self.use_annotations: annotations = annotations[["Filename", "eTag", "entityId"]] # Update `additional_metadata` and generate manifest - manifest_url, manifest_df = self.get_manifest_with_annotations(annotations, sheet_url=sheet_url) + manifest_url, manifest_df = self.get_manifest_with_annotations(annotations, sheet_url=sheet_url, strict=strict) # Update df with existing manifest. Agnostic to output format updated_df, out_of_schema_columns = self._update_dataframe_with_existing_df(empty_manifest_url=empty_manifest_url, existing_df=manifest_df) From 29a6f462310c5a9548f7f12d73c486fa2cadcd9e Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Thu, 13 Jul 2023 15:21:48 -0700 Subject: [PATCH 127/135] update `schematic_db` requirements --- poetry.lock | 42 +++++++++++++++++++++++++++++------------- pyproject.toml | 2 +- 2 files changed, 30 insertions(+), 14 deletions(-) diff --git a/poetry.lock b/poetry.lock index c6333b094..d1f5b59a4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -241,7 +241,7 @@ python-versions = ">=3.7.0" [[package]] name = "click" -version = "8.1.4" +version = "8.1.5" description = "Composable command line interface toolkit" category = "main" optional = false @@ -416,6 +416,17 @@ wrapt = ">=1.10,<2" [package.extras] dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] +[[package]] +name = "deprecation" +version = "2.1.0" +description = "A library to handle automated deprecations" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +packaging = "*" + [[package]] name = "dill" version = "0.3.6" @@ -1010,7 +1021,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, [[package]] name = "jsonschema" -version = "4.18.0" +version = "4.18.3" description = "An implementation of JSON Schema validation for Python" category = "main" optional = false @@ -2118,13 +2129,14 @@ python-versions = ">=3.5" [[package]] name = "schematic-db" -version = "0.0.20" +version = "0.0.29" description = "" category = "main" optional = false python-versions = ">=3.9,<4.0" [package.dependencies] +deprecation = ">=2.1.0,<3.0.0" interrogate = ">=1.5.0,<2.0.0" networkx = ">=2.8.6,<3.0.0" pandas = ">=1.4.3,<2.0.0" @@ -2728,7 +2740,7 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [[package]] name = "zipp" -version = "3.16.0" +version = "3.16.1" description = "Backport of pathlib-compatible object wrapper 
for zip files" category = "main" optional = false @@ -2741,7 +2753,7 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "1.1" python-versions = ">=3.9.0,<3.11" -content-hash = "57be0c9d055c5c8b02e320d6a3f966f0ae1856f8fb782ef90a7c25d3c6ed466d" +content-hash = "ebae29c94e793b572346ce4ca38e9744e0cda913550dc0a3c05b76f8f4796715" [metadata.files] alabaster = [ @@ -2995,8 +3007,8 @@ charset-normalizer = [ {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, ] click = [ - {file = "click-8.1.4-py3-none-any.whl", hash = "sha256:2739815aaa5d2c986a88f1e9230c55e17f0caad3d958a5e13ad0797c166db9e3"}, - {file = "click-8.1.4.tar.gz", hash = "sha256:b97d0c74955da062a7d4ef92fadb583806a585b2ea81958a81bd72726cbb8e37"}, + {file = "click-8.1.5-py3-none-any.whl", hash = "sha256:e576aa487d679441d7d30abb87e1b43d24fc53bffb8758443b1a9e1cee504548"}, + {file = "click-8.1.5.tar.gz", hash = "sha256:4be4b1af8d665c6d942909916d31a213a106800c47d0eeba73d34da3cbc11367"}, ] click-log = [ {file = "click-log-0.4.0.tar.gz", hash = "sha256:3970f8570ac54491237bcdb3d8ab5e3eef6c057df29f8c3d1151a51a9c23b975"}, @@ -3141,6 +3153,10 @@ deprecated = [ {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, ] +deprecation = [ + {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"}, + {file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"}, +] dill = [ {file = "dill-0.3.6-py3-none-any.whl", hash = "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0"}, {file = "dill-0.3.6.tar.gz", hash = "sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373"}, @@ -3360,8 +3376,8 @@ jsonpointer = [ {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, ] jsonschema = [ - {file = "jsonschema-4.18.0-py3-none-any.whl", hash = "sha256:b508dd6142bd03f4c3670534c80af68cd7bbff9ea830b9cf2625d4a3c49ddf60"}, - {file = "jsonschema-4.18.0.tar.gz", hash = "sha256:8caf5b57a990a98e9b39832ef3cb35c176fe331414252b6e1b26fd5866f891a4"}, + {file = "jsonschema-4.18.3-py3-none-any.whl", hash = "sha256:aab78b34c2de001c6b692232f08c21a97b436fe18e0b817bf0511046924fceef"}, + {file = "jsonschema-4.18.3.tar.gz", hash = "sha256:64b7104d72efe856bea49ca4af37a14a9eba31b40bb7238179f3803130fd34d9"}, ] jsonschema-specifications = [ {file = "jsonschema_specifications-2023.6.1-py3-none-any.whl", hash = "sha256:3d2b82663aff01815f744bb5c7887e2121a63399b49b104a3c96145474d091d7"}, @@ -4241,8 +4257,8 @@ ruamel-yaml-clib = [ {file = "ruamel.yaml.clib-0.2.7.tar.gz", hash = "sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497"}, ] schematic-db = [ - {file = "schematic_db-0.0.20-py3-none-any.whl", hash = "sha256:e1c5a3774156fe510c703df74fee5e7b5f38b721d870c9161dcc657b6fe18723"}, - {file = "schematic_db-0.0.20.tar.gz", hash = "sha256:577cdb32004b6ab5d383a3411e7c812410ae56d46d5a7065af57b488ffe5fe0a"}, + {file = "schematic_db-0.0.29-py3-none-any.whl", hash = "sha256:e43f1d7c06d877d47036c5a480ac8f22333daa967df67c4d8316091ff4ddc0a5"}, + {file = "schematic_db-0.0.29.tar.gz", hash = 
"sha256:77d338b34dd8f1e75b9df5b9b3f20de35087285079019d48d162de0d131f3ffb"}, ] scipy = [ {file = "scipy-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:aec8c62fbe52914f9cf28d846cf0401dd80ab80788bbab909434eb336ed07c04"}, @@ -4566,6 +4582,6 @@ wrapt = [ {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"}, ] zipp = [ - {file = "zipp-3.16.0-py3-none-any.whl", hash = "sha256:5dadc3ad0a1f825fe42ce1bce0f2fc5a13af2e6b2d386af5b0ff295bc0a287d3"}, - {file = "zipp-3.16.0.tar.gz", hash = "sha256:1876cb065531855bbe83b6c489dcf69ecc28f1068d8e95959fe8bbc77774c941"}, + {file = "zipp-3.16.1-py3-none-any.whl", hash = "sha256:0b37c326d826d5ca35f2b9685cd750292740774ef16190008b00a0227c256fe0"}, + {file = "zipp-3.16.1.tar.gz", hash = "sha256:857b158da2cbf427b376da1c24fd11faecbac5a4ac7523c3607f8a01f94c2ec0"}, ] diff --git a/pyproject.toml b/pyproject.toml index cba5a9f22..cb6a9d194 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -70,7 +70,7 @@ Flask-Cors = "^3.0.10" pdoc = "^12.2.0" dateparser = "^1.1.4" pandarallel = "^1.6.4" -schematic-db = {version = "^0.0.20", extras = ["synapse"]} +schematic-db = {version = "^0.0.29", extras = ["synapse"]} pyopenssl = "^23.0.0" typing-extensions = "<4.6.0" From b7f68c4f9328dc213d48f852be0d0368d00e07a6 Mon Sep 17 00:00:00 2001 From: linglp Date: Fri, 14 Jul 2023 12:09:36 -0400 Subject: [PATCH 128/135] modify description of args --- schematic/manifest/generator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index 21d75f6bd..457b7fba3 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -1240,7 +1240,7 @@ def get_empty_manifest(self, strict: Optional[bool], json_schema_filepath: str=N """Create an empty manifest using specifications from the json schema. Args: - strict (bool): strictness with which to apply validation rules to google sheets. + strict (bool): strictness with which to apply validation rules to google sheets. If true, blocks incorrect entries; if false, raises a warning json_schema_filepath (str): path to json schema file Returns: manifest_url (str): url of the google sheet manifest. From 11cbbe8bf7bd8c1d8a161bb2d51316bce3167751 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Fri, 14 Jul 2023 10:33:51 -0700 Subject: [PATCH 129/135] make all entity annotations comply with synapse --- schematic/store/synapse.py | 27 ++++++++------------------- 1 file changed, 8 insertions(+), 19 deletions(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index c7b81fec6..39a6d70c0 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -1060,7 +1060,7 @@ def upload_manifest_file(self, manifest, metadataManifestPath, datasetId, restri return manifest_synapse_file_id @missing_entity_handler - def format_row_annotations(self, se, sg, row, entityId, useSchemaLabel, hideBlanks): + def format_row_annotations(self, se, sg, row, entityId, hideBlanks): # prepare metadata for Synapse storage (resolve display name into a name that Synapse annotations support (e.g no spaces, parenthesis) # note: the removal of special characters, will apply only to annotation keys; we are not altering the manifest # this could create a divergence between manifest column and annotations. this should be ok for most use cases. 
@@ -1070,10 +1070,7 @@ def format_row_annotations(self, se, sg, row, entityId, useSchemaLabel, hideBlan for k, v in row.to_dict().items(): - if useSchemaLabel: - keySyn = se.get_class_label_from_display_name(str(k)).translate({ord(x): '' for x in blacklist_chars}) - else: - keySyn = str(k) + keySyn = se.get_class_label_from_display_name(str(k)).translate({ord(x): '' for x in blacklist_chars}) # Skip `Filename` and `ETag` columns when setting annotations if keySyn in ["Filename", "ETag", "eTag"]: @@ -1305,20 +1302,19 @@ def _generate_table_name(self, manifest): table_name = 'synapse_storage_manifest_table' return table_name, component_name - def _add_annotations(self, se, schemaGenerator, row, entityId, useSchemaLabel, hideBlanks): + def _add_annotations(self, se, schemaGenerator, row, entityId, hideBlanks): """Helper function to format and add annotations to entities in Synapse. Args: se: schemaExplorer object, schemaGenerator: schemaGenerator Object. row: current row of manifest being processed entityId (str): synapseId of entity to add annotations to - useSchemaLabel (bool): Flag to use schema label instead of display name hideBlanks: Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false. Returns: Annotations are added to entities in Synapse, no return. """ # Format annotations for Synapse - annos = self.format_row_annotations(se, schemaGenerator, row, entityId, useSchemaLabel, hideBlanks) + annos = self.format_row_annotations(se, schemaGenerator, row, entityId, hideBlanks) if annos: # Store annotations for an entity folder @@ -1351,7 +1347,6 @@ def add_entities( manifest, manifest_record_type, datasetId, - useSchemaLabel, hideBlanks, manifest_synapse_table_id='' ): @@ -1362,7 +1357,6 @@ def add_entities( manifest (pd.DataFrame): loaded df containing user supplied data. manifest_record_type: valid values are 'entity', 'table' or 'both'. Specifies whether to create entity ids and folders for each row in a manifest, a Synapse table to house the entire manifest or do both. datasetId (str): synapse ID of folder containing the dataset - useSchemaLabel (bool): Default is True - use the schema label. If False, uses the display label from the schema. Attribute display names in the schema must not only include characters that are not accepted by Synapse. Annotation names may only contain: letters, numbers, '_' and '.'. hideBlanks (bool): Default is false -Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false. manifest_synapse_table_id (str): Default is an empty string ''. Returns: @@ -1396,7 +1390,7 @@ def add_entities( # Adding annotations to connected files. if entityId: - self._add_annotations(se, schemaGenerator, row, entityId, useSchemaLabel, hideBlanks) + self._add_annotations(se, schemaGenerator, row, entityId, hideBlanks) return manifest def upload_manifest_as_table( @@ -1425,7 +1419,6 @@ def upload_manifest_as_table( component_name (str): Name of the component manifest that is currently being uploaded. restrict (bool): Flag for censored data. manifest_record_type (str): valid values are 'entity', 'table' or 'both'. Specifies whether to create entity ids and folders for each row in a manifest, a Synapse table to house the entire manifest or do both. - useSchemaLabel(bool): Default is True - use the schema label. If False, uses the display label from the schema. 
Attribute display names in the schema must not only include characters that are not accepted by Synapse. Annotation names may only contain: letters, numbers, '_' and '.'. hideBlanks (bool): Default is False -Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false. table_malnipulation (str): Specify the way the manifest tables should be store as on Synapse when one with the same name already exists. Options are 'replace' and 'upsert'. Return: @@ -1441,7 +1434,7 @@ def upload_manifest_as_table( useSchemaLabel, table_manipulation) - manifest = self.add_entities(se, schemaGenerator, manifest, manifest_record_type, datasetId, useSchemaLabel, hideBlanks, manifest_synapse_table_id) + manifest = self.add_entities(se, schemaGenerator, manifest, manifest_record_type, datasetId, hideBlanks, manifest_synapse_table_id) # Load manifest to synapse as a CSV File manifest_synapse_file_id = self.upload_manifest_file(manifest, metadataManifestPath, datasetId, restrict, component_name = component_name) @@ -1474,7 +1467,6 @@ def upload_manifest_as_csv( datasetId, restrict, manifest_record_type, - useSchemaLabel, hideBlanks, component_name, with_entities = False,): @@ -1487,7 +1479,6 @@ def upload_manifest_as_csv( datasetId (str): synapse ID of folder containing the dataset restrict (bool): Flag for censored data. manifest_record_type: valid values are 'entity', 'table' or 'both'. Specifies whether to create entity ids and folders for each row in a manifest, a Synapse table to house the entire manifest or do both. - useSchemaLabel (bool): Default is True - use the schema label. If False, uses the display label from the schema. Attribute display names in the schema must not only include characters that are not accepted by Synapse. Annotation names may only contain: letters, numbers, '_' and '.'. hideBlanks (bool): Default is False -Boolean flag that does not upload annotation keys with blank values when true. Uploads Annotation keys with empty string values when false. table_malnipulation (str): Specify the way the manifest tables should be store as on Synapse when one with the same name already exists. Options are 'replace' and 'upsert'. with_entities (bool): Default is False - Flag to indicate whether to create entityIds and add annotations. @@ -1495,7 +1486,7 @@ def upload_manifest_as_csv( manifest_synapse_file_id (str): SynID of manifest csv uploaded to synapse. 
""" if with_entities: - manifest = self.add_entities(se, schemaGenerator, manifest, manifest_record_type, datasetId, useSchemaLabel, hideBlanks) + manifest = self.add_entities(se, schemaGenerator, manifest, manifest_record_type, datasetId, hideBlanks) # Load manifest to synapse as a CSV File manifest_synapse_file_id = self.upload_manifest_file(manifest, @@ -1550,7 +1541,7 @@ def upload_manifest_combo( useSchemaLabel=useSchemaLabel, table_manipulation=table_manipulation,) - manifest = self.add_entities(se, schemaGenerator, manifest, manifest_record_type, datasetId, useSchemaLabel, hideBlanks, manifest_synapse_table_id) + manifest = self.add_entities(se, schemaGenerator, manifest, manifest_record_type, datasetId, hideBlanks, manifest_synapse_table_id) # Load manifest to synapse as a CSV File manifest_synapse_file_id = self.upload_manifest_file(manifest, metadataManifestPath, datasetId, restrict, component_name) @@ -1624,7 +1615,6 @@ def associateMetadataWithFiles( metadataManifestPath, datasetId=datasetId, restrict=restrict_manifest, - useSchemaLabel=useSchemaLabel, hideBlanks=hideBlanks, manifest_record_type=manifest_record_type, component_name = component_name, @@ -1653,7 +1643,6 @@ def associateMetadataWithFiles( metadataManifestPath, datasetId=datasetId, restrict=restrict_manifest, - useSchemaLabel=useSchemaLabel, hideBlanks=hideBlanks, manifest_record_type=manifest_record_type, component_name = component_name, From 24b53817ffe674f3710dc6753b30105ca2824dd9 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Mon, 17 Jul 2023 10:19:47 -0700 Subject: [PATCH 130/135] simplify `SynapseDatabase` init --- schematic/store/synapse.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index 1a7f2d8a8..f72ae2b71 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -40,7 +40,6 @@ import uuid -from schematic_db.synapse.synapse import SynapseConfig from schematic_db.rdb.synapse_database import SynapseDatabase @@ -2137,10 +2136,9 @@ def upsertTable(self, sg: SchemaGenerator,): existingTableId: synID of the already existing table that had its metadata replaced """ - username, authtoken = self._get_schematic_db_creds() + authtoken = self._get_schematic_db_creds() - synConfig = SynapseConfig(username, authtoken, self.synStore.getDatasetProject(self.datasetId)) - synapseDB = SynapseDatabase(synConfig) + synapseDB = SynapseDatabase(authtoken=authtoken, project_id=self.synStore.getDatasetProject(self.datasetId)) try: # Try performing upsert From 59e75e0e142c6fc81569a0000de5c65bbf1c4db7 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Mon, 17 Jul 2023 10:25:26 -0700 Subject: [PATCH 131/135] refactor credential gathering method --- schematic/store/synapse.py | 22 +++++----------------- 1 file changed, 5 insertions(+), 17 deletions(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index f72ae2b71..d6029f2db 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -2070,29 +2070,25 @@ def replaceTable(self, specifySchema: bool = True, columnTypeDict: dict = None,) return self.existingTableId - def _get_schematic_db_creds(self,): - username = None + def _get_auth_token(self,): authtoken = None - # Get access token from environment variable if available # Primarily useful for testing environments, with other possible usefulness for containers env_access_token = 
os.getenv("SYNAPSE_ACCESS_TOKEN") if env_access_token: authtoken = env_access_token - return username, authtoken + return authtoken # Get token from authorization header # Primarily useful for API endpoint functionality if 'Authorization' in self.synStore.syn.default_headers: authtoken = self.synStore.syn.default_headers['Authorization'].split('Bearer ')[-1] - return username, authtoken + return authtoken # retrive credentials from synapse object # Primarily useful for local users, could only be stored here when a .synapseConfig file is used, but including to be safe synapse_object_creds = self.synStore.syn.credentials - if hasattr(synapse_object_creds, 'username'): - username = synapse_object_creds.username if hasattr(synapse_object_creds, '_token'): authtoken = synapse_object_creds.secret @@ -2102,24 +2098,16 @@ def _get_schematic_db_creds(self,): config = self.synStore.syn.getConfigFile(CONFIG.synapse_configuration_path) # check which credentials are provided in file - if config.has_option('authentication', 'username'): - username = config.get('authentication', 'username') if config.has_option('authentication', 'authtoken'): authtoken = config.get('authentication', 'authtoken') # raise error if required credentials are not found - # providing an authtoken without a username did not prohibit upsert functionality, - # but including username gathering for completeness for schematic_db - if not username and not authtoken: - raise NameError( - "Username and authtoken credentials could not be found in the environment, synapse object, or the .synapseConfig file" - ) if not authtoken: raise NameError( "authtoken credentials could not be found in the environment, synapse object, or the .synapseConfig file" ) - return username, authtoken + return authtoken def upsertTable(self, sg: SchemaGenerator,): """ @@ -2136,7 +2124,7 @@ def upsertTable(self, sg: SchemaGenerator,): existingTableId: synID of the already existing table that had its metadata replaced """ - authtoken = self._get_schematic_db_creds() + authtoken = self._get_auth_token() synapseDB = SynapseDatabase(authtoken=authtoken, project_id=self.synStore.getDatasetProject(self.datasetId)) From 6612d2cdddc775f44eb7d6ed013f2c926b6f0ed9 Mon Sep 17 00:00:00 2001 From: Gianna Jordan <61707471+GiaJordan@users.noreply.github.com> Date: Mon, 17 Jul 2023 14:11:01 -0700 Subject: [PATCH 132/135] update kwarg --- schematic/store/synapse.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schematic/store/synapse.py b/schematic/store/synapse.py index 3f830d519..d13e6df07 100644 --- a/schematic/store/synapse.py +++ b/schematic/store/synapse.py @@ -2164,7 +2164,7 @@ def upsertTable(self, sg: SchemaGenerator,): authtoken = self._get_auth_token() - synapseDB = SynapseDatabase(authtoken=authtoken, project_id=self.synStore.getDatasetProject(self.datasetId)) + synapseDB = SynapseDatabase(auth_token=authtoken, project_id=self.synStore.getDatasetProject(self.datasetId)) try: # Try performing upsert From 28f7148eb7c4cd9680737be016a1659b5df03b0a Mon Sep 17 00:00:00 2001 From: Mialy DeFelice Date: Fri, 21 Jul 2023 14:25:38 -0700 Subject: [PATCH 133/135] Add Id columns to the end of the manifest --- schematic/manifest/generator.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py index 74fd1e739..2e588b403 100644 --- a/schematic/manifest/generator.py +++ b/schematic/manifest/generator.py @@ -1581,8 +1581,13 @@ def _get_end_columns(self, current_schema_headers, 
existing_manifest_headers, ou
         # Identify columns to add to the end of the manifest
         end_columns = list(out_of_schema_columns)

-        # Make sure entityId is at the end of the list
-        if 'entityId' in end_columns and end_columns[-1] != 'entityId':
+        # Make sure the Uuid and Id columns come at the end, just before entityId
+        for id_name in ['Uuid', 'Id']:
+            if id_name in end_columns:
+                end_columns.remove(id_name)
+                end_columns.append(id_name)
+
+        if 'entityId' in end_columns:
             end_columns.remove('entityId')
             end_columns.append('entityId')

From fdd45b3ebbf4bb7bd8c9afaba7bcc9254e1c2915 Mon Sep 17 00:00:00 2001
From: Mialy DeFelice
Date: Fri, 21 Jul 2023 14:26:26 -0700
Subject: [PATCH 134/135] simplify id addition logic

---
 schematic/manifest/generator.py | 8 ++------
 1 file changed, 2 insertions(+), 6 deletions(-)

diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py
index 2e588b403..837580b7a 100644
--- a/schematic/manifest/generator.py
+++ b/schematic/manifest/generator.py
@@ -1581,15 +1581,11 @@ def _get_end_columns(self, current_schema_headers, existing_manifest_headers, ou
         # Identify columns to add to the end of the manifest
         end_columns = list(out_of_schema_columns)

-        # Make sure the Uuid and Id columns come at the end, just before entityId
-        for id_name in ['Uuid', 'Id']:
+        # Make sure the Id columns are placed at the end of the manifest, in the given order.
+        for id_name in ['Uuid', 'Id', 'entityId']:
             if id_name in end_columns:
                 end_columns.remove(id_name)
                 end_columns.append(id_name)
-
-        if 'entityId' in end_columns:
-            end_columns.remove('entityId')
-            end_columns.append('entityId')

         # Add entity_id to the end columns if it should be there but isn't
         elif 'entityId' in (current_schema_headers or existing_manfiest_headers) and 'entityId' not in end_columns:
             end_columns.append('entityId')

         return end_columns

From 1e6edfea3752fc886ed9328283fcbf20346cb249 Mon Sep 17 00:00:00 2001
From: Mialy DeFelice
Date: Fri, 21 Jul 2023 15:30:26 -0700
Subject: [PATCH 135/135] fix elif legacy

---
 schematic/manifest/generator.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/schematic/manifest/generator.py b/schematic/manifest/generator.py
index 837580b7a..ec3072ade 100644
--- a/schematic/manifest/generator.py
+++ b/schematic/manifest/generator.py
@@ -1588,7 +1588,7 @@ def _get_end_columns(self, current_schema_headers, existing_manifest_headers, ou
                 end_columns.append(id_name)

         # Add entity_id to the end columns if it should be there but isn't
-        elif 'entityId' in (current_schema_headers or existing_manfiest_headers) and 'entityId' not in end_columns:
+        if 'entityId' in (current_schema_headers or existing_manifest_headers) and 'entityId' not in end_columns:
             end_columns.append('entityId')

         return end_columns
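
Taken together, the last three patches (133-135) leave `_get_end_columns` pushing the
`Uuid`, `Id`, and `entityId` columns to the end of the manifest headers, in that fixed
order. A minimal standalone sketch of the resulting ordering logic, for readers who want
the net effect without replaying the diffs (the function name and sample headers below
are illustrative, not the shipped code):

    from typing import List

    def order_end_columns(end_columns: List[str]) -> List[str]:
        """Move Uuid, Id, and entityId (when present) to the end, in that order."""
        ordered = list(end_columns)
        for id_name in ['Uuid', 'Id', 'entityId']:
            if id_name in ordered:
                # Removing then re-appending pushes the column to the current end,
                # so each later id lands after the earlier ones.
                ordered.remove(id_name)
                ordered.append(id_name)
        return ordered

    # Hypothetical header list, just to show the reordering:
    print(order_end_columns(['entityId', 'CustomColumn', 'Uuid', 'Id']))
    # -> ['CustomColumn', 'Uuid', 'Id', 'entityId']

Patch 131's `_get_auth_token` refactor similarly settles on a fixed resolution order for
the Synapse auth token: environment variable first, then the Authorization header, then
the credentials stored on the Synapse client object, and finally the .synapseConfig file,
raising NameError when nothing matches. A simplified sketch of that chain, assuming a
`default_headers` dict and a ConfigParser as stand-ins for the real Synapse client state
(the credentials-object step is omitted for brevity):

    import os
    from configparser import ConfigParser

    def resolve_auth_token(default_headers: dict, config: ConfigParser) -> str:
        # 1. Environment variable: useful for tests and containers
        token = os.getenv("SYNAPSE_ACCESS_TOKEN")
        if token:
            return token
        # 2. Authorization header: useful for API endpoint requests
        auth_header = default_headers.get("Authorization", "")
        if auth_header.startswith("Bearer "):
            return auth_header.split("Bearer ")[-1]
        # 3. .synapseConfig file: useful for local users
        if config.has_option("authentication", "authtoken"):
            return config.get("authentication", "authtoken")
        raise NameError(
            "authtoken credentials could not be found in the environment, "
            "synapse object, or the .synapseConfig file"
        )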