diff --git a/.github/workflows/pull-request.yml b/.github/workflows/pull-request.yml deleted file mode 100644 index c62435f..0000000 --- a/.github/workflows/pull-request.yml +++ /dev/null @@ -1,14 +0,0 @@ -name: ✅ Quality Checks -on: [ push ] - -jobs: - code-quality-checks: - runs-on: ubuntu-latest - steps: - - name: Checkout Code - uses: actions/checkout@v4 - - - name: Run checks - uses: astral-sh/ruff-action@v1 - with: - args: "check" \ No newline at end of file diff --git a/.github/workflows/pypi.yml b/.github/workflows/pypi.yml index e40d4fc..6e53cf1 100644 --- a/.github/workflows/pypi.yml +++ b/.github/workflows/pypi.yml @@ -14,8 +14,14 @@ jobs: - name: Checkout Code uses: actions/checkout@v4 + # We are pinning uv to lower version to workaround the existing + # bugs in setuptools build system. We can unpin onces the issue + # is resolved in upstream repos. + # Bug: https://github.com/pypa/setuptools/issues/4759 - name: Install uv - uses: astral-sh/setup-uv@v3 + uses: astral-sh/setup-uv@v4 + with: + version: '0.4.22' - name: Publish to pypi run: | diff --git a/.github/workflows/python-compatibility.yml b/.github/workflows/python-compatibility.yml deleted file mode 100644 index 98821b5..0000000 --- a/.github/workflows/python-compatibility.yml +++ /dev/null @@ -1,22 +0,0 @@ -name: 🐍 Python Compatibility Check -on: [push] - -jobs: - python-compatibility: - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ['3.8.10', '3.9', '3.10', '3.11', '3.12', '3.13'] - steps: - - name: Checkout Code - uses: actions/checkout@v4 - - - name: Install uv - uses: astral-sh/setup-uv@v3 - - - name: Set up Python ${{ matrix.python-version }} - run: uv python install ${{ matrix.python-version }} - - - name: Try import - run: | - uv run python -c "import rapyuta_io_sdk_v2" \ No newline at end of file diff --git a/.github/workflows/quality-checks.yml b/.github/workflows/quality-checks.yml new file mode 100644 index 0000000..b225d91 --- /dev/null +++ b/.github/workflows/quality-checks.yml @@ -0,0 +1,31 @@ +name: ✅ Quality Checks +on: [ push ] + +jobs: + perform-checks: + name: Perform checks + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13'] + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Lint code + uses: astral-sh/ruff-action@v1 + with: + args: "check" + + - name: Setup uv + uses: astral-sh/setup-uv@v4 + + - name: Set up Python ${{ matrix.python-version }} + run: uv python install ${{ matrix.python-version }} + + - name: Run unit tests + run: | + uv sync --all-extras --dev + source .venv/bin/activate + uv run pytest tests/ --cov \ No newline at end of file diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 4a7f258..d71b058 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -6,12 +6,13 @@ on: jobs: release: - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest steps: - name: Checkout Code - uses: actions/checkout@v4.2.2 - with: - token: ${{ secrets.GH_TOKEN }} + uses: actions/checkout@v4 + + - name: Install uv + uses: astral-sh/setup-uv@v4 - name: Run semantic-release run: | diff --git a/.gitignore b/.gitignore index ab52b27..a8a0966 100644 --- a/.gitignore +++ b/.gitignore @@ -14,4 +14,5 @@ wheels/ main_test.py test_config.json -ignore/ \ No newline at end of file +ignore/ +.coverage \ No newline at end of file diff --git a/README.md b/README.md index 416cad4..507e97a 100644 --- a/README.md +++ b/README.md @@ -1,27 +1,54 @@ -# Rapyuta IO SDK v2 -Rapyuta IO 
SDK v2 provides a comprehensive set of tools and functionalities to interact with the rapyut.io platform. +
+ +
+ +# rapyuta.io SDK v2 + +rapyuta.io SDK v2 provides a comprehensive set of tools and functionalities to interact with the rapyuta.io platform. ## Installation + ```bash pip install rapyuta-io-sdk-v2 ``` -### Quick Start +## Usage + +To use the SDK, you need to configure it with your rapyuta.io credentials. + +### From a Configuration File + +You can create a `Configuration` object from a JSON file. + ```python -from rapyuta_io_sdk_v2 import Configuration, Client +from rapyuta_io_sdk_v2.config import Configuration, Client + +config = Configuration.from_file("/path/to/config.json") +client = Client(config) +``` -config = Configuration(email="user@email.com", - password="password", - organization_guid="organization_guid", - project_guid="project_guid") +### Using `email` and `password` + +```python +from rapyuta_io_sdk_v2.config import Configuration, Client +config = Configuration(organization_guid="ORGANIZATION_GUID") client = Client(config) -client.login() +client.login(email="EMAIL", password="PASSWORD") +``` + +You are now set to invoke various methods on the `client` object. -# Get current project -project = client.get_project() +For example, this is how you can list projects. + +```python +projects = client.list_projects() +print(projects) ``` ## Contributing -We welcome contributions! Please read our [contributing guidelines](CONTRIBUTING.md) to get started. \ No newline at end of file +We welcome contributions. Please read our [contribution guidelines](CONTRIBUTING.md) to get started. \ No newline at end of file diff --git a/assets/v2sdk-logo-dark.png b/assets/v2sdk-logo-dark.png new file mode 100644 index 0000000..5aab18a Binary files /dev/null and b/assets/v2sdk-logo-dark.png differ diff --git a/assets/v2sdk-logo-light.png b/assets/v2sdk-logo-light.png new file mode 100644 index 0000000..15b6575 Binary files /dev/null and b/assets/v2sdk-logo-light.png differ diff --git a/pyproject.toml b/pyproject.toml index daef684..61ca75f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,11 +4,7 @@ dynamic = ["version"] description = "Python SDK for rapyuta.io v2 APIs" dependencies = [ "httpx>=0.27.2", - "mock>=5.1.0", "munch>=4.0.0", - "pytest-mock>=3.14.0", - "pytest>=8.3.3", - "tenacity>=9.0.0", ] readme = "README.md" license = { file = "LICENSE" } @@ -30,3 +26,95 @@ allow-direct-references = true [tool.hatch.build.targets.wheel] packages = ["rapyuta_io_sdk_v2"] + +[dependency-groups] +dev = [ + "coverage>=7.6.1", + "mock>=5.1.0", + "pytest-cov>=5.0.0", + "pytest-mock>=3.14.0", + "pytest>=8.3.3", + "typing-extensions>=4.12.2", + "pytest-asyncio>=0.24.0", + "asyncmock>=0.4.2", +] + + +[tool.ruff] +# Exclude a variety of commonly ignored directories. +exclude = [ + ".bzr", + ".direnv", + ".eggs", + ".git", + ".git-rewrite", + ".hg", + ".ipynb_checkpoints", + ".mypy_cache", + ".nox", + ".pants.d", + ".pyenv", + ".pytest_cache", + ".pytype", + ".ruff_cache", + ".svn", + ".tox", + ".venv", + ".vscode", + "__pypackages__", + "_build", + "buck-out", + "build", + "dist", + "node_modules", + "site-packages", + "venv", +] + +# Same as Black. +line-length = 90 +indent-width = 4 + +# Assume Python 3.8 +target-version = "py38" + +[tool.ruff.lint] +# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default. +# Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or +# McCabe complexity (`C901`) by default. +select = ["E4", "E7", "E9", "F", "B", "Q", "W", "N816"] +ignore = ["E741", "B904"] + +# Allow fix for all enabled rules (when `--fix`) is provided. 
+fixable = ["ALL"] +unfixable = [] + +# Allow unused variables when underscore-prefixed. +dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" + +[tool.ruff.format] +# Like Black, use double quotes for strings. +quote-style = "double" + +# Like Black, indent with spaces, rather than tabs. +indent-style = "space" + +# Like Black, respect magic trailing commas. +skip-magic-trailing-comma = false + +# Like Black, automatically detect the appropriate line ending. +line-ending = "auto" + +# Enable auto-formatting of code examples in docstrings. Markdown, +# reStructuredText code/literal blocks and doctests are all supported. +# +# This is currently disabled by default, but it is planned for this +# to be opt-out in the future. +docstring-code-format = false + +# Set the line length limit used when formatting code snippets in +# docstrings. +# +# This only has an effect when the `docstring-code-format` setting is +# enabled. +docstring-code-line-length = "dynamic" diff --git a/rapyuta_io_sdk_v2/__init__.py b/rapyuta_io_sdk_v2/__init__.py index 9daf4c6..3f0d943 100644 --- a/rapyuta_io_sdk_v2/__init__.py +++ b/rapyuta_io_sdk_v2/__init__.py @@ -1,5 +1,7 @@ # ruff: noqa -from rapyuta_io_sdk_v2.config import Configuration +from rapyuta_io_sdk_v2.async_client import AsyncClient from rapyuta_io_sdk_v2.client import Client +from rapyuta_io_sdk_v2.config import Configuration +from rapyuta_io_sdk_v2.utils import walk_pages __version__ = "0.0.1" diff --git a/rapyuta_io_sdk_v2/async_client.py b/rapyuta_io_sdk_v2/async_client.py new file mode 100644 index 0000000..5f72e36 --- /dev/null +++ b/rapyuta_io_sdk_v2/async_client.py @@ -0,0 +1,1438 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
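The updated `__init__.py` re-exports `AsyncClient` alongside `Client`, `Configuration`, and `walk_pages`, so both clients can be imported from the package root. A minimal sketch of the new import surface; the organization GUID is a placeholder:

```python
from rapyuta_io_sdk_v2 import AsyncClient, Client, Configuration

# Placeholder GUID; substitute a real organization GUID.
config = Configuration(organization_guid="ORGANIZATION_GUID")

sync_client = Client(config)        # blocking client (httpx.Client)
async_client = AsyncClient(config)  # coroutine-based client (httpx.AsyncClient)
```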
+ +import platform + +import httpx +from munch import Munch + +from rapyuta_io_sdk_v2.config import Configuration +from rapyuta_io_sdk_v2.utils import handle_and_munchify_response, handle_server_errors + + +class AsyncClient(object): + """AsyncClient class for the SDK.""" + + def __init__(self, config=None, **kwargs): + self.config = config or Configuration() + timeout = kwargs.get("timeout", 10) + self.c = httpx.AsyncClient( + timeout=timeout, + limits=httpx.Limits( + max_keepalive_connections=5, + max_connections=5, + keepalive_expiry=30, + ), + headers={ + "User-Agent": ( + "rio-sdk-v2;N/A;{};{};{} {}".format( + platform.processor() or platform.machine(), + platform.system(), + platform.release(), + platform.version(), + ) + ) + }, + ) + self.sync_client = httpx.Client( + timeout=timeout, + limits=httpx.Limits( + max_keepalive_connections=5, + max_connections=5, + keepalive_expiry=30, + ), + headers={ + "User-Agent": ( + "rio-sdk-v2;N/A;{};{};{} {}".format( + platform.processor() or platform.machine(), + platform.system(), + platform.release(), + platform.version(), + ) + ) + }, + ) + self.rip_host = self.config.hosts.get("rip_host") + self.v2api_host = self.config.hosts.get("v2api_host") + + def get_auth_token(self, email: str, password: str) -> str: + """Get the authentication token for the user. + + Args: + email (str) + password (str) + + Returns: + str: authentication token + """ + response = self.sync_client.post( + url=f"{self.rip_host}/user/login", + headers={"Content-Type": "application/json"}, + json={ + "email": email, + "password": password, + }, + ) + handle_server_errors(response) + return response.json()["data"].get("token") + + def login( + self, + email: str, + password: str, + ) -> None: + """Get the authentication token for the user. + + Args: + email (str) + password (str) + + Returns: + str: authentication token + """ + + token = self.get_auth_token(email, password) + self.config.auth_token = token + + @handle_and_munchify_response + def logout(self, token: str = None) -> Munch: + """Expire the authentication token. + + Args: + token (str): The token to expire. + """ + + if token is None: + token = self.config.auth_token + + return self.sync_client.post( + url=f"{self.rip_host}/user/logout", + headers={ + "Content-Type": "application/json", + "Authorization": f"Bearer {token}", + }, + ) + + def refresh_token(self, token: str = None, set_token: bool = True) -> str: + """Refresh the authentication token. + + Args: + token (str): The token to refresh. + set_token (bool): Set the refreshed token in the configuration. + + Returns: + str: The refreshed token. + """ + + if token is None: + token = self.config.auth_token + + response = self.sync_client.post( + url=f"{self.rip_host}/refreshtoken", + headers={"Content-Type": "application/json"}, + json={"token": token}, + ) + handle_server_errors(response) + if set_token: + self.config.auth_token = response.json()["data"].get("token") + return response.json()["data"].get("token") + + def set_organization(self, organization_guid: str) -> None: + """Set the organization GUID. + + Args: + organization_guid (str): Organization GUID + """ + self.config.set_organization(organization_guid) + + def set_project(self, project_guid: str) -> None: + """Set the project GUID. 
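The authentication helpers on `AsyncClient` (`get_auth_token`, `login`, `logout`, `refresh_token`) are plain synchronous methods that go through the internal `httpx.Client`, while the resource methods are coroutines. A minimal sketch of the login flow, with placeholder credentials and GUIDs:

```python
import asyncio

from rapyuta_io_sdk_v2 import AsyncClient, Configuration

config = Configuration(organization_guid="ORGANIZATION_GUID")
client = AsyncClient(config)

# login() is synchronous: it fetches a token and stores it on the configuration.
client.login(email="EMAIL", password="PASSWORD")

# Scope subsequent calls to a project (placeholder GUID).
client.set_project("PROJECT_GUID")

async def main():
    # Resource calls are coroutines and must be awaited.
    print(await client.list_projects())

asyncio.run(main())
```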
+ + Args: + project_guid (str): Project GUID + """ + self.config.set_project(project_guid) + + # ----------------- Projects ----------------- + @handle_and_munchify_response + async def list_projects( + self, + cont: int = 0, + limit: int = 50, + label_selector: list[str] = None, + status: list[str] = None, + organizations: list[str] = None, + **kwargs, + ) -> Munch: + """List all projects in an organization. + + Args: + cont (int, optional): Start index of projects. Defaults to 0. + limit (int, optional): Number of projects to list. Defaults to 50. + label_selector (list[str], optional): Define labelSelector to get projects from. Defaults to None. + status (list[str], optional): Define status to get projects from. Defaults to None. + organizations (list[str], optional): Define organizations to get projects from. Defaults to None. + + Returns: + Munch: List of projects as a Munch object. + """ + + return await self.c.get( + url=f"{self.v2api_host}/v2/projects/", + headers=self.config.get_headers(with_project=False, **kwargs), + params={ + "continue": cont, + "limit": limit, + "status": status, + "organizations": organizations, + "labelSelector": label_selector, + }, + ) + + @handle_and_munchify_response + async def get_project(self, project_guid: str = None, **kwargs) -> Munch: + """Get a project by its GUID. + + If no project or organization GUID is provided, + the async default project and organization GUIDs will + be picked from the current configuration. + + Args: + project_guid (str): user provided project GUID or config project GUID + + Raises: + ValueError: If organization_guid or project_guid is None + + Returns: + Munch: Project details as a Munch object. + """ + if project_guid is None: + project_guid = self.config.project_guid + + if not project_guid: + raise ValueError("project_guid is required") + + return await self.c.get( + url=f"{self.v2api_host}/v2/projects/{project_guid}/", + headers=self.config.get_headers(with_project=False, **kwargs), + ) + + @handle_and_munchify_response + async def create_project(self, body: dict, **kwargs) -> Munch: + """Create a new project. + + Args: + body (object): Project details + + Returns: + Munch: Project details as a Munch object. + """ + + return await self.c.post( + url=f"{self.v2api_host}/v2/projects/", + headers=self.config.get_headers(with_project=False, **kwargs), + json=body, + ) + + @handle_and_munchify_response + async def update_project( + self, body: dict, project_guid: str = None, **kwargs + ) -> Munch: + """Update a project by its GUID. + + Returns: + Munch: Project details as a Munch object. + """ + + return await self.c.put( + url=f"{self.v2api_host}/v2/projects/{project_guid}/", + headers=self.config.get_headers(with_project=False, **kwargs), + json=body, + ) + + @handle_and_munchify_response + async def delete_project(self, project_guid: str, **kwargs) -> Munch: + """Delete a project by its GUID. + + Args: + project_guid (str): Project GUID + + Returns: + Munch: Project details as a Munch object. + """ + + return await self.c.delete( + url=f"{self.v2api_host}/v2/projects/{project_guid}/", + headers=self.config.get_headers(with_project=False, **kwargs), + ) + + @handle_and_munchify_response + async def update_project_owner( + self, body: dict, project_guid: str = None, **kwargs + ) -> Munch: + """Update the owner of a project by its GUID. + + Returns: + Munch: Project details as a Munch object. 
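A short usage sketch for the asynchronous project endpoints above, assuming an authenticated `AsyncClient` named `client`; the GUID is a placeholder. Each call returns the response munchified by the `handle_and_munchify_response` decorator.

```python
import asyncio

async def project_overview(client):
    # List the first 10 projects in the configured organization.
    print(await client.list_projects(limit=10))

    # Fetch one project explicitly; without an argument, get_project()
    # falls back to the project GUID stored in the configuration.
    print(await client.get_project("PROJECT_GUID"))

# asyncio.run(project_overview(client))
```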
+ """ + project_guid = project_guid or self.config.project_guid + + return await self.c.put( + url=f"{self.v2api_host}/v2/projects/{project_guid}/owner/", + headers=self.config.get_headers(**kwargs), + json=body, + ) + + # -------------------Package------------------- + @handle_and_munchify_response + async def list_packages( + self, + cont: int = 0, + limit: int = 50, + label_selector: list[str] = None, + name: str = None, + **kwargs, + ) -> Munch: + """List all packages in a project. + + Args: + cont (int, optional): Start index of packages. Defaults to 0. + limit (int, optional): Number of packages to list. Defaults to 50. + label_selector (list[str], optional): Define labelSelector to get packages from. Defaults to None. + name (str, optional): Define name to get packages from. Defaults to None. + + Returns: + Munch: List of packages as a Munch object. + """ + + return await self.c.get( + url=f"{self.v2api_host}/v2/packages/", + headers=self.config.get_headers(**kwargs), + params={ + "continue": cont, + "limit": limit, + "labelSelector": label_selector, + "name": name, + }, + ) + + @handle_and_munchify_response + async def create_package(self, body: dict, **kwargs) -> Munch: + """Create a new package. + + The Payload is the JSON format of the Package Manifest. + For a documented example, run the rio explain package command. + + Returns: + Munch: Package details as a Munch object. + """ + + return await self.c.post( + url=f"{self.v2api_host}/v2/packages/", + headers=self.config.get_headers(**kwargs), + json=body, + ) + + @handle_and_munchify_response + async def get_package(self, name: str, version: str = None, **kwargs) -> Munch: + """Get a package by its name. + + Args: + name (str): Package name + version (str, optional): Package version. Defaults to None. + + Returns: + Munch: Package details as a Munch object. + """ + + return await self.c.get( + url=f"{self.v2api_host}/v2/packages/{name}/", + headers=self.config.get_headers(**kwargs), + params={"version": version}, + ) + + @handle_and_munchify_response + async def delete_package(self, name: str, **kwargs) -> Munch: + """Delete a package by its name. + + Args: + name (str): Package name + + Returns: + Munch: Package details as a Munch object. + """ + + return await self.c.delete( + url=f"{self.v2api_host}/v2/packages/{name}/", + headers=self.config.get_headers(**kwargs), + ) + + @handle_and_munchify_response + async def list_deployments( + self, + cont: int = 0, + limit: int = 50, + dependencies: bool = False, + device_name: str = None, + guids: list[str] = None, + label_selector: list[str] = None, + name: str = None, + names: list[str] = None, + package_name: str = None, + package_version: str = None, + phases: list[str] = None, + regions: list[str] = None, + **kwargs, + ) -> Munch: + """List all deployments in a project. + + Args: + cont (int, optional): Start index of deployments. Defaults to 0. + limit (int, optional): Number of deployments to list. Defaults to 50. + dependencies (bool, optional): Filter by dependencies. Defaults to False. + device_name (str, optional): Filter deployments by device name. Defaults to None. + guids (list[str], optional): Filter by GUIDs. Defaults to None. + label_selector (list[str], optional): Define labelSelector to get deployments from. Defaults to None. + name (str, optional): Define name to get deployments from. Defaults to None. + names (list[str], optional): Define names to get deployments from. Defaults to None. + package_name (str, optional): Filter by package name. Defaults to None. 
+ package_version (str, optional): Filter by package version. Defaults to None. + phases (list[str], optional): Filter by phases. Available values : InProgress, Provisioning, Succeeded, FailedToUpdate, FailedToStart, Stopped. Defaults to None. + regions (list[str], optional): Filter by regions. Defaults to None. + + Returns: + Munch: List of deployments as a Munch object. + """ + + return await self.c.get( + url=f"{self.v2api_host}/v2/deployments/", + headers=self.config.get_headers(**kwargs), + params={ + "continue": cont, + "limit": limit, + "dependencies": dependencies, + "deviceName": device_name, + "guids": guids, + "labelSelector": label_selector, + "name": name, + "names": names, + "packageName": package_name, + "packageVersion": package_version, + "phases": phases, + "regions": regions, + }, + ) + + # -------------------Deployment------------------- + + @handle_and_munchify_response + async def create_deployment(self, body: dict, **kwargs) -> Munch: + """Create a new deployment. + + Args: + body (object): Deployment details + + Returns: + Munch: Deployment details as a Munch object. + """ + + return await self.c.post( + url=f"{self.v2api_host}/v2/deployments/", + headers=self.config.get_headers(**kwargs), + json=body, + ) + + @handle_and_munchify_response + async def get_deployment(self, name: str, guid: str = None, **kwargs) -> Munch: + """Get a deployment by its name. + + Returns: + Munch: Deployment details as a Munch object. + """ + + return await self.c.get( + url=f"{self.v2api_host}/v2/deployments/{name}/", + headers=self.config.get_headers(**kwargs), + params={"guid": guid}, + ) + + @handle_and_munchify_response + async def update_deployment(self, name: str, body: dict, **kwargs) -> Munch: + """Update a deployment by its name. + + Returns: + Munch: Deployment details as a Munch object. + """ + + return await self.c.put( + url=f"{self.v2api_host}/v2/deployments/{name}/", + headers=self.config.get_headers(**kwargs), + json=body, + ) + + @handle_and_munchify_response + async def delete_deployment(self, name: str, **kwargs) -> Munch: + """Delete a deployment by its name. + + Returns: + Munch: Deployment details as a Munch object. + """ + + return await self.c.delete( + url=f"{self.v2api_host}/v2/deployments/{name}/", + headers=self.config.get_headers(**kwargs), + ) + + @handle_and_munchify_response + async def get_deployment_graph(self, name: str, **kwargs) -> Munch: + """Get a deployment graph by its name. [Experimental] + + Returns: + Munch: Deployment graph as a Munch object. + """ + + return await self.c.get( + url=f"{self.v2api_host}/v2/deployments/{name}/graph/", + headers=self.config.get_headers(**kwargs), + ) + + @handle_and_munchify_response + async def get_deployment_history( + self, name: str, guid: str = None, **kwargs + ) -> Munch: + """Get a deployment history by its name. + + Returns: + Munch: Deployment history as a Munch object. + """ + + return await self.c.get( + url=f"{self.v2api_host}/v2/deployments/{name}/history/", + headers=self.config.get_headers(**kwargs), + params={"guid": guid}, + ) + + # -------------------Disks------------------- + @handle_and_munchify_response + async def list_disks( + self, + cont: int = 0, + label_selector: list[str] = None, + limit: int = 50, + names: list[str] = None, + regions: list[str] = None, + status: list[str] = None, + **kwargs, + ) -> Munch: + """List all disks in a project. + + Args: + cont (int, optional): Start index of disks. Defaults to 0. 
+ label_selector (list[str], optional): Define labelSelector to get disks from. Defaults to None. + limit (int, optional): Number of disks to list. Defaults to 50. + names (list[str], optional): Define names to get disks from. Defaults to None. + regions (list[str], optional): Define regions to get disks from. Defaults to None. + status (list[str], optional): Define status to get disks from. Available values : Available, Bound, Released, Failed, Pending.Defaults to None. + + + Returns: + Munch: List of disks as a Munch object. + """ + + return await self.c.get( + url=f"{self.v2api_host}/v2/disks/", + headers=self.config.get_headers(**kwargs), + params={ + "continue": cont, + "limit": limit, + "labelSelector": label_selector, + "names": names, + "regions": regions, + "status": status, + }, + ) + + @handle_and_munchify_response + async def get_disk(self, name: str, **kwargs) -> Munch: + """Get a disk by its name. + + Args: + name (str): Disk name + + Returns: + Munch: Disk details as a Munch object. + """ + + return await self.c.get( + url=f"{self.v2api_host}/v2/disks/{name}/", + headers=self.config.get_headers(**kwargs), + ) + + @handle_and_munchify_response + async def create_disk(self, body: str, **kwargs) -> Munch: + """Create a new disk. + + Returns: + Munch: Disk details as a Munch object. + """ + + return await self.c.post( + url=f"{self.v2api_host}/v2/disks/", + headers=self.config.get_headers(**kwargs), + json=body, + ) + + @handle_and_munchify_response + async def delete_disk(self, name: str, **kwargs) -> Munch: + """Delete a disk by its name. + + Args: + name (str): Disk name + + Returns: + Munch: Disk details as a Munch object. + """ + + return await self.c.delete( + url=f"{self.v2api_host}/v2/disks/{name}/", + headers=self.config.get_headers(**kwargs), + ) + + # -------------------Static Routes------------------- + @handle_and_munchify_response + async def list_staticroutes( + self, + cont: int = 0, + limit: int = 50, + guids: list[str] = None, + label_selector: list[str] = None, + names: list[str] = None, + regions: list[str] = None, + **kwargs, + ) -> Munch: + """List all static routes in a project. + + Args: + cont (int, optional): Start index of static routes. Defaults to 0. + limit (int, optional): Number of static routes to list. Defaults to 50. + guids (list[str], optional): Define guids to get static routes from. Defaults to None. + label_selector (list[str], optional): Define labelSelector to get static routes from. Defaults to None. + names (list[str], optional): Define names to get static routes from. Defaults to None. + regions (list[str], optional): Define regions to get static routes from. Defaults to None. + + Returns: + Munch: List of static routes as a Munch object. + """ + + return await self.c.get( + url=f"{self.v2api_host}/v2/staticroutes/", + headers=self.config.get_headers(**kwargs), + params={ + "continue": cont, + "limit": limit, + "guids": guids, + "labelSelector": label_selector, + "names": names, + "regions": regions, + }, + ) + + @handle_and_munchify_response + async def create_staticroute(self, body: dict, **kwargs) -> Munch: + """Create a new static route. + + Returns: + Munch: Static route details as a Munch object. + """ + + return await self.c.post( + url=f"{self.v2api_host}/v2/staticroutes/", + headers=self.config.get_headers(**kwargs), + json=body, + ) + + @handle_and_munchify_response + async def get_staticroute(self, name: str, **kwargs) -> Munch: + """Get a static route by its name. 
+ + Args: + name (str): Static route name + + Returns: + Munch: Static route details as a Munch object. + """ + + return await self.c.get( + url=f"{self.v2api_host}/v2/staticroutes/{name}/", + headers=self.config.get_headers(**kwargs), + ) + + @handle_and_munchify_response + async def update_staticroute(self, name: str, body: dict, **kwargs) -> Munch: + """Update a static route by its name. + + Args: + name (str): Static route name + body (dict): Update details + + Returns: + Munch: Static route details as a Munch object. + """ + + return await self.c.put( + url=f"{self.v2api_host}/v2/staticroutes/{name}/", + headers=self.config.get_headers(**kwargs), + json=body, + ) + + @handle_and_munchify_response + async def delete_staticroute(self, name: str, **kwargs) -> Munch: + """Delete a static route by its name. + + Args: + name (str): Static route name + + Returns: + Munch: Static route details as a Munch object. + """ + + return await self.c.delete( + url=f"{self.v2api_host}/v2/staticroutes/{name}/", + headers=self.config.get_headers(**kwargs), + ) + + # -------------------Networks------------------- + @handle_and_munchify_response + async def list_networks( + self, + cont: int = 0, + limit: int = 50, + device_name: str = None, + label_selector: list[str] = None, + names: list[str] = None, + network_type: str = None, + phases: list[str] = None, + regions: list[str] = None, + status: list[str] = None, + **kwargs, + ) -> Munch: + """List all networks in a project. + + Args: + cont (int, optional): Start index of networks. Defaults to 0. + limit (int, optional): Number of networks to list. Defaults to 50. + device_name (str, optional): Filter networks by device name. Defaults to None. + label_selector (list[str], optional): Define labelSelector to get networks from. Defaults to None. + names (list[str], optional): Define names to get networks from. Defaults to None. + network_type (str, optional): Define network type to get networks from. Defaults to None. + phases (list[str], optional): Define phases to get networks from. Available values : InProgress, Provisioning, Succeeded, FailedToUpdate, FailedToStart, Stopped. Defaults to None. + regions (list[str], optional): Define regions to get networks from. Defaults to None. + status (list[str], optional): Define status to get networks from. Available values : Running, Pending, Error, Unknown, Stopped. Defaults to None. + + Returns: + Munch: List of networks as a Munch object. + """ + + return await self.c.get( + url=f"{self.v2api_host}/v2/networks/", + headers=self.config.get_headers(**kwargs), + params={ + "continue": cont, + "limit": limit, + "deviceName": device_name, + "labelSelector": label_selector, + "names": names, + "networkType": network_type, + "phases": phases, + "regions": regions, + "status": status, + }, + ) + + @handle_and_munchify_response + async def create_network(self, body: dict, **kwargs) -> Munch: + """Create a new network. + + Returns: + Munch: Network details as a Munch object. + """ + + return await self.c.post( + url=f"{self.v2api_host}/v2/networks/", + headers=self.config.get_headers(**kwargs), + json=body, + ) + + @handle_and_munchify_response + async def get_network(self, name: str, **kwargs) -> Munch: + """Get a network by its name. + + Args: + name (str): Network name + + Returns: + Munch: Network details as a Munch object. 
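The static-route and network endpoints above share the same list/get/delete shape. A brief sketch, assuming an authenticated `AsyncClient` named `client`; the resource names are placeholders:

```python
import asyncio

async def networking_overview(client):
    # Both resources accept the same cont/limit and filter parameters.
    print(await client.list_staticroutes(limit=20))
    print(await client.list_networks(limit=20))

    # Individual resources are addressed by name (placeholder names here).
    print(await client.get_staticroute("demo-route"))
    print(await client.get_network("demo-network"))

# asyncio.run(networking_overview(client))
```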
+ """ + + return await self.c.get( + url=f"{self.v2api_host}/v2/networks/{name}/", + headers=self.config.get_headers(**kwargs), + ) + + @handle_and_munchify_response + async def delete_network(self, name: str, **kwargs) -> Munch: + """Delete a network by its name. + + Args: + name (str): Network name + + Returns: + Munch: Network details as a Munch object. + """ + + return await self.c.delete( + url=f"{self.v2api_host}/v2/networks/{name}/", + headers=self.config.get_headers(**kwargs), + ) + + # -------------------Secrets------------------- + @handle_and_munchify_response + async def list_secrets( + self, + cont: int = 0, + limit: int = 50, + label_selector: list[str] = None, + names: list[str] = None, + regions: list[str] = None, + **kwargs, + ) -> Munch: + """List all secrets in a project. + + Args: + cont (int, optional): Start index of secrets. Defaults to 0. + limit (int, optional): Number of secrets to list. Defaults to 50. + label_selector (list[str], optional): Define labelSelector to get secrets from. Defaults to None. + names (list[str], optional): Define names to get secrets from. Defaults to None. + regions (list[str], optional): Define regions to get secrets from. Defaults to None. + + Returns: + Munch: List of secrets as a Munch object. + """ + + return await self.c.get( + url=f"{self.v2api_host}/v2/secrets/", + headers=self.config.get_headers(**kwargs), + params={ + "continue": cont, + "limit": limit, + "labelSelector": label_selector, + "names": names, + "regions": regions, + }, + ) + + @handle_and_munchify_response + async def create_secret(self, body: dict, **kwargs) -> Munch: + """Create a new secret. + + Returns: + Munch: Secret details as a Munch object. + """ + + return await self.c.post( + url=f"{self.v2api_host}/v2/secrets/", + headers=self.config.get_headers(*kwargs), + json=body, + ) + + @handle_and_munchify_response + async def get_secret(self, name: str, **kwargs) -> Munch: + """Get a secret by its name. + + Args: + name (str): Secret name + + Returns: + Munch: Secret details as a Munch object. + """ + + return await self.c.get( + url=f"{self.v2api_host}/v2/secrets/{name}/", + headers=self.config.get_headers(**kwargs), + ) + + @handle_and_munchify_response + async def update_secret(self, name: str, body: dict, **kwargs) -> Munch: + """Update a secret by its name. + + Args: + name (str): Secret name + body (dict): Update details + + Returns: + Munch: Secret details as a Munch object. + """ + + return await self.c.put( + url=f"{self.v2api_host}/v2/secrets/{name}/", + headers=self.config.get_headers(**kwargs), + json=body, + ) + + @handle_and_munchify_response + async def delete_secret(self, name: str, **kwargs) -> Munch: + """Delete a secret by its name. + + Args: + name (str): Secret name + + Returns: + Munch: Secret details as a Munch object. + """ + + return await self.c.delete( + url=f"{self.v2api_host}/v2/secrets/{name}/", + headers=self.config.get_headers(**kwargs), + ) + + # -------------------Config Trees------------------- + @handle_and_munchify_response + async def list_configtrees( + self, + cont: int = 0, + limit: int = 50, + label_selector: list[str] = None, + with_project: bool = True, + **kwargs, + ) -> Munch: + """List all config trees in a project. + + Args: + cont (int, optional): Start index of config trees. Defaults to 0. + limit (int, optional): Number of config trees to list. Defaults to 50. + label_selector (list[str], optional): Define labelSelector to get config trees from. Defaults to None. 
+ with_project (bool, optional): Include project details. Defaults to True. + + Returns: + Munch: List of config trees as a Munch object. + """ + + return await self.c.get( + url=f"{self.v2api_host}/v2/configtrees/", + headers=self.config.get_headers(with_project=with_project, **kwargs), + params={ + "continue": cont, + "limit": limit, + "labelSelector": label_selector, + }, + ) + + @handle_and_munchify_response + async def create_configtree( + self, body: dict, with_project: bool = True, **kwargs + ) -> Munch: + """Create a new config tree. + + Args: + body (object): Config tree details + with_project (bool, optional): Work in the project scope. Defaults to True. + + Returns: + Munch: Config tree details as a Munch object. + """ + + return await self.c.post( + url=f"{self.v2api_host}/v2/configtrees/", + headers=self.config.get_headers(with_project=with_project, **kwargs), + json=body, + ) + + @handle_and_munchify_response + async def get_configtree( + self, + name: str, + content_types: list[str] = None, + include_data: bool = False, + key_prefixes: list[str] = None, + revision: str = None, + with_project: bool = True, + **kwargs, + ) -> Munch: + """Get a config tree by its name. + + Args: + name (str): Config tree name + content_types (list[str], optional): Define contentTypes to get config tree from. Defaults to None. + include_data (bool, optional): Include data. Defaults to False. + key_prefixes (list[str], optional): Define keyPrefixes to get config tree from. Defaults to None. + revision (str, optional): Define revision to get config tree from. Defaults to None. + with_project (bool, optional): Work in the project scope. Defaults to True. + + Returns: + Munch: Config tree details as a Munch object. + """ + + return await self.c.get( + url=f"{self.v2api_host}/v2/configtrees/{name}/", + headers=self.config.get_headers(with_project=with_project, **kwargs), + params={ + "contentTypes": content_types, + "includeData": include_data, + "keyPrefixes": key_prefixes, + "revision": revision, + }, + ) + + @handle_and_munchify_response + async def set_configtree_revision( + self, name: str, configtree: object, project_guid: str = None, **kwargs + ) -> Munch: + """Set a config tree revision. + + Args: + name (str): Config tree name + configtree (object): Config tree details + project_guid (str, optional): Project GUID. async defaults to None. + + Returns: + Munch: Config tree details as a Munch object. + """ + + return await self.c.put( + url=f"{self.v2api_host}/v2/configtrees/{name}/", + headers=self.config.get_headers(project_guid=project_guid, **kwargs), + json=configtree, + ) + + @handle_and_munchify_response + async def update_configtree( + self, name: str, body: dict, with_project: bool = True, **kwargs + ) -> Munch: + """Update a config tree by its name. + + Args: + name (str): Config tree name + body (dict): Update details + with_project (bool, optional): Work in the project scope. Defaults to True. + + Returns: + Munch: Config tree details as a Munch object. + """ + + return await self.c.put( + url=f"{self.v2api_host}/v2/configtrees/{name}/", + headers=self.config.get_headers(with_project=with_project, **kwargs), + json=body, + ) + + @handle_and_munchify_response + async def delete_configtree(self, name: str, **kwargs) -> Munch: + """Delete a config tree by its name. + + Args: + name (str): Config tree name + + Returns: + Munch: Config tree details as a Munch object. 
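A sketch of the basic config-tree calls above, assuming an authenticated `AsyncClient` named `client`. The tree body is a minimal placeholder rather than the documented manifest schema, and `with_project` (default `True`) switches between project and organization scope.

```python
import asyncio

async def configtree_basics(client):
    # Placeholder manifest; the real schema is defined by the v2 API.
    print(await client.create_configtree({"metadata": {"name": "demo-tree"}}))

    # Read the tree back, including stored key data.
    print(await client.get_configtree("demo-tree", include_data=True))

# asyncio.run(configtree_basics(client))
```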
+ """ + + return await self.c.delete( + url=f"{self.v2api_host}/v2/configtrees/{name}/", + headers=self.config.get_headers(**kwargs), + ) + + @handle_and_munchify_response + async def list_revisions( + self, + tree_name: str, + cont: int = 0, + limit: int = 50, + committed: bool = False, + label_selector: list[str] = None, + **kwargs, + ) -> Munch: + """List all revisions of a config tree. + + Args: + tree_name (str): Config tree name + cont (int, optional): Continue param . Defaults to 0. + limit (int, optional): Limit param . Defaults to 50. + committed (bool, optional): Committed. Defaults to False. + label_selector (list[str], optional): Define labelSelector to get revisions from. Defaults to None. + + Returns: + Munch: List of revisions as a Munch object. + """ + + return await self.c.get( + url=f"{self.v2api_host}/v2/configtrees/{tree_name}/revisions/", + headers=self.config.get_headers(**kwargs), + params={ + "continue": cont, + "limit": limit, + "committed": committed, + "labelSelector": label_selector, + }, + ) + + @handle_and_munchify_response + async def create_revision( + self, name: str, body: dict, project_guid: str = None, **kwargs + ) -> Munch: + """Create a new revision. + + Args: + name (str): Config tree name + body (object): Revision details + project_guid (str): Project GUID (optional) + + Returns: + Munch: Revision details as a Munch object. + """ + + return await self.c.post( + url=f"{self.v2api_host}/v2/configtrees/{name}/revisions/", + headers=self.config.get_headers(project_guid=project_guid, **kwargs), + json=body, + ) + + @handle_and_munchify_response + async def put_keys_in_revision( + self, name: str, revision_id: str, config_values: dict, **kwargs + ) -> Munch: + """Put keys in a revision. + + Args: + name (str): Config tree name + revision_id (str): Config tree revision ID + config_values (dict): Config values + + Returns: + Munch: Revision details as a Munch object. + """ + + return await self.c.put( + url=f"{self.v2api_host}/v2/configtrees/{name}/revisions/{revision_id}/keys/", + headers=self.config.get_headers(**kwargs), + json=config_values, + ) + + @handle_and_munchify_response + async def commit_revision( + self, + tree_name: str, + revision_id: str, + author: str = None, + message: str = None, + project_guid: str = None, + **kwargs, + ) -> Munch: + """Commit a revision. + + Args: + tree_name (str): Config tree name + revision_id (str): Config tree revision ID + author (str, optional): Revision Author. Defaults to None. + message (str, optional): Revision Message. Defaults to None. + project_guid (str, optional): Project GUID. Defaults to None. + + Returns: + Munch: Revision details as a Munch object. + """ + config_tree_revision = { + "author": author, + "message": message, + } + + return await self.c.patch( + url=f"{self.v2api_host}/v2/configtrees/{tree_name}/revisions/{revision_id}/commit/", + headers=self.config.get_headers(project_guid=project_guid, **kwargs), + json=config_tree_revision, + ) + + @handle_and_munchify_response + async def get_key_in_revision( + self, + tree_name: str, + revision_id: str, + key: str, + project_guid: str = None, + **kwargs, + ) -> Munch: + """Get a key in a revision. + + Args: + tree_name (str): Config tree name + revision_id (str): Config tree revision ID + key (str): Key + project_guid (str, optional): Project GUID. async defaults to None. + + Returns: + Munch: Key details as a Munch object. 
+ """ + + return await self.c.get( + url=f"{self.v2api_host}/v2/configtrees/{tree_name}/revisions/{revision_id}/{key}/", + headers=self.config.get_headers(project_guid=project_guid, **kwargs), + ) + + @handle_and_munchify_response + async def put_key_in_revision( + self, + tree_name: str, + revision_id: str, + key: str, + project_guid: str = None, + **kwargs, + ) -> Munch: + """Put a key in a revision. + + Args: + tree_name (str): Config tree name + revision_id (str): Config tree revision ID + key (str): Key + project_guid (str, optional): Project GUID. async defaults to None. + + Returns: + Munch: Key details as a Munch object. + """ + + return await self.c.put( + url=f"{self.v2api_host}/v2/configtrees/{tree_name}/revisions/{revision_id}/{key}/", + headers=self.config.get_headers(project_guid=project_guid, **kwargs), + ) + + @handle_and_munchify_response + async def delete_key_in_revision( + self, + tree_name: str, + revision_id: str, + key: str, + project_guid: str = None, + **kwargs, + ) -> Munch: + """Delete a key in a revision. + + Args: + tree_name (str): Config tree name + revision_id (str): Config tree revision ID + key (str): Key + project_guid (str, optional): Project GUID. async defaults to None. + + Returns: + Munch: Key details as a Munch object. + """ + + return await self.c.delete( + url=f"{self.v2api_host}/v2/configtrees/{tree_name}/revisions/{revision_id}/{key}/", + headers=self.config.get_headers(project_guid=project_guid, **kwargs), + ) + + @handle_and_munchify_response + async def rename_key_in_revision( + self, + tree_name: str, + revision_id: str, + key: str, + config_key_rename: dict, + project_guid: str = None, + **kwargs, + ) -> Munch: + """Rename a key in a revision. + + Args: + tree_name (str): Config tree name + revision_id (str): Config tree revision ID + key (str): Key + config_key_rename (dict): Key rename details + project_guid (str, optional): Project GUID. async defaults to None. + + Returns: + Munch: Key details as a Munch object. + """ + + return await self.c.patch( + url=f"{self.v2api_host}/v2/configtrees/{tree_name}/revisions/{revision_id}/{key}/", + headers=self.config.get_headers(project_guid=project_guid, **kwargs), + json=config_key_rename, + ) + + # Managed Service API + @handle_and_munchify_response + async def list_providers(self) -> Munch: + """List all providers. + + Returns: + Munch: List of providers as a Munch object. + """ + + return await self.c.get( + url=f"{self.v2api_host}/v2/managedservices/providers/", + headers=self.config.get_headers(with_project=False), + ) + + @handle_and_munchify_response + async def list_instances( + self, + cont: int = 0, + limit: int = 50, + label_selector: list[str] = None, + providers: list[str] = None, + ): + """List all instances in a project. + + Args: + cont (int, optional): Start index of instances. Defaults to 0. + limit (int, optional): Number of instances to list. Defaults to 50. + label_selector (list[str], optional): Define labelSelector to get instances from. Defaults to None. + providers (list[str], optional): Define providers to get instances from. Defaults to None. + + Returns: + Munch: List of instances as a Munch object. + """ + return await self.c.get( + url=f"{self.v2api_host}/v2/managedservices/", + headers=self.config.get_headers(), + params={ + "continue": cont, + "limit": limit, + "labelSelector": label_selector, + "providers": providers, + }, + ) + + @handle_and_munchify_response + async def get_instance(self, name: str) -> Munch: + """Get an instance by its name. 
+ + Args: + name (str): Instance name + + Returns: + Munch: Instance details as a Munch object. + """ + + return await self.c.get( + url=f"{self.v2api_host}/v2/managedservices/{name}/", + headers=self.config.get_headers(), + ) + + @handle_and_munchify_response + async def create_instance(self, body: dict) -> Munch: + """Create a new instance. + + Returns: + Munch: Instance details as a Munch object. + """ + + return await self.c.post( + url=f"{self.v2api_host}/v2/managedservices/", + headers=self.config.get_headers(), + json=body, + ) + + @handle_and_munchify_response + async def delete_instance(self, name: str) -> Munch: + """Delete an instance. + + Returns: + Munch: Instance details as a Munch object. + """ + + return await self.c.delete( + url=f"{self.v2api_host}/v2/managedservices/{name}/", + headers=self.config.get_headers(), + ) + + @handle_and_munchify_response + async def list_instance_bindings( + self, + instance_name: str, + cont: int = 0, + limit: int = 50, + label_selector: list[str] = None, + ): + """List all instance bindings in a project. + + Args: + instance_name (str): Instance name. + cont (int, optional): Start index of instance bindings. Defaults to 0. + limit (int, optional): Number of instance bindings to list. Defaults to 50. + label_selector (list[str], optional): Define labelSelector to get instance bindings from. Defaults to None. + + Returns: + Munch: List of instance bindings as a Munch object. + """ + return await self.c.get( + url=f"{self.v2api_host}/v2/managedservices/{instance_name}/bindings/", + headers=self.config.get_headers(), + params={ + "continue": cont, + "limit": limit, + "labelSelector": label_selector, + }, + ) + + @handle_and_munchify_response + async def create_instance_binding(self, instance_name: str, body: dict) -> Munch: + """Create a new instance binding. + + Args: + instance_name (str): Instance name. + body (object): Instance binding details. + + Returns: + Munch: Instance binding details as a Munch object. + """ + + return await self.c.post( + url=f"{self.v2api_host}/v2/managedservices/{instance_name}/bindings/", + headers=self.config.get_headers(), + json=body, + ) + + @handle_and_munchify_response + async def get_instance_binding(self, instance_name: str, name: str) -> Munch: + """Get an instance binding by its name. + + Args: + instance_name (str): Instance name. + name (str): Instance binding name. + + Returns: + Munch: Instance binding details as a Munch object. + """ + + return await self.c.get( + url=f"{self.v2api_host}/v2/managedservices/{instance_name}/bindings/{name}/", + headers=self.config.get_headers(), + ) + + @handle_and_munchify_response + async def delete_instance_binding(self, instance_name: str, name: str) -> Munch: + """Delete an instance binding. + + Args: + instance_name (str): Instance name. + name (str): Instance binding name. + + Returns: + Munch: Instance binding details as a Munch object. + """ + + return await self.c.delete( + url=f"{self.v2api_host}/v2/managedservices/{instance_name}/bindings/{name}/", + headers=self.config.get_headers(), + ) diff --git a/rapyuta_io_sdk_v2/client.py b/rapyuta_io_sdk_v2/client.py index 71947df..0660586 100644 --- a/rapyuta_io_sdk_v2/client.py +++ b/rapyuta_io_sdk_v2/client.py @@ -13,11 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
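A sketch of the managed-service calls above, assuming an authenticated `AsyncClient` named `client`; the instance and binding manifests are placeholders whose real schema depends on the provider.

```python
import asyncio

async def managed_service_overview(client):
    # Discover the available managed-service providers.
    print(await client.list_providers())

    # Placeholder instance manifest.
    await client.create_instance({"metadata": {"name": "demo-instance"}})

    # Bind the instance (placeholder binding body) and list its bindings.
    await client.create_instance_binding(
        "demo-instance", {"metadata": {"name": "demo-binding"}}
    )
    print(await client.list_instance_bindings("demo-instance"))

# asyncio.run(managed_service_overview(client))
```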
-import httpx -from munch import Munch, munchify import platform + +import httpx +from munch import Munch + from rapyuta_io_sdk_v2.config import Configuration -from rapyuta_io_sdk_v2.utils import handle_server_errors +from rapyuta_io_sdk_v2.utils import handle_and_munchify_response, handle_server_errors class Client(object): @@ -49,91 +51,90 @@ def __init__(self, config: Configuration = None, **kwargs): ) }, ) + self.v2api_host = self.config.hosts.get("v2api_host") + self.rip_host = self.config.hosts.get("rip_host") - def login( - self, - email: str = None, - password: str = None, - environment: str = "ga", - ) -> str: + def get_auth_token(self, email: str, password: str) -> str: """Get the authentication token for the user. Args: email (str) password (str) - environment (str) Returns: str: authentication token """ - if email is None and password is None and self.config is None: - raise ValueError("email and password are required") - - if self.config is None: - self.config = Configuration( - email=email, password=password, environment=environment - ) - - payload = { - "email": email or self.config.email, - "password": password or self.config.password, - } - - rip_host = self.config.hosts.get("rip_host") - url = f"{rip_host}/user/login" - headers = {"Content-Type": "application/json"} + response = self.c.post( + url=f"{self.rip_host}/user/login", + headers={"Content-Type": "application/json"}, + json={ + "email": email, + "password": password, + }, + ) + handle_server_errors(response) + return response.json()["data"].get("token") - response = self.c.post(url=url, headers=headers, json=payload) + def login( + self, + email: str, + password: str, + ) -> None: + """Get the authentication token for the user. - handle_server_errors(response) + Args: + email (str) + password (str) - self.config.auth_token = response.json()["data"].get("token") + Returns: + str: authentication token + """ - return self.config.auth_token + token = self.get_auth_token(email, password) + self.config.auth_token = token - def logout(self, token: str = None) -> None: + @handle_and_munchify_response + def logout(self, token: str = None) -> Munch: """Expire the authentication token. Args: token (str): The token to expire. """ - rip_host = self.config.hosts.get("rip_host") - url = f"{rip_host}/user/logout" - headers = {"Content-Type": "application/json"} if token is None: token = self.config.auth_token - response = self.c.post(url=url, headers=headers, json={"token": token}) - - handle_server_errors(response) - - return + return self.c.post( + url=f"{self.rip_host}/user/logout", + headers={ + "Content-Type": "application/json", + "Authorization": f"Bearer {token}", + }, + ) - def refresh_token(self, token: str = None) -> str: + def refresh_token(self, token: str = None, set_token: bool = True) -> str: """Refresh the authentication token. Args: token (str): The token to refresh. + set_token (bool): Set the refreshed token in the configuration. Returns: str: The refreshed token. 
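With this refactor, the synchronous `Client` mirrors the async surface: `login()` stores the token on the configuration and returns nothing, `get_auth_token()` exposes the raw token, and `refresh_token()` gained a `set_token` flag. A minimal sketch with placeholder credentials:

```python
from rapyuta_io_sdk_v2 import Client, Configuration

config = Configuration(organization_guid="ORGANIZATION_GUID")  # placeholder GUID
client = Client(config)

# login() now stores the token on client.config.auth_token and returns None.
client.login(email="EMAIL", password="PASSWORD")

# get_auth_token() returns the raw token without mutating the configuration.
token = client.get_auth_token("EMAIL", "PASSWORD")

# refresh_token() updates the stored token by default; pass set_token=False
# to refresh a token without overwriting client.config.auth_token.
fresh = client.refresh_token(token=token, set_token=False)
```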
""" - rip_host = self.config.hosts.get("rip_host") - url = f"{rip_host}/refreshtoken" - headers = {"Content-Type": "application/json"} if token is None: token = self.config.auth_token - response = self.c.post(url=url, headers=headers, json={"token": token}) - + response = self.c.post( + url=f"{self.rip_host}/refreshtoken", + headers={"Content-Type": "application/json"}, + json={"token": token}, + ) handle_server_errors(response) - - data = response.json()["data"] - self.config.auth_token = data["Token"] - - return self.config.auth_token + if set_token: + self.config.auth_token = response.json()["data"].get("token") + return response.json()["data"].get("token") def set_organization(self, organization_guid: str) -> None: """Set the organization GUID. @@ -151,7 +152,9 @@ def set_project(self, project_guid: str) -> None: """ self.config.set_project(project_guid) - def get_project(self, project_guid: str = None) -> Munch: + # -------------------Project------------------- + @handle_and_munchify_response + def get_project(self, project_guid: str = None, **kwargs) -> Munch: """Get a project by its GUID. If no project or organization GUID is provided, @@ -159,7 +162,7 @@ def get_project(self, project_guid: str = None) -> Munch: be picked from the current configuration. Args: - project_guid (str): Project GUID + project_guid (str): user provided project GUID or config project GUID Raises: ValueError: If organization_guid or project_guid is None @@ -167,23 +170,1252 @@ def get_project(self, project_guid: str = None) -> Munch: Returns: Munch: Project details as a Munch object. """ - headers = self.config.get_headers(with_project=False) - if project_guid is None: project_guid = self.config.project_guid if not project_guid: raise ValueError("project_guid is required") - v2api_host = self.config.hosts.get("v2api_host") + return self.c.get( + url=f"{self.v2api_host}/v2/projects/{project_guid}/", + headers=self.config.get_headers(with_project=False, **kwargs), + ) + + @handle_and_munchify_response + def list_projects( + self, + cont: int = 0, + limit: int = 50, + label_selector: list[str] = None, + status: list[str] = None, + organizations: list[str] = None, + **kwargs, + ) -> Munch: + """List all projects in an organization. + + Args: + cont (int, optional): Start index of projects. Defaults to 0. + limit (int, optional): Number of projects to list. Defaults to 50. + label_selector (list[str], optional): Define labelSelector to get projects from. Defaults to None. + status (list[str], optional): Define status to get projects from. Defaults to None. + organizations (list[str], optional): Define organizations to get projects from. Defaults to None. + + Returns: + Munch: List of projects as a Munch object. + """ - response = self.c.get( - url=f"{v2api_host}/v2/projects/{project_guid}/", - headers=headers, + return self.c.get( + url=f"{self.v2api_host}/v2/projects/", + headers=self.config.get_headers(with_project=False, **kwargs), + params={ + "continue": cont, + "limit": limit, + "status": status, + "organizations": organizations, + "labelSelector": label_selector, + }, ) - handle_server_errors(response) + @handle_and_munchify_response + def create_project(self, body: dict, **kwargs) -> Munch: + """Create a new project. + + Args: + body (object): Project details + + Returns: + Munch: Project details as a Munch object. 
+ """ + + return self.c.post( + url=f"{self.v2api_host}/v2/projects/", + headers=self.config.get_headers(with_project=False, **kwargs), + json=body, + ) + + @handle_and_munchify_response + def update_project(self, body: dict, project_guid: str = None, **kwargs) -> Munch: + """Update a project by its GUID. + + Args: + body (object): Project details + project_guid (str, optional): Project GUID. Defaults to None. + + Returns: + Munch: Project details as a Munch object. + """ + + return self.c.put( + url=f"{self.v2api_host}/v2/projects/{project_guid}/", + headers=self.config.get_headers(with_project=False, **kwargs), + json=body, + ) + + @handle_and_munchify_response + def delete_project(self, project_guid: str, **kwargs) -> Munch: + """Delete a project by its GUID. + + Args: + project_guid (str): Project GUID + + Returns: + Munch: Project details as a Munch object. + """ + + return self.c.delete( + url=f"{self.v2api_host}/v2/projects/{project_guid}/", + headers=self.config.get_headers(with_project=False, **kwargs), + ) + + @handle_and_munchify_response + def update_project_owner( + self, body: dict, project_guid: str = None, **kwargs + ) -> Munch: + """Update the owner of a project by its GUID. + + Returns: + Munch: Project details as a Munch object. + """ + project_guid = project_guid or self.config.project_guid + + return self.c.put( + url=f"{self.v2api_host}/v2/projects/{project_guid}/owner/", + headers=self.config.get_headers(**kwargs), + json=body, + ) + + # -------------------Package------------------- + @handle_and_munchify_response + def list_packages( + self, + cont: int = 0, + limit: int = 50, + label_selector: list[str] = None, + name: str = None, + **kwargs, + ) -> Munch: + """List all packages in a project. + + Args: + cont (int, optional): Start index of packages. Defaults to 0. + limit (int, optional): Number of packages to list. Defaults to 50. + label_selector (list[str], optional): Define labelSelector to get packages from. Defaults to None. + name (str, optional): Define name to get packages from. Defaults to None. + + Returns: + Munch: List of packages as a Munch object. + """ + + return self.c.get( + url=f"{self.v2api_host}/v2/packages/", + headers=self.config.get_headers(**kwargs), + params={ + "continue": cont, + "limit": limit, + "labelSelector": label_selector, + "name": name, + }, + ) + + @handle_and_munchify_response + def create_package(self, body: dict, **kwargs) -> Munch: + """Create a new package. + + The Payload is the JSON format of the Package Manifest. + For a documented example, run the rio explain package command. + + Returns: + Munch: Package details as a Munch object. + """ + + return self.c.post( + url=f"{self.v2api_host}/v2/packages/", + headers=self.config.get_headers(**kwargs), + json=body, + ) + + @handle_and_munchify_response + def get_package(self, name: str, version: str = None, **kwargs) -> Munch: + """Get a package by its name. + + Args: + name (str): Package name + version (str, optional): Package version. Defaults to None. + + Returns: + Munch: Package details as a Munch object. + """ + + return self.c.get( + url=f"{self.v2api_host}/v2/packages/{name}/", + headers=self.config.get_headers(**kwargs), + params={"version": version}, + ) + + @handle_and_munchify_response + def delete_package(self, name: str, **kwargs) -> Munch: + """Delete a package by its name. + + Args: + name (str): Package name + + Returns: + Munch: Package details as a Munch object. 
+ """ + + return self.c.delete( + url=f"{self.v2api_host}/v2/packages/{name}/", + headers=self.config.get_headers(**kwargs), + ) + + # -------------------Deployment------------------- + @handle_and_munchify_response + def list_deployments( + self, + cont: int = 0, + limit: int = 50, + dependencies: bool = False, + device_name: str = None, + guids: list[str] = None, + label_selector: list[str] = None, + name: str = None, + names: list[str] = None, + package_name: str = None, + package_version: str = None, + phases: list[str] = None, + regions: list[str] = None, + **kwargs, + ) -> Munch: + """List all deployments in a project. + + Args: + cont (int, optional): Start index of deployments. Defaults to 0. + limit (int, optional): Number of deployments to list. Defaults to 50. + dependencies (bool, optional): Filter by dependencies. Defaults to False. + device_name (str, optional): Filter deployments by device name. Defaults to None. + guids (list[str], optional): Filter by GUIDs. Defaults to None. + label_selector (list[str], optional): Define labelSelector to get deployments from. Defaults to None. + name (str, optional): Define name to get deployments from. Defaults to None. + names (list[str], optional): Define names to get deployments from. Defaults to None. + package_name (str, optional): Filter by package name. Defaults to None. + package_version (str, optional): Filter by package version. Defaults to None. + phases (list[str], optional): Filter by phases. Available values : InProgress, Provisioning, Succeeded, FailedToUpdate, FailedToStart, Stopped. Defaults to None. + regions (list[str], optional): Filter by regions. Defaults to None. + + Returns: + Munch: List of deployments as a Munch object. + """ + + return self.c.get( + url=f"{self.v2api_host}/v2/deployments/", + headers=self.config.get_headers(**kwargs), + params={ + "continue": cont, + "limit": limit, + "dependencies": dependencies, + "deviceName": device_name, + "guids": guids, + "labelSelector": label_selector, + "name": name, + "names": names, + "packageName": package_name, + "packageVersion": package_version, + "phases": phases, + "regions": regions, + }, + ) + + @handle_and_munchify_response + def create_deployment(self, body: dict, **kwargs) -> Munch: + """Create a new deployment. + + Args: + body (object): Deployment details + + Returns: + Munch: Deployment details as a Munch object. + """ + + return self.c.post( + url=f"{self.v2api_host}/v2/deployments/", + headers=self.config.get_headers(**kwargs), + json=body, + ) + + @handle_and_munchify_response + def get_deployment(self, name: str, guid: str = None, **kwargs) -> Munch: + """Get a deployment by its name. + + Returns: + Munch: Deployment details as a Munch object. + """ + + return self.c.get( + url=f"{self.v2api_host}/v2/deployments/{name}/", + headers=self.config.get_headers(**kwargs), + params={"guid": guid}, + ) + + @handle_and_munchify_response + def update_deployment(self, name: str, body: dict, **kwargs) -> Munch: + """Update a deployment by its name. + + Returns: + Munch: Deployment details as a Munch object. + """ + + return self.c.put( + url=f"{self.v2api_host}/v2/deployments/{name}/", + headers=self.config.get_headers(**kwargs), + json=body, + ) + + @handle_and_munchify_response + def delete_deployment(self, name: str, **kwargs) -> Munch: + """Delete a deployment by its name. + + Returns: + Munch: Deployment details as a Munch object. 
+ """ + + return self.c.delete( + url=f"{self.v2api_host}/v2/deployments/{name}/", + headers=self.config.get_headers(**kwargs), + ) + + @handle_and_munchify_response + def get_deployment_graph(self, name: str, **kwargs) -> Munch: + """Get a deployment graph by its name. [Experimental] + + Returns: + Munch: Deployment graph as a Munch object. + """ + + return self.c.get( + url=f"{self.v2api_host}/v2/deployments/{name}/graph/", + headers=self.config.get_headers(**kwargs), + ) + + @handle_and_munchify_response + def get_deployment_history(self, name: str, guid: str = None, **kwargs) -> Munch: + """Get a deployment history by its name. + + Returns: + Munch: Deployment history as a Munch object. + """ + + return self.c.get( + url=f"{self.v2api_host}/v2/deployments/{name}/history/", + headers=self.config.get_headers(**kwargs), + params={"guid": guid}, + ) + + # -------------------Disks------------------- + @handle_and_munchify_response + def list_disks( + self, + cont: int = 0, + label_selector: list[str] = None, + limit: int = 50, + names: list[str] = None, + regions: list[str] = None, + status: list[str] = None, + **kwargs, + ) -> Munch: + """List all disks in a project. + + Args: + cont (int, optional): Start index of disks. Defaults to 0. + label_selector (list[str], optional): Define labelSelector to get disks from. Defaults to None. + limit (int, optional): Number of disks to list. Defaults to 50. + names (list[str], optional): Define names to get disks from. Defaults to None. + regions (list[str], optional): Define regions to get disks from. Defaults to None. + status (list[str], optional): Define status to get disks from. Available values : Available, Bound, Released, Failed, Pending.Defaults to None. + + Returns: + Munch: List of disks as a Munch object. + """ + + return self.c.get( + url=f"{self.v2api_host}/v2/disks/", + headers=self.config.get_headers(**kwargs), + params={ + "continue": cont, + "limit": limit, + "labelSelector": label_selector, + "names": names, + "regions": regions, + "status": status, + }, + ) + + @handle_and_munchify_response + def get_disk(self, name: str, **kwargs) -> Munch: + """Get a disk by its name. + + Args: + name (str): Disk name + + Returns: + Munch: Disk details as a Munch object. + """ + + return self.c.get( + url=f"{self.v2api_host}/v2/disks/{name}/", + headers=self.config.get_headers(**kwargs), + ) + + @handle_and_munchify_response + def create_disk(self, body: str, **kwargs) -> Munch: + """Create a new disk. + + Returns: + Munch: Disk details as a Munch object. + """ + + return self.c.post( + url=f"{self.v2api_host}/v2/disks/", + headers=self.config.get_headers(**kwargs), + json=body, + ) + + @handle_and_munchify_response + def delete_disk(self, name: str, **kwargs) -> Munch: + """Delete a disk by its name. + + Args: + name (str): Disk name + + Returns: + Munch: Disk details as a Munch object. + """ + + return self.c.delete( + url=f"{self.v2api_host}/v2/disks/{name}/", + headers=self.config.get_headers(**kwargs), + ) + + # -------------------Static Routes------------------- + @handle_and_munchify_response + def list_staticroutes( + self, + cont: int = 0, + limit: int = 50, + guids: list[str] = None, + label_selector: list[str] = None, + names: list[str] = None, + regions: list[str] = None, + **kwargs, + ) -> Munch: + """List all static routes in a project. + + Args: + cont (int, optional): Start index of static routes. Defaults to 0. + limit (int, optional): Number of static routes to list. Defaults to 50. 
+ guids (list[str], optional): Define guids to get static routes from. Defaults to None. + label_selector (list[str], optional): Define labelSelector to get static routes from. Defaults to None. + names (list[str], optional): Define names to get static routes from. Defaults to None. + regions (list[str], optional): Define regions to get static routes from. Defaults to None. + + Returns: + Munch: List of static routes as a Munch object. + """ + + return self.c.get( + url=f"{self.v2api_host}/v2/staticroutes/", + headers=self.config.get_headers(**kwargs), + params={ + "continue": cont, + "limit": limit, + "guids": guids, + "labelSelector": label_selector, + "names": names, + "regions": regions, + }, + ) + + @handle_and_munchify_response + def create_staticroute(self, body: dict, **kwargs) -> Munch: + """Create a new static route. + + Returns: + Munch: Static route details as a Munch object. + """ + + return self.c.post( + url=f"{self.v2api_host}/v2/staticroutes/", + headers=self.config.get_headers(**kwargs), + json=body, + ) + + @handle_and_munchify_response + def get_staticroute(self, name: str, **kwargs) -> Munch: + """Get a static route by its name. + + Args: + name (str): Static route name + + Returns: + Munch: Static route details as a Munch object. + """ + + return self.c.get( + url=f"{self.v2api_host}/v2/staticroutes/{name}/", + headers=self.config.get_headers(**kwargs), + ) + + @handle_and_munchify_response + def update_staticroute(self, name: str, body: dict, **kwargs) -> Munch: + """Update a static route by its name. + + Args: + name (str): Static route name + body (dict): Update details + + Returns: + Munch: Static route details as a Munch object. + """ + + return self.c.put( + url=f"{self.v2api_host}/v2/staticroutes/{name}/", + headers=self.config.get_headers(**kwargs), + json=body, + ) + + @handle_and_munchify_response + def delete_staticroute(self, name: str, **kwargs) -> Munch: + """Delete a static route by its name. + + Args: + name (str): Static route name + + Returns: + Munch: Static route details as a Munch object. + """ + + return self.c.delete( + url=f"{self.v2api_host}/v2/staticroutes/{name}/", + headers=self.config.get_headers(**kwargs), + ) + + # -------------------Networks------------------- + @handle_and_munchify_response + def list_networks( + self, + cont: int = 0, + limit: int = 50, + device_name: str = None, + label_selector: list[str] = None, + names: list[str] = None, + network_type: str = None, + phases: list[str] = None, + regions: list[str] = None, + status: list[str] = None, + **kwargs, + ) -> Munch: + """List all networks in a project. + + Args: + cont (int, optional): Start index of networks. Defaults to 0. + limit (int, optional): Number of networks to list. Defaults to 50. + device_name (str, optional): Filter networks by device name. Defaults to None. + label_selector (list[str], optional): Define labelSelector to get networks from. Defaults to None. + names (list[str], optional): Define names to get networks from. Defaults to None. + network_type (str, optional): Define network type to get networks from. Defaults to None. + phases (list[str], optional): Define phases to get networks from. Available values : InProgress, Provisioning, Succeeded, FailedToUpdate, FailedToStart, Stopped. Defaults to None. + regions (list[str], optional): Define regions to get networks from. Defaults to None. + status (list[str], optional): Define status to get networks from. Available values : Running, Pending, Error, Unknown, Stopped. Defaults to None. 
+
+        Returns:
+            Munch: List of networks as a Munch object.
+        """
+
+        return self.c.get(
+            url=f"{self.v2api_host}/v2/networks/",
+            headers=self.config.get_headers(**kwargs),
+            params={
+                "continue": cont,
+                "limit": limit,
+                "deviceName": device_name,
+                "labelSelector": label_selector,
+                "names": names,
+                "networkType": network_type,
+                "phases": phases,
+                "regions": regions,
+                "status": status,
+            },
+        )
+
+    @handle_and_munchify_response
+    def create_network(self, body: dict, **kwargs) -> Munch:
+        """Create a new network.
+
+        Returns:
+            Munch: Network details as a Munch object.
+        """
+
+        return self.c.post(
+            url=f"{self.v2api_host}/v2/networks/",
+            headers=self.config.get_headers(**kwargs),
+            json=body,
+        )
+
+    @handle_and_munchify_response
+    def get_network(self, name: str, **kwargs) -> Munch:
+        """Get a network by its name.
+
+        Args:
+            name (str): Network name
+
+        Returns:
+            Munch: Network details as a Munch object.
+        """
+
+        return self.c.get(
+            url=f"{self.v2api_host}/v2/networks/{name}/",
+            headers=self.config.get_headers(**kwargs),
+        )
+
+    @handle_and_munchify_response
+    def delete_network(self, name: str, **kwargs) -> Munch:
+        """Delete a network by its name.
+
+        Args:
+            name (str): Network name
+
+        Returns:
+            Munch: Network details as a Munch object.
+        """
+
+        return self.c.delete(
+            url=f"{self.v2api_host}/v2/networks/{name}/",
+            headers=self.config.get_headers(**kwargs),
+        )
+
+    # -------------------Secrets-------------------
+    @handle_and_munchify_response
+    def list_secrets(
+        self,
+        cont: int = 0,
+        limit: int = 50,
+        label_selector: list[str] = None,
+        names: list[str] = None,
+        regions: list[str] = None,
+        **kwargs,
+    ) -> Munch:
+        """List all secrets in a project.
+
+        Args:
+            cont (int, optional): Start index of secrets. Defaults to 0.
+            limit (int, optional): Number of secrets to list. Defaults to 50.
+            label_selector (list[str], optional): Define labelSelector to get secrets from. Defaults to None.
+            names (list[str], optional): Define names to get secrets from. Defaults to None.
+            regions (list[str], optional): Define regions to get secrets from. Defaults to None.
+
+        Returns:
+            Munch: List of secrets as a Munch object.
+        """
+
+        return self.c.get(
+            url=f"{self.v2api_host}/v2/secrets/",
+            headers=self.config.get_headers(**kwargs),
+            params={
+                "continue": cont,
+                "limit": limit,
+                "labelSelector": label_selector,
+                "names": names,
+                "regions": regions,
+            },
+        )
+
+    @handle_and_munchify_response
+    def create_secret(self, body: dict, **kwargs) -> Munch:
+        """Create a new secret.
+
+        Returns:
+            Munch: Secret details as a Munch object.
+        """
+
+        return self.c.post(
+            url=f"{self.v2api_host}/v2/secrets/",
+            headers=self.config.get_headers(**kwargs),
+            json=body,
+        )
+
+    @handle_and_munchify_response
+    def get_secret(self, name: str, **kwargs) -> Munch:
+        """Get a secret by its name.
+
+        Args:
+            name (str): Secret name
+
+        Returns:
+            Munch: Secret details as a Munch object.
+        """
+
+        return self.c.get(
+            url=f"{self.v2api_host}/v2/secrets/{name}/",
+            headers=self.config.get_headers(**kwargs),
+        )
+
+    @handle_and_munchify_response
+    def update_secret(self, name: str, body: dict, **kwargs) -> Munch:
+        """Update a secret by its name.
+
+        Args:
+            name (str): Secret name
+            body (dict): Update details
+
+        Returns:
+            Munch: Secret details as a Munch object.
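+
+        Example (illustrative sketch, not part of the source; assumes an
+        authenticated client instance and a hypothetical secret manifest):
+
+            body = {
+                "metadata": {"name": "my-secret"},  # hypothetical fields
+                "spec": {"type": "Docker"},
+            }
+            secret = client.update_secret("my-secret", body)
+            print(secret.metadata.guid)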
+ """ + + return self.c.put( + url=f"{self.v2api_host}/v2/secrets/{name}/", + headers=self.config.get_headers(**kwargs), + json=body, + ) + + @handle_and_munchify_response + def delete_secret(self, name: str, **kwargs) -> Munch: + """Delete a secret by its name. + + Args: + name (str): Secret name + + Returns: + Munch: Secret details as a Munch object. + """ + + return self.c.delete( + url=f"{self.v2api_host}/v2/secrets/{name}/", + headers=self.config.get_headers(**kwargs), + ) + + # -------------------Config Trees------------------- + @handle_and_munchify_response + def list_configtrees( + self, + cont: int = 0, + limit: int = 50, + label_selector: list[str] = None, + with_project: bool = True, + **kwargs, + ) -> Munch: + """List all config trees in a project. + + Args: + cont (int, optional): Start index of config trees. Defaults to 0. + limit (int, optional): Number of config trees to list. Defaults to 50. + label_selector (list[str], optional): Define labelSelector to get config trees from. Defaults to None. + with_project (bool, optional): Include project. Defaults to True. + + Returns: + Munch: List of config trees as a Munch object. + """ + + return self.c.get( + url=f"{self.v2api_host}/v2/configtrees/", + headers=self.config.get_headers(with_project=with_project, **kwargs), + params={ + "continue": cont, + "limit": limit, + "labelSelector": label_selector, + }, + ) + + @handle_and_munchify_response + def create_configtree(self, body: dict, with_project: bool = True, **kwargs) -> Munch: + """Create a new config tree. + + Args: + body (object): Config tree details + with_project (bool, optional): Work in the project scope. Defaults to True. + + Returns: + Munch: Config tree details as a Munch object. + """ + + return self.c.post( + url=f"{self.v2api_host}/v2/configtrees/", + headers=self.config.get_headers(with_project=with_project, **kwargs), + json=body, + ) + + @handle_and_munchify_response + def get_configtree( + self, + name: str, + content_types: list[str] = None, + include_data: bool = False, + key_prefixes: list[str] = None, + revision: str = None, + with_project: bool = True, + **kwargs, + ) -> Munch: + """Get a config tree by its name. + + Args: + name (str): Config tree name + content_types (list[str], optional): Define contentTypes to get config tree from. Defaults to None. + include_data (bool, optional): Include data. Defaults to False. + key_prefixes (list[str], optional): Define keyPrefixes to get config tree from. Defaults to None. + revision (str, optional): Define revision to get config tree from. Defaults to None. + with_project (bool, optional): Work in the project scope. Defaults to True. + + Returns: + Munch: Config tree details as a Munch object. + """ + + return self.c.get( + url=f"{self.v2api_host}/v2/configtrees/{name}/", + headers=self.config.get_headers(with_project=with_project, **kwargs), + params={ + "contentTypes": content_types, + "includeData": include_data, + "keyPrefixes": key_prefixes, + "revision": revision, + }, + ) + + @handle_and_munchify_response + def set_configtree_revision( + self, name: str, configtree: dict, project_guid: str = None, **kwargs + ) -> Munch: + """Set a config tree revision. + + Args: + name (str): Config tree name + configtree (object): Config tree details + project_guid (str, optional): Project GUID. Defaults to None. + + Returns: + Munch: Config tree details as a Munch object. 
+ """ + + return self.c.put( + url=f"{self.v2api_host}/v2/configtrees/{name}/", + headers=self.config.get_headers(project_guid=project_guid, **kwargs), + json=configtree, + ) - print(response.headers) + @handle_and_munchify_response + def update_configtree( + self, name: str, body: dict, with_project: bool = True, **kwargs + ) -> Munch: + """Update a config tree by its name. + + Args: + name (str): Config tree name + body (dict): Update details + with_project (bool, optional): Work in the project scope. Defaults to True. + + Returns: + Munch: Config tree details as a Munch object. + """ + + return self.c.put( + url=f"{self.v2api_host}/v2/configtrees/{name}/", + headers=self.config.get_headers(with_project=with_project, **kwargs), + json=body, + ) + + @handle_and_munchify_response + def delete_configtree(self, name: str, **kwargs) -> Munch: + """Delete a config tree by its name. + + Args: + name (str): Config tree name + + Returns: + Munch: Config tree details as a Munch object. + """ + + return self.c.delete( + url=f"{self.v2api_host}/v2/configtrees/{name}/", + headers=self.config.get_headers(**kwargs), + ) + + @handle_and_munchify_response + def list_revisions( + self, + tree_name: str, + cont: int = 0, + limit: int = 50, + committed: bool = False, + label_selector: list[str] = None, + **kwargs, + ) -> Munch: + """List all revisions of a config tree. + + Args: + tree_name (str): Config tree name + cont (int, optional): Continue param . Defaults to 0. + limit (int, optional): Limit param . Defaults to 50. + committed (bool, optional): Committed. Defaults to False. + label_selector (list[str], optional): Define labelSelector to get revisions from. Defaults to None. + + Returns: + Munch: List of revisions as a Munch object. + """ + + return self.c.get( + url=f"{self.v2api_host}/v2/configtrees/{tree_name}/revisions/", + headers=self.config.get_headers(**kwargs), + params={ + "continue": cont, + "limit": limit, + "committed": committed, + "labelSelector": label_selector, + }, + ) + + @handle_and_munchify_response + def create_revision( + self, name: str, body: dict, project_guid: str = None, **kwargs + ) -> Munch: + """Create a new revision. + + Args: + name (str): Config tree name + body (object): Revision details + project_guid (str): Project GUID (optional) + + Returns: + Munch: Revision details as a Munch object. + """ - return munchify(response.json()) + return self.c.post( + url=f"{self.v2api_host}/v2/configtrees/{name}/revisions/", + headers=self.config.get_headers(project_guid=project_guid, **kwargs), + json=body, + ) + + @handle_and_munchify_response + def put_keys_in_revision( + self, name: str, revision_id: str, config_values: dict, **kwargs + ) -> Munch: + """Put keys in a revision. + + Args: + name (str): Config tree name + revision_id (str): Config tree revision ID + config_values (dict): Config values + + Returns: + Munch: Revision details as a Munch object. + """ + + return self.c.put( + url=f"{self.v2api_host}/v2/configtrees/{name}/revisions/{revision_id}/keys/", + headers=self.config.get_headers(**kwargs), + json=config_values, + ) + + @handle_and_munchify_response + def commit_revision( + self, + tree_name: str, + revision_id: str, + author: str = None, + message: str = None, + project_guid: str = None, + **kwargs, + ) -> Munch: + """Commit a revision. + + Args: + tree_name (str): Config tree name + revision_id (str): Config tree revision ID + author (str, optional): Revision Author. Defaults to None. + message (str, optional): Revision Message. Defaults to None. 
+ project_guid (str, optional): Project GUID. Defaults to None. + + Returns: + Munch: Revision details as a Munch object. + """ + config_tree_revision = { + "author": author, + "message": message, + } + + return self.c.patch( + url=f"{self.v2api_host}/v2/configtrees/{tree_name}/revisions/{revision_id}/commit/", + headers=self.config.get_headers(project_guid=project_guid, **kwargs), + json=config_tree_revision, + ) + + @handle_and_munchify_response + def get_key_in_revision( + self, + tree_name: str, + revision_id: str, + key: str, + project_guid: str = None, + **kwargs, + ) -> Munch: + """Get a key in a revision. + + Args: + tree_name (str): Config tree name + revision_id (str): Config tree revision ID + key (str): Key + project_guid (str, optional): Project GUID. Defaults to None. + + Returns: + Munch: Key details as a Munch object. + """ + + return self.c.get( + url=f"{self.v2api_host}/v2/configtrees/{tree_name}/revisions/{revision_id}/{key}/", + headers=self.config.get_headers(project_guid=project_guid, **kwargs), + ) + + @handle_and_munchify_response + def put_key_in_revision( + self, + tree_name: str, + revision_id: str, + key: str, + project_guid: str = None, + **kwargs, + ) -> Munch: + """Put a key in a revision. + + Args: + tree_name (str): Config tree name + revision_id (str): Config tree revision ID + key (str): Key + project_guid (str, optional): Project GUID. Defaults to None. + + Returns: + Munch: Key details as a Munch object. + """ + + return self.c.put( + url=f"{self.v2api_host}/v2/configtrees/{tree_name}/revisions/{revision_id}/{key}/", + headers=self.config.get_headers(project_guid=project_guid, **kwargs), + ) + + @handle_and_munchify_response + def delete_key_in_revision( + self, + tree_name: str, + revision_id: str, + key: str, + project_guid: str = None, + **kwargs, + ) -> Munch: + """Delete a key in a revision. + + Args: + tree_name (str): Config tree name + revision_id (str): Config tree revision ID + key (str): Key + project_guid (str, optional): Project GUID. Defaults to None. + + Returns: + Munch: Key details as a Munch object. + """ + + return self.c.delete( + url=f"{self.v2api_host}/v2/configtrees/{tree_name}/revisions/{revision_id}/{key}/", + headers=self.config.get_headers(project_guid=project_guid, **kwargs), + ) + + @handle_and_munchify_response + def rename_key_in_revision( + self, + tree_name: str, + revision_id: str, + key: str, + config_key_rename: dict, + project_guid: str = None, + **kwargs, + ) -> Munch: + """Rename a key in a revision. + + Args: + tree_name (str): Config tree name + revision_id (str): Config tree revision ID + key (str): Key + config_key_rename (object): Key rename details + project_guid (str, optional): Project GUID. Defaults to None. + + Returns: + Munch: Key details as a Munch object. + """ + + return self.c.patch( + url=f"{self.v2api_host}/v2/configtrees/{tree_name}/revisions/{revision_id}/{key}/", + headers=self.config.get_headers(project_guid=project_guid, **kwargs), + json=config_key_rename, + ) + + # Managed Service API + @handle_and_munchify_response + def list_providers(self) -> Munch: + """List all providers. + + Returns: + Munch: List of providers as a Munch object. + """ + + return self.c.get( + url=f"{self.v2api_host}/v2/managedservices/providers/", + headers=self.config.get_headers(with_project=False), + ) + + @handle_and_munchify_response + def list_instances( + self, + cont: int = 0, + limit: int = 50, + label_selector: list[str] = None, + providers: list[str] = None, + ): + """List all instances in a project. 
+ + Args: + cont (int, optional): Start index of instances. Defaults to 0. + limit (int, optional): Number of instances to list. Defaults to 50. + label_selector (list[str], optional): Define labelSelector to get instances from. Defaults to None. + providers (list[str], optional): Define providers to get instances from. Defaults to None. + + Returns: + Munch: List of instances as a Munch object. + """ + return self.c.get( + url=f"{self.v2api_host}/v2/managedservices/", + headers=self.config.get_headers(), + params={ + "continue": cont, + "limit": limit, + "labelSelector": label_selector, + "providers": providers, + }, + ) + + @handle_and_munchify_response + def get_instance(self, name: str) -> Munch: + """Get an instance by its name. + + Args: + name (str): Instance name + + Returns: + Munch: Instance details as a Munch object. + """ + + return self.c.get( + url=f"{self.v2api_host}/v2/managedservices/{name}/", + headers=self.config.get_headers(), + ) + + @handle_and_munchify_response + def create_instance(self, body: dict) -> Munch: + """Create a new instance. + + Returns: + Munch: Instance details as a Munch object. + """ + + return self.c.post( + url=f"{self.v2api_host}/v2/managedservices/", + headers=self.config.get_headers(), + json=body, + ) + + @handle_and_munchify_response + def delete_instance(self, name: str) -> Munch: + """Delete an instance. + + Returns: + Munch: Instance details as a Munch object. + """ + + return self.c.delete( + url=f"{self.v2api_host}/v2/managedservices/{name}/", + headers=self.config.get_headers(), + ) + + @handle_and_munchify_response + def list_instance_bindings( + self, + instance_name: str, + cont: int = 0, + limit: int = 50, + label_selector: list[str] = None, + ): + """List all instance bindings in a project. + + Args: + instance_name (str): Instance name. + cont (int, optional): Start index of instance bindings. Defaults to 0. + limit (int, optional): Number of instance bindings to list. Defaults to 50. + label_selector (list[str], optional): Define labelSelector to get instance bindings from. Defaults to None. + + Returns: + Munch: List of instance bindings as a Munch object. + """ + return self.c.get( + url=f"{self.v2api_host}/v2/managedservices/{instance_name}/bindings/", + headers=self.config.get_headers(), + params={ + "continue": cont, + "limit": limit, + "labelSelector": label_selector, + }, + ) + + @handle_and_munchify_response + def create_instance_binding(self, instance_name: str, body: dict) -> Munch: + """Create a new instance binding. + + Args: + instance_name (str): Instance name. + body (object): Instance binding details. + + Returns: + Munch: Instance binding details as a Munch object. + """ + + return self.c.post( + url=f"{self.v2api_host}/v2/managedservices/{instance_name}/bindings/", + headers=self.config.get_headers(), + json=body, + ) + + @handle_and_munchify_response + def get_instance_binding(self, instance_name: str, name: str) -> Munch: + """Get an instance binding by its name. + + Args: + instance_name (str): Instance name. + name (str): Instance binding name. + + Returns: + Munch: Instance binding details as a Munch object. + """ + + return self.c.get( + url=f"{self.v2api_host}/v2/managedservices/{instance_name}/bindings/{name}/", + headers=self.config.get_headers(), + ) + + @handle_and_munchify_response + def delete_instance_binding(self, instance_name: str, name: str) -> Munch: + """Delete an instance binding. + + Args: + instance_name (str): Instance name. + name (str): Instance binding name. 
+ + Returns: + Munch: Instance binding details as a Munch object. + """ + + return self.c.delete( + url=f"{self.v2api_host}/v2/managedservices/{instance_name}/bindings/{name}/", + headers=self.config.get_headers(), + ) diff --git a/rapyuta_io_sdk_v2/config.py b/rapyuta_io_sdk_v2/config.py index b5c6775..bd24e21 100644 --- a/rapyuta_io_sdk_v2/config.py +++ b/rapyuta_io_sdk_v2/config.py @@ -13,16 +13,16 @@ # See the License for the specific language governing permissions and # limitations under the License. import json -from dataclasses import dataclass import os +from dataclasses import dataclass from rapyuta_io_sdk_v2.constants import ( APP_NAME, NAMED_ENVIRONMENTS, STAGING_ENVIRONMENT_SUBDOMAIN, ) -from rapyuta_io_sdk_v2.utils import get_default_app_dir from rapyuta_io_sdk_v2.exceptions import ValidationError +from rapyuta_io_sdk_v2.utils import get_default_app_dir @dataclass @@ -69,22 +69,31 @@ def from_file(cls, file_path: str = None) -> "Configuration": auth_token=data.get("auth_token"), ) - def get_headers(self, with_project: bool = True) -> dict: + def get_headers( + self, + organization_guid: str = None, + with_project: bool = True, + project_guid: str = None, + ) -> dict: """Get the headers for the configuration. Args: - with_project (bool): Include project guid in headers. Default is True. + organization_guid (str): The organization guid. + with_project (bool): Whether to include the project headers. + project_guid (str): The project guid. Returns: dict: Headers for the configuration. """ headers = dict(Authorization=f"Bearer {self.auth_token}") - if self.organization_guid: - headers["organizationguid"] = self.organization_guid + organization_guid = organization_guid or self.organization_guid + if organization_guid: + headers["organizationguid"] = organization_guid - if with_project and self.project_guid is not None: - headers["project"] = self.project_guid + project_guid = project_guid or self.project_guid + if with_project and project_guid is not None: + headers["project"] = project_guid custom_client_request_id = os.getenv("REQUEST_ID") if custom_client_request_id: diff --git a/rapyuta_io_sdk_v2/exceptions.py b/rapyuta_io_sdk_v2/exceptions.py index aff52cf..3c233fa 100644 --- a/rapyuta_io_sdk_v2/exceptions.py +++ b/rapyuta_io_sdk_v2/exceptions.py @@ -44,3 +44,51 @@ class ValidationError(Exception): def __init__(self, message=None): self.message = message super().__init__(self.message) + + +class MethodNotAllowedError(Exception): + def __init__(self, message="method not allowed"): + self.message = message + super().__init__(self.message) + + +class InternalServerError(Exception): + def __init__(self, message="internal server error"): + self.message = message + super().__init__(self.message) + + +class NotImplementedError(Exception): + def __init__(self, message="not implemented"): + self.message = message + super().__init__(self.message) + + +class BadGatewayError(Exception): + def __init__(self, message="bad gateway"): + self.message = message + super().__init__(self.message) + + +class UnauthorizedAccessError(Exception): + def __init__(self, message="unauthorized permission access"): + self.message = message + super().__init__(self.message) + + +class GatewayTimeoutError(Exception): + def __init__(self, message="gateway timeout"): + self.message = message + super().__init__(self.message) + + +class ServiceUnavailableError(Exception): + def __init__(self, message="service unavailable"): + self.message = message + super().__init__(self.message) + + +class 
UnknownError(Exception): + def __init__(self, message="unknown error"): + self.message = message + super().__init__(self.message) diff --git a/rapyuta_io_sdk_v2/utils.py b/rapyuta_io_sdk_v2/utils.py index 436d6b7..e16fcd9 100644 --- a/rapyuta_io_sdk_v2/utils.py +++ b/rapyuta_io_sdk_v2/utils.py @@ -13,14 +13,17 @@ # See the License for the specific language governing permissions and # limitations under the License. # from rapyuta_io_sdk_v2.config import Configuration -import http +import asyncio import json import os import sys +import typing +from functools import wraps import httpx +from munch import Munch, munchify -from rapyuta_io_sdk_v2.exceptions import HttpAlreadyExistsError, HttpNotFoundError +import rapyuta_io_sdk_v2.exceptions as exceptions def handle_server_errors(response: httpx.Response): @@ -36,36 +39,36 @@ def handle_server_errors(response: httpx.Response): err = response.text # 404 Not Found - if status_code == http.HTTPStatus.NOT_FOUND: - raise HttpNotFoundError(err) + if status_code == httpx.codes.NOT_FOUND: + raise exceptions.HttpNotFoundError(err) # 405 Method Not Allowed - if status_code == http.HTTPStatus.METHOD_NOT_ALLOWED: - raise Exception("method not allowed") + if status_code == httpx.codes.METHOD_NOT_ALLOWED: + raise exceptions.MethodNotAllowedError(err) # 409 Conflict - if status_code == http.HTTPStatus.CONFLICT: - raise HttpAlreadyExistsError() + if status_code == httpx.codes.CONFLICT: + raise exceptions.HttpAlreadyExistsError(err) # 500 Internal Server Error - if status_code == http.HTTPStatus.INTERNAL_SERVER_ERROR: - raise Exception("internal server error") + if status_code == httpx.codes.INTERNAL_SERVER_ERROR: + raise exceptions.InternalServerError(err) # 501 Not Implemented - if status_code == http.HTTPStatus.NOT_IMPLEMENTED: - raise Exception("not implemented") + if status_code == httpx.codes.NOT_IMPLEMENTED: + raise exceptions.NotImplementedError(err) # 502 Bad Gateway - if status_code == http.HTTPStatus.BAD_GATEWAY: - raise Exception("bad gateway") + if status_code == httpx.codes.BAD_GATEWAY: + raise exceptions.BadGatewayError(err) # 503 Service Unavailable - if status_code == http.HTTPStatus.SERVICE_UNAVAILABLE: - raise Exception("service unavailable") + if status_code == httpx.codes.SERVICE_UNAVAILABLE: + raise exceptions.ServiceUnavailableError(err) # 504 Gateway Timeout - if status_code == http.HTTPStatus.GATEWAY_TIMEOUT: - raise Exception("gateway timeout") + if status_code == httpx.codes.GATEWAY_TIMEOUT: + raise exceptions.GatewayTimeoutError(err) # 401 UnAuthorize Access - if status_code == http.HTTPStatus.UNAUTHORIZED: - raise Exception("unauthorized permission access") + if status_code == httpx.codes.UNAUTHORIZED: + raise exceptions.UnauthorizedAccessError(err) # Anything else that is not known if status_code > 504: - raise Exception("unknown server error") + raise exceptions.UnknownError(err) def get_default_app_dir(app_name: str) -> str: @@ -85,3 +88,99 @@ def get_default_app_dir(app_name: str) -> str: # On Linux and other Unix-like systems xdg_config_home = os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")) return os.path.join(xdg_config_home, app_name) + + +# Decorator to handle server errors and munchify response +def handle_and_munchify_response(func) -> typing.Callable: + """Decorator to handle server errors and munchify response. + + Args: + func (callable): The function to decorate. 
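+
+    Returns:
+        callable: A wrapper that runs handle_server_errors on the
+        httpx.Response returned by func and converts the JSON body into a
+        Munch. Works for both sync and async callables.
+
+    Example (illustrative sketch mirroring how the SDK client methods apply
+    the decorator; get_thing and its URL are hypothetical):
+
+        @handle_and_munchify_response
+        def get_thing(self, name: str, **kwargs) -> Munch:
+            return self.c.get(
+                url=f"{self.v2api_host}/v2/things/{name}/",
+                headers=self.config.get_headers(**kwargs),
+            )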
+ """ + + @wraps(func) + async def async_wrapper(*args, **kwargs) -> Munch: + response = await func(*args, **kwargs) + handle_server_errors(response) + return munchify(response.json()) + + @wraps(func) + def sync_wrapper(*args, **kwargs) -> Munch: + response = func(*args, **kwargs) + handle_server_errors(response) + return munchify(response.json()) + + if asyncio.iscoroutinefunction(func): + return async_wrapper + + return sync_wrapper + + +def walk_pages( + func: typing.Callable, + *args, + limit: int = 50, + cont: int = 0, + **kwargs, +) -> typing.Generator: + """A generator function to paginate through list API results. + + Args: + func (callable): The API function to call, must accept `cont` and `limit` as arguments. + *args: Positional arguments to pass to the API function. + limit (int, optional): Maximum number of items to return. Defaults to 50. + cont (int, optional): Initial continuation token. Defaults to 0. + **kwargs: Additional keyword arguments to pass to the API function. + + Yields: + Munch: Each item from the API response. + """ + while True: + data = func(cont, limit, *args, **kwargs) + + items = data.get("items", []) + if not items: + break + + for item in items: + yield munchify(item) + + # Update `cont` for the next page + cont = data.get("metadata", {}).get("continue") + if cont is None: + break + + +async def walk_pages_async( + func: typing.Callable, + *args, + limit: int = 50, + cont: int = 0, + **kwargs, +) -> typing.AsyncGenerator: + """A generator function to paginate through list API results. + + Args: + func (callable): The API function to call, must accept `cont` and `limit` as arguments. + *args: Positional arguments to pass to the API function. + limit (int, optional): Maximum number of items to return. Defaults to 50. + cont (int, optional): Initial continuation token. Defaults to 0. + **kwargs: Additional keyword arguments to pass to the API function. + + Yields: + Munch: Each item from the API response. 
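+
+    Example (illustrative sketch; assumes an async SDK client whose
+    list_deployments method accepts cont and limit as its first two
+    positional arguments, as the clients in this SDK do):
+
+        async for deployment in walk_pages_async(client.list_deployments, limit=10):
+            print(deployment)  # each page item is yielded as a Munch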
+ """ + while True: + data = await func(cont, limit, *args, **kwargs) + + items = data.get("items", []) + if not items: + break + + for item in items: + yield munchify(item) + + # Update `cont` for the next page + cont = data.get("metadata", {}).get("continue") + if cont is None: + break diff --git a/tests/async_tests/test_configtree_async.py b/tests/async_tests/test_configtree_async.py new file mode 100644 index 0000000..8976496 --- /dev/null +++ b/tests/async_tests/test_configtree_async.py @@ -0,0 +1,362 @@ +import httpx +import pytest +import pytest_asyncio # noqa: F401 +from munch import Munch +from asyncmock import AsyncMock + +from tests.data.mock_data import configtree_body # noqa: F401 +from tests.utils.fixtures import async_client as client # noqa: F401 + + +@pytest.mark.asyncio +async def test_list_configtrees_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.get method + mock_get = mocker.patch("httpx.AsyncClient.get") + + # Set up the mock responses for pagination + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"continue": 1}, + "items": [{"name": "test-configtree", "guid": "mock_configtree_guid"}], + }, + ) + + # Call the list_configtrees method + response = await client.list_configtrees() + + # Validate the response + assert isinstance(response, Munch) + assert response["items"] == [ + {"name": "test-configtree", "guid": "mock_configtree_guid"} + ] + + +@pytest.mark.asyncio +async def test_list_configtrees_bad_gateway(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.get method + mock_get = mocker.patch("httpx.AsyncClient.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=502, + json={"error": "bad gateway"}, + ) + + # Call the list_configtrees method + with pytest.raises(Exception) as exc: + await client.list_configtrees() + + assert str(exc.value) == "bad gateway" + + +@pytest.mark.asyncio +async def test_create_configtree_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.post method + mock_post = mocker.patch("httpx.AsyncClient.post") + + # Set up the mock response + mock_post.return_value = httpx.Response( + status_code=201, + json={ + "metadata": {"guid": "test_configtree_guid", "name": "test_configtree"}, + }, + ) + + # Call the create_configtree method + response = await client.create_configtree(configtree_body) + + # Validate the response + assert isinstance(response, Munch) + assert response["metadata"]["guid"] == "test_configtree_guid" + + +@pytest.mark.asyncio +async def test_create_configtree_service_unavailable(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.post method + mock_post = mocker.patch("httpx.AsyncClient.post") + + # Set up the mock response + mock_post.return_value = httpx.Response( + status_code=503, + json={"error": "service unavailable"}, + ) + + # Call the create_configtree method + with pytest.raises(Exception) as exc: + await client.create_configtree(configtree_body) + + assert str(exc.value) == "service unavailable" + + +@pytest.mark.asyncio +async def test_get_configtree_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.get method + mock_get = mocker.patch("httpx.AsyncClient.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "test_configtree_guid", "name": "test_configtree"}, + }, + ) + + # Call the get_configtree method + response = await 
client.get_configtree(name="mock_configtree_name") + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_configtree_guid" + assert response.metadata.name == "test_configtree" + + +@pytest.mark.asyncio +async def test_set_configtree_revision_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.put method + mock_put = mocker.patch("httpx.AsyncClient.put") + + # Set up the mock response + mock_put.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "test_configtree_guid", "name": "test_configtree"}, + }, + ) + + # Call the set_configtree_revision method + response = await client.set_configtree_revision( + name="mock_configtree_name", configtree=configtree_body + ) + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_configtree_guid" + assert response.metadata.name == "test_configtree" + + +@pytest.mark.asyncio +async def test_update_configtree_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.put method + mock_put = mocker.patch("httpx.AsyncClient.put") + + # Set up the mock response + mock_put.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "test_configtree_guid", "name": "test_configtree"}, + }, + ) + + # Call the update_configtree method + response = await client.update_configtree( + name="mock_configtree_name", body=configtree_body + ) + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_configtree_guid" + assert response.metadata.name == "test_configtree" + + +@pytest.mark.asyncio +async def test_delete_configtree_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.delete method + mock_delete = mocker.patch("httpx.AsyncClient.delete") + + # Set up the mock response + mock_delete.return_value = httpx.Response( + status_code=204, + json={"success": True}, + ) + + # Call the delete_configtree method + response = await client.delete_configtree(name="mock_configtree_name") + + # Validate the response + assert response["success"] is True + + +@pytest.mark.asyncio +async def test_list_revisions_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.get method + mock_get = mocker.patch("httpx.AsyncClient.get") + + # Set up the mock responses for pagination + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"continue": 1}, + "items": [{"name": "test-configtree", "guid": "mock_configtree_guid"}], + }, + ) + + # Call the list_revisions method + response = await client.list_revisions(tree_name="mock_configtree_name") + + # Validate the response + assert isinstance(response, Munch) + assert response["items"] == [ + {"name": "test-configtree", "guid": "mock_configtree_guid"} + ] + + +@pytest.mark.asyncio +async def test_create_revision_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.post method + mock_post = mocker.patch("httpx.AsyncClient.post") + + # Set up the mock response + mock_post.return_value = httpx.Response( + status_code=201, + json={ + "metadata": {"guid": "test_revision_guid", "name": "test_revision"}, + }, + ) + + # Call the create_revision method + response = await client.create_revision( + name="mock_configtree_name", body=configtree_body + ) + + # Validate the response + assert isinstance(response, Munch) + assert response["metadata"]["guid"] == "test_revision_guid" + + +@pytest.mark.asyncio 
+async def test_put_keys_in_revision_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.put method + mock_put = mocker.patch("httpx.AsyncClient.put") + + # Set up the mock response + mock_put.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "test_revision_guid", "name": "test_revision"}, + }, + ) + + # Call the put_keys_in_revision method + response = await client.put_keys_in_revision( + name="mock_configtree_name", + revision_id="mock_revision_id", + config_values=["mock_value1", "mock_value2"], + ) + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_revision_guid" + assert response.metadata.name == "test_revision" + + +@pytest.mark.asyncio +async def test_commit_revision_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.put method + mock_patch = mocker.patch("httpx.AsyncClient.patch") + + # Set up the mock response + mock_patch.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "test_revision_guid", "name": "test_revision"}, + }, + ) + + # Call the commit_revision method + response = await client.commit_revision( + tree_name="mock_configtree_name", + revision_id="mock_revision_id", + ) + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_revision_guid" + assert response.metadata.name == "test_revision" + + +@pytest.mark.asyncio +async def test_get_key_in_revision(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.get method + mock_get = mocker.patch("httpx.AsyncClient.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "test_revision_guid", "name": "test_revision"}, + }, + ) + + # Call the get_key_in_revision method + response = await client.get_key_in_revision( + tree_name="mock_configtree_name", revision_id="mock_revision_id", key="mock_key" + ) + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_revision_guid" + assert response.metadata.name == "test_revision" + + +@pytest.mark.asyncio +async def test_put_key_in_revision_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.put method + mock_put = mocker.patch("httpx.AsyncClient.put") + + # Set up the mock response + mock_put.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "test_revision_guid", "name": "test_revision"}, + }, + ) + + # Call the put_key_in_revision method + response = await client.put_key_in_revision( + tree_name="mock_configtree_name", revision_id="mock_revision_id", key="mock_key" + ) + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_revision_guid" + assert response.metadata.name == "test_revision" + + +@pytest.mark.asyncio +async def test_delete_key_in_revision_success(client, mocker: AsyncMock): # noqa: F811 + mock_delete = mocker.patch("httpx.AsyncClient.delete") + + mock_delete.return_value = httpx.Response( + status_code=204, + json={"success": True}, + ) + + response = await client.delete_key_in_revision( + tree_name="mock_configtree_name", revision_id="mock_revision_id", key="mock_key" + ) + + assert response["success"] is True + + +@pytest.mark.asyncio +async def test_rename_key_in_revision_success(client, mocker: AsyncMock): # noqa: F811 + mock_patch = mocker.patch("httpx.AsyncClient.patch") + + mock_patch.return_value 
= httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "test_revision_guid", "name": "test_revision"}, + }, + ) + + response = await client.rename_key_in_revision( + tree_name="mock_configtree_name", + revision_id="mock_revision_id", + key="mock_key", + config_key_rename={"metadata": {"name": "test_key"}}, + ) + + assert isinstance(response, Munch) + assert response.metadata.guid == "test_revision_guid" + assert response.metadata.name == "test_revision" diff --git a/tests/async_tests/test_deployment_async.py b/tests/async_tests/test_deployment_async.py new file mode 100644 index 0000000..f124148 --- /dev/null +++ b/tests/async_tests/test_deployment_async.py @@ -0,0 +1,153 @@ +import httpx +import pytest +import pytest_asyncio # noqa: F401 +from munch import Munch +from asyncmock import AsyncMock + +from tests.data.mock_data import deployment_body # noqa: F401 +from tests.utils.fixtures import async_client as client # noqa: F401 + + +@pytest.mark.asyncio +async def test_list_deployments_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.get") method + mock_get = mocker.patch("httpx.AsyncClient.get") + + # Set up the mock responses for pagination + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"continue": 1}, + "items": [{"name": "test-deployment", "guid": "mock_deployment_guid"}], + }, + ) + + # Call the list_deployments method + response = await client.list_deployments() + + # Validate the response + assert isinstance(response, Munch) + assert response["items"] == [ + {"name": "test-deployment", "guid": "mock_deployment_guid"} + ] + + +@pytest.mark.asyncio +async def test_list_deployments_not_found(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.get") method + mock_get = mocker.patch("httpx.AsyncClient.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=404, + json={"error": "not found"}, + ) + + with pytest.raises(Exception) as exc: + await client.list_deployments() + + assert str(exc.value) == "not found" + + +@pytest.mark.asyncio +async def test_get_deployment_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.get") method + mock_get = mocker.patch("httpx.AsyncClient.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "kind": "Deployment", + "metadata": {"guid": "test_deployment_guid", "name": "test_deployment"}, + }, + ) + + # Call the get_deployment method + response = await client.get_deployment(name="mock_deployment_name") + + # Validate the response + assert isinstance(response, Munch) + assert response["metadata"]["guid"] == "test_deployment_guid" + + +@pytest.mark.asyncio +async def test_get_deployment_not_found(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.get") method + mock_get = mocker.patch("httpx.AsyncClient.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=404, + json={"error": "deployment not found"}, + ) + + # Call the get_deployment method + with pytest.raises(Exception) as exc: + await client.get_deployment(name="mock_deployment_name") + + assert str(exc.value) == "deployment not found" + + +@pytest.mark.asyncio +async def test_create_deployment_success(client, deployment_body, mocker: AsyncMock): # noqa: F811 + mock_post = mocker.patch("httpx.AsyncClient.post") + + mock_post.return_value = httpx.Response( + status_code=200, + json={ + "kind": "Deployment", + "metadata": 
{"guid": "test_deployment_guid", "name": "test_deployment"}, + }, + ) + + response = await client.create_deployment(body=deployment_body) + + assert isinstance(response, Munch) + assert response["metadata"]["guid"] == "test_deployment_guid" + + +@pytest.mark.asyncio +async def test_create_deployment_unauthorized(client, deployment_body, mocker: AsyncMock): # noqa: F811 + mock_post = mocker.patch("httpx.AsyncClient.post") + + mock_post.return_value = httpx.Response( + status_code=401, + json={"error": "unauthorized"}, + ) + + with pytest.raises(Exception) as exc: + await client.create_deployment(body=deployment_body) + + assert str(exc.value) == "unauthorized" + + +@pytest.mark.asyncio +async def test_update_deployment_success(client, deployment_body, mocker: AsyncMock): # noqa: F811 + mock_put = mocker.patch("httpx.AsyncClient.put") + + mock_put.return_value = httpx.Response( + status_code=200, + json={ + "kind": "Deployment", + "metadata": {"guid": "test_deployment_guid", "name": "test_deployment"}, + }, + ) + + response = await client.update_deployment( + name="mock_deployment_name", body=deployment_body + ) + + assert isinstance(response, Munch) + assert response["metadata"]["guid"] == "test_deployment_guid" + + +@pytest.mark.asyncio +async def test_delete_deployment_success(client, mocker: AsyncMock): # noqa: F811 + mock_delete = mocker.patch("httpx.AsyncClient.delete") + + mock_delete.return_value = httpx.Response(status_code=204, json={"success": True}) + + response = await client.delete_deployment(name="mock_deployment_name") + + assert response["success"] is True diff --git a/tests/async_tests/test_disk_async.py b/tests/async_tests/test_disk_async.py new file mode 100644 index 0000000..78f7d1a --- /dev/null +++ b/tests/async_tests/test_disk_async.py @@ -0,0 +1,142 @@ +import httpx +import pytest +import pytest_asyncio # noqa: F401 +from munch import Munch +from asyncmock import AsyncMock + +from tests.data.mock_data import disk_body # noqa: F401 +from tests.utils.fixtures import async_client as client # noqa: F401 + + +@pytest.mark.asyncio +async def test_list_disks_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.get method + mock_get = mocker.patch("httpx.AsyncClient.get") + + # Set up mock responses for pagination + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"continue": 1}, + "items": [{"name": "test-disk", "guid": "mock_disk_guid"}], + }, + ) + + # Call the list_disks method + response = await client.list_disks() + + # Validate the response + assert isinstance(response, Munch) + assert response["items"] == [{"name": "test-disk", "guid": "mock_disk_guid"}] + + +@pytest.mark.asyncio +async def test_list_disks_not_found(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.get method + mock_get = mocker.patch("httpx.AsyncClient.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=404, + json={"error": "not found"}, + ) + + with pytest.raises(Exception) as exc: + await client.list_disks() + + assert str(exc.value) == "not found" + + +@pytest.mark.asyncio +async def test_get_disk_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.get method + mock_get = mocker.patch("httpx.AsyncClient.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "kind": "Disk", + "metadata": {"guid": "test_disk_guid", "name": "mock_disk_name"}, + }, + ) + + # Call the get_disk method + response = 
await client.get_disk(name="mock_disk_name") + + # Validate the response + assert isinstance(response, Munch) + assert response["metadata"]["guid"] == "test_disk_guid" + + +@pytest.mark.asyncio +async def test_get_disk_not_found(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.get method + mock_get = mocker.patch("httpx.AsyncClient.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=404, + json={"error": "disk not found"}, + ) + + # Call the get_disk method + with pytest.raises(Exception) as exc: + await client.get_disk(name="mock_disk_name") + + assert str(exc.value) == "disk not found" + + +@pytest.mark.asyncio +async def test_create_disk_success(client, disk_body, mocker: AsyncMock): # noqa: F811 + mock_post = mocker.patch("httpx.AsyncClient.post") + + mock_post.return_value = httpx.Response( + status_code=200, + json={ + "kind": "Disk", + "metadata": {"guid": "test_disk_guid", "name": "test_disk"}, + }, + ) + + response = await client.create_disk(body=disk_body, project_guid="mock_project_guid") + + assert isinstance(response, Munch) + assert response.metadata.guid == "test_disk_guid" + assert response.metadata.name == "test_disk" + + +@pytest.mark.asyncio +async def test_delete_disk_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.delete method + mock_delete = mocker.patch("httpx.AsyncClient.delete") + + # Set up the mock response + mock_delete.return_value = httpx.Response( + status_code=204, + json={"success": True}, + ) + + # Call the delete_disk method + response = await client.delete_disk(name="mock_disk_name") + + # Validate the response + assert response["success"] is True + + +@pytest.mark.asyncio +async def test_delete_disk_not_found(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.delete method + mock_delete = mocker.patch("httpx.AsyncClient.delete") + + # Set up the mock response + mock_delete.return_value = httpx.Response( + status_code=404, + json={"error": "disk not found"}, + ) + + # Call the delete_disk method + with pytest.raises(Exception) as exc: + await client.delete_disk(name="mock_disk_name") + + assert str(exc.value) == "disk not found" diff --git a/tests/async_tests/test_managedservice_async.py b/tests/async_tests/test_managedservice_async.py new file mode 100644 index 0000000..d670583 --- /dev/null +++ b/tests/async_tests/test_managedservice_async.py @@ -0,0 +1,208 @@ +import httpx +import pytest # noqa: F401 +from munch import Munch +from asyncmock import AsyncMock + +from tests.utils.fixtures import async_client as client # noqa: F401 + + +@pytest.mark.asyncio +async def test_list_providers_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.get method + mock_get = mocker.patch("httpx.AsyncClient.get") + + # Set up the mock responses for pagination + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"continue": 1}, + "items": [{"name": "test-provider", "guid": "mock_provider_guid"}], + }, + ) + + # Call the list_providers method + response = await client.list_providers() + + # Validate the response + assert isinstance(response, Munch) + assert response["items"] == [{"name": "test-provider", "guid": "mock_provider_guid"}] + + +@pytest.mark.asyncio +async def test_list_instances_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.get method + mock_get = mocker.patch("httpx.AsyncClient.get") + + # Set up the mock responses for pagination + 
mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"continue": 1}, + "items": [{"name": "test-instance", "guid": "mock_instance_guid"}], + }, + ) + + # Call the list_instances method + response = await client.list_instances() + + # Validate the response + assert isinstance(response, Munch) + assert response["items"] == [{"name": "test-instance", "guid": "mock_instance_guid"}] + + +@pytest.mark.asyncio +async def test_get_instance_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.get method + mock_get = mocker.patch("httpx.AsyncClient.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "test_instance_guid", "name": "test_instance"}, + }, + ) + + # Call the get_instance method + response = await client.get_instance(name="mock_instance_name") + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_instance_guid" + + +@pytest.mark.asyncio +async def test_create_instance_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.post method + mock_post = mocker.patch("httpx.AsyncClient.post") + + # Set up the mock response + mock_post.return_value = httpx.Response( + status_code=201, + json={ + "metadata": {"guid": "test_instance_guid", "name": "test_instance"}, + }, + ) + + # Call the create_instance method + response = await client.create_instance(body={"name": "test_instance"}) + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_instance_guid" + + +@pytest.mark.asyncio +async def test_delete_instance_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.delete method + mock_delete = mocker.patch("httpx.AsyncClient.delete") + + # Set up the mock response + mock_delete.return_value = httpx.Response( + status_code=204, + json={"success": True}, + ) + + # Call the delete_instance method + response = await client.delete_instance(name="mock_instance_name") + + # Validate the response + assert response["success"] is True + + +@pytest.mark.asyncio +async def test_list_instance_bindings_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.get method + mock_get = mocker.patch("httpx.AsyncClient.get") + + # Set up the mock responses for pagination + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"continue": 1}, + "items": [ + {"name": "test-instance-binding", "guid": "mock_instance_binding_guid"} + ], + }, + ) + + # Call the list_instance_bindings method + response = await client.list_instance_bindings("mock_instance_name") + + # Validate the response + assert isinstance(response, Munch) + assert response["items"] == [ + {"name": "test-instance-binding", "guid": "mock_instance_binding_guid"} + ] + + +@pytest.mark.asyncio +async def test_get_instance_binding_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.get method + mock_get = mocker.patch("httpx.AsyncClient.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": { + "guid": "test_instance_binding_guid", + "name": "test_instance_binding", + }, + }, + ) + + # Call the get_instance_binding method + response = await client.get_instance_binding( + name="mock_instance_binding_name", instance_name="mock_instance_name" + ) + + # Validate the response + assert isinstance(response, Munch) + assert 
response.metadata.guid == "test_instance_binding_guid" + + +@pytest.mark.asyncio +async def test_create_instance_binding_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.post method + mock_post = mocker.patch("httpx.AsyncClient.post") + + # Set up the mock response + mock_post.return_value = httpx.Response( + status_code=201, + json={ + "metadata": { + "guid": "test_instance_binding_guid", + "name": "test_instance_binding", + }, + }, + ) + + # Call the create_instance_binding method + response = await client.create_instance_binding( + body={"name": "test_instance_binding"}, instance_name="mock_instance_name" + ) + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_instance_binding_guid" + + +@pytest.mark.asyncio +async def test_delete_instance_binding_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.delete method + mock_delete = mocker.patch("httpx.AsyncClient.delete") + + # Set up the mock response + mock_delete.return_value = httpx.Response( + status_code=204, + json={"success": True}, + ) + + # Call the delete_instance_binding method + response = await client.delete_instance_binding( + name="mock_instance_binding_name", instance_name="mock_instance_name" + ) + + # Validate the response + assert response["success"] is True diff --git a/tests/async_tests/test_network_async.py b/tests/async_tests/test_network_async.py new file mode 100644 index 0000000..0512404 --- /dev/null +++ b/tests/async_tests/test_network_async.py @@ -0,0 +1,124 @@ +import httpx +import pytest +import pytest_asyncio # noqa: F401 +from munch import Munch +from asyncmock import AsyncMock + +from tests.data.mock_data import network_body # noqa: F401 +from tests.utils.fixtures import async_client as client # noqa: F401 + + +@pytest.mark.asyncio +async def test_list_networks_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.get method + mock_get = mocker.patch("httpx.AsyncClient.get") + + # Set up the mock responses for pagination + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"continue": 1}, + "items": [{"name": "test-network", "guid": "mock_network_guid"}], + }, + ) + + # Call the list_networks method + response = await client.list_networks() + + # Validate the response + assert isinstance(response, Munch) + assert response["items"] == [{"name": "test-network", "guid": "mock_network_guid"}] + + +@pytest.mark.asyncio +async def test_list_networks_not_found(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.get method + mock_get = mocker.patch("httpx.AsyncClient.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=404, + json={"error": "not found"}, + ) + + with pytest.raises(Exception) as exc: + await client.list_networks() + + assert str(exc.value) == "not found" + + +@pytest.mark.asyncio +async def test_create_network_success(client, network_body, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.post method + mock_post = mocker.patch("httpx.AsyncClient.post") + + # Set up the mock response + mock_post.return_value = httpx.Response( + status_code=201, + json={ + "metadata": {"guid": "mock_network_guid", "name": "test-network"}, + }, + ) + + # Call the create_network method + response = await client.create_network(body=network_body) + + # Validate the response + assert isinstance(response, Munch) + assert response["metadata"]["name"] == "test-network" + + +@pytest.mark.asyncio 
+async def test_create_network_failure(client, network_body, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.post method + mock_post = mocker.patch("httpx.AsyncClient.post") + + # Set up the mock response + mock_post.return_value = httpx.Response( + status_code=409, + json={"error": "already exists"}, + ) + + with pytest.raises(Exception) as exc: + await client.create_network(body=network_body) + + assert str(exc.value) == "already exists" + + +@pytest.mark.asyncio +async def test_get_network_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.get method + mock_get = mocker.patch("httpx.AsyncClient.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "mock_network_guid", "name": "test-network"}, + }, + ) + + # Call the get_network method + response = await client.get_network(name="test-network") + + # Validate the response + assert isinstance(response, Munch) + assert response["metadata"]["guid"] == "mock_network_guid" + + +@pytest.mark.asyncio +async def test_delete_network_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.delete method + mock_delete = mocker.patch("httpx.AsyncClient.delete") + + # Set up the mock response + mock_delete.return_value = httpx.Response( + status_code=204, + json={"success": True}, + ) + + # Call the delete_network method + response = await client.delete_network(name="test-network") + + # Validate the response + assert response["success"] is True diff --git a/tests/async_tests/test_package_async.py b/tests/async_tests/test_package_async.py new file mode 100644 index 0000000..cc40cea --- /dev/null +++ b/tests/async_tests/test_package_async.py @@ -0,0 +1,106 @@ +import pytest +import pytest_asyncio # noqa: F401 +import httpx +from munch import Munch +from asyncmock import AsyncMock + +from tests.data.mock_data import package_body # noqa: F401 +from tests.utils.fixtures import async_client as client # noqa: F401 + + +@pytest.mark.asyncio +async def test_list_packages_success(client, mocker: AsyncMock): # noqa: F811 + mock_get = mocker.patch("httpx.AsyncClient.get") + + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"continue": 1}, + "items": [{"name": "test_package", "guid": "mock_package_guid"}], + }, + ) + + response = await client.list_packages() + + assert isinstance(response, Munch) + assert response["items"] == [{"name": "test_package", "guid": "mock_package_guid"}] + + +@pytest.mark.asyncio +async def test_list_packages_not_found(client, mocker: AsyncMock): # noqa: F811 + mock_get = mocker.patch("httpx.AsyncClient.get") + + mock_get.return_value = httpx.Response( + status_code=404, + json={"error": "not found"}, + ) + + with pytest.raises(Exception) as exc: + await client.list_packages() + assert str(exc.value) == "not found" + + +@pytest.mark.asyncio +async def test_create_package_success(client, package_body, mocker: AsyncMock): # noqa: F811 + mock_post = mocker.patch("httpx.AsyncClient.post") + + mock_post.return_value = httpx.Response( + status_code=200, + json={ + "kind": "Package", + "metadata": {"name": "test-package", "guid": "mock_package_guid"}, + "spec": {"users": [{"userGUID": "mock_user_guid", "emailID": "mock_email"}]}, + }, + ) + + response = await client.create_package(package_body) + + assert isinstance(response, Munch) + assert response["metadata"]["guid"] == "mock_package_guid" + + +@pytest.mark.asyncio +async def test_get_package_success(client, mocker: AsyncMock): # noqa: F811 + mock_get = 
mocker.patch("httpx.AsyncClient.get") + + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "kind": "Package", + "metadata": {"name": "test-package", "guid": "mock_package_guid"}, + "spec": {"users": [{"userGUID": "mock_user_guid", "emailID": "mock_email"}]}, + }, + ) + + response = await client.get_package("mock_package_guid") + + assert isinstance(response, Munch) + assert response["metadata"]["guid"] == "mock_package_guid" + + +@pytest.mark.asyncio +async def test_get_package_not_found(client, mocker: AsyncMock): # noqa: F811 + mock_get = mocker.patch("httpx.AsyncClient.get") + + mock_get.return_value = httpx.Response( + status_code=404, + json={"error": "not found"}, + ) + + with pytest.raises(Exception) as exc: + await client.get_package("mock_package_guid") + assert str(exc.value) == "not found" + + +@pytest.mark.asyncio +async def test_delete_package_success(client, mocker: AsyncMock): # noqa: F811 + mock_delete = mocker.patch("httpx.AsyncClient.delete") + + mock_delete.return_value = httpx.Response( + status_code=204, + json={"success": True}, + ) + + response = await client.delete_package("mock_package_guid") + + assert response["success"] is True diff --git a/tests/async_tests/test_project_async.py b/tests/async_tests/test_project_async.py new file mode 100644 index 0000000..8f4956c --- /dev/null +++ b/tests/async_tests/test_project_async.py @@ -0,0 +1,97 @@ +import pytest +import pytest_asyncio # noqa: F401 +import httpx +from munch import Munch +from asyncmock import AsyncMock + +from tests.data.mock_data import project_body +from tests.utils.fixtures import async_client as client # noqa: F401 + + +@pytest.mark.asyncio +async def test_list_projects_success(client, mocker: AsyncMock): # noqa: F811 + mock_get = mocker.patch("httpx.AsyncClient.get") + + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"continue": 1}, + "items": [{"name": "test-project", "guid": "mock_project_guid"}], + }, + ) + + response = await client.list_projects() + + assert isinstance(response, Munch) + assert response["items"] == [{"name": "test-project", "guid": "mock_project_guid"}] + + +@pytest.mark.asyncio +async def test_create_project_success(client, mocker: AsyncMock): # noqa: F811 + mock_post = mocker.patch("httpx.AsyncClient.post") + + mock_post.return_value = httpx.Response( + status_code=200, + json={ + "kind": "Project", + "metadata": {"name": "test-project", "guid": "mock_project_guid"}, + "spec": { + "users": [ + {"userGUID": "mock_user_guid", "emailID": "test.user@example.com"} + ] + }, + }, + ) + + response = await client.create_project(project_body) + + assert isinstance(response, Munch) + assert response["metadata"]["guid"] == "mock_project_guid" + + +@pytest.mark.asyncio +async def test_get_project_success(client, mocker: AsyncMock): # noqa: F811 + mock_get = mocker.patch("httpx.AsyncClient.get") + + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "kind": "Project", + "metadata": {"name": "test-project", "guid": "mock_project_guid"}, + }, + ) + + response = await client.get_project("mock_project_guid") + + assert isinstance(response, Munch) + assert response["metadata"]["guid"] == "mock_project_guid" + + +@pytest.mark.asyncio +async def test_update_project_success(client, mocker: AsyncMock): # noqa: F811 + mock_put = mocker.patch("httpx.AsyncClient.put") + + mock_put.return_value = httpx.Response( + status_code=200, + json={ + "kind": "Project", + "metadata": {"name": "test-project", "guid": "mock_project_guid"}, 
+ }, + ) + + response = await client.update_project("mock_project_guid", project_body) + + assert isinstance(response, Munch) + assert response["metadata"]["guid"] == "mock_project_guid" + + +@pytest.mark.asyncio +async def test_delete_project_success(client, mocker: AsyncMock): # noqa: F811 + mock_delete = mocker.patch("httpx.AsyncClient.delete") + + mock_delete.return_value = httpx.Response(status_code=200, json={"success": True}) + + response = await client.delete_project("mock_project_guid") + + assert isinstance(response, Munch) + assert response["success"] is True diff --git a/tests/async_tests/test_secret_async.py b/tests/async_tests/test_secret_async.py new file mode 100644 index 0000000..3e86a65 --- /dev/null +++ b/tests/async_tests/test_secret_async.py @@ -0,0 +1,146 @@ +import httpx +import pytest +import pytest_asyncio # noqa: F401 +from munch import Munch +from asyncmock import AsyncMock + +from tests.data.mock_data import secret_body # noqa: F401 +from tests.utils.fixtures import async_client as client # noqa: F401 + + +@pytest.mark.asyncio +async def test_list_secrets_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.get method + mock_get = mocker.patch("httpx.AsyncClient.get") + + # Set up mock responses for pagination + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"continue": 1}, + "items": [{"name": "test-secret", "guid": "mock_secret_guid"}], + }, + ) + + # Call the list_secrets method + response = await client.list_secrets() + + # Validate the response + assert isinstance(response, Munch) + assert response["items"] == [{"name": "test-secret", "guid": "mock_secret_guid"}] + + +@pytest.mark.asyncio +async def test_list_secrets_not_found(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.get method + mock_get = mocker.patch("httpx.AsyncClient.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=404, + json={"error": "not found"}, + ) + + with pytest.raises(Exception) as exc: + await client.list_secrets() + + assert str(exc.value) == "not found" + + +@pytest.mark.asyncio +async def test_create_secret_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.post method + mock_post = mocker.patch("httpx.AsyncClient.post") + + # Set up the mock response + mock_post.return_value = httpx.Response( + status_code=201, + json={ + "metadata": {"guid": "test_secret_guid", "name": "test_secret"}, + }, + ) + + # Call the create_secret method + response = await client.create_secret(secret_body) + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_secret_guid" + + +@pytest.mark.asyncio +async def test_create_secret_already_exists(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.post method + mock_post = mocker.patch("httpx.AsyncClient.post") + + # Set up the mock response + mock_post.return_value = httpx.Response( + status_code=409, + json={"error": "secret already exists"}, + ) + + with pytest.raises(Exception) as exc: + await client.create_secret(secret_body) + + assert str(exc.value) == "secret already exists" + + +@pytest.mark.asyncio +async def test_update_secret_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.put method + mock_put = mocker.patch("httpx.AsyncClient.put") + + # Set up the mock response + mock_put.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "test_secret_guid", "name": 
"test_secret"}, + }, + ) + + # Call the update_secret method + response = await client.update_secret("mock_secret_guid", body=secret_body) + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_secret_guid" + + +@pytest.mark.asyncio +async def test_delete_secret_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.delete method + mock_delete = mocker.patch("httpx.AsyncClient.delete") + + # Set up the mock response + mock_delete.return_value = httpx.Response( + status_code=204, + json={"success": True}, + ) + + # Call the delete_secret method + response = await client.delete_secret("mock_secret_guid") + + # Validate the response + assert response == {"success": True} + + +@pytest.mark.asyncio +async def test_get_secret_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.get method + mock_get = mocker.patch("httpx.AsyncClient.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "test_secret_guid", "name": "test_secret"}, + }, + ) + + # Call the get_secret method + response = await client.get_secret("mock_secret_guid") + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_secret_guid" + assert response.metadata.name == "test_secret" diff --git a/tests/async_tests/test_staticroute_async.py b/tests/async_tests/test_staticroute_async.py new file mode 100644 index 0000000..91e7da3 --- /dev/null +++ b/tests/async_tests/test_staticroute_async.py @@ -0,0 +1,148 @@ +import httpx +import pytest +from munch import Munch +from asyncmock import AsyncMock + +from tests.data.mock_data import staticroute_body # noqa: F401 +from tests.utils.fixtures import async_client as client # noqa: F401 + + +@pytest.mark.asyncio +async def test_list_staticroutes_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.get method + mock_get = mocker.patch("httpx.AsyncClient.get") + + # Set up the mock responses for pagination + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"continue": 1}, + "items": [{"name": "test-staticroute", "guid": "mock_staticroute_guid"}], + }, + ) + + # Call the list_staticroutes method + response = await client.list_staticroutes() + + # Validate the response + assert isinstance(response, Munch) + assert response["items"] == [ + {"name": "test-staticroute", "guid": "mock_staticroute_guid"} + ] + + +@pytest.mark.asyncio +async def test_list_staticroutes_not_found(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.get method + mock_get = mocker.patch("httpx.AsyncClient.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=404, + json={"error": "not found"}, + ) + + with pytest.raises(Exception) as exc: + await client.list_staticroutes() + + assert str(exc.value) == "not found" + + +@pytest.mark.asyncio +async def test_create_staticroute_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.post method + mock_post = mocker.patch("httpx.AsyncClient.post") + + # Set up the mock response + mock_post.return_value = httpx.Response( + status_code=201, + json={ + "metadata": {"guid": "test_staticroute_guid", "name": "test_staticroute"}, + }, + ) + + # Call the create_staticroute method + response = await client.create_staticroute(body=staticroute_body) + + # Validate the response + assert isinstance(response, Munch) + assert 
response.metadata.guid == "test_staticroute_guid" + + +@pytest.mark.asyncio +async def test_create_staticroute_bad_request(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.post method + mock_post = mocker.patch("httpx.AsyncClient.post") + + # Set up the mock response + mock_post.return_value = httpx.Response( + status_code=409, + json={"error": "already exists"}, + ) + + with pytest.raises(Exception) as exc: + await client.create_staticroute(body=staticroute_body) + + assert str(exc.value) == "already exists" + + +@pytest.mark.asyncio +async def test_get_staticroute_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.get method + mock_get = mocker.patch("httpx.AsyncClient.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "test_staticroute_guid", "name": "test_staticroute"}, + }, + ) + + # Call the get_staticroute method + response = await client.get_staticroute(name="mock_staticroute_name") + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_staticroute_guid" + + +@pytest.mark.asyncio +async def test_update_staticroute_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.put method + mock_put = mocker.patch("httpx.AsyncClient.put") + + # Set up the mock response + mock_put.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "test_staticroute_guid", "name": "test_staticroute"}, + }, + ) + + # Call the update_staticroute method + response = await client.update_staticroute( + name="mock_staticroute_name", body=staticroute_body + ) + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_staticroute_guid" + + +@pytest.mark.asyncio +async def test_delete_staticroute_success(client, mocker: AsyncMock): # noqa: F811 + # Mock the httpx.AsyncClient.delete method + mock_delete = mocker.patch("httpx.AsyncClient.delete") + + # Set up the mock response + mock_delete.return_value = httpx.Response( + status_code=204, + json={"success": True}, + ) + + # Call the delete_staticroute method + response = await client.delete_staticroute(name="mock_staticroute_name") + + # Validate the response + assert response["success"] is True diff --git a/tests/data/mock_data.py b/tests/data/mock_data.py new file mode 100644 index 0000000..08e5887 --- /dev/null +++ b/tests/data/mock_data.py @@ -0,0 +1,137 @@ +import pytest + +from rapyuta_io_sdk_v2 import Configuration + + +@pytest.fixture +def mock_response_project(): + return { + "kind": "Project", + "metadata": {"name": "test-project", "guid": "mock_project_guid"}, + "spec": { + "users": [{"userGUID": "mock_user_guid", "emailID": "test.user@example.com"}] + }, + } + + +@pytest.fixture +def project_body(): + return { + "apiVersion": "api.rapyuta.io/v2", + "kind": "Project", + "metadata": { + "name": "test-project", + "labels": {"purpose": "testing", "version": "1.0"}, + }, + "spec": { + "users": [{"emailID": "test.user@example.com", "role": "admin"}], + "features": {"vpn": {"enabled": False}}, + }, + } + + +@pytest.fixture +def package_body(): + return { + "apiVersion": "apiextensions.rapyuta.io/v1", + "kind": "Package", + "metadata": { + "name": "test-package", + "version": "v1.0.0", + "description": "Test package for demo", + "labels": {"app": "test"}, + "projectguid": "mock_project_guid", + }, + "spec": {"runtime": "cloud", "cloud": {"enabled": True}}, + } + + +@pytest.fixture +def 
deployment_body(): + return { + "apiVersion": "apiextensions.rapyuta.io/v1", + "kind": "Deployment", + "metadata": { + "name": "test-deployment", + "depends": { + "kind": "Package", + "nameOrGUID": "mock_package_guid", + }, + }, + "restart": "Always", + } + + +@pytest.fixture +def disk_body(): + return { + "apiVersion": "apiextensions.rapyuta.io/v1", + "kind": "Disk", + "metadata": { + "name": "test-disk", + "labels": {"app": "test"}, + }, + "spec": { + "runtime": "cloud", + "capacity": "4", + }, + } + + +@pytest.fixture +def staticroute_body(): + return { + "apiVersion": "apiextensions.rapyuta.io/v1", + "kind": "StaticRoute", + "metadata": { + "name": "test-staticroute", + "region": "jp", + "labels": {"app": "test"}, + }, + } + + +@pytest.fixture +def network_body(): + return { + "apiVersion": "apiextensions.rapyuta.io/v1", + "kind": "Network", + "metadata": { + "name": "test-network", + "region": "jp", + "labels": {"app": "test"}, + }, + } + + +@pytest.fixture +def secret_body(): + return { + "apiVersion": "apiextensions.rapyuta.io/v1", + "kind": "Secret", + "metadata": { + "name": "test-secret", + "labels": {"app": "test"}, + }, + } + + +@pytest.fixture +def configtree_body(): + return { + "apiVersion": "apiextensions.rapyuta.io/v1", + "kind": "ConfigTree", + "metadata": { + "name": "test-configtree", + "labels": {"app": "test"}, + }, + } + + +@pytest.fixture +def mock_config(): + return Configuration( + project_guid="mock_project_guid", + organization_guid="mock_org_guid", + auth_token="mock_auth_token", + ) diff --git a/tests/sync_tests/test_config.py b/tests/sync_tests/test_config.py new file mode 100644 index 0000000..3d5edf7 --- /dev/null +++ b/tests/sync_tests/test_config.py @@ -0,0 +1,76 @@ +import json +from rapyuta_io_sdk_v2.config import Configuration +from tests.data.mock_data import mock_config # noqa: F401 + + +def test_from_file(mocker): + # Mock configuration file content + mock_config_data = { + "project_id": "mock_project_guid", + "organization_id": "mock_org_guid", + "auth_token": "mock_auth_token", + } + mock_file_content = json.dumps(mock_config_data) + + # Mock the open function + mocker.patch("builtins.open", mocker.mock_open(read_data=mock_file_content)) + + # Mock the default directory function + mocker.patch( + "rapyuta_io_sdk_v2.config.get_default_app_dir", return_value="/mock/default/dir" + ) + + # Call the method to test + config = Configuration.from_file(file_path="/mock/path/to/config.json") + + # Assert the Configuration object contains the expected values + assert config.project_guid == mock_config_data["project_id"] + assert config.organization_guid == mock_config_data["organization_id"] + assert config.auth_token == mock_config_data["auth_token"] + + +def test_get_headers_basic(mock_config): # noqa: F811 + # Call the method without passing any arguments + headers = mock_config.get_headers() + + # Verify the headers + assert headers["Authorization"] == "Bearer mock_auth_token" + assert headers["organizationguid"] == "mock_org_guid" + assert headers["project"] == "mock_project_guid" + + +def test_get_headers_without_project(mock_config): # noqa: F811 + # Call the method with `with_project=False` + headers = mock_config.get_headers(with_project=False) + + # Verify the headers + assert headers["Authorization"] == "Bearer mock_auth_token" + assert headers["organizationguid"] == "mock_org_guid" + assert "project" not in headers + + +def test_get_headers_with_custom_values(mock_config): # noqa: F811 + # Call the method with custom organization_guid and 
project_guid + headers = mock_config.get_headers( + organization_guid="custom_org_guid", + project_guid="custom_project_guid", + ) + + # Verify the headers + assert headers["Authorization"] == "Bearer mock_auth_token" + assert headers["organizationguid"] == "custom_org_guid" + assert headers["project"] == "custom_project_guid" + + +def test_get_headers_with_request_id(mocker, mock_config): # noqa: F811 + # Mock the environment variable + mocker.patch("os.getenv", return_value="mock_request_id") + + # Call the method + headers = mock_config.get_headers() + + # Verify the headers + assert headers["Authorization"] == "Bearer mock_auth_token" + assert headers["organizationguid"] == "mock_org_guid" + assert headers["project"] == "mock_project_guid" + assert headers["X-Request-ID"] == "mock_request_id" diff --git a/tests/sync_tests/test_configtree.py b/tests/sync_tests/test_configtree.py new file mode 100644 index 0000000..3d2f27a --- /dev/null +++ b/tests/sync_tests/test_configtree.py @@ -0,0 +1,341 @@ +import httpx +import pytest +from munch import Munch +from pytest_mock import MockFixture + +from tests.data.mock_data import configtree_body # noqa: F401 +from tests.utils.fixtures import client # noqa: F401 + + +def test_list_configtrees_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock responses for pagination + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"continue": 1}, + "items": [{"name": "test-configtree", "guid": "mock_configtree_guid"}], + }, + ) + + # Call the list_configtrees method + response = client.list_configtrees() + + # Validate the response + assert isinstance(response, Munch) + assert response["items"] == [ + {"name": "test-configtree", "guid": "mock_configtree_guid"} + ] + + +def test_list_configtrees_bad_gateway(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=502, + json={"error": "bad gateway"}, + ) + + # Call the list_configtrees method + with pytest.raises(Exception) as exc: + client.list_configtrees() + + assert str(exc.value) == "bad gateway" + + +def test_create_configtree_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.post method + mock_post = mocker.patch("httpx.Client.post") + + # Set up the mock response + mock_post.return_value = httpx.Response( + status_code=201, + json={ + "metadata": {"guid": "test_configtree_guid", "name": "test_configtree"}, + }, + ) + + # Call the create_configtree method + response = client.create_configtree(configtree_body) + + # Validate the response + assert isinstance(response, Munch) + assert response["metadata"]["guid"] == "test_configtree_guid" + + +def test_create_configtree_service_unavailable(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.post method + mock_post = mocker.patch("httpx.Client.post") + + # Set up the mock response + mock_post.return_value = httpx.Response( + status_code=503, + json={"error": "service unavailable"}, + ) + + # Call the create_configtree method + with pytest.raises(Exception) as exc: + client.create_configtree(configtree_body) + + assert str(exc.value) == "service unavailable" + + +def test_get_configtree_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set 
up the mock response + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "test_configtree_guid", "name": "test_configtree"}, + }, + ) + + # Call the get_configtree method + response = client.get_configtree(name="mock_configtree_name") + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_configtree_guid" + assert response.metadata.name == "test_configtree" + + +def test_set_configtree_revision_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.put method + mock_put = mocker.patch("httpx.Client.put") + + # Set up the mock response + mock_put.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "test_configtree_guid", "name": "test_configtree"}, + }, + ) + + # Call the set_configtree_revision method + response = client.set_configtree_revision( + name="mock_configtree_name", configtree=configtree_body + ) + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_configtree_guid" + assert response.metadata.name == "test_configtree" + + +def test_update_configtree_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.put method + mock_put = mocker.patch("httpx.Client.put") + + # Set up the mock response + mock_put.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "test_configtree_guid", "name": "test_configtree"}, + }, + ) + + # Call the update_configtree method + response = client.update_configtree(name="mock_configtree_name", body=configtree_body) + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_configtree_guid" + assert response.metadata.name == "test_configtree" + + +def test_delete_configtree_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.delete method + mock_delete = mocker.patch("httpx.Client.delete") + + # Set up the mock response + mock_delete.return_value = httpx.Response( + status_code=204, + json={"success": True}, + ) + + # Call the delete_configtree method + response = client.delete_configtree(name="mock_configtree_name") + + # Validate the response + assert response["success"] is True + + +def test_list_revisions_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock responses for pagination + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"continue": 1}, + "items": [{"name": "test-configtree", "guid": "mock_configtree_guid"}], + }, + ) + + # Call the list_revisions method + response = client.list_revisions(tree_name="mock_configtree_name") + + # Validate the response + assert isinstance(response, Munch) + assert response["items"] == [ + {"name": "test-configtree", "guid": "mock_configtree_guid"} + ] + + +def test_create_revision_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.post method + mock_post = mocker.patch("httpx.Client.post") + + # Set up the mock response + mock_post.return_value = httpx.Response( + status_code=201, + json={ + "metadata": {"guid": "test_revision_guid", "name": "test_revision"}, + }, + ) + + # Call the create_revision method + response = client.create_revision(name="mock_configtree_name", body=configtree_body) + + # Validate the response + assert isinstance(response, Munch) + assert response["metadata"]["guid"] == "test_revision_guid" + + +def 
test_put_keys_in_revision_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.put method + mock_put = mocker.patch("httpx.Client.put") + + # Set up the mock response + mock_put.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "test_revision_guid", "name": "test_revision"}, + }, + ) + + # Call the put_keys_in_revision method + response = client.put_keys_in_revision( + name="mock_configtree_name", + revision_id="mock_revision_id", + config_values=["mock_value1", "mock_value2"], + ) + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_revision_guid" + assert response.metadata.name == "test_revision" + + +def test_commit_revision_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.put method + mock_patch = mocker.patch("httpx.Client.patch") + + # Set up the mock response + mock_patch.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "test_revision_guid", "name": "test_revision"}, + }, + ) + + # Call the commit_revision method + response = client.commit_revision( + tree_name="mock_configtree_name", + revision_id="mock_revision_id", + ) + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_revision_guid" + assert response.metadata.name == "test_revision" + + +def test_get_key_in_revision(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "test_revision_guid", "name": "test_revision"}, + }, + ) + + # Call the get_key_in_revision method + response = client.get_key_in_revision( + tree_name="mock_configtree_name", revision_id="mock_revision_id", key="mock_key" + ) + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_revision_guid" + assert response.metadata.name == "test_revision" + + +def test_put_key_in_revision_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.put method + mock_put = mocker.patch("httpx.Client.put") + + # Set up the mock response + mock_put.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "test_revision_guid", "name": "test_revision"}, + }, + ) + + # Call the put_key_in_revision method + response = client.put_key_in_revision( + tree_name="mock_configtree_name", revision_id="mock_revision_id", key="mock_key" + ) + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_revision_guid" + assert response.metadata.name == "test_revision" + + +def test_delete_key_in_revision_success(client, mocker: MockFixture): # noqa: F811 + mock_delete = mocker.patch("httpx.Client.delete") + + mock_delete.return_value = httpx.Response( + status_code=204, + json={"success": True}, + ) + + response = client.delete_key_in_revision( + tree_name="mock_configtree_name", revision_id="mock_revision_id", key="mock_key" + ) + + assert response["success"] is True + + +def test_rename_key_in_revision_success(client, mocker: MockFixture): # noqa: F811 + mock_patch = mocker.patch("httpx.Client.patch") + + mock_patch.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "test_revision_guid", "name": "test_revision"}, + }, + ) + + response = client.rename_key_in_revision( + tree_name="mock_configtree_name", + 
revision_id="mock_revision_id", + key="mock_key", + config_key_rename={"metadata": {"name": "test_key"}}, + ) + + assert isinstance(response, Munch) + assert response.metadata.guid == "test_revision_guid" + assert response.metadata.name == "test_revision" diff --git a/tests/sync_tests/test_deployment.py b/tests/sync_tests/test_deployment.py new file mode 100644 index 0000000..3e30af4 --- /dev/null +++ b/tests/sync_tests/test_deployment.py @@ -0,0 +1,142 @@ +import httpx +import pytest +from munch import Munch +from pytest_mock import MockFixture + +from tests.data.mock_data import deployment_body # noqa: F401 +from tests.utils.fixtures import client # noqa: F401 + + +def test_list_deployments_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock responses for pagination + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"continue": 1}, + "items": [{"name": "test-deployment", "guid": "mock_deployment_guid"}], + }, + ) + + # Call the list_deployments method + response = client.list_deployments() + + # Validate the response + assert isinstance(response, Munch) + assert response["items"] == [ + {"name": "test-deployment", "guid": "mock_deployment_guid"} + ] + + +def test_list_deployments_not_found(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=404, + json={"error": "not found"}, + ) + + with pytest.raises(Exception) as exc: + client.list_deployments() + + assert str(exc.value) == "not found" + + +def test_get_deployment_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "kind": "Deployment", + "metadata": {"guid": "test_deployment_guid", "name": "test_deployment"}, + }, + ) + + # Call the get_deployment method + response = client.get_deployment(name="mock_deployment_name") + + # Validate the response + assert isinstance(response, Munch) + assert response["metadata"]["guid"] == "test_deployment_guid" + + +def test_get_deployment_not_found(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=404, + json={"error": "deployment not found"}, + ) + + # Call the get_deployment method + with pytest.raises(Exception) as exc: + client.get_deployment(name="mock_deployment_name") + + assert str(exc.value) == "deployment not found" + + +def test_create_deployment_success(client, deployment_body, mocker: MockFixture): # noqa: F811 + mock_post = mocker.patch("httpx.Client.post") + + mock_post.return_value = httpx.Response( + status_code=200, + json={ + "kind": "Deployment", + "metadata": {"guid": "test_deployment_guid", "name": "test_deployment"}, + }, + ) + + response = client.create_deployment(body=deployment_body) + + assert isinstance(response, Munch) + assert response["metadata"]["guid"] == "test_deployment_guid" + + +def test_create_deployment_unauthorized(client, deployment_body, mocker: MockFixture): # noqa: F811 + mock_post = mocker.patch("httpx.Client.post") + + mock_post.return_value = httpx.Response( + status_code=401, + json={"error": "unauthorized"}, + ) + + 
with pytest.raises(Exception) as exc: + client.create_deployment(body=deployment_body) + + assert str(exc.value) == "unauthorized" + + +def test_update_deployment_success(client, deployment_body, mocker: MockFixture): # noqa: F811 + mock_put = mocker.patch("httpx.Client.put") + + mock_put.return_value = httpx.Response( + status_code=200, + json={ + "kind": "Deployment", + "metadata": {"guid": "test_deployment_guid", "name": "test_deployment"}, + }, + ) + + response = client.update_deployment(name="mock_deployment_name", body=deployment_body) + + assert isinstance(response, Munch) + assert response["metadata"]["guid"] == "test_deployment_guid" + + +def test_delete_deployment_success(client, mocker: MockFixture): # noqa: F811 + mock_delete = mocker.patch("httpx.Client.delete") + + mock_delete.return_value = httpx.Response(status_code=204, json={"success": True}) + + response = client.delete_deployment(name="mock_deployment_name") + + assert response["success"] is True diff --git a/tests/sync_tests/test_disk.py b/tests/sync_tests/test_disk.py new file mode 100644 index 0000000..cb9e01e --- /dev/null +++ b/tests/sync_tests/test_disk.py @@ -0,0 +1,134 @@ +import httpx +import pytest +from munch import Munch +from pytest_mock import MockFixture + +from tests.data.mock_data import disk_body # noqa: F401 +from tests.utils.fixtures import client # noqa: F401 + + +def test_list_disks_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up mock responses for pagination + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"continue": 1}, + "items": [{"name": "test-disk", "guid": "mock_disk_guid"}], + }, + ) + + # Call the list_disks method + response = client.list_disks() + + # Validate the response + assert isinstance(response, Munch) + assert response["items"] == [{"name": "test-disk", "guid": "mock_disk_guid"}] + + +def test_list_disks_not_found(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=404, + json={"error": "not found"}, + ) + + with pytest.raises(Exception) as exc: + client.list_disks() + + assert str(exc.value) == "not found" + + +def test_get_disk_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "kind": "Disk", + "metadata": {"guid": "test_disk_guid", "name": "mock_disk_name"}, + }, + ) + + # Call the get_disk method + response = client.get_disk(name="mock_disk_name") + + # Validate the response + assert isinstance(response, Munch) + assert response["metadata"]["guid"] == "test_disk_guid" + + +def test_get_disk_not_found(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=404, + json={"error": "disk not found"}, + ) + + # Call the get_disk method + with pytest.raises(Exception) as exc: + client.get_disk(name="mock_disk_name") + + assert str(exc.value) == "disk not found" + + +def test_create_disk_success(client, disk_body, mocker: MockFixture): # noqa: F811 + mock_post = mocker.patch("httpx.Client.post") + + mock_post.return_value = httpx.Response( + 
status_code=200, + json={ + "kind": "Disk", + "metadata": {"guid": "test_disk_guid", "name": "test_disk"}, + }, + ) + + response = client.create_disk(body=disk_body, project_guid="mock_project_guid") + + assert isinstance(response, Munch) + assert response.metadata.guid == "test_disk_guid" + assert response.metadata.name == "test_disk" + + +def test_delete_disk_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.delete method + mock_delete = mocker.patch("httpx.Client.delete") + + # Set up the mock response + mock_delete.return_value = httpx.Response( + status_code=204, + json={"success": True}, + ) + + # Call the delete_disk method + response = client.delete_disk(name="mock_disk_name") + + # Validate the response + assert response["success"] is True + + +def test_delete_disk_not_found(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.delete method + mock_delete = mocker.patch("httpx.Client.delete") + + # Set up the mock response + mock_delete.return_value = httpx.Response( + status_code=404, + json={"error": "disk not found"}, + ) + + # Call the delete_disk method + with pytest.raises(Exception) as exc: + client.delete_disk(name="mock_disk_name") + + assert str(exc.value) == "disk not found" diff --git a/tests/sync_tests/test_main.py b/tests/sync_tests/test_main.py new file mode 100644 index 0000000..b906dae --- /dev/null +++ b/tests/sync_tests/test_main.py @@ -0,0 +1,62 @@ +import httpx +import pytest +from pytest_mock import MockFixture + +from tests.utils.fixtures import client # noqa: F401 + + +def test_get_auth_token_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.post method + mock_post = mocker.patch("httpx.Client.post") + + # Set up the mock response + mock_post.return_value = httpx.Response( + status_code=200, + json={ + "success": True, + "data": { + "token": "mock_token", + }, + }, + ) + + # Call the get_auth_token method + response = client.get_auth_token(email="mock_email", password="mock_password") + + assert response == "mock_token" + + +def test_login_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.post method + mock_post = mocker.patch("httpx.Client.post") + + # Set up the mock response + mock_post.return_value = None + + # Mock the `get_auth_token` method + mocker.patch.object(client, "get_auth_token", return_value="mock_token_2") + + # Call the login method + client.login(email="mock_email", password="mock_password") + + assert client.config.auth_token == "mock_token_2" + + +def test_login_failure(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.post method + mock_post = mocker.patch("httpx.Client.post") + + # Set up the mock response + mock_post.return_value = None + + mocker.patch.object( + client, + "get_auth_token", + side_effect=Exception("unauthorized permission access"), + ) + + # Call the login method + with pytest.raises(Exception) as e: + client.login(email="mock_email", password="mock_password") + + assert str(e.value) == "unauthorized permission access" diff --git a/tests/sync_tests/test_managedservice.py b/tests/sync_tests/test_managedservice.py new file mode 100644 index 0000000..cdd08d3 --- /dev/null +++ b/tests/sync_tests/test_managedservice.py @@ -0,0 +1,199 @@ +import httpx +import pytest # noqa: F401 +from munch import Munch +from pytest_mock import MockFixture + +from tests.utils.fixtures import client # noqa: F401 + + +def test_list_providers_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method 
+ mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock responses for pagination + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"continue": 1}, + "items": [{"name": "test-provider", "guid": "mock_provider_guid"}], + }, + ) + + # Call the list_providers method + response = client.list_providers() + + # Validate the response + assert isinstance(response, Munch) + assert response["items"] == [{"name": "test-provider", "guid": "mock_provider_guid"}] + + +def test_list_instances_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock responses for pagination + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"continue": 1}, + "items": [{"name": "test-instance", "guid": "mock_instance_guid"}], + }, + ) + + # Call the list_instances method + response = client.list_instances() + + # Validate the response + assert isinstance(response, Munch) + assert response["items"] == [{"name": "test-instance", "guid": "mock_instance_guid"}] + + +def test_get_instance_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "test_instance_guid", "name": "test_instance"}, + }, + ) + + # Call the get_instance method + response = client.get_instance(name="mock_instance_name") + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_instance_guid" + + +def test_create_instance_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.post method + mock_post = mocker.patch("httpx.Client.post") + + # Set up the mock response + mock_post.return_value = httpx.Response( + status_code=201, + json={ + "metadata": {"guid": "test_instance_guid", "name": "test_instance"}, + }, + ) + + # Call the create_instance method + response = client.create_instance(body={"name": "test_instance"}) + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_instance_guid" + + +def test_delete_instance_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.delete method + mock_delete = mocker.patch("httpx.Client.delete") + + # Set up the mock response + mock_delete.return_value = httpx.Response( + status_code=204, + json={"success": True}, + ) + + # Call the delete_instance method + response = client.delete_instance(name="mock_instance_name") + + # Validate the response + assert response["success"] is True + + +def test_list_instance_bindings_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock responses for pagination + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"continue": 1}, + "items": [ + {"name": "test-instance-binding", "guid": "mock_instance_binding_guid"} + ], + }, + ) + + # Call the list_instance_bindings method + response = client.list_instance_bindings("mock_instance_name") + + # Validate the response + assert isinstance(response, Munch) + assert response["items"] == [ + {"name": "test-instance-binding", "guid": "mock_instance_binding_guid"} + ] + + +def test_get_instance_binding_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get 
= mocker.patch("httpx.Client.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": { + "guid": "test_instance_binding_guid", + "name": "test_instance_binding", + }, + }, + ) + + # Call the get_instance_binding method + response = client.get_instance_binding( + name="mock_instance_binding_name", instance_name="mock_instance_name" + ) + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_instance_binding_guid" + + +def test_create_instance_binding_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.post method + mock_post = mocker.patch("httpx.Client.post") + + # Set up the mock response + mock_post.return_value = httpx.Response( + status_code=201, + json={ + "metadata": { + "guid": "test_instance_binding_guid", + "name": "test_instance_binding", + }, + }, + ) + + # Call the create_instance_binding method + response = client.create_instance_binding( + body={"name": "test_instance_binding"}, instance_name="mock_instance_name" + ) + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_instance_binding_guid" + + +def test_delete_instance_binding_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.delete method + mock_delete = mocker.patch("httpx.Client.delete") + + # Set up the mock response + mock_delete.return_value = httpx.Response( + status_code=204, + json={"success": True}, + ) + + # Call the delete_instance_binding method + response = client.delete_instance_binding( + name="mock_instance_binding_name", instance_name="mock_instance_name" + ) + + # Validate the response + assert response["success"] is True diff --git a/tests/sync_tests/test_network.py b/tests/sync_tests/test_network.py new file mode 100644 index 0000000..6fe9826 --- /dev/null +++ b/tests/sync_tests/test_network.py @@ -0,0 +1,117 @@ +import httpx +import pytest +from munch import Munch +from pytest_mock import MockFixture + +from tests.data.mock_data import network_body # noqa: F401 +from tests.utils.fixtures import client # noqa: F401 + + +def test_list_networks_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock responses for pagination + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"continue": 1}, + "items": [{"name": "test-network", "guid": "mock_network_guid"}], + }, + ) + + # Call the list_networks method + response = client.list_networks() + + # Validate the response + assert isinstance(response, Munch) + assert response["items"] == [{"name": "test-network", "guid": "mock_network_guid"}] + + +def test_list_networks_not_found(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=404, + json={"error": "not found"}, + ) + + with pytest.raises(Exception) as exc: + client.list_networks() + + assert str(exc.value) == "not found" + + +def test_create_network_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.post method + mock_post = mocker.patch("httpx.Client.post") + + # Set up the mock response + mock_post.return_value = httpx.Response( + status_code=201, + json={ + "metadata": {"guid": "mock_network_guid", "name": "test-network"}, + }, + ) + + # Call the create_network method + 
response = client.create_network(body=network_body) + + # Validate the response + assert isinstance(response, Munch) + assert response["metadata"]["name"] == "test-network" + + +def test_create_network_failure(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.post method + mock_post = mocker.patch("httpx.Client.post") + + # Set up the mock response + mock_post.return_value = httpx.Response( + status_code=409, + json={"error": "already exists"}, + ) + + with pytest.raises(Exception) as exc: + client.create_network(body=network_body) + + assert str(exc.value) == "already exists" + + +def test_get_network_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "mock_network_guid", "name": "test-network"}, + }, + ) + + # Call the get_network method + response = client.get_network(name="test-network") + + # Validate the response + assert isinstance(response, Munch) + assert response["metadata"]["guid"] == "mock_network_guid" + + +def test_delete_network_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.delete method + mock_delete = mocker.patch("httpx.Client.delete") + + # Set up the mock response + mock_delete.return_value = httpx.Response( + status_code=204, + json={"success": True}, + ) + + # Call the delete_network method + response = client.delete_network(name="test-network") + + # Validate the response + assert response["success"] is True diff --git a/tests/sync_tests/test_package.py b/tests/sync_tests/test_package.py new file mode 100644 index 0000000..c8c8991 --- /dev/null +++ b/tests/sync_tests/test_package.py @@ -0,0 +1,107 @@ +import httpx +import pytest +from munch import Munch +from pytest_mock import MockFixture + +from tests.data.mock_data import package_body # noqa: F401 +from tests.utils.fixtures import client # noqa: F401 + + +def test_list_packages_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock responses for pagination + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"continue": 1}, + "items": [{"name": "test_package", "guid": "mock_package_guid"}], + }, + ) + + # Call the list_packages method + response = client.list_packages() + + # Validate the response + assert isinstance(response, Munch) + assert response["items"] == [{"name": "test_package", "guid": "mock_package_guid"}] + + +def test_list_packages_not_found(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=404, + json={"error": "not found"}, + ) + + # Call the list_packages method + with pytest.raises(Exception) as exc: + client.list_packages() + + # Validate the exception message + assert str(exc.value) == "not found" + # assert response. 
== "not found" + + +def test_get_package_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "test_package_guid", "name": "test_package"}, + }, + ) + + # Call the get_package method + response = client.get_package(name="mock_package_name") + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_package_guid" + assert response.metadata.name == "test_package" + + +def test_get_package_not_found(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=404, + json={"error": "not found"}, + ) + + # Call the get_package method + with pytest.raises(Exception) as exc: + client.get_package(name="mock_package_name") + + # Validate the exception message + assert str(exc.value) == "not found" + + +def test_create_package_success(client, package_body, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.post method + mock_post = mocker.patch("httpx.Client.post") + + # Set up the mock response + mock_post.return_value = httpx.Response( + status_code=201, + json={ + "metadata": {"guid": "test_package_guid", "name": "test_package"}, + }, + ) + + # Call the create_package method + response = client.create_package(body=package_body) + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_package_guid" + assert response.metadata.name == "test_package" diff --git a/tests/sync_tests/test_project.py b/tests/sync_tests/test_project.py new file mode 100644 index 0000000..b398052 --- /dev/null +++ b/tests/sync_tests/test_project.py @@ -0,0 +1,175 @@ +import httpx +import pytest +from munch import Munch +from pytest_mock import MockFixture + +from tests.data.mock_data import ( + mock_response_project, # noqa: F401 + project_body, +) # noqa: F401 +from tests.utils.fixtures import client # noqa: F401 + + +# Test function for list_projects +def test_list_projects_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up mock responses for pagination + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"continue": 1}, + "items": [{"name": "test-project", "guid": "mock_project_guid"}], + }, + ) + + # Call the list_projects method + response = client.list_projects() + + # Validate the response + assert isinstance(response, Munch) + assert response["items"] == [{"name": "test-project", "guid": "mock_project_guid"}] + + +def test_list_projects_unauthorized(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=401, + json={"error": "unauthorized permission access"}, + ) + + # Call the list_projects method + with pytest.raises(Exception) as exc: + client.list_projects() + + # Validate the exception message + assert str(exc.value) == "unauthorized permission access" + + +def test_list_projects_not_found(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock response + mock_get.return_value = 
httpx.Response( + status_code=404, + json={"error": "not found"}, + ) + + # Call the list_projects method + with pytest.raises(Exception) as exc: + client.list_projects() + + # Validate the exception message + assert str(exc.value) == "not found" + + +def test_get_project_success(client, mock_response_project, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "kind": "Project", + "metadata": {"guid": "test_project_guid", "name": "test_project"}, + }, + ) + + # Call the get_project method + response = client.get_project(project_guid="mock_project_guid") + + # Validate the response + assert isinstance(response, Munch) + assert response["metadata"]["guid"] == "test_project_guid" + + +def test_get_project_not_found(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=404, + json={"error": "project not found"}, + ) + + # Call the get_project method + with pytest.raises(Exception) as exc: + client.get_project(project_guid="mock_project_guid") + + # Validate the exception message + assert str(exc.value) == "project not found" + + +def test_create_project_success(client, mock_response_project, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.post method + mock_post = mocker.patch("httpx.Client.post") + + # Set up the mock response + mock_post.return_value = httpx.Response( + status_code=201, + json=mock_response_project, + ) + + # Call the create_project method + response = client.create_project(body=project_body) + + # Validate the response + assert isinstance(response, Munch) + assert response["metadata"]["guid"] == "mock_project_guid" + + +def test_create_project_unauthorized(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.post method + mock_post = mocker.patch("httpx.Client.post") + + # Set up the mock response + mock_post.return_value = httpx.Response( + status_code=401, + json={"error": "unauthorized permission access"}, + ) + + # Call the create_project method + with pytest.raises(Exception) as exc: + client.create_project(body=project_body) + + # Validate the exception message + assert str(exc.value) == "unauthorized permission access" + + +def test_update_project_success(client, mock_response_project, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.put method + mock_put = mocker.patch("httpx.Client.put") + + # Set up the mock response + mock_put.return_value = httpx.Response( + status_code=200, + json=mock_response_project, + ) + + # Call the update_project method + response = client.update_project(project_guid="mock_project_guid", body=project_body) + + # Validate the response + assert isinstance(response, Munch) + assert response["metadata"]["guid"] == "mock_project_guid" + + +def test_delete_project_success(client, mock_response_project, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.delete method + mock_delete = mocker.patch("httpx.Client.delete") + + # Set up the mock response + mock_delete.return_value = httpx.Response(status_code=200, json={"success": True}) + + # Call the delete_project method + response = client.delete_project(project_guid="mock_project_guid") + + # Validate the response + assert response["success"] is True diff --git a/tests/sync_tests/test_secret.py 
b/tests/sync_tests/test_secret.py new file mode 100644 index 0000000..5d1e6ad --- /dev/null +++ b/tests/sync_tests/test_secret.py @@ -0,0 +1,138 @@ +import httpx +import pytest +from munch import Munch +from pytest_mock import MockFixture + +from tests.data.mock_data import secret_body # noqa: F401 +from tests.utils.fixtures import client # noqa: F401 + + +def test_list_secrets_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up mock responses for pagination + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"continue": 1}, + "items": [{"name": "test-secret", "guid": "mock_secret_guid"}], + }, + ) + + # Call the list_secrets method + response = client.list_secrets() + + # Validate the response + assert isinstance(response, Munch) + assert response["items"] == [{"name": "test-secret", "guid": "mock_secret_guid"}] + + +def test_list_secrets_not_found(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=404, + json={"error": "not found"}, + ) + + with pytest.raises(Exception) as exc: + client.list_secrets() + + assert str(exc.value) == "not found" + + +def test_create_secret_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.post method + mock_post = mocker.patch("httpx.Client.post") + + # Set up the mock response + mock_post.return_value = httpx.Response( + status_code=201, + json={ + "metadata": {"guid": "test_secret_guid", "name": "test_secret"}, + }, + ) + + # Call the create_secret method + response = client.create_secret(secret_body) + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_secret_guid" + + +def test_create_secret_already_exists(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.post method + mock_post = mocker.patch("httpx.Client.post") + + # Set up the mock response + mock_post.return_value = httpx.Response( + status_code=409, + json={"error": "secret already exists"}, + ) + + with pytest.raises(Exception) as exc: + client.create_secret(secret_body) + + assert str(exc.value) == "secret already exists" + + +def test_update_secret_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.put method + mock_put = mocker.patch("httpx.Client.put") + + # Set up the mock response + mock_put.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "test_secret_guid", "name": "test_secret"}, + }, + ) + + # Call the update_secret method + response = client.update_secret("mock_secret_guid", body=secret_body) + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_secret_guid" + + +def test_delete_secret_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.delete method + mock_delete = mocker.patch("httpx.Client.delete") + + # Set up the mock response + mock_delete.return_value = httpx.Response( + status_code=204, + json={"success": True}, + ) + + # Call the delete_secret method + response = client.delete_secret("mock_secret_guid") + + # Validate the response + assert response == {"success": True} + + +def test_get_secret_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock response + 
mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "test_secret_guid", "name": "test_secret"}, + }, + ) + + # Call the get_secret method + response = client.get_secret("mock_secret_guid") + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_secret_guid" + assert response.metadata.name == "test_secret" diff --git a/tests/sync_tests/test_staticroute.py b/tests/sync_tests/test_staticroute.py new file mode 100644 index 0000000..24bb1eb --- /dev/null +++ b/tests/sync_tests/test_staticroute.py @@ -0,0 +1,141 @@ +import httpx +import pytest +from munch import Munch +from pytest_mock import MockFixture + +from tests.data.mock_data import staticroute_body # noqa: F401 +from tests.utils.fixtures import client # noqa: F401 + + +def test_list_staticroutes_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock responses for pagination + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"continue": 1}, + "items": [{"name": "test-staticroute", "guid": "mock_staticroute_guid"}], + }, + ) + + # Call the list_staticroutes method + response = client.list_staticroutes() + + # Validate the response + assert isinstance(response, Munch) + assert response["items"] == [ + {"name": "test-staticroute", "guid": "mock_staticroute_guid"} + ] + + +def test_list_staticroutes_not_found(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=404, + json={"error": "not found"}, + ) + + with pytest.raises(Exception) as exc: + client.list_staticroutes() + + assert str(exc.value) == "not found" + + +def test_create_staticroute_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.post method + mock_post = mocker.patch("httpx.Client.post") + + # Set up the mock response + mock_post.return_value = httpx.Response( + status_code=201, + json={ + "metadata": {"guid": "test_staticroute_guid", "name": "test_staticroute"}, + }, + ) + + # Call the create_staticroute method + response = client.create_staticroute(body=staticroute_body) + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_staticroute_guid" + + +def test_create_staticroute_bad_request(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.post method + mock_post = mocker.patch("httpx.Client.post") + + # Set up the mock response + mock_post.return_value = httpx.Response( + status_code=409, + json={"error": "already exists"}, + ) + + with pytest.raises(Exception) as exc: + client.create_staticroute(body=staticroute_body) + + assert str(exc.value) == "already exists" + + +def test_get_staticroute_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.get method + mock_get = mocker.patch("httpx.Client.get") + + # Set up the mock response + mock_get.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "test_staticroute_guid", "name": "test_staticroute"}, + }, + ) + + # Call the get_staticroute method + response = client.get_staticroute(name="mock_staticroute_name") + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_staticroute_guid" + + +def test_update_staticroute_success(client, mocker: MockFixture): # noqa: 
F811 + # Mock the httpx.Client.put method + mock_put = mocker.patch("httpx.Client.put") + + # Set up the mock response + mock_put.return_value = httpx.Response( + status_code=200, + json={ + "metadata": {"guid": "test_staticroute_guid", "name": "test_staticroute"}, + }, + ) + + # Call the update_staticroute method + response = client.update_staticroute( + name="mock_staticroute_name", body=staticroute_body + ) + + # Validate the response + assert isinstance(response, Munch) + assert response.metadata.guid == "test_staticroute_guid" + + +def test_delete_staticroute_success(client, mocker: MockFixture): # noqa: F811 + # Mock the httpx.Client.delete method + mock_delete = mocker.patch("httpx.Client.delete") + + # Set up the mock response + mock_delete.return_value = httpx.Response( + status_code=204, + json={"success": True}, + ) + + # Call the delete_staticroute method + response = client.delete_staticroute(name="mock_staticroute_name") + + # Validate the response + assert response["success"] is True diff --git a/tests/utils/fixtures.py b/tests/utils/fixtures.py new file mode 100644 index 0000000..2ec867b --- /dev/null +++ b/tests/utils/fixtures.py @@ -0,0 +1,25 @@ +import pytest +from rapyuta_io_sdk_v2 import Client, AsyncClient + + +# Fixture to initialize the Client +@pytest.fixture +def client(): + client = Client() + client.config.hosts["v2api_host"] = "https://mock-api.rapyuta.io" + client.config.auth_token = "mock_token" + client.config.organization_guid = "mock_org_guid" + client.config.project_guid = "mock_project_guid" + client.config.environment = "mock" + return client + + +@pytest.fixture +def async_client(): + client = AsyncClient() + client.config.hosts["v2api_host"] = "https://mock-api.rapyuta.io" + client.config.auth_token = "mock_token" + client.config.organization_guid = "mock_org_guid" + client.config.project_guid = "mock_project_guid" + client.config.environment = "mock" + return client diff --git a/uv.lock b/uv.lock index 7196b19..6071ac1 100644 --- a/uv.lock +++ b/uv.lock @@ -16,6 +16,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1b/b4/f7e396030e3b11394436358ca258a81d6010106582422f23443c16ca1873/anyio-4.5.2-py3-none-any.whl", hash = "sha256:c011ee36bc1e8ba40e5a81cb9df91925c218fe9b778554e0b56a21e1b5d4716f", size = 89766 }, ] +[[package]] +name = "asyncmock" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mock" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c8/58/fa6b3147951a8d82cc78e628dffee0aa5838328c52ebfee4e0ddceb5d92b/asyncmock-0.4.2.tar.gz", hash = "sha256:c251889d542e98fe5f7ece2b5b8643b7d62b50a5657d34a4cbce8a1d5170d750", size = 3191 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/03/e3/873f433eca053c92d3cdb9336a379ee025bc1a86d4624ef87bf97a9ac7bc/asyncmock-0.4.2-py3-none-any.whl", hash = "sha256:fd8bc4e7813251a8959d1140924ccba3adbbc7af885dba7047c67f73c0b664b1", size = 4190 }, +] + [[package]] name = "certifi" version = "2024.8.30" @@ -34,6 +46,90 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, ] +[[package]] +name = "coverage" +version = "7.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f7/08/7e37f82e4d1aead42a7443ff06a1e406aabf7302c4f00a546e4b320b994c/coverage-7.6.1.tar.gz", hash 
= "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d", size = 798791 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/61/eb7ce5ed62bacf21beca4937a90fe32545c91a3c8a42a30c6616d48fc70d/coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16", size = 206690 }, + { url = "https://files.pythonhosted.org/packages/7d/73/041928e434442bd3afde5584bdc3f932fb4562b1597629f537387cec6f3d/coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36", size = 207127 }, + { url = "https://files.pythonhosted.org/packages/c7/c8/6ca52b5147828e45ad0242388477fdb90df2c6cbb9a441701a12b3c71bc8/coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02", size = 235654 }, + { url = "https://files.pythonhosted.org/packages/d5/da/9ac2b62557f4340270942011d6efeab9833648380109e897d48ab7c1035d/coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc", size = 233598 }, + { url = "https://files.pythonhosted.org/packages/53/23/9e2c114d0178abc42b6d8d5281f651a8e6519abfa0ef460a00a91f80879d/coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23", size = 234732 }, + { url = "https://files.pythonhosted.org/packages/0f/7e/a0230756fb133343a52716e8b855045f13342b70e48e8ad41d8a0d60ab98/coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34", size = 233816 }, + { url = "https://files.pythonhosted.org/packages/28/7c/3753c8b40d232b1e5eeaed798c875537cf3cb183fb5041017c1fdb7ec14e/coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c", size = 232325 }, + { url = "https://files.pythonhosted.org/packages/57/e3/818a2b2af5b7573b4b82cf3e9f137ab158c90ea750a8f053716a32f20f06/coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959", size = 233418 }, + { url = "https://files.pythonhosted.org/packages/c8/fb/4532b0b0cefb3f06d201648715e03b0feb822907edab3935112b61b885e2/coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232", size = 209343 }, + { url = "https://files.pythonhosted.org/packages/5a/25/af337cc7421eca1c187cc9c315f0a755d48e755d2853715bfe8c418a45fa/coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0", size = 210136 }, + { url = "https://files.pythonhosted.org/packages/ad/5f/67af7d60d7e8ce61a4e2ddcd1bd5fb787180c8d0ae0fbd073f903b3dd95d/coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93", size = 206796 }, + { url = "https://files.pythonhosted.org/packages/e1/0e/e52332389e057daa2e03be1fbfef25bb4d626b37d12ed42ae6281d0a274c/coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3", size = 207244 }, + { url = 
"https://files.pythonhosted.org/packages/aa/cd/766b45fb6e090f20f8927d9c7cb34237d41c73a939358bc881883fd3a40d/coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff", size = 239279 }, + { url = "https://files.pythonhosted.org/packages/70/6c/a9ccd6fe50ddaf13442a1e2dd519ca805cbe0f1fcd377fba6d8339b98ccb/coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d", size = 236859 }, + { url = "https://files.pythonhosted.org/packages/14/6f/8351b465febb4dbc1ca9929505202db909c5a635c6fdf33e089bbc3d7d85/coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6", size = 238549 }, + { url = "https://files.pythonhosted.org/packages/68/3c/289b81fa18ad72138e6d78c4c11a82b5378a312c0e467e2f6b495c260907/coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56", size = 237477 }, + { url = "https://files.pythonhosted.org/packages/ed/1c/aa1efa6459d822bd72c4abc0b9418cf268de3f60eeccd65dc4988553bd8d/coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234", size = 236134 }, + { url = "https://files.pythonhosted.org/packages/fb/c8/521c698f2d2796565fe9c789c2ee1ccdae610b3aa20b9b2ef980cc253640/coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133", size = 236910 }, + { url = "https://files.pythonhosted.org/packages/7d/30/033e663399ff17dca90d793ee8a2ea2890e7fdf085da58d82468b4220bf7/coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c", size = 209348 }, + { url = "https://files.pythonhosted.org/packages/20/05/0d1ccbb52727ccdadaa3ff37e4d2dc1cd4d47f0c3df9eb58d9ec8508ca88/coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6", size = 210230 }, + { url = "https://files.pythonhosted.org/packages/7e/d4/300fc921dff243cd518c7db3a4c614b7e4b2431b0d1145c1e274fd99bd70/coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778", size = 206983 }, + { url = "https://files.pythonhosted.org/packages/e1/ab/6bf00de5327ecb8db205f9ae596885417a31535eeda6e7b99463108782e1/coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391", size = 207221 }, + { url = "https://files.pythonhosted.org/packages/92/8f/2ead05e735022d1a7f3a0a683ac7f737de14850395a826192f0288703472/coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8", size = 240342 }, + { url = "https://files.pythonhosted.org/packages/0f/ef/94043e478201ffa85b8ae2d2c79b4081e5a1b73438aafafccf3e9bafb6b5/coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d", size = 237371 }, + { url = 
"https://files.pythonhosted.org/packages/1f/0f/c890339dd605f3ebc269543247bdd43b703cce6825b5ed42ff5f2d6122c7/coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca", size = 239455 }, + { url = "https://files.pythonhosted.org/packages/d1/04/7fd7b39ec7372a04efb0f70c70e35857a99b6a9188b5205efb4c77d6a57a/coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163", size = 238924 }, + { url = "https://files.pythonhosted.org/packages/ed/bf/73ce346a9d32a09cf369f14d2a06651329c984e106f5992c89579d25b27e/coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a", size = 237252 }, + { url = "https://files.pythonhosted.org/packages/86/74/1dc7a20969725e917b1e07fe71a955eb34bc606b938316bcc799f228374b/coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d", size = 238897 }, + { url = "https://files.pythonhosted.org/packages/b6/e9/d9cc3deceb361c491b81005c668578b0dfa51eed02cd081620e9a62f24ec/coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5", size = 209606 }, + { url = "https://files.pythonhosted.org/packages/47/c8/5a2e41922ea6740f77d555c4d47544acd7dc3f251fe14199c09c0f5958d3/coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb", size = 210373 }, + { url = "https://files.pythonhosted.org/packages/8c/f9/9aa4dfb751cb01c949c990d136a0f92027fbcc5781c6e921df1cb1563f20/coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106", size = 207007 }, + { url = "https://files.pythonhosted.org/packages/b9/67/e1413d5a8591622a46dd04ff80873b04c849268831ed5c304c16433e7e30/coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9", size = 207269 }, + { url = "https://files.pythonhosted.org/packages/14/5b/9dec847b305e44a5634d0fb8498d135ab1d88330482b74065fcec0622224/coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c", size = 239886 }, + { url = "https://files.pythonhosted.org/packages/7b/b7/35760a67c168e29f454928f51f970342d23cf75a2bb0323e0f07334c85f3/coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a", size = 237037 }, + { url = "https://files.pythonhosted.org/packages/f7/95/d2fd31f1d638df806cae59d7daea5abf2b15b5234016a5ebb502c2f3f7ee/coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060", size = 239038 }, + { url = "https://files.pythonhosted.org/packages/6e/bd/110689ff5752b67924efd5e2aedf5190cbbe245fc81b8dec1abaffba619d/coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862", size = 238690 }, + { url = 
"https://files.pythonhosted.org/packages/d3/a8/08d7b38e6ff8df52331c83130d0ab92d9c9a8b5462f9e99c9f051a4ae206/coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388", size = 236765 }, + { url = "https://files.pythonhosted.org/packages/d6/6a/9cf96839d3147d55ae713eb2d877f4d777e7dc5ba2bce227167d0118dfe8/coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155", size = 238611 }, + { url = "https://files.pythonhosted.org/packages/74/e4/7ff20d6a0b59eeaab40b3140a71e38cf52547ba21dbcf1d79c5a32bba61b/coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a", size = 209671 }, + { url = "https://files.pythonhosted.org/packages/35/59/1812f08a85b57c9fdb6d0b383d779e47b6f643bc278ed682859512517e83/coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129", size = 210368 }, + { url = "https://files.pythonhosted.org/packages/9c/15/08913be1c59d7562a3e39fce20661a98c0a3f59d5754312899acc6cb8a2d/coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e", size = 207758 }, + { url = "https://files.pythonhosted.org/packages/c4/ae/b5d58dff26cade02ada6ca612a76447acd69dccdbb3a478e9e088eb3d4b9/coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962", size = 208035 }, + { url = "https://files.pythonhosted.org/packages/b8/d7/62095e355ec0613b08dfb19206ce3033a0eedb6f4a67af5ed267a8800642/coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb", size = 250839 }, + { url = "https://files.pythonhosted.org/packages/7c/1e/c2967cb7991b112ba3766df0d9c21de46b476d103e32bb401b1b2adf3380/coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704", size = 246569 }, + { url = "https://files.pythonhosted.org/packages/8b/61/a7a6a55dd266007ed3b1df7a3386a0d760d014542d72f7c2c6938483b7bd/coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b", size = 248927 }, + { url = "https://files.pythonhosted.org/packages/c8/fa/13a6f56d72b429f56ef612eb3bc5ce1b75b7ee12864b3bd12526ab794847/coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f", size = 248401 }, + { url = "https://files.pythonhosted.org/packages/75/06/0429c652aa0fb761fc60e8c6b291338c9173c6aa0f4e40e1902345b42830/coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223", size = 246301 }, + { url = "https://files.pythonhosted.org/packages/52/76/1766bb8b803a88f93c3a2d07e30ffa359467810e5cbc68e375ebe6906efb/coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3", size = 247598 }, + { url = "https://files.pythonhosted.org/packages/66/8b/f54f8db2ae17188be9566e8166ac6df105c1c611e25da755738025708d54/coverage-7.6.1-cp313-cp313t-win32.whl", hash = 
"sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f", size = 210307 }, + { url = "https://files.pythonhosted.org/packages/9f/b0/e0dca6da9170aefc07515cce067b97178cefafb512d00a87a1c717d2efd5/coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657", size = 211453 }, + { url = "https://files.pythonhosted.org/packages/81/d0/d9e3d554e38beea5a2e22178ddb16587dbcbe9a1ef3211f55733924bf7fa/coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0", size = 206674 }, + { url = "https://files.pythonhosted.org/packages/38/ea/cab2dc248d9f45b2b7f9f1f596a4d75a435cb364437c61b51d2eb33ceb0e/coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a", size = 207101 }, + { url = "https://files.pythonhosted.org/packages/ca/6f/f82f9a500c7c5722368978a5390c418d2a4d083ef955309a8748ecaa8920/coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b", size = 236554 }, + { url = "https://files.pythonhosted.org/packages/a6/94/d3055aa33d4e7e733d8fa309d9adf147b4b06a82c1346366fc15a2b1d5fa/coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3", size = 234440 }, + { url = "https://files.pythonhosted.org/packages/e4/6e/885bcd787d9dd674de4a7d8ec83faf729534c63d05d51d45d4fa168f7102/coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de", size = 235889 }, + { url = "https://files.pythonhosted.org/packages/f4/63/df50120a7744492710854860783d6819ff23e482dee15462c9a833cc428a/coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6", size = 235142 }, + { url = "https://files.pythonhosted.org/packages/3a/5d/9d0acfcded2b3e9ce1c7923ca52ccc00c78a74e112fc2aee661125b7843b/coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569", size = 233805 }, + { url = "https://files.pythonhosted.org/packages/c4/56/50abf070cb3cd9b1dd32f2c88f083aab561ecbffbcd783275cb51c17f11d/coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989", size = 234655 }, + { url = "https://files.pythonhosted.org/packages/25/ee/b4c246048b8485f85a2426ef4abab88e48c6e80c74e964bea5cd4cd4b115/coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7", size = 209296 }, + { url = "https://files.pythonhosted.org/packages/5c/1c/96cf86b70b69ea2b12924cdf7cabb8ad10e6130eab8d767a1099fbd2a44f/coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8", size = 210137 }, + { url = "https://files.pythonhosted.org/packages/19/d3/d54c5aa83268779d54c86deb39c1c4566e5d45c155369ca152765f8db413/coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255", size = 206688 }, + { url = 
"https://files.pythonhosted.org/packages/a5/fe/137d5dca72e4a258b1bc17bb04f2e0196898fe495843402ce826a7419fe3/coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8", size = 207120 }, + { url = "https://files.pythonhosted.org/packages/78/5b/a0a796983f3201ff5485323b225d7c8b74ce30c11f456017e23d8e8d1945/coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2", size = 235249 }, + { url = "https://files.pythonhosted.org/packages/4e/e1/76089d6a5ef9d68f018f65411fcdaaeb0141b504587b901d74e8587606ad/coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a", size = 233237 }, + { url = "https://files.pythonhosted.org/packages/9a/6f/eef79b779a540326fee9520e5542a8b428cc3bfa8b7c8f1022c1ee4fc66c/coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc", size = 234311 }, + { url = "https://files.pythonhosted.org/packages/75/e1/656d65fb126c29a494ef964005702b012f3498db1a30dd562958e85a4049/coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004", size = 233453 }, + { url = "https://files.pythonhosted.org/packages/68/6a/45f108f137941a4a1238c85f28fd9d048cc46b5466d6b8dda3aba1bb9d4f/coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb", size = 231958 }, + { url = "https://files.pythonhosted.org/packages/9b/e7/47b809099168b8b8c72ae311efc3e88c8d8a1162b3ba4b8da3cfcdb85743/coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36", size = 232938 }, + { url = "https://files.pythonhosted.org/packages/52/80/052222ba7058071f905435bad0ba392cc12006380731c37afaf3fe749b88/coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c", size = 209352 }, + { url = "https://files.pythonhosted.org/packages/b8/d8/1b92e0b3adcf384e98770a00ca095da1b5f7b483e6563ae4eb5e935d24a1/coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca", size = 210153 }, + { url = "https://files.pythonhosted.org/packages/a5/2b/0354ed096bca64dc8e32a7cbcae28b34cb5ad0b1fe2125d6d99583313ac0/coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df", size = 198926 }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version <= '3.11'" }, +] + [[package]] name = "exceptiongroup" version = "1.2.2" @@ -152,6 +248,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6b/77/7440a06a8ead44c7757a64362dd22df5760f9b12dc5f11b6188cd2fc27a0/pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2", size = 342341 }, ] +[[package]] +name = "pytest-asyncio" +version = "0.24.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/52/6d/c6cf50ce320cf8611df7a1254d86233b3df7cc07f9b5f5cbcb82e08aa534/pytest_asyncio-0.24.0.tar.gz", hash = 
"sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276", size = 49855 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/96/31/6607dab48616902f76885dfcf62c08d929796fc3b2d2318faf9fd54dbed9/pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b", size = 18024 }, +] + +[[package]] +name = "pytest-cov" +version = "5.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage", extra = ["toml"] }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/67/00efc8d11b630c56f15f4ad9c7f9223f1e5ec275aaae3fa9118c6a223ad2/pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857", size = 63042 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/3a/af5b4fa5961d9a1e6237b530eb87dd04aea6eb83da09d2a4073d81b54ccf/pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652", size = 21990 }, +] + [[package]] name = "pytest-mock" version = "3.14.0" @@ -170,21 +291,37 @@ version = "0.0.1" source = { editable = "." } dependencies = [ { name = "httpx" }, - { name = "mock" }, { name = "munch" }, +] + +[package.dev-dependencies] +dev = [ + { name = "asyncmock" }, + { name = "coverage" }, + { name = "mock" }, { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "pytest-cov" }, { name = "pytest-mock" }, - { name = "tenacity" }, + { name = "typing-extensions" }, ] [package.metadata] requires-dist = [ { name = "httpx", specifier = ">=0.27.2" }, - { name = "mock", specifier = ">=5.1.0" }, { name = "munch", specifier = ">=4.0.0" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "asyncmock", specifier = ">=0.4.2" }, + { name = "coverage", specifier = ">=7.6.1" }, + { name = "mock", specifier = ">=5.1.0" }, { name = "pytest", specifier = ">=8.3.3" }, + { name = "pytest-asyncio", specifier = ">=0.24.0" }, + { name = "pytest-cov", specifier = ">=5.0.0" }, { name = "pytest-mock", specifier = ">=3.14.0" }, - { name = "tenacity", specifier = ">=9.0.0" }, + { name = "typing-extensions", specifier = ">=4.12.2" }, ] [[package]] @@ -196,15 +333,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, ] -[[package]] -name = "tenacity" -version = "9.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cd/94/91fccdb4b8110642462e653d5dcb27e7b674742ad68efd146367da7bdb10/tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b", size = 47421 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b6/cb/b86984bed139586d01532a587464b5805f12e397594f19f931c4c2fbfa61/tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539", size = 28169 }, -] - [[package]] name = "tomli" version = "2.0.2"