From 37cb76113f6fc86e3fd9a6b99ab1781ce6fd6838 Mon Sep 17 00:00:00 2001 From: mibe Date: Tue, 30 Apr 2024 09:29:45 +0100 Subject: [PATCH 01/14] #113 Added implementation for the SaaS Bucket. --- doc/changes/unreleased.md | 1 + exasol/bucketfs/__init__.py | 4 + exasol/bucketfs/_buckets.py | 94 ++++++- poetry.lock | 372 ++++++++++++--------------- pyproject.toml | 3 +- test/integration/bucketfs_test.py | 94 +++++++ test/integration/conftest.py | 91 ++++++- test/integration/test_bucket_path.py | 56 ++++ 8 files changed, 498 insertions(+), 217 deletions(-) diff --git a/doc/changes/unreleased.md b/doc/changes/unreleased.md index 2e2efb37..89aed9a8 100644 --- a/doc/changes/unreleased.md +++ b/doc/changes/unreleased.md @@ -27,6 +27,7 @@ Extracted bucket interface into BucketLike protocol. Implemented PathLike for buckets based on BucketLike protocol. Added a path factory function. + Added implementation of the BucketLike for the SaaS BucketFS. ## Internal diff --git a/exasol/bucketfs/__init__.py b/exasol/bucketfs/__init__.py index e0f81e15..bf91ee2b 100644 --- a/exasol/bucketfs/__init__.py +++ b/exasol/bucketfs/__init__.py @@ -48,6 +48,8 @@ from exasol.bucketfs._buckets import ( BucketLike, Bucket, + SaaSBucket, + MountedBucket, MappedBucket, ) from exasol.bucketfs._convert import ( @@ -64,6 +66,8 @@ "Service", "BucketLike", "Bucket", + "SaaSBucket", + "MountedBucket", "MappedBucket", "BucketFsError", "path", diff --git a/exasol/bucketfs/_buckets.py b/exasol/bucketfs/_buckets.py index b402f461..d11d2ae3 100644 --- a/exasol/bucketfs/_buckets.py +++ b/exasol/bucketfs/_buckets.py @@ -18,6 +18,13 @@ from requests import HTTPError from requests.auth import HTTPBasicAuth +from exasol.saas.client.openapi.client import AuthenticatedClient as SaasAuthenticatedClient +from exasol.saas.client.openapi.models.file import File as SaasFile +from exasol.saas.client.openapi.api.files.list_files import sync as saas_list_files +from exasol.saas.client.openapi.api.files.delete_file import 
sync_detailed as saas_delete_file +from exasol.saas.client.openapi.api.files.upload_file import sync_detailed as saas_upload_file +from exasol.saas.client.openapi.api.files.download_file import sync_detailed as saas_download_file + from exasol.bucketfs._error import BucketFsError from exasol.bucketfs._logging import LOGGER from exasol.bucketfs._shared import ( @@ -221,6 +228,7 @@ def delete(self, path) -> None: url = _build_url(service_url=self._service, bucket=self.name, path=path) LOGGER.info(f"Deleting {path} from bucket {self.name}.") response = requests.delete(url, auth=self._auth, verify=self._verify) + try: response.raise_for_status() except HTTPError as ex: @@ -252,12 +260,16 @@ def download(self, path: str, chunk_size: int = 8192) -> Iterable[ByteString]: yield from response.iter_content(chunk_size=chunk_size) +def _to_path_in_url(path: str) -> str: + return path.replace('/', '%2F') + + class SaaSBucket: def __init__(self, url: str, account_id: str, database_id: str, pat: str) -> None: self._url = url self._account_id = account_id - self.database_id = database_id + self._database_id = database_id self._pat = pat @property @@ -268,24 +280,86 @@ def name(self) -> str: def udf_path(self) -> str: return f'/buckets/uploads/{self.name}' + @property def files(self) -> Iterable[str]: - """To be provided""" - raise NotImplementedError() + LOGGER.info("Retrieving the bucket listing.") + with SaasAuthenticatedClient(base_url=self._url, + token=self._pat, + raise_on_unexpected_status=True) as client: + content = saas_list_files(account_id=self._account_id, + database_id=self._database_id, + client=client) + + file_list: list[str] = [] + + def recursive_file_collector(node: SaasFile) -> None: + if node.children: + for child in node.children: + recursive_file_collector(child) + else: + file_list.append(node.path) + + for root_node in content: + recursive_file_collector(root_node) + + return file_list def delete(self, path: str) -> None: - """To be provided""" - raise 
NotImplementedError() + LOGGER.info(f"Deleting {path} from the bucket.") + with SaasAuthenticatedClient(base_url=self._url, + token=self._pat, + raise_on_unexpected_status=True) as client: + saas_delete_file(account_id=self._account_id, + database_id=self._database_id, + key=_to_path_in_url(path), + client=client) def upload(self, path: str, data: ByteString | BinaryIO) -> None: - """To be provided""" - raise NotImplementedError() + LOGGER.info(f"Uploading {path} to the bucket.") + # Q. The service can handle any characters in the path. + # Do we need to check this path for presence of characters deemed + # invalid in the BucketLike protocol? + with SaasAuthenticatedClient(base_url=self._url, + token=self._pat, + raise_on_unexpected_status=False) as client: + response = saas_upload_file(account_id=self._account_id, + database_id=self._database_id, + key=_to_path_in_url(path), + client=client) + if response.status_code >= 400: + # Q. Is it the right type of exception? + raise RuntimeError(f'Request for a presigned url to upload the file {path} ' + f'failed with the status code {response.status_code}') + upload_url = response.parsed.url.replace(r'\u0026', '&') + + response = requests.put(upload_url, data=data) + response.raise_for_status() def download(self, path: str, chunk_size: int = 8192) -> Iterable[ByteString]: - """To be provided""" - raise NotImplementedError() + LOGGER.info(f"Downloading {path} from the bucket.") + with SaasAuthenticatedClient(base_url=self._url, + token=self._pat, + raise_on_unexpected_status=False) as client: + response = saas_download_file(account_id=self._account_id, + database_id=self._database_id, + key=_to_path_in_url(path), + client=client) + if response.status_code == 404: + raise BucketFsError(f"The file {path} doesn't exist in the SaaS BucketFs.") + elif response.status_code >= 400: + # Q. Is it the right type of exception? 
+ raise RuntimeError(f'Request for a presigned url to download the file {path} ' + f'failed with the status code {response.status_code}') + download_url = response.parsed.url.replace(r'\u0026', '&') + + response = requests.get(download_url, stream=True) + response.raise_for_status() + for chunk in response.iter_content(chunk_size=chunk_size): + if chunk: + yield chunk def __str__(self): - return f"SaaSBucket<{self.name} | on: {self._url}>" + return f"SaaSBucket" class MountedBucket: diff --git a/poetry.lock b/poetry.lock index fa11a9f7..7b0069fb 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "alabaster" version = "0.7.13" description = "A configurable sidebar-enabled Sphinx theme" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -16,7 +15,6 @@ files = [ name = "argcomplete" version = "2.1.2" description = "Bash tab completion for argparse" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -32,7 +30,6 @@ test = ["coverage", "flake8", "mypy", "pexpect", "wheel"] name = "astroid" version = "3.1.0" description = "An abstract syntax tree for Python with inference support." 
-category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -47,7 +44,6 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} name = "babel" version = "2.14.0" description = "Internationalization utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -65,7 +61,6 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] name = "beautifulsoup4" version = "4.12.3" description = "Screen-scraping library" -category = "dev" optional = false python-versions = ">=3.6.0" files = [ @@ -87,7 +82,6 @@ lxml = ["lxml"] name = "black" version = "23.12.1" description = "The uncompromising code formatter." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -134,7 +128,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "certifi" version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -146,7 +139,6 @@ files = [ name = "cffi" version = "1.16.0" description = "Foreign Function Interface for Python calling C code." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -211,7 +203,6 @@ pycparser = "*" name = "cfgv" version = "3.4.0" description = "Validate configuration and produce human readable error messages." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -223,7 +214,6 @@ files = [ name = "charset-normalizer" version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -323,7 +313,6 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -338,7 +327,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -350,7 +338,6 @@ files = [ name = "colorlog" version = "6.8.2" description = "Add colours to the output of Python's logging module." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -366,64 +353,63 @@ development = ["black", "flake8", "mypy", "pytest", "types-colorama"] [[package]] name = "coverage" -version = "7.4.4" +version = "7.5.0" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"}, - {file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"}, - {file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"}, - {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"}, - {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"}, - {file = 
"coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"}, - {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"}, - {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"}, - {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"}, - {file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"}, - {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"}, - {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"}, - {file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"}, - {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"}, - {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"}, - {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"}, - {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"}, - {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"}, - {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"}, - {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"}, - {file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"}, - {file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"}, - {file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"}, - {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"}, - {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"}, - {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"}, - {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"}, - {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"}, - {file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"}, - {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"}, - {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"}, - {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"}, - {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"}, - {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"}, - {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"}, - {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"}, - {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"}, - {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"}, - {file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"}, - {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"}, - {file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"}, - {file = "coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"}, - {file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"}, - {file = 
"coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"}, - {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"}, - {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"}, - {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"}, - {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"}, - {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"}, - {file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"}, - {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"}, - {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"}, + {file = "coverage-7.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:432949a32c3e3f820af808db1833d6d1631664d53dd3ce487aa25d574e18ad1c"}, + {file = "coverage-7.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2bd7065249703cbeb6d4ce679c734bef0ee69baa7bff9724361ada04a15b7e3b"}, + {file = "coverage-7.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbfe6389c5522b99768a93d89aca52ef92310a96b99782973b9d11e80511f932"}, + {file = "coverage-7.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:39793731182c4be939b4be0cdecde074b833f6171313cf53481f869937129ed3"}, + {file = "coverage-7.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85a5dbe1ba1bf38d6c63b6d2c42132d45cbee6d9f0c51b52c59aa4afba057517"}, + {file = "coverage-7.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:357754dcdfd811462a725e7501a9b4556388e8ecf66e79df6f4b988fa3d0b39a"}, + {file = "coverage-7.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a81eb64feded34f40c8986869a2f764f0fe2db58c0530d3a4afbcde50f314880"}, + {file = "coverage-7.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:51431d0abbed3a868e967f8257c5faf283d41ec882f58413cf295a389bb22e58"}, + {file = "coverage-7.5.0-cp310-cp310-win32.whl", hash = "sha256:f609ebcb0242d84b7adeee2b06c11a2ddaec5464d21888b2c8255f5fd6a98ae4"}, + {file = "coverage-7.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:6782cd6216fab5a83216cc39f13ebe30adfac2fa72688c5a4d8d180cd52e8f6a"}, + {file = "coverage-7.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e768d870801f68c74c2b669fc909839660180c366501d4cc4b87efd6b0eee375"}, + {file = "coverage-7.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:84921b10aeb2dd453247fd10de22907984eaf80901b578a5cf0bb1e279a587cb"}, + {file = "coverage-7.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:710c62b6e35a9a766b99b15cdc56d5aeda0914edae8bb467e9c355f75d14ee95"}, + {file = "coverage-7.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c379cdd3efc0658e652a14112d51a7668f6bfca7445c5a10dee7eabecabba19d"}, + {file = "coverage-7.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fea9d3ca80bcf17edb2c08a4704259dadac196fe5e9274067e7a20511fad1743"}, + {file = "coverage-7.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:41327143c5b1d715f5f98a397608f90ab9ebba606ae4e6f3389c2145410c52b1"}, + {file = "coverage-7.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:565b2e82d0968c977e0b0f7cbf25fd06d78d4856289abc79694c8edcce6eb2de"}, + {file = "coverage-7.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cf3539007202ebfe03923128fedfdd245db5860a36810136ad95a564a2fdffff"}, + {file = "coverage-7.5.0-cp311-cp311-win32.whl", hash = "sha256:bf0b4b8d9caa8d64df838e0f8dcf68fb570c5733b726d1494b87f3da85db3a2d"}, + {file = "coverage-7.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c6384cc90e37cfb60435bbbe0488444e54b98700f727f16f64d8bfda0b84656"}, + {file = "coverage-7.5.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fed7a72d54bd52f4aeb6c6e951f363903bd7d70bc1cad64dd1f087980d309ab9"}, + {file = "coverage-7.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cbe6581fcff7c8e262eb574244f81f5faaea539e712a058e6707a9d272fe5b64"}, + {file = "coverage-7.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad97ec0da94b378e593ef532b980c15e377df9b9608c7c6da3506953182398af"}, + {file = "coverage-7.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd4bacd62aa2f1a1627352fe68885d6ee694bdaebb16038b6e680f2924a9b2cc"}, + {file = "coverage-7.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:adf032b6c105881f9d77fa17d9eebe0ad1f9bfb2ad25777811f97c5362aa07f2"}, + {file = "coverage-7.5.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4ba01d9ba112b55bfa4b24808ec431197bb34f09f66f7cb4fd0258ff9d3711b1"}, + {file = "coverage-7.5.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f0bfe42523893c188e9616d853c47685e1c575fe25f737adf473d0405dcfa7eb"}, + {file = "coverage-7.5.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a9a7ef30a1b02547c1b23fa9a5564f03c9982fc71eb2ecb7f98c96d7a0db5cf2"}, + {file = 
"coverage-7.5.0-cp312-cp312-win32.whl", hash = "sha256:3c2b77f295edb9fcdb6a250f83e6481c679335ca7e6e4a955e4290350f2d22a4"}, + {file = "coverage-7.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:427e1e627b0963ac02d7c8730ca6d935df10280d230508c0ba059505e9233475"}, + {file = "coverage-7.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9dd88fce54abbdbf4c42fb1fea0e498973d07816f24c0e27a1ecaf91883ce69e"}, + {file = "coverage-7.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a898c11dca8f8c97b467138004a30133974aacd572818c383596f8d5b2eb04a9"}, + {file = "coverage-7.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07dfdd492d645eea1bd70fb1d6febdcf47db178b0d99161d8e4eed18e7f62fe7"}, + {file = "coverage-7.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3d117890b6eee85887b1eed41eefe2e598ad6e40523d9f94c4c4b213258e4a4"}, + {file = "coverage-7.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6afd2e84e7da40fe23ca588379f815fb6dbbb1b757c883935ed11647205111cb"}, + {file = "coverage-7.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a9960dd1891b2ddf13a7fe45339cd59ecee3abb6b8326d8b932d0c5da208104f"}, + {file = "coverage-7.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ced268e82af993d7801a9db2dbc1d2322e786c5dc76295d8e89473d46c6b84d4"}, + {file = "coverage-7.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e7c211f25777746d468d76f11719e64acb40eed410d81c26cefac641975beb88"}, + {file = "coverage-7.5.0-cp38-cp38-win32.whl", hash = "sha256:262fffc1f6c1a26125d5d573e1ec379285a3723363f3bd9c83923c9593a2ac25"}, + {file = "coverage-7.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:eed462b4541c540d63ab57b3fc69e7d8c84d5957668854ee4e408b50e92ce26a"}, + {file = "coverage-7.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d0194d654e360b3e6cc9b774e83235bae6b9b2cac3be09040880bb0e8a88f4a1"}, + {file = 
"coverage-7.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:33c020d3322662e74bc507fb11488773a96894aa82a622c35a5a28673c0c26f5"}, + {file = "coverage-7.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbdf2cae14a06827bec50bd58e49249452d211d9caddd8bd80e35b53cb04631"}, + {file = "coverage-7.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3235d7c781232e525b0761730e052388a01548bd7f67d0067a253887c6e8df46"}, + {file = "coverage-7.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2de4e546f0ec4b2787d625e0b16b78e99c3e21bc1722b4977c0dddf11ca84e"}, + {file = "coverage-7.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4d0e206259b73af35c4ec1319fd04003776e11e859936658cb6ceffdeba0f5be"}, + {file = "coverage-7.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2055c4fb9a6ff624253d432aa471a37202cd8f458c033d6d989be4499aed037b"}, + {file = "coverage-7.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:075299460948cd12722a970c7eae43d25d37989da682997687b34ae6b87c0ef0"}, + {file = "coverage-7.5.0-cp39-cp39-win32.whl", hash = "sha256:280132aada3bc2f0fac939a5771db4fbb84f245cb35b94fae4994d4c1f80dae7"}, + {file = "coverage-7.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:c58536f6892559e030e6924896a44098bc1290663ea12532c78cef71d0df8493"}, + {file = "coverage-7.5.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:2b57780b51084d5223eee7b59f0d4911c31c16ee5aa12737c7a02455829ff067"}, + {file = "coverage-7.5.0.tar.gz", hash = "sha256:cf62d17310f34084c59c01e027259076479128d11e4661bb6c9acb38c5e19bb8"}, ] [package.extras] @@ -433,7 +419,6 @@ toml = ["tomli"] name = "cryptography" version = "42.0.5" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -488,7 +473,6 @@ test-randomorder = ["pytest-randomly"] name = "dill" version = "0.3.8" description = "serialize all of Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -504,7 +488,6 @@ profile = ["gprof2dot (>=2022.7.29)"] name = "distlib" version = "0.3.8" description = "Distribution utilities" -category = "dev" optional = false python-versions = "*" files = [ @@ -516,7 +499,6 @@ files = [ name = "docutils" version = "0.19" description = "Docutils -- Python Documentation Utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -528,7 +510,6 @@ files = [ name = "exasol-toolbox" version = "0.8.0" description = "" -category = "dev" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -557,7 +538,6 @@ typer = {version = ">=0.7.0", extras = ["all"]} name = "exasol-udf-mock-python" version = "0.1.0" description = "Mocking framework for Exasol Python UDFs" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -572,14 +552,13 @@ pandas = ">=1.4,<2.0" [[package]] name = "exceptiongroup" -version = "1.2.0" +version = "1.2.1" description = "Backport of PEP 654 (exception groups)" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, + {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, + {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, ] [package.extras] @@ -589,7 +568,6 @@ test = ["pytest (>=6)"] name = "filelock" version = "3.13.4" description = "A platform independent file lock." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -606,7 +584,6 @@ typing = ["typing-extensions (>=4.8)"] name = "furo" version = "2022.12.7" description = "A clean customisable Sphinx documentation theme." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -622,14 +599,13 @@ sphinx-basic-ng = "*" [[package]] name = "identify" -version = "2.5.35" +version = "2.5.36" description = "File identification library for Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.5.35-py2.py3-none-any.whl", hash = "sha256:c4de0081837b211594f8e877a6b4fad7ca32bbfc1a9307fdd61c28bfe923f13e"}, - {file = "identify-2.5.35.tar.gz", hash = "sha256:10a7ca245cfcd756a554a7288159f72ff105ad233c7c4b9c6f0f4d108f5f6791"}, + {file = "identify-2.5.36-py2.py3-none-any.whl", hash = "sha256:37d93f380f4de590500d9dba7db359d0d3da95ffe7f9de1753faa159e71e7dfa"}, + {file = "identify-2.5.36.tar.gz", hash = "sha256:e5e00f54165f9047fbebeb4a560f9acfb8af4c88232be60a488e9b68d122745d"}, ] [package.extras] @@ -639,7 +615,6 @@ license = ["ukkonen"] name = "idna" version = "3.7" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -647,11 +622,21 @@ files = [ {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] +[[package]] +name = "ifaddr" +version = "0.2.0" +description = "Cross-platform network interface and IP address enumeration library" +optional = false +python-versions = "*" +files = [ + {file = "ifaddr-0.2.0-py3-none-any.whl", hash = "sha256:085e0305cfe6f16ab12d72e2024030f5d52674afad6911bb1eee207177b8a748"}, + {file = "ifaddr-0.2.0.tar.gz", hash = "sha256:cc0cbfcaabf765d44595825fb96a99bb12c79716b73b44330ea38ee2b0c4aed4"}, +] + [[package]] name = "imagesize" version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" -category = "dev" optional 
= false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -663,7 +648,6 @@ files = [ name = "importlib-metadata" version = "7.1.0" description = "Read metadata from Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -683,7 +667,6 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", name = "importlib-resources" version = "6.4.0" description = "Read resources from Python packages" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -702,7 +685,6 @@ testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "p name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -714,7 +696,6 @@ files = [ name = "isort" version = "5.13.2" description = "A Python utility / library to sort Python imports." -category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -729,7 +710,6 @@ colors = ["colorama (>=0.4.6)"] name = "jinja2" version = "3.1.3" description = "A very fast and expressive template engine." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -747,7 +727,6 @@ i18n = ["Babel (>=2.7)"] name = "joblib" version = "1.4.0" description = "Lightweight pipelining with Python functions" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -759,7 +738,6 @@ files = [ name = "markdown-it-py" version = "2.2.0" description = "Python port of markdown-it. Markdown parsing, done right!" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -784,7 +762,6 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "markupsafe" version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -854,7 +831,6 @@ files = [ name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -866,7 +842,6 @@ files = [ name = "mdit-py-plugins" version = "0.3.5" description = "Collection of plugins for markdown-it-py" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -886,7 +861,6 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -896,39 +870,38 @@ files = [ [[package]] name = "mypy" -version = "1.9.0" +version = "1.10.0" description = "Optional static typing for Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, - {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, - {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, - {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, - {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, - {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, - {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, - {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, - {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, - {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, - {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, - {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, - {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, - {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, - {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, - {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, - {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, - {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, - {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, - {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, - {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, - {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", 
hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, - {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, - {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, - {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, - {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, - {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, + {file = "mypy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da1cbf08fb3b851ab3b9523a884c232774008267b1f83371ace57f412fe308c2"}, + {file = "mypy-1.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:12b6bfc1b1a66095ab413160a6e520e1dc076a28f3e22f7fb25ba3b000b4ef99"}, + {file = "mypy-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e36fb078cce9904c7989b9693e41cb9711e0600139ce3970c6ef814b6ebc2b2"}, + {file = "mypy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2b0695d605ddcd3eb2f736cd8b4e388288c21e7de85001e9f85df9187f2b50f9"}, + {file = "mypy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:cd777b780312ddb135bceb9bc8722a73ec95e042f911cc279e2ec3c667076051"}, + {file = "mypy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3be66771aa5c97602f382230165b856c231d1277c511c9a8dd058be4784472e1"}, + {file = "mypy-1.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8b2cbaca148d0754a54d44121b5825ae71868c7592a53b7292eeb0f3fdae95ee"}, + {file = "mypy-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ec404a7cbe9fc0e92cb0e67f55ce0c025014e26d33e54d9e506a0f2d07fe5de"}, + {file = 
"mypy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e22e1527dc3d4aa94311d246b59e47f6455b8729f4968765ac1eacf9a4760bc7"}, + {file = "mypy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:a87dbfa85971e8d59c9cc1fcf534efe664d8949e4c0b6b44e8ca548e746a8d53"}, + {file = "mypy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a781f6ad4bab20eef8b65174a57e5203f4be627b46291f4589879bf4e257b97b"}, + {file = "mypy-1.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b808e12113505b97d9023b0b5e0c0705a90571c6feefc6f215c1df9381256e30"}, + {file = "mypy-1.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f55583b12156c399dce2df7d16f8a5095291354f1e839c252ec6c0611e86e2e"}, + {file = "mypy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cf18f9d0efa1b16478c4c129eabec36148032575391095f73cae2e722fcf9d5"}, + {file = "mypy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:bc6ac273b23c6b82da3bb25f4136c4fd42665f17f2cd850771cb600bdd2ebeda"}, + {file = "mypy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9fd50226364cd2737351c79807775136b0abe084433b55b2e29181a4c3c878c0"}, + {file = "mypy-1.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f90cff89eea89273727d8783fef5d4a934be2fdca11b47def50cf5d311aff727"}, + {file = "mypy-1.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fcfc70599efde5c67862a07a1aaf50e55bce629ace26bb19dc17cece5dd31ca4"}, + {file = "mypy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:075cbf81f3e134eadaf247de187bd604748171d6b79736fa9b6c9685b4083061"}, + {file = "mypy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:3f298531bca95ff615b6e9f2fc0333aae27fa48052903a0ac90215021cdcfa4f"}, + {file = "mypy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa7ef5244615a2523b56c034becde4e9e3f9b034854c93639adb667ec9ec2976"}, + {file = "mypy-1.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:3236a4c8f535a0631f85f5fcdffba71c7feeef76a6002fcba7c1a8e57c8be1ec"}, + {file = "mypy-1.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a2b5cdbb5dd35aa08ea9114436e0d79aceb2f38e32c21684dcf8e24e1e92821"}, + {file = "mypy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92f93b21c0fe73dc00abf91022234c79d793318b8a96faac147cd579c1671746"}, + {file = "mypy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:28d0e038361b45f099cc086d9dd99c15ff14d0188f44ac883010e172ce86c38a"}, + {file = "mypy-1.10.0-py3-none-any.whl", hash = "sha256:f8c083976eb530019175aabadb60921e73b4f45736760826aa1689dda8208aee"}, + {file = "mypy-1.10.0.tar.gz", hash = "sha256:3d087fcbec056c4ee34974da493a826ce316947485cef3901f511848e687c131"}, ] [package.dependencies] @@ -946,7 +919,6 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -958,7 +930,6 @@ files = [ name = "myst-parser" version = "0.18.1" description = "An extended commonmark compliant parser, with bridges to docutils & sphinx." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -985,7 +956,6 @@ testing = ["beautifulsoup4", "coverage[toml]", "pytest (>=6,<7)", "pytest-cov", name = "nodeenv" version = "1.8.0" description = "Node.js virtual environment builder" -category = "dev" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" files = [ @@ -1000,7 +970,6 @@ setuptools = "*" name = "nox" version = "2022.11.21" description = "Flexible test automation." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1021,7 +990,6 @@ tox-to-nox = ["jinja2", "tox"] name = "numpy" version = "1.24.4" description = "Fundamental package for array computing in Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1059,7 +1027,6 @@ files = [ name = "packaging" version = "24.0" description = "Core utilities for Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1071,7 +1038,6 @@ files = [ name = "pandas" version = "1.5.3" description = "Powerful data structures for data analysis, time series, and statistics" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1107,8 +1073,8 @@ files = [ [package.dependencies] numpy = [ {version = ">=1.20.3", markers = "python_version < \"3.10\""}, - {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, ] python-dateutil = ">=2.8.1" pytz = ">=2020.1" @@ -1120,7 +1086,6 @@ test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"] name = "pathspec" version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1130,30 +1095,29 @@ files = [ [[package]] name = "platformdirs" -version = "4.2.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" +version = "4.2.1" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, + {file = "platformdirs-4.2.1-py3-none-any.whl", hash = "sha256:17d5a1161b3fd67b390023cb2d3b026bbd40abde6fdb052dfbd3a29c3ba22ee1"}, + {file = "platformdirs-4.2.1.tar.gz", hash = "sha256:031cd18d4ec63ec53e82dceaac0417d218a6863f7745dfcc9efe7793b7039bdf"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] [[package]] name = "pluggy" -version = "1.4.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] @@ -1164,7 +1128,6 @@ testing = ["pytest", "pytest-benchmark"] name = "pre-commit" version = "3.5.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1183,7 +1146,6 @@ virtualenv = ">=20.10.0" name = "prysk" version = "0.19.0" description = "Functional tests for command line applications" -category = "dev" optional = false python-versions = "<4.0.0,>=3.8" files = [ @@ -1201,7 +1163,6 @@ pytest-plugin = ["pytest-prysk (>=0.2.0,<0.3.0)"] name = "pyasn1" version = "0.6.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1213,7 +1174,6 @@ files = [ name = "pycparser" version = "2.22" description = "C parser in Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1223,17 +1183,17 @@ files = [ [[package]] name = "pyexasol" -version = "0.24.0" +version = "0.25.2" description = "Exasol python driver with extra features" -category = "dev" optional = false python-versions = ">=3.6" files = [ - {file = "pyexasol-0.24.0-py3-none-any.whl", hash = "sha256:ec39abd065794d3a1a6a2686cfc2f8b39c30b90382478b468d419b12e410fe2b"}, - {file = "pyexasol-0.24.0.tar.gz", hash = "sha256:284138e599575a4edbcc251789595138c9a34826eca2bf6d8bad45fc063bc0e1"}, + {file = "pyexasol-0.25.2-py3-none-any.whl", hash = "sha256:54be5c75f0867a4838b84b5b5a37466c33fa9b1ca6bf51d9c3d821d367936e6e"}, + {file = "pyexasol-0.25.2.tar.gz", hash = "sha256:3b42cb2c32b7b2ffe7a78c82bf21c3a391043758f2a575c48460252a72386691"}, ] [package.dependencies] +packaging = "*" pyopenssl = "*" rsa = "*" websocket-client = ">=1.0.1" @@ -1249,7 +1209,6 @@ ujson = ["ujson"] name = "pygments" version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1265,7 +1224,6 @@ windows-terminal = ["colorama (>=0.4.6)"] name = "pylint" version = "3.1.0" description = "python code static checker" -category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -1278,8 +1236,8 @@ astroid = ">=3.1.0,<=3.2.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, - {version = ">=0.3.6", markers = "python_version >= \"3.11\""}, {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, ] isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" mccabe = ">=0.6,<0.8" @@ -1296,7 +1254,6 @@ testutils = ["gitpython (>3)"] name = "pyopenssl" version = "24.1.0" description = "Python wrapper module around the OpenSSL library" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1315,7 +1272,6 @@ test = ["pretend", "pytest (>=3.0.1)", "pytest-rerunfailures"] name = "pytest" version = "7.4.4" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1338,7 +1294,6 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-localserver" version = "0.8.1" description = "pytest plugin to test server connections locally." 
-category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1356,7 +1311,6 @@ smtp = ["aiosmtpd"] name = "python-dateutil" version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -1371,7 +1325,6 @@ six = ">=1.5" name = "pytz" version = "2024.1" description = "World timezone definitions, modern and historical" -category = "dev" optional = false python-versions = "*" files = [ @@ -1383,7 +1336,6 @@ files = [ name = "pyupgrade" version = "3.8.0" description = "A tool to automatically upgrade syntax for newer versions." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1398,7 +1350,6 @@ tokenize-rt = ">=3.2.0" name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1407,6 +1358,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1414,8 +1366,15 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1432,6 +1391,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1439,6 +1399,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + 
{file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -1448,7 +1409,6 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1470,7 +1430,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "rich" version = "13.7.1" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -1490,7 +1449,6 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] name = "rsa" version = "4.9" description = "Pure-Python RSA implementation" -category = "dev" optional = false python-versions = ">=3.6,<4" files = [ @@ -1501,28 +1459,42 @@ files = [ [package.dependencies] pyasn1 = ">=0.1.3" +[[package]] +name = "saas-api" +version = "0.2.0" +description = "API enabling Python applications connecting to Exasol database SaaS instances and using their SaaS services" +optional = false +python-versions = "<4.0,>=3.8.0" +files = [ + {file = "saas_api-0.2.0-py3-none-any.whl", hash = "sha256:aab26c84057e78247a9a7ba2c4b71c289ca2740490d4c26fa51c946e094ef1c6"}, + {file = "saas_api-0.2.0.tar.gz", hash = "sha256:03bb3a4b41c16e5b4214348e505558817c7894991fd3106585238a92c8df2965"}, +] + +[package.dependencies] +ifaddr = ">=0.2.0,<0.3.0" +requests = ">=2.31.0,<3.0.0" +types-requests = ">=2.31.0.6,<3.0.0.0" + [[package]] name = "setuptools" -version = "69.2.0" +version = "69.5.1" description = "Easily download, build, install, 
upgrade, and uninstall Python packages" -category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, - {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, + {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, + {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", 
"pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "shellingham" version = "1.5.4" description = "Tool to Detect Surrounding Shell" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1534,7 +1506,6 @@ files = [ name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1546,7 +1517,6 @@ files = [ name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -category = "dev" optional = false python-versions = "*" files = [ @@ -1558,7 +1528,6 @@ files = [ name = "soupsieve" version = "2.5" description = "A modern CSS selector implementation for Beautiful Soup." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1570,7 +1539,6 @@ files = [ name = "sphinx" version = "5.3.0" description = "Python documentation generator" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1606,7 +1574,6 @@ test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] name = "sphinx-basic-ng" version = "1.0.0b2" description = "A modern skeleton for Sphinx themes." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1624,7 +1591,6 @@ docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-ta name = "sphinx-copybutton" version = "0.5.2" description = "Add a copy button to each of your code cells." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1643,7 +1609,6 @@ rtd = ["ipython", "myst-nb", "sphinx", "sphinx-book-theme", "sphinx-examples"] name = "sphinxcontrib-applehelp" version = "1.0.4" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1659,7 +1624,6 @@ test = ["pytest"] name = "sphinxcontrib-devhelp" version = "1.0.2" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1675,7 +1639,6 @@ test = ["pytest"] name = "sphinxcontrib-htmlhelp" version = "2.0.1" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1691,7 +1654,6 @@ test = ["html5lib", "pytest"] name = "sphinxcontrib-jsmath" version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1706,7 +1668,6 @@ test = ["flake8", "mypy", "pytest"] name = "sphinxcontrib-qthelp" version = "1.0.3" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1722,7 +1683,6 @@ test = ["pytest"] name = "sphinxcontrib-serializinghtml" version = "1.1.5" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1738,7 +1698,6 @@ test = ["pytest"] name = "tokenize-rt" version = "5.2.0" description = "A wrapper around the stdlib `tokenize` which roundtrips." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1750,7 +1709,6 @@ files = [ name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" -category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1762,7 +1720,6 @@ files = [ name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1774,7 +1731,6 @@ files = [ name = "tomlkit" version = "0.12.4" description = "Style preserving TOML library" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1786,7 +1742,6 @@ files = [ name = "typeguard" version = "4.0.0" description = "Run-time type checker for Python" -category = "main" optional = false python-versions = ">=3.7.4" files = [ @@ -1806,7 +1761,6 @@ test = ["mypy (>=1.2.0)", "pytest (>=7)"] name = "typer" version = "0.12.3" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1820,11 +1774,24 @@ rich = ">=10.11.0" shellingham = ">=1.3.0" typing-extensions = ">=3.7.4.3" +[[package]] +name = "types-requests" +version = "2.31.0.20240406" +description = "Typing stubs for requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-requests-2.31.0.20240406.tar.gz", hash = "sha256:4428df33c5503945c74b3f42e82b181e86ec7b724620419a2966e2de604ce1a1"}, + {file = "types_requests-2.31.0.20240406-py3-none-any.whl", hash = "sha256:6216cdac377c6b9a040ac1c0404f7284bd13199c0e1bb235f4324627e8898cf5"}, +] + +[package.dependencies] +urllib3 = ">=2" + [[package]] name = "typing-extensions" version = "4.11.0" description = "Backported and Experimental Type Hints for Python 3.8+" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1836,7 +1803,6 @@ files = [ name = "urllib3" version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." 
-category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1852,14 +1818,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.25.1" +version = "20.26.0" description = "Virtual Python Environment builder" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.25.1-py3-none-any.whl", hash = "sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a"}, - {file = "virtualenv-20.25.1.tar.gz", hash = "sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197"}, + {file = "virtualenv-20.26.0-py3-none-any.whl", hash = "sha256:0846377ea76e818daaa3e00a4365c018bc3ac9760cbb3544de542885aad61fb3"}, + {file = "virtualenv-20.26.0.tar.gz", hash = "sha256:ec25a9671a5102c8d2657f62792a27b48f016664c6873f6beed3800008577210"}, ] [package.dependencies] @@ -1868,23 +1833,22 @@ filelock = ">=3.12.2,<4" platformdirs = ">=3.9.1,<5" [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] [[package]] name = "websocket-client" -version = "1.7.0" +version = "1.8.0" description = "WebSocket client for Python with low level API options" -category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "websocket-client-1.7.0.tar.gz", hash = "sha256:10e511ea3a8c744631d3bd77e61eb17ed09304c413ad42cf6ddfa4c7787e8fe6"}, - {file = "websocket_client-1.7.0-py3-none-any.whl", 
hash = "sha256:f4c3d22fec12a2461427a29957ff07d35098ee2d976d3ba244e688b8b4057588"}, + {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, + {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, ] [package.extras] -docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] +docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] optional = ["python-socks", "wsaccel"] test = ["websockets"] @@ -1892,7 +1856,6 @@ test = ["websockets"] name = "werkzeug" version = "3.0.2" description = "The comprehensive WSGI web application library." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1910,7 +1873,6 @@ watchdog = ["watchdog (>=2.3)"] name = "zipp" version = "3.18.1" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1925,4 +1887,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.8,<4.0" -content-hash = "05d72eb6fbef9f24ffecbf1225dee840a747f4e52032388b2ad3a04ade548467" +content-hash = "209db6a037ba462efc3e72fa8352ce4d131bf47576be5939a76e84ca370e3315" diff --git a/pyproject.toml b/pyproject.toml index ff7d3770..5fbf8cd2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,10 +33,11 @@ python = ">=3.8,<4.0" requests = ">=2.24.0" joblib=">=1.0.1" typeguard = "4.0.0" +saas-api=">=0.2.0" [tool.poetry.dev-dependencies] -pyexasol = "^0.24.0" +pyexasol = "^0.25.2" dill = "^0.3.4" exasol-udf-mock-python = "^0.1.0" toml = ">=0.10.2" diff --git a/test/integration/bucketfs_test.py b/test/integration/bucketfs_test.py index 80a253e4..c4ac8398 100644 --- a/test/integration/bucketfs_test.py +++ b/test/integration/bucketfs_test.py @@ -21,6 +21,7 @@ from exasol.bucketfs import ( Bucket, Service, + 
SaaSBucket, as_bytes, as_string, ) @@ -308,3 +309,96 @@ def test_any_log_message_get_emitted(httpserver, caplog): ] # The log level DEBUG should emit at least one log message assert log_records + + +def test_write_bytes_to_saas_bucket(saas_test_service_url, saas_test_token, + saas_test_account_id, saas_test_database_id): + """ + Uploads some bytes into a SaaS bucket file and checks that the file is listed + in the SaaS BucketFS. + """ + bucket = SaaSBucket(url=saas_test_service_url, + account_id=saas_test_account_id, + database_id=saas_test_database_id, + pat=saas_test_token) + + file_name = 'bucketfs_test/test_write_bytes_to_saas_bucket/the_file.dat' + bucket.upload(path=file_name, data=b'abcd12345') + assert file_name in bucket.files + + +def test_write_file_to_saas_bucket(saas_test_service_url, saas_test_token, + saas_test_account_id, saas_test_database_id, + tmpdir): + """ + Uploads a file from a local file system into a SaaS bucket and checks that + the file is listed in the SaaS BucketFS. + """ + bucket = SaaSBucket(url=saas_test_service_url, + account_id=saas_test_account_id, + database_id=saas_test_database_id, + pat=saas_test_token) + + tmp_file = tmpdir / 'the_file.dat' + tmp_file.write_binary(b'abcd12345') + file_name = 'bucketfs_test/test_write_file_to_saas_bucket/the_file.dat' + with open(tmp_file, 'rb') as f: + bucket.upload(path=file_name, data=f) + assert file_name in bucket.files + + +def test_read_bytes_from_saas_bucket(saas_test_service_url, saas_test_token, + saas_test_account_id, saas_test_database_id): + """ + Uploads some bytes into a SaaS bucket file, reads them back and checks that + they are unchanged. + """ + bucket = SaaSBucket(url=saas_test_service_url, + account_id=saas_test_account_id, + database_id=saas_test_database_id, + pat=saas_test_token) + + file_name = 'bucketfs_test/test_read_bytes_from_saas_bucket/the_file.dat' + content = b'A string long enough to be downloaded in chunks.' 
+ bucket.upload(path=file_name, data=content) + received_content = b''.join(bucket.download(file_name, chunk_size=20)) + assert received_content == content + + +def test_read_file_from_saas_bucket(saas_test_service_url, saas_test_token, + saas_test_account_id, saas_test_database_id, + tmpdir): + """ + Uploads a file from a local file system into a SaaS bucket, reads its content + back and checks that it's unchanged. + """ + bucket = SaaSBucket(url=saas_test_service_url, + account_id=saas_test_account_id, + database_id=saas_test_database_id, + pat=saas_test_token) + + content = b'A string long enough to be downloaded in chunks.' + tmp_file = tmpdir / 'the_file.dat' + tmp_file.write_binary(content) + file_name = 'bucketfs_test/test_read_file_from_saas_bucket/the_file.dat' + with open(tmp_file, 'rb') as f: + bucket.upload(path=file_name, data=f) + received_content = b''.join(bucket.download(file_name, chunk_size=20)) + assert received_content == content + + +def test_delete_file_from_saas_bucket(saas_test_service_url, saas_test_token, + saas_test_account_id, saas_test_database_id): + """ + Creates a SaaS bucket file, then deletes it and checks that it is not listed + in the SaaS BucketFS. 
+ """ + bucket = SaaSBucket(url=saas_test_service_url, + account_id=saas_test_account_id, + database_id=saas_test_database_id, + pat=saas_test_token) + + file_name = 'bucketfs_test/test_delete_file_from_saas_bucket/the_file.dat' + bucket.upload(path=file_name, data=b'abcd12345') + bucket.delete(file_name) + assert file_name not in bucket.files diff --git a/test/integration/conftest.py b/test/integration/conftest.py index 01691c8b..0ddf6ffd 100644 --- a/test/integration/conftest.py +++ b/test/integration/conftest.py @@ -7,12 +7,20 @@ Tuple, Union, ) -from exasol.bucketfs._shared import _build_url +import os +import time import pytest import requests from requests.auth import HTTPBasicAuth +from exasol.bucketfs._shared import _build_url +from exasol.saas.client import openapi +from exasol.saas.client.openapi.api.databases.create_database import sync as create_saas_database +from exasol.saas.client.openapi.api.databases.delete_database import sync_detailed as delete_saas_database +from exasol.saas.client.openapi.api.databases.get_database import sync as get_saas_database +from exasol.saas.client.openapi.models.status import Status as SaasStatus + def upload_file( service: str, @@ -124,3 +132,84 @@ def temporary_bucket_files(request) -> Tuple[str, Iterable[File]]: options.bucketfs_password, file.name, ) + + +def create_saas_test_client(url: str, token: str, raise_on_unexpected_status: bool = True): + return openapi.AuthenticatedClient( + base_url=url, + token=token, + raise_on_unexpected_status=raise_on_unexpected_status + ) + + +def create_saas_test_database(account_id, client): + cluster_spec = openapi.models.CreateCluster( + name="my-cluster", + size="XS", + ) + database_spec = openapi.models.CreateDatabase( + name=f"pytest-created-db", + initial_cluster=cluster_spec, + provider="aws", + region='us-east-1', + ) + return create_saas_database( + account_id=account_id, + body=database_spec, + client=client + ) + + +@pytest.fixture(scope='session') +def 
saas_test_service_url() -> str: + return os.environ["SAAS_HOST"] + + +@pytest.fixture(scope='session') +def saas_test_token() -> str: + return os.environ["SAAS_PAT"] + + +@pytest.fixture(scope='session') +def saas_test_account_id() -> str: + return os.environ["SAAS_ACCOUNT_ID"] + + +@pytest.fixture(scope='session') +def saas_test_database_id(saas_test_service_url, saas_test_token, saas_test_account_id) -> str: + with create_saas_test_client( + url=saas_test_service_url, + token=saas_test_token + ) as client: + try: + db = create_saas_test_database( + account_id=saas_test_account_id, + client=client + ) + + # Wait till the database gets to the running state. + sleep_time = 600 + small_interval = 20 + max_wait_time = 2400 + max_cycles = 1 + (max_wait_time - sleep_time) // small_interval + for _ in range(max_cycles): + time.sleep(sleep_time) + db = get_saas_database( + account_id=saas_test_account_id, + database_id=db.id, + client=client + ) + if db.status == SaasStatus.RUNNING: + break + sleep_time = 30 + else: + raise RuntimeError(f'Test SaaS database status is {db.status} ' + f'after {max_wait_time} seconds.') + yield db.id + finally: + if db is not None: + delete_saas_database( + account_id=saas_test_account_id, + database_id=db.id, + client=client + ) diff --git a/test/integration/test_bucket_path.py b/test/integration/test_bucket_path.py index 5982e733..82f94fac 100644 --- a/test/integration/test_bucket_path.py +++ b/test/integration/test_bucket_path.py @@ -109,3 +109,59 @@ def test_write_delete_onprem(test_config, children_poem, classic_poem): test_config.password, str(poem_path) ) + + +def test_write_read_back_saas(saas_test_service_url, saas_test_token, + saas_test_account_id, saas_test_database_id, + children_poem): + + base_path = bfs.path.build_path(backend=bfs.path.StorageBackend.saas, + url=saas_test_service_url, + account_id=saas_test_account_id, + database_id=saas_test_database_id, + pat=saas_test_token) + file_name = 
'test_bucket_path/test_write_read_back_saas/little_star.txt' + poem_path = base_path / file_name + + poem_path.write(children_poem) + data_back = b''.join(poem_path.read(20)) + assert data_back == children_poem + + +def test_write_list_files_saas(saas_test_service_url, saas_test_token, + saas_test_account_id, saas_test_database_id, + children_poem, classic_poem): + + base_path = bfs.path.build_path(backend=bfs.path.StorageBackend.saas, + url=saas_test_service_url, + account_id=saas_test_account_id, + database_id=saas_test_database_id, + pat=saas_test_token, + path='test_bucket_path/test_write_list_files_saas') + poem_path1 = base_path / 'children/little_star.txt' + poem_path2 = base_path / 'classic/highlands.txt' + + poem_path1.write(children_poem) + poem_path2.write(classic_poem) + expected_names = {'children', 'classic', 'little_star.txt', 'highlands.txt'} + assert _collect_all_names(base_path) == expected_names + + +def test_write_delete_saas(saas_test_service_url, saas_test_token, + saas_test_account_id, saas_test_database_id, + children_poem, classic_poem): + + base_path = bfs.path.build_path(backend=bfs.path.StorageBackend.saas, + url=saas_test_service_url, + account_id=saas_test_account_id, + database_id=saas_test_database_id, + pat=saas_test_token) + poems_root = base_path / 'test_bucket_path/test_write_delete_saas' + poem_path1 = poems_root / 'children/little_star.txt' + poem_path2 = poems_root / 'classic/highlands.txt' + + poem_path1.write(children_poem) + poem_path2.write(classic_poem) + poem_path1.rm() + expected_names = {'classic', 'highlands.txt'} + assert _collect_all_names(poems_root) == expected_names From fb16956e9cacbe783763363783fdc773bb52f9fe Mon Sep 17 00:00:00 2001 From: mibe Date: Tue, 30 Apr 2024 10:07:38 +0100 Subject: [PATCH 02/14] #113 Temporarily added the direct dependency on httpx --- poetry.lock | 103 +++++++++++++++++++++++++++++++++++++++++++++---- pyproject.toml | 2 + 2 files changed, 98 insertions(+), 7 deletions(-) diff --git 
a/poetry.lock b/poetry.lock index 7b0069fb..c281edc9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -11,6 +11,28 @@ files = [ {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, ] +[[package]] +name = "anyio" +version = "4.3.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, + {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + [[package]] name = "argcomplete" version = "2.1.2" @@ -566,13 +588,13 @@ test = ["pytest (>=6)"] [[package]] name = "filelock" -version = "3.13.4" +version = "3.14.0" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.13.4-py3-none-any.whl", hash = "sha256:404e5e9253aa60ad457cae1be07c0f0ca90a63931200a47d9b6a6af84fd7b45f"}, - {file = "filelock-3.13.4.tar.gz", hash = "sha256:d13f466618bfde72bd2c18255e269f72542c6e70e7bac83a0232d6b1cc5c8cf4"}, + {file = "filelock-3.14.0-py3-none-any.whl", hash = "sha256:43339835842f110ca7ae60f1e1c160714c5a6afd15a2873419ab185334975c0f"}, + {file = "filelock-3.14.0.tar.gz", hash = "sha256:6ea72da3be9b8c82afd3edcf99f2fffbb5076335a5ae4d03248bb5b6c3eae78a"}, ] [package.extras] @@ -597,6 +619,62 @@ pygments = ">=2.7" sphinx = ">=5.0,<7.0" sphinx-basic-ng = "*" +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "1.0.5" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.26.0)"] + +[[package]] +name = "httpx" +version = "0.27.0" +description = "The next generation HTTP client." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, + {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + [[package]] name = "identify" version = "2.5.36" @@ -1513,6 +1591,17 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + [[package]] name = "snowballstemmer" version = "2.2.0" @@ -1818,13 +1907,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.26.0" +version = "20.26.1" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.26.0-py3-none-any.whl", hash = "sha256:0846377ea76e818daaa3e00a4365c018bc3ac9760cbb3544de542885aad61fb3"}, - {file = "virtualenv-20.26.0.tar.gz", hash = "sha256:ec25a9671a5102c8d2657f62792a27b48f016664c6873f6beed3800008577210"}, + {file = "virtualenv-20.26.1-py3-none-any.whl", hash = "sha256:7aa9982a728ae5892558bff6a2839c00b9ed145523ece2274fad6f414690ae75"}, + {file = "virtualenv-20.26.1.tar.gz", hash = "sha256:604bfdceaeece392802e6ae48e69cec49168b9c5f4a44e483963f9242eb0e78b"}, 
] [package.dependencies] @@ -1887,4 +1976,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.8,<4.0" -content-hash = "209db6a037ba462efc3e72fa8352ce4d131bf47576be5939a76e84ca370e3315" +content-hash = "2af74319ff112279ff2155286b2aa6569225a6dba34deac01989dbc71e985747" diff --git a/pyproject.toml b/pyproject.toml index 5fbf8cd2..7e599d61 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,6 +34,8 @@ requests = ">=2.24.0" joblib=">=1.0.1" typeguard = "4.0.0" saas-api=">=0.2.0" +# Temp.fix +httpx=">=0.27.0" [tool.poetry.dev-dependencies] From ca65786b3c122b4ac538dd44b0b3f998fcbb9432 Mon Sep 17 00:00:00 2001 From: mibe Date: Tue, 30 Apr 2024 10:15:49 +0100 Subject: [PATCH 03/14] #113 Temporarily added the direct dependency on attrs --- poetry.lock | 21 ++++++++++++++++++++- pyproject.toml | 1 + 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index c281edc9..b84fddb1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -62,6 +62,25 @@ files = [ [package.dependencies] typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", 
"pytest (>=4.3.0)", "pytest-xdist[psutil]"] + [[package]] name = "babel" version = "2.14.0" @@ -1976,4 +1995,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.8,<4.0" -content-hash = "2af74319ff112279ff2155286b2aa6569225a6dba34deac01989dbc71e985747" +content-hash = "6b9f52dc9a2d924b0a26daf962e56c5fdf3fab634f3a6f058cf5fe4077ed8c6c" diff --git a/pyproject.toml b/pyproject.toml index 7e599d61..6bab481b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,6 +36,7 @@ typeguard = "4.0.0" saas-api=">=0.2.0" # Temp.fix httpx=">=0.27.0" +attrs=">=23.2.0" [tool.poetry.dev-dependencies] From 3445f521be8102b83c364ab11ebac4f8ecdb6022 Mon Sep 17 00:00:00 2001 From: mibe Date: Tue, 30 Apr 2024 11:01:46 +0100 Subject: [PATCH 04/14] #113 Providing the SaaS CI credentials --- .github/workflows/ci.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0e2839c4..720ae8a9 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -48,4 +48,8 @@ jobs: working-directory: ../integration-test-docker-environment - name: Run Tests + env: + SAAS_HOST: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_HOST }} + SAAS_ACCOUNT_ID: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_ACCOUNT_ID }} + SAAS_PAT: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_PAT }} run: poetry run pytest tests From 66b7cd422fed73f7b64181036ccc537680af007d Mon Sep 17 00:00:00 2001 From: mibe Date: Tue, 30 Apr 2024 12:02:03 +0100 Subject: [PATCH 05/14] #113 Providing the SaaS CI credentials --- .github/workflows/ci-cd.yml | 5 +++++ .github/workflows/ci.yml | 9 +++++---- .github/workflows/pr_merge.yml | 5 +++++ 3 files changed, 15 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index ed4c7dc5..59f6a985 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -5,6 +5,11 @@ on: tags: - '**' +env: + 
SAAS_HOST: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_HOST }} + SAAS_ACCOUNT_ID: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_ACCOUNT_ID }} + SAAS_PAT: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_PAT }} + jobs: check-tag-version-job: diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 720ae8a9..e824c7da 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -13,6 +13,11 @@ on: # “At 00:00 on every 7th day-of-month from 1 through 31.” (https://crontab.guru) - cron: "0 0 1/7 * *" +env: + SAAS_HOST: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_HOST }} + SAAS_ACCOUNT_ID: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_ACCOUNT_ID }} + SAAS_PAT: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_PAT }} + jobs: ci-job: @@ -48,8 +53,4 @@ jobs: working-directory: ../integration-test-docker-environment - name: Run Tests - env: - SAAS_HOST: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_HOST }} - SAAS_ACCOUNT_ID: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_ACCOUNT_ID }} - SAAS_PAT: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_PAT }} run: poetry run pytest tests diff --git a/.github/workflows/pr_merge.yml b/.github/workflows/pr_merge.yml index 406b8ac0..09f4646e 100644 --- a/.github/workflows/pr_merge.yml +++ b/.github/workflows/pr_merge.yml @@ -6,6 +6,11 @@ on: - 'main' - 'master' +env: + SAAS_HOST: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_HOST }} + SAAS_ACCOUNT_ID: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_ACCOUNT_ID }} + SAAS_PAT: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_PAT }} + jobs: ci-job: From 6ecff5650dcf51e765dc9b83431f06a86c2305ea Mon Sep 17 00:00:00 2001 From: mibe Date: Tue, 30 Apr 2024 12:43:26 +0100 Subject: [PATCH 06/14] #113 Providing the SaaS CI credentials --- .github/workflows/ci-cd.yml | 9 ++++----- .github/workflows/ci.yml | 9 ++++----- .github/workflows/pr_merge.yml | 9 ++++----- 3 files changed, 12 insertions(+), 15 deletions(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 59f6a985..f8133da1 100644 --- 
a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -5,11 +5,6 @@ on: tags: - '**' -env: - SAAS_HOST: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_HOST }} - SAAS_ACCOUNT_ID: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_ACCOUNT_ID }} - SAAS_PAT: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_PAT }} - jobs: check-tag-version-job: @@ -19,6 +14,10 @@ jobs: ci-job: name: Checks needs: [ check-tag-version-job ] + env: + SAAS_HOST: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_HOST }} + SAAS_ACCOUNT_ID: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_ACCOUNT_ID }} + SAAS_PAT: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_PAT }} uses: exasol/python-toolbox/.github/workflows/checks.yml@0.3.0 tests-job: diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e824c7da..b6874b24 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -13,15 +13,14 @@ on: # “At 00:00 on every 7th day-of-month from 1 through 31.” (https://crontab.guru) - cron: "0 0 1/7 * *" -env: - SAAS_HOST: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_HOST }} - SAAS_ACCOUNT_ID: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_ACCOUNT_ID }} - SAAS_PAT: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_PAT }} - jobs: ci-job: name: Checks + env: + SAAS_HOST: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_HOST }} + SAAS_ACCOUNT_ID: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_ACCOUNT_ID }} + SAAS_PAT: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_PAT }} uses: exasol/python-toolbox/.github/workflows/checks.yml@0.3.0 tests-job: diff --git a/.github/workflows/pr_merge.yml b/.github/workflows/pr_merge.yml index 09f4646e..6f2ac3f9 100644 --- a/.github/workflows/pr_merge.yml +++ b/.github/workflows/pr_merge.yml @@ -6,15 +6,14 @@ on: - 'main' - 'master' -env: - SAAS_HOST: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_HOST }} - SAAS_ACCOUNT_ID: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_ACCOUNT_ID }} - SAAS_PAT: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_PAT }} - jobs: ci-job: name: Checks + env: + SAAS_HOST: ${{ 
secrets.INTEGRATION_TEAM_SAAS_STAGING_HOST }} + SAAS_ACCOUNT_ID: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_ACCOUNT_ID }} + SAAS_PAT: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_PAT }} uses: exasol/python-toolbox/.github/workflows/checks.yml@0.3.0 tests-job: From 74ed19a3bd9380864126668ece8f890a6ae2f8b6 Mon Sep 17 00:00:00 2001 From: mibe Date: Tue, 30 Apr 2024 13:33:42 +0100 Subject: [PATCH 07/14] #113 Moved SaaS tests to a separate test set --- .github/workflows/ci-cd.yml | 4 -- .github/workflows/ci.yml | 11 ++-- .github/workflows/pr_merge.yml | 4 -- test/integration/bucketfs_test.py | 94 --------------------------- test/integration/conftest.py | 91 +------------------------- test/integration/test_bucket_path.py | 56 ---------------- test_saas/integration/conftest.py | 90 +++++++++++++++++++++++++ test_saas/integration/test_buckets.py | 94 +++++++++++++++++++++++++++ test_saas/integration/test_path.py | 88 +++++++++++++++++++++++++ 9 files changed, 280 insertions(+), 252 deletions(-) create mode 100644 test_saas/integration/conftest.py create mode 100644 test_saas/integration/test_buckets.py create mode 100644 test_saas/integration/test_path.py diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index f8133da1..ed4c7dc5 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -14,10 +14,6 @@ jobs: ci-job: name: Checks needs: [ check-tag-version-job ] - env: - SAAS_HOST: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_HOST }} - SAAS_ACCOUNT_ID: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_ACCOUNT_ID }} - SAAS_PAT: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_PAT }} uses: exasol/python-toolbox/.github/workflows/checks.yml@0.3.0 tests-job: diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b6874b24..8c0b4b98 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -17,10 +17,6 @@ jobs: ci-job: name: Checks - env: - SAAS_HOST: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_HOST }} - SAAS_ACCOUNT_ID: ${{ 
secrets.INTEGRATION_TEAM_SAAS_STAGING_ACCOUNT_ID }} - SAAS_PAT: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_PAT }} uses: exasol/python-toolbox/.github/workflows/checks.yml@0.3.0 tests-job: @@ -53,3 +49,10 @@ jobs: - name: Run Tests run: poetry run pytest tests + + - name: Run SaaS Tests + env: + SAAS_HOST: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_HOST }} + SAAS_ACCOUNT_ID: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_ACCOUNT_ID }} + SAAS_PAT: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_PAT }} + run: poetry run pytest test_saas diff --git a/.github/workflows/pr_merge.yml b/.github/workflows/pr_merge.yml index 6f2ac3f9..406b8ac0 100644 --- a/.github/workflows/pr_merge.yml +++ b/.github/workflows/pr_merge.yml @@ -10,10 +10,6 @@ jobs: ci-job: name: Checks - env: - SAAS_HOST: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_HOST }} - SAAS_ACCOUNT_ID: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_ACCOUNT_ID }} - SAAS_PAT: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_PAT }} uses: exasol/python-toolbox/.github/workflows/checks.yml@0.3.0 tests-job: diff --git a/test/integration/bucketfs_test.py b/test/integration/bucketfs_test.py index c4ac8398..80a253e4 100644 --- a/test/integration/bucketfs_test.py +++ b/test/integration/bucketfs_test.py @@ -21,7 +21,6 @@ from exasol.bucketfs import ( Bucket, Service, - SaaSBucket, as_bytes, as_string, ) @@ -309,96 +308,3 @@ def test_any_log_message_get_emitted(httpserver, caplog): ] # The log level DEBUG should emit at least one log message assert log_records - - -def test_write_bytes_to_saas_bucket(saas_test_service_url, saas_test_token, - saas_test_account_id, saas_test_database_id): - """ - Uploads some bytes into a SaaS bucket file and checks that the file is listed - in the SaaS BucketFS. 
- """ - bucket = SaaSBucket(url=saas_test_service_url, - account_id=saas_test_account_id, - database_id=saas_test_database_id, - pat=saas_test_token) - - file_name = 'bucketfs_test/test_write_bytes_to_saas_bucket/the_file.dat' - bucket.upload(path=file_name, data=b'abcd12345') - assert file_name in bucket.files - - -def test_write_file_to_saas_bucket(saas_test_service_url, saas_test_token, - saas_test_account_id, saas_test_database_id, - tmpdir): - """ - Uploads a file from a local file system into a SaaS bucket and checks that - the file is listed in the SaaS BucketFS. - """ - bucket = SaaSBucket(url=saas_test_service_url, - account_id=saas_test_account_id, - database_id=saas_test_database_id, - pat=saas_test_token) - - tmp_file = tmpdir / 'the_file.dat' - tmp_file.write_binary(b'abcd12345') - file_name = 'bucketfs_test/test_write_file_to_saas_bucket/the_file.dat' - with open(tmp_file, 'rb') as f: - bucket.upload(path=file_name, data=f) - assert file_name in bucket.files - - -def test_read_bytes_from_saas_bucket(saas_test_service_url, saas_test_token, - saas_test_account_id, saas_test_database_id): - """ - Uploads some bytes into a SaaS bucket file, reads them back and checks that - they are unchanged. - """ - bucket = SaaSBucket(url=saas_test_service_url, - account_id=saas_test_account_id, - database_id=saas_test_database_id, - pat=saas_test_token) - - file_name = 'bucketfs_test/test_read_bytes_from_saas_bucket/the_file.dat' - content = b'A string long enough to be downloaded in chunks.' - bucket.upload(path=file_name, data=content) - received_content = b''.join(bucket.download(file_name, chunk_size=20)) - assert received_content == content - - -def test_read_file_from_saas_bucket(saas_test_service_url, saas_test_token, - saas_test_account_id, saas_test_database_id, - tmpdir): - """ - Uploads a file from a local file system into a SaaS bucket, reads its content - back and checks that it's unchanged. 
- """ - bucket = SaaSBucket(url=saas_test_service_url, - account_id=saas_test_account_id, - database_id=saas_test_database_id, - pat=saas_test_token) - - content = b'A string long enough to be downloaded in chunks.' - tmp_file = tmpdir / 'the_file.dat' - tmp_file.write_binary(content) - file_name = 'bucketfs_test/test_read_file_from_saas_bucket/the_file.dat' - with open(tmp_file, 'rb') as f: - bucket.upload(path=file_name, data=f) - received_content = b''.join(bucket.download(file_name, chunk_size=20)) - assert received_content == content - - -def test_delete_file_from_saas_bucket(saas_test_service_url, saas_test_token, - saas_test_account_id, saas_test_database_id): - """ - Creates a SaaS bucket file, then deletes it and checks that it is not listed - in the SaaS BucketFS. - """ - bucket = SaaSBucket(url=saas_test_service_url, - account_id=saas_test_account_id, - database_id=saas_test_database_id, - pat=saas_test_token) - - file_name = 'bucketfs_test/test_delete_file_from_saas_bucket/the_file.dat' - bucket.upload(path=file_name, data=b'abcd12345') - bucket.delete(file_name) - assert file_name not in bucket.files diff --git a/test/integration/conftest.py b/test/integration/conftest.py index 0ddf6ffd..01691c8b 100644 --- a/test/integration/conftest.py +++ b/test/integration/conftest.py @@ -7,20 +7,12 @@ Tuple, Union, ) -import os -import time +from exasol.bucketfs._shared import _build_url import pytest import requests from requests.auth import HTTPBasicAuth -from exasol.bucketfs._shared import _build_url -from exasol.saas.client import openapi -from exasol.saas.client.openapi.api.databases.create_database import sync as create_saas_database -from exasol.saas.client.openapi.api.databases.delete_database import sync_detailed as delete_saas_database -from exasol.saas.client.openapi.api.databases.get_database import sync as get_saas_database -from exasol.saas.client.openapi.models.status import Status as SaasStatus - def upload_file( service: str, @@ -132,84 +124,3 @@ 
def temporary_bucket_files(request) -> Tuple[str, Iterable[File]]: options.bucketfs_password, file.name, ) - - -def create_saas_test_client(url: str, token: str, raise_on_unexpected_status: bool = True): - return openapi.AuthenticatedClient( - base_url=url, - token=token, - raise_on_unexpected_status=raise_on_unexpected_status - ) - - -def create_saas_test_database(account_id, client): - cluster_spec = openapi.models.CreateCluster( - name="my-cluster", - size="XS", - ) - database_spec = openapi.models.CreateDatabase( - name=f"pytest-created-db", - initial_cluster=cluster_spec, - provider="aws", - region='us-east-1', - ) - return create_saas_database( - account_id=account_id, - body=database_spec, - client=client - ) - - -@pytest.fixture(scope='session') -def saas_test_service_url() -> str: - return os.environ["SAAS_HOST"] - - -@pytest.fixture(scope='session') -def saas_test_token() -> str: - return os.environ["SAAS_PAT"] - - -@pytest.fixture(scope='session') -def saas_test_account_id() -> str: - return os.environ["SAAS_ACCOUNT_ID"] - - -@pytest.fixture(scope='session') -def saas_test_database_id(saas_test_service_url, saas_test_token, saas_test_account_id) -> str: - with create_saas_test_client( - url=saas_test_service_url, - token=saas_test_token - ) as client: - try: - db = create_saas_test_database( - account_id=saas_test_account_id, - client=client - ) - - # Wait till the database gets to the running state. 
- sleep_time = 600 - small_interval = 20 - max_wait_time = 2400 - max_cycles = 1 + (max_wait_time - sleep_time) // small_interval - for _ in range(max_cycles): - time.sleep(sleep_time) - db = get_saas_database( - account_id=saas_test_account_id, - database_id=db.id, - client=client - ) - if db.status == SaasStatus.RUNNING: - break - sleep_time = 30 - else: - raise RuntimeError(f'Test SaaS database status is {db.status} ' - f'after {max_wait_time} seconds.') - yield db.id - finally: - if db is not None: - delete_saas_database( - account_id=saas_test_account_id, - database_id=db.id, - client=client - ) diff --git a/test/integration/test_bucket_path.py b/test/integration/test_bucket_path.py index 82f94fac..5982e733 100644 --- a/test/integration/test_bucket_path.py +++ b/test/integration/test_bucket_path.py @@ -109,59 +109,3 @@ def test_write_delete_onprem(test_config, children_poem, classic_poem): test_config.password, str(poem_path) ) - - -def test_write_read_back_saas(saas_test_service_url, saas_test_token, - saas_test_account_id, saas_test_database_id, - children_poem): - - base_path = bfs.path.build_path(backend=bfs.path.StorageBackend.saas, - url=saas_test_service_url, - account_id=saas_test_account_id, - database_id=saas_test_database_id, - pat=saas_test_token) - file_name = 'test_bucket_path/test_write_read_back_saas/little_star.txt' - poem_path = base_path / file_name - - poem_path.write(children_poem) - data_back = b''.join(poem_path.read(20)) - assert data_back == children_poem - - -def test_write_list_files_saas(saas_test_service_url, saas_test_token, - saas_test_account_id, saas_test_database_id, - children_poem, classic_poem): - - base_path = bfs.path.build_path(backend=bfs.path.StorageBackend.saas, - url=saas_test_service_url, - account_id=saas_test_account_id, - database_id=saas_test_database_id, - pat=saas_test_token, - path='test_bucket_path/test_write_list_files_saas') - poem_path1 = base_path / 'children/little_star.txt' - poem_path2 = base_path / 
'classic/highlands.txt' - - poem_path1.write(children_poem) - poem_path2.write(classic_poem) - expected_names = {'children', 'classic', 'little_star.txt', 'highlands.txt'} - assert _collect_all_names(base_path) == expected_names - - -def test_write_delete_saas(saas_test_service_url, saas_test_token, - saas_test_account_id, saas_test_database_id, - children_poem, classic_poem): - - base_path = bfs.path.build_path(backend=bfs.path.StorageBackend.saas, - url=saas_test_service_url, - account_id=saas_test_account_id, - database_id=saas_test_database_id, - pat=saas_test_token) - poems_root = base_path / 'test_bucket_path/test_write_delete_saas' - poem_path1 = poems_root / 'children/little_star.txt' - poem_path2 = poems_root / 'classic/highlands.txt' - - poem_path1.write(children_poem) - poem_path2.write(classic_poem) - poem_path1.rm() - expected_names = {'classic', 'highlands.txt'} - assert _collect_all_names(poems_root) == expected_names diff --git a/test_saas/integration/conftest.py b/test_saas/integration/conftest.py new file mode 100644 index 00000000..5e4ea7fe --- /dev/null +++ b/test_saas/integration/conftest.py @@ -0,0 +1,90 @@ +import os +import time + +import pytest +from exasol.saas.client import openapi +from exasol.saas.client.openapi.api.databases.create_database import sync as create_saas_database +from exasol.saas.client.openapi.api.databases.delete_database import sync_detailed as delete_saas_database +from exasol.saas.client.openapi.api.databases.get_database import sync as get_saas_database +from exasol.saas.client.openapi.models.status import Status as SaasStatus + + +def create_saas_test_client(url: str, token: str, raise_on_unexpected_status: bool = True): + return openapi.AuthenticatedClient( + base_url=url, + token=token, + raise_on_unexpected_status=raise_on_unexpected_status + ) + + +def create_saas_test_database(account_id, client): + cluster_spec = openapi.models.CreateCluster( + name="my-cluster", + size="XS", + ) + database_spec = 
openapi.models.CreateDatabase( + name=f"pytest-created-db", + initial_cluster=cluster_spec, + provider="aws", + region='us-east-1', + ) + return create_saas_database( + account_id=account_id, + body=database_spec, + client=client + ) + + +@pytest.fixture(scope='session') +def saas_test_service_url() -> str: + return os.environ["SAAS_HOST"] + + +@pytest.fixture(scope='session') +def saas_test_token() -> str: + return os.environ["SAAS_PAT"] + + +@pytest.fixture(scope='session') +def saas_test_account_id() -> str: + return os.environ["SAAS_ACCOUNT_ID"] + + +@pytest.fixture(scope='session') +def saas_test_database_id(saas_test_service_url, saas_test_token, saas_test_account_id) -> str: + with create_saas_test_client( + url=saas_test_service_url, + token=saas_test_token + ) as client: + try: + db = create_saas_test_database( + account_id=saas_test_account_id, + client=client + ) + + # Wait till the database gets to the running state. + sleep_time = 600 + small_interval = 20 + max_wait_time = 2400 + max_cycles = 1 + (max_wait_time - sleep_time) // small_interval + for _ in range(max_cycles): + time.sleep(sleep_time) + db = get_saas_database( + account_id=saas_test_account_id, + database_id=db.id, + client=client + ) + if db.status == SaasStatus.RUNNING: + break + sleep_time = 30 + else: + raise RuntimeError(f'Test SaaS database status is {db.status} ' + f'after {max_wait_time} seconds.') + yield db.id + finally: + if db is not None: + delete_saas_database( + account_id=saas_test_account_id, + database_id=db.id, + client=client + ) diff --git a/test_saas/integration/test_buckets.py b/test_saas/integration/test_buckets.py new file mode 100644 index 00000000..c5c1fdeb --- /dev/null +++ b/test_saas/integration/test_buckets.py @@ -0,0 +1,94 @@ +from exasol.bucketfs import SaaSBucket + + +def test_write_bytes_to_saas_bucket(saas_test_service_url, saas_test_token, + saas_test_account_id, saas_test_database_id): + """ + Uploads some bytes into a SaaS bucket file and checks that 
the file is listed + in the SaaS BucketFS. + """ + bucket = SaaSBucket(url=saas_test_service_url, + account_id=saas_test_account_id, + database_id=saas_test_database_id, + pat=saas_test_token) + + file_name = 'bucketfs_test/test_write_bytes_to_saas_bucket/the_file.dat' + bucket.upload(path=file_name, data=b'abcd12345') + assert file_name in bucket.files + + +def test_write_file_to_saas_bucket(saas_test_service_url, saas_test_token, + saas_test_account_id, saas_test_database_id, + tmpdir): + """ + Uploads a file from a local file system into a SaaS bucket and checks that + the file is listed in the SaaS BucketFS. + """ + bucket = SaaSBucket(url=saas_test_service_url, + account_id=saas_test_account_id, + database_id=saas_test_database_id, + pat=saas_test_token) + + tmp_file = tmpdir / 'the_file.dat' + tmp_file.write_binary(b'abcd12345') + file_name = 'bucketfs_test/test_write_file_to_saas_bucket/the_file.dat' + with open(tmp_file, 'rb') as f: + bucket.upload(path=file_name, data=f) + assert file_name in bucket.files + + +def test_read_bytes_from_saas_bucket(saas_test_service_url, saas_test_token, + saas_test_account_id, saas_test_database_id): + """ + Uploads some bytes into a SaaS bucket file, reads them back and checks that + they are unchanged. + """ + bucket = SaaSBucket(url=saas_test_service_url, + account_id=saas_test_account_id, + database_id=saas_test_database_id, + pat=saas_test_token) + + file_name = 'bucketfs_test/test_read_bytes_from_saas_bucket/the_file.dat' + content = b'A string long enough to be downloaded in chunks.' + bucket.upload(path=file_name, data=content) + received_content = b''.join(bucket.download(file_name, chunk_size=20)) + assert received_content == content + + +def test_read_file_from_saas_bucket(saas_test_service_url, saas_test_token, + saas_test_account_id, saas_test_database_id, + tmpdir): + """ + Uploads a file from a local file system into a SaaS bucket, reads its content + back and checks that it's unchanged. 
+ """ + bucket = SaaSBucket(url=saas_test_service_url, + account_id=saas_test_account_id, + database_id=saas_test_database_id, + pat=saas_test_token) + + content = b'A string long enough to be downloaded in chunks.' + tmp_file = tmpdir / 'the_file.dat' + tmp_file.write_binary(content) + file_name = 'bucketfs_test/test_read_file_from_saas_bucket/the_file.dat' + with open(tmp_file, 'rb') as f: + bucket.upload(path=file_name, data=f) + received_content = b''.join(bucket.download(file_name, chunk_size=20)) + assert received_content == content + + +def test_delete_file_from_saas_bucket(saas_test_service_url, saas_test_token, + saas_test_account_id, saas_test_database_id): + """ + Creates a SaaS bucket file, then deletes it and checks that it is not listed + in the SaaS BucketFS. + """ + bucket = SaaSBucket(url=saas_test_service_url, + account_id=saas_test_account_id, + database_id=saas_test_database_id, + pat=saas_test_token) + + file_name = 'bucketfs_test/test_delete_file_from_saas_bucket/the_file.dat' + bucket.upload(path=file_name, data=b'abcd12345') + bucket.delete(file_name) + assert file_name not in bucket.files diff --git a/test_saas/integration/test_path.py b/test_saas/integration/test_path.py new file mode 100644 index 00000000..51bbe151 --- /dev/null +++ b/test_saas/integration/test_path.py @@ -0,0 +1,88 @@ +from __future__ import annotations +from typing import ByteString +import pytest +import exasol.bucketfs as bfs + + +@pytest.fixture +def children_poem() -> ByteString: + poem_text = \ + b"Twinkle twinkle little star." \ + b"How I wonder what you are." \ + b"Up above the world so high." \ + b"Like a diamond in the sky." + return poem_text + + +@pytest.fixture +def classic_poem() -> ByteString: + poem_text = \ + b"My heart's in the Highlands, my heart is not here," \ + b"My heart's in the Highlands, a-chasing the deer;" \ + b"Chasing the wild-deer, and following the roe," \ + b"My heart's in the Highlands, wherever I go." 
+ return poem_text + + +def _collect_all_names(path: bfs.path.PathLike) -> set[str]: + all_names = [] + for _, dirs, files in path.walk(): + all_names.extend(dirs) + all_names.extend(files) + return set(all_names) + + +def test_write_read_back_saas(saas_test_service_url, saas_test_token, + saas_test_account_id, saas_test_database_id, + children_poem): + + base_path = bfs.path.build_path(backend=bfs.path.StorageBackend.saas, + url=saas_test_service_url, + account_id=saas_test_account_id, + database_id=saas_test_database_id, + pat=saas_test_token) + file_name = 'test_bucket_path/test_write_read_back_saas/little_star.txt' + poem_path = base_path / file_name + + poem_path.write(children_poem) + data_back = b''.join(poem_path.read(20)) + assert data_back == children_poem + + +def test_write_list_files_saas(saas_test_service_url, saas_test_token, + saas_test_account_id, saas_test_database_id, + children_poem, classic_poem): + + base_path = bfs.path.build_path(backend=bfs.path.StorageBackend.saas, + url=saas_test_service_url, + account_id=saas_test_account_id, + database_id=saas_test_database_id, + pat=saas_test_token, + path='test_bucket_path/test_write_list_files_saas') + poem_path1 = base_path / 'children/little_star.txt' + poem_path2 = base_path / 'classic/highlands.txt' + + poem_path1.write(children_poem) + poem_path2.write(classic_poem) + expected_names = {'children', 'classic', 'little_star.txt', 'highlands.txt'} + assert _collect_all_names(base_path) == expected_names + + +def test_write_delete_saas(saas_test_service_url, saas_test_token, + saas_test_account_id, saas_test_database_id, + children_poem, classic_poem): + + base_path = bfs.path.build_path(backend=bfs.path.StorageBackend.saas, + url=saas_test_service_url, + account_id=saas_test_account_id, + database_id=saas_test_database_id, + pat=saas_test_token) + poems_root = base_path / 'test_bucket_path/test_write_delete_saas' + poem_path1 = poems_root / 'children/little_star.txt' + poem_path2 = poems_root / 
'classic/highlands.txt' + + poem_path1.write(children_poem) + poem_path2.write(classic_poem) + poem_path1.rm() + expected_names = {'classic', 'highlands.txt'} + assert _collect_all_names(poems_root) == expected_names From 5da55caf6a0aec338c8e65ff2c25ab9d8afaa93b Mon Sep 17 00:00:00 2001 From: mibe Date: Tue, 30 Apr 2024 13:51:43 +0100 Subject: [PATCH 08/14] #113 Fixed the saas_test_service_url --- test_saas/integration/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test_saas/integration/conftest.py b/test_saas/integration/conftest.py index 5e4ea7fe..87b970a9 100644 --- a/test_saas/integration/conftest.py +++ b/test_saas/integration/conftest.py @@ -37,7 +37,7 @@ def create_saas_test_database(account_id, client): @pytest.fixture(scope='session') def saas_test_service_url() -> str: - return os.environ["SAAS_HOST"] + return f'https://{os.environ["SAAS_HOST"]}' @pytest.fixture(scope='session') From d0077ebbcdc280fb7066e31726051e9124a4b358 Mon Sep 17 00:00:00 2001 From: mibe Date: Tue, 30 Apr 2024 14:39:18 +0100 Subject: [PATCH 09/14] #113 Investigating the SaaS test error --- test_saas/integration/conftest.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/test_saas/integration/conftest.py b/test_saas/integration/conftest.py index 87b970a9..8d1dec7d 100644 --- a/test_saas/integration/conftest.py +++ b/test_saas/integration/conftest.py @@ -1,3 +1,4 @@ +from typing import Optional import os import time @@ -9,7 +10,10 @@ from exasol.saas.client.openapi.models.status import Status as SaasStatus -def create_saas_test_client(url: str, token: str, raise_on_unexpected_status: bool = True): +def create_saas_test_client(url: str, + token: str, + raise_on_unexpected_status: bool = True + ) -> openapi.AuthenticatedClient: return openapi.AuthenticatedClient( base_url=url, token=token, @@ -17,7 +21,9 @@ def create_saas_test_client(url: str, token: str, raise_on_unexpected_status: bo ) -def 
create_saas_test_database(account_id, client): +def create_saas_test_database(account_id: str, + client: openapi.AuthenticatedClient + ) -> Optional[openapi.models.database.Database]: cluster_spec = openapi.models.CreateCluster( name="my-cluster", size="XS", @@ -52,16 +58,17 @@ def saas_test_account_id() -> str: @pytest.fixture(scope='session') def saas_test_database_id(saas_test_service_url, saas_test_token, saas_test_account_id) -> str: + with create_saas_test_client( url=saas_test_service_url, token=saas_test_token ) as client: + db: Optional[openapi.models.database.Database] = None try: db = create_saas_test_database( account_id=saas_test_account_id, client=client ) - # Wait till the database gets to the running state. sleep_time = 600 small_interval = 20 @@ -81,6 +88,9 @@ def saas_test_database_id(saas_test_service_url, saas_test_token, saas_test_acco raise RuntimeError(f'Test SaaS database status is {db.status} ' f'after {max_wait_time} seconds.') yield db.id + except Exception as ex: + raise RuntimeError(f'Failed to create a database at {saas_test_service_url}. 
' + f'Got an exception {ex}') finally: if db is not None: delete_saas_database( From 9df023a0df572a50826a8963f00ce7afa16f6101 Mon Sep 17 00:00:00 2001 From: ckunki Date: Tue, 30 Apr 2024 16:38:55 +0200 Subject: [PATCH 10/14] experiment workflow --- .github/workflows/ci.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8c0b4b98..9c7757c2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -28,6 +28,16 @@ jobs: python-version: ["3.8", "3.9", "3.10"] steps: + - name: Check Environment + run: | + echo A = $($SECRET_A | base64) + echo SAAS_HOST = $(echo $SAAS_HOST | base64) + env: + SECRET_A: ${{ secrets.SECRET_A }} + SAAS_HOST: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_HOST }} + SAAS_ACCOUNT_ID: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_ACCOUNT_ID }} + SAAS_PAT: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_PAT }} + - uses: actions/checkout@v2 - uses: actions/setup-python@v5 with: From 2296f8aa8bc0d2331a104f054ba55bdf3da0a77c Mon Sep 17 00:00:00 2001 From: ckunki Date: Tue, 30 Apr 2024 16:49:25 +0200 Subject: [PATCH 11/14] Fixed test setup --- .github/workflows/ci.yml | 23 +++++++---------------- test_saas/integration/conftest.py | 6 +++--- 2 files changed, 10 insertions(+), 19 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9c7757c2..dc0447e5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -28,16 +28,6 @@ jobs: python-version: ["3.8", "3.9", "3.10"] steps: - - name: Check Environment - run: | - echo A = $($SECRET_A | base64) - echo SAAS_HOST = $(echo $SAAS_HOST | base64) - env: - SECRET_A: ${{ secrets.SECRET_A }} - SAAS_HOST: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_HOST }} - SAAS_ACCOUNT_ID: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_ACCOUNT_ID }} - SAAS_PAT: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_PAT }} - - uses: actions/checkout@v2 - uses: actions/setup-python@v5 with: @@ -49,6 +39,13 @@ jobs: - name: Install 
Project run: poetry install + - name: Run SaaS Tests + env: + SAAS_HOST: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_HOST }} + SAAS_ACCOUNT_ID: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_ACCOUNT_ID }} + SAAS_PAT: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_PAT }} + run: poetry run pytest test_saas + - name: Checkout ITDE run: git clone https://github.com/exasol/integration-test-docker-environment.git working-directory: .. @@ -60,9 +57,3 @@ jobs: - name: Run Tests run: poetry run pytest tests - - name: Run SaaS Tests - env: - SAAS_HOST: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_HOST }} - SAAS_ACCOUNT_ID: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_ACCOUNT_ID }} - SAAS_PAT: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_PAT }} - run: poetry run pytest test_saas diff --git a/test_saas/integration/conftest.py b/test_saas/integration/conftest.py index 8d1dec7d..67e36108 100644 --- a/test_saas/integration/conftest.py +++ b/test_saas/integration/conftest.py @@ -43,7 +43,7 @@ def create_saas_test_database(account_id: str, @pytest.fixture(scope='session') def saas_test_service_url() -> str: - return f'https://{os.environ["SAAS_HOST"]}' + return os.environ["SAAS_HOST"] @pytest.fixture(scope='session') @@ -85,11 +85,11 @@ def saas_test_database_id(saas_test_service_url, saas_test_token, saas_test_acco break sleep_time = 30 else: - raise RuntimeError(f'Test SaaS database status is {db.status} ' + raise RuntimeError(f'Test SaaS database status is {db.status} ' f'after {max_wait_time} seconds.') yield db.id except Exception as ex: - raise RuntimeError(f'Failed to create a database at {saas_test_service_url}. ' + raise RuntimeError(f'Failed to create a database at {saas_test_service_url}. 
' f'Got an exception {ex}') finally: if db is not None: delete_saas_database( From f8edf86b51951f40026f7d70915987b6311d727b Mon Sep 17 00:00:00 2001 From: mibe Date: Tue, 30 Apr 2024 16:34:24 +0100 Subject: [PATCH 12/14] #113 Cleaned the saas conftest.py --- test_saas/integration/conftest.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/test_saas/integration/conftest.py b/test_saas/integration/conftest.py index 67e36108..ce19f3eb 100644 --- a/test_saas/integration/conftest.py +++ b/test_saas/integration/conftest.py @@ -88,9 +88,6 @@ def saas_test_database_id(saas_test_service_url, saas_test_token, saas_test_acco raise RuntimeError(f'Test SaaS database status is {db.status} ' f'after {max_wait_time} seconds.') yield db.id - except Exception as ex: - raise RuntimeError(f'Failed to create a database at {saas_test_service_url}. ' - f'Got an exception {ex}') finally: if db is not None: delete_saas_database( From a630c231d86e52b1fb70ce3c4e3a6c10a9a73c4e Mon Sep 17 00:00:00 2001 From: mibe Date: Thu, 2 May 2024 09:05:35 +0100 Subject: [PATCH 13/14] #113 Addressed issues found in the review. 
--- exasol/bucketfs/_buckets.py | 26 ++++++++++++-------------- test/integration/test_bucket_path.py | 26 ++++++++++++++------------ test_saas/integration/test_path.py | 26 ++++++++++++++------------ 3 files changed, 40 insertions(+), 38 deletions(-) diff --git a/exasol/bucketfs/_buckets.py b/exasol/bucketfs/_buckets.py index d11d2ae3..a82a0b19 100644 --- a/exasol/bucketfs/_buckets.py +++ b/exasol/bucketfs/_buckets.py @@ -17,6 +17,7 @@ import requests from requests import HTTPError from requests.auth import HTTPBasicAuth +from urllib.parse import quote_plus from exasol.saas.client.openapi.client import AuthenticatedClient as SaasAuthenticatedClient from exasol.saas.client.openapi.models.file import File as SaasFile @@ -184,7 +185,7 @@ def _auth(self) -> HTTPBasicAuth: @property def files(self) -> Iterable[str]: url = _build_url(service_url=self._service, bucket=self.name) - LOGGER.info(f"Retrieving bucket listing for {self.name}.") + LOGGER.info("Retrieving bucket listing for {0}.", self.name) response = requests.get(url, auth=self._auth, verify=self._verify) try: response.raise_for_status() @@ -208,7 +209,7 @@ def upload( data: raw content of the file. """ url = _build_url(service_url=self._service, bucket=self.name, path=path) - LOGGER.info(f"Uploading {path} to bucket {self.name}.") + LOGGER.info("Uploading {0} to bucket {1}.", path, self.name) response = requests.put(url, data=data, auth=self._auth, verify=self._verify) try: response.raise_for_status() @@ -226,7 +227,7 @@ def delete(self, path) -> None: A BucketFsError if the operation couldn't be executed successfully. 
""" url = _build_url(service_url=self._service, bucket=self.name, path=path) - LOGGER.info(f"Deleting {path} from bucket {self.name}.") + LOGGER.info("Deleting {0} from bucket {1}.", path, self.name) response = requests.delete(url, auth=self._auth, verify=self._verify) try: @@ -247,7 +248,8 @@ def download(self, path: str, chunk_size: int = 8192) -> Iterable[ByteString]: """ url = _build_url(service_url=self._service, bucket=self.name, path=path) LOGGER.info( - f"Downloading {path} using a chunk size of {chunk_size} bytes from bucket {self.name}." + "Downloading {0} using a chunk size of {1} bytes from bucket {2}.", + path, chunk_size, self.name ) with requests.get( url, stream=True, auth=self._auth, verify=self._verify @@ -260,10 +262,6 @@ def download(self, path: str, chunk_size: int = 8192) -> Iterable[ByteString]: yield from response.iter_content(chunk_size=chunk_size) -def _to_path_in_url(path: str) -> str: - return path.replace('/', '%2F') - - class SaaSBucket: def __init__(self, url: str, account_id: str, database_id: str, pat: str) -> None: @@ -305,17 +303,17 @@ def recursive_file_collector(node: SaasFile) -> None: return file_list def delete(self, path: str) -> None: - LOGGER.info(f"Deleting {path} from the bucket.") + LOGGER.info("Deleting {0} from the bucket.", path) with SaasAuthenticatedClient(base_url=self._url, token=self._pat, raise_on_unexpected_status=True) as client: saas_delete_file(account_id=self._account_id, database_id=self._database_id, - key=_to_path_in_url(path), + key=quote_plus(path), client=client) def upload(self, path: str, data: ByteString | BinaryIO) -> None: - LOGGER.info(f"Uploading {path} to the bucket.") + LOGGER.info("Uploading {0} to the bucket.", path) # Q. The service can handle any characters in the path. # Do we need to check this path for presence of characters deemed # invalid in the BucketLike protocol? 
@@ -324,7 +322,7 @@ def upload(self, path: str, data: ByteString | BinaryIO) -> None: raise_on_unexpected_status=False) as client: response = saas_upload_file(account_id=self._account_id, database_id=self._database_id, - key=_to_path_in_url(path), + key=quote_plus(path), client=client) if response.status_code >= 400: # Q. Is it the right type of exception? @@ -336,13 +334,13 @@ def upload(self, path: str, data: ByteString | BinaryIO) -> None: response.raise_for_status() def download(self, path: str, chunk_size: int = 8192) -> Iterable[ByteString]: - LOGGER.info(f"Downloading {path} from the bucket.") + LOGGER.info("Downloading {0} from the bucket.", path) with SaasAuthenticatedClient(base_url=self._url, token=self._pat, raise_on_unexpected_status=False) as client: response = saas_download_file(account_id=self._account_id, database_id=self._database_id, - key=_to_path_in_url(path), + key=quote_plus(path), client=client) if response.status_code == 404: raise BucketFsError("The file {path} doesn't exist in the SaaS BucketFs.") diff --git a/test/integration/test_bucket_path.py b/test/integration/test_bucket_path.py index 5982e733..1afc8c65 100644 --- a/test/integration/test_bucket_path.py +++ b/test/integration/test_bucket_path.py @@ -7,30 +7,32 @@ @pytest.fixture def children_poem() -> ByteString: - poem_text = \ - b"Twinkle twinkle little star." \ - b"How I wonder what you are." \ - b"Up above the world so high." \ + poem_text = ( + b"Twinkle twinkle little star." + b"How I wonder what you are." + b"Up above the world so high." b"Like a diamond in the sky." 
+ ) return poem_text @pytest.fixture def classic_poem() -> ByteString: - poem_text = \ - b"My heart's in the Highlands, my heart is not here," \ - b"My heart's in the Highlands, a-chasing the deer;" \ - b"Chasing the wild-deer, and following the roe," \ + poem_text = ( + b"My heart's in the Highlands, my heart is not here," + b"My heart's in the Highlands, a-chasing the deer;" + b"Chasing the wild-deer, and following the roe," b"My heart's in the Highlands, wherever I go." + ) return poem_text def _collect_all_names(path: bfs.path.PathLike) -> set[str]: - all_names = [] + all_names = set() for _, dirs, files in path.walk(): - all_names.extend(dirs) - all_names.extend(files) - return set(all_names) + all_names.update(dirs) + all_names.update(files) + return all_names def test_write_read_back_onprem(test_config, children_poem): diff --git a/test_saas/integration/test_path.py b/test_saas/integration/test_path.py index 51bbe151..de7b3f2b 100644 --- a/test_saas/integration/test_path.py +++ b/test_saas/integration/test_path.py @@ -6,30 +6,32 @@ @pytest.fixture def children_poem() -> ByteString: - poem_text = \ - b"Twinkle twinkle little star." \ - b"How I wonder what you are." \ - b"Up above the world so high." \ + poem_text = ( + b"Twinkle twinkle little star." + b"How I wonder what you are." + b"Up above the world so high." b"Like a diamond in the sky." + ) return poem_text @pytest.fixture def classic_poem() -> ByteString: - poem_text = \ - b"My heart's in the Highlands, my heart is not here," \ - b"My heart's in the Highlands, a-chasing the deer;" \ - b"Chasing the wild-deer, and following the roe," \ + poem_text = ( + b"My heart's in the Highlands, my heart is not here," + b"My heart's in the Highlands, a-chasing the deer;" + b"Chasing the wild-deer, and following the roe," b"My heart's in the Highlands, wherever I go." 
+ ) return poem_text def _collect_all_names(path: bfs.path.PathLike) -> set[str]: - all_names = [] + all_names = set() for _, dirs, files in path.walk(): - all_names.extend(dirs) - all_names.extend(files) - return set(all_names) + all_names.update(dirs) + all_names.update(files) + return all_names def test_write_read_back_saas(saas_test_service_url, saas_test_token, From 0e25849df820d46ccc92ace3b7a636256f1ac87e Mon Sep 17 00:00:00 2001 From: mibe Date: Thu, 2 May 2024 10:30:05 +0100 Subject: [PATCH 14/14] #113 Changed string formatting in logging. --- exasol/bucketfs/_buckets.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/exasol/bucketfs/_buckets.py b/exasol/bucketfs/_buckets.py index a82a0b19..3d98ad15 100644 --- a/exasol/bucketfs/_buckets.py +++ b/exasol/bucketfs/_buckets.py @@ -185,7 +185,7 @@ def _auth(self) -> HTTPBasicAuth: @property def files(self) -> Iterable[str]: url = _build_url(service_url=self._service, bucket=self.name) - LOGGER.info("Retrieving bucket listing for {0}.", self.name) + LOGGER.info("Retrieving bucket listing for %s.", self.name) response = requests.get(url, auth=self._auth, verify=self._verify) try: response.raise_for_status() @@ -209,7 +209,7 @@ def upload( data: raw content of the file. """ url = _build_url(service_url=self._service, bucket=self.name, path=path) - LOGGER.info("Uploading {0} to bucket {1}.", path, self.name) + LOGGER.info("Uploading %s to bucket %s.", path, self.name) response = requests.put(url, data=data, auth=self._auth, verify=self._verify) try: response.raise_for_status() @@ -227,7 +227,7 @@ def delete(self, path) -> None: A BucketFsError if the operation couldn't be executed successfully. 
""" url = _build_url(service_url=self._service, bucket=self.name, path=path) - LOGGER.info("Deleting {0} from bucket {1}.", path, self.name) + LOGGER.info("Deleting %s from bucket %s.", path, self.name) response = requests.delete(url, auth=self._auth, verify=self._verify) try: @@ -248,7 +248,7 @@ def download(self, path: str, chunk_size: int = 8192) -> Iterable[ByteString]: """ url = _build_url(service_url=self._service, bucket=self.name, path=path) LOGGER.info( - "Downloading {0} using a chunk size of {1} bytes from bucket {2}.", + "Downloading %s using a chunk size of %d bytes from bucket %s.", path, chunk_size, self.name ) with requests.get( @@ -303,7 +303,7 @@ def recursive_file_collector(node: SaasFile) -> None: return file_list def delete(self, path: str) -> None: - LOGGER.info("Deleting {0} from the bucket.", path) + LOGGER.info("Deleting %s from the bucket.", path) with SaasAuthenticatedClient(base_url=self._url, token=self._pat, raise_on_unexpected_status=True) as client: @@ -313,7 +313,7 @@ def delete(self, path: str) -> None: client=client) def upload(self, path: str, data: ByteString | BinaryIO) -> None: - LOGGER.info("Uploading {0} to the bucket.", path) + LOGGER.info("Uploading %s to the bucket.", path) # Q. The service can handle any characters in the path. # Do we need to check this path for presence of characters deemed # invalid in the BucketLike protocol? @@ -334,7 +334,7 @@ def upload(self, path: str, data: ByteString | BinaryIO) -> None: response.raise_for_status() def download(self, path: str, chunk_size: int = 8192) -> Iterable[ByteString]: - LOGGER.info("Downloading {0} from the bucket.", path) + LOGGER.info("Downloading %s from the bucket.", path) with SaasAuthenticatedClient(base_url=self._url, token=self._pat, raise_on_unexpected_status=False) as client: