diff --git a/tests/ci_tests/test_deploying_autopilot.py b/tests/ci_tests/test_deploying_autopilot.py index 4f6e348..6fbc9d4 100644 --- a/tests/ci_tests/test_deploying_autopilot.py +++ b/tests/ci_tests/test_deploying_autopilot.py @@ -2,7 +2,6 @@ from datetime import datetime import pytest -import exasol.bucketfs as bfs from tests.fixtures.prepare_environment_fixture import CITestEnvironment from tests.ci_tests.utils import parameters @@ -54,11 +53,8 @@ def _deploy_endpoint(job_name, endpoint_name, model_setup_params, ci_test_env: C assert endpoint_name in list(map(lambda x: x[0], all_scripts)) -@pytest.mark.parametrize("db_conn,deploy_params", [ - (bfs.path.StorageBackend.onprem, bfs.path.StorageBackend.onprem), - (bfs.path.StorageBackend.saas, bfs.path.StorageBackend.saas) -], indirect=True) -def test_deploy_autopilot_endpoint(db_conn, deploy_params, prepare_ci_test_environment): +@pytest.mark.slow +def test_deploy_autopilot_endpoint(prepare_ci_test_environment): curr_datetime = datetime.now().strftime("%y%m%d%H%M%S") model_name = ''.join((cls_model_setup_params.model_type, curr_datetime)) job_name = ''.join((model_name, 'job')) diff --git a/tests/ci_tests/test_polling_autopilot.py b/tests/ci_tests/test_polling_autopilot.py index af8e67c..96a824e 100644 --- a/tests/ci_tests/test_polling_autopilot.py +++ b/tests/ci_tests/test_polling_autopilot.py @@ -1,18 +1,14 @@ from datetime import datetime import pytest -import exasol.bucketfs as bfs from tests.ci_tests.utils.autopilot_polling import AutopilotTestPolling from tests.ci_tests.utils.autopilot_training import AutopilotTestTraining from tests.ci_tests.utils.parameters import cls_model_setup_params -@pytest.mark.parametrize("db_conn,deploy_params", [ - (bfs.path.StorageBackend.onprem, bfs.path.StorageBackend.onprem), - (bfs.path.StorageBackend.saas, bfs.path.StorageBackend.saas) -], indirect=True) -def test_poll_autopilot_job(db_conn, deploy_params, prepare_ci_test_environment): +@pytest.mark.slow +def 
test_poll_autopilot_job(prepare_ci_test_environment): curr_datetime = datetime.now().strftime("%y%m%d%H%M%S") model_name = ''.join((cls_model_setup_params.model_type, curr_datetime)) job_name = ''.join((model_name, 'job')) diff --git a/tests/ci_tests/test_predicting_autopilot.py b/tests/ci_tests/test_predicting_autopilot.py index 98871ee..b17c83c 100644 --- a/tests/ci_tests/test_predicting_autopilot.py +++ b/tests/ci_tests/test_predicting_autopilot.py @@ -2,7 +2,6 @@ from datetime import datetime import pytest -import exasol.bucketfs as bfs from tests.fixtures.prepare_environment_fixture import CITestEnvironment from tests.ci_tests.utils import parameters @@ -55,11 +54,8 @@ def _make_prediction(job_name, endpoint_name, model_setup_params, ci_test_env: C assert predictions -@pytest.mark.parametrize("db_conn,deploy_params", [ - (bfs.path.StorageBackend.onprem, bfs.path.StorageBackend.onprem), - (bfs.path.StorageBackend.saas, bfs.path.StorageBackend.saas) -], indirect=True) -def test_predict_autopilot_regression_job(db_conn, deploy_params, prepare_ci_test_environment): +@pytest.mark.slow +def test_predict_autopilot_regression_job(prepare_ci_test_environment): curr_datetime = datetime.now().strftime("%y%m%d%H%M%S") model_name = ''.join((reg_model_setup_params.model_type, curr_datetime)) job_name = ''.join((model_name, 'job')) @@ -77,11 +73,8 @@ def test_predict_autopilot_regression_job(db_conn, deploy_params, prepare_ci_tes db_conn=prepare_ci_test_environment) -@pytest.mark.parametrize("db_conn,deploy_params", [ - (bfs.path.StorageBackend.onprem, bfs.path.StorageBackend.onprem), - (bfs.path.StorageBackend.saas, bfs.path.StorageBackend.saas) -], indirect=True) -def test_predict_autopilot_classification_job(db_conn, deploy_params, prepare_ci_test_environment): +@pytest.mark.slow +def test_predict_autopilot_classification_job(prepare_ci_test_environment): curr_datetime = datetime.now().strftime("%y%m%d%H%M%S") model_name = ''.join((cls_model_setup_params.model_type, 
curr_datetime)) job_name = ''.join((model_name, 'job')) diff --git a/tests/ci_tests/test_training_autopilot.py b/tests/ci_tests/test_training_autopilot.py index 9eb6fe7..291fe3d 100644 --- a/tests/ci_tests/test_training_autopilot.py +++ b/tests/ci_tests/test_training_autopilot.py @@ -1,7 +1,6 @@ from datetime import datetime import pytest -import exasol.bucketfs as bfs from tests.fixtures.prepare_environment_fixture import CITestEnvironment from tests.ci_tests.utils.autopilot_training import AutopilotTestTraining @@ -10,11 +9,8 @@ from tests.ci_tests.utils.queries import DatabaseQueries -@pytest.mark.parametrize("db_conn,deploy_params", [ - (bfs.path.StorageBackend.onprem, bfs.path.StorageBackend.onprem), - (bfs.path.StorageBackend.saas, bfs.path.StorageBackend.saas) -], indirect=True) -def test_train_autopilot_regression_job(db_conn, deploy_params, prepare_ci_test_environment): +@pytest.mark.slow +def test_train_autopilot_regression_job(prepare_ci_test_environment): curr_datetime = datetime.now().strftime("%y%m%d%H%M%S") model_name = ''.join((reg_model_setup_params.model_type, curr_datetime)) job_name = ''.join((model_name, 'job')) @@ -27,11 +23,8 @@ def test_train_autopilot_regression_job(db_conn, deploy_params, prepare_ci_test_ job_name, reg_model_setup_params, prepare_ci_test_environment) -@pytest.mark.parametrize("db_conn,deploy_params", [ - (bfs.path.StorageBackend.onprem, bfs.path.StorageBackend.onprem), - (bfs.path.StorageBackend.saas, bfs.path.StorageBackend.saas) -], indirect=True) -def test_train_autopilot_classification_job(db_conn, deploy_params, prepare_ci_test_environment): +@pytest.mark.slow +def test_train_autopilot_classification_job(prepare_ci_test_environment): curr_datetime = datetime.now().strftime("%y%m%d%H%M%S") model_name = ''.join((cls_model_setup_params.model_type, curr_datetime)) job_name = ''.join((model_name, 'job')) diff --git a/tests/deployment/test_deploy_cli.py b/tests/deployment/test_deploy_cli.py index 2536eb1..f69b5d5 100644 --- 
a/tests/deployment/test_deploy_cli.py +++ b/tests/deployment/test_deploy_cli.py @@ -1,6 +1,5 @@ import pytest from click.testing import CliRunner -import exasol.bucketfs as bfs from exasol_sagemaker_extension.deployment import deploy_cli from tests.ci_tests.utils.parameters import get_deploy_arg_list @@ -37,10 +36,7 @@ def get_all_scripts(db_conn): return list(map(lambda x: x[0], all_scripts)) -@pytest.mark.parametrize("db_conn,deploy_params", [ - (bfs.path.StorageBackend.onprem, bfs.path.StorageBackend.onprem), - (bfs.path.StorageBackend.saas, bfs.path.StorageBackend.saas) -], indirect=True) +@pytest.mark.slow def test_deploy_cli_main(db_conn, deploy_params): args_list = get_deploy_arg_list(deploy_params, DB_SCHEMA) diff --git a/tests/deployment/test_deploy_create_statements.py b/tests/deployment/test_deploy_create_statements.py index 5c41a47..6ee21ea 100644 --- a/tests/deployment/test_deploy_create_statements.py +++ b/tests/deployment/test_deploy_create_statements.py @@ -1,5 +1,4 @@ import pytest -import exasol.bucketfs as bfs from exasol_sagemaker_extension.deployment.deploy_create_statements import \ DeployCreateStatements @@ -28,10 +27,7 @@ def get_all_scripts(db_conn): return list(map(lambda x: x[0], all_scripts)) -@pytest.mark.parametrize("db_conn,deploy_params", [ - (bfs.path.StorageBackend.onprem, bfs.path.StorageBackend.onprem), - (bfs.path.StorageBackend.saas, bfs.path.StorageBackend.saas) -], indirect=True) +@pytest.mark.slow def test_deploy_create_statements(db_conn, deploy_params): # We validate the server certificate in SaaS, but not in the Docker DB diff --git a/tests/fixtures/database_connection_fixture.py b/tests/fixtures/database_connection_fixture.py index 6c9b192..6a5dbe9 100644 --- a/tests/fixtures/database_connection_fixture.py +++ b/tests/fixtures/database_connection_fixture.py @@ -33,13 +33,23 @@ def _open_pyexasol_connection(**kwargs) -> pyexasol.ExaConnection: compression=True) +@pytest.fixture(scope='session', 
params=[bfs.path.StorageBackend.onprem, bfs.path.StorageBackend.saas]) +def backend(request) -> bfs.path.StorageBackend: + # Here we are going to add + # pytest.skip() + # if there is an instruction to skip a particular backend in the command line. + return request.param + + @pytest.fixture(scope="session") -def db_conn_onprem() -> pyexasol.ExaConnection: - conn = _open_pyexasol_connection(dsn=f"{db_params.host}:{db_params.port}", - user=db_params.user, - password=db_params.password) - upload_language_container_onprem(db_conn=conn) - return conn +def db_conn_onprem(backend) -> pyexasol.ExaConnection | None: + if backend == bfs.path.StorageBackend.onprem: + conn = _open_pyexasol_connection(dsn=f"{db_params.host}:{db_params.port}", + user=db_params.user, + password=db_params.password) + upload_language_container_onprem(db_conn=conn) + return conn + return None @pytest.fixture(scope="session") @@ -58,47 +68,54 @@ def saas_token() -> str: @pytest.fixture(scope="session") -def saas_database_id(saas_url, saas_account_id, saas_token) -> str: - - with ExitStack() as stack: - # Create and configure the SaaS client. - client = create_saas_client(host=saas_url, pat=saas_token) - api_access = OpenApiAccess(client=client, account_id=saas_account_id) - stack.enter_context(api_access.allowed_ip()) - - # Create a temporary database and waite till it becomes operational - db = stack.enter_context(api_access.database( - name=timestamp_name('SME_CI'), - idle_time=timedelta(hours=12))) - api_access.wait_until_running(db.id) - yield db.id +def saas_database_id(backend, saas_url, saas_account_id, saas_token) -> str: + + if backend == bfs.path.StorageBackend.saas: + with ExitStack() as stack: + # Create and configure the SaaS client. 
+ client = create_saas_client(host=saas_url, pat=saas_token) + api_access = OpenApiAccess(client=client, account_id=saas_account_id) + stack.enter_context(api_access.allowed_ip()) + + # Create a temporary database and wait till it becomes operational + db = stack.enter_context(api_access.database( + name=timestamp_name('SME_CI'), + idle_time=timedelta(hours=12))) + api_access.wait_until_running(db.id) + yield db.id + else: + yield '' @pytest.fixture(scope="session") -def db_conn_saas(saas_url, saas_account_id, saas_database_id, saas_token) -> pyexasol.ExaConnection: - - # Create a connection to the database. - conn_params = get_connection_params(host=saas_url, - account_id=saas_account_id, - database_id=saas_database_id, - pat=saas_token) - conn = _open_pyexasol_connection(**conn_params) - - # Build, upload and activate the language container - upload_language_container_saas(db_conn=conn, - saas_url=saas_url, - saas_account_id=saas_account_id, - saas_database_id=saas_database_id, - saas_token=saas_token) - yield conn +def db_conn_saas(backend, saas_url, saas_account_id, saas_database_id, saas_token) -> pyexasol.ExaConnection | None: + + if backend == bfs.path.StorageBackend.saas: + # Create a connection to the database. 
+ conn_params = get_connection_params(host=saas_url, + account_id=saas_account_id, + database_id=saas_database_id, + pat=saas_token) + conn = _open_pyexasol_connection(**conn_params) + + # Build, upload and activate the language container + upload_language_container_saas(db_conn=conn, + saas_url=saas_url, + saas_account_id=saas_account_id, + saas_database_id=saas_database_id, + saas_token=saas_token) + yield conn + else: + yield None @pytest.fixture(scope="session") -def db_conn(request, +def db_conn(backend, db_conn_onprem, db_conn_saas) -> pyexasol.ExaConnection: - if (hasattr(request, 'param') and - (request.param == bfs.path.StorageBackend.saas)): + if backend == bfs.path.StorageBackend.saas: + assert db_conn_saas is not None yield db_conn_saas else: + assert db_conn_onprem is not None yield db_conn_onprem diff --git a/tests/fixtures/script_deployment_fixture.py b/tests/fixtures/script_deployment_fixture.py index 6fde463..141bb3a 100644 --- a/tests/fixtures/script_deployment_fixture.py +++ b/tests/fixtures/script_deployment_fixture.py @@ -27,11 +27,10 @@ def deploy_params_saas(saas_url, saas_account_id, saas_database_id, saas_token) @pytest.fixture(scope="session") -def deploy_params(request, +def deploy_params(backend, deploy_params_onprem, deploy_params_saas) -> dict[str, Any]: - if (hasattr(request, 'param') and - (request.param == bfs.path.StorageBackend.saas)): + if backend == bfs.path.StorageBackend.saas: yield deploy_params_saas else: yield deploy_params_onprem diff --git a/tests/integration_tests/test_autopilot_endpoint_deletion_udf_real.py b/tests/integration_tests/test_autopilot_endpoint_deletion_udf_real.py index 517e75c..0b5a8a7 100644 --- a/tests/integration_tests/test_autopilot_endpoint_deletion_udf_real.py +++ b/tests/integration_tests/test_autopilot_endpoint_deletion_udf_real.py @@ -1,4 +1,3 @@ -import pytest from typing import Dict from exasol_sagemaker_extension.autopilot_endpoint_deletion_udf import \ AutopilotEndpointDeletionUDF @@ -40,14 
+39,10 @@ def get_emitted(self): return self._emitted -@pytest.mark.skipif(not aws_params.aws_secret_access_key, - reason="AWS credentials are not set") def test_autopilot_regression_endpoint_deletion_udf_real(): _run_test(reg_setup_params) -@pytest.mark.skipif(not aws_params.aws_secret_access_key, - reason="AWS credentials are not set") def test_autopilot_classification_endpoint_deletion_udf_real(): _run_test(cls_setup_params) diff --git a/tests/integration_tests/test_autopilot_endpoint_deployment_udf_real.py b/tests/integration_tests/test_autopilot_endpoint_deployment_udf_real.py index a714c30..5b8a5fe 100644 --- a/tests/integration_tests/test_autopilot_endpoint_deployment_udf_real.py +++ b/tests/integration_tests/test_autopilot_endpoint_deployment_udf_real.py @@ -1,4 +1,3 @@ -import pytest from typing import Dict from exasol_sagemaker_extension.autopilot_endpoint_deployment_udf import \ AutopilotEndpointDeploymentUDF @@ -49,14 +48,10 @@ def get_emitted(self): return self._emitted -@pytest.mark.skipif(not aws_params.aws_secret_access_key, - reason="AWS credentials are not set") def test_autopilot_regression_endpoint_deployment_udf_real(): _run_test(reg_setup_params) -@pytest.mark.skipif(not aws_params.aws_secret_access_key, - reason="AWS credentials are not set") def test_autopilot_classification_endpoint_deployment_udf_real(): _run_test(cls_setup_params) diff --git a/tests/integration_tests/test_autopilot_job_status_polling_udf_real.py b/tests/integration_tests/test_autopilot_job_status_polling_udf_real.py index b1fb25f..d3b3be8 100644 --- a/tests/integration_tests/test_autopilot_job_status_polling_udf_real.py +++ b/tests/integration_tests/test_autopilot_job_status_polling_udf_real.py @@ -1,4 +1,3 @@ -import pytest from typing import Dict from exasol_sagemaker_extension.autopilot_job_status_polling_udf import \ AutopilotJobStatusPollingUDF @@ -40,14 +39,10 @@ def get_emitted(self): return self._emitted -@pytest.mark.skipif(not aws_params.aws_secret_access_key, - 
reason="AWS credentials are not set") def test_poll_autopilot_regression_training_status_udf_real(): _run_test(reg_setup_params) -@pytest.mark.skipif(not aws_params.aws_secret_access_key, - reason="AWS credentials are not set") def test_poll_autopilot_classification_training_status_udf_real(): _run_test(cls_setup_params) diff --git a/tests/integration_tests/test_autopilot_prediction_udf_real.py b/tests/integration_tests/test_autopilot_prediction_udf_real.py index e231e92..671a478 100644 --- a/tests/integration_tests/test_autopilot_prediction_udf_real.py +++ b/tests/integration_tests/test_autopilot_prediction_udf_real.py @@ -1,5 +1,4 @@ import json -import pytest import pandas as pd from typing import Dict from exasol_sagemaker_extension.autopilot_prediction_udf import \ @@ -62,8 +61,6 @@ def get_dataframe(self, num_rows='all'): return return_df -@pytest.mark.skipif(not aws_params.aws_secret_access_key, - reason="AWS credentials are not set") def test_regression_autopilot_prediction_udf_real(): connection_data = { "aws_s3_connection": aws_params.aws_conn_name, @@ -101,8 +98,6 @@ def test_regression_autopilot_prediction_udf_real(): assert ctx.get_emitted()[0][0].shape == (3, 3) -@pytest.mark.skipif(not aws_params.aws_secret_access_key, - reason="AWS credentials are not set") def test_classification_autopilot_prediction_udf_real(): connection_data = { "aws_s3_connection": aws_params.aws_conn_name, diff --git a/tests/integration_tests/test_autopilot_training_udf_real.py b/tests/integration_tests/test_autopilot_training_udf_real.py index 30e5076..c55f81e 100644 --- a/tests/integration_tests/test_autopilot_training_udf_real.py +++ b/tests/integration_tests/test_autopilot_training_udf_real.py @@ -1,4 +1,3 @@ -import pytest from typing import Dict from exasol_sagemaker_extension.autopilot_training_udf import \ AutopilotTrainingUDF @@ -60,8 +59,6 @@ def get_emitted(self): return self._emitted -@pytest.mark.skipif(not aws_params.aws_secret_access_key, - reason="AWS 
credentials are not set") def test_autopilot_regression_training_udf_real(): params_dict = { 'setup_params': reg_setup_params, @@ -74,8 +71,6 @@ def test_autopilot_regression_training_udf_real(): params_dict['problem_params']) -@pytest.mark.skipif(not aws_params.aws_secret_access_key, - reason="AWS credentials are not set") def test_autopilot_classification_training_udf_real(): params_dict = { 'setup_params': cls_setup_params, @@ -88,8 +83,6 @@ def test_autopilot_classification_training_udf_real(): params_dict['problem_params']) -@pytest.mark.skipif(not aws_params.aws_secret_access_key, - reason="AWS credentials are not set") def test_autopilot_multi_classification_training_udf_real(): params_dict = { 'setup_params': cls_setup_params, diff --git a/tests/integration_tests/test_exporting_to_localstack_s3.py b/tests/integration_tests/test_exporting_to_localstack_s3.py index aa40207..09dad3c 100644 --- a/tests/integration_tests/test_exporting_to_localstack_s3.py +++ b/tests/integration_tests/test_exporting_to_localstack_s3.py @@ -1,9 +1,12 @@ import json -import pytest import os.path + +import pytest import localstack_client.session +import exasol.bucketfs as bfs + from tests.integration_tests.utils.generate_create_statement_s3_exporting \ - import S3ExportingLuaScriptCreateStatementGenerator + import S3ExportingLuaScriptCreateStatementGenerator DB_CONNECTION_ADDR = "127.0.0.1:9563" DB_CONNECTION_USER = "sys" @@ -134,7 +137,10 @@ def get_comparison_query(import_table_name): import_table_name=import_table_name) -def test_export_table(get_database_conn, s3_client): +def test_export_table(backend, get_database_conn, s3_client): + if backend != bfs.path.StorageBackend.onprem: + pytest.skip('The test can only run locally') + db_conn = get_database_conn create_s3_bucket(s3_client)