diff --git a/.github/workflows/check_ci.yaml b/.github/workflows/check_ci.yaml index a3bd8422..ed4b1e74 100644 --- a/.github/workflows/check_ci.yaml +++ b/.github/workflows/check_ci.yaml @@ -17,6 +17,9 @@ jobs: - name: Setup Python & Poetry Environment uses: ./.github/actions/prepare_poetry_env + - name: Show commit message + run: echo ${{ github.event.head_commit.message }} + - name: Run build ai-lab tests run: > poetry run pytest @@ -30,7 +33,8 @@ jobs: AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY_SECRET }} AWS_DEFAULT_REGION: ${{ secrets.AWS_REGION }} - run_nodebook_tests: + run_notebook_tests: + if: "contains(github.event.head_commit.message, '[run-notebook-tests]')" environment: AWS_SAGEMAKER runs-on: ubuntu-latest @@ -38,6 +42,21 @@ - uses: actions/checkout@v3 with: fetch-depth: 0 + - name: Free disk space + uses: jlumbroso/free-disk-space@main + with: + tool-cache: true + large-packages: false + + - name: Free disk space by removing large directories + run: | + sudo rm -rf /usr/local/graalvm/ + sudo rm -rf /usr/local/.ghcup/ + sudo rm -rf /usr/local/share/powershell + sudo rm -rf /usr/local/share/chromium + sudo rm -rf /usr/local/lib/node_modules + sudo rm -rf /opt/ghc + - name: Setup Python & Poetry Environment uses: ./.github/actions/prepare_poetry_env diff --git a/doc/changes/changes_0.2.0.md b/doc/changes/changes_0.2.0.md index c8e0ce0f..cc03b59e 100644 --- a/doc/changes/changes_0.2.0.md +++ b/doc/changes/changes_0.2.0.md @@ -19,5 +19,10 @@ Version: 0.2.0 ## Refactoring * #160: Implemented the PM's recommendations of 2024-01-24. * #120: Passing the secret store object (sb_config) as a parameter to all functions that need it. +* #167: Replacing the term "Docker-DB" with "Exasol Docker-DB" in all notebooks and documentation. +* #168: Renaming the section name "Access Configuration" to "Open Secure Configuration Storage". +* #170: Renaming the section name "Set up" to "Setup". 
+* #182: Renaming the secret store global variable from "sb_config" to "ai_lab_config". +* #169: Renaming the default database schema from "IDA" to "AI_LAB". ## Documentation diff --git a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/cloud/01_import_data.ipynb b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/cloud/01_import_data.ipynb index 741b6a95..2db5c2df 100644 --- a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/cloud/01_import_data.ipynb +++ b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/cloud/01_import_data.ipynb @@ -27,7 +27,7 @@ { "cell_type": "markdown", "source": [ - "### Access configuration" + "### Open Secure Configuration Storage" ], "metadata": { "collapsed": false @@ -154,9 +154,9 @@ ")\n", "\"\"\"\n", "\n", - "with open_pyexasol_connection(sb_config) as conn:\n", + "with open_pyexasol_connection(ai_lab_config) as conn:\n", " conn.execute(sql, query_params={\n", - " \"schema_name\": sb_config.db_schema,\n", + " \"schema_name\": ai_lab_config.db_schema,\n", " \"table_name\": TABLE_NAME\n", " })" ], @@ -194,9 +194,9 @@ "S3_ACCESS_KEY = \"\"\n", "S3_SECRET_KEY = \"\"\n", "\n", - "with open_pyexasol_connection(sb_config) as conn:\n", + "with open_pyexasol_connection(ai_lab_config) as conn:\n", " conn.execute(sql, query_params={\n", - " \"schema\": sb_config.db_schema,\n", + " \"schema\": ai_lab_config.db_schema,\n", " \"access_key\": S3_ACCESS_KEY,\n", " \"secret_key\": S3_SECRET_KEY,\n", " })" @@ -228,7 +228,7 @@ "outputs": [], "source": [ "params = {\n", - " \"schema\": sb_config.db_schema,\n", + " \"schema\": ai_lab_config.db_schema,\n", " \"table\": TABLE_NAME, \n", "}\n", "\n", @@ -241,7 +241,7 @@ " CONNECTION_NAME = 'S3_CONNECTION';\n", "\"\"\"\n", "\n", - "with open_pyexasol_connection(sb_config) as conn:\n", + "with open_pyexasol_connection(ai_lab_config) as conn:\n", " conn.execute(sql, query_params=params)" ], "metadata": { @@ -268,7 +268,7 @@ "execution_count": null, 
"outputs": [], "source": [ - "with open_pyexasol_connection(sb_config) as conn:\n", + "with open_pyexasol_connection(ai_lab_config) as conn:\n", " data_rows = conn.execute(\"select count(*) from {schema!i}.{table!i}\", query_params=params)\n", " count = next(data_rows)[0] \n", " print(f\"Loaded {count} rows\")\n", diff --git a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/cloud_store_config.ipynb b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/cloud_store_config.ipynb index 68cc6b0e..19b080e4 100644 --- a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/cloud_store_config.ipynb +++ b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/cloud_store_config.ipynb @@ -37,11 +37,11 @@ "from exasol.connections import open_bucketfs_connection, open_pyexasol_connection\n", "\n", "jar_local_path = github.retrieve_jar(github.Project.CLOUD_STORAGE_EXTENSION, use_local_cache=True)\n", - "bfs_bucket = open_bucketfs_connection(sb_config)\n", + "bfs_bucket = open_bucketfs_connection(ai_lab_config)\n", "bfs_path = bfs_utils.put_file(bfs_bucket, jar_local_path)\n", "\n", - "with open_pyexasol_connection(sb_config) as conn:\n", - " cloud_storage.setup_scripts(conn, sb_config.db_schema, bfs_path)\n", + "with open_pyexasol_connection(ai_lab_config) as conn:\n", + " cloud_storage.setup_scripts(conn, ai_lab_config.db_schema, bfs_path)\n", " \n", "print(\"Could Storage Extension was initialized\")" ], diff --git a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/data/data_abalone.ipynb b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/data/data_abalone.ipynb index 4f0d5648..96da19d7 100644 --- a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/data/data_abalone.ipynb +++ b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/data/data_abalone.ipynb @@ -18,7 +18,7 @@ "\n", "## Setup\n", "\n", - "### Access configuration" + "### Open Secure Configuration Storage" ] }, { @@ -118,18 
+118,18 @@ "stopwatch = Stopwatch()\n", "\n", "# Create an Exasol connection\n", - "with open_pyexasol_connection(sb_config, compression=True) as conn:\n", + "with open_pyexasol_connection(ai_lab_config, compression=True) as conn:\n", "\n", " # Create tables\n", - " sql = f'CREATE OR REPLACE TABLE \"{sb_config.db_schema}\".\"{train_table}\"({\", \".join(column_desc)})'\n", + " sql = f'CREATE OR REPLACE TABLE \"{ai_lab_config.db_schema}\".\"{train_table}\"({\", \".join(column_desc)})'\n", " conn.execute(query=sql)\n", - " sql = f'CREATE OR REPLACE TABLE \"{sb_config.db_schema}\".\"{test_table}\" LIKE \"{sb_config.db_schema}\".\"{train_table}\"'\n", + " sql = f'CREATE OR REPLACE TABLE \"{ai_lab_config.db_schema}\".\"{test_table}\" LIKE \"{ai_lab_config.db_schema}\".\"{train_table}\"'\n", " conn.execute(query=sql)\n", "\n", " # Import data into Exasol\n", - " conn.import_from_pandas(df_train, (sb_config.db_schema, train_table))\n", + " conn.import_from_pandas(df_train, (ai_lab_config.db_schema, train_table))\n", " print(f\"Imported {conn.last_statement().rowcount()} rows into {train_table}.\")\n", - " conn.import_from_pandas(df_test, (sb_config.db_schema, test_table))\n", + " conn.import_from_pandas(df_test, (ai_lab_config.db_schema, test_table))\n", " print(f\"Imported {conn.last_statement().rowcount()} rows into {test_table}.\")\n", "\n", "print(f\"Importing the data took: {stopwatch}\")" diff --git a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/data/data_telescope.ipynb b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/data/data_telescope.ipynb index 2547d46d..f59cae22 100644 --- a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/data/data_telescope.ipynb +++ b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/data/data_telescope.ipynb @@ -18,7 +18,7 @@ "\n", "## Setup\n", "\n", - "### Access configuration" + "### Open Secure Configuration Storage" ] }, { @@ -120,18 +120,18 @@ "stopwatch = Stopwatch()\n", 
"\n", "# Create an Exasol connection\n", - "with open_pyexasol_connection(sb_config, compression=True) as conn:\n", + "with open_pyexasol_connection(ai_lab_config, compression=True) as conn:\n", "\n", " # Create tables\n", - " sql = f'CREATE OR REPLACE TABLE \"{sb_config.db_schema}\".\"{train_table}\"({\", \".join(column_desc)})'\n", + " sql = f'CREATE OR REPLACE TABLE \"{ai_lab_config.db_schema}\".\"{train_table}\"({\", \".join(column_desc)})'\n", " conn.execute(query=sql)\n", - " sql = f'CREATE OR REPLACE TABLE \"{sb_config.db_schema}\".\"{test_table}\" LIKE \"{sb_config.db_schema}\".\"{train_table}\"'\n", + " sql = f'CREATE OR REPLACE TABLE \"{ai_lab_config.db_schema}\".\"{test_table}\" LIKE \"{ai_lab_config.db_schema}\".\"{train_table}\"'\n", " conn.execute(query=sql)\n", "\n", " # Import data into Exasol\n", - " conn.import_from_pandas(df_train, (sb_config.db_schema, train_table))\n", + " conn.import_from_pandas(df_train, (ai_lab_config.db_schema, train_table))\n", " print(f\"Imported {conn.last_statement().rowcount()} rows into {train_table}.\")\n", - " conn.import_from_pandas(df_test, (sb_config.db_schema, test_table))\n", + " conn.import_from_pandas(df_test, (ai_lab_config.db_schema, test_table))\n", " print(f\"Imported {conn.last_statement().rowcount()} rows into {test_table}.\")\n", "\n", "print(f\"Importing the data took: {stopwatch}\")" diff --git a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/main_config.ipynb b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/main_config.ipynb index 8dfd0b95..cab1b75a 100644 --- a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/main_config.ipynb +++ b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/main_config.ipynb @@ -20,7 +20,7 @@ "\n", "Before starting working with any notebook in the AI-Lab one needs to connect to the configuration store providing the name of the file where the data is stored and the password. 
If the file with the specified name doesn't exist a new file will be created and protected with the entered password. However, if the file does exist it will be opened only if the provided password matches the password the file was created with.\n", "\n", - "## Access configuration" + "## Open Secure Configuration Storage" ] }, { @@ -61,7 +61,7 @@ "source": [ "## Choose the database\n", "\n", - "First, we need to choose which database we will work with. The AI-Lab includes a so-called Docker-DB. This is a single-node database running in a Docker container. It's a good choice of a database for any light-weight experiment designed as a proof of concept.\n", + "First, we need to choose which database we will work with. The AI-Lab includes a so-called Exasol Docker-DB. This is a single-node database running in a Docker container. It's a good choice of a database for any light-weight experiment designed as a proof of concept.\n", "\n", "For more practical work one might choose a customer database with real data.\n", "\n", @@ -75,7 +75,7 @@ "metadata": {}, "outputs": [], "source": [ - "display(get_db_selection_ui(sb_config))" + "display(get_db_selection_ui(ai_lab_config))" ] }, { @@ -95,7 +95,7 @@ "metadata": {}, "outputs": [], "source": [ - "display(get_db_config_ui(sb_config))" + "display(get_db_config_ui(ai_lab_config))" ] }, { @@ -103,9 +103,9 @@ "id": "c57ab0bf-8b56-433e-b8a8-55d8bd4ff931", "metadata": {}, "source": [ - "## Start the Docker-DB\n", + "## Start the Exasol Docker-DB\n", "\n", - "If we choose to use the Docker-DB we need to launch it after every restart of the AI-Lab. This operation may take a few moments. Please wait till it is completed before proceeding with the next step.\n", + "If we choose to use the Exasol Docker-DB we need to launch it after every restart of the AI-Lab. This operation may take a few moments. 
Please wait till it is completed before proceeding with the next step.\n", "\n", "For a customer database, this operation will take no effect." ] @@ -117,7 +117,7 @@ "metadata": {}, "outputs": [], "source": [ - "display(get_start_docker_db_ui(sb_config))" + "display(get_start_docker_db_ui(ai_lab_config))" ] }, { @@ -144,8 +144,8 @@ "\n", "stopwatch = Stopwatch()\n", "\n", - "sql = f'CREATE SCHEMA IF NOT EXISTS \"{sb_config.db_schema}\"'\n", - "with open_pyexasol_connection(sb_config, compression=True) as conn:\n", + "sql = f'CREATE SCHEMA IF NOT EXISTS \"{ai_lab_config.db_schema}\"'\n", + "with open_pyexasol_connection(ai_lab_config, compression=True) as conn:\n", " conn.execute(query=sql)\n", "\n", "print(f\"Schema created in {stopwatch}\")" diff --git a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sagemaker/sme_deploy_model.ipynb b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sagemaker/sme_deploy_model.ipynb index e01dcbd9..2e0b5a4f 100644 --- a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sagemaker/sme_deploy_model.ipynb +++ b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sagemaker/sme_deploy_model.ipynb @@ -23,7 +23,7 @@ "\n", "## Setup\n", "\n", - "### Access configuration" + "### Open Secure Configuration Storage" ] }, { @@ -101,14 +101,14 @@ "outputs": [], "source": [ "%%sql\n", - "EXECUTE SCRIPT {{sb_config.db_schema}}.\"SME_DEPLOY_SAGEMAKER_AUTOPILOT_ENDPOINT\"(\n", - " '{{sb_config.JOB_NAME}}', \n", + "EXECUTE SCRIPT {{ai_lab_config.db_schema}}.\"SME_DEPLOY_SAGEMAKER_AUTOPILOT_ENDPOINT\"(\n", + " '{{ai_lab_config.JOB_NAME}}', \n", " '{{ENDPOINT_NAME}}', \n", - " '{{sb_config.db_schema}}',\n", + " '{{ai_lab_config.db_schema}}',\n", " '{{INSTANCE_TYPE}}', \n", " {{INSTANCE_COUNT}}, \n", - " '{{sb_config.sme_aws_connection}}', \n", - " '{{sb_config.aws_region}}'\n", + " '{{ai_lab_config.sme_aws_connection}}', \n", + " '{{ai_lab_config.aws_region}}'\n", ");" ] }, @@ -130,7 +130,7 @@ 
"%%sql\n", "SELECT SCRIPT_NAME, SCRIPT_TYPE \n", "FROM SYS.EXA_ALL_SCRIPTS\n", - "WHERE SCRIPT_SCHEMA='{{sb_config.db_schema}}' AND SCRIPT_TYPE = 'UDF'" + "WHERE SCRIPT_SCHEMA='{{ai_lab_config.db_schema}}' AND SCRIPT_TYPE = 'UDF'" ] }, { @@ -155,7 +155,7 @@ "%%sql column_names <<\n", "SELECT COLUMN_NAME\n", "FROM SYS.EXA_ALL_COLUMNS\n", - "WHERE COLUMN_SCHEMA = '{{sb_config.db_schema}}' AND COLUMN_TABLE='{{TEST_TABLE_NAME}}' AND COLUMN_NAME <> UPPER('{{TARGET_COLUMN}}');" + "WHERE COLUMN_SCHEMA = '{{ai_lab_config.db_schema}}' AND COLUMN_TABLE='{{TEST_TABLE_NAME}}' AND COLUMN_NAME <> UPPER('{{TARGET_COLUMN}}');" ] }, { @@ -184,8 +184,8 @@ "outputs": [], "source": [ "%%sql\n", - "SELECT \"{{sb_config.db_schema}}\".\"{{ENDPOINT_NAME}}\"(0, {{column_names}})\n", - "FROM \"{{sb_config.db_schema}}\".\"{{TEST_TABLE_NAME}}\"\n", + "SELECT \"{{ai_lab_config.db_schema}}\".\"{{ENDPOINT_NAME}}\"(0, {{column_names}})\n", + "FROM \"{{ai_lab_config.db_schema}}\".\"{{TEST_TABLE_NAME}}\"\n", "LIMIT 10" ] }, @@ -208,12 +208,12 @@ "WITH TEST_DATA AS\n", "(\n", " -- We take data from the test table and add the row number calling it SAMPLE_ID.\n", - " SELECT ROW_NUMBER() OVER () AS SAMPLE_ID, {{column_names}}, [{{TARGET_COLUMN}}] FROM \"{{sb_config.db_schema}}\".\"{{TEST_TABLE_NAME}}\"\n", + " SELECT ROW_NUMBER() OVER () AS SAMPLE_ID, {{column_names}}, [{{TARGET_COLUMN}}] FROM \"{{ai_lab_config.db_schema}}\".\"{{TEST_TABLE_NAME}}\"\n", ")\n", "WITH MODEL_OUTPUT AS\n", "(\n", " -- Make predictions. 
We will pass the SAMPLE_ID that sould be returned back unchanged.\n", - " SELECT \"{{sb_config.db_schema}}\".\"{{ENDPOINT_NAME}}\"(SAMPLE_ID, {{column_names}})\n", + " SELECT \"{{ai_lab_config.db_schema}}\".\"{{ENDPOINT_NAME}}\"(SAMPLE_ID, {{column_names}})\n", " FROM TEST_DATA\n", ")\n", "-- Finally, compute the confusion matrix.\n", @@ -242,8 +242,8 @@ "%%sql\n", "EXECUTE SCRIPT SME_DELETE_SAGEMAKER_AUTOPILOT_ENDPOINT(\n", " '{{ENDPOINT_NAME}}', \n", - " '{{sb_config.sme_aws_connection}}', \n", - " '{{sb_config.aws_region}}'\n", + " '{{ai_lab_config.sme_aws_connection}}', \n", + " '{{ai_lab_config.aws_region}}'\n", ")" ] }, diff --git a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sagemaker/sme_init.ipynb b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sagemaker/sme_init.ipynb index cdaae5b9..b026933a 100644 --- a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sagemaker/sme_init.ipynb +++ b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sagemaker/sme_init.ipynb @@ -26,7 +26,7 @@ "\n", "## Setup\n", "\n", - "### Access configuration" + "### Open Secure Configuration Storage" ] }, { @@ -58,7 +58,7 @@ "outputs": [], "source": [ "%run utils/sme_init_ui.ipynb\n", - "display(get_sme_config_ui(sb_config))" + "display(get_sme_config_ui(ai_lab_config))" ] }, { @@ -82,7 +82,7 @@ "source": [ "from exasol.sagemaker_extension_wrapper import initialize_sme_extension\n", "\n", - "initialize_sme_extension(sb_config)" + "initialize_sme_extension(ai_lab_config)" ] }, { diff --git a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sagemaker/sme_train_model.ipynb b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sagemaker/sme_train_model.ipynb index 50a69131..798bf295 100644 --- a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sagemaker/sme_train_model.ipynb +++ b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sagemaker/sme_train_model.ipynb @@ -20,7 +20,7 
@@ "\n", "## Setup\n", "\n", - "### Access configuration" + "### Open Secure Configuration Storage" ] }, { @@ -52,10 +52,10 @@ "outputs": [], "source": [ "from datetime import datetime\n", - "sb_config.save('JOB_NAME', 'CLS' + datetime.now().strftime('%Y%m%d%H%M%S'))\n", + "ai_lab_config.save('JOB_NAME', 'CLS' + datetime.now().strftime('%Y%m%d%H%M%S'))\n", "\n", "# Here is the job name we are going to use in this and the following notebooks.\n", - "sb_config.JOB_NAME" + "ai_lab_config.JOB_NAME" ] }, { @@ -97,7 +97,7 @@ "outputs": [], "source": [ "# URI of the S3 bucket\n", - "S3_BUCKET_URI=f\"s3://{sb_config.sme_aws_bucket}\"\n", + "S3_BUCKET_URI=f\"s3://{ai_lab_config.sme_aws_bucket}\"\n", "\n", "# Path in the S3 bucket where the input data will be uploaded.\n", "S3_OUTPUT_PATH = \"ida_dataset_path\"\n", @@ -139,7 +139,7 @@ "%%sql column_names <<\n", "SELECT COLUMN_NAME\n", "FROM SYS.EXA_ALL_COLUMNS\n", - "WHERE COLUMN_SCHEMA = '{{sb_config.db_schema}}' AND COLUMN_TABLE='{{INPUT_TABLE_NAME}}'" + "WHERE COLUMN_SCHEMA = '{{ai_lab_config.db_schema}}' AND COLUMN_TABLE='{{INPUT_TABLE_NAME}}'" ] }, { @@ -168,7 +168,7 @@ "outputs": [], "source": [ "%%sql\n", - "CREATE OR REPLACE VIEW {{sb_config.db_schema}}.\"{{INPUT_VIEW_NAME}}\" AS\n", + "CREATE OR REPLACE VIEW {{ai_lab_config.db_schema}}.\"{{INPUT_VIEW_NAME}}\" AS\n", "SELECT CAST(0 AS INT) AS SAMPLE_ID, {{column_names}} FROM {{INPUT_TABLE_NAME}}" ] }, @@ -203,15 +203,15 @@ "outputs": [], "source": [ "%%sql\n", - "EXECUTE SCRIPT \"{{sb_config.db_schema}}\".\"SME_TRAIN_WITH_SAGEMAKER_AUTOPILOT\"(\n", + "EXECUTE SCRIPT \"{{ai_lab_config.db_schema}}\".\"SME_TRAIN_WITH_SAGEMAKER_AUTOPILOT\"(\n", "'{\n", - " \"job_name\" : \"{{sb_config.JOB_NAME}}\",\n", - " \"aws_credentials_connection_name\" : \"{{sb_config.sme_aws_connection}}\",\n", - " \"aws_region\" : \"{{sb_config.aws_region}}\",\n", - " \"iam_sagemaker_role\" : \"{{sb_config.sme_aws_role}}\",\n", + " \"job_name\" : \"{{ai_lab_config.JOB_NAME}}\",\n", + " 
\"aws_credentials_connection_name\" : \"{{ai_lab_config.sme_aws_connection}}\",\n", + " \"aws_region\" : \"{{ai_lab_config.aws_region}}\",\n", + " \"iam_sagemaker_role\" : \"{{ai_lab_config.sme_aws_role}}\",\n", " \"s3_bucket_uri\" : \"{{S3_BUCKET_URI}}\",\n", " \"s3_output_path\" : \"{{S3_OUTPUT_PATH}}\",\n", - " \"input_schema_name\" : \"{{sb_config.db_schema}}\",\n", + " \"input_schema_name\" : \"{{ai_lab_config.db_schema}}\",\n", " \"input_table_or_view_name\" : \"{{INPUT_VIEW_NAME}}\",\n", " \"target_attribute_name\" : \"{{TARGET_COLUMN}}\",\n", " \"max_candidates\" : {{MAX_CANDIDATES}}\n", @@ -234,7 +234,7 @@ "outputs": [], "source": [ "%%sql\n", - "DROP VIEW {{sb_config.db_schema}}.\"{{INPUT_VIEW_NAME}}\"" + "DROP VIEW {{ai_lab_config.db_schema}}.\"{{INPUT_VIEW_NAME}}\"" ] }, { @@ -263,10 +263,10 @@ "outputs": [], "source": [ "%%sql\n", - "EXECUTE SCRIPT {{sb_config.db_schema}}.\"SME_POLL_SAGEMAKER_AUTOPILOT_JOB_STATUS\"(\n", - " '{{sb_config.JOB_NAME}}',\n", - " '{{sb_config.sme_aws_connection}}',\n", - " '{{sb_config.aws_region}}'\n", + "EXECUTE SCRIPT {{ai_lab_config.db_schema}}.\"SME_POLL_SAGEMAKER_AUTOPILOT_JOB_STATUS\"(\n", + " '{{ai_lab_config.JOB_NAME}}',\n", + " '{{ai_lab_config.sme_aws_connection}}',\n", + " '{{ai_lab_config.aws_region}}'\n", ")" ] }, @@ -298,11 +298,11 @@ "import os\n", "from sagemaker import AutoML\n", "\n", - "os.environ[\"AWS_DEFAULT_REGION\"] = sb_config.aws_region\n", - "os.environ[\"AWS_ACCESS_KEY_ID\"] = sb_config.aws_access_key_id\n", - "os.environ[\"AWS_SECRET_ACCESS_KEY\"] = sb_config.aws_secret_access_key\n", + "os.environ[\"AWS_DEFAULT_REGION\"] = ai_lab_config.aws_region\n", + "os.environ[\"AWS_ACCESS_KEY_ID\"] = ai_lab_config.aws_access_key_id\n", + "os.environ[\"AWS_SECRET_ACCESS_KEY\"] = ai_lab_config.aws_secret_access_key\n", "\n", - "automl = AutoML.attach(auto_ml_job_name=sb_config.JOB_NAME)\n", + "automl = AutoML.attach(auto_ml_job_name=ai_lab_config.JOB_NAME)\n", "automl.describe_auto_ml_job()" ] }, @@ -325,7 
+325,7 @@ "metadata": {}, "outputs": [], "source": [ - "aws_command = f'aws s3 ls s3://{sb_config.sme_aws_bucket}/{S3_OUTPUT_PATH} --recursive'\n", + "aws_command = f'aws s3 ls s3://{ai_lab_config.sme_aws_bucket}/{S3_OUTPUT_PATH} --recursive'\n", "!{aws_command}" ] }, diff --git a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sklearn/sklearn_predict_abalone.ipynb b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sklearn/sklearn_predict_abalone.ipynb index 80a8d5bb..bb64fc6b 100644 --- a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sklearn/sklearn_predict_abalone.ipynb +++ b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sklearn/sklearn_predict_abalone.ipynb @@ -19,7 +19,7 @@ "\n", "## Setup\n", "\n", - "### Access configuration" + "### Open Secure Configuration Storage" ] }, { @@ -54,12 +54,12 @@ "from stopwatch import Stopwatch\n", "\n", "target_column = 'RINGS'\n", - "bfs_model_path = get_udf_bucket_path(sb_config) + '/abalone_svm_model.pkl'\n", - "params = {'schema': sb_config.db_schema, 'test_table': 'ABALONE_TEST', 'model_path': bfs_model_path}\n", + "bfs_model_path = get_udf_bucket_path(ai_lab_config) + '/abalone_svm_model.pkl'\n", + "params = {'schema': ai_lab_config.db_schema, 'test_table': 'ABALONE_TEST', 'model_path': bfs_model_path}\n", "\n", "stopwatch = Stopwatch()\n", "\n", - "with open_pyexasol_connection(sb_config, compression=True) as conn:\n", + "with open_pyexasol_connection(ai_lab_config, compression=True) as conn:\n", " # Get the list of feature columns\n", " sql = 'SELECT * FROM {schema!q}.{test_table!q} LIMIT 1'\n", " df_tmp = conn.export_to_pandas(query_or_table=sql, query_params=params)\n", @@ -97,7 +97,7 @@ "import matplotlib.pyplot as plt\n", "\n", "# Get the ground truth labels for the test set.\n", - "with open_pyexasol_connection(sb_config, compression=True) as conn:\n", + "with open_pyexasol_connection(ai_lab_config, compression=True) as conn:\n", " sql = f'SELECT ROWID 
AS [sample_id], [{target_column}] FROM {{schema!q}}.{{test_table!q}}'\n", " df_true = conn.export_to_pandas(query_or_table=sql, query_params=params)\n", "\n", diff --git a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sklearn/sklearn_predict_telescope.ipynb b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sklearn/sklearn_predict_telescope.ipynb index 6cff6523..d975c0f1 100644 --- a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sklearn/sklearn_predict_telescope.ipynb +++ b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sklearn/sklearn_predict_telescope.ipynb @@ -19,7 +19,7 @@ "\n", "## Setup\n", "\n", - "### Access configuration" + "### Open Secure Configuration Storage" ] }, { @@ -54,12 +54,12 @@ "from stopwatch import Stopwatch\n", "\n", "target_column = 'CLASS'\n", - "bfs_model_path = get_udf_bucket_path(sb_config) + '/telescope_tree_model.pkl'\n", - "params = {'schema': sb_config.db_schema, 'test_table': 'TELESCOPE_TEST', 'model_path': bfs_model_path}\n", + "bfs_model_path = get_udf_bucket_path(ai_lab_config) + '/telescope_tree_model.pkl'\n", + "params = {'schema': ai_lab_config.db_schema, 'test_table': 'TELESCOPE_TEST', 'model_path': bfs_model_path}\n", "\n", "stopwatch = Stopwatch()\n", "\n", - "with open_pyexasol_connection(sb_config, compression=True) as conn:\n", + "with open_pyexasol_connection(ai_lab_config, compression=True) as conn:\n", " # Get the list of feature columns\n", " sql = 'SELECT * FROM {schema!i}.{test_table!i} LIMIT 1'\n", " df_tmp = conn.export_to_pandas(query_or_table=sql, query_params=params)\n", @@ -97,7 +97,7 @@ "import matplotlib.pyplot as plt\n", "\n", "# Get the ground truth labels for the test set.\n", - "with open_pyexasol_connection(sb_config, compression=True) as conn:\n", + "with open_pyexasol_connection(ai_lab_config, compression=True) as conn:\n", " sql = f'SELECT ROWID AS [sample_id], [{target_column}] FROM {{schema!q}}.{{test_table!q}}'\n", " df_true = 
conn.export_to_pandas(query_or_table=sql, query_params=params)\n", "\n", diff --git a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sklearn/sklearn_predict_udf.ipynb b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sklearn/sklearn_predict_udf.ipynb index 6641f82e..ef302930 100644 --- a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sklearn/sklearn_predict_udf.ipynb +++ b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sklearn/sklearn_predict_udf.ipynb @@ -18,7 +18,7 @@ "\n", "## Setup\n", "\n", - "### Access configuration" + "### Open Secure Configuration Storage" ] }, { @@ -104,8 +104,8 @@ "/\n", "\"\"\")\n", "\n", - "with open_pyexasol_connection(sb_config, compression=True) as conn:\n", - " conn.execute(query=sql, query_params={'schema': sb_config.db_schema})\n", + "with open_pyexasol_connection(ai_lab_config, compression=True) as conn:\n", + " conn.execute(query=sql, query_params={'schema': ai_lab_config.db_schema})\n", "\n", "print(f\"Creating prediction script took: {stopwatch}\")" ] diff --git a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sklearn/sklearn_train_abalone.ipynb b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sklearn/sklearn_train_abalone.ipynb index 02ed6f59..188590c0 100644 --- a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sklearn/sklearn_train_abalone.ipynb +++ b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sklearn/sklearn_train_abalone.ipynb @@ -21,7 +21,7 @@ "\n", "## Setup\n", "\n", - "### Access configuration" + "### Open Secure Configuration Storage" ] }, { @@ -58,8 +58,8 @@ "\n", "stopwatch = Stopwatch()\n", "\n", - "with open_pyexasol_connection(sb_config, compression=True) as conn:\n", - " df = conn.export_to_pandas(query_or_table=(sb_config.db_schema, 'ABALONE_TRAIN'))\n", + "with open_pyexasol_connection(ai_lab_config, compression=True) as conn:\n", + " df = 
conn.export_to_pandas(query_or_table=(ai_lab_config.db_schema, 'ABALONE_TRAIN'))\n", "\n", "X, y = df.drop(columns='RINGS'), df['RINGS']\n", "X_train, X_valid, y_train, y_valid = train_test_split(X, y, test_size=0.2)\n", @@ -205,7 +205,7 @@ "stopwatch = Stopwatch()\n", "\n", "# Connect to the BucketFS service\n", - "bucket = open_bucketfs_connection(sb_config)\n", + "bucket = open_bucketfs_connection(ai_lab_config)\n", "\n", "# Serialize the model into a byte-array and upload it to the BucketFS, \n", "# where it will be saved in the file with the specified name.\n", diff --git a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sklearn/sklearn_train_telescope.ipynb b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sklearn/sklearn_train_telescope.ipynb index 2e4f3a16..9f1c756f 100644 --- a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sklearn/sklearn_train_telescope.ipynb +++ b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/sklearn/sklearn_train_telescope.ipynb @@ -21,7 +21,7 @@ "\n", "## Setup\n", "\n", - "### Access configuration" + "### Open Secure Configuration Storage" ] }, { @@ -57,8 +57,8 @@ "\n", "stopwatch = Stopwatch()\n", "\n", - "with open_pyexasol_connection(sb_config, compression=True) as conn:\n", - " df = conn.export_to_pandas(query_or_table=(sb_config.db_schema, 'TELESCOPE_TRAIN'))\n", + "with open_pyexasol_connection(ai_lab_config, compression=True) as conn:\n", + " df = conn.export_to_pandas(query_or_table=(ai_lab_config.db_schema, 'TELESCOPE_TRAIN'))\n", "\n", "print(f\"Loading the data took: {stopwatch}\")" ] @@ -151,7 +151,7 @@ "stopwatch = Stopwatch()\n", "\n", "# Connect to the BucketFS service\n", - "bucket = open_bucketfs_connection(sb_config)\n", + "bucket = open_bucketfs_connection(ai_lab_config)\n", "\n", "# Serialize the model into a byte-array and upload it to the BucketFS, \n", "# where it will be saved in the file with the specified name.\n", diff --git 
a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/slc/script-languages.ipynb b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/slc/script-languages.ipynb index b826639e..7c68d2fd 100644 --- a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/slc/script-languages.ipynb +++ b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/slc/script-languages.ipynb @@ -673,7 +673,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Now, that we have an updated container, we need to check if our changes were successful. For that we are going to upload the container to an Exasol Database and have a look into it. In this example, we are going to use a local Docker-DB started by `exaslct`, which uses our [integration-test-docker-environment](https://github.com/exasol/integration-test-docker-environment) in the background. \n", + "Now, that we have an updated container, we need to check if our changes were successful. For that we are going to upload the container to an Exasol Database and have a look into it. In this example, we are going to use a local Exasol Docker-DB started by `exaslct`, which uses our [integration-test-docker-environment](https://github.com/exasol/integration-test-docker-environment) in the background. \n", "\n", "**Note:** You could also use your own Exasol Database by changing the variables below. However, this Notebook must be able to access the [BucketFS](https://docs.exasol.com/db/latest/database_concepts/bucketfs/bucketfs.htm) of your Exasol Database or you need to manually upload the container. 
" ] @@ -700,7 +700,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Starting a local Docker-DB for Testing" + "### Starting a local Exasol Docker-DB for Testing" ] }, { diff --git a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/start.ipynb b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/start.ipynb index 7b142044..a6579fec 100644 --- a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/start.ipynb +++ b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/start.ipynb @@ -30,7 +30,7 @@ "\n", "The tutorials are organized into topics, each one in a separate directory. The links below lead to the main page of a topic, usually an introduction. From there one can follow through the tutorials on the topic or go to a particular notebook.\n", "\n", - "The configuration notebook must be visited at least once. When using the provided demo database - Docker-DB - the configuration notebook may need to be re-visited at the beginning of every session with AI-Lab to start or restart this database. \n", + "The configuration notebook must be visited at least once. When using the provided demo database - Exasol Docker-DB - the configuration notebook may need to be re-visited at the beginning of every session with AI-Lab to start or restart this database. \n", "\n", "Some tutorials require example data. The data can be loaded into the database using notebooks in the data loading topic. \n", "\n", diff --git a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/masked_modelling.ipynb b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/masked_modelling.ipynb index e69e6d55..eb469364 100644 --- a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/masked_modelling.ipynb +++ b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/masked_modelling.ipynb @@ -17,9 +17,9 @@ "1. 
[Configure the AI-Lab](../main_config.ipynb).\n", "2. [Initialize the Transformer Extension](te_init.ipynb).\n", "\n", - "## Set up\n", + "## Setup\n", "\n", - "### Access configuration" + "### Open Secure Configuration Storage" ] }, { @@ -89,7 +89,7 @@ "outputs": [], "source": [ "%run utils/model_retrieval.ipynb\n", - "load_huggingface_model(sb_config, MODEL_NAME)" + "load_huggingface_model(ai_lab_config, MODEL_NAME)" ] }, { @@ -144,9 +144,9 @@ "(\n", " SELECT TE_FILLING_MASK_UDF(\n", " NULL,\n", - " '{{sb_config.te_bfs_connection}}',\n", - " '{{sb_config.te_hf_connection}}',\n", - " '{{sb_config.te_models_bfs_dir}}',\n", + " '{{ai_lab_config.te_bfs_connection}}',\n", + " '{{ai_lab_config.te_hf_connection}}',\n", + " '{{ai_lab_config.te_models_bfs_dir}}',\n", " '{{MODEL_NAME}}',\n", " '{{MY_TEXT}}',\n", " 5\n", diff --git a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/question_answering.ipynb b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/question_answering.ipynb index 4e0f8c50..2e4f9c28 100644 --- a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/question_answering.ipynb +++ b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/question_answering.ipynb @@ -17,9 +17,9 @@ "1. [Configure the AI-Lab](../main_config.ipynb).\n", "2. 
[Initialize the Transformer Extension](te_init.ipynb).\n", "\n", - "## Set up\n", + "## Setup\n", "\n", - "### Access configuration" + "### Open Secure Configuration Storage" ] }, { @@ -89,7 +89,7 @@ "outputs": [], "source": [ "%run utils/model_retrieval.ipynb\n", - "load_huggingface_model(sb_config, MODEL_NAME)" + "load_huggingface_model(ai_lab_config, MODEL_NAME)" ] }, { @@ -153,9 +153,9 @@ "(\n", " SELECT TE_QUESTION_ANSWERING_UDF(\n", " NULL,\n", - " '{{sb_config.te_bfs_connection}}',\n", - " '{{sb_config.te_hf_connection}}',\n", - " '{{sb_config.te_models_bfs_dir}}',\n", + " '{{ai_lab_config.te_bfs_connection}}',\n", + " '{{ai_lab_config.te_hf_connection}}',\n", + " '{{ai_lab_config.te_models_bfs_dir}}',\n", " '{{MODEL_NAME}}',\n", " '{{TEST_QUESTION}}',\n", " '{{TEST_CONTEXT1}}',\n", @@ -209,9 +209,9 @@ "(\n", " SELECT TE_QUESTION_ANSWERING_UDF(\n", " NULL,\n", - " '{{sb_config.te_bfs_connection}}',\n", - " '{{sb_config.te_hf_connection}}',\n", - " '{{sb_config.te_models_bfs_dir}}',\n", + " '{{ai_lab_config.te_bfs_connection}}',\n", + " '{{ai_lab_config.te_hf_connection}}',\n", + " '{{ai_lab_config.te_models_bfs_dir}}',\n", " '{{MODEL_NAME}}',\n", " '{{TEST_QUESTION}}',\n", " '{{TEST_CONTEXT2}}',\n", diff --git a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/sequence_classification.ipynb b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/sequence_classification.ipynb index eded5ffd..e9f4baed 100644 --- a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/sequence_classification.ipynb +++ b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/sequence_classification.ipynb @@ -17,9 +17,9 @@ "1. [Configure the AI-Lab](../main_config.ipynb).\n", "2. 
[Initialize the Transformer Extension](te_init.ipynb).\n", "\n", - "## Set up\n", + "## Setup\n", "\n", - "### Access configuration" + "### Open Secure Configuration Storage" ] }, { @@ -90,7 +90,7 @@ "outputs": [], "source": [ "%run utils/model_retrieval.ipynb\n", - "load_huggingface_model(sb_config, MODEL_NAME, method='udf')" + "load_huggingface_model(ai_lab_config, MODEL_NAME, method='udf')" ] }, { @@ -123,9 +123,9 @@ "(\n", " SELECT TE_SEQUENCE_CLASSIFICATION_SINGLE_TEXT_UDF(\n", " NULL,\n", - " '{{sb_config.te_bfs_connection}}',\n", - " '{{sb_config.te_hf_connection}}',\n", - " '{{sb_config.te_models_bfs_dir}}',\n", + " '{{ai_lab_config.te_bfs_connection}}',\n", + " '{{ai_lab_config.te_hf_connection}}',\n", + " '{{ai_lab_config.te_models_bfs_dir}}',\n", " '{{MODEL_NAME}}',\n", " 'Oh my God!'\n", " )\n", @@ -160,9 +160,9 @@ "(\n", " SELECT TE_SEQUENCE_CLASSIFICATION_TEXT_PAIR_UDF(\n", " NULL,\n", - " '{{sb_config.te_bfs_connection}}',\n", - " '{{sb_config.te_hf_connection}}',\n", - " '{{sb_config.te_models_bfs_dir}}',\n", + " '{{ai_lab_config.te_bfs_connection}}',\n", + " '{{ai_lab_config.te_hf_connection}}',\n", + " '{{ai_lab_config.te_models_bfs_dir}}',\n", " '{{MODEL_NAME}}',\n", " 'Oh my God!',\n", " 'I lost my purse.'\n", diff --git a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/te_init.ipynb b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/te_init.ipynb index 105d506e..bfe856d9 100644 --- a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/te_init.ipynb +++ b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/te_init.ipynb @@ -14,9 +14,9 @@ "Prior to using this notebook one needs to complete the following steps:\n", "1. 
[Configure the AI-Lab](../main_config.ipynb).\n", "\n", - "## Set up\n", + "## Setup\n", "\n", - "### Access configuration" + "### Open Secure Configuration Storage" ] }, { @@ -46,7 +46,7 @@ "outputs": [], "source": [ "%run utils/te_init_ui.ipynb\n", - "display(get_te_config_ui(sb_config))" + "display(get_te_config_ui(ai_lab_config))" ] }, { @@ -78,7 +78,7 @@ "source": [ "from exasol.transformers_extension_wrapper import initialize_te_extension\n", "\n", - "initialize_te_extension(sb_config)" + "initialize_te_extension(ai_lab_config)" ] }, { diff --git a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/text_generation.ipynb b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/text_generation.ipynb index b0531f47..7668216c 100644 --- a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/text_generation.ipynb +++ b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/text_generation.ipynb @@ -17,9 +17,9 @@ "1. [Configure the AI-Lab](../main_config.ipynb).\n", "2. 
[Initialize the Transformer Extension](te_init.ipynb).\n", "\n", - "## Set up\n", + "## Setup\n", "\n", - "### Access configuration" + "### Open Secure Configuration Storage" ] }, { @@ -71,7 +71,7 @@ "outputs": [], "source": [ "%run utils/model_retrieval.ipynb\n", - "load_huggingface_model(sb_config, MODEL_NAME)" + "load_huggingface_model(ai_lab_config, MODEL_NAME)" ] }, { @@ -122,7 +122,7 @@ "# We will be creating a new database session every time we call the model.\n", "# We will have to activate the language container for each of these sessions.\n", "# Here we will get the activation SQL command.\n", - "activation_sql = get_activation_sql(sb_config)" + "activation_sql = get_activation_sql(ai_lab_config)" ] }, { @@ -150,11 +150,11 @@ "outputs": [], "source": [ "sql = f\"\"\"\n", - "SELECT {sb_config.db_schema}.TE_TEXT_GENERATION_UDF(\n", + "SELECT {ai_lab_config.db_schema}.TE_TEXT_GENERATION_UDF(\n", " NULL,\n", - " '{sb_config.te_bfs_connection}',\n", - " '{sb_config.te_hf_connection}',\n", - " '{sb_config.te_models_bfs_dir}',\n", + " '{ai_lab_config.te_bfs_connection}',\n", + " '{ai_lab_config.te_hf_connection}',\n", + " '{ai_lab_config.te_models_bfs_dir}',\n", " '{MODEL_NAME}',\n", " '{MY_TEXT}',\n", " {MAX_LENGTH},\n", @@ -162,7 +162,7 @@ ")\n", "\"\"\"\n", "\n", - "with open_pyexasol_connection(sb_config, compression=True) as conn:\n", + "with open_pyexasol_connection(ai_lab_config, compression=True) as conn:\n", " conn.execute(query=activation_sql)\n", " result = conn.export_to_pandas(query_or_table=sql).squeeze()\n", " MY_TEXT = result['GENERATED_TEXT']\n", diff --git a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/token_classification.ipynb b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/token_classification.ipynb index bbe07b44..b05e7d5c 100644 --- a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/token_classification.ipynb +++ 
b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/token_classification.ipynb @@ -17,9 +17,9 @@ "1. [Configure the AI-Lab](../main_config.ipynb).\n", "2. [Initialize the Transformer Extension](te_init.ipynb).\n", "\n", - "## Set up\n", + "## Setup\n", "\n", - "### Access configuration" + "### Open Secure Configuration Storage" ] }, { @@ -89,7 +89,7 @@ "outputs": [], "source": [ "%run utils/model_retrieval.ipynb\n", - "load_huggingface_model(sb_config, MODEL_NAME)" + "load_huggingface_model(ai_lab_config, MODEL_NAME)" ] }, { @@ -158,9 +158,9 @@ "(\n", " SELECT TE_TOKEN_CLASSIFICATION_UDF(\n", " NULL,\n", - " '{{sb_config.te_bfs_connection}}',\n", - " '{{sb_config.te_hf_connection}}',\n", - " '{{sb_config.te_models_bfs_dir}}',\n", + " '{{ai_lab_config.te_bfs_connection}}',\n", + " '{{ai_lab_config.te_hf_connection}}',\n", + " '{{ai_lab_config.te_models_bfs_dir}}',\n", " '{{MODEL_NAME}}',\n", " '{{MY_TEXT}}',\n", " NULL\n", diff --git a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/translation.ipynb b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/translation.ipynb index 03caaaae..1e8bb26d 100644 --- a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/translation.ipynb +++ b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/translation.ipynb @@ -17,9 +17,9 @@ "1. [Configure the AI-Lab](../main_config.ipynb).\n", "2. 
[Initialize the Transformer Extension](te_init.ipynb).\n", "\n", - "## Set up\n", + "## Setup\n", "\n", - "### Access configuration" + "### Open Secure Configuration Storage" ] }, { @@ -90,7 +90,7 @@ "outputs": [], "source": [ "%run utils/model_retrieval.ipynb\n", - "load_huggingface_model(sb_config, MODEL_NAME)" + "load_huggingface_model(ai_lab_config, MODEL_NAME)" ] }, { @@ -149,9 +149,9 @@ "(\n", " SELECT TE_TRANSLATION_UDF(\n", " NULL,\n", - " '{{sb_config.te_bfs_connection}}',\n", - " '{{sb_config.te_hf_connection}}',\n", - " '{{sb_config.te_models_bfs_dir}}',\n", + " '{{ai_lab_config.te_bfs_connection}}',\n", + " '{{ai_lab_config.te_hf_connection}}',\n", + " '{{ai_lab_config.te_models_bfs_dir}}',\n", " '{{MODEL_NAME}}',\n", " '{{MY_TEXT}}',\n", " '{{SOURCE_LANGUAGE}}',\n", diff --git a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/utils/te_init_ui.ipynb b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/utils/te_init_ui.ipynb index 5f0f98b8..634044fb 100644 --- a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/utils/te_init_ui.ipynb +++ b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/utils/te_init_ui.ipynb @@ -41,7 +41,7 @@ "def get_te_config_ui(conf: Secrets) -> widgets.Widget:\n", "\n", " inputs = [\n", - " ('Access token', widgets.Password(value=sb_config.get(CKey.huggingface_token)), CKey.huggingface_token)\n", + " ('Access token', widgets.Password(value=ai_lab_config.get(CKey.huggingface_token)), CKey.huggingface_token)\n", " ]\n", "\n", " return get_generic_config_ui(conf, [inputs], ['Huggingface Access Parameters'])\n" diff --git a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/zero_shot_classification.ipynb b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/zero_shot_classification.ipynb index 667ef7ba..708c53b9 100644 --- 
a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/zero_shot_classification.ipynb +++ b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/transformers/zero_shot_classification.ipynb @@ -17,9 +17,9 @@ "1. [Configure the AI-Lab](../main_config.ipynb).\n", "2. [Initialize the Transformer Extension](te_init.ipynb).\n", "\n", - "## Set up\n", + "## Setup\n", "\n", - "### Access configuration" + "### Open Secure Configuration Storage" ] }, { @@ -89,7 +89,7 @@ "outputs": [], "source": [ "%run utils/model_retrieval.ipynb\n", - "load_huggingface_model(sb_config, MODEL_NAME)" + "load_huggingface_model(ai_lab_config, MODEL_NAME)" ] }, { @@ -158,9 +158,9 @@ "(\n", " SELECT TE_ZERO_SHOT_TEXT_CLASSIFICATION_UDF(\n", " NULL,\n", - " '{{sb_config.te_bfs_connection}}',\n", - " '{{sb_config.te_hf_connection}}',\n", - " '{{sb_config.te_models_bfs_dir}}',\n", + " '{{ai_lab_config.te_bfs_connection}}',\n", + " '{{ai_lab_config.te_hf_connection}}',\n", + " '{{ai_lab_config.te_models_bfs_dir}}',\n", " '{{MODEL_NAME}}',\n", " '{{MY_TEXT}}',\n", " '{{MY_LABELS}}'\n", diff --git a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/utils/access_store_ui.ipynb b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/utils/access_store_ui.ipynb index 6aaffcd1..fe40c300 100644 --- a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/utils/access_store_ui.ipynb +++ b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/utils/access_store_ui.ipynb @@ -62,11 +62,11 @@ " open_btn = widgets.Button(description='Open', style=ui_look.button_style, layout=ui_look.button_layout)\n", "\n", " def open_or_create_config_store(btn):\n", - " global sb_config, sb_store_file\n", + " global ai_lab_config, sb_store_file\n", " sb_store_file = file_txt.value\n", " try:\n", - " sb_config = Secrets(Path(root_dir) / sb_store_file, password_txt.value)\n", - " sb_config.connection()\n", + " ai_lab_config = Secrets(Path(root_dir) / 
sb_store_file, password_txt.value)\n", + " ai_lab_config.connection()\n", " except:\n", " popup_message('Failed to open the store. Please check that the password is correct')\n", " else:\n", diff --git a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/utils/jupysql_init.ipynb b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/utils/jupysql_init.ipynb index d9b0e214..4675f761 100644 --- a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/utils/jupysql_init.ipynb +++ b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/utils/jupysql_init.ipynb @@ -20,12 +20,13 @@ "from exasol.connections import open_sqlalchemy_connection\n", "from exasol.language_container_activation import get_activation_sql\n", "\n", - "engine = open_sqlalchemy_connection(sb_config)\n", + "engine = open_sqlalchemy_connection(ai_lab_config)\n", "\n", "%load_ext sql\n", "%sql engine\n", - "%sql OPEN SCHEMA {{sb_config.db_schema}}\n", - "%sql {{get_activation_sql(sb_config)}}" + "%config SqlMagic.short_errors = False\n", + "%sql OPEN SCHEMA {{ai_lab_config.db_schema}}\n", + "%sql {{get_activation_sql(ai_lab_config)}}" ] } ], diff --git a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/utils/main_config_ui.ipynb b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/utils/main_config_ui.ipynb index 0865cf38..a019556f 100644 --- a/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/utils/main_config_ui.ipynb +++ b/exasol/ds/sandbox/runtime/ansible/roles/jupyter/files/notebook/utils/main_config_ui.ipynb @@ -94,7 +94,7 @@ " ('Port', widgets.IntText(value=int(conf.get(CKey.db_port, '8888'))), CKey.db_port),\n", " ('User Name', widgets.Text(value=conf.get(CKey.db_user)), CKey.db_user),\n", " ('Password', widgets.Password(value=conf.get(CKey.db_password)), CKey.db_password),\n", - " ('Default Schema', widgets.Text(value=conf.get(CKey.db_schema, 'IDA')), CKey.db_schema),\n", + " ('Default Schema', 
widgets.Text(value=conf.get(CKey.db_schema, 'AI_LAB')), CKey.db_schema),\n", " ('Encrypted Comm.', widgets.Checkbox(value=conf.get(CKey.db_encryption, 'True') == 'True', indent=False),\n", " CKey.db_encryption)\n", " ],\n", @@ -131,7 +131,7 @@ " [\n", " ('Memory Size (GiB)', widgets.IntText(value=int(conf.get(CKey.mem_size, '2'))), CKey.mem_size),\n", " ('Disk Size (GiB)', widgets.IntText(value=int(conf.get(CKey.disk_size, '2'))), CKey.disk_size),\n", - " ('Default Schema', widgets.Text(value=conf.get(CKey.db_schema, 'IDA')), CKey.db_schema)\n", + " ('Default Schema', widgets.Text(value=conf.get(CKey.db_schema, 'AI_LAB')), CKey.db_schema)\n", " ]\n", " ]\n", "\n", diff --git a/test/notebooks/nbtest_sagemaker.py b/test/notebooks/nbtest_sagemaker.py index a410006f..967b1ecc 100644 --- a/test/notebooks/nbtest_sagemaker.py +++ b/test/notebooks/nbtest_sagemaker.py @@ -5,6 +5,7 @@ from inspect import cleandoc import textwrap +import pytest from exasol.secret_store import Secrets from exasol.ai_lab_config import AILabConfig as CKey @@ -152,13 +153,13 @@ def continuous_job_polling(): from exasol.connections import open_pyexasol_connection from exasol.language_container_activation import get_activation_sql - sql = f'EXECUTE SCRIPT {sb_config.db_schema}."SME_POLL_SAGEMAKER_AUTOPILOT_JOB_STATUS"(' \ - f"'{sb_config.JOB_NAME}'," \ - f"'{sb_config.sme_aws_connection}'," \ - f"'{sb_config.aws_region}');" + sql = f'EXECUTE SCRIPT {ai_lab_config.db_schema}."SME_POLL_SAGEMAKER_AUTOPILOT_JOB_STATUS"(' \ + f"'{ai_lab_config.JOB_NAME}'," \ + f"'{ai_lab_config.sme_aws_connection}'," \ + f"'{ai_lab_config.aws_region}');" - with open_pyexasol_connection(sb_config, compression=True) as conn: - conn.execute(get_activation_sql(sb_config)) + with open_pyexasol_connection(ai_lab_config, compression=True) as conn: + conn.execute(get_activation_sql(ai_lab_config)) t_start = time.time() job_status = 'Unknown' while job_status != 'Completed': diff --git a/test/notebooks/notebook_test_utils.py 
b/test/notebooks/notebook_test_utils.py index 82f8ccb9..fe26f69d 100644 --- a/test/notebooks/notebook_test_utils.py +++ b/test/notebooks/notebook_test_utils.py @@ -78,8 +78,8 @@ def run_notebook(notebook_file: str, store_file: str, store_password: str, def init_notebook_test(): from pathlib import Path from exasol.secret_store import Secrets - global sb_config - sb_config = Secrets(Path("{store_file}"), "{store_password}") + global ai_lab_config + ai_lab_config = Secrets(Path("{store_file}"), "{store_password}") init_notebook_test() ''' nb.cells.insert(0, nbformat.v4.new_code_cell(init_code)) @@ -93,7 +93,7 @@ def init_notebook_test(): def access_to_temp_secret_store(tmp_path: Path) -> Tuple[Path, str]: """ Creates a temporary configuration store. - Brings up and subsequently destroys the Docker-DB. + Brings up and subsequently destroys the Exasol Docker-DB. Returns the temporary configuration store path and password. """ @@ -108,10 +108,10 @@ def access_to_temp_secret_store(tmp_path: Path) -> Tuple[Path, str]: secrets = Secrets(store_path, master_password=store_password) # Set the configuration required by the ITDE manager and those the - # manager will not set after starting the Docker-DB. + # manager will not set after starting the Exasol Docker-DB. _init_secret_store(secrets) - # Start the Docker-DB and then destroy it after the test finishes. + # Start the Exasol Docker-DB and then destroy it after the test finishes. bring_itde_up(secrets) try: yield store_path, store_password