From 5facd9aa4dfaf2f1ed6ae0d6ee3d1bab1835a2f2 Mon Sep 17 00:00:00 2001 From: Jan Sikorski <132985823+sfc-gh-jsikorski@users.noreply.github.com> Date: Mon, 29 Jan 2024 15:26:10 +0100 Subject: [PATCH 01/17] Download required packages inside venv (#677) * Added package installer * Added package installer * reformat to context manager * Fixing windows issues * Fixing windows issues * Fixing windows issues * Fixing windows issues * Fixing windows issues * Fixing windows issues * Fixing windows issues * Fixing windows issues * Fixing windows issues * Minor reformat --- .../cli/plugins/snowpark/package/utils.py | 9 +++- .../cli/plugins/snowpark/package_utils.py | 32 +++-------- src/snowflake/cli/plugins/snowpark/venv.py | 54 +++++++++++++++++++ tests/__snapshots__/test_package.ambr | 7 +-- tests/test_package.py | 15 +----- tests/test_utils.py | 8 +-- tests_integration/test_package.py | 15 ++++++ 7 files changed, 94 insertions(+), 46 deletions(-) create mode 100644 src/snowflake/cli/plugins/snowpark/venv.py diff --git a/src/snowflake/cli/plugins/snowpark/package/utils.py b/src/snowflake/cli/plugins/snowpark/package/utils.py index d867b24709..4d8edaa90e 100644 --- a/src/snowflake/cli/plugins/snowpark/package/utils.py +++ b/src/snowflake/cli/plugins/snowpark/package/utils.py @@ -3,7 +3,9 @@ import shutil from dataclasses import dataclass from pathlib import Path +from typing import List +from requirements.requirement import Requirement from snowflake.cli.plugins.snowpark.models import SplitRequirements @@ -28,7 +30,8 @@ class RequiresPackages(LookupResult): def message(self): return f"""The package {self.name} is supported, but does depend on the following Snowflake supported native libraries. You should - include the following in your packages: {self.requirements.snowflake}""" + include the following in your packages: + {get_readable_list_of_requirements(self.requirements.snowflake)}""" class NotInAnaconda(LookupResult): @@ -63,3 +66,7 @@ def prepare_app_zip(file_path: Path, temp_dir: str) -> str: temp_path = temp_dir + "/" + file_name shutil.copy(file_path, temp_path) return temp_path + + +def get_readable_list_of_requirements(reqs: List[Requirement]): + return "\n".join((req.line for req in reqs)) diff --git a/src/snowflake/cli/plugins/snowpark/package_utils.py b/src/snowflake/cli/plugins/snowpark/package_utils.py index d79d25cf3b..786d98fd7b 100644 --- a/src/snowflake/cli/plugins/snowpark/package_utils.py +++ b/src/snowflake/cli/plugins/snowpark/package_utils.py @@ -5,7 +5,6 @@ import os import re import shutil -import subprocess from typing import Dict, List import click @@ -19,6 +18,7 @@ RequirementWithFiles, SplitRequirements, ) +from snowflake.cli.plugins.snowpark.venv import Venv log = logging.getLogger(__name__) @@ -195,11 +195,13 @@ def install_packages( been deleted from the local packages folder. 
""" second_chance_results = None - if file_name is not None: - pip_install_result = _run_pip_install(file_name, "file") - if package_name is not None: - pip_install_result = _run_pip_install(package_name, "package") + with Venv() as v: + if file_name is not None: + pip_install_result = v.pip_install(file_name, "file") + + if package_name is not None: + pip_install_result = v.pip_install(package_name, "package") if pip_install_result != 0: log.info(pip_failed_msg.format(pip_install_result)) @@ -254,26 +256,6 @@ def install_packages( return True, second_chance_results -def _run_pip_install(name: str, type_: str): - arguments = ["-r", name] if type_ == "file" else [name] - - try: - process = subprocess.Popen( - [PIP_PATH, "install", "-t", ".packages/"] + arguments, - stdout=subprocess.PIPE, - universal_newlines=True, - ) - for line in process.stdout: # type: ignore - log.info(line.strip()) - process.wait() - except FileNotFoundError: - log.error( - "pip not found. Please install pip and try again. " - "HINT: you can also set the environment variable 'SNOWCLI_PIP_PATH' to the path of pip.", - ) - return process.returncode - - def _delete_packages(to_be_deleted: Dict) -> None: for package, items in to_be_deleted.items(): log.info("Package %s: deleting %d files", package, len(items.files)) diff --git a/src/snowflake/cli/plugins/snowpark/venv.py b/src/snowflake/cli/plugins/snowpark/venv.py new file mode 100644 index 0000000000..4e60403c29 --- /dev/null +++ b/src/snowflake/cli/plugins/snowpark/venv.py @@ -0,0 +1,54 @@ +import logging +import subprocess +import sys +import venv +from pathlib import Path +from tempfile import TemporaryDirectory + +log = logging.getLogger(__name__) + + +class Venv: + ERROR_MESSAGE = "Running command {0} caused error {1}" + + def __init__(self, directory: str = "", with_pip: bool = True): + self.directory = TemporaryDirectory(directory) + self.with_pip = with_pip + + def __enter__(self): + self._create_venv() + self.python_path = self._get_python_path(Path(self.directory.name)) + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.directory.cleanup() + + def run_python(self, args): + + try: + process = subprocess.run( + [self.python_path, *args], + capture_output=True, + text=True, + ) + except subprocess.CalledProcessError as e: + log.error(self.ERROR_MESSAGE, "python" + " ".join(args), e.stderr) + raise SystemExit + + return process + + def pip_install(self, name: str, req_type: str, directory: str = ".packages"): + arguments = ["-m", "pip", "install", "-t", directory] + arguments += ["-r", name] if req_type == "file" else [name] + process = self.run_python(arguments) + + return process.returncode + + def _create_venv(self): + venv.create(self.directory.name, self.with_pip) + + @staticmethod + def _get_python_path(venv_dir: Path): + if sys.platform == "win32": + return venv_dir / "scripts" / "python" + return venv_dir / "bin" / "python" diff --git a/tests/__snapshots__/test_package.ambr b/tests/__snapshots__/test_package.ambr index 52a05e3065..ded469c57a 100644 --- a/tests/__snapshots__/test_package.ambr +++ b/tests/__snapshots__/test_package.ambr @@ -1,11 +1,11 @@ # serializer version: 1 -# name: TestPackage.test_package_lookup[argument0] +# name: TestPackage.test_package_lookup[snowflake-connector-python] ''' Package snowflake-connector-python is available on the Snowflake anaconda channel. 
''' # --- -# name: TestPackage.test_package_lookup[argument1] +# name: TestPackage.test_package_lookup[some-weird-package-we-dont-know] ''' Lookup for package some-weird-package-we-dont-know resulted in some error. Please check the package name or try again with -y option @@ -15,7 +15,8 @@ ''' The package some-other-package is supported, but does depend on the following Snowflake supported native libraries. You should - include the following in your packages: [] + include the following in your packages: + snowflake-snowpark-python ''' # --- diff --git a/tests/test_package.py b/tests/test_package.py index f964d8d398..9a6a3b04ca 100644 --- a/tests/test_package.py +++ b/tests/test_package.py @@ -15,18 +15,7 @@ class TestPackage: @pytest.mark.parametrize( "argument", - [ - ( - "snowflake-connector-python", - "Package snowflake-connector-python is available on the Snowflake anaconda channel.", - "snowflake.cli.plugins.snowpark.package.commands", - ), - ( - "some-weird-package-we-dont-know", - "Lookup for package some-weird-package-we-dont-know resulted in some error. Please check the package name or try again with -y option", - "snowflake.cli.plugins.snowpark.package.commands", - ), - ], + ["snowflake-connector-python", "some-weird-package-we-dont-know"], ) @patch("snowflake.cli.plugins.snowpark.package_utils.requests") def test_package_lookup( @@ -36,7 +25,7 @@ def test_package_lookup( test_data.anaconda_response ) - result = runner.invoke(["snowpark", "package", "lookup", argument[0], "--yes"]) + result = runner.invoke(["snowpark", "package", "lookup", argument, "--yes"]) assert result.exit_code == 0 assert result.output == snapshot diff --git a/tests/test_utils.py b/tests/test_utils.py index f619353ccb..2cd909f52e 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -285,7 +285,7 @@ def test_deduplicate_and_sort_reqs(): assert sorted_packages[0].specs == [("==", "0.9.5")] -@mock.patch("platform.system") +@patch("platform.system") @pytest.mark.parametrize( "argument, expected", [ @@ -302,9 +302,9 @@ def test_path_resolver(mock_system, argument, expected): assert path_utils.path_resolver(argument) == expected -@mock.patch("snowflake.cli.plugins.snowpark.package_utils._run_pip_install") -def test_pip_fail_message(mock_pip, correct_requirements_txt, caplog): - mock_pip.return_value = 42 +@patch("snowflake.cli.plugins.snowpark.package_utils.Venv") +def test_pip_fail_message(mock_installer, correct_requirements_txt, caplog): + mock_installer.return_value.__enter__.return_value.pip_install.return_value = 42 with caplog.at_level(logging.INFO, "snowflake.cli.plugins.snowpark.package_utils"): result = package_utils.install_packages( diff --git a/tests_integration/test_package.py b/tests_integration/test_package.py index 041476922e..47574e98e5 100644 --- a/tests_integration/test_package.py +++ b/tests_integration/test_package.py @@ -68,6 +68,21 @@ def test_package_create_with_non_anaconda_package_without_install( } assert not os.path.exists("PyRTF3.zip") + @pytest.mark.integration + def test_create_package_with_deps(self, directory_for_test, runner): + result = runner.invoke_with_connection_json( + ["snowpark", "package", "create", "dummy_pkg_for_tests_with_deps", "-y"] + ) + + assert result.exit_code == 0 + assert ( + "Package dummy_pkg_for_tests_with_deps.zip created. 
You can now upload it to a stage" + in result.json["message"] + ) + + files = self._get_filenames_from_zip("dummy_pkg_for_tests_with_deps.zip") + assert "dummy_pkg_for_tests/shrubbery.py" in files + @pytest.fixture(scope="function") def directory_for_test(self): init_dir = os.getcwd() From 0c97b63647876780957131fbe734103ee1ce9d6e Mon Sep 17 00:00:00 2001 From: Jan Sikorski <132985823+sfc-gh-jsikorski@users.noreply.github.com> Date: Mon, 29 Jan 2024 15:40:00 +0100 Subject: [PATCH 02/17] Switch snowpark & package tests to our test-packages (#691) Switched to our test packages --- .../snowpark_with_package/app/functions.py | 4 ++-- .../snowpark_with_package/requirements.txt | 2 +- .../snowpark_with_package/snowflake.yml | 2 +- tests_integration/test_package.py | 18 ++++++++++++------ tests_integration/test_snowpark.py | 14 ++++++++++---- .../testing_utils/snowpark_utils.py | 6 +++--- 6 files changed, 29 insertions(+), 17 deletions(-) diff --git a/tests_integration/test_data/projects/snowpark_with_package/app/functions.py b/tests_integration/test_data/projects/snowpark_with_package/app/functions.py index c00ee4a60f..a17ad724df 100644 --- a/tests_integration/test_data/projects/snowpark_with_package/app/functions.py +++ b/tests_integration/test_data/projects/snowpark_with_package/app/functions.py @@ -1,7 +1,7 @@ from __future__ import annotations -from PyRTF.Elements import StyleSheet +from dummy_pkg_for_tests import shrubbery def hello_function(name: str) -> str: - return f"{StyleSheet.__str__} {name}" + return shrubbery.knights_of_nii_says() diff --git a/tests_integration/test_data/projects/snowpark_with_package/requirements.txt b/tests_integration/test_data/projects/snowpark_with_package/requirements.txt index 889e05133e..13e4d7e64d 100644 --- a/tests_integration/test_data/projects/snowpark_with_package/requirements.txt +++ b/tests_integration/test_data/projects/snowpark_with_package/requirements.txt @@ -1,4 +1,4 @@ snowflake-snowpark-python # Package below should not be in snowflake anaconda -fonts +PyRTF3 #snowcli # for local development diff --git a/tests_integration/test_data/projects/snowpark_with_package/snowflake.yml b/tests_integration/test_data/projects/snowpark_with_package/snowflake.yml index c144c340bb..3e3732f863 100644 --- a/tests_integration/test_data/projects/snowpark_with_package/snowflake.yml +++ b/tests_integration/test_data/projects/snowpark_with_package/snowflake.yml @@ -11,4 +11,4 @@ snowpark: type: "string" returns: string imports: - - "@dev_deployment/PyRTF3.zip" + - "@dev_deployment/dummy_pkg_for_tests.zip" diff --git a/tests_integration/test_package.py b/tests_integration/test_package.py index 47574e98e5..5f44048863 100644 --- a/tests_integration/test_package.py +++ b/tests_integration/test_package.py @@ -47,26 +47,32 @@ def test_package_upload(self, runner, snowflake_session, test_database): @pytest.mark.integration def test_package_create_with_non_anaconda_package(self, directory_for_test, runner): result = runner.invoke_with_connection_json( - ["snowpark", "package", "create", "PyRTF3", "-y"] + ["snowpark", "package", "create", "dummy_pkg_for_tests_with_deps", "-y"] ) assert result.exit_code == 0 - assert os.path.isfile("PyRTF3.zip") - assert "PyRTF/utils.py" in self._get_filenames_from_zip("PyRTF3.zip") + assert os.path.isfile("dummy_pkg_for_tests_with_deps.zip") + assert "dummy_pkg_for_tests/shrubbery.py" in self._get_filenames_from_zip( + "dummy_pkg_for_tests_with_deps.zip" + ) + assert ( + "dummy_pkg_for_tests_with_deps/shrubbery.py" + in 
self._get_filenames_from_zip("dummy_pkg_for_tests_with_deps.zip") + ) @pytest.mark.integration def test_package_create_with_non_anaconda_package_without_install( self, directory_for_test, runner ): result = runner.invoke_with_connection_json( - ["snowpark", "package", "create", "PyRTF3"] + ["snowpark", "package", "create", "dummy_pkg_for_tests_with_deps"] ) assert_that_result_is_successful(result) assert result.json == { - "message": "Lookup for package PyRTF3 resulted in some error. Please check the package name or try again with -y option" + "message": "Lookup for package dummy_pkg_for_tests_with_deps resulted in some error. Please check the package name or try again with -y option" } - assert not os.path.exists("PyRTF3.zip") + assert not os.path.exists("dummy_pkg_for_tests_with_deps.zip") @pytest.mark.integration def test_create_package_with_deps(self, directory_for_test, runner): diff --git a/tests_integration/test_snowpark.py b/tests_integration/test_snowpark.py index e2d06909bc..57ab1daa1a 100644 --- a/tests_integration/test_snowpark.py +++ b/tests_integration/test_snowpark.py @@ -187,10 +187,16 @@ def test_snowpark_with_separately_created_package( _test_steps, project_directory, alter_snowflake_yml ): - _test_steps.package_should_build_proper_artifact("PyRTF3") - _test_steps.package_should_upload_artifact_to_stage("PyRTF3.zip", STAGE_NAME) + _test_steps.package_should_build_proper_artifact( + "dummy_pkg_for_tests", "dummy_pkg_for_tests/shrubbery.py" + ) + _test_steps.package_should_upload_artifact_to_stage( + "dummy_pkg_for_tests.zip", STAGE_NAME + ) - _test_steps.artifacts_left_after_package_creation_should_be_deleted("PyRTF3.zip") + _test_steps.artifacts_left_after_package_creation_should_be_deleted( + "dummy_pkg_for_tests.zip" + ) with project_directory("snowpark_with_package"): _test_steps.snowpark_build_should_zip_files() @@ -207,7 +213,7 @@ def test_snowpark_with_separately_created_package( _test_steps.snowpark_execute_should_return_expected_value( object_type="function", identifier="test_func('foo')", - expected_value=" foo", + expected_value="We want... 
a shrubbery!", ) diff --git a/tests_integration/testing_utils/snowpark_utils.py b/tests_integration/testing_utils/snowpark_utils.py index 5f929c1e85..0c33ede3a5 100644 --- a/tests_integration/testing_utils/snowpark_utils.py +++ b/tests_integration/testing_utils/snowpark_utils.py @@ -220,14 +220,14 @@ def object_drop_should_finish_successfully( ) assert_that_result_is_successful(result) - def package_should_build_proper_artifact(self, package_name: str): + def package_should_build_proper_artifact(self, package_name: str, file_name: str): result = self._setup.runner.invoke_with_connection_json( ["snowpark", "package", "create", package_name, "-y"] ) assert result.exit_code == 0 - assert os.path.isfile("PyRTF3.zip") - assert "pyparsing/results.py" in ZipFile("PyRTF3.zip").namelist() + assert os.path.isfile(f"{package_name}.zip") + assert file_name in ZipFile(f"{package_name}.zip").namelist() def package_should_upload_artifact_to_stage(self, file_name, stage_name): result = self._setup.runner.invoke_with_connection_json( From ba000a34700cde5382c62a53e40f003e16390dd3 Mon Sep 17 00:00:00 2001 From: Adam Stus Date: Mon, 29 Jan 2024 15:48:39 +0100 Subject: [PATCH 03/17] Bump to rc version (#693) --- src/snowflake/cli/__about__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/snowflake/cli/__about__.py b/src/snowflake/cli/__about__.py index 271d78af30..caf369b3fa 100644 --- a/src/snowflake/cli/__about__.py +++ b/src/snowflake/cli/__about__.py @@ -1,3 +1,3 @@ from __future__ import annotations -VERSION = "2.0.0a3" +VERSION = "2.0.0rc0" From 5140cf0c4ee1f844e1480cafc7c0fd483a03e0ce Mon Sep 17 00:00:00 2001 From: Adam Stus Date: Mon, 29 Jan 2024 17:08:19 +0100 Subject: [PATCH 04/17] Import git when is needed (#689) --- pyproject.toml | 3 ++- .../plugins/nativeapp/version/version_processor.py | 5 +++-- tests/test_loaded_modules.py | 14 ++++++++++++++ tox.ini | 1 + 4 files changed, 20 insertions(+), 3 deletions(-) create mode 100644 tests/test_loaded_modules.py diff --git a/pyproject.toml b/pyproject.toml index 6a3f8aa1b6..3a3f48579e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -80,9 +80,10 @@ select = [ ] [tool.pytest.ini_options] -addopts = "-m 'not integration and not performance and not e2e and not spcs'" +addopts = "-m 'not integration and not performance and not e2e and not spcs and not loaded_modules'" markers = [ "integration: mark test as integration test", "performance: mark test as performance test", "e2e: mark test to execute on SnowCLI installed in fresh virtual environment", + "loaded_modules: checks loaded modules", ] diff --git a/src/snowflake/cli/plugins/nativeapp/version/version_processor.py b/src/snowflake/cli/plugins/nativeapp/version/version_processor.py index 2e48382edc..562889e263 100644 --- a/src/snowflake/cli/plugins/nativeapp/version/version_processor.py +++ b/src/snowflake/cli/plugins/nativeapp/version/version_processor.py @@ -5,8 +5,6 @@ import typer from click import BadOptionUsage, ClickException -from git import Repo -from git.exc import InvalidGitRepositoryError from rich import print from snowflake.cli.api.exceptions import SnowflakeSQLExecutionError from snowflake.cli.api.project.util import unquote_identifier @@ -39,6 +37,9 @@ def check_index_changes_in_git_repo( Checks if the project root, i.e. the native apps project is a git repository. If it is a git repository, it also checks if there any local changes to the directory that may not be on the app package stage. 
""" + from git import Repo + from git.exc import InvalidGitRepositoryError + try: repo = Repo(project_root, search_parent_directories=True) assert repo.git_dir is not None diff --git a/tests/test_loaded_modules.py b/tests/test_loaded_modules.py new file mode 100644 index 0000000000..89f4a29654 --- /dev/null +++ b/tests/test_loaded_modules.py @@ -0,0 +1,14 @@ +import pytest +import sys + +from tests.testing_utils.fixtures import * + + +@pytest.mark.loaded_modules +def test_loaded_modules(runner): + should_not_load = {"git"} + + runner.invoke(["sql", "-q", "select 1"]) + + loaded_modules = sys.modules.keys() + assert loaded_modules.isdisjoint(should_not_load) diff --git a/tox.ini b/tox.ini index bc3fca8c1c..ee3a1fd0b9 100644 --- a/tox.ini +++ b/tox.ini @@ -12,6 +12,7 @@ deps = extras = tests commands: coverage run --source=snowflake.cli -m pytest --snapshot-warn-unused tests/ + coverage run --source=snowflake.cli -m pytest -m loaded_modules --snapshot-warn-unused tests/ coverage report [tox:.package] From cbd43cd3c23d73f006cec3ca91f08197895f1b10 Mon Sep 17 00:00:00 2001 From: David Wang Date: Mon, 29 Jan 2024 23:39:18 -0800 Subject: [PATCH 05/17] SNOW-1020720: Changing image registry prefix from 'registry' to 'image-registry' (#702) --- RELEASE-NOTES.md | 2 +- src/snowflake/cli/plugins/spcs/__init__.py | 2 +- .../{registry => image_registry}/__init__.py | 0 .../{registry => image_registry}/commands.py | 4 +-- .../{registry => image_registry}/manager.py | 0 .../plugin_spec.py | 2 +- tests/spcs/test_registry.py | 28 +++++++++---------- tests_integration/spcs/test_registry.py | 2 +- 8 files changed, 20 insertions(+), 20 deletions(-) rename src/snowflake/cli/plugins/spcs/{registry => image_registry}/__init__.py (100%) rename src/snowflake/cli/plugins/spcs/{registry => image_registry}/commands.py (97%) rename src/snowflake/cli/plugins/spcs/{registry => image_registry}/manager.py (100%) rename src/snowflake/cli/plugins/spcs/{registry => image_registry}/plugin_spec.py (84%) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 3c77605cf0..1c082a67a1 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -27,7 +27,7 @@ * `compute-pool` commands and its alias `cp` were renamed to `pool` commands. * `jobs` commands were renamed to `job`. * `services` commands were renamed to `service` - * `pool`, `job`, `service`, and `registry` commands were moved from `snowpark` group to a new `spcs` group. + * `pool`, `job`, `service`, and `registry` commands were moved from `snowpark` group to a new `spcs` group (`registry` was renamed to `image-registry`). * Streamlit changes * `snow streamlit deploy` is requiring `snowflake.yml` project file with a Streamlit definition. 
diff --git a/src/snowflake/cli/plugins/spcs/__init__.py b/src/snowflake/cli/plugins/spcs/__init__.py index 119a30b020..a6198c2a2a 100644 --- a/src/snowflake/cli/plugins/spcs/__init__.py +++ b/src/snowflake/cli/plugins/spcs/__init__.py @@ -3,8 +3,8 @@ from snowflake.cli.plugins.spcs.compute_pool.commands import ( app as compute_pools_app, ) +from snowflake.cli.plugins.spcs.image_registry.commands import app as registry_app from snowflake.cli.plugins.spcs.jobs.commands import app as jobs_app -from snowflake.cli.plugins.spcs.registry.commands import app as registry_app from snowflake.cli.plugins.spcs.services.commands import app as services_app app = typer.Typer( diff --git a/src/snowflake/cli/plugins/spcs/registry/__init__.py b/src/snowflake/cli/plugins/spcs/image_registry/__init__.py similarity index 100% rename from src/snowflake/cli/plugins/spcs/registry/__init__.py rename to src/snowflake/cli/plugins/spcs/image_registry/__init__.py diff --git a/src/snowflake/cli/plugins/spcs/registry/commands.py b/src/snowflake/cli/plugins/spcs/image_registry/commands.py similarity index 97% rename from src/snowflake/cli/plugins/spcs/registry/commands.py rename to src/snowflake/cli/plugins/spcs/image_registry/commands.py index 7a8664d144..e46113c9fc 100644 --- a/src/snowflake/cli/plugins/spcs/registry/commands.py +++ b/src/snowflake/cli/plugins/spcs/image_registry/commands.py @@ -10,11 +10,11 @@ ) from snowflake.cli.api.commands.flags import DEFAULT_CONTEXT_SETTINGS from snowflake.cli.api.output.types import CollectionResult, ObjectResult -from snowflake.cli.plugins.spcs.registry.manager import RegistryManager +from snowflake.cli.plugins.spcs.image_registry.manager import RegistryManager app = typer.Typer( context_settings=DEFAULT_CONTEXT_SETTINGS, - name="registry", + name="image-registry", help="Manages Snowpark registries.", rich_markup_mode="markdown", ) diff --git a/src/snowflake/cli/plugins/spcs/registry/manager.py b/src/snowflake/cli/plugins/spcs/image_registry/manager.py similarity index 100% rename from src/snowflake/cli/plugins/spcs/registry/manager.py rename to src/snowflake/cli/plugins/spcs/image_registry/manager.py diff --git a/src/snowflake/cli/plugins/spcs/registry/plugin_spec.py b/src/snowflake/cli/plugins/spcs/image_registry/plugin_spec.py similarity index 84% rename from src/snowflake/cli/plugins/spcs/registry/plugin_spec.py rename to src/snowflake/cli/plugins/spcs/image_registry/plugin_spec.py index 80b10bf66a..f3a879a18e 100644 --- a/src/snowflake/cli/plugins/spcs/registry/plugin_spec.py +++ b/src/snowflake/cli/plugins/spcs/image_registry/plugin_spec.py @@ -4,7 +4,7 @@ CommandType, plugin_hook_impl, ) -from snowflake.cli.plugins.spcs.registry import commands +from snowflake.cli.plugins.spcs.image_registry import commands @plugin_hook_impl diff --git a/tests/spcs/test_registry.py b/tests/spcs/test_registry.py index 117675e9ce..6e7f824a55 100644 --- a/tests/spcs/test_registry.py +++ b/tests/spcs/test_registry.py @@ -4,9 +4,9 @@ from tests.testing_utils.fixtures import * -@mock.patch("snowflake.cli.plugins.spcs.registry.manager.RegistryManager._conn") +@mock.patch("snowflake.cli.plugins.spcs.image_registry.manager.RegistryManager._conn") @mock.patch( - "snowflake.cli.plugins.spcs.registry.manager.RegistryManager._execute_query" + "snowflake.cli.plugins.spcs.image_registry.manager.RegistryManager._execute_query" ) def test_registry_get_token_2(mock_execute, mock_conn, mock_cursor, runner): mock_execute.return_value = mock_cursor( @@ -18,18 +18,18 @@ def 
test_registry_get_token_2(mock_execute, mock_conn, mock_cursor, runner): "validityInSecondsST": 42, } } - result = runner.invoke(["spcs", "registry", "token", "--format", "JSON"]) + result = runner.invoke(["spcs", "image-registry", "token", "--format", "JSON"]) assert result.exit_code == 0, result.output assert json.loads(result.stdout) == {"token": "token1234", "expires_in": 42} -@mock.patch("snowflake.cli.plugins.spcs.registry.commands.requests.get") +@mock.patch("snowflake.cli.plugins.spcs.image_registry.commands.requests.get") @mock.patch( - "snowflake.cli.plugins.spcs.registry.commands.RegistryManager._execute_query" + "snowflake.cli.plugins.spcs.image_registry.commands.RegistryManager._execute_query" ) -@mock.patch("snowflake.cli.plugins.spcs.registry.commands.RegistryManager._conn") +@mock.patch("snowflake.cli.plugins.spcs.image_registry.commands.RegistryManager._conn") @mock.patch( - "snowflake.cli.plugins.spcs.registry.commands.RegistryManager.login_to_registry" + "snowflake.cli.plugins.spcs.image_registry.commands.RegistryManager.login_to_registry" ) def test_list_images( mock_login, @@ -73,20 +73,20 @@ def test_list_images( mock_get_images.return_value.text = '{"repositories":["baserepo/super-cool-repo"]}' result = runner.invoke( - ["spcs", "registry", "list-images", "-r", "IMAGES", "--format", "JSON"] + ["spcs", "image-registry", "list-images", "-r", "IMAGES", "--format", "JSON"] ) assert result.exit_code == 0, result.output assert json.loads(result.output) == [{"image": "DB/SCHEMA/IMAGES/super-cool-repo"}] -@mock.patch("snowflake.cli.plugins.spcs.registry.commands.requests.get") +@mock.patch("snowflake.cli.plugins.spcs.image_registry.commands.requests.get") @mock.patch( - "snowflake.cli.plugins.spcs.registry.manager.RegistryManager._execute_query" + "snowflake.cli.plugins.spcs.image_registry.manager.RegistryManager._execute_query" ) -@mock.patch("snowflake.cli.plugins.spcs.registry.commands.RegistryManager._conn") +@mock.patch("snowflake.cli.plugins.spcs.image_registry.commands.RegistryManager._conn") @mock.patch( - "snowflake.cli.plugins.spcs.registry.manager.RegistryManager.login_to_registry" + "snowflake.cli.plugins.spcs.image_registry.manager.RegistryManager.login_to_registry" ) def test_list_tags( mock_login, @@ -118,7 +118,7 @@ def test_list_tags( "name", "db", "schema", - "registry", + "image-registry", "role", "unknown", "unkown2", @@ -134,7 +134,7 @@ def test_list_tags( result = runner.invoke( [ "spcs", - "registry", + "image-registry", "list-tags", "--repository_name", "IMAGES", diff --git a/tests_integration/spcs/test_registry.py b/tests_integration/spcs/test_registry.py index 93df34babf..2030150ced 100644 --- a/tests_integration/spcs/test_registry.py +++ b/tests_integration/spcs/test_registry.py @@ -3,7 +3,7 @@ @pytest.mark.integration def test_token(runner): - result = runner.invoke_with_connection_json(["spcs", "registry", "token"]) + result = runner.invoke_with_connection_json(["spcs", "image-registry", "token"]) assert result.exit_code == 0 assert result.json From 67ed0b21c051ebf908fb2ba0c5ccf181a2b0415d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 08:57:18 +0000 Subject: [PATCH 06/17] Bump pluggy from 1.3.0 to 1.4.0 (#699) Bumps [pluggy](https://github.com/pytest-dev/pluggy) from 1.3.0 to 1.4.0. 
- [Changelog](https://github.com/pytest-dev/pluggy/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pluggy/compare/1.3.0...1.4.0) --- updated-dependencies: - dependency-name: pluggy dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 3a3f48579e..38aed41ae6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,7 +13,7 @@ readme = "README.md" dependencies = [ "coverage==7.4.0", "jinja2==3.1.3", - "pluggy==1.3.0", + "pluggy==1.4.0", "PyYAML==6.0.1", "rich==13.7.0", "requests==2.31.0", From e23cfbd424075cf765f377eb7877d484db5909c0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 08:59:59 +0000 Subject: [PATCH 07/17] Bump snowflake-connector-python[secure-local-storage] from 3.6.0 to 3.7.0 (#697) Bump snowflake-connector-python[secure-local-storage] Bumps [snowflake-connector-python[secure-local-storage]](https://github.com/snowflakedb/snowflake-connector-python) from 3.6.0 to 3.7.0. - [Release notes](https://github.com/snowflakedb/snowflake-connector-python/releases) - [Commits](https://github.com/snowflakedb/snowflake-connector-python/compare/v3.6.0...v3.7.0) --- updated-dependencies: - dependency-name: snowflake-connector-python[secure-local-storage] dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 38aed41ae6..1a3f56b36c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,7 +19,7 @@ dependencies = [ "requests==2.31.0", "requirements-parser==0.5.0", "setuptools==69.0.3", - "snowflake-connector-python[secure-local-storage]==3.6.0", + "snowflake-connector-python[secure-local-storage]==3.7.0", "strictyaml==1.7.3", "tomlkit==0.12.3", "typer==0.9.0", From f8b80825b27180a3857c21992e455ac48f816b18 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Tue, 30 Jan 2024 10:03:53 +0100 Subject: [PATCH 08/17] hide spcs job command (#704) --- src/snowflake/cli/plugins/spcs/jobs/commands.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/snowflake/cli/plugins/spcs/jobs/commands.py b/src/snowflake/cli/plugins/spcs/jobs/commands.py index 023d33ffc5..fbeb2d2eac 100644 --- a/src/snowflake/cli/plugins/spcs/jobs/commands.py +++ b/src/snowflake/cli/plugins/spcs/jobs/commands.py @@ -12,7 +12,10 @@ from snowflake.cli.plugins.spcs.jobs.manager import JobManager app = typer.Typer( - context_settings=DEFAULT_CONTEXT_SETTINGS, name="job", help="Manage Snowpark jobs." 
+ context_settings=DEFAULT_CONTEXT_SETTINGS, + name="job", + help="Manage Snowpark jobs.", + hidden=True, ) From 71d8d126d9072eb61db779e59dd34cc11b77d5ef Mon Sep 17 00:00:00 2001 From: Adam Stus Date: Tue, 30 Jan 2024 10:14:17 +0100 Subject: [PATCH 09/17] Updated release notes with 1.2.5 (#692) --- RELEASE-NOTES.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 1c082a67a1..2a28aab006 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -54,6 +54,11 @@ * Allow the use of quoted identifiers in stages +# v1.2.5 +## Fixes and improvements +* Import git module only when is needed + + # v1.2.4 ## Fixes and improvements * Fixed look up for all folders in downloaded package. From 7332045261c9eb8690584c9991fd38cbd16e81dd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 10:23:21 +0100 Subject: [PATCH 10/17] Bump coverage from 7.4.0 to 7.4.1 (#698) Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.4.0 to 7.4.1. - [Release notes](https://github.com/nedbat/coveragepy/releases) - [Changelog](https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst) - [Commits](https://github.com/nedbat/coveragepy/compare/7.4.0...7.4.1) --- updated-dependencies: - dependency-name: coverage dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 1a3f56b36c..80c24257bc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,7 +11,7 @@ requires-python = ">=3.8" description = "Snowflake CLI" readme = "README.md" dependencies = [ - "coverage==7.4.0", + "coverage==7.4.1", "jinja2==3.1.3", "pluggy==1.4.0", "PyYAML==6.0.1", @@ -40,7 +40,7 @@ classifiers = [ [project.optional-dependencies] dev = [ - "coverage==7.4.0", + "coverage==7.4.1", "pre-commit>=3.5.0", "pytest==7.4.4", "pytest-randomly==3.15.0", From 24ea692de34d969efaa1694b84979eb2fe4e33f9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 10:33:53 +0100 Subject: [PATCH 11/17] Bump codecov/codecov-action from 3.1.4 to 3.1.5 (#695) Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 3.1.4 to 3.1.5. - [Release notes](https://github.com/codecov/codecov-action/releases) - [Changelog](https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/codecov/codecov-action/compare/eaaf4bedf32dbdc6b720b63067d99c4d77d6047d...4fe8c5f003fae66aa5ebb77cfd3e7bfbbda0b6b0) --- updated-dependencies: - dependency-name: codecov/codecov-action dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/test.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 7729d3533e..6cf279ff32 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -29,4 +29,4 @@ jobs: run: | python -m pip install tox-gh-actions tox - - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d + - uses: codecov/codecov-action@4fe8c5f003fae66aa5ebb77cfd3e7bfbbda0b6b0 From 4a90a334ffd14195a8be1855b75cccc268415ee6 Mon Sep 17 00:00:00 2001 From: David Wang Date: Tue, 30 Jan 2024 02:15:24 -0800 Subject: [PATCH 12/17] SNOW-1011737, SNOW-1011750: Add options to create commands for compute pool and service (#679) * SNOW-1011750: Added external access eai, auto_resume, query_warehouse, tags, comment options to 'containers service create'. Updated unit tests with new options. * SNOW-1011737: Extending 'containers pool create' options to auto-resume, initially suspended, auto suspend seconds, and comment. * SNOW-1011737, SNOW-1011750: Splitting --num-instances argument for 'snow spcs pool create' and 'snow spcs service create' into min/max-nodes and min/max-instances, respectively. * SNOW-1011737: Renaming one missed --min-instances to --min-nodes in test.cp --- RELEASE-NOTES.md | 3 + src/snowflake/cli/api/exceptions.py | 2 +- src/snowflake/cli/api/project/util.py | 2 +- src/snowflake/cli/plugins/object/common.py | 81 ++++++++ src/snowflake/cli/plugins/spcs/common.py | 24 +++ .../cli/plugins/spcs/compute_pool/commands.py | 39 +++- .../cli/plugins/spcs/compute_pool/manager.py | 31 ++- .../cli/plugins/spcs/services/commands.py | 43 ++++- .../cli/plugins/spcs/services/manager.py | 43 ++++- tests/object/test_common.py | 46 +++++ tests/snowpark/__init__.py | 0 tests/spcs/__init__.py | 0 tests/spcs/test_common.py | 33 ++++ tests/spcs/test_compute_pool.py | 153 +++++++++++---- tests/spcs/test_services.py | 180 ++++++++++++++++-- tests_integration/{ => spcs}/test_cp.py | 2 +- 16 files changed, 600 insertions(+), 82 deletions(-) create mode 100644 src/snowflake/cli/plugins/object/common.py create mode 100644 tests/object/test_common.py create mode 100644 tests/snowpark/__init__.py create mode 100644 tests/spcs/__init__.py create mode 100644 tests/spcs/test_common.py rename tests_integration/{ => spcs}/test_cp.py (98%) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 2a28aab006..21db5c5b87 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -11,6 +11,8 @@ * Switched to Python Connector default connection https://docs.snowflake.com/en/developer-guide/python-connector/python-connector-connect#setting-a-default-connection * Default connection name changed from `dev` to `default` * Environment variable for default connection name changed from `SNOWFLAKE_OPTIONS_DEFAULT_CONNECTION` to `SNOWFLAKE_DEFAULT_CONNECTION_NAME` +* `--num` option for `snow spcs pool create` has been split into `--min-nodes` and `--max-nodes` +* `--num-instances` option for `snow spcs service create` has been split into `--min-instances` and `--max-instances` * Snowpark changes * Removed `procedure` and `function` subgroups. @@ -28,6 +30,7 @@ * `jobs` commands were renamed to `job`. * `services` commands were renamed to `service` * `pool`, `job`, `service`, and `registry` commands were moved from `snowpark` group to a new `spcs` group (`registry` was renamed to `image-registry`). 
+ * `snow spcs pool create` and `snow spcs service create` have been updated with all options available through SQL interface. * Streamlit changes * `snow streamlit deploy` is requiring `snowflake.yml` project file with a Streamlit definition. diff --git a/src/snowflake/cli/api/exceptions.py b/src/snowflake/cli/api/exceptions.py index 0beb42d033..e46d73b981 100644 --- a/src/snowflake/cli/api/exceptions.py +++ b/src/snowflake/cli/api/exceptions.py @@ -42,7 +42,7 @@ def __init__(self, got_type: type, expected_type: type): class CommandReturnTypeError(ClickException): def __init__(self, got_type: type): - super().__init__(f"Commads have to return OutputData type, but got {got_type}") + super().__init__(f"Commands have to return OutputData type, but got {got_type}") class SnowflakeSQLExecutionError(ClickException): diff --git a/src/snowflake/cli/api/project/util.py b/src/snowflake/cli/api/project/util.py index c3ad91a64f..5623017e71 100644 --- a/src/snowflake/cli/api/project/util.py +++ b/src/snowflake/cli/api/project/util.py @@ -13,7 +13,7 @@ # See https://docs.snowflake.com/en/sql-reference/identifiers-syntax for identifier syntax UNQUOTED_IDENTIFIER_REGEX = r"(^[a-zA-Z_])([a-zA-Z0-9_$]{0,254})" -QUOTED_IDENTIFIER_REGEX = r'"((""|[^"])*)"' +QUOTED_IDENTIFIER_REGEX = r'"((""|[^"]){0,255})"' def clean_identifier(input_: str): diff --git a/src/snowflake/cli/plugins/object/common.py b/src/snowflake/cli/plugins/object/common.py new file mode 100644 index 0000000000..e6e7b61799 --- /dev/null +++ b/src/snowflake/cli/plugins/object/common.py @@ -0,0 +1,81 @@ +from dataclasses import dataclass +from typing import Optional + +import typer +from click import ClickException +from snowflake.cli.api.project.util import ( + QUOTED_IDENTIFIER_REGEX, + UNQUOTED_IDENTIFIER_REGEX, + is_valid_identifier, + to_string_literal, +) + + +@dataclass +class Tag: + name: str + value: str + + def __post_init__(self): + if not is_valid_identifier(self.name): + raise ValueError("name of a tag must be a valid snowflake identifier") + + def value_string_literal(self): + return to_string_literal(self.value) + + +class TagError(ClickException): + def __init__(self): + super().__init__( + "tag must be in the format = where 'name' is a valid identifier and value is a string" + ) + + +def _parse_tag(tag: str) -> Tag: + import re + + identifier_pattern = re.compile( + f"(?P{UNQUOTED_IDENTIFIER_REGEX}|{QUOTED_IDENTIFIER_REGEX})" + ) + value_pattern = re.compile(f"(?P.+)") + result = re.fullmatch(f"{identifier_pattern.pattern}={value_pattern.pattern}", tag) + if result is not None: + try: + return Tag(result.group("tag_name"), result.group("tag_value")) + except ValueError: + raise TagError() + else: + raise TagError() + + +def tag_option(object_type: str): + """ + Provides a common interface for all commands that accept a tag option (e.g. when altering the tag of an object). + Parses the input string in the format "name=value" into a Tag object with 'name' and 'value' properties. + """ + return typer.Option( + None, + "--tag", + help=f"Tag for the {object_type}", + parser=_parse_tag, + metavar="NAME=VALUE", + ) + + +def _comment_callback(comment: Optional[str]): + if comment is None: + return comment + return to_string_literal(comment) + + +def comment_option(object_type: str): + """ + Provides a common interface for all commands that accept a comment option (e.g. when creating a new object). + Parses the input string into a string literal. 
+ """ + return typer.Option( + None, + "--comment", + help=f"Comment for the {object_type}", + callback=_comment_callback, + ) diff --git a/src/snowflake/cli/plugins/spcs/common.py b/src/snowflake/cli/plugins/spcs/common.py index 08224cecfa..e4fdfc5e74 100644 --- a/src/snowflake/cli/plugins/spcs/common.py +++ b/src/snowflake/cli/plugins/spcs/common.py @@ -3,6 +3,8 @@ import sys from typing import TextIO +from click import ClickException + if not sys.stdout.closed and sys.stdout.isatty(): GREEN = "\033[32m" BLUE = "\033[34m" @@ -35,3 +37,25 @@ def print_log_lines(file: TextIO, name, identifier, logs): logs = logs[0:-1] for log in logs: print(_prefix_line(prefix, log + "\n"), file=file, end="", flush=True) + + +def strip_empty_lines(lines: list[str]) -> str: + return "\n".join(stripped for l in lines if (stripped := l.strip())) + + +def validate_and_set_instances(min_instances, max_instances, instance_name): + """ + Used to validate that min_instances is positive and that max_instances is not less than min_instances. In the + case that max_instances is none, sets it equal to min_instances by default. Used like `max_instances = + validate_and_set_instances(min_instances, max_instances, "name")`. + """ + if min_instances < 1: + raise ClickException(f"min_{instance_name} must be positive") + + if max_instances is None: + max_instances = min_instances + elif max_instances < min_instances: + raise ClickException( + f"max_{instance_name} must be greater or equal to min_{instance_name}" + ) + return max_instances diff --git a/src/snowflake/cli/plugins/spcs/compute_pool/commands.py b/src/snowflake/cli/plugins/spcs/compute_pool/commands.py index 1e2fd60611..3dbc8ae521 100644 --- a/src/snowflake/cli/plugins/spcs/compute_pool/commands.py +++ b/src/snowflake/cli/plugins/spcs/compute_pool/commands.py @@ -1,3 +1,5 @@ +from typing import Optional + import typer from snowflake.cli.api.commands.decorators import ( global_options_with_connection, @@ -5,6 +7,8 @@ ) from snowflake.cli.api.commands.flags import DEFAULT_CONTEXT_SETTINGS from snowflake.cli.api.output.types import CommandResult, SingleQueryResult +from snowflake.cli.plugins.object.common import comment_option +from snowflake.cli.plugins.spcs.common import validate_and_set_instances from snowflake.cli.plugins.spcs.compute_pool.manager import ComputePoolManager app = typer.Typer( @@ -19,21 +23,48 @@ @global_options_with_connection def create( name: str = typer.Option(..., "--name", help="Name of the compute pool."), - num_instances: int = typer.Option( - ..., "--num", help="Number of compute pool instances." + min_nodes: int = typer.Option( + 1, "--min-nodes", help="Minimum number of nodes for the compute pool" + ), + max_nodes: Optional[int] = typer.Option( + None, "--max-nodes", help="Maximum number of nodes for the compute pool" ), instance_family: str = typer.Option( ..., "--family", help="Name of the instance family. 
For more information about instance families, refer to the SQL CREATE COMPUTE POOL command.", ), + auto_resume: bool = typer.Option( + True, + "--auto-resume/--no-auto-resume", + help="The compute pool will automatically resume when a service or job is submitted to it.", + ), + initially_suspended: bool = typer.Option( + False, + "--init-suspend", + help="The compute pool will start in a suspended state.", + ), + auto_suspend_secs: int = typer.Option( + 3600, + "--auto-suspend-secs", + help="Number of seconds of inactivity after which you want Snowflake to automatically suspend the compute pool.", + ), + comment: Optional[str] = comment_option("compute pool"), **options, ) -> CommandResult: """ - Creates a compute pool with a specified number of instances. + Creates a compute pool with a specified number of nodes. """ + max_nodes = validate_and_set_instances(min_nodes, max_nodes, "nodes") cursor = ComputePoolManager().create( - pool_name=name, num_instances=num_instances, instance_family=instance_family + pool_name=name, + min_nodes=min_nodes, + max_nodes=max_nodes, + instance_family=instance_family, + auto_resume=auto_resume, + initially_suspended=initially_suspended, + auto_suspend_secs=auto_suspend_secs, + comment=comment, ) return SingleQueryResult(cursor) diff --git a/src/snowflake/cli/plugins/spcs/compute_pool/manager.py b/src/snowflake/cli/plugins/spcs/compute_pool/manager.py index 6702619f8b..fd6845180c 100644 --- a/src/snowflake/cli/plugins/spcs/compute_pool/manager.py +++ b/src/snowflake/cli/plugins/spcs/compute_pool/manager.py @@ -1,19 +1,34 @@ +from typing import Optional + from snowflake.cli.api.sql_execution import SqlExecutionMixin +from snowflake.cli.plugins.spcs.common import strip_empty_lines from snowflake.connector.cursor import SnowflakeCursor class ComputePoolManager(SqlExecutionMixin): def create( - self, pool_name: str, num_instances: int, instance_family: str + self, + pool_name: str, + min_nodes: int, + max_nodes: int, + instance_family: str, + auto_resume: bool, + initially_suspended: bool, + auto_suspend_secs: int, + comment: Optional[str], ) -> SnowflakeCursor: - return self._execute_query( - f"""\ + query = f"""\ CREATE COMPUTE POOL {pool_name} - MIN_NODES = {num_instances} - MAX_NODES = {num_instances} - INSTANCE_FAMILY = {instance_family}; - """ - ) + MIN_NODES = {min_nodes} + MAX_NODES = {max_nodes} + INSTANCE_FAMILY = {instance_family} + AUTO_RESUME = {auto_resume} + INITIALLY_SUSPENDED = {initially_suspended} + AUTO_SUSPEND_SECS = {auto_suspend_secs} + """.splitlines() + if comment: + query.append(f"COMMENT = {comment}") + return self._execute_query(strip_empty_lines(query)) def stop(self, pool_name: str) -> SnowflakeCursor: return self._execute_query(f"alter compute pool {pool_name} stop all;") diff --git a/src/snowflake/cli/plugins/spcs/services/commands.py b/src/snowflake/cli/plugins/spcs/services/commands.py index 3d2c221320..8aa89519f5 100644 --- a/src/snowflake/cli/plugins/spcs/services/commands.py +++ b/src/snowflake/cli/plugins/spcs/services/commands.py @@ -1,5 +1,6 @@ import sys from pathlib import Path +from typing import List, Optional import typer from snowflake.cli.api.commands.decorators import ( @@ -12,7 +13,11 @@ QueryJsonValueResult, SingleQueryResult, ) -from snowflake.cli.plugins.spcs.common import print_log_lines +from snowflake.cli.plugins.object.common import Tag, comment_option, tag_option +from snowflake.cli.plugins.spcs.common import ( + print_log_lines, + validate_and_set_instances, +) from 
snowflake.cli.plugins.spcs.services.manager import ServiceManager app = typer.Typer( @@ -36,18 +41,48 @@ def create( dir_okay=False, exists=True, ), - num_instances: int = typer.Option(1, "--num-instances", help="Number of instances"), + min_instances: int = typer.Option( + 1, "--min-instances", help="Minimum number of service instances to run" + ), + max_instances: Optional[int] = typer.Option( + None, "--max-instances", help="Maximum number of service instances to run" + ), + auto_resume: bool = typer.Option( + True, + "--auto-resume/--no-auto-resume", + help="The service will automatically resume when a service function or ingress is called.", + ), + external_access_integrations: Optional[List[str]] = typer.Option( + None, + "--eai-name", + help="Identifies External Access Integrations(EAI) that the service can access. This option may be specified multiple times for multiple EAIs.", + ), + query_warehouse: Optional[str] = typer.Option( + None, + "--query-warehouse", + help="Warehouse to use if a service container connects to Snowflake to execute a query without explicitly specifying a warehouse to use.", + ), + tags: Optional[List[Tag]] = tag_option("service"), + comment: Optional[str] = comment_option("service"), **options, ) -> CommandResult: """ Creates a new Snowpark Container Services service in the current schema. """ - + max_instances = validate_and_set_instances( + min_instances, max_instances, "instances" + ) cursor = ServiceManager().create( service_name=name, - num_instances=num_instances, + min_instances=min_instances, + max_instances=max_instances, compute_pool=compute_pool, spec_path=spec_path, + external_access_integrations=external_access_integrations, + auto_resume=auto_resume, + query_warehouse=query_warehouse, + tags=tags, + comment=comment, ) return SingleQueryResult(cursor) diff --git a/src/snowflake/cli/plugins/spcs/services/manager.py b/src/snowflake/cli/plugins/spcs/services/manager.py index b64158b1c2..fc24fb26b5 100644 --- a/src/snowflake/cli/plugins/spcs/services/manager.py +++ b/src/snowflake/cli/plugins/spcs/services/manager.py @@ -1,6 +1,9 @@ from pathlib import Path +from typing import List, Optional from snowflake.cli.api.sql_execution import SqlExecutionMixin +from snowflake.cli.plugins.object.common import Tag +from snowflake.cli.plugins.spcs.common import strip_empty_lines from snowflake.connector.cursor import SnowflakeCursor @@ -10,21 +13,47 @@ def create( service_name: str, compute_pool: str, spec_path: Path, - num_instances: int, + min_instances: int, + max_instances: int, + auto_resume: bool, + external_access_integrations: Optional[List[str]], + query_warehouse: Optional[str], + tags: Optional[List[Tag]], + comment: Optional[str], ) -> SnowflakeCursor: spec = self._read_yaml(spec_path) - return self._execute_schema_query( - f"""\ + + query = f"""\ CREATE SERVICE IF NOT EXISTS {service_name} IN COMPUTE POOL {compute_pool} FROM SPECIFICATION $$ {spec} $$ WITH - MIN_INSTANCES = {num_instances} - MAX_INSTANCES = {num_instances} - """ - ) + MIN_INSTANCES = {min_instances} + MAX_INSTANCES = {max_instances} + AUTO_RESUME = {auto_resume} + """.splitlines() + + if external_access_integrations: + external_access_integration_list = ",".join( + f"{e}" for e in external_access_integrations + ) + query.append( + f"EXTERNAL_ACCESS_INTEGRATIONS = ({external_access_integration_list})" + ) + + if query_warehouse: + query.append(f"QUERY_WAREHOUSE = {query_warehouse}") + + if tags: + tag_list = ",".join(f"{t.name}={t.value_string_literal()}" for t in tags) + 
query.append(f"TAG ({tag_list})") + + if comment: + query.append(f"COMMENT = {comment}") + + return self._execute_schema_query(strip_empty_lines(query)) def _read_yaml(self, path: Path) -> str: # TODO(aivanou): Add validation towards schema diff --git a/tests/object/test_common.py b/tests/object/test_common.py new file mode 100644 index 0000000000..3d434f1975 --- /dev/null +++ b/tests/object/test_common.py @@ -0,0 +1,46 @@ +from snowflake.cli.plugins.object.common import _parse_tag, Tag +from typing import Tuple +import pytest + +from click import ClickException + + +@pytest.mark.parametrize( + "value, expected", + [ + ("tag=value", ("tag", "value")), + ("_underscore_start=value", ("_underscore_start", "value")), + ("a=xyz", ("a", "xyz")), + ("A=123", ("A", "123")), + ("mixedCase=value", ("mixedCase", "value")), + ("_=value", ("_", "value")), + ("tag='this is a value'", ("tag", "'this is a value'")), + ( + '"tag name!@#"=value', + ('"tag name!@#"', "value"), + ), # quoted identifier allows for spaces and special characters + ( + "tag==value", + ("tag", "=value"), + ), # This is a strange case which we may not actually want to support + ], +) +def test_parse_tag_valid(value: str, expected: Tuple[str, str]): + assert _parse_tag(value) == Tag(*expected) + + +@pytest.mark.parametrize( + "value", + [ + "123_name=value", # starts with a digit + "tag name=value", # space in identifier + "tag&_name=value", # special characters in identifier + "tag", # no equals sign + "=value", # empty identifier + "a" * 257 + "=value", # identifier is over 256 characters + '"tag"name"=value', # undoubled quote in tag name + ], +) +def test_parse_tag_invalid(value: str): + with pytest.raises(ClickException): + _parse_tag(value) diff --git a/tests/snowpark/__init__.py b/tests/snowpark/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/spcs/__init__.py b/tests/spcs/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/spcs/test_common.py b/tests/spcs/test_common.py new file mode 100644 index 0000000000..3554b80b16 --- /dev/null +++ b/tests/spcs/test_common.py @@ -0,0 +1,33 @@ +from snowflake.cli.plugins.spcs.common import validate_and_set_instances +from tests.testing_utils.fixtures import * +from click import ClickException + + +@pytest.mark.parametrize( + "min_instances, max_instances, expected_max", + [ + (2, None, 2), # max_instances is None, set max_instances to min_instances + ( + 5, + 10, + 10, + ), # max_instances is valid non-None value, return max_instances unchanged + ], +) +def test_validate_and_set_instances(min_instances, max_instances, expected_max): + assert expected_max == validate_and_set_instances( + min_instances, max_instances, "name" + ) + + +@pytest.mark.parametrize( + "min_instances, max_instances", + [ + (0, 1), # non-positive min_instances + (-1, 1), # negative min_instances + (2, 1), # min_instances > max_instances + ], +) +def test_validate_and_set_instances_invalid(min_instances, max_instances): + with pytest.raises(ClickException): + validate_and_set_instances(min_instances, max_instances, "name") diff --git a/tests/spcs/test_compute_pool.py b/tests/spcs/test_compute_pool.py index 0d6608e333..f6bc9fcf49 100644 --- a/tests/spcs/test_compute_pool.py +++ b/tests/spcs/test_compute_pool.py @@ -1,50 +1,121 @@ -import unittest from unittest.mock import Mock, patch from snowflake.cli.plugins.spcs.compute_pool.manager import ComputePoolManager from snowflake.connector.cursor import SnowflakeCursor +from snowflake.cli.api.project.util 
import to_string_literal -class TestComputePoolManager(unittest.TestCase): - def setUp(self): - self.compute_pool_manager = ComputePoolManager() +@patch( + "snowflake.cli.plugins.spcs.compute_pool.manager.ComputePoolManager._execute_query" +) +def test_create(mock_execute_query): + pool_name = "test_pool" + min_nodes = 2 + max_nodes = 3 + instance_family = "test_family" + auto_resume = True + initially_suspended = False + auto_suspend_secs = 7200 + comment = "'test comment'" + cursor = Mock(spec=SnowflakeCursor) + mock_execute_query.return_value = cursor + result = ComputePoolManager().create( + pool_name=pool_name, + min_nodes=min_nodes, + max_nodes=max_nodes, + instance_family=instance_family, + auto_resume=auto_resume, + initially_suspended=initially_suspended, + auto_suspend_secs=auto_suspend_secs, + comment=comment, + ) + expected_query = " ".join( + [ + "CREATE COMPUTE POOL test_pool", + "MIN_NODES = 2", + "MAX_NODES = 3", + "INSTANCE_FAMILY = test_family", + "AUTO_RESUME = True", + "INITIALLY_SUSPENDED = False", + "AUTO_SUSPEND_SECS = 7200", + "COMMENT = 'test comment'", + ] + ) + actual_query = " ".join(mock_execute_query.mock_calls[0].args[0].split()) + assert expected_query == actual_query + assert result == cursor + - @patch( - "snowflake.cli.plugins.spcs.compute_pool.manager.ComputePoolManager._execute_query" +@patch("snowflake.cli.plugins.spcs.compute_pool.manager.ComputePoolManager.create") +def test_create_pool_cli_defaults(mock_create, runner): + result = runner.invoke( + [ + "spcs", + "pool", + "create", + "--name", + "test_pool", + "--family", + "test_family", + ] ) - def test_create(self, mock_execute_query): - pool_name = "test_pool" - num_instances = 2 - instance_family = "test_family" - cursor = Mock(spec=SnowflakeCursor) - mock_execute_query.return_value = cursor - result = self.compute_pool_manager.create( - pool_name, num_instances, instance_family - ) - expected_query = ( - "CREATE COMPUTE POOL test_pool " - "MIN_NODES = 2 " - "MAX_NODES = 2 " - "INSTANCE_FAMILY = test_family;" - ) - actual_query = " ".join( - mock_execute_query.mock_calls[0].args[0].replace("\n", "").split() - ) - self.assertEqual(expected_query, actual_query) - self.assertEqual(result, cursor) - - @patch( - "snowflake.cli.plugins.spcs.compute_pool.manager.ComputePoolManager._execute_query" + assert result.exit_code == 0, result.output + mock_create.assert_called_once_with( + pool_name="test_pool", + min_nodes=1, + max_nodes=1, + instance_family="test_family", + auto_resume=True, + initially_suspended=False, + auto_suspend_secs=3600, + comment=None, ) - def test_stop(self, mock_execute_query): - pool_name = "test_pool" - cursor = Mock(spec=SnowflakeCursor) - mock_execute_query.return_value = cursor - result = self.compute_pool_manager.stop(pool_name) - expected_query = "alter compute pool test_pool stop all;" - mock_execute_query.assert_called_once_with(expected_query) - self.assertEqual(result, cursor) - - -if __name__ == "__main__": - unittest.main() + + +@patch("snowflake.cli.plugins.spcs.compute_pool.manager.ComputePoolManager.create") +def test_create_pool_cli(mock_create, runner): + result = runner.invoke( + [ + "spcs", + "pool", + "create", + "--name", + "test_pool", + "--min-nodes", + "2", + "--max-nodes", + "3", + "--family", + "test_family", + "--no-auto-resume", + "--init-suspend", + "--auto-suspend-secs", + "7200", + "--comment", + "this is a test", + ] + ) + assert result.exit_code == 0, result.output + mock_create.assert_called_once_with( + pool_name="test_pool", + min_nodes=2, + 
max_nodes=3, + instance_family="test_family", + auto_resume=False, + initially_suspended=True, + auto_suspend_secs=7200, + comment=to_string_literal("this is a test"), + ) + + +@patch( + "snowflake.cli.plugins.spcs.compute_pool.manager.ComputePoolManager._execute_query" +) +def test_stop(mock_execute_query): + pool_name = "test_pool" + cursor = Mock(spec=SnowflakeCursor) + mock_execute_query.return_value = cursor + result = ComputePoolManager().stop(pool_name) + expected_query = "alter compute pool test_pool stop all;" + mock_execute_query.assert_called_once_with(expected_query) + assert result == cursor diff --git a/tests/spcs/test_services.py b/tests/spcs/test_services.py index bfc9fc2ad3..d5ecbfb37d 100644 --- a/tests/spcs/test_services.py +++ b/tests/spcs/test_services.py @@ -2,11 +2,13 @@ from textwrap import dedent from unittest.mock import Mock, patch +from click import ClickException import pytest import strictyaml from snowflake.cli.plugins.spcs.services.manager import ServiceManager - from tests.testing_utils.fixtures import * +from snowflake.cli.api.project.util import to_string_literal +from snowflake.cli.plugins.object.common import Tag @patch( @@ -15,7 +17,8 @@ def test_create_service(mock_execute_schema_query, other_directory): service_name = "test_service" compute_pool = "test_pool" - num_instances = 42 + min_instances = 42 + max_instances = 43 tmp_dir = Path(other_directory) spec_path = tmp_dir / "spec.yml" spec_path.write_text( @@ -33,38 +36,185 @@ def test_create_service(mock_execute_schema_query, other_directory): """ ) ) + auto_resume = True + external_access_integrations = [ + "google_apis_access_integration", + "salesforce_api_access_integration", + ] + query_warehouse = "test_warehouse" + tags = [Tag("test_tag", "test value"), Tag("key", "value")] + comment = "'user\\'s comment'" cursor = Mock(spec=SnowflakeCursor) mock_execute_schema_query.return_value = cursor result = ServiceManager().create( - service_name, compute_pool, Path(spec_path), num_instances - ) - expected_query = ( - "CREATE SERVICE IF NOT EXISTS test_service " - "IN COMPUTE POOL test_pool " - 'FROM SPECIFICATION $$ {"spec": {"containers": [{"name": "cloudbeaver", "image": ' - '"/spcs_demos_db/cloudbeaver:23.2.1"}], "endpoints": [{"name": "cloudbeaver", ' - '"port": 80, "public": true}]}} $$ ' - "WITH MIN_INSTANCES = 42 MAX_INSTANCES = 42" + service_name=service_name, + compute_pool=compute_pool, + spec_path=Path(spec_path), + min_instances=min_instances, + max_instances=max_instances, + auto_resume=auto_resume, + external_access_integrations=external_access_integrations, + query_warehouse=query_warehouse, + tags=tags, + comment=comment, ) - actual_query = " ".join( - mock_execute_schema_query.mock_calls[0].args[0].replace("\n", "").split() + expected_query = " ".join( + [ + "CREATE SERVICE IF NOT EXISTS test_service", + "IN COMPUTE POOL test_pool", + 'FROM SPECIFICATION $$ {"spec": {"containers": [{"name": "cloudbeaver", "image":', + '"/spcs_demos_db/cloudbeaver:23.2.1"}], "endpoints": [{"name": "cloudbeaver",', + '"port": 80, "public": true}]}} $$', + "WITH MIN_INSTANCES = 42 MAX_INSTANCES = 43", + "AUTO_RESUME = True", + "EXTERNAL_ACCESS_INTEGRATIONS = (google_apis_access_integration,salesforce_api_access_integration)", + "QUERY_WAREHOUSE = test_warehouse", + "TAG (test_tag='test value',key='value')", + "COMMENT = 'user\\'s comment'", + ] ) + actual_query = " ".join(mock_execute_schema_query.mock_calls[0].args[0].split()) assert expected_query == actual_query assert result == cursor 
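These service and pool tests all compare the generated statement after collapsing whitespace, so the managers remain free to format their SQL across multiple lines. A minimal, self-contained sketch of that pattern, assuming a clause-list builder similar in spirit to the managers in this patch (the helper names here are illustrative, not the project's API):

```python
from typing import List, Optional


def normalize(sql: str) -> str:
    # Collapse newlines and repeated spaces, mirroring the assertions in these tests.
    return " ".join(sql.split())


def build_create_pool_query(
    name: str, min_nodes: int, max_nodes: int, comment: Optional[str] = None
) -> str:
    # Illustrative clause-list pattern: required clauses first, optional ones appended.
    clauses: List[str] = [
        f"CREATE COMPUTE POOL {name}",
        f"MIN_NODES = {min_nodes}",
        f"MAX_NODES = {max_nodes}",
    ]
    if comment:
        clauses.append(f"COMMENT = {comment}")
    return "\n".join(clauses)


assert normalize(build_create_pool_query("test_pool", 2, 3)) == (
    "CREATE COMPUTE POOL test_pool MIN_NODES = 2 MAX_NODES = 3"
)
```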
+@patch("snowflake.cli.plugins.spcs.services.manager.ServiceManager.create") +def test_create_service_cli_defaults(mock_create, other_directory, runner): + tmp_dir = Path(other_directory) + spec_path = tmp_dir / "spec.yml" + spec_path.write_text( + dedent( + """ + spec: + containers: + - name: cloudbeaver + image: /spcs_demos_db/cloudbeaver:23.2.1 + endpoints: + - name: cloudbeaver + port: 80 + public: true + + """ + ) + ) + result = runner.invoke( + [ + "spcs", + "service", + "create", + "--name", + "test_service", + "--compute-pool", + "test_pool", + "--spec-path", + f"{spec_path}", + ] + ) + assert result.exit_code == 0, result.output + mock_create.assert_called_once_with( + service_name="test_service", + compute_pool="test_pool", + spec_path=spec_path, + min_instances=1, + max_instances=1, + auto_resume=True, + external_access_integrations=[], + query_warehouse=None, + tags=[], + comment=None, + ) + + +@patch("snowflake.cli.plugins.spcs.services.manager.ServiceManager.create") +def test_create_service_cli(mock_create, other_directory, runner): + tmp_dir = Path(other_directory) + spec_path = tmp_dir / "spec.yml" + spec_path.write_text( + dedent( + """ + spec: + containers: + - name: cloudbeaver + image: /spcs_demos_db/cloudbeaver:23.2.1 + endpoints: + - name: cloudbeaver + port: 80 + public: true + + """ + ) + ) + result = runner.invoke( + [ + "spcs", + "service", + "create", + "--name", + "test_service", + "--compute-pool", + "test_pool", + "--spec-path", + f"{spec_path}", + "--min-instances", + "42", + "--max-instances", + "43", + "--no-auto-resume", + "--eai-name", + "google_api", + "--eai-name", + "salesforce_api", + "--query-warehouse", + "test_warehouse", + "--tag", + "name=value", + "--tag", + '"$trange name"=normal value', + "--comment", + "this is a test", + ] + ) + assert result.exit_code == 0, result.output + print(mock_create.mock_calls[0]) + mock_create.assert_called_once_with( + service_name="test_service", + compute_pool="test_pool", + spec_path=spec_path, + min_instances=42, + max_instances=43, + auto_resume=False, + external_access_integrations=["google_api", "salesforce_api"], + query_warehouse="test_warehouse", + tags=[Tag("name", "value"), Tag('"$trange name"', "normal value")], + comment=to_string_literal("this is a test"), + ) + + @patch("snowflake.cli.plugins.spcs.services.manager.ServiceManager._read_yaml") def test_create_service_with_invalid_spec(mock_read_yaml): service_name = "test_service" compute_pool = "test_pool" spec_path = "/path/to/spec.yaml" - num_instances = 42 + min_instances = 42 + max_instances = 42 + external_access_integrations = query_warehouse = tags = comment = None + auto_resume = False mock_read_yaml.side_effect = strictyaml.YAMLError("Invalid YAML") + with pytest.raises(strictyaml.YAMLError): ServiceManager().create( - service_name, compute_pool, Path(spec_path), num_instances + service_name=service_name, + compute_pool=compute_pool, + spec_path=Path(spec_path), + min_instances=min_instances, + max_instances=max_instances, + auto_resume=auto_resume, + external_access_integrations=external_access_integrations, + query_warehouse=query_warehouse, + tags=tags, + comment=comment, ) diff --git a/tests_integration/test_cp.py b/tests_integration/spcs/test_cp.py similarity index 98% rename from tests_integration/test_cp.py rename to tests_integration/spcs/test_cp.py index 980cc07fa5..74201f7fa3 100644 --- a/tests_integration/test_cp.py +++ b/tests_integration/spcs/test_cp.py @@ -20,7 +20,7 @@ def test_cp(runner, snowflake_session): "create", 
"--name", cp_name, - "--num", + "--min-nodes", 1, "--family", "STANDARD_1", From b825e63d31924a0233d414fb093aa6ca11509d8f Mon Sep 17 00:00:00 2001 From: David Wang Date: Tue, 30 Jan 2024 02:31:46 -0800 Subject: [PATCH 13/17] SNOW-1011765: Move list-images and list-tags to image-repository (#703) * SNOW-1011765: Moved list-images and list-tags to from image-registry to image-repository. Added integration tests for image-repository * SNOW-1011765: Update release notes * SNOW-1011765: Fixing some imports * SNOW-1011765: Changing repository name from option to argument for list-images, list-tags --- RELEASE-NOTES.md | 7 +- src/snowflake/cli/plugins/spcs/__init__.py | 4 + .../plugins/spcs/image_registry/commands.py | 116 +-------------- .../plugins/spcs/image_registry/manager.py | 61 +------- .../spcs/image_registry/plugin_spec.py | 16 --- .../plugins/spcs/image_repository/__init__.py | 0 .../plugins/spcs/image_repository/commands.py | 122 ++++++++++++++++ .../plugins/spcs/image_repository/manager.py | 62 ++++++++ tests/spcs/test_image_repository.py | 133 ++++++++++++++++++ tests/spcs/test_registry.py | 129 ----------------- .../spcs/test_image_repository.py | 61 ++++++++ 11 files changed, 389 insertions(+), 322 deletions(-) delete mode 100644 src/snowflake/cli/plugins/spcs/image_registry/plugin_spec.py create mode 100644 src/snowflake/cli/plugins/spcs/image_repository/__init__.py create mode 100644 src/snowflake/cli/plugins/spcs/image_repository/commands.py create mode 100644 src/snowflake/cli/plugins/spcs/image_repository/manager.py create mode 100644 tests/spcs/test_image_repository.py create mode 100644 tests_integration/spcs/test_image_repository.py diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 21db5c5b87..8ee77352fe 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -11,8 +11,6 @@ * Switched to Python Connector default connection https://docs.snowflake.com/en/developer-guide/python-connector/python-connector-connect#setting-a-default-connection * Default connection name changed from `dev` to `default` * Environment variable for default connection name changed from `SNOWFLAKE_OPTIONS_DEFAULT_CONNECTION` to `SNOWFLAKE_DEFAULT_CONNECTION_NAME` -* `--num` option for `snow spcs pool create` has been split into `--min-nodes` and `--max-nodes` -* `--num-instances` option for `snow spcs service create` has been split into `--min-instances` and `--max-instances` * Snowpark changes * Removed `procedure` and `function` subgroups. @@ -29,8 +27,9 @@ * `compute-pool` commands and its alias `cp` were renamed to `pool` commands. * `jobs` commands were renamed to `job`. * `services` commands were renamed to `service` - * `pool`, `job`, `service`, and `registry` commands were moved from `snowpark` group to a new `spcs` group (`registry` was renamed to `image-registry`). - * `snow spcs pool create` and `snow spcs service create` have been updated with all options available through SQL interface. + * `pool`, `job`, `service`, and `image-registry` commands were moved from `snowpark` group to a new `spcs` group (`registry` was renamed to `image-registry`). + * `snow spcs pool create` and `snow spcs service create` have been updated with new options to match SQL interface + * Added new `image-repository` command group under `spcs`. Moved `list-images` and `list-tags` from `registry` to `image-repository`. * Streamlit changes * `snow streamlit deploy` is requiring `snowflake.yml` project file with a Streamlit definition. 
diff --git a/src/snowflake/cli/plugins/spcs/__init__.py b/src/snowflake/cli/plugins/spcs/__init__.py index a6198c2a2a..1da2a5ccfb 100644 --- a/src/snowflake/cli/plugins/spcs/__init__.py +++ b/src/snowflake/cli/plugins/spcs/__init__.py @@ -4,6 +4,9 @@ app as compute_pools_app, ) from snowflake.cli.plugins.spcs.image_registry.commands import app as registry_app +from snowflake.cli.plugins.spcs.image_repository.commands import ( + app as image_repository_app, +) from snowflake.cli.plugins.spcs.jobs.commands import app as jobs_app from snowflake.cli.plugins.spcs.services.commands import app as services_app @@ -17,3 +20,4 @@ app.add_typer(services_app) # type: ignore app.add_typer(jobs_app) # type: ignore app.add_typer(registry_app) # type: ignore +app.add_typer(image_repository_app) diff --git a/src/snowflake/cli/plugins/spcs/image_registry/commands.py b/src/snowflake/cli/plugins/spcs/image_registry/commands.py index e46113c9fc..103fb7e58a 100644 --- a/src/snowflake/cli/plugins/spcs/image_registry/commands.py +++ b/src/snowflake/cli/plugins/spcs/image_registry/commands.py @@ -1,15 +1,10 @@ -import json -from typing import Optional - -import requests import typer -from click import ClickException from snowflake.cli.api.commands.decorators import ( global_options_with_connection, with_output, ) from snowflake.cli.api.commands.flags import DEFAULT_CONTEXT_SETTINGS -from snowflake.cli.api.output.types import CollectionResult, ObjectResult +from snowflake.cli.api.output.types import ObjectResult from snowflake.cli.plugins.spcs.image_registry.manager import RegistryManager app = typer.Typer( @@ -26,112 +21,3 @@ def token(**options) -> ObjectResult: """Gets the token from environment to use for authenticating with the registry.""" return ObjectResult(RegistryManager().get_token()) - - -@app.command("list-images") -@with_output -@global_options_with_connection -def list_images( - repo_name: str = typer.Option( - ..., - "--repository_name", - "-r", - help="Name of the image repository shown by the `SHOW IMAGE REPOSITORIES` SQL command.", - ), - **options, -) -> CollectionResult: - """Lists images in given repository.""" - registry_manager = RegistryManager() - database = registry_manager.get_database() - schema = registry_manager.get_schema() - url = registry_manager.get_repository_url(repo_name) - api_url = registry_manager.get_repository_api_url(url) - bearer_login = registry_manager.login_to_registry(api_url) - - repos = [] - query: Optional[str] = f"{api_url}/_catalog?n=10" - - while query: - # Make paginated catalog requests - response = requests.get( - query, headers={"Authorization": f"Bearer {bearer_login}"} - ) - - if response.status_code != 200: - raise ClickException(f"Call to the registry failed {response.text}") - - data = json.loads(response.text) - if "repositories" in data: - repos.extend(data["repositories"]) - - if "Link" in response.headers: - # There are more results - query = f"{api_url}/_catalog?n=10&last={repos[-1]}" - else: - query = None - - images = [] - for repo in repos: - prefix = f"{database}/{schema}/{repo_name}/" - repo = repo.replace("baserepo/", prefix) - images.append({"image": repo}) - - return CollectionResult(images) - - -@app.command("list-tags") -@with_output -@global_options_with_connection -def list_tags( - repo_name: str = typer.Option( - ..., - "--repository_name", - "-r", - help="Name of the image repository shown by the `SHOW IMAGE REPOSITORIES` SQL command.", - ), - image_name: str = typer.Option( - ..., - "--image_name", - "-i", - help="Name of the image 
as shown in the output of list-images", - ), - **options, -) -> CollectionResult: - """Lists tags for given image in a repository.""" - - registry_manager = RegistryManager() - url = registry_manager.get_repository_url(repo_name) - api_url = registry_manager.get_repository_api_url(url) - bearer_login = registry_manager.login_to_registry(api_url) - - repo_name = image_name.split("/")[2] - image_realname = "/".join(image_name.split("/")[3:]) - - tags = [] - query: Optional[str] = f"{api_url}/{image_realname}/tags/list?n=10" - - while query is not None: - # Make paginated catalog requests - response = requests.get( - query, headers={"Authorization": f"Bearer {bearer_login}"} - ) - - if response.status_code != 200: - print("Call to the registry failed", response.text) - - data = json.loads(response.text) - if "tags" in data: - tags.extend(data["tags"]) - - if "Link" in response.headers: - # There are more results - query = f"{api_url}/{image_realname}/tags/list?n=10&last={tags[-1]}" - else: - query = None - - tags_list = [] - for tag in tags: - image_tag = f"{image_name}:{tag}" - tags_list.append({"tag": image_tag}) - - return CollectionResult(tags_list) diff --git a/src/snowflake/cli/plugins/spcs/image_registry/manager.py b/src/snowflake/cli/plugins/spcs/image_registry/manager.py index eb5ecf6ed0..1b2580bee4 100644 --- a/src/snowflake/cli/plugins/spcs/image_registry/manager.py +++ b/src/snowflake/cli/plugins/spcs/image_registry/manager.py @@ -5,19 +5,9 @@ import requests from click import ClickException from snowflake.cli.api.sql_execution import SqlExecutionMixin -from snowflake.connector.cursor import SnowflakeCursor class RegistryManager(SqlExecutionMixin): - def get_database(self): - return self._conn.database - - def get_schema(self): - return self._conn.schema - - def get_role(self): - return self._conn.role - def get_token(self): """ Get token to authenticate with registry. @@ -38,6 +28,9 @@ def get_token(self): } def login_to_registry(self, repo_url): + """ + Logs in to the registry using basic authentication and generates a bearer authentication token. + """ token = json.dumps(self.get_token()) parsed_url = urlparse(repo_url) @@ -47,56 +40,8 @@ def login_to_registry(self, repo_url): login_url = f"{scheme}://{host}/login" creds = base64.b64encode(f"0sessiontoken:{token}".encode("utf-8")) creds = creds.decode("utf-8") - resp = requests.get(login_url, headers={"Authorization": f"Basic {creds}"}) if resp.status_code != 200: raise ClickException(f"Failed to login to the repository {resp.text}") - return json.loads(resp.text)["token"] - - def get_registry_url(self, repo_name: str) -> SnowflakeCursor: - role = self.get_role() - database = self.get_database() - schema = self.get_schema() - - registry_query = f""" - use role {role}; - use database {database}; - use schema {schema}; - show image repositories like '{repo_name}'; - """ - - return self._execute_query(registry_query) - - def get_repository_url(self, repo_name): - database = self.get_database() - schema = self.get_schema() - - result_set = self.get_registry_url(repo_name=repo_name) - - results = result_set.fetchall() - if len(results) == 0: - raise ClickException( - f"Specified repository name {repo_name} not found in database {database} and schema {schema}" - ) - else: - if len(results) > 1: - raise Exception( - f"Found more than one repositories with name {repo_name}. This is unexpected." 
- ) - - return f"https://{results[0][4]}" - - def get_repository_api_url(self, repo_url): - """ - Converts a repo URL to a registry OCI API URL. - https://reg.com/db/schema/repo becomes https://reg.com/v2/db/schema/repo - """ - parsed_url = urlparse(repo_url) - - scheme = parsed_url.scheme - host = parsed_url.netloc - path = parsed_url.path - - return f"{scheme}://{host}/v2{path}" diff --git a/src/snowflake/cli/plugins/spcs/image_registry/plugin_spec.py b/src/snowflake/cli/plugins/spcs/image_registry/plugin_spec.py deleted file mode 100644 index f3a879a18e..0000000000 --- a/src/snowflake/cli/plugins/spcs/image_registry/plugin_spec.py +++ /dev/null @@ -1,16 +0,0 @@ -from snowflake.cli.api.plugins.command import ( - SNOWCLI_ROOT_COMMAND_PATH, - CommandSpec, - CommandType, - plugin_hook_impl, -) -from snowflake.cli.plugins.spcs.image_registry import commands - - -@plugin_hook_impl -def command_spec(): - return CommandSpec( - parent_command_path=SNOWCLI_ROOT_COMMAND_PATH, - command_type=CommandType.COMMAND_GROUP, - typer_instance=commands.app, - ) diff --git a/src/snowflake/cli/plugins/spcs/image_repository/__init__.py b/src/snowflake/cli/plugins/spcs/image_repository/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/snowflake/cli/plugins/spcs/image_repository/commands.py b/src/snowflake/cli/plugins/spcs/image_repository/commands.py new file mode 100644 index 0000000000..dbfd3cccd6 --- /dev/null +++ b/src/snowflake/cli/plugins/spcs/image_repository/commands.py @@ -0,0 +1,122 @@ +import json +from typing import Optional + +import requests +import typer +from click import ClickException +from snowflake.cli.api.commands.decorators import ( + global_options_with_connection, + with_output, +) +from snowflake.cli.api.commands.flags import DEFAULT_CONTEXT_SETTINGS +from snowflake.cli.api.output.types import CollectionResult +from snowflake.cli.plugins.spcs.image_registry.manager import RegistryManager +from snowflake.cli.plugins.spcs.image_repository.manager import ImageRepositoryManager + +app = typer.Typer( + context_settings=DEFAULT_CONTEXT_SETTINGS, + name="image-repository", + help="Manages Snowpark Container Services image repositories.", + rich_markup_mode="markdown", +) + + +@app.command("list-images") +@with_output +@global_options_with_connection +def list_images( + repo_name: str = typer.Argument( + help="Name of the image repository shown by the `SHOW IMAGE REPOSITORIES` SQL command.", + ), + **options, +) -> CollectionResult: + """Lists images in given repository.""" + repository_manager = ImageRepositoryManager() + database = repository_manager.get_database() + schema = repository_manager.get_schema() + url = repository_manager.get_repository_url(repo_name) + api_url = repository_manager.get_repository_api_url(url) + bearer_login = RegistryManager().login_to_registry(api_url) + repos = [] + query: Optional[str] = f"{api_url}/_catalog?n=10" + + while query: + # Make paginated catalog requests + response = requests.get( + query, headers={"Authorization": f"Bearer {bearer_login}"} + ) + + if response.status_code != 200: + raise ClickException(f"Call to the registry failed {response.text}") + + data = json.loads(response.text) + if "repositories" in data: + repos.extend(data["repositories"]) + + if "Link" in response.headers: + # There are more results + query = f"{api_url}/_catalog?n=10&last={repos[-1]}" + else: + query = None + + images = [] + for repo in repos: + prefix = f"{database}/{schema}/{repo_name}/" + repo = repo.replace("baserepo/", prefix) + 
images.append({"image": repo}) + + return CollectionResult(images) + + +@app.command("list-tags") +@with_output +@global_options_with_connection +def list_tags( + repo_name: str = typer.Argument( + help="Name of the image repository shown by the `SHOW IMAGE REPOSITORIES` SQL command.", + ), + image_name: str = typer.Option( + ..., + "--image_name", + "-i", + help="Name of the image as shown in the output of list-images", + ), + **options, +) -> CollectionResult: + """Lists tags for given image in a repository.""" + + repository_manager = ImageRepositoryManager() + url = repository_manager.get_repository_url(repo_name) + api_url = repository_manager.get_repository_api_url(url) + bearer_login = RegistryManager().login_to_registry(api_url) + + image_realname = "/".join(image_name.split("/")[3:]) + + tags = [] + query: Optional[str] = f"{api_url}/{image_realname}/tags/list?n=10" + + while query is not None: + # Make paginated catalog requests + response = requests.get( + query, headers={"Authorization": f"Bearer {bearer_login}"} + ) + + if response.status_code != 200: + print("Call to the registry failed", response.text) + + data = json.loads(response.text) + if "tags" in data: + tags.extend(data["tags"]) + + if "Link" in response.headers: + # There are more results + query = f"{api_url}/{image_realname}/tags/list?n=10&last={tags[-1]}" + else: + query = None + + tags_list = [] + for tag in tags: + image_tag = f"{image_name}:{tag}" + tags_list.append({"tag": image_tag}) + + return CollectionResult(tags_list) diff --git a/src/snowflake/cli/plugins/spcs/image_repository/manager.py b/src/snowflake/cli/plugins/spcs/image_repository/manager.py new file mode 100644 index 0000000000..50b7247e7c --- /dev/null +++ b/src/snowflake/cli/plugins/spcs/image_repository/manager.py @@ -0,0 +1,62 @@ +from urllib.parse import urlparse + +from click import ClickException +from snowflake.cli.api.sql_execution import SqlExecutionMixin +from snowflake.connector.cursor import SnowflakeCursor + + +class ImageRepositoryManager(SqlExecutionMixin): + def get_database(self): + return self._conn.database + + def get_schema(self): + return self._conn.schema + + def get_role(self): + return self._conn.role + + def get_repository_url_list(self, repo_name: str) -> SnowflakeCursor: + role = self.get_role() + database = self.get_database() + schema = self.get_schema() + + registry_query = f""" + use role {role}; + use database {database}; + use schema {schema}; + show image repositories like '{repo_name}'; + """ + + return self._execute_query(registry_query) + + def get_repository_url(self, repo_name): + database = self.get_database() + schema = self.get_schema() + + result_set = self.get_repository_url_list(repo_name=repo_name) + + results = result_set.fetchall() + if len(results) == 0: + raise ClickException( + f"Specified repository name {repo_name} not found in database {database} and schema {schema}" + ) + else: + if len(results) > 1: + raise Exception( + f"Found more than one repositories with name {repo_name}. This is unexpected." + ) + + return f"https://{results[0][4]}" + + def get_repository_api_url(self, repo_url): + """ + Converts a repo URL to a registry OCI API URL. 
+ https://reg.com/db/schema/repo becomes https://reg.com/v2/db/schema/repo + """ + parsed_url = urlparse(repo_url) + + scheme = parsed_url.scheme + host = parsed_url.netloc + path = parsed_url.path + + return f"{scheme}://{host}/v2{path}" diff --git a/tests/spcs/test_image_repository.py b/tests/spcs/test_image_repository.py new file mode 100644 index 0000000000..26893b004c --- /dev/null +++ b/tests/spcs/test_image_repository.py @@ -0,0 +1,133 @@ +from tests.testing_utils.fixtures import * +import json + + +@mock.patch("snowflake.cli.plugins.spcs.image_repository.commands.requests.get") +@mock.patch( + "snowflake.cli.plugins.spcs.image_repository.commands.ImageRepositoryManager._execute_query" +) +@mock.patch( + "snowflake.cli.plugins.spcs.image_repository.commands.ImageRepositoryManager._conn" +) +@mock.patch( + "snowflake.cli.plugins.spcs.image_registry.commands.RegistryManager.login_to_registry" +) +def test_list_images( + mock_login, + mock_conn, + mock_execute, + mock_get_images, + runner, + mock_cursor, +): + mock_conn.database = "DB" + mock_conn.schema = "SCHEMA" + mock_conn.role = "MY_ROLE" + + mock_execute.return_value = mock_cursor( + rows=[ + [ + "2023-01-01 00:00:00", + "IMAGES", + "DB", + "SCHEMA", + "orgname-alias.registry.snowflakecomputing.com/DB/SCHEMA/IMAGES", + "ROLE", + "ROLE", + "", + ] + ], + columns=[ + "date", + "name", + "db", + "schema", + "registry", + "role", + "unknown", + "unkown2", + ], + ) + mock_login.return_value = "TOKEN" + + mock_get_images.return_value.status_code = 200 + mock_get_images.return_value.text = '{"repositories":["baserepo/super-cool-repo"]}' + + result = runner.invoke( + ["spcs", "image-repository", "list-images", "IMAGES", "--format", "JSON"] + ) + + assert result.exit_code == 0, result.output + assert json.loads(result.output) == [{"image": "DB/SCHEMA/IMAGES/super-cool-repo"}] + + +@mock.patch("snowflake.cli.plugins.spcs.image_repository.commands.requests.get") +@mock.patch( + "snowflake.cli.plugins.spcs.image_repository.manager.ImageRepositoryManager._execute_query" +) +@mock.patch( + "snowflake.cli.plugins.spcs.image_repository.commands.ImageRepositoryManager._conn" +) +@mock.patch( + "snowflake.cli.plugins.spcs.image_registry.manager.RegistryManager.login_to_registry" +) +def test_list_tags( + mock_login, + mock_conn, + mock_execute, + mock_get_tags, + runner, + mock_cursor, +): + mock_conn.database = "DB" + mock_conn.schema = "SCHEMA" + mock_conn.role = "MY_ROLE" + + mock_execute.return_value = mock_cursor( + rows=[ + [ + "2023-01-01 00:00:00", + "IMAGES", + "DB", + "SCHEMA", + "orgname-alias.registry.snowflakecomputing.com/DB/SCHEMA/IMAGES", + "ROLE", + "ROLE", + "", + ] + ], + columns=[ + "date", + "name", + "db", + "schema", + "image-registry", + "role", + "unknown", + "unkown2", + ], + ) + mock_login.return_value = "TOKEN" + + mock_get_tags.return_value.status_code = 200 + mock_get_tags.return_value.text = ( + '{"name":"baserepo/super-cool-repo","tags":["1.2.0"]}' + ) + + result = runner.invoke( + [ + "spcs", + "image-repository", + "list-tags", + "IMAGES", + "--image_name", + "DB/SCHEMA/IMAGES/super-cool-repo", + "--format", + "JSON", + ] + ) + + assert result.exit_code == 0, result.output + assert json.loads(result.output) == [ + {"tag": "DB/SCHEMA/IMAGES/super-cool-repo:1.2.0"} + ] diff --git a/tests/spcs/test_registry.py b/tests/spcs/test_registry.py index 6e7f824a55..cc79b14534 100644 --- a/tests/spcs/test_registry.py +++ b/tests/spcs/test_registry.py @@ -1,5 +1,4 @@ import json -from unittest import mock from 
tests.testing_utils.fixtures import * @@ -21,131 +20,3 @@ def test_registry_get_token_2(mock_execute, mock_conn, mock_cursor, runner): result = runner.invoke(["spcs", "image-registry", "token", "--format", "JSON"]) assert result.exit_code == 0, result.output assert json.loads(result.stdout) == {"token": "token1234", "expires_in": 42} - - -@mock.patch("snowflake.cli.plugins.spcs.image_registry.commands.requests.get") -@mock.patch( - "snowflake.cli.plugins.spcs.image_registry.commands.RegistryManager._execute_query" -) -@mock.patch("snowflake.cli.plugins.spcs.image_registry.commands.RegistryManager._conn") -@mock.patch( - "snowflake.cli.plugins.spcs.image_registry.commands.RegistryManager.login_to_registry" -) -def test_list_images( - mock_login, - mock_conn, - mock_execute, - mock_get_images, - runner, - mock_cursor, -): - mock_conn.database = "DB" - mock_conn.schema = "SCHEMA" - mock_conn.role = "MY_ROLE" - - mock_execute.return_value = mock_cursor( - rows=[ - [ - "2023-01-01 00:00:00", - "IMAGES", - "DB", - "SCHEMA", - "orgname-alias.registry.snowflakecomputing.com/DB/SCHEMA/IMAGES", - "ROLE", - "ROLE", - "", - ] - ], - columns=[ - "date", - "name", - "db", - "schema", - "registry", - "role", - "unknown", - "unkown2", - ], - ) - mock_login.return_value = "TOKEN" - - mock_get_images.return_value.status_code = 200 - mock_get_images.return_value.text = '{"repositories":["baserepo/super-cool-repo"]}' - - result = runner.invoke( - ["spcs", "image-registry", "list-images", "-r", "IMAGES", "--format", "JSON"] - ) - - assert result.exit_code == 0, result.output - assert json.loads(result.output) == [{"image": "DB/SCHEMA/IMAGES/super-cool-repo"}] - - -@mock.patch("snowflake.cli.plugins.spcs.image_registry.commands.requests.get") -@mock.patch( - "snowflake.cli.plugins.spcs.image_registry.manager.RegistryManager._execute_query" -) -@mock.patch("snowflake.cli.plugins.spcs.image_registry.commands.RegistryManager._conn") -@mock.patch( - "snowflake.cli.plugins.spcs.image_registry.manager.RegistryManager.login_to_registry" -) -def test_list_tags( - mock_login, - mock_conn, - mock_execute, - mock_get_tags, - runner, - mock_cursor, -): - mock_conn.database = "DB" - mock_conn.schema = "SCHEMA" - mock_conn.role = "MY_ROLE" - - mock_execute.return_value = mock_cursor( - rows=[ - [ - "2023-01-01 00:00:00", - "IMAGES", - "DB", - "SCHEMA", - "orgname-alias.registry.snowflakecomputing.com/DB/SCHEMA/IMAGES", - "ROLE", - "ROLE", - "", - ] - ], - columns=[ - "date", - "name", - "db", - "schema", - "image-registry", - "role", - "unknown", - "unkown2", - ], - ) - mock_login.return_value = "TOKEN" - - mock_get_tags.return_value.status_code = 200 - mock_get_tags.return_value.text = ( - '{"name":"baserepo/super-cool-repo","tags":["1.2.0"]}' - ) - - result = runner.invoke( - [ - "spcs", - "image-registry", - "list-tags", - "--repository_name", - "IMAGES", - "--image_name", - "DB/SCHEMA/IMAGES/super-cool-repo", - "--format", - "JSON", - ] - ) - - assert result.exit_code == 0, result.output - assert json.loads(result.output) == [ - {"tag": "DB/SCHEMA/IMAGES/super-cool-repo:1.2.0"} - ] diff --git a/tests_integration/spcs/test_image_repository.py b/tests_integration/spcs/test_image_repository.py new file mode 100644 index 0000000000..1673ccdbf3 --- /dev/null +++ b/tests_integration/spcs/test_image_repository.py @@ -0,0 +1,61 @@ +import pytest + +from tests_integration.test_utils import contains_row_with + +INTEGRATION_DATABASE = "SNOWCLI_DB" +INTEGRATION_SCHEMA = "PUBLIC" +INTEGRATION_REPOSITORY = "snowcli_repository" + + 
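The repository URL returned by `SHOW IMAGE REPOSITORIES` is converted to an OCI API URL by inserting `/v2` between the host and the path, as `get_repository_api_url` above does. A standalone sketch of the same transformation, using only the standard library and no real connection:

```python
from urllib.parse import urlparse


def to_api_url(repo_url: str) -> str:
    # Insert the OCI "/v2" prefix between the registry host and the repository path.
    parsed = urlparse(repo_url)
    return f"{parsed.scheme}://{parsed.netloc}/v2{parsed.path}"


assert (
    to_api_url("https://reg.com/db/schema/repo")
    == "https://reg.com/v2/db/schema/repo"
)
```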
+@pytest.mark.integration +def test_list_images_tags(runner): + _list_images(runner) + _list_tags(runner) + + +def _list_images(runner): + result = runner.invoke_with_connection_json( + [ + "spcs", + "image-repository", + "list-images", + "snowcli_repository", + "--database", + INTEGRATION_DATABASE, + "--schema", + INTEGRATION_SCHEMA, + ] + ) + # breakpoint() + assert isinstance(result.json, list), result.output + assert contains_row_with( + result.json, + { + "image": f"{INTEGRATION_DATABASE}/{INTEGRATION_SCHEMA}/{INTEGRATION_REPOSITORY}/snowpark_test" + }, + ) + + +@pytest.mark.integration +def _list_tags(runner): + result = runner.invoke_with_connection_json( + [ + "spcs", + "image-repository", + "list-tags", + "snowcli_repository", + "--image_name", + f"{INTEGRATION_DATABASE}/{INTEGRATION_SCHEMA}/{INTEGRATION_REPOSITORY}/snowpark_test", + "--database", + INTEGRATION_DATABASE, + "--schema", + INTEGRATION_SCHEMA, + ] + ) + assert isinstance(result.json, list), result.output + assert contains_row_with( + result.json, + { + "tag": f"{INTEGRATION_DATABASE}/{INTEGRATION_SCHEMA}/{INTEGRATION_REPOSITORY}/snowpark_test:1" + }, + ) From d02912d316624681d86ad18bdccf62b3de713632 Mon Sep 17 00:00:00 2001 From: Tomasz Urbaszek Date: Tue, 30 Jan 2024 11:43:58 +0100 Subject: [PATCH 14/17] Rename snowflake-cli-labs to snowflake-cli (#694) --- .github/workflows/old_publish.yaml | 33 +++ README.md | 2 +- compat/snowflake-cli-labs/LICENSE | 201 ++++++++++++++++++ compat/snowflake-cli-labs/README.md | 5 + compat/snowflake-cli-labs/pyproject.toml | 26 +++ pyproject.toml | 2 +- .../pyproject.toml | 2 +- .../pyproject.toml | 2 +- tests_e2e/conftest.py | 2 +- 9 files changed, 270 insertions(+), 5 deletions(-) create mode 100644 .github/workflows/old_publish.yaml create mode 100644 compat/snowflake-cli-labs/LICENSE create mode 100644 compat/snowflake-cli-labs/README.md create mode 100644 compat/snowflake-cli-labs/pyproject.toml diff --git a/.github/workflows/old_publish.yaml b/.github/workflows/old_publish.yaml new file mode 100644 index 0000000000..ab105dec2e --- /dev/null +++ b/.github/workflows/old_publish.yaml @@ -0,0 +1,33 @@ +name: Upload Python Package + +on: + on: workflow_dispatch + +permissions: + contents: read + +defaults: + run: + working-directory: "./compat/snowflake-cli-labs" + +jobs: + deploy: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + with: + persist-credentials: false + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.11" + - name: + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install hatch + - name: Build package + run: python -m hatch build + - name: Publish package + run: python -m hatch publish -u __token__ -a ${{ secrets.PYPI_TOKEN }} diff --git a/README.md b/README.md index 92486c4631..f4b9b5760a 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ For complete installation and usage instructions, refer to the Requires Python >= 3.8 ```bash -pip install snowflake-cli-labs +pip install snowflake-cli snow --help ``` diff --git a/compat/snowflake-cli-labs/LICENSE b/compat/snowflake-cli-labs/LICENSE new file mode 100644 index 0000000000..48773d3d3c --- /dev/null +++ b/compat/snowflake-cli-labs/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2024 Snowflake Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/compat/snowflake-cli-labs/README.md b/compat/snowflake-cli-labs/README.md new file mode 100644 index 0000000000..b8b665eaa0 --- /dev/null +++ b/compat/snowflake-cli-labs/README.md @@ -0,0 +1,5 @@ +# Snowflake Developer CLI + +This package has been renamed. Use `pip install snowflake-cli` instead. + +New package: https://pypi.org/project/snowflake-cli/ diff --git a/compat/snowflake-cli-labs/pyproject.toml b/compat/snowflake-cli-labs/pyproject.toml new file mode 100644 index 0000000000..de1f4a9820 --- /dev/null +++ b/compat/snowflake-cli-labs/pyproject.toml @@ -0,0 +1,26 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "snowflake-cli-labs" +authors = [{ name = "Snowflake Inc." }] +license = { file = "LICENSE" } +dynamic = ["version"] +requires-python = ">=3.8" +description = "Snowflake CLI" +readme = "README.md" +dependencies = ["snowflake-cli==2.0.0"] +classifiers = [ + "Development Status :: 7 - Inactive" +] + +[project.urls] +"Source code" = "https://github.com/Snowflake-Labs/snowcli" +"Bug Tracker" = "https://github.com/Snowflake-Labs/snowcli/issues" + +[tool.hatch.build.targets.wheel] +packages = ["README.md"] + +[tool.hatch.version] +path = "../../src/snowflake/cli/__about__.py" diff --git a/pyproject.toml b/pyproject.toml index 80c24257bc..c4d9299818 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ requires = ["hatchling"] build-backend = "hatchling.build" [project] -name = "snowflake-cli-labs" +name = "snowflake-cli" authors = [{ name = "Snowflake Inc." }] license = { file = "LICENSE" } dynamic = ["version"] diff --git a/test_external_plugins/multilingual_hello_command_group/pyproject.toml b/test_external_plugins/multilingual_hello_command_group/pyproject.toml index a20d502881..e07375abf5 100644 --- a/test_external_plugins/multilingual_hello_command_group/pyproject.toml +++ b/test_external_plugins/multilingual_hello_command_group/pyproject.toml @@ -6,7 +6,7 @@ build-backend = "setuptools.build_meta" name = "snowflakecli-test-multilingual-hello-plugin" requires-python = ">=3.8" dependencies = [ - "snowflake-cli-labs>=1.1.0" + "snowflake-cli>=1.1.0" ] version = "0.0.1" diff --git a/test_external_plugins/snowpark_hello_single_command/pyproject.toml b/test_external_plugins/snowpark_hello_single_command/pyproject.toml index dac8bd7e1a..df66799e82 100644 --- a/test_external_plugins/snowpark_hello_single_command/pyproject.toml +++ b/test_external_plugins/snowpark_hello_single_command/pyproject.toml @@ -6,7 +6,7 @@ build-backend = "setuptools.build_meta" name = "snowflakecli-test-snowpark-hello-plugin" requires-python = ">=3.8" dependencies = [ - "snowflake-cli-labs>=1.1.0" + "snowflake-cli>=1.1.0" ] version = "0.0.1" diff --git a/tests_e2e/conftest.py b/tests_e2e/conftest.py index 1622298fbb..a95e4aabb6 100644 --- a/tests_e2e/conftest.py +++ b/tests_e2e/conftest.py @@ -67,7 +67,7 @@ def _install_snowcli_with_external_plugin( "-m", "pip", "install", - test_root_path / f"../dist/snowflake_cli_labs-{version}-py3-none-any.whl", + test_root_path / f"../dist/snowflake_cli-{version}-py3-none-any.whl", ] ) subprocess.check_call( From c023e5c3d8ed6f291d291cb059380519e795fcb8 Mon Sep 17 00:00:00 2001 From: Bhumika Goel Date: Wed, 31 Jan 2024 03:05:43 -0500 Subject: [PATCH 15/17] [NADE] Add NA commands to the release notes for v2.0.0 (#707) add NA commands to the release notes for v2.0.0 --- RELEASE-NOTES.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 
8ee77352fe..6308b57141 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -51,6 +51,14 @@ * Added support for runtime version in snowpark procedures ad functions. * You can include previously uploaded packages in your functions, by listing them under `imports` in `snowflake.yml` * Added more options to `snow connection add` - now you can also specify authenticator and path to private key +* Added support for native applications by introducing new commands. + * `snow app init` command that creates a new Native App project from a git repository as a template. + * `snow app version create` command that creates or upgrades an application package and creates a version or patch for that package. + * `snow app version drop` command that drops a version associated with an application package. + * `snow app version list` command that lists all versions associated with an application package. + * `snow app run` command that creates or upgrades an application in development mode or through release directives. + * `snow app open` command that opens the application inside of your browser on Snowsight, once it has been installed in your account. + * `snow app teardown` command that attempts to drop both the application and package as defined in the project definition file. ## Fixes and improvements * Allow the use of quoted identifiers in stages From 5e0052b78d3284697ac295226eebccf53693fc6a Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Wed, 31 Jan 2024 10:08:34 +0100 Subject: [PATCH 16/17] =?UTF-8?q?SNOW-1019480-sql-filter-nonsql-statements?= =?UTF-8?q?:=20strip=20comments=20from=20stream=E2=80=A6=20(#700)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * SNOW-1019480-sql-filter-nonsql-statements: strip comments from streamed queries --- src/snowflake/cli/plugins/sql/manager.py | 9 +++++++-- tests_integration/test_sql.py | 12 ++++++++++++ 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/src/snowflake/cli/plugins/sql/manager.py b/src/snowflake/cli/plugins/sql/manager.py index 18ab55c80f..eaf8eef12b 100644 --- a/src/snowflake/cli/plugins/sql/manager.py +++ b/src/snowflake/cli/plugins/sql/manager.py @@ -28,5 +28,10 @@ def execute( elif file: query = file.read_text() - single_statement = len(list(split_statements(StringIO(query)))) == 1 - return single_statement, self._execute_string(query) + statements = tuple( + statement + for statement, _ in split_statements(StringIO(query), remove_comments=True) + ) + single_statement = len(statements) == 1 + + return single_statement, self._execute_string("\n".join(statements)) diff --git a/tests_integration/test_sql.py b/tests_integration/test_sql.py index 5a35dc7d7a..09d8b98be4 100644 --- a/tests_integration/test_sql.py +++ b/tests_integration/test_sql.py @@ -100,3 +100,15 @@ def _log( assert query_0 == "13" assert time_1 - time_0 >= 10.0 assert "waited 10 seconds" in query_1 + + +@pytest.mark.integration +def test_trailing_comments_queries(runner, snowflake_session, test_root_path): + trailin_comment_query = "select 1;\n\n-- trailing comment\n" + result = runner.invoke_with_connection_json(["sql", "-q", trailin_comment_query]) + assert result.exit_code == 0 + assert result.json == [ + [ + {"1": 1}, + ], + ] From dc814a997f8b86a7596cf6867791237948abdbf1 Mon Sep 17 00:00:00 2001 From: Tomasz Urbaszek Date: Wed, 31 Jan 2024 10:21:56 +0100 Subject: [PATCH 17/17] Fix old publish workflow (#709) --- .github/workflows/old_publish.yaml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git 
a/.github/workflows/old_publish.yaml b/.github/workflows/old_publish.yaml index ab105dec2e..f7807466c3 100644 --- a/.github/workflows/old_publish.yaml +++ b/.github/workflows/old_publish.yaml @@ -1,7 +1,6 @@ -name: Upload Python Package +name: Upload Old Python Package -on: - on: workflow_dispatch +on: workflow_dispatch permissions: contents: read
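Returning to the SQL manager change in patch 16: the comment stripping leans on the connector's statement splitter. A short sketch of the behaviour the new integration test pins down, assuming `split_statements` is importable from `snowflake.connector.util_text` (the import sits outside the diff hunk, so treat the path as an assumption) and yields `(statement, is_put_or_get)` pairs:

```python
from io import StringIO

from snowflake.connector.util_text import split_statements

query = "select 1;\n\n-- trailing comment\n"

statements = tuple(
    statement
    for statement, _ in split_statements(StringIO(query), remove_comments=True)
)

# With remove_comments=True the trailing comment no longer counts as a statement,
# so the CLI treats the input as a single-statement query -- the behaviour asserted
# by test_trailing_comments_queries.
assert len(statements) == 1
```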