diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 147d7f36..fbca5d71 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -21,6 +21,7 @@ jobs: with: python-version: ${{ matrix.python-version }} - name: black + if: ${{matrix.python-version != '3.7'}} uses: psf/black@stable # Exclude list is not honored - https://github.com/psf/black/issues/1584 with: version: "22.6.0" diff --git a/CalmVersion b/CalmVersion index 64ee7ee7..eaf967fb 100644 --- a/CalmVersion +++ b/CalmVersion @@ -1 +1 @@ -3.7.2.1 +3.7.2.2 diff --git a/Documentation/docs/models/runbook/ndb_task.md b/Documentation/docs/models/runbook/ndb_task.md new file mode 100644 index 00000000..ac838924 --- /dev/null +++ b/Documentation/docs/models/runbook/ndb_task.md @@ -0,0 +1,401 @@ +# Nutanix Database (NDB) + +- NDB models represent the attributes required for performing various Database operations +- NDB models available in DSL are Database, DatabaseServer, TimeMachine and Tag. They are further divided into different subclasses based on specific operations +- NDB models also include an OutputVariable model, which lets users set task variables from the output variables of an operation. + +## DatabaseServer: + +This model provides attributes for server-related information. It is divided into operation-specific subclasses, each inheriting from Postgres DatabaseServer, a subclass of DatabaseServer. + +- Postgres: + + This model provides attributes for performing various operations on Postgres Databases. + + - Create: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import DatabaseServer + + DatabaseServer.Postgres.Create( + name="db_server_name", + password="db_server_password", + cluster=Ref.NutanixDB.Cluster(name="EraCluster"), + software_profile=Ref.NutanixDB.Profile.Software(name="POSTGRES_10.4_OOB"), + software_profile_version=Ref.NutanixDB.Profile.Software_Version( + name="POSTGRES_10.4_OOB (1.0)" + ), + compute_profile=Ref.NutanixDB.Profile.Compute(name="DEFAULT_OOB_COMPUTE"), + network_profile=Ref.NutanixDB.Profile.Network( + name="DEFAULT_OOB_POSTGRESQL_NETWORK" + ), + ip_address="10.44.76.141", + ssh_public_key="ssh_key for the server", + description="Sample description of db server", + ) + ``` + + - Attributes supported for this class: + 1. **name**: (String) Name of the database server + 2. **password**: (String) Password of the database server + 3. **cluster**: (NDB Ref/ Macro) Cluster to use for the database server + 4. **software_profile**: (NDB Ref/ Macro) Software Profile to use for the database server + 5. **software_profile_version**: (NDB Ref/ Macro) Version of the Software Profile to use for the database server + 6. **compute_profile**: (NDB Ref/ Macro) Compute Profile to use for the database server + 7. **network_profile**: (NDB Ref/ Macro) Network Profile to use for the database server + 8. **ip_address**: (String) Static IP address for the database server, used when a static network profile is provided + 9. **ssh_public_key**: (String) RSA-based public key to use for the database server + 10. **description**: (String) Description of the database server
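+
+    - Usage sketch: this model is not a standalone task; it is passed into an NDB runbook task. A minimal sketch follows. The task wrapper (`Task.NutanixDB.PostgresDatabase.Create`) and its keyword names (`account`, `database_server_config`, `instance_config`, `timemachine_config`) mirror the DSL's runbook examples but should be treated as indicative assumptions here, not a complete signature:
+      ```
+      from calm.dsl.builtins import Ref
+      from calm.dsl.runbooks import runbook
+      from calm.dsl.runbooks import RunbookTask as Task
+
+      @runbook
+      def create_postgres_instance():
+          # database_server_params is a DatabaseServer.Postgres.Create(...) model as
+          # defined above; postgres_instance_params and time_machine_params are the
+          # Database.Postgres.Create(...) and TimeMachine.Postgres.Create(...) models
+          # described later in this document.
+          Task.NutanixDB.PostgresDatabase.Create(
+              name="postgres_create_task",
+              account=Ref.Account("ndb_account"),  # assumed NDB account name
+              database_server_config=database_server_params,
+              instance_config=postgres_instance_params,
+              timemachine_config=time_machine_params,
+          )
+      ```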
+ + - Clone: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import DatabaseServer + + DatabaseServer.Postgres.Clone( + name="new_db_@@{calm_time}@@", + password="abc123", + cluster=Ref.NutanixDB.Cluster(name="EraCluster"), + compute_profile=Ref.NutanixDB.Profile.Compute(name="DEFAULT_OOB_COMPUTE"), + network_profile=Ref.NutanixDB.Profile.Network( + name="DEFAULT_OOB_POSTGRESQL_NETWORK" + ), + ssh_public_key="ssh-key", + description="Sample description of db server", + ) + ``` + + - Attributes supported for this class: + 1. **name**: (String) Name of the Postgres Instance + 2. **password**: (String) Password of the database server + 3. **cluster**: (NDB Ref/ Macro) Cluster to use for the database server + 4. **compute_profile**: (NDB Ref/ Macro) Compute Profile to use for the database server + 5. **network_profile**: (NDB Ref/ Macro) Network Profile to use for the database server + 6. **ssh_public_key**: (String) RSA-based public key to use for the database server + 7. **description**: (String) Description of the database server + +## Database: + +This model provides attributes for database-instance-related information. It is divided into operation-specific subclasses, each inheriting from Postgres Database, a subclass of Database. + +- Postgres: + + This model provides attributes for performing various operations on Postgres Databases. + + - Create: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import Database + + Database.Postgres.Create( + name="post_inst_@@{calm_time}@@", + description="Sample description of postgres instances", + database_parameter_profile=Ref.NutanixDB.Profile.Database_Parameter( + name="DEFAULT_POSTGRES_PARAMS" + ), + initial_database_name="TEST_DB_01", + initial_database_password="DB_PASS", + listener_port="5432", + size="200", + pre_create_script="", + post_create_script="", + ) + ``` + + - Attributes supported for this class: + 1. **name**: (String) Name of the Postgres Instance + 2. **description**: (String) Description of the Postgres Instance + 3. **database_parameter_profile**: (NDB Ref/ Macro) Database Parameter Profile to use for Postgres Instance + 4. **initial_database_name**: (String) Initial Database name to use for Postgres Instance + 5. **initial_database_password**: (String) Initial Database password to use for Postgres Instance + 6. **listener_port**: (Integer) Listener Port to use for Postgres Instance + 7. **size**: (Integer) Size of the Postgres Instance + 8. **pre_create_script**: (String) Script to run before creating the Postgres Instance + 9. **post_create_script**: (String) Script to run after creating the Postgres Instance + + - Delete: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import Database + + Database.Postgres.Delete( + name=Ref.NutanixDB.Database(name="bekkam-pg-dnd") + ) + ``` + + - Attributes supported for this class: + 1. **database**: (NDB Ref/ Macro) Ref of the Postgres Instance + + - Create Snapshot: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import Database + + Database.Postgres.CreateSnapshot( + snapshot_name="snap-from-dsl", + remove_schedule_in_days=2, + # time_machine="@@{tm_uuid}@@", + time_machine=Ref.NutanixDB.TimeMachine(name="dnd-pg_TM"), + database=Ref.NutanixDB.Database(name="dnd-pg"), + ) + ``` + + - Attributes supported for this class: + 1. **snapshot_name**: (String) Snapshot Name + 2. **remove_schedule_in_days**: (Integer) Removal Schedule + 3. 
**time_machine**: (NDB Ref/ Macro) Time Machine Name + 4. **database**: (NDB Ref/ Macro) Database Name + + - Restore From Time Machine: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import Database + + Database.Postgres.RestoreFromTimeMachine( + database=Ref.NutanixDB.Database("test-pg-inst"), + snapshot_with_timeStamp=Ref.NutanixDB.Snapshot( + "era_auto_snapshot (2023-03-01 14:46:17)" + ), + time_zone="America/Resolute", + #point_in_time="2023-02-12 10:01:40", + ) + ``` + + - Attributes supported for this class: + 1. **database**: (NDB Ref/ Macro) Name of the Postgres Instance + 2. **snapshot_with_timeStamp**: (NDB Ref/ Macro) Name of the snapshot along with TimeStamp (yyyy-mm-dd hh:mm:ss) Eg-> "era_auto_snapshot (2023-02-12 10:01:40)" + 3. **point_in_time**: (String) Point in time to restore to, in yyyy-mm-dd hh:mm:ss format. Eg -> "2023-02-12 10:01:40" + 4. **time_zone**: (String) Time Zone of the snapshot/point in time (If not given, defaults to system timezone) + + - Note: Only one of snapshot_with_timeStamp and point_in_time can be specified + + - Clone: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import Database + + Database.Postgres.Clone( + name="post_inst_@@{calm_time}@@", + database_parameter_profile=Ref.NutanixDB.Profile.Database_Parameter( + name="DEFAULT_POSTGRES_PARAMS" + ), + password="Nutanix.123", + pre_clone_cmd="", + post_clone_cmd="", + ) + ``` + + - Attributes supported for this class: + 1. **name**: (String) Name of the Postgres Instance + 2. **description**: (String) Description of the Postgres Instance + 3. **password**: (String) Password of the Postgres Instance + 4. **database_parameter_profile**: (NDB Ref/ Macro) Database Parameter Profile to use for Postgres Instance + 5. **pre_clone_cmd**: (String) Script to run before cloning the Postgres Instance + 6. **post_clone_cmd**: (String) Script to run after cloning the Postgres Instance
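+
+  - Usage sketch: the snapshot and restore models above plug into NDB runbook tasks in the same way. The task wrapper names and keywords below (`Task.NutanixDB.PostgresDatabase.CreateSnapshot`, `...RestoreFromTimeMachine`, `instance_config`) mirror the DSL's runbook examples and are indicative assumptions, not a complete signature:
+    ```
+    from calm.dsl.builtins import Ref
+    from calm.dsl.builtins.models.ndb import Database
+    from calm.dsl.runbooks import runbook
+    from calm.dsl.runbooks import RunbookTask as Task
+
+    @runbook
+    def snapshot_then_restore():
+        # snapshot an existing instance ...
+        Task.NutanixDB.PostgresDatabase.CreateSnapshot(
+            name="snapshot_task",
+            account=Ref.Account("ndb_account"),  # assumed NDB account name
+            instance_config=Database.Postgres.CreateSnapshot(
+                snapshot_name="snap-from-dsl",
+                database=Ref.NutanixDB.Database(name="dnd-pg"),
+            ),
+        )
+        # ... then restore it from its time machine to a point in time
+        Task.NutanixDB.PostgresDatabase.RestoreFromTimeMachine(
+            name="restore_task",
+            account=Ref.Account("ndb_account"),
+            instance_config=Database.Postgres.RestoreFromTimeMachine(
+                database=Ref.NutanixDB.Database("dnd-pg"),
+                point_in_time="2023-02-12 10:01:40",
+            ),
+        )
+    ```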
+ +## TimeMachine: + +This model provides attributes for time-machine-related information. It is divided into operation-specific subclasses, each inheriting from Postgres TimeMachine, a subclass of TimeMachine. + +- Postgres: + + This model provides attributes for performing various operations on Postgres Databases. + + - Create: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import TimeMachine + + TimeMachine.Postgres.Create( + name="inst_@@{calm_time}@@_TM", + description="This is time machine's description", + sla=Ref.NutanixDB.SLA(name="DEFAULT_OOB_GOLD_SLA"), + snapshottimeofday__hours=12, + snapshottimeofday__minutes=0, + snapshottimeofday__seconds=0, + snapshots_perday=1, + logbackup_interval=60, + weeklyschedule__dayofweek="WEDNESDAY", + monthlyschedule__dayofmonth=17, + quartelyschedule__startmonth="FEBRUARY", + ) + ``` + + - Attributes supported for this class: + 1. **name**: (String) Name of the Time Machine + 2. **description**: (String) Description of the Time Machine + 3. **sla**: (NDB Ref/ Macro) SLA to use for the Time Machine + 4. **snapshottimeofday__hours**: (Integer) Hour of the day to take Snapshot + 5. **snapshottimeofday__minutes**: (Integer) Minute of the day to take Snapshot + 6. **snapshottimeofday__seconds**: (Integer) Second of the day to take Snapshot + 7. **snapshots_perday**: (Integer) Snapshots to take Per day + 8. **logbackup_interval**: (Integer) Log Backup Interval in minutes + 9. **weeklyschedule__dayofweek**: (String) Weekly Snapshot day of the week + 10. **monthlyschedule__dayofmonth**: (Integer) Monthly Snapshot day of the month + 11. **quartelyschedule__startmonth**: (String) Quarterly Snapshot start month + + - Clone: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import TimeMachine + + TimeMachine.Postgres.Clone( + time_machine_name=Ref.NutanixDB.TimeMachine("dnd-tm2"), + point_in_time="2023-02-12 10:01:40", + + time_zone="UTC", + ) + ``` + + - Attributes supported for this class: + 1. **time_machine**: (NDB Ref/ Macro) Name of the Time Machine + 2. **snapshot_with_timeStamp**: (NDB Ref/ Macro) Name of the snapshot along with TimeStamp (yyyy-mm-dd hh:mm:ss) Eg-> "era_auto_snapshot (2023-02-12 10:01:40)" + 3. **point_in_time**: (String) Point in time to restore to, in yyyy-mm-dd hh:mm:ss format. Eg -> "2023-02-12 10:01:40" + 4. **time_zone**: (String) Time Zone of the snapshot/point in time (If not given, defaults to system timezone) + 5. **expiry_days**: (Integer) Number of days to expire + 6. **expiry_date_timezone**: (String) Timezone to be used for expiry date + 7. **delete_database**: (Boolean) Boolean input for deletion of database + 8. **refresh_in_days**: (Integer) Number of days to refresh + 9. **refresh_time**: (String) Time at which refresh should trigger + 10. **refresh_date_timezone**: (String) Timezone for the refresh time + + - Note: Only one of snapshot_with_timeStamp and point_in_time can be specified + +## Tag: + +This model provides attributes for tag-related information. It is divided into different subclasses with respect to the actions. + +- Create: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import Tag + + Tag.Create( + database_tags=[ + DatabaseTag("prod_database", "true"), + DatabaseTag("database_type", "Postgres"), + ], + time_machine_tags=[ + TimemachineTag("type", "gold"), + ], + ) + ``` + + - Attributes supported for this class: + 1. **database**: ([NDB Ref]) array of NDB Database Tag Ref. Eg -> [ Ref.NutanixDB.Tag.Database(name1, value1), Ref.NutanixDB.Tag.Database(name=name2, value=value2) ] + 2. **time_machine**: ([NDB Ref]) array of NDB TimeMachine Tag Ref. Eg -> [ Ref.NutanixDB.Tag.TimeMachine(name=name1, value=value1), Ref.NutanixDB.Tag.TimeMachine(name2, value2) ] + +- Clone: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import Tag + + Tag.Clone(tags=[CloneTag("tag name", "")]) + ``` + + - Attributes supported for this class: + 1. **clone**: ([NDB Ref]) array of NDB Clone Tag Ref. Eg -> [ Ref.NutanixDB.Tag.Clone(name1, value1), Ref.NutanixDB.Tag.Clone(name=name2, value=value2) ] + +## PostgresDatabaseOutputVariables: + +This model provides information about the output variables associated with postgres actions. + +- Create: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import PostgresDatabaseOutputVariables + + PostgresDatabaseOutputVariables.Create( + database_name="postgres_database_name", + database_instance_id="", + tags="", + properties="", + time_machine="postgres_time_machine", + time_machine_id="", + metric="", + type="", + platform_data="", + ) + ``` + + - Attributes supported for this class: + 1. **database_name**: (String) Name of the database instance + 2. **database_instance_id**: (String) ID of database instance created + 3. **tags**: ([Dict]) A tag is a label consisting of a user-defined name and a value that makes it easier to manage, search for, and filter entities + 4. **properties**: ([Dict]) Properties of the entity, Eg -> Database instance, database, profiles + 5. **time_machine**: (Dict) Time machine details when an instance is created + 6. **time_machine_id**: (String) UUID of time machine + 7. **metric**: (Dict) Stores storage info regarding size, allocatedSize, usedSize and unit of calculation that have been fetched from PRISM + 8. **type**: (String) The type of the database created i.e., postgres_database + 9. **platform_data**: (Dict) Platform data is the aggregate data of all the output variables supported
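+
+  - Usage note: each attribute above sets the task-variable name under which that output is published once the task runs, so downstream tasks can consume it as a macro. A small sketch (the echo task is illustrative, and `postgres_database_name` matches the Create model definition above):
+    ```
+    # database_name="postgres_database_name" in the Create output variables means a
+    # later task in the same runbook can read the created instance's name as a macro:
+    Task.Exec.escript(name="show_db", script="print('@@{postgres_database_name}@@')")
+    ```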
+ +- Create Snapshot: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import PostgresDatabaseOutputVariables + + PostgresDatabaseOutputVariables.CreateSnapshot( + platform_data='myplatformdata' + ) + ``` + + - Attributes supported for this class: + 1. **database_snapshot**: (Dict) Snapshot of the database + 2. **properties**: (Dict) Properties of the entity, Eg -> Database instance, database, profiles + 3. **dbserver_name**: (String) Name of the database server VM + 4. **type**: (String) The type of the database created i.e., postgres_database + 5. **dbserver_ip**: (String) IP address of the database server VM + 6. **id**: (String) ID of database instance created + 7. **parent_snapshot**: (Dict) Snapshot used to clone the database + 8. **snapshot_uuid**: (String) UUID of the Snapshot + 9. **platform_data**: (Dict) Platform data is the aggregate data of all the output variables supported + +- Restore From Time Machine: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import PostgresDatabaseOutputVariables + + PostgresDatabaseOutputVariables.RestoreFromTimeMachine( + database_name="postgres_database_name", + database_instance_id="", + tags="", + properties="", + time_machine="postgres_time_machine", + time_machine_id="", + metric="", + type="", + platform_data="", + ) + ``` + + - Attributes supported for this class: + 1. **database_name**: (String) Name of the database instance + 2. **database_instance_id**: (String) ID of database instance created + 3. **tags**: ([Dict]) A tag is a label consisting of a user-defined name and a value that makes it easier to manage, search for, and filter entities + 4. **properties**: (Dict) Properties of the entity, Eg -> Database instance, database, profiles + 5. **time_machine**: (Dict) Time machine details when an instance is created + 6. **time_machine_id**: (String) UUID of time machine + 7. **metric**: (Dict) Stores storage info regarding size, allocatedSize, usedSize and unit of calculation that have been fetched from PRISM + 8. **type**: (String) The type of the database created i.e., postgres_database + 9. **platform_data**: (Dict) Platform data is the aggregate data of all the output variables supported + +- Clone: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import PostgresDatabaseOutputVariables + + PostgresDatabaseOutputVariables.Clone( + id="postgres_Clone_id" + ) + ``` + + - Attributes supported for this class: + 1. **type**: (String) The type of the database created i.e., postgres_database + 2. **id**: (String) ID of database instance created + 3. **time_machine**: (Dict) Time machine details when an instance is created + 4. **linked_databases**: ([String]) These are databases which are created as a part of the instance + 5. **database_name**: (String) Name of the database instance + 6. **database_nodes**: ([Dict]) Info of nodes of databases + 7. **platform_data**: (Dict) Platform data is the aggregate data of all the output variables supported diff --git a/README.md b/README.md index f9762eae..bb34e314 100644 --- a/README.md +++ b/README.md @@ -149,7 +149,11 @@ Use `calm get roles` to list all roles in PC. 
The below roles are relevant for C ### Projects - Compile project: `calm compile project --file `. This command will print the compiled project JSON. Look at sample file [here](examples/Project/demo_project.py) and [here](examples/Project/project_with_env.py). -- Create project on Calm Server: `calm create project --file --name --description `. Use `no-cache-update` flag to skip cache updations post operation. +- Create project on Calm Server: `calm create project --file --name --description `.\ +**Options:**\ +           `--no-cache-update`: flag to skip cache updates after the operation.\ +           `--force`: flag to delete an existing project with the same name before creation, if no entities are associated with it. + - List projects: `calm get projects`. Get projects, optionally filtered by a string - Describe project: `calm describe project `. It will print summary of project. - Update project using dsl file: `calm update project --file `. Environments will not be updated as part of this operation. Use `no-cache-update` flag to skip cache updations post operation. @@ -162,13 +166,22 @@ Use `calm get roles` to list all roles in PC. The below roles are relevant for C - If the project already has quotas set and enabled and there are no quotas in {project_file} then the original quotas in the projects would be persisted. - If the project doesn't have quotas enabled/set and the {project_file} has quotas then the quotas would be enabled and set in the project. - Note: While using `no-cache-update` flag in project create and update commands, user should not pass environment object in the project model. User should update the cache separately after creation/updation of projects. Feature is experimental and will be discontinued after [#184](https://github.com/nutanix/calm-dsl/issues/184) is fixed. +- Decompile existing server project: `calm decompile project `. +- Decompile project from existing json file: `calm decompile project --file `. +- Decompile project to a location: `calm decompile project --dir `. It will decompile project entities to `project_dir` location. ### Environments - Compile environment: `calm compile environment --file --project `. Command will print the compiled environment JSON. Look at sample file [here](examples/Environment/sample_environment.py) -- Create environment to existing project: `calm create environment --file --project --name `. Use `no-cache-update` flag to skip cache updations post operation. +- Create environment in an existing project: `calm create environment --file --project --name `.\ +**Options:**\ +           `--no-cache-update`: flag to skip cache updates after the operation.\ +           `--force`: flag to delete an existing environment in a project with the same name before creation, if no entities are associated with it. - Update environment: `calm update environment --file --project `. Use `no-cache-update` flag to skip cache updations post operation. - List environments: `calm get environments --project `. Get environments of project. - Delete environment: `calm delete environment --project `. Use `no-cache-update` flag to skip cache updations post operation. +- Decompile existing server environment: `calm decompile environment --project `. +- Decompile environment from existing json file: `calm decompile environment --file --project `. +- Decompile environment to a location: `calm decompile environment --project --dir `. It will decompile environment entities to `environment_dir` location. 
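+- Example: to skip cache updates during create and refresh the cache separately afterwards, run e.g. `calm create environment --file env.py --project demo-project --name demo-env --no-cache-update` followed by `calm update cache` (the file, project and environment names here are illustrative).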
### Access Control Policies Access control policies ensures that a project member can access only the entities or perform only the actions defined in the role assigned to that project member. diff --git a/calm/dsl/api/__init__.py b/calm/dsl/api/__init__.py index e47f38d8..fb74410a 100644 --- a/calm/dsl/api/__init__.py +++ b/calm/dsl/api/__init__.py @@ -1,8 +1,9 @@ -from .handle import get_client_handle_obj, get_api_client +from .handle import get_client_handle_obj, get_api_client, reset_api_client_handle from .resource import get_resource_api __all__ = [ "get_client_handle_obj", "get_api_client", "get_resource_api", + "reset_api_client_handle", ] diff --git a/calm/dsl/api/blueprint.py b/calm/dsl/api/blueprint.py index c2baf644..6f36071f 100644 --- a/calm/dsl/api/blueprint.py +++ b/calm/dsl/api/blueprint.py @@ -7,6 +7,8 @@ from .util import ( strip_secrets, patch_secrets, + strip_patch_config_tasks, + add_patch_config_tasks, ) from calm.dsl.log import get_logging_handle @@ -191,6 +193,8 @@ def upload_with_secrets( bp_resources, secret_map, secret_variables, object_lists=object_lists ) + # Removing patch config tasks before uploading + profile_patch_config_tasks = strip_patch_config_tasks(bp_resources) for obj_index, obj in enumerate( bp_resources.get("substrate_definition_list", []) or [] ): @@ -210,10 +214,23 @@ def upload_with_secrets( if err: return res, err - # Add secrets and update bp + # Add secrets, patch config tasks and update bp bp = res.json() + service_name_uuid_map = {} + app_profile_list = bp["status"].get("resources", {}).get("app_profile_list", []) + for service in ( + bp["status"].get("resources", {}).get("service_definition_list", []) + ): + service_name_uuid_map[service["name"]] = service["uuid"] del bp["status"] + add_patch_config_tasks( + bp["spec"]["resources"], + app_profile_list, + profile_patch_config_tasks, + service_name_uuid_map, + ) + patch_secrets(bp["spec"]["resources"], secret_map, secret_variables) # Adding categories at PUT call to blueprint @@ -273,6 +290,9 @@ def upload_with_decompiled_secrets( not_stripped_secrets=not_stripped_secrets, ) + # Removing patch config tasks before uploading + profile_patch_config_tasks = strip_patch_config_tasks(bp_resources) + for obj_index, obj in enumerate( bp_resources.get("substrate_definition_list", []) or [] ): @@ -299,10 +319,23 @@ def upload_with_decompiled_secrets( if err: return res, err - # Add secrets and update bp + # Add secrets, patch config tasks and update bp bp = res.json() + service_name_uuid_map = {} + app_profile_list = bp["status"].get("resources", {}).get("app_profile_list", []) + for service in ( + bp["status"].get("resources", {}).get("service_definition_list", []) + ): + service_name_uuid_map[service["name"]] = service["uuid"] del bp["status"] + add_patch_config_tasks( + bp["spec"]["resources"], + app_profile_list, + profile_patch_config_tasks, + service_name_uuid_map, + ) + LOG.info("Patching newly created/updated secrets") for k in secret_map: LOG.debug("[CREATED/MODIFIED] credential -> '{}'".format(k)) diff --git a/calm/dsl/api/handle.py b/calm/dsl/api/handle.py index 5d7c1d34..5f46e61a 100644 --- a/calm/dsl/api/handle.py +++ b/calm/dsl/api/handle.py @@ -138,3 +138,10 @@ def get_api_client(): update_api_client(host=pc_ip, port=pc_port, auth=(username, password)) return _API_CLIENT_HANDLE + + +def reset_api_client_handle(): + """resets global api client object (_API_CLIENT_HANDLE)""" + + global _API_CLIENT_HANDLE + _API_CLIENT_HANDLE = None diff --git a/calm/dsl/api/util.py b/calm/dsl/api/util.py 
index 66662de4..b0fe152c 100644 --- a/calm/dsl/api/util.py +++ b/calm/dsl/api/util.py @@ -1,4 +1,5 @@ import copy +import sys from calm.dsl.log import get_logging_handle @@ -122,6 +123,37 @@ def strip_entity_secret_variables( not_stripped_secrets.append( (path_list + [field_name, var_idx], variable["value"]) ) + # For dynamic variables having http task with auth + opts = variable.get("options", None) + auth = None + if opts: + attrs = opts.get("attrs", None) + if attrs: + auth = attrs.get("authentication", None) + if auth and auth.get("auth_type") == "basic": + basic_auth = auth.get("basic_auth") + username = basic_auth.get("username") + password = basic_auth.pop("password") + secret_variables.append( + ( + path_list + + [ + field_name, + var_idx, + "options", + "attrs", + "authentication", + "basic_auth", + "password", + ], + password.get("value", None), + username, + ) + ) + basic_auth["password"] = { + "value": None, + "attrs": {"is_secret_modified": False}, + } def strip_action_secret_variables(path_list, obj): @@ -259,8 +291,10 @@ def strip_runbook_secret_variables(path_list, obj): elif task.get("type", None) != "HTTP": continue auth = (task.get("attrs", {}) or {}).get("authentication", {}) or {} + if auth.get("auth_type", None) == "basic": + path_list = path_list + ["runbook"] + path_list = path_list + [ - "runbook", "task_definition_list", task_idx, "attrs", @@ -271,15 +305,14 @@ def strip_runbook_secret_variables(path_list, obj): path_list, task.get("attrs", {}) or {}, context=var_task_context ) - if auth.get("auth_type", None) == "basic": - if not (task.get("attrs", {}) or {}).get("headers", []) or []: - continue - strip_entity_secret_variables( - path_list, - task["attrs"], - field_name="headers", - context=var_task_context + ".headers", - ) + if not (task.get("attrs", {}) or {}).get("headers", []) or []: + continue + strip_entity_secret_variables( + path_list, + task["attrs"], + field_name="headers", + context=var_task_context + ".headers", + ) def strip_authentication_secret_variables(path_list, obj, context=""): @@ -344,31 +377,34 @@ def strip_vmware_secrets( obj = obj["create_spec"]["resources"]["guest_customization"] vmware_secrets_context = "create_spec.resources.guest_customization.windows_data" - if "windows_data" in obj: + if obj.get("windows_data", {}): path_list.append("windows_data") obj = obj["windows_data"] + if not obj: + return + vmware_secrets_admin_context = ( vmware_secrets_context - + obj["windows_data"] + + "windows_data" + ".password." - + obj["password"]["name"] + + obj["password"].get("name", "") ) filtered_decompiled_vmware_secrets = get_secrets_from_context( decompiled_secrets, vmware_secrets_admin_context ) # Check for admin_password - if "password" in obj: + if obj.get("password", {}): if is_secret_modified( filtered_decompiled_vmware_secrets, - obj["password"]["name"], - obj["password"]["value"], + obj["password"].get("name", ""), + obj["password"].get("value", None), ): secret_variables.append( ( path_list + ["password"], obj["password"].pop("value", ""), - obj["password"]["name"], + obj["password"].get("name", ""), ) ) obj["password"]["attrs"] = { @@ -377,26 +413,26 @@ def strip_vmware_secrets( } else: not_stripped_secrets.append( - (path_list + ["password"], obj["password"]["value"]) + (path_list + ["password"], obj["password"].get("value", "")) ) - vmware_secrets_domain_context = ( - vmware_secrets_context - + obj["windows_data"] - + ".domain_password." 
- + obj["domain_password"] - ) - filtered_decompiled_vmware_secrets = get_secrets_from_context( - decompiled_secrets, vmware_secrets_domain_context - ) - # Now check for domain password if obj.get("is_domain", False): - if "domain_password" in obj: + if obj.get("domain_password", {}): + vmware_secrets_domain_context = ( + vmware_secrets_context + + "windows_data" + + ".domain_password." + + obj["domain_password"].get("name", "") + ) + + filtered_decompiled_vmware_secrets = get_secrets_from_context( + decompiled_secrets, vmware_secrets_domain_context + ) if is_secret_modified( filtered_decompiled_vmware_secrets, - obj["domain_password"]["name"], - obj["domain_password"]["value"], + obj["domain_password"].get("name", ""), + obj["domain_password"].get("value", None), ): secret_variables.append( ( @@ -466,3 +502,369 @@ def patch_secrets(resources, secret_map, secret_variables, existing_secrets=[]): variable["value"] = secret return resources + + +def _create_task_name_substrate_map(bp_payload, entity_type, **kwargs): + vm_power_action_uuid_substrate_map = kwargs.get( + "vm_power_action_uuid_substrate_map", {} + ) + task_name_substrate_map = kwargs.get("task_name_substrate_map", {}) + + entity_list = bp_payload["spec"]["resources"][entity_type] + for entity in entity_list: + entity_name = entity.get("name") + for action in entity.get("action_list", []): + action_name = action.get("name") + runbook = action.get("runbook", {}) + if not runbook: + continue + for task in runbook.get("task_definition_list", []): + task_name = task.get("name") + if task.get("type", "") == "CALL_RUNBOOK" and task.get("attrs", {}): + uuid = task["attrs"]["runbook_reference"].get("uuid", "") + if not uuid: + continue + task_name_substrate_map[ + "{}_{}_{}".format(entity_name, action_name, task_name) + ] = vm_power_action_uuid_substrate_map.get(uuid, "") + + for config in entity.get("patch_list", []): + config_name = config.get("name") + runbook = config.get("runbook", {}) + if not runbook: + continue + for task in runbook.get("task_definition_list", []): + task_name = task.get("name") + if task.get("type", "") == "CALL_RUNBOOK" and task.get("attrs", {}): + uuid = task["attrs"]["runbook_reference"].get("uuid", "") + if not uuid: + continue + task_name_substrate_map[ + "{}_{}_{}".format(entity_name, config_name, task_name) + ] = vm_power_action_uuid_substrate_map.get(uuid, "") + + +def _create_reference_runbook_substrate_map(exported_bp_payload, entity_type, **kwargs): + reference_runbook_to_substrate_map = kwargs.get( + "reference_runbook_to_substrate_map", {} + ) + task_name_substrate_map = kwargs.get("task_name_substrate_map", {}) + + entity_list = exported_bp_payload["spec"]["resources"][entity_type] + for entity in entity_list: + entity_name = entity.get("name") + for action in entity.get("action_list", []): + action_name = action.get("name") + runbook = action.get("runbook", {}) + if not runbook: + continue + for task in runbook.get("task_definition_list", []): + task_name = task.get("name") + if task.get("type", "") == "CALL_RUNBOOK" and task.get("attrs", {}): + rb_name = task["attrs"]["runbook_reference"].get("name", "") + task_ref = "{}_{}_{}".format(entity_name, action_name, task_name) + if ( + task_ref in task_name_substrate_map + and task_name_substrate_map[task_ref] + ): + reference_runbook_to_substrate_map[ + rb_name + ] = task_name_substrate_map[task_ref] + + for config in entity.get("patch_list", []): + config_name = config.get("name") + runbook = config.get("runbook", {}) + if not runbook: + continue + 
for task in runbook.get("task_definition_list", []): + task_name = task.get("name") + if task.get("type", "") == "CALL_RUNBOOK" and task.get("attrs", {}): + rb_name = task["attrs"]["runbook_reference"].get("name", "") + if not rb_name: + continue + task_ref = "{}_{}_{}".format(entity_name, config_name, task_name) + if ( + task_ref in task_name_substrate_map + and task_name_substrate_map[task_ref] + ): + reference_runbook_to_substrate_map[ + rb_name + ] = task_name_substrate_map[task_ref] + + +def vm_power_action_target_map(bp_payload, exported_bp_payload): + """ + Args: + bp_payload (dict): bp payload response from client.blueprint.read call + exported_bp_payload (dict): bp payload response from client.blueprint.export_file call + + exported_bp_payload contains the actual runbook name as a reference, which is called for any vm power action. + This payload only contains spec, but power actions reside in 'status' of the response payload. + So, the substrate of the actual runbook name can't be found directly. + + bp_payload contains 'status' of the response, so the substrate can be fetched from it. In this payload, + the runbook name is an alias of the actual runbook used as reference for a vm power action. So, + the runbook uuid will be consumed to establish a link between the runbook reference and its substrate. + + Algo: + Step 1: Create a map of power action runbook uuid and its parent substrate + Step 2: Create a map of the task name calling the above rb and the substrate name by consuming the rb uuid + rb uuid links task name -> rb uuid -> substrate name + Step 3: Create a map of the actual rb name calling the above rb in exported bp_payload to the substrate name + the task name will be consumed in this process. + rb name -> task name -> substrate name + + Returns: + reference_runbook_to_substrate_map (dict): runbook name to substrate name map for vm + power action runbook used as reference inside a task, e.g. 
+ reference_runbook_to_substrate_map = { + "rb_name1": substrate_name, + "rb_name2": substrate2_name + } + """ + + # holds vm power action uuid to its parent substrate name mapping + vm_power_action_uuid_substrate_map = {} + """ + vm_power_action_uuid_substrate_map = { + "": substrate_name + } + """ + + # holds target substrate for referenced power action runbook of a task + task_name_substrate_map = {} + """ + task_name_substrate_map = { + "__": substrate_name + } + "__" key is used to uniquely identify a task even if they are of same name + """ + + # runbook name to substrate map holds target substrate for referenced power action runbook of a task + reference_runbook_to_substrate_map = {} + """ + reference_runbook_to_substrate_map = { + "rb_name1": substrate_name, + "rb_name2": substrate2_name + } + """ + + substrate_def_list = bp_payload["status"]["resources"]["substrate_definition_list"] + for substrate in substrate_def_list: + substrate_name = substrate.get("name") + for action in substrate.get("action_list", []): + runbook_id = action.get("runbook", {}).get("uuid", "") + vm_power_action_uuid_substrate_map[runbook_id] = substrate_name + + kwargs = { + "vm_power_action_uuid_substrate_map": vm_power_action_uuid_substrate_map, + "task_name_substrate_map": task_name_substrate_map, + "reference_runbook_to_substrate_map": reference_runbook_to_substrate_map, + } + entity_type_list = [ + "substrate_definition_list", + "service_definition_list", + "app_profile_list", + "package_definition_list", + ] + for entity_type in entity_type_list: + _create_task_name_substrate_map(bp_payload, entity_type, **kwargs) + + for entity_type in entity_type_list: + _create_reference_runbook_substrate_map( + exported_bp_payload, entity_type, **kwargs + ) + + return reference_runbook_to_substrate_map + + +def strip_patch_config_tasks(resources): + """ + Strips out patch config tasks from patch list + Args: + resources (dict): resources dict of blueprint + Returns: + profile_patch_config_tasks (dict): dictionary of dictionaries containing + user defined tasks at patch config level for each profile. + + e.g. if Profile1 contains patch_config1, patch_config2 having 3 and 2 user defined tasks respectively like: + Profile1 -> patch_config1 -> Task1, Task2, Task3 + patch_config2 -> Task4, Task5 + + then return value of this function is: + profile_patch_config_tasks = { + "Profile1": { + "patch_config1": [Task1, Task2, Task3], + "patch_config2": [Task4, Task5] + } + } + """ + + # maps each profile name to a dictionary of its patch configs' task lists + profile_patch_config_tasks = {} + + for profile in resources.get("app_profile_list", []): + # dictionary to hold multiple patch config to its tasks mapping + patch_config_task_list_map = {} + + for patch_config in profile.get("patch_list", []): + tasks = patch_config.get("runbook", {}).pop("task_definition_list", []) + patch_config_task_list_map[patch_config.get("name")] = tasks + + # Strips out runbook holding patch config tasks in patch list + patch_config.pop("runbook", "") + + profile_patch_config_tasks[profile.get("name")] = patch_config_task_list_map + + return profile_patch_config_tasks + + +def add_patch_config_tasks( + resources, + app_profile_list, + profile_patch_config_tasks, + service_name_uuid_map, +): + """ + Adds patch config tasks to patch list payload for each profile in bp. 
+ + Args: + resources (dict): resources fetched from "spec" of bp payload + app_profile_list (list): profile list fetched from "status" of bp payload + (because system defined patch config tasks are present in status) + profile_patch_config_tasks (dict): returned from strip_patch_config_tasks function call. + Contains user defined patch config tasks from patch list for all profiles. + service_name_uuid_map (dict): service name to its uuid map + + Step 1: Read the patch config runbook from app_profile_list containing system defined + patch config tasks. + Step 2: Update this runbook with user defined tasks + Step 3: Attach this runbook to the patch config runbook of resources fetched from spec + """ + + for profile in app_profile_list: + + # profile level map holding task name uuid map for each config + cur_profile_task_name_uuid_map = {} + """ + cur_profile_task_name_uuid_map = { + "patch_config1": { + "task1": "", + "task2": "", + "task3": "" + }, + "patch_config2": { + "task4": "", + "task5": "", + } + } + """ + + # holds patch config tasks for current profile + patch_config_task_list_map = profile_patch_config_tasks.get(profile["name"], {}) + + # iterate over all tasks in the patch list and build cur_profile_task_name_uuid_map + for config_name, task_list in patch_config_task_list_map.items(): + task_name_uuid_map = {} + for _task in task_list: + task_name_uuid_map[_task["name"]] = _task["uuid"] + + cur_profile_task_name_uuid_map[config_name] = task_name_uuid_map + + # append patch config tasks to system defined tasks in patch config + for patch_config in profile.get("patch_list", []): + patch_config_runbook = patch_config.get("runbook", {}) + if not patch_config_runbook: + continue + + # removing additional attributes of patch runbook + patch_config_runbook.pop("state", "") + patch_config_runbook.pop("message_list", "") + + system_tasks = patch_config_runbook.get("task_definition_list", []) + system_dag_task = system_tasks[0] + config_name = patch_config.get("name") + + for custom_task in patch_config_task_list_map[config_name]: + if "target_any_local_reference" in custom_task: + service_name = custom_task["target_any_local_reference"]["name"] + service_uuid = service_name_uuid_map.get(service_name, None) + if not service_uuid: + LOG.error( + "Service {} not added properly in blueprint.".format( + service_name + ) + ) + sys.exit(-1) + custom_task["target_any_local_reference"]["uuid"] = service_uuid + + # add all patch config tasks to task definition list + if custom_task["type"] != "DAG": + system_tasks.append(custom_task) + system_dag_task["child_tasks_local_reference_list"].append( + { + "kind": "app_task", + "name": custom_task["name"], + "uuid": custom_task["uuid"], + } + ) + + # create edge from patch config dag to tasks + elif custom_task["type"] == "DAG": + + user_first_task_name = custom_task[ + "child_tasks_local_reference_list" + ][0]["name"] + + # edge from patch config dag to first task + first_edge = { + "from_task_reference": { + "kind": "app_task", + "uuid": system_dag_task["child_tasks_local_reference_list"][ + 0 + ]["uuid"], + }, + "to_task_reference": { + "kind": "app_task", + "name": user_first_task_name, + "uuid": cur_profile_task_name_uuid_map[config_name][ + user_first_task_name + ], + }, + } + + # remaining edges from first task to rest of tasks + user_task_edges = custom_task["attrs"]["edges"] + for edge in user_task_edges: + task_name = edge["from_task_reference"]["name"] + edge["from_task_reference"][ + "uuid" + ] = 
cur_profile_task_name_uuid_map[config_name][task_name] + task_name = edge["to_task_reference"]["name"] + edge["to_task_reference"][ + "uuid" + ] = cur_profile_task_name_uuid_map[config_name][task_name] + system_dag_task["attrs"]["edges"] = [first_edge] + system_dag_task["attrs"]["edges"].extend(user_task_edges) + + for task in system_tasks: + # removing additional attributes + task.pop("state", "") + task.pop("message_list", "") + + # profile level counter + profile_idx = 0 + + # attaching updated patch runbook to patch runbook in spec + for profile in resources.get("app_profile_list", []): + # patch list counter + idx = 0 + + for patch_config in profile.get("patch_list", []): + patch_config["runbook"] = app_profile_list[profile_idx]["patch_list"][idx][ + "runbook" + ] + idx += 1 + + profile_idx += 1 diff --git a/calm/dsl/builtins/__init__.py b/calm/dsl/builtins/__init__.py index 8d02b7c2..94661335 100644 --- a/calm/dsl/builtins/__init__.py +++ b/calm/dsl/builtins/__init__.py @@ -59,11 +59,11 @@ ) from .models.ahv_recovery_vm import AhvVmRecoveryResources, ahv_vm_recovery_spec -from .models.substrate import Substrate, substrate, SubstrateType +from .models.substrate import substrate, SubstrateType, Substrate from .models.deployment import Deployment, deployment, DeploymentType from .models.pod_deployment import PODDeployment, pod_deployment -from .models.config_attrs import AhvUpdateConfigAttrs, PatchDataField +from .models.config_attrs import AhvUpdateConfigAttrs, PatchDataField, ConfigAttrs from .models.app_protection import AppProtection from .models.config_spec import ConfigSpecType from .models.app_edit import AppEdit @@ -297,4 +297,7 @@ "TimeMachine", "Tag", "PostgresDatabaseOutputVariables", + "AhvUpdateConfigAttrs", + "PatchField", + "AppEdit", ] diff --git a/calm/dsl/builtins/models/account_auth.py b/calm/dsl/builtins/models/account_auth.py index a0336717..47a40001 100644 --- a/calm/dsl/builtins/models/account_auth.py +++ b/calm/dsl/builtins/models/account_auth.py @@ -1,3 +1,6 @@ +from .utils import is_compile_secrets + + class AccountAuth: class K8s: def __new__(cls, username="", password=""): @@ -7,7 +10,10 @@ def basic(username="", password=""): auth_dict = { "type": "basic", "username": username, - "password": {"attrs": {"is_secret_modified": True}, "value": password}, + "password": { + "attrs": {"is_secret_modified": True}, + "value": password if is_compile_secrets() else "", + }, } return auth_dict @@ -17,11 +23,11 @@ def client_certificate(client_certificate="", client_key=""): "type": "client_certificate", "client_certificate": { "attrs": {"is_secret_modified": True}, - "value": client_certificate, + "value": client_certificate if is_compile_secrets() else "", }, "client_key": { "attrs": {"is_secret_modified": True}, - "value": client_key, + "value": client_key if is_compile_secrets() else "", }, } @@ -32,15 +38,15 @@ def ca_certificate(ca_certificate="", client_certificate="", client_key=""): "type": "ca_certificate", "ca_certificate": { "attrs": {"is_secret_modified": True}, - "value": ca_certificate, + "value": ca_certificate if is_compile_secrets() else "", }, "client_certificate": { "attrs": {"is_secret_modified": True}, - "value": client_certificate, + "value": client_certificate if is_compile_secrets() else "", }, "client_key": { "attrs": {"is_secret_modified": True}, - "value": client_key, + "value": client_key if is_compile_secrets() else "", }, } @@ -49,10 +55,13 @@ def ca_certificate(ca_certificate="", client_certificate="", client_key=""): def 
service_account(ca_certificate="", token=""): auth_dict = { "type": "service_account", - "token": {"attrs": {"is_secret_modified": True}, "value": token}, + "token": { + "attrs": {"is_secret_modified": True}, + "value": token if is_compile_secrets() else "", + }, "ca_certificate": { "attrs": {"is_secret_modified": True}, - "value": ca_certificate, + "value": ca_certificate if is_compile_secrets() else "", }, } diff --git a/calm/dsl/builtins/models/account_resources.py b/calm/dsl/builtins/models/account_resources.py index fd6ea390..ba68cd2e 100644 --- a/calm/dsl/builtins/models/account_resources.py +++ b/calm/dsl/builtins/models/account_resources.py @@ -19,6 +19,7 @@ from calm.dsl.constants import CACHE from calm.dsl.store import Cache from .helper.common import get_provider +from .utils import is_compile_secrets from calm.dsl.log import get_logging_handle @@ -194,7 +195,11 @@ def __new__(cls, provider, parent, variable_dict): LOG.error("{} is a mandatory variable".format(label_dict_key)) sys.exit("Mandatory variable not provided") - auth_schema["value"] = variable_list_values[label_dict_key] + auth_schema["value"] = ( + "" + if auth_schema["type"] == "SECRET" and not is_compile_secrets() + else variable_list_values[label_dict_key] + ) if auth_schema["type"] == "SECRET": auth_schema.pop("attrs") diff --git a/calm/dsl/builtins/models/action.py b/calm/dsl/builtins/models/action.py index 2a7dcfb0..4573e1eb 100644 --- a/calm/dsl/builtins/models/action.py +++ b/calm/dsl/builtins/models/action.py @@ -3,9 +3,9 @@ from .entity import EntityType, Entity from .validator import PropertyValidator -from .task import create_call_rb +from .task import create_call_rb, _get_target_ref from .runbook import runbook, runbook_create - +from calm.dsl.constants import SUBSTRATE from calm.dsl.log import get_logging_handle LOG = get_logging_handle(__name__) @@ -18,7 +18,51 @@ class ActionType(EntityType): __schema_name__ = "Action" __openapi_type__ = "app_action" - def __call__(cls, name=None): + def __call__(cls, name=None, target=None): + """ + This function is called whenever ENTITY_TYPE.function_name() is invoked + and it returns a task of CALL_RUNBOOK type. + + e.g. if a Service class named SampleService defines a function "foo", then + each call to SampleService.foo() invokes this function. 
+ """ + + if cls.name in list(SUBSTRATE.VM_POWER_ACTIONS_REV.keys()) and cls.runbook: + if not isinstance(name, str): + LOG.error("{} if not of type string".format(name)) + sys.exit(-1) + + # global import raises ImportError + from .substrate import SubstrateType + + entity = cls.runbook.tasks[0].target_any_local_reference.__self__ + + # guard condition to only allow substrate level vm power actions + if isinstance(entity, SubstrateType): + substrate = entity + vm_power_action = getattr( + substrate, SUBSTRATE.VM_POWER_ACTIONS_REV[cls.name], None + ) + if not vm_power_action: + LOG.error( + "Action {} not implemented in substrate".format( + SUBSTRATE.VM_POWER_ACTIONS_REV[cls.name] + ) + ) + sys.exit(-1) + target_runbook = vm_power_action.runbook + task = create_call_rb(cls.runbook, name=name, target=target_runbook) + task.name = name or task.name + # setting default target to service coupled to a substrate + if not target: + target = substrate.get_service_target() + if not target: + LOG.error("No service found to target") + sys.exit(-1) + + task.target_any_local_reference = _get_target_ref(target) + return task + return create_call_rb(cls.runbook, name=name) if cls.runbook else None def assign_targets(cls, parent_entity): @@ -121,6 +165,10 @@ def __get__(self, instance, cls): elif func_name in FRAGMENT: ACTION_TYPE = "fragment" action_name = FRAGMENT[func_name] + # Prevent accidental modification of target. + # Fragment actions have target ref to it's caller class. + for task in self.user_runbook.tasks: + task.target_any_local_reference = cls.get_task_target() else: # `name` argument is only supported in non-system actions @@ -156,6 +204,22 @@ def __get__(self, instance, cls): _task.exec_target_reference = _task.target_any_local_reference _task.target_any_local_reference = self.task_target + # Import error if imported globally + from .substrate import SubstrateType + + # Case for creating vm power action runbooks for a substrate + if isinstance(cls, SubstrateType) and action_name in list( + SUBSTRATE.VM_POWER_ACTIONS_REV.keys() + ): + if len(self.user_runbook.tasks) > 1: + LOG.error( + "{} can't be overriden in {}".format( + SUBSTRATE.VM_POWER_ACTIONS_REV[action_name], cls.__name__ + ) + ) + sys.exit(-1) + self.user_runbook = cls.create_power_action_runbook(action_name) + # Finally create the action self.user_action = _action_create( **{ diff --git a/calm/dsl/builtins/models/ahv_account.py b/calm/dsl/builtins/models/ahv_account.py index 19ea45ab..277bdfa3 100644 --- a/calm/dsl/builtins/models/ahv_account.py +++ b/calm/dsl/builtins/models/ahv_account.py @@ -3,6 +3,7 @@ from calm.dsl.log import get_logging_handle from .validator import PropertyValidator from calm.dsl.constants import ENTITY +from .utils import is_compile_secrets LOG = get_logging_handle(__name__) @@ -16,8 +17,9 @@ class AhvAccountType(EntityType): def compile(cls): cdict = super().compile() + pswd = cdict.pop("password", "") cdict["password"] = { - "value": cdict.pop("password", ""), + "value": pswd if is_compile_secrets() else "", "attrs": {"is_secret_modified": True}, } diff --git a/calm/dsl/builtins/models/ahv_vm.py b/calm/dsl/builtins/models/ahv_vm.py index 7d258275..f010b613 100644 --- a/calm/dsl/builtins/models/ahv_vm.py +++ b/calm/dsl/builtins/models/ahv_vm.py @@ -50,6 +50,8 @@ def compile(cls): boot_type = cdict.pop("boot_type", None) if boot_type == "UEFI": cdict["boot_config"]["boot_type"] = "UEFI" + elif boot_type == "SECURE_BOOT": + cdict["boot_config"]["boot_type"] = "SECURE_BOOT" if not cdict["boot_config"]: 
cdict.pop("boot_config", None) diff --git a/calm/dsl/builtins/models/ahv_vm_nic.py b/calm/dsl/builtins/models/ahv_vm_nic.py index 732c6e05..be829650 100644 --- a/calm/dsl/builtins/models/ahv_vm_nic.py +++ b/calm/dsl/builtins/models/ahv_vm_nic.py @@ -136,7 +136,10 @@ def compile(cls): env_accounts = environment_cache_data.get("accounts_data", {}).get( "nutanix_pc", [] ) - if subnet_uuid not in env_accounts.get(account_uuid, []): + env_accounts = {_acc["uuid"]: _acc for _acc in env_accounts} + if subnet_uuid not in env_accounts.get(account_uuid, {}).get( + "subnet_uuids", [] + ): LOG.error( "Subnet {} is not whitelisted in environment {}".format( subnet_name, str(pfl_env) diff --git a/calm/dsl/builtins/models/app_protection.py b/calm/dsl/builtins/models/app_protection.py index 5df328a8..e512aeff 100644 --- a/calm/dsl/builtins/models/app_protection.py +++ b/calm/dsl/builtins/models/app_protection.py @@ -5,6 +5,9 @@ from .config_spec import snapshot_config_create, restore_config_create from .helper import common as common_helper +from .metadata_payload import get_metadata_obj +from calm.dsl.constants import PROVIDER +from calm.dsl.builtins import Ref LOG = get_logging_handle(__name__) @@ -15,60 +18,47 @@ def __new__(cls, *args, **kwargs): class ProtectionPolicy: def __new__(cls, name, **kwargs): - rule_name = kwargs.get("rule_name", None) - rule_uuid = kwargs.get("rule_uuid", None) - project_cache_data = common_helper.get_cur_context_project() - project_name = project_cache_data.get("name") - protection_policy_cache_data = Cache.get_entity_data( - entity_type="app_protection_policy", - name=name, - rule_name=rule_name, - rule_uuid=rule_uuid, - project_name=project_name, - ) - if not protection_policy_cache_data: - LOG.error( - "Protection Policy {} not found. 
Please run: calm update cache".format( - name - ) - ) - sys.exit("Protection policy {} does not exist".format(name)) - return { - "kind": "app_protection_policy", - "name": protection_policy_cache_data["name"], - "uuid": protection_policy_cache_data["uuid"], - "rule_uuid": protection_policy_cache_data["rule_uuid"], - } + # Capturing metadata object to read project name supplied in blueprint + # Use config project if no project supplied in metadata + metadata = get_metadata_obj() + if metadata.get("project_reference", {}): + project_name = metadata.get("project_reference", {}).get("name") + else: + project_cache_data = common_helper.get_cur_context_project() + project_name = project_cache_data.get("name") + kwargs["project_name"] = project_name + + return Ref.ProtectionPolicy(name, **kwargs) class SnapshotConfig: - def __new__( - cls, - name, - target=None, - num_of_replicas="ONE", - restore_config=None, - policy=None, - description="", - ): - if restore_config: + class Ahv: + def __new__( + cls, + name, + target=None, + num_of_replicas="ONE", + restore_config=None, + policy=None, + description="", + snapshot_location_type="LOCAL", + ): + if not restore_config: + LOG.error("Restore Config reference not supplied.") + sys.exit(-1) + return snapshot_config_create( - name, + name=name, + provider=PROVIDER.TYPE.AHV, target=target, num_of_replicas=num_of_replicas, config_references=[restore_config], policy=policy, description=description, + snapshot_location_type=snapshot_location_type, ) - return snapshot_config_create( - name, - target=target, - num_of_replicas=num_of_replicas, - policy=policy, - description=description, - ) - class CrashConsistent: + class Vmware: def __new__( cls, name, @@ -78,24 +68,74 @@ def __new__( policy=None, description="", ): - if restore_config: - return snapshot_config_create( - name, - target=target, - num_of_replicas=num_of_replicas, - config_references=[restore_config], - policy=policy, - description=description, - ) + if not restore_config: + LOG.error("Restore Config reference not supplied.") + sys.exit(-1) + return snapshot_config_create( - name, + name=name, + provider=PROVIDER.TYPE.VMWARE, target=target, num_of_replicas=num_of_replicas, + config_references=[restore_config], policy=policy, description=description, ) + # AHV snapshot object will be created by default for backward compatibility + def __new__( + cls, + name, + target=None, + num_of_replicas="ONE", + restore_config=None, + policy=None, + description="", + snapshot_location_type="LOCAL", + ): + return cls.Ahv.__new__( + cls, + name=name, + target=target, + num_of_replicas=num_of_replicas, + restore_config=restore_config, + policy=policy, + description=description, + snapshot_location_type=snapshot_location_type, + ) + class RestoreConfig: + class Ahv: + def __new__( + cls, + name, + target=None, + delete_vm_post_restore=False, + description="", + ): + return restore_config_create( + name=name, + provider=PROVIDER.TYPE.AHV, + target=target, + delete_vm_post_restore=delete_vm_post_restore, + description=description, + ) + + class Vmware: + def __new__( + cls, + name, + target=None, + description="", + ): + return restore_config_create( + name=name, + provider=PROVIDER.TYPE.VMWARE, + target=target, + description=description, + ) + + # AHV restore object will be created by default for backward compatibility def __new__( cls, name, @@ -103,8 +143,9 @@ def __new__( delete_vm_post_restore=False, description="", ): - return restore_config_create( - name, + return cls.Ahv.__new__( + cls, + name=name, 
target=target, delete_vm_post_restore=delete_vm_post_restore, description=description, diff --git a/calm/dsl/builtins/models/aws_account.py b/calm/dsl/builtins/models/aws_account.py index 3e0837f5..4d90b390 100644 --- a/calm/dsl/builtins/models/aws_account.py +++ b/calm/dsl/builtins/models/aws_account.py @@ -3,6 +3,7 @@ from calm.dsl.log import get_logging_handle from .validator import PropertyValidator from calm.dsl.constants import ENTITY +from .utils import is_compile_secrets LOG = get_logging_handle(__name__) @@ -17,7 +18,7 @@ def compile(cls): cdict = super().compile() secret_access_key_value = cdict.pop("secret_access_key", "") cdict["secret_access_key"] = { - "value": secret_access_key_value, + "value": secret_access_key_value if is_compile_secrets() else "", "attrs": {"is_secret_modified": True}, } return cdict diff --git a/calm/dsl/builtins/models/aws_c2s_account.py b/calm/dsl/builtins/models/aws_c2s_account.py index c58ac84d..56b7ebe4 100644 --- a/calm/dsl/builtins/models/aws_c2s_account.py +++ b/calm/dsl/builtins/models/aws_c2s_account.py @@ -3,6 +3,7 @@ from calm.dsl.log import get_logging_handle from .validator import PropertyValidator from calm.dsl.constants import ENTITY +from .utils import is_compile_secrets LOG = get_logging_handle(__name__) @@ -25,7 +26,7 @@ def compile(cls): client_key = cdict.pop("client_key", None) cdict["client_key"] = { "attrs": {"is_secret_modified": True}, - "value": client_key, + "value": client_key if is_compile_secrets() else "", } return cdict diff --git a/calm/dsl/builtins/models/azure_account.py b/calm/dsl/builtins/models/azure_account.py index 7fa7d206..0e363a6d 100644 --- a/calm/dsl/builtins/models/azure_account.py +++ b/calm/dsl/builtins/models/azure_account.py @@ -4,6 +4,7 @@ from calm.dsl.log import get_logging_handle from .validator import PropertyValidator from calm.dsl.constants import ENTITY +from .utils import is_compile_secrets LOG = get_logging_handle(__name__) @@ -19,7 +20,7 @@ def compile(cls): client_key = cdict.pop("client_key", None) cdict["client_key"] = { "attrs": {"is_secret_modified": True}, - "value": client_key, + "value": client_key if is_compile_secrets() else "", } # TODO remove from here diff --git a/calm/dsl/builtins/models/brownfield.py b/calm/dsl/builtins/models/brownfield.py index d9dd0dfa..1fd14977 100644 --- a/calm/dsl/builtins/models/brownfield.py +++ b/calm/dsl/builtins/models/brownfield.py @@ -4,6 +4,7 @@ from .validator import PropertyValidator from .deployment import DeploymentType from .metadata_payload import get_metadata_obj +from distutils.version import LooseVersion as LV from .helper import common as common_helper from calm.dsl.config import get_context @@ -11,6 +12,7 @@ from calm.dsl.api import get_api_client from calm.dsl.constants import CACHE, PROVIDER_ACCOUNT_TYPE_MAP from calm.dsl.log import get_logging_handle +from calm.dsl.store import Version LOG = get_logging_handle(__name__) @@ -303,6 +305,8 @@ def get_vmware_bf_vm_data( ): """Return vmware vm data matched with provided instacne details""" + CALM_VERSION = Version.get_version("Calm") + client = get_api_client() params = { @@ -331,7 +335,12 @@ def get_vmware_bf_vm_data( e_name = e_resources["instance_name"] e_id = e_resources["instance_id"] e_address = e_resources["address"] - e_address_list = e_resources["guest.ipAddress"] + + # fixes: https://github.com/nutanix/calm-dsl/issues/226 (KeyError: 'guest.ipAddress') + if LV(CALM_VERSION) >= LV("3.3.0"): + e_address_list = e_resources["guest_ipaddress"] + else: + e_address_list = 
e_resources["guest.ipAddress"] if match_vm_data( vm_name=e_name, @@ -448,7 +457,8 @@ def get_profile_environment(cls): environment = {} cls_profile = common_helper._walk_to_parent_with_given_type(cls, "ProfileType") - environment = getattr(cls_profile, "environment", {}) + profile_envs = getattr(cls_profile, "environments", []) + environment = profile_envs[0].get_dict() if profile_envs else dict() if environment: LOG.debug( "Found environment {} associated to app-profile {}".format( diff --git a/calm/dsl/builtins/models/calm_ref.py b/calm/dsl/builtins/models/calm_ref.py index 9833f03f..c22ce1a4 100644 --- a/calm/dsl/builtins/models/calm_ref.py +++ b/calm/dsl/builtins/models/calm_ref.py @@ -666,6 +666,11 @@ def decompile(cls, cdict): """return decompile class""" if cdict.get("uuid"): + if cdict["uuid"].startswith("@@{") and cdict["uuid"].endswith("}@@"): + return cls.__new__( + cls, + name=cdict["uuid"], + ) cache_data = Cache.get_entity_data_using_uuid( entity_type=CACHE.ENTITY.AHV_CLUSTER, uuid=cdict["uuid"] ) @@ -1355,3 +1360,41 @@ def compile(cls, name, **kwargs): "value": kwargs.get("value", ""), } return tag_dict + + class ProtectionPolicy: + def __new__(cls, name, **kwargs): + kwargs["__ref_cls__"] = cls + kwargs["policy_name"] = name + return _calm_ref(**kwargs) + + def compile(cls, **kwargs): + rule_name = kwargs.get("rule_name", None) + rule_uuid = kwargs.get("rule_uuid", None) + name = kwargs.get("policy_name", None) + project_name = kwargs.get("project_name", None) + + if not project_name: + LOG.error("Unable to find project associated to blueprint") + sys.exit("Unable to find project associated to blueprint") + + protection_policy_cache_data = Cache.get_entity_data( + entity_type="app_protection_policy", + name=name, + rule_name=rule_name, + rule_uuid=rule_uuid, + project_name=project_name, + ) + + if not protection_policy_cache_data: + LOG.error( + "Protection Policy {} not found. 
Please run: calm update cache".format( + name + ) + ) + sys.exit("Protection policy {} does not exist".format(name)) + return { + "kind": "app_protection_policy", + "name": protection_policy_cache_data["name"], + "uuid": protection_policy_cache_data["uuid"], + "rule_uuid": protection_policy_cache_data["rule_uuid"], + } diff --git a/calm/dsl/builtins/models/config_attrs.py b/calm/dsl/builtins/models/config_attrs.py index ab57a85a..12847277 100644 --- a/calm/dsl/builtins/models/config_attrs.py +++ b/calm/dsl/builtins/models/config_attrs.py @@ -8,6 +8,8 @@ from .action import action, _action_create from .runbook import runbook_create from calm.dsl.log import get_logging_handle +from calm.dsl.builtins.models.ahv_vm_disk import AhvDiskType +from calm.dsl.builtins.models.ahv_vm_nic import AhvNicType LOG = get_logging_handle(__name__) @@ -17,6 +19,49 @@ class AhvDiskRuleset(EntityType): __schema_name__ = "AhvDiskRuleset" __openapi_type__ = "ahv_disk_rule" + @classmethod + def decompile(mcls, cdict, context=[], prefix=""): + disk_operation = cdict.pop("operation", "") + index = cdict.pop("index", "") + disk_details = cdict.get("disk_size_mib", {}) + operation = "" + editable = False + disk_size = "" + max_value = "" + min_value = "" + value = "" + + if disk_details: + editable = disk_details.pop("editable", False) + operation = disk_details.pop("operation", "") + max_value = disk_details.pop("max_value", "") + min_value = disk_details.pop("min_value", "") + # creating valid disk size dictionary + disk_size = disk_details.get("value", "") + + if disk_size: + cdict["disk_size_mib"] = int(disk_size) + value = str(disk_size) if disk_size else "" + + disk_value = AhvDiskType.decompile(cdict, context=context, prefix=prefix) + + kwargs = { + "disk_operation": disk_operation, + "operation": operation, + "editable": editable, + "disk_value": disk_value, + } + if max_value: + kwargs["max_value"] = max_value + if min_value: + kwargs["min_value"] = min_value + if value: + kwargs["value"] = value + if index: + kwargs["index"] = index + + return mcls(None, (Entity,), kwargs) + class AhvDiskRulesetValidator(PropertyValidator, openapi_type="ahv_disk_rule"): __default__ = None @@ -36,6 +81,24 @@ class AhvNicRuleset(EntityType): __schema_name__ = "AhvNicRuleset" __openapi_type__ = "ahv_nic_rule" + @classmethod + def decompile(mcls, cdict, context=[], prefix=""): + + operation = cdict.pop("operation", "") + editable = cdict.pop("editable", False) + value = cdict.pop("value", "") + index = cdict.pop("identifier", "") + + nic_value = AhvNicType.decompile(cdict, context=context, prefix=prefix) + + kwargs = {"operation": operation, "editable": editable, "nic_value": nic_value} + if value: + kwargs["value"] = value + if index: + kwargs["index"] = index + + return mcls(None, (Entity,), kwargs) + class AhvNicRulesetValidator(PropertyValidator, openapi_type="ahv_nic_rule"): __default__ = None @@ -74,6 +137,10 @@ class ConfigAttrs(EntityType): __schema_name__ = "ConfigAttrs" __openapi_type__ = "config_attrs" + def get_config_actions(cls): + cdict = super().compile() + return cdict["action_list"] + class ConfigAttrsValidator(PropertyValidator, openapi_type="config_attrs"): __default__ = None diff --git a/calm/dsl/builtins/models/config_spec.py b/calm/dsl/builtins/models/config_spec.py index f61c49dc..29141fee 100644 --- a/calm/dsl/builtins/models/config_spec.py +++ b/calm/dsl/builtins/models/config_spec.py @@ -12,6 +12,12 @@ from calm.dsl.api import get_resource_api, get_api_client from calm.dsl.config import get_context from 
calm.dsl.log import get_logging_handle +from .runbook import runbook_create +from .action import _action_create +from calm.dsl.builtins import get_valid_identifier +from calm.dsl.constants import PROVIDER +from calm.dsl.store import Cache +from calm.dsl.builtins.models.config_attrs import ahv_disk_ruleset, ahv_nic_ruleset LOG = get_logging_handle(__name__) @@ -30,6 +36,16 @@ class ConfigSpecType(EntityType): __schema_name__ = "ConfigSpec" __openapi_type__ = "app_config_spec" + @classmethod + def pre_decompile(mcls, cdict, context, prefix=""): + cdict = super().pre_decompile(cdict, context, prefix=prefix) + + # Calling pre_decompile of PatchConfigSpecType for patch configs + if cdict.get("type", "") == "PATCH": + cdict = PatchConfigSpecType.pre_decompile(cdict, context, prefix=prefix) + + return cdict + def get_ref(cls, kind=None): """Note: app_blueprint_deployment kind to be used for pod deployment""" return super().get_ref(kind=ConfigSpecType.__openapi_type__) @@ -40,14 +56,29 @@ def compile(cls): cdict.pop("patch_attrs", None) return cdict attrs = cdict.pop("patch_attrs")[0] + target = cdict["attrs_list"][0]["target_any_local_reference"] + + resource_categories = ( + target.__self__.substrate.__self__.provider_spec.categories + ) categories_data = [] categories = attrs.categories + + # attaching pre defined categories if no categories are supplied. + if not categories: + for key, value in resource_categories.items(): + val = {} + val["operation"] = "modify" + val["value"] = "{}:{}".format(key, value) + categories_data.append(val) + for op_category in categories: for op in op_category["val"]: val = {} val["operation"] = op_category["operation"] val["value"] = op categories_data.append(val) + memory = attrs.memory if memory.min_value: memory.min_value = memory.min_value * 1024 @@ -55,9 +86,27 @@ def compile(cls): memory.max_value = memory.max_value * 1024 if memory.value: memory.value = str(int(float(memory.value) * 1024)) - target = cdict["attrs_list"][0]["target_any_local_reference"] + + resource_disks = ( + target.__self__.substrate.__self__.provider_spec.resources.disks + ) disk_data = [] disks = attrs.disks + + # attaching pre defined resource disks if no disks are supplied. + if not disks: + for idx, _disk in enumerate(resource_disks): + _disk = _disk.compile() + kwargs = { + "disk_operation": "modify", + "operation": "", + "index": idx, + "editable": False, + } + _value = _disk.get("disk_size_mib", 0) + kwargs["value"] = str(_value // 1024) + disks.append(ahv_disk_ruleset(**kwargs)) + adapter_name_index_map = {} for disk in disks: if disk.disk_operation in ["delete", "modify"]: @@ -97,8 +146,17 @@ def compile(cls): val["disk_size_mib"]["max_value"] = disk.max_value * 1024 val.pop("bootable", None) disk_data.append(val) + + resource_nics = target.__self__.substrate.__self__.provider_spec.resources.nics nic_data = [] nics = attrs.nics + + # attaching pre defined nics if no nics are supplied. 
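To make this disk/NIC fallback concrete: for a substrate carrying one 10 GiB disk and one NIC, the synthesized rulesets are equivalent to the following sketch (values assumed for illustration; ahv_disk_ruleset and ahv_nic_ruleset are the helpers imported at the top of this file).

# Disk fallback: disk_size_mib comes from the substrate's provider_spec and
# is converted from MiB to GiB with integer division.
default_disk = ahv_disk_ruleset(
    disk_operation="modify",
    operation="",
    index=0,
    editable=False,
    value=str(10240 // 1024),  # -> "10"
)
# NIC fallback, built in the branch that follows; note index is a string here.
default_nic = ahv_nic_ruleset(operation="modify", index="0", editable=False)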
+ if not nics: + for idx, _ in enumerate(resource_nics): + kwargs = {"operation": "modify", "index": str(idx), "editable": False} + nics.append(ahv_nic_ruleset(**kwargs)) + counter = 1 for nic in nics: if nic.operation in ["delete", "modify"]: @@ -127,16 +185,193 @@ def compile(cls): "pre_defined_nic_list": nic_data, "pre_defined_categories": categories_data, } + + # Setting actions attribute to PatchConfigSpecType to compile actions + if isinstance(cls, PatchConfigSpecType): + actions = attrs.get_config_actions() + cls.set_actions(actions) + cdict["attrs_list"][0]["data"] = data return cdict + def post_compile(cls, cdict): + cdict = super().post_compile(cdict) + + # Remove policy_reference after compiling as it is invalid in blueprint payload + cdict.pop("policy_reference", "") + return cdict + class PatchConfigSpecType(ConfigSpecType): - pass + @classmethod + def pre_decompile(mcls, cdict, context, prefix=""): + patch_attr_data = cdict["attrs_list"][0]["data"] + actions = [] + patch_action_data = cdict.get("runbook", {}) + if patch_action_data: + patch_action_data.pop("uuid", "") + patch_action_data.get("main_task_local_reference", {}).pop("uuid", "") + task_definition_list = patch_action_data.get("task_definition_list", []) + for tdl in task_definition_list: + # Removing additional attributes + tdl.pop("uuid", "") + tdl.get("target_any_local_reference", {}).pop("uuid", "") + for child in tdl.get("child_tasks_local_reference_list", []): + child.pop("uuid", "") + for edge in tdl.get("attrs", {}).get("edges", []): + edge.get("from_task_reference", {}).pop("uuid", "") + edge.get("to_task_reference", {}).pop("uuid", "") + + action_name = "custom_app_edit_action" + if len(task_definition_list) > 1: + action_name = task_definition_list[1].get("name", action_name) + + actions = [ + _action_create( + **{ + "name": get_valid_identifier(action_name), + "description": "", + "critical": True, + "type": "user", + "runbook": patch_action_data, + } + ).get_dict() + ] + + for idx, _disk in enumerate(patch_attr_data.get("pre_defined_disk_list", [])): + _disk["index"] = idx + + kwargs = { + "nic_delete": patch_attr_data.get("nic_delete_allowed", False), + "categories_delete": patch_attr_data.get( + "categories_delete_allowed", False + ), + "categories_add": patch_attr_data.get("categories_add_allowed", False), + "disk_delete": patch_attr_data.get("disk_delete_allowed", False), + "disks": patch_attr_data.get("pre_defined_disk_list", []), + "nics": patch_attr_data.get("pre_defined_nic_list", []), + "categories": patch_attr_data.get("pre_defined_categories", []), + "numsocket": patch_attr_data.get("num_sockets_ruleset", {}), + "memory": patch_attr_data.get("memory_size_mib_ruleset", {}), + "vcpu": patch_attr_data.get("num_vcpus_per_socket_ruleset", {}), + "action_list": actions, + } + + cdict["patch_attrs"] = [kwargs] + cdict.pop("runbook", "") + return cdict + + def compile(cls): + cdict = super().compile() + + attrs = getattr(cls, "attrs_list")[0] if getattr(cls, "attrs_list") else None + try: + target_deployment = attrs["target_any_local_reference"] + services_ref = target_deployment.__self__.get_service_ref() + except: + LOG.error("No deployment target set for patch config") + sys.exit(-1) + + if not services_ref: + LOG.error("No service to target patch config actions.") + sys.exit(-1) + + # there will be a single action in the patch_config + actions = getattr(cls, "action_list", []) + if len(actions) > 1: + LOG.error("Single action is allowed at patch_config") + sys.exit(-1) + + if actions: + actions = 
actions[0].get_dict() + config_runbook = actions["runbook"] + for tdl in config_runbook["task_definition_list"]: + tdl["uuid"] = str(uuid.uuid4()) + tdl["target_any_local_reference"] = services_ref.get_dict() + + cdict["runbook"] = config_runbook + + return cdict + + @classmethod + def set_actions(cls, actions): + setattr(cls, "action_list", actions) class SnapshotConfigSpecType(ConfigSpecType): - pass + def compile(cls): + cdict = super().compile() + rule_ref = {} + snapshot_location_type = None + policy = None + policy_ref = getattr(cls, "policy") + if policy_ref: + policy = policy_ref.compile() + if policy: + rule = policy.pop("rule_uuid", None) + + # Reading protection policy data from cache, required if no rule is present in above policy reference + protection_policy_cache_data = Cache.get_entity_data_using_uuid( + entity_type="app_protection_policy", + uuid=policy.get("uuid"), + ) + + if not protection_policy_cache_data: + LOG.error( + "Protection Policy {} not found. Please run: calm update cache".format( + policy.get("name") + ) + ) + sys.exit( + "Protection policy {} does not exist".format(policy.get("name")) + ) + + protection_rule_list = [] + + for _policy in protection_policy_cache_data: + protection_rule_list.append( + { + "uuid": _policy["rule_uuid"], + "name": _policy["rule_name"], + "rule_type": _policy.get("rule_type", ""), + } + ) + + rule_ref["kind"] = "app_protection_rule" + + # if no rule is given, pick first rule specified in policy + if not rule: + if protection_rule_list and isinstance(protection_rule_list, list): + rule_ref["uuid"] = protection_rule_list[0]["uuid"] + rule_ref["name"] = protection_rule_list[0]["name"] + if protection_rule_list[0]["rule_type"] == "Remote": + snapshot_location_type = "REMOTE" + else: + for pr in protection_rule_list: + if pr.get("uuid") == rule: + rule_ref["uuid"] = rule + rule_ref["name"] = pr.get("name") + if pr.get("rule_type", "") == "Remote": + snapshot_location_type = "REMOTE" + + if "uuid" not in rule_ref: + LOG.error( + "No Protection Rule {} found under Protection Policy {}".format( + rule, policy["name"] + ) + ) + sys.exit("Invalid protection rule") + + if ( + cdict["attrs_list"][0].get("snapshot_location_type") + and snapshot_location_type + ): + cdict["attrs_list"][0]["snapshot_location_type"] = snapshot_location_type + if policy: + cdict["attrs_list"][0]["app_protection_policy_reference"] = policy + cdict["attrs_list"][0]["app_protection_rule_reference"] = rule_ref + + return cdict class RestoreConfigSpecType(ConfigSpecType): @@ -190,8 +425,54 @@ def patch_config_create( return _config_create(config_type="patch", **kwargs) +def _update_ahv_snapshot_config(attrs, snapshot_location_type, **kwargs): + attrs["snapshot_location_type"] = snapshot_location_type + + +def _update_vmw_snapshot_config(attrs, **kwargs): + attrs["snapshot_name"] = "" + attrs["vm_memory_snapshot_enabled"] = "" + attrs["snapshot_description"] = "" + attrs["snapshot_quiesce_enabled"] = "" + + snapshot_description = CalmVariable.Simple( + "", name="snapshot_description", runtime=True, is_mandatory=True ) + vm_memory_snapshot_enabled = CalmVariable.Simple( + "false", name="vm_memory_snapshot_enabled", runtime=True, is_mandatory=True + ) + snapshot_quiesce_enabled = CalmVariable.Simple( + "false", name="snapshot_quiesce_enabled", runtime=True, is_mandatory=True + ) + updated_variables = [ + snapshot_description, + vm_memory_snapshot_enabled, + snapshot_quiesce_enabled, + ] + kwargs["variables"].extend(updated_variables) + + +def _update_ahv_restore_config( + 
attrs, snapshot_location_type, delete_vm_post_restore, **kwargs +): + attrs["delete_vm_post_restore"] = delete_vm_post_restore + attrs["snapshot_location_type"] = snapshot_location_type + delete_vm_post_restore = CalmVariable.Simple( + str(delete_vm_post_restore).lower(), + name="delete_vm_post_restore", + runtime=True, + is_mandatory=True, + ) + kwargs["variables"].append(delete_vm_post_restore) + + +def _update_vmw_restore_config(attrs, **kwargs): + attrs["suppress_power_on"] = "true" + + def snapshot_config_create( name, + provider, target=None, snapshot_type="CRASH_CONSISTENT", num_of_replicas="ONE", @@ -200,57 +481,19 @@ def snapshot_config_create( policy=None, description="", ): - rule_ref = {} - if policy: - rule = policy.pop("rule_uuid", None) - client = get_api_client() - res, err = client.app_protection_policy.read(id=policy.get("uuid")) - if err: - LOG.error("[{}] - {}".format(err["code"], err["error"])) - sys.exit("Unable to retrieve protection policy details") - - res = res.json() - protection_rule_list = res["status"]["resources"]["app_protection_rule_list"] - - rule_ref["kind"] = "app_protection_rule" - if not rule: - if protection_rule_list and isinstance(protection_rule_list, list): - rule_ref["uuid"] = protection_rule_list[0]["uuid"] - rule_ref["name"] = protection_rule_list[0]["name"] - if protection_rule_list[0].get( - "remote_snapshot_retention_policy", None - ): - snapshot_location_type = "REMOTE" - else: - for pr in protection_rule_list: - if pr.get("uuid") == rule: - rule_ref["uuid"] = rule - rule_ref["name"] = pr.get("name") - if pr.get("remote_snapshot_retention_policy", None): - snapshot_location_type = "REMOTE" - - if "uuid" not in rule_ref: - LOG.error( - "No Protection Rule {} found under Protection Policy {}".format( - rule, res["metadata"]["name"] - ) - ) - sys.exit("Invalid protection rule") - + # Only AHV supports snapshot location. 
VMWARE doesn't support snapshot location, + # therefore not setting snapshot location in config reference for VMWARE if config_references: - for config_ref in config_references: - config_ref.__self__.attrs_list[0][ - "snapshot_location_type" - ] = snapshot_location_type + if provider == PROVIDER.TYPE.AHV: + for config_ref in config_references: + config_ref.__self__.attrs_list[0][ + "snapshot_location_type" + ] = snapshot_location_type attrs = { "target_any_local_reference": target, - "snapshot_location_type": snapshot_location_type, "num_of_replicas": num_of_replicas, } - if policy: - attrs["app_protection_policy_reference"] = policy - attrs["app_protection_rule_reference"] = rule_ref snapshot_name = CalmVariable.Simple( name, name="snapshot_name", runtime=True, is_mandatory=True ) @@ -264,12 +507,20 @@ def snapshot_config_create( "type": "", # Set at profile level during compile "variables": [snapshot_name, snapshot_type], "config_references": config_references, + "policy": policy, } + + if provider == PROVIDER.TYPE.AHV: + _update_ahv_snapshot_config(attrs, snapshot_location_type) + elif provider == PROVIDER.TYPE.VMWARE: + _update_vmw_snapshot_config(attrs, **kwargs) + return _config_create(config_type="snapshot", **kwargs) def restore_config_create( name, + provider, target, snapshot_location_type="LOCAL", delete_vm_post_restore=False, @@ -277,23 +528,25 @@ def restore_config_create( ): attrs = { "target_any_local_reference": target, - "delete_vm_post_restore": delete_vm_post_restore, - "snapshot_location_type": snapshot_location_type, } + snapshot_uuids = CalmVariable.Simple( "", name="snapshot_uuids", runtime=True, is_mandatory=True ) - delete_vm_post_restore = CalmVariable.Simple( - str(delete_vm_post_restore).lower(), - name="delete_vm_post_restore", - runtime=True, - is_mandatory=True, - ) + kwargs = { "name": name, "description": description, "attrs_list": [attrs], "type": "", # Set at profile level based on target - "variables": [snapshot_uuids, delete_vm_post_restore], + "variables": [snapshot_uuids], } + + if provider == PROVIDER.TYPE.AHV: + _update_ahv_restore_config( + attrs, snapshot_location_type, delete_vm_post_restore, **kwargs + ) + elif provider == PROVIDER.TYPE.VMWARE: + _update_vmw_restore_config(attrs, **kwargs) + return _config_create(config_type="restore", **kwargs) diff --git a/calm/dsl/builtins/models/credential.py b/calm/dsl/builtins/models/credential.py index 9baf0cc4..c181191a 100644 --- a/calm/dsl/builtins/models/credential.py +++ b/calm/dsl/builtins/models/credential.py @@ -10,6 +10,7 @@ from calm.dsl.api.handle import get_api_client from calm.dsl.log import get_logging_handle from calm.dsl.store import Version +from calm.dsl.builtins.models.utils import is_compile_secrets LOG = get_logging_handle(__name__) @@ -56,7 +57,10 @@ def basic_cred( if filename: password = read_file(filename, depth=2) - secret = {"attrs": {"is_secret_modified": True}, "value": password} + secret = { + "attrs": {"is_secret_modified": True}, + "value": password if is_compile_secrets() else "", + } kwargs = {} kwargs["type"] = type @@ -80,7 +84,11 @@ def secret_cred( ): # This secret value will be replaced when the user is creating a blueprint - secret = {"attrs": {"is_secret_modified": True}, "value": "", "secret": secret} + secret = { + "attrs": {"is_secret_modified": True}, + "value": "", + "secret": secret if is_compile_secrets() else "", + } kwargs = {} kwargs["type"] = type @@ -123,16 +131,21 @@ def dynamic_cred( resource_type = Ref.Resource_Type(account.name) if variable_dict: - 
resource_type_uuid = resource_type.compile()["uuid"] - res, err = client.resource_types.read(id=resource_type_uuid) + account_uuid = account.compile()["uuid"] + res, err = client.account.resource_types_list(account_uuid) if err: LOG.error(err) sys.exit(-1) - resource_type_payload = res.json() + resource_type_list = res.json().get("entities", []) + if len(resource_type_list) == 0: + LOG.error("No resource types found in account") + sys.exit("No resource types found in account") + + resource_type_payload = resource_type_list[0] cred_attr_list = ( - resource_type_payload.get("spec", {}) + resource_type_payload.get("status", {}) .get("resources", {}) .get("schema_list", {}) ) @@ -149,6 +162,9 @@ def dynamic_cred( cred_attr_copy["value"] = variable_dict.pop(var_name) cred_attr_copy.pop("uuid", None) + cred_attr_copy.pop("message_list", None) + cred_attr_copy.pop("state", None) + cred_attr_copy.get("attrs", None).pop("secret_reference", None) variable_list.append(cred_attr_copy) if variable_dict: diff --git a/calm/dsl/builtins/models/deployment.py b/calm/dsl/builtins/models/deployment.py index 5d2471ec..dfddee65 100644 --- a/calm/dsl/builtins/models/deployment.py +++ b/calm/dsl/builtins/models/deployment.py @@ -36,6 +36,18 @@ def decompile(mcls, cdict, context=[], prefix=""): return super().decompile(cdict, context=context, prefix=prefix) + def get_service_ref(cls): + """get service class reference from deployment""" + + package_ref = getattr(cls, "packages") + package_cls = [] + for package in package_ref: + package_cls.append(package.__self__) + if package_cls: + # Target for package is service, retrieving service ref for a package + return package_cls[0].get_task_target() + return None + class DeploymentValidator(PropertyValidator, openapi_type="app_blueprint_deployment"): __default__ = None diff --git a/calm/dsl/builtins/models/endpoint.py b/calm/dsl/builtins/models/endpoint.py index cb46a656..b4951d64 100644 --- a/calm/dsl/builtins/models/endpoint.py +++ b/calm/dsl/builtins/models/endpoint.py @@ -6,6 +6,7 @@ from .entity import EntityType, Entity, EntityTypeBase from .validator import DictValidator, PropertyValidator from .credential import CredentialType +from .utils import is_compile_secrets from calm.dsl.store import Cache from calm.dsl.constants import CACHE from calm.dsl.store import Version @@ -280,7 +281,10 @@ def windows_endpoint_vm( def _basic_auth(username, password): - secret = {"attrs": {"is_secret_modified": True}, "value": password} + secret = { + "attrs": {"is_secret_modified": True}, + "value": password if is_compile_secrets() else "", + } auth = {} auth["type"] = "basic" auth["username"] = username diff --git a/calm/dsl/builtins/models/entity.py b/calm/dsl/builtins/models/entity.py index 9ed0373f..02cb416c 100644 --- a/calm/dsl/builtins/models/entity.py +++ b/calm/dsl/builtins/models/entity.py @@ -404,6 +404,12 @@ def pre_compile(cls): update_dsl_metadata_map(entity_type, entity_name=ui_name, entity_obj=entity_obj) + def get_gui_name(cls): + """ + Gives the ui name for entity class + """ + return getattr(cls, "name", "") or cls.__name__ + def compile(cls): cls.pre_compile() diff --git a/calm/dsl/builtins/models/gcp_account.py b/calm/dsl/builtins/models/gcp_account.py index e3b6660e..02238932 100644 --- a/calm/dsl/builtins/models/gcp_account.py +++ b/calm/dsl/builtins/models/gcp_account.py @@ -3,6 +3,8 @@ from calm.dsl.log import get_logging_handle from .validator import PropertyValidator from calm.dsl.constants import ENTITY +from .utils import is_compile_secrets + LOG = 
get_logging_handle(__name__) @@ -19,7 +21,7 @@ def compile(cls): private_key = cdict.pop("private_key", None) cdict["private_key"] = { "attrs": {"is_secret_modified": True}, - "value": private_key, + "value": private_key if is_compile_secrets() else "", } region_list = cdict.pop("regions", None) diff --git a/calm/dsl/cli/quotas.py b/calm/dsl/builtins/models/helper/quotas.py similarity index 84% rename from calm/dsl/cli/quotas.py rename to calm/dsl/builtins/models/helper/quotas.py index 6c523b31..d678fc49 100644 --- a/calm/dsl/cli/quotas.py +++ b/calm/dsl/builtins/models/helper/quotas.py @@ -72,12 +72,12 @@ def _set_quota_state(client, state, quota_entities): """ spec = generate_quota_state_spec(state=state, quota_entities=quota_entities) - LOG.info("Spec sent for set quota state: {}".format(spec)) + LOG.debug("Spec sent for set quota state: {}".format(spec)) try: res, err = client.quotas.update_state(payload=spec) res = res.json() - LOG.info("Response from function call {}".format(res)) + LOG.debug("Response from function call {}".format(res)) if ( isinstance(res, dict) @@ -118,12 +118,12 @@ def _get_quota(client, quota_entities): """ spec = generate_get_quota_spec(quota_entities=quota_entities) - LOG.info("Spec sent for get quota: {}".format(spec)) + LOG.debug("Spec sent for get quota: {}".format(spec)) try: res, err = client.quotas.list(payload=spec) res = res.json() - LOG.info("Response from function call {}".format(res)) + LOG.debug("Response from function call {}".format(res)) if isinstance(res, dict): return res, None @@ -150,12 +150,12 @@ def _get_quota_uuid(res): res["entities"][0].get("status", {}).get("resources", {}).get("uuid") ) - LOG.info("Quota Uuid {} ".format(quota_uuid)) + LOG.debug("Quota Uuid {} ".format(quota_uuid)) if quota_uuid: return quota_uuid, None else: - LOG.info("Quota is not set: {0}".format(res)) + LOG.debug("Quota is not set: {0}".format(res)) return None, "Quota is not set: {0}".format(res) @@ -252,11 +252,11 @@ def _create_quota(client, quota, project_uuid, quota_entities, quota_uuid=None): ) try: - LOG.info("Spec sent for creating quota: {}".format(spec)) + LOG.debug("Spec sent for creating quota: {}".format(spec)) res, err = client.quotas.create(payload=spec) res = res.json() - LOG.info("Response from function call {}".format(res)) + LOG.debug("Response from function call {}".format(res)) if isinstance(res, dict) and res.get("status", {}).get("state") == "SUCCESS": return res, None @@ -349,11 +349,11 @@ def _set_quota(client, quota, project_uuid, quota_uuid, quota_entities): quota_entities=quota_entities, ) try: - LOG.info("Spec sent for set quota: {}".format(spec)) + LOG.debug("Spec sent for set quota: {}".format(spec)) res, err = client.quotas.update(payload=spec, quota_uuid=quota_uuid) - LOG.info("Response from function call {}".format(res)) + LOG.debug("Response from function call {}".format(res)) if not err: return res, None @@ -398,3 +398,46 @@ def set_quota_at_project(client, quota, project_uuid, quota_uuid, quota_entities quota=quota, quota_entities=quota_entities, ) + + +def read_quota_resources(client, project_name, quota_entities): + """ + This routine returns quota resources at project level + Args: + project_name(str): Valid project name + quota_entities(dict): Contains quota entity (project) + Returns: + quota_resources(dict): Details of quota, if fetched, or empty dictionary + """ + quota_resources = {} + quota_uuid, _ = get_quota_uuid_at_project( + client=client, quota_entities=quota_entities + ) + if not quota_uuid: + LOG.info( + "Quota 
is either disabled or not set for project {}".format(project_name) + ) + else: + res, err = _get_quota(client=client, quota_entities=quota_entities) + LOG.debug("Quota API response: res:{}, err:{}".format(res, err)) + if res and res.get("entities") and len(res["entities"]): + state = ( + res["entities"][0] + .get("status", {}) + .get("resources", {}) + .get("state", {}) + ) + + # Returning empty quota_resources if quota is disabled + if state == QUOTA.STATE.DISABLED: + LOG.info("Quota is disabled for project {}".format(project_name)) + else: + quota_resources = ( + res["entities"][0] + .get("status", {}) + .get("resources", {}) + .get("data", {}) + ) + LOG.debug("Quota resources read: {} ".format(quota_resources)) + + return quota_resources diff --git a/calm/dsl/builtins/models/job.py b/calm/dsl/builtins/models/job.py index a53ad0c2..9b96118b 100644 --- a/calm/dsl/builtins/models/job.py +++ b/calm/dsl/builtins/models/job.py @@ -135,10 +135,11 @@ def _create_recurring_job_schedule_payload( payload = { "schedule": schedule, - "expiry_time": str(expiry_time), "start_time": str(start_time), "time_zone": str(time_zone), } + if expiry_time: + payload["expiry_time"] = str(expiry_time) return _jobschedule_payload(**payload) @@ -394,7 +395,12 @@ def set_one_time_schedule_info(start_time, time_zone="UTC"): return _create_one_time_job_schedule_payload(seconds_since_epoch, time_zone) -def set_recurring_schedule_info(schedule, start_time, expiry_time, time_zone="UTC"): +def set_recurring_schedule_info( + schedule, start_time, expiry_time=None, time_zone="UTC" +): + """ + To set no expiration for recurring job skip passing expiry_time parameter. + """ # Get User timezone user_tz = ZoneInfo(time_zone) # Convert datetime string to datetime object @@ -412,17 +418,20 @@ def set_recurring_schedule_info(schedule, start_time, expiry_time, time_zone="UT # Convert to Epoch seconds_since_epoch_start_time = int(datetime_obj_with_tz.timestamp()) - datetime_obj = datetime.strptime(expiry_time, "%Y-%m-%d %H:%M:%S") - datetime_obj_with_tz = datetime( - datetime_obj.year, - datetime_obj.month, - datetime_obj.day, - datetime_obj.hour, - datetime_obj.minute, - datetime_obj.second, - tzinfo=user_tz, - ) - seconds_since_epoch_expiry_time = int(datetime_obj_with_tz.timestamp()) + if expiry_time: + datetime_obj = datetime.strptime(expiry_time, "%Y-%m-%d %H:%M:%S") + datetime_obj_with_tz = datetime( + datetime_obj.year, + datetime_obj.month, + datetime_obj.day, + datetime_obj.hour, + datetime_obj.minute, + datetime_obj.second, + tzinfo=user_tz, + ) + seconds_since_epoch_expiry_time = int(datetime_obj_with_tz.timestamp()) + else: + seconds_since_epoch_expiry_time = None return _create_recurring_job_schedule_payload( schedule, diff --git a/calm/dsl/builtins/models/profile.py b/calm/dsl/builtins/models/profile.py index 6bcfc4f9..6eee28aa 100644 --- a/calm/dsl/builtins/models/profile.py +++ b/calm/dsl/builtins/models/profile.py @@ -10,6 +10,7 @@ from .config_spec import SnapshotConfigSpecType, RestoreConfigSpecType from calm.dsl.log import get_logging_handle from .config_spec import PatchConfigSpecType +from calm.dsl.constants import PROVIDER, CONFIG_TYPE LOG = get_logging_handle(__name__) @@ -41,11 +42,11 @@ def pre_decompile(mcls, cdict, context, prefix=""): # Dont support decompilation for other providers configs = {"snapshot_config_list": [], "restore_config_list": []} for _config in cdict.get("snapshot_config_list", []): - if _config.get("type") == "AHV_SNAPSHOT": + if _config.get("type") in CONFIG_TYPE.SNAPSHOT.TYPE: 
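Returning to the job.py hunk above: expiry_time may now be omitted to schedule a recurring job that never expires. A minimal usage sketch, with example cron string and timestamp values:

# Runs every day at 23:50 UTC with no expiry. Omitting expiry_time leaves
# seconds_since_epoch_expiry_time as None, and the payload then drops the
# expiry_time key entirely.
schedule_info = set_recurring_schedule_info(
    schedule="50 23 * * *",
    start_time="2023-01-01 10:00:00",  # parsed with "%Y-%m-%d %H:%M:%S"
    time_zone="UTC",
)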
configs["snapshot_config_list"].append(_config) for _config in cdict.get("restore_config_list", []): - if _config.get("type") == "AHV_RESTORE": + if _config.get("type") in CONFIG_TYPE.RESTORE.TYPE: configs["restore_config_list"].append(_config) cdict.update(configs) @@ -57,11 +58,7 @@ def compile(cls): # description attribute in profile gives bp launch error: https://jira.nutanix.com/browse/CALM-19380 cdict.pop("description", None) - config_type_map = { - "restore": "AHV_RESTORE", - "snapshot": "AHV_SNAPSHOT", - "patch": "PATCH", - } + config_type_map = CONFIG_TYPE.CONFIG_TYPE_MAP config_action_prefix_map = {"restore": "Restore_", "snapshot": "Snapshot_"} action_names = list(map(lambda x: x.name, cdict["action_list"])) @@ -91,7 +88,13 @@ def set_config_type_based_on_target(config, config_type): cdict["deployment_create_list"][0] ) deployment = config.attrs_list[0]["target_any_local_reference"].__self__ - if deployment.substrate.__self__.provider_type == "AHV_VM": + if deployment.substrate.__self__.provider_type == PROVIDER.TYPE.AHV: + if config_type != "patch": + config_type = PROVIDER.TYPE.AHV + "_" + config_type + config.type = config_type_map[config_type] + elif deployment.substrate.__self__.provider_type == PROVIDER.TYPE.VMWARE: + if config_type != "patch": + config_type = PROVIDER.TYPE.VMWARE + "_" + config_type config.type = config_type_map[config_type] else: LOG.error( diff --git a/calm/dsl/builtins/models/project.py b/calm/dsl/builtins/models/project.py index 3c9fee89..79b5c8db 100644 --- a/calm/dsl/builtins/models/project.py +++ b/calm/dsl/builtins/models/project.py @@ -3,8 +3,20 @@ from calm.dsl.providers.base import get_provider from .entity import EntityType + +from calm.dsl.api import get_api_client + from calm.dsl.log import get_logging_handle -from calm.dsl.store import Version +from calm.dsl.store import Version, Cache + +from calm.dsl.builtins.models.providers import Provider +from calm.dsl.builtins.models.calm_ref import Ref + +from calm.dsl.constants import CACHE, ACCOUNT + +from calm.dsl.providers import get_provider + +from calm.dsl.builtins.models.helper.quotas import _get_quota LOG = get_logging_handle(__name__) @@ -17,6 +29,7 @@ class ProjectType(EntityType): __openapi_type__ = "project" def compile(cls): + cdict = super().compile() cdict["account_reference_list"] = [] @@ -170,6 +183,115 @@ def compile(cls): cdict.pop("default_subnet_reference", None) return cdict + @classmethod + def pre_decompile(mcls, cdict, context, prefix=""): + cdict = super().pre_decompile(cdict, context, prefix=prefix) + + project_uuid = cdict["metadata"]["uuid"] + cdict = cdict["status"]["resources"] + + if "__name__" in cdict: + cdict["__name__"] = "{}{}".format(prefix, cdict["__name__"]) + + accs = cdict["account_reference_list"] + + subnet_cache_data = [ + Cache.get_entity_data_using_uuid(CACHE.ENTITY.AHV_SUBNET, sub["uuid"]) + for sub in cdict["subnet_reference_list"] + ] + + cluster_cache_data = [ + Cache.get_entity_data_using_uuid(CACHE.ENTITY.AHV_CLUSTER, cluster["uuid"]) + for cluster in cdict["cluster_reference_list"] + ] + + vpcs_cache_data = [ + Cache.get_entity_data_using_uuid(CACHE.ENTITY.AHV_VPC, vpc["uuid"]) + for vpc in cdict["vpc_reference_list"] + ] + + quota_vcpus = 0 + quota_disk = 0 + quota_memory = 0 + + _quotas = None + + providers_list = [] + for _acc in accs: + _provider_data = {} + account_cache_data = Cache.get_entity_data_using_uuid( + entity_type=CACHE.ENTITY.ACCOUNT, uuid=_acc["uuid"] + ) + + _provider_data["account_reference"] = { + "kind": "account", + 
"uuid": account_cache_data["uuid"], + } + + _provider_data["type"] = account_cache_data["provider_type"] + + subnets = [] + cluster_uuids = [] + cluster_names = [] + for subnet in subnet_cache_data: + if subnet["account_uuid"] == account_cache_data["uuid"]: + _subnet = {"kind": "subnet", "uuid": subnet["uuid"]} + subnets.append(_subnet) + cluster_uuids.append(subnet.get("cluster_uuid", "")) + + _provider_data["subnet_references"] = subnets + + if ( + account_cache_data["provider_type"] == ACCOUNT.TYPE.VMWARE + or account_cache_data["provider_type"] == ACCOUNT.TYPE.AHV + ): + + if account_cache_data["provider_type"] == ACCOUNT.TYPE.VMWARE: + vmware_provider = get_provider("VMWARE_VM") + vmware_provider_obj = vmware_provider.get_api_obj() + cluster_names = vmware_provider_obj.clusters(_acc["uuid"]) + clusters = cluster_names + + if account_cache_data["provider_type"] == ACCOUNT.TYPE.AHV: + clusters = cluster_uuids + + for cluster in clusters: + quota_entities = { + "project": project_uuid, + "account": _acc["uuid"], + "cluster": cluster, + } + + client = get_api_client() + quota = _get_quota(client, quota_entities) + + if len(quota) != 0 and len(quota[0]["entities"]) != 0: + quota_vcpus += quota[0]["entities"][0]["status"]["resources"][ + "data" + ]["vcpu"] + quota_disk_ = quota[0]["entities"][0]["status"]["resources"][ + "data" + ]["disk"] + quota_disk += int(quota_disk_ / 1073741824) + quota_memory_ = quota[0]["entities"][0]["status"]["resources"][ + "data" + ]["memory"] + quota_memory += int(quota_memory_ / 1073741824) + + providers_list.append(_provider_data) + + cdict["provider_list"] = providers_list + + if quota_disk != 0 and quota_memory != 0 and quota_vcpus != 0: + _quotas = { + "VCPUS": quota_vcpus, + "STORAGE": quota_disk, + "MEMORY": quota_memory, + } + cdict["quotas"] = _quotas + + return cdict + def project(**kwargs): name = kwargs.get("name", None) diff --git a/calm/dsl/builtins/models/providers.py b/calm/dsl/builtins/models/providers.py index c08a31cd..f48babfe 100644 --- a/calm/dsl/builtins/models/providers.py +++ b/calm/dsl/builtins/models/providers.py @@ -11,6 +11,22 @@ class AccountProviderType(EntityType): __schema_name__ = "AccountProvider" __openapi_type__ = "app_account_provider" + @classmethod + def pre_decompile(mcls, cdict, context=[], prefix=""): + + for _i in cdict.get("subnet_references", []): + _i["kind"] = "subnet" + cdict["subnet_reference_list"] = cdict.pop("subnet_references", []) + + for _i in cdict.get("cluster_references", []): + _i["kind"] = "cluster" + cdict["cluster_reference_list"] = cdict.pop("cluster_references", []) + + if cdict.get("default_subnet_reference", {}): + cdict["default_subnet_reference"]["kind"] = "subnet" + + return cdict + class AccountProviderValidator(PropertyValidator, openapi_type="app_account_provider"): __default__ = None diff --git a/calm/dsl/builtins/models/schemas/config_attrs.yaml.jinja2 b/calm/dsl/builtins/models/schemas/config_attrs.yaml.jinja2 index e546dc83..31178cee 100644 --- a/calm/dsl/builtins/models/schemas/config_attrs.yaml.jinja2 +++ b/calm/dsl/builtins/models/schemas/config_attrs.yaml.jinja2 @@ -98,6 +98,11 @@ properties: additionalProperties: true type: object x-calm-dsl-type: dict + action_list: + x-calm-dsl-display-name: actions + type: array + items: + $ref: '#/components/schemas/Action' {%- endmacro %} {% macro AhvDiskRulesetSchema() -%} diff --git a/calm/dsl/builtins/models/schemas/config_spec.yaml.jinja2 b/calm/dsl/builtins/models/schemas/config_spec.yaml.jinja2 index fcc6b263..faa87eeb 100644 --- 
a/calm/dsl/builtins/models/schemas/config_spec.yaml.jinja2 +++ b/calm/dsl/builtins/models/schemas/config_spec.yaml.jinja2 @@ -1,3 +1,5 @@ +{% import "calm_ref.yaml.jinja2" as calm_ref %} + {% macro ConfigSpec() -%} title: ConfigSpec type: object @@ -29,6 +31,10 @@ properties: type: object additionalProperties: true x-calm-dsl-type: dict + policy_reference: + x-calm-dsl-display-name: policy + {{ calm_ref.CalmRef() | indent(4) }} + {%- endmacro %} diff --git a/calm/dsl/builtins/models/schemas/simple_blueprint.yaml.jinja2 b/calm/dsl/builtins/models/schemas/simple_blueprint.yaml.jinja2 index 090cdd1a..1b041271 100644 --- a/calm/dsl/builtins/models/schemas/simple_blueprint.yaml.jinja2 +++ b/calm/dsl/builtins/models/schemas/simple_blueprint.yaml.jinja2 @@ -12,6 +12,12 @@ properties: type: string maxLength: 1000 default: '' + package_definition_list: + x-calm-dsl-display-name: packages + description: Package definitions for blueprint. + type: array + items: + $ref: '#/components/schemas/Package' environment_reference_list: type: array x-calm-dsl-min-version: 3.2.0 diff --git a/calm/dsl/builtins/models/simple_blueprint.py b/calm/dsl/builtins/models/simple_blueprint.py index 544d5ab6..ef300f1f 100644 --- a/calm/dsl/builtins/models/simple_blueprint.py +++ b/calm/dsl/builtins/models/simple_blueprint.py @@ -173,6 +173,9 @@ def make_bp_dict(cls, categories=None): app_profile["deployment_create_list"].append(ddict) + for pkg in cdict["package_definition_list"]: + package_definition_list.append(pkg) + for pdep in pod_deployments: pod_dict = pdep.extract_deployment() for sd in pod_dict["service_definition_list"]: diff --git a/calm/dsl/builtins/models/substrate.py b/calm/dsl/builtins/models/substrate.py index 49788d8a..52365e32 100644 --- a/calm/dsl/builtins/models/substrate.py +++ b/calm/dsl/builtins/models/substrate.py @@ -11,9 +11,12 @@ from .helper import common as common_helper from calm.dsl.config import get_context -from calm.dsl.constants import CACHE, PROVIDER_ACCOUNT_TYPE_MAP +from calm.dsl.constants import CACHE, PROVIDER_ACCOUNT_TYPE_MAP, SUBSTRATE from calm.dsl.store import Cache from calm.dsl.store import Version +from .task import dag, vm_power_action_task, check_login +from .action import runbook_create, _action_create +from calm.dsl.constants import SUBSTRATE, READINESS_PROBE from calm.dsl.log import get_logging_handle LOG = get_logging_handle(__name__) @@ -41,9 +44,14 @@ class SubstrateType(EntityType): ALLOWED_FRAGMENT_ACTIONS = { "__pre_create__": "pre_action_create", + "__post_create__": "post_action_create", "__post_delete__": "post_action_delete", } + ALLOWED_SYSTEM_ACTIONS = SUBSTRATE.VM_POWER_ACTIONS + + ALLOWED_SYSTEM_ACTIONS_REV = SUBSTRATE.VM_POWER_ACTIONS_REV + def get_profile_environment(cls): """returns the profile environment, if substrate has been defined in blueprint file""" @@ -55,7 +63,8 @@ def get_profile_environment(cls): if cls_deployment.substrate.name != str(cls): continue - environment = getattr(cls_profile, "environment", {}) + profile_envs = getattr(cls_profile, "environments", []) + environment = profile_envs[0].get_dict() if profile_envs else dict() if environment: LOG.debug( "Found environment {} associated to app-profile {}".format( @@ -113,21 +122,7 @@ def get_referenced_account_uuid(cls): # If substrate is defined in blueprint file cls_bp = common_helper._walk_to_parent_with_given_type(cls, "BlueprintType") if cls_bp: - environment = {} - for cls_profile in cls_bp.profiles: - for cls_deployment in cls_profile.deployments: - if cls_deployment.substrate.name 
!= str(cls): - continue - - profile_envs = getattr(cls_profile, "environments", []) - environment = profile_envs[0].get_dict() if profile_envs else dict() - if environment: - LOG.debug( - "Found environment {} associated to app-profile {}".format( - environment.get("name"), cls_profile - ) - ) - break + environment = cls.get_profile_environment() # If environment is given at profile level if environment: @@ -554,10 +549,30 @@ def pre_decompile(mcls, cdict, context=[], prefix=""): if "__name__" in cdict: cdict["__name__"] = "{}{}".format(prefix, cdict["__name__"]) + # Removing system defined vm power actions as they are not needed in decompile + deleted_actions = [] + for action in cdict["action_list"]: + if action["name"] in list(SUBSTRATE.VM_POWER_ACTIONS_REV.keys()): + deleted_actions.append(action) + + for action in deleted_actions: + cdict["action_list"].remove(action) + return cdict @classmethod def decompile(mcls, cdict, context=[], prefix=""): + def make_empty_runbook(action_name): + suffix = getattr(cls, "name", "") or cls.__name__ + user_dag = dag( + name="DAG_Task_for_Service_{}_{}".format(suffix, action_name), + target=cls.get_task_target(), + ) + return runbook_create( + name="Runbook_for_Service_{}_{}".format(suffix, action_name), + main_task_local_reference=user_dag.get_ref(), + tasks=[user_dag], + ) if cdict["type"] == "K8S_POD": LOG.error("Decompilation support for pod deployments is not available.") @@ -572,11 +587,122 @@ def decompile(mcls, cdict, context=[], prefix=""): cls.provider_spec = vm_cls + # Checking if power on actions are included in decompiled blueprint + compulsory_actions = list(cls.ALLOWED_SYSTEM_ACTIONS.values()) + for action_obj in cls.actions: + if action_obj.__name__ in compulsory_actions: + compulsory_actions.remove(action_obj.__name__) + + # Adding power on actions (if absent) to include in decompiled blueprint + # necessary to create default vm power action after decompile. + for action_name in compulsory_actions: + user_action = _action_create( + **{ + "name": action_name, + "description": "", + "critical": True, + "type": "system", + "runbook": make_empty_runbook(action_name), + } + ) + cls.actions.append(user_action) + return cls def get_task_target(cls): return cls.get_ref() + def get_service_target(cls): + """Deployment couples substrate with services. 
+ This helper finds target service for a substrate""" + + # profiles clone same services therefore reading deployment from first profile + deployments = cls.__parent__.profiles[0].deployments + for deployment in deployments: + # finding which deployment couples this substrate with service + if cls is deployment.substrate.__self__: + # getting target service for this substrate + service_target = deployment.get_service_ref() + # returning target service for this substrate + return service_target + + # if no service found return None + return None + + def create_power_action_runbook(cls, action_name): + + # Taking default readiness probe as system vm power actions have default check login only + readiness_probe_dict = readiness_probe().compile() + provider = "AHV_VM" if not hasattr(cls, "provider_type") else cls.provider_type + + if provider in READINESS_PROBE.ADDRESS: + readiness_probe_dict["address"] = READINESS_PROBE.ADDRESS[provider] + else: + raise NotImplementedError( + "vm power action not implemented for {}".format(provider) + ) + + suffix = getattr(cls, "name", "") or cls.__name__ + tasks = [] + edges = [] + if action_name == SUBSTRATE.POWER_ON: + tasks.append( + vm_power_action_task( + action_name=action_name, + provider=provider, + target=cls.get_task_target(), + ) + ) + tasks.append( + check_login( + readiness_probe=readiness_probe_dict, target=cls.get_task_target() + ) + ) + edges.append((tasks[0].get_ref(), tasks[1].get_ref())) + elif action_name == SUBSTRATE.POWER_OFF: + tasks.append( + vm_power_action_task( + action_name=action_name, + provider=provider, + target=cls.get_task_target(), + ) + ) + elif action_name == SUBSTRATE.RESTART: + tasks.append( + vm_power_action_task( + action_name=action_name, + provider=provider, + target=cls.get_task_target(), + ) + ) + tasks.append( + check_login( + readiness_probe=readiness_probe_dict, target=cls.get_task_target() + ) + ) + edges.append((tasks[0].get_ref(), tasks[1].get_ref())) + elif action_name == SUBSTRATE.CHECK_LOGIN: + tasks.append( + check_login( + readiness_probe=readiness_probe_dict, target=cls.get_task_target() + ) + ) + else: + LOG.error("Unsupported vm action supplied {}".format(action_name)) + + user_dag = dag( + name="DAG_Task_for_Substrate_{}_{}".format(suffix, action_name), + target=cls.get_task_target(), + child_tasks=tasks, + edges=edges, + ) + tasks.insert(0, user_dag) + return runbook_create( + name="Runbook_for_Substrate_{}_{}".format(suffix, action_name), + main_task_local_reference=user_dag.get_ref(), + tasks=tasks, + ) + class SubstrateValidator(PropertyValidator, openapi_type="app_substrate"): __default__ = None diff --git a/calm/dsl/builtins/models/task.py b/calm/dsl/builtins/models/task.py index 49104eef..5595b14d 100644 --- a/calm/dsl/builtins/models/task.py +++ b/calm/dsl/builtins/models/task.py @@ -11,6 +11,7 @@ from .task_input import TaskInputType from .variable import CalmVariable from .helper import common as common_helper +from .utils import is_compile_secrets from calm.dsl.log import get_logging_handle from calm.dsl.store import Cache @@ -22,7 +23,7 @@ Tag, ) from calm.dsl.builtins.models.constants import NutanixDB as NutanixDBConst -from calm.dsl.constants import CACHE +from calm.dsl.constants import CACHE, SUBSTRATE, PROVIDER, TASKS LOG = get_logging_handle(__name__) @@ -434,6 +435,92 @@ def meta(name=None, child_tasks=None, edges=None, target=None): return _task_create(**kwargs) +def vm_power_action_task(name=None, action_name=None, provider=None, target=None): + """ + Create a VM POWER 
ON/OFF/RESTART task + Args: + name (str): Name for the task + provider (str): Type of provider + action_name (str): Valid power action name + target (Ref): Target entity reference + Returns: + (Task): VM power action task + """ + if action_name not in list(SUBSTRATE.VM_POWER_ACTIONS_REV.keys()): + LOG.error( + "{} is not a valid vm power action; valid actions are {}".format( + action_name, list(SUBSTRATE.VM_POWER_ACTIONS_REV.keys()) + ) + ) + sys.exit(-1) + + # name follows UI naming convention for runbooks + kwargs = { + "name": name + or "SYS_GEN__{}_Operation_{}_".format( + PROVIDER.NAME[provider], SUBSTRATE.POWER_ACTION_CAMEL_CASE[action_name] + ) + + str(uuid.uuid4())[:8], + "type": TASKS.TASK_TYPES.VM_OPERATION[provider], + "attrs": { + "operation_type": action_name, + "type": TASKS.TASK_TYPES.GENERIC_OPERATION, + }, + } + + if target: + kwargs["target_any_local_reference"] = _get_target_ref(target) + + return _task_create(**kwargs) + + +def check_login(name=None, readiness_probe=None, target=None): + """ + Create a VM CHECK LOGIN task. + Args: + name (str): Name for the task + readiness_probe (dict): Compiled readiness probe data + target (Ref): Target entity reference + Returns: + (Task): check login task + """ + + if not target: + LOG.error("Target not supplied") + if not readiness_probe: + LOG.error("Readiness probe not supplied") + + substrate_name = "Substrate" # Default substrate name + if isinstance(target, EntityType): + substrate_name = target.name or target.__name__ + elif isinstance(target, RefType): + substrate_name = target.__self__.name or target.__self__.__name__ + else: + raise ValueError("Target is not of ref or entity type") + + # This follows UI naming convention for runbooks + name = ( + name + or "SYS_GEN__check_login_for_" + substrate_name + "_" + str(uuid.uuid4())[:8] + ) + kwargs = { + "name": name, + "type": TASKS.TASK_TYPES.CHECK_LOGIN, + "attrs": { + "retries": readiness_probe["retries"], + "dial_timeout": "", + "timeout": readiness_probe["delay_secs"], + "address": readiness_probe["address"], + "type": TASKS.TASK_TYPES.CHECK_LOGIN, + "sleep_time": "", + }, + } + + kwargs["target_any_local_reference"] = _get_target_ref(target) + + return _task_create(**kwargs) + + def exec_task_ssh( script=None, filename=None, @@ -1371,6 +1458,7 @@ def http_task( """ auth_obj = {"auth_type": "none"} + # Auth object for basic auth with credential. if cred is not None: cred_ref = _get_target_ref(cred) if getattr(cred_ref, "kind", None) != "app_credential": @@ -1385,6 +1473,7 @@ def http_task( "credential_local_reference": cred_ref, } + # Auth object for basic auth elif credential is not None: if getattr(credential, "__kind__", None) != "app_credential": raise ValueError( @@ -1393,16 +1482,14 @@ def http_task( + " should be a Credential object of PASSWORD type" ) - # TODO: Auth should be changed to basic auth with credential. - # This is dependent on https://jira.nutanix.com/browse/CALM-12149 - # We could also possibly check calm server version to switch between - # the two auth mechanisms since basic auth will be deprecated. 
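A sketch of how the two task helpers above combine, mirroring the create_power_action_runbook flow added to substrate.py in this diff (MySubstrate is an assumed substrate class; readiness_probe() is the existing builtin used there):

# Power-off needs only the operation task; power-on and restart append a
# check_login task plus an edge so login is verified after the VM is up.
power_task = vm_power_action_task(
    action_name=SUBSTRATE.POWER_ON,        # must be a VM_POWER_ACTIONS_REV key
    provider="AHV_VM",
    target=MySubstrate.get_task_target(),
)
login_task = check_login(
    readiness_probe=readiness_probe().compile(),  # default probe, as in substrate.py
    target=MySubstrate.get_task_target(),
)
edges = [(power_task.get_ref(), login_task.get_ref())]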
auth_obj = { "auth_type": "basic", "basic_auth": { "username": credential.username, "password": { - "value": credential.secret.get("value"), + "value": credential.secret.get("value") + if is_compile_secrets() + else "", "attrs": {"is_secret_modified": True}, }, }, diff --git a/calm/dsl/builtins/models/utils.py b/calm/dsl/builtins/models/utils.py index 4f78966f..9f161290 100644 --- a/calm/dsl/builtins/models/utils.py +++ b/calm/dsl/builtins/models/utils.py @@ -9,9 +9,10 @@ from calm.dsl.config import get_context LOG = get_logging_handle(__name__) +COMPILE_WITH_SECRETS = True -def read_file(filename, depth=1): +def read_file(filename, depth=1, default=None): """reads the file""" if not filename: @@ -24,8 +25,18 @@ def read_file(filename, depth=1): ) if not file_exists(file_path): - LOG.debug("file {} not found at location {}".format(filename, file_path)) - raise ValueError("file {} not found".format(filename)) + if default is None: + raise ValueError( + "file {} not found at location {}, no default value provided".format( + filename, file_path + ) + ) + LOG.warning( + "file {} not found at location {}, using default value {}".format( + filename, file_path, default + ) + ) + return default with open(file_path, "r") as data: return data.read() @@ -107,7 +118,7 @@ def file_exists(file_path): return os.path.exists(file_path) -def read_local_file(filename): +def read_local_file(filename, default=None): file_path = os.path.join(".local", filename) # Checking if file exists @@ -120,7 +131,9 @@ def read_local_file(filename): ContextObj = get_context() init_data = ContextObj.get_init_config() file_path = os.path.join(init_data["LOCAL_DIR"]["location"], filename) - return read_file(file_path, 0).rstrip() # To remove \n, use rstrip + return read_file( + file_path, 0, default=default + ).rstrip() # To remove \n, use rstrip return read_file(file_path, depth=2) @@ -152,3 +165,12 @@ def get_valid_identifier(data=None): data = "_{}".format(data) return data + + +def set_compile_secrets_flag(compile_with_secrets): + global COMPILE_WITH_SECRETS + COMPILE_WITH_SECRETS = compile_with_secrets + + +def is_compile_secrets(): + return COMPILE_WITH_SECRETS diff --git a/calm/dsl/builtins/models/variable.py b/calm/dsl/builtins/models/variable.py index e2073c60..108fc7ec 100644 --- a/calm/dsl/builtins/models/variable.py +++ b/calm/dsl/builtins/models/variable.py @@ -4,7 +4,7 @@ from .validator import PropertyValidator from .task_input import _task_input from .helper import common as common_helper - +from .utils import is_compile_secrets # Variable @@ -39,6 +39,9 @@ def compile(cls): if not cdict.get("editables", {}): del cdict["editables"] + if cdict["type"] == "SECRET" and not is_compile_secrets(): + cdict["value"] = "" + if cdict.get("options", None): options = cdict["options"] # Only EScript/HTTP request info needed for dynamically fetching options diff --git a/calm/dsl/builtins/models/vmware_account.py b/calm/dsl/builtins/models/vmware_account.py index 1afbde1c..ff11a34c 100644 --- a/calm/dsl/builtins/models/vmware_account.py +++ b/calm/dsl/builtins/models/vmware_account.py @@ -5,6 +5,7 @@ from calm.dsl.log import get_logging_handle from .validator import PropertyValidator from calm.dsl.constants import ENTITY +from .utils import is_compile_secrets LOG = get_logging_handle(__name__) @@ -55,7 +56,10 @@ def compile(cls): cdict = super().compile() password = cdict.pop("password", "") - cdict["password"] = {"attrs": {"is_secret_modified": True}, "value": password} + cdict["password"] = { + "attrs": 
{"is_secret_modified": True}, + "value": password if is_compile_secrets() else "", + } price_items = cdict.pop("price_items", {}) if price_items: diff --git a/calm/dsl/cli/account_commands.py b/calm/dsl/cli/account_commands.py index 24e7047f..c0088818 100644 --- a/calm/dsl/cli/account_commands.py +++ b/calm/dsl/cli/account_commands.py @@ -46,7 +46,16 @@ multiple=True, help="Search for accounts of specific provider", type=click.Choice( - ["aws", "k8s", "vmware", "azure", "gcp", "nutanix", "custom_provider"] + [ + "aws", + "k8s", + "vmware", + "azure", + "gcp", + "nutanix", + "nutanix_pc", + "custom_provider", + ] ), ) def _get_accounts(name, filter_by, limit, offset, quiet, all_items, account_type): @@ -133,7 +142,7 @@ def create_account_command(account_file, name, force, auto_verify): return if auto_verify: - verify_account(account_data["account_name"]) + verify_account(account_data["name"]) @compile.command("account") diff --git a/calm/dsl/cli/app_commands.py b/calm/dsl/cli/app_commands.py index b8c2ef28..609673b1 100644 --- a/calm/dsl/cli/app_commands.py +++ b/calm/dsl/cli/app_commands.py @@ -50,6 +50,13 @@ type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True), help="Path of Brownfield Deployment file", ) +@click.option( + "--watch", + "-w", + is_flag=True, + default=False, + help="Watch scrolling Application state", +) @click.option( "--name", "-n", "app_name", default=None, help="Application name (Optional)" ) @@ -72,6 +79,7 @@ def _create_app( brownfield_deployment_file, ignore_runtime_variables, launch_params, + watch, ): """Creates an application. @@ -87,6 +95,7 @@ def _create_app( patch_editables=not ignore_runtime_variables, launch_params=launch_params, brownfield_deployment_file=brownfield_deployment_file, + watch=watch, ) diff --git a/calm/dsl/cli/apps.py b/calm/dsl/cli/apps.py index 4fa4affd..b673c110 100644 --- a/calm/dsl/cli/apps.py +++ b/calm/dsl/cli/apps.py @@ -5,6 +5,7 @@ import re import uuid from json import JSONEncoder +from datetime import datetime import arrow import click @@ -25,6 +26,7 @@ parse_launch_params_attribute, _decompile_bp, ) +from calm.dsl.constants import CONFIG_TYPE from calm.dsl.log import get_logging_handle LOG = get_logging_handle(__name__) @@ -39,6 +41,9 @@ def get_apps(name, filter_by, limit, offset, quiet, all_items, out): filter_query = get_name_query([name]) if filter_by: filter_query = filter_query + ";(" + filter_by + ")" + # deleted state filter works without paranthesis, using this as workaround until CALM-43342 is resolved + if filter_by == "_state==deleted": + filter_query = filter_query + ";" + filter_by if all_items: filter_query += get_states_filter(APPLICATION.STATES, state_key="_state") if filter_query.startswith(";"): @@ -212,12 +217,205 @@ def describe_app(app_name, out): deployment_list = app["status"]["resources"]["deployment_list"] click.echo("Deployments [{}]:".format(highlight_text((len(deployment_list))))) + for deployment in deployment_list: - click.echo( - "\t {} {}".format( - highlight_text(deployment["name"]), highlight_text(deployment["state"]) - ) + + num_services = len( + deployment.get("substrate_configuration", {}).get("element_list", {}) ) + for i in range(num_services): + exta_suffix = "" + if num_services > 1: + exta_suffix = "[" + str(i) + "]" + + temp_var = "[" + str(deployment["state"][i]) + "]" + click.echo( + "\t Service : {}{} {}".format( + highlight_text(deployment["service_list"][0]["name"]), + highlight_text(exta_suffix), + highlight_text(temp_var), + ) + ) + click.echo("\t \t VM 
Details") + click.echo("\t \t \t Configuration") + click.echo( + "\t \t \t \t {:<15} : {}".format( + "Name", + highlight_text(deployment["substrate_configuration"]["name"]), + ) + ) + click.echo( + "\t \t \t \t {:<15} : {}".format( + "IP Address", + highlight_text( + deployment["substrate_configuration"]["element_list"][i][ + "address" + ] + ), + ) + ) + click.echo( + "\t \t \t \t {:<15} : {}".format( + "vCPUs", + highlight_text( + deployment["substrate_configuration"]["element_list"][i][ + "create_spec" + ]["resources"]["num_vcpus_per_socket"] + ), + ) + ) + click.echo( + "\t \t \t \t {:<15} : {}".format( + "Cores", + highlight_text( + deployment["substrate_configuration"]["element_list"][i][ + "create_spec" + ]["resources"]["num_sockets"] + ), + ) + ) + click.echo( + "\t \t \t \t {:<15} : {} {}".format( + "Memory", + highlight_text( + deployment["substrate_configuration"]["element_list"][i][ + "create_spec" + ]["resources"]["memory_size_mib"] + / 1024.0 + ), + highlight_text("GB"), + ) + ) + click.echo( + "\t \t \t \t {:<15} : {}".format( + "VM UUID", + highlight_text( + deployment["substrate_configuration"]["element_list"][i][ + "instance_id" + ] + ), + ) + ) + click.echo( + "\t \t \t \t {:<15} : {}".format( + "Image", + highlight_text( + deployment["substrate_configuration"]["create_spec"][ + "resources" + ]["disk_list"][0]["data_source_reference"]["uuid"] + ), + ) + ) + + click.echo("\t \t \t Network Adapters (NICs)") + if ( + len( + deployment["substrate_configuration"]["element_list"][i][ + "create_spec" + ]["resources"]["nic_list"] + ) + > 0 + ): + for nic in deployment["substrate_configuration"]["element_list"][i][ + "create_spec" + ]["resources"]["nic_list"]: + click.echo( + "\t \t \t \t {:<15} : {}".format( + "Type", highlight_text(nic["nic_type"]) + ) + ) + for variable in deployment["substrate_configuration"]["element_list"][ + i + ]["variable_list"]: + if variable["name"] == "mac_address": + click.echo( + "\t \t \t \t {:<15} : {}".format( + "MAC Address", highlight_text(variable["value"]) + ) + ) + for nic in deployment["substrate_configuration"]["element_list"][i][ + "create_spec" + ]["resources"]["nic_list"]: + click.echo( + "\t \t \t \t {:<15} : {}".format( + "Subnet", highlight_text(nic["subnet_reference"]["name"]) + ) + ) + else: + click.echo("\t \t \t \t {:<15} : ".format("Type")) + click.echo("\t \t \t \t {:<15} : ".format("MAC Address")) + click.echo("\t \t \t \t {:<15} : ".format("Subnet")) + + if ( + deployment["substrate_configuration"]["element_list"][i]["create_spec"][ + "cluster_reference" + ] + != None + ): + click.echo("\t \t \t Cluster Information") + click.echo( + "\t \t \t \t {:<15} : {}".format( + "Cluster UUID", + highlight_text( + deployment["substrate_configuration"]["element_list"][i][ + "create_spec" + ]["cluster_reference"]["uuid"] + ), + ) + ) + click.echo( + "\t \t \t \t {:<15} : {}".format( + "Cluster Name", + highlight_text( + deployment["substrate_configuration"]["element_list"][i][ + "create_spec" + ]["cluster_reference"]["name"] + ), + ) + ) + else: + click.echo("\t \t \t Cluster Information") + click.echo("\t \t \t \t {:<15} : ".format("Cluster UUID")) + click.echo("\t \t \t \t {:<15} : ".format("Cluster Name")) + + categories = deployment["substrate_configuration"]["element_list"][i][ + "create_spec" + ]["categories"] + if len(categories) > 0: + click.echo("\t \t \t Categories") + for key, value in categories.items(): + click.echo( + "\t \t \t \t {:<15} : {}".format(key, highlight_text(value)) + ) + + if ( + len(deployment["service_list"]) > 
0 + and len( + deployment["service_list"][0]["element_list"][i]["variable_list"] + ) + > 0 + ): + click.echo("\t \t Variables") + for variable in deployment["service_list"][0]["element_list"][i][ + "variable_list" + ]: + if ( + variable["type"] == "LOCAL" or variable["type"] == "HTTP_LOCAL" + ) and variable["value"] != "": + click.echo( + "\t \t \t \t {:<15} : {}".format( + variable["name"], highlight_text(variable["value"]) + ) + ) + elif variable["type"] == "SECRET": + click.echo( + "\t \t \t \t {:<15} : {}".format( + variable["name"], highlight_text("********") + ) + ) + else: + click.echo("\t \t \t \t {:<15}".format(variable["name"])) + click.echo(" ") action_list = app["status"]["resources"]["action_list"] click.echo("App Actions [{}]:".format(highlight_text(len(action_list)))) @@ -269,9 +467,9 @@ def create_app( profile_name=None, patch_editables=True, launch_params=None, + watch=False, ): client = get_api_client() - # Compile blueprint bp_payload = compile_blueprint( bp_file, brownfield_deployment_file=brownfield_deployment_file @@ -318,7 +516,8 @@ def create_app( # Creating an app LOG.info("Creating app {}".format(app_name)) - launch_blueprint_simple( + # if app_launch_state=1 implies blueprint launch is successful, app_launch_state=0 implies blueprint launch has failed + app_launch_state = launch_blueprint_simple( blueprint_name=bp_name, app_name=app_name, profile_name=profile_name, @@ -334,6 +533,16 @@ def create_app( if err: raise Exception("[{}] - {}".format(err["code"], err["error"])) + # if app_launch_state=True that is if blueprint launch is successful then only we will enter in watch mode + if app_launch_state and watch: + + def display_action(screen): + watch_app(app_name=app_name, screen=screen, poll_interval=10) + screen.wait_for_input(10.0) + + Display.wrapper(display_action, watch=True) + LOG.info("Application with name: {} got created successfully".format(app_name)) + class RunlogNode(NodeMixin): def __init__(self, runlog, parent=None, children=None): @@ -1343,33 +1552,94 @@ def get_snapshot_name_arg(config, config_task_id): default=default_value, show_default=False, ) - return {"name": "snapshot_name", "value": val, "task_uuid": config_task_id} + action_args = [{"name": "snapshot_name", "value": val, "task_uuid": config_task_id}] + if config["type"] == CONFIG_TYPE.SNAPSHOT.VMWARE: + choices = { + 1: "1. Crash Consistent", + 2: "2. Snapshot VM Memory", + 3: "3. 
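The VMware snapshot-type prompt above maps a menu index to an optional extra action argument; a condensed, runnable version of that flow (the `task_uuid` field is omitted and `"snap-1"` is an illustrative value):

```
import sys
import click

choices = {1: "Crash Consistent", 2: "Snapshot VM Memory", 3: "Enable Snapshot Quiesce"}
extra_arg_by_choice = {
    2: {"name": "vm_memory_snapshot_enabled", "value": "true"},
    3: {"name": "snapshot_quiesce_enabled", "value": "true"},
}

for label in choices.values():
    click.echo("\t{}".format(label))
# click infers an int type from the int default, so input is coerced.
selected = click.prompt("Selected Snapshot Type", default=1, show_default=False)
if selected not in choices:
    sys.exit("Use valid choices from {}".format(list(choices)))

action_args = [{"name": "snapshot_name", "value": "snap-1"}]
if selected != 1:  # crash-consistent snapshots need no extra argument
    action_args.append(extra_arg_by_choice[selected])
print(action_args)
```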
Enable Snapshot Quiesce", + } + default_idx = 1 + click.echo("Choose from given snapshot type: ") + for choice in choices.values(): + click.echo("\t{}".format(highlight_text(repr(choice)))) + selected_val = click.prompt( + "Selected Snapshot Type [{}]".format(highlight_text(repr(default_idx))), + default=default_idx, + show_default=False, + ) + if selected_val not in choices: + LOG.error( + "Invalid value {}, not present in choices: {}".format( + selected_val, choices.keys() + ) + ) + sys.exit("Use valid choices from {}".format(choices.keys())) + + action_args_choices_map = { + 2: { + "name": "vm_memory_snapshot_enabled", + "value": "true", + "task_uuid": config_task_id, + }, + 3: { + "name": "snapshot_quiesce_enabled", + "value": "true", + "task_uuid": config_task_id, + }, + } + + # check action args of crash consistent + if selected_val != 1: + action_args.append(action_args_choices_map[selected_val]) + + return action_args -def get_recovery_point_group_arg(config, config_task_id, recovery_groups): +def get_recovery_point_group_arg(config, config_task_id, recovery_groups, config_type): choices = {} for i, rg in enumerate(recovery_groups): - choices[i + 1] = { - "label": "{}. {} [Created On: {} Expires On: {}]".format( - i + 1, - rg["status"]["name"], - time.strftime( - "%Y-%m-%d %H:%M:%S", - time.gmtime( - rg["status"]["recovery_point_info_list"][0]["creation_time"] - // 1000000 + if config_type == CONFIG_TYPE.RESTORE.AHV: + choices[i + 1] = { + "label": "{}. {} [Created On: {} Expires On: {}]".format( + i + 1, + rg["status"]["name"], + time.strftime( + "%Y-%m-%d %H:%M:%S", + time.gmtime( + rg["status"]["recovery_point_info_list"][0]["creation_time"] + // 1000000 + ), ), - ), - time.strftime( - "%Y-%m-%d %H:%M:%S", - time.gmtime( - rg["status"]["recovery_point_info_list"][0]["expiration_time"] - // 1000000 + time.strftime( + "%Y-%m-%d %H:%M:%S", + time.gmtime( + rg["status"]["recovery_point_info_list"][0][ + "expiration_time" + ] + // 1000000 + ), ), ), - ), - "uuid": rg["status"]["uuid"], - } + "uuid": rg["status"]["uuid"], + } + elif config_type == CONFIG_TYPE.RESTORE.VMWARE: + # Defining it separately for vmware because snapshots taken don't have any expiry + created_time = rg["status"]["recovery_point_info_list"][0][ + "snapshot_create_time" + ] + created_time = datetime.strptime( + created_time, "%Y-%m-%dT%H:%M:%S.%fZ" + ).replace(microsecond=0) + created_time.strftime("%Y-%m-%d %H:%M:%S") + choices[i + 1] = { + "label": "{}. {} [Created On: {}]".format( + i + 1, + rg["status"]["name"], + created_time, + ), + "uuid": rg["status"]["uuid"], + } if not choices: LOG.error( "No recovery group found. 
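One subtlety in the VMware branch above: `strftime` returns a new string rather than mutating the datetime, so its bare call is discarded; the label still renders correctly only because `str()` of a microsecond-free datetime happens to match the intended format. A small sketch with the result captured explicitly (`raw` is an illustrative `snapshot_create_time` value):

```
from datetime import datetime

raw = "2023-06-01T10:20:30.123456Z"  # illustrative snapshot_create_time
created = datetime.strptime(raw, "%Y-%m-%dT%H:%M:%S.%fZ").replace(microsecond=0)
# strftime returns a string; assign it, since it does not modify `created`.
label = created.strftime("%Y-%m-%d %H:%M:%S")
print(label)  # 2023-06-01 10:20:30
```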
Please take a snapshot before running restore action" @@ -1455,9 +1725,9 @@ def run_actions( for config in config_list if config["uuid"] == task["attrs"]["config_spec_reference"]["uuid"] ) - if config["type"] == "AHV_SNAPSHOT": - action_args.append(get_snapshot_name_arg(config, task["uuid"])) - elif config["type"] == "AHV_RESTORE": + if config["type"] in CONFIG_TYPE.SNAPSHOT.TYPE: + action_args.extend(get_snapshot_name_arg(config, task["uuid"])) + elif config["type"] in CONFIG_TYPE.RESTORE.TYPE: substrate_id = next( ( dep["substrate_configuration"]["uuid"] @@ -1475,7 +1745,7 @@ def run_actions( raise Exception("[{}] - {}".format(err["code"], err["error"])) action_args.append( get_recovery_point_group_arg( - config, task["uuid"], res.json()["entities"] + config, task["uuid"], res.json()["entities"], config["type"] ) ) @@ -1759,7 +2029,10 @@ def remove_non_escript_actions_variables(bp_payload): if "variable_list" in _e: _e["variable_list"] = get_escript_vars_in_entity(_e) - if _el == "package_definition_list" and _e.get("type", "") == "DEB": + if _el == "package_definition_list" and _e.get("type", "") in ( + "DEB", + "CUSTOM", + ): for pkg_runbook_name in ["install_runbook", "uninstall_runbook"]: ( _e["options"][pkg_runbook_name], @@ -1767,6 +2040,8 @@ def remove_non_escript_actions_variables(bp_payload): ) = get_runbook_payload_having_escript_task_vars_only( _e["options"].get(pkg_runbook_name, {}) ) + if "patch_list" in _e: + _e["patch_list"] = get_actions_having_escript_entities(_e["patch_list"]) def get_escript_tasks_in_runbook(runbook_payload): @@ -1862,6 +2137,19 @@ def describe_app_actions_to_update(app_name): elif get_escript_vars_in_entity(_action["runbook"]): runbook_containing_migratable_entities.append(runbook_uuid) + for _action in _entity.get("patch_list", []): + runbook_uuid = _action["runbook"]["uuid"] + runbook_uuid_context[runbook_uuid] = "{}.{}.Action.{}".format( + DISPLAY_MAP[_key], _entity["name"], _action["name"] + ) + dependencies[runbook_uuid] = get_runbook_dependencies( + _action["runbook"] + ) + if get_escript_tasks_in_runbook(_action["runbook"]): + runbook_containing_migratable_entities.append(runbook_uuid) + elif get_escript_vars_in_entity(_action["runbook"]): + runbook_containing_migratable_entities.append(runbook_uuid) + for _key in resources.keys(): if _key in [ "service_definition_list", @@ -1933,5 +2221,64 @@ def describe_app_actions_to_update(app_name): ) ) + # Printing migratable tasks, variables, actions in patch config + for _action in _entity.get("patch_list", []): + + runbook_uuid = _action["runbook"]["uuid"] + has_migratable_entities = ( + runbook_uuid in runbook_containing_migratable_entities + ) + + dependable_migratable_actions = [] + for _run_uuid in dependencies.get(runbook_uuid, []): + if _run_uuid in runbook_containing_migratable_entities: + dependable_migratable_actions.append( + runbook_uuid_context[_run_uuid] + ) + + if has_migratable_entities or dependable_migratable_actions: + any_action_to_be_modified = True + print("\t\t-> {}".format(highlight_text(_action["name"]))) + if has_migratable_entities: + print("\t\t Tasks:") + task_list = get_escript_tasks_in_runbook(_action["runbook"]) + migratable_task_names = [ + _task["name"] for _task in task_list + ] + + if migratable_task_names: + for _ind, _tname in enumerate(migratable_task_names): + print( + "\t\t\t{}. 
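The membership tests above rely on grouped constants whose real definitions live in `calm/dsl/constants.py` and are not shown in this diff; a plausible shape, inferred only from the usage here, would be:

```
class CONFIG_TYPE:
    """Illustrative guess at the constants used above; not the real source."""

    class SNAPSHOT:
        AHV = "AHV_SNAPSHOT"
        VMWARE = "VMWARE_SNAPSHOT"
        TYPE = [AHV, VMWARE]

    class RESTORE:
        AHV = "AHV_RESTORE"
        VMWARE = "VMWARE_RESTORE"
        TYPE = [AHV, VMWARE]

assert "AHV_SNAPSHOT" in CONFIG_TYPE.SNAPSHOT.TYPE
```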
{}".format( + _ind, highlight_text(_tname) + ) + ) + else: + print("\t\t\t No Tasks to be migrated") + + print("\t\t Variables:") + var_list = get_escript_vars_in_entity(_action["runbook"]) + migratable_var_names = [_var["name"] for _var in var_list] + if migratable_var_names: + for _ind, _tname in enumerate(migratable_var_names): + print( + "\t\t\t{}. {}".format( + _ind, highlight_text(_tname) + ) + ) + else: + print("\t\t\t No Variables to be migrated") + + if dependable_migratable_actions: + print("\t\t Dependable actions to be migrated:") + for _ind, _act_ctx in enumerate( + dependable_migratable_actions + ): + print( + "\t\t\t{}. {}".format( + _ind, highlight_text(_act_ctx) + ) + ) + if not any_action_to_be_modified: print("\t\t No actions found to be modified") diff --git a/calm/dsl/cli/bps.py b/calm/dsl/cli/bps.py index 1d8225d1..1dd9c866 100644 --- a/calm/dsl/cli/bps.py +++ b/calm/dsl/cli/bps.py @@ -34,6 +34,7 @@ from calm.dsl.decompile.file_handler import get_bp_dir from calm.dsl.decompile.bp_file_helper import decrypt_decompiled_secrets_file from calm.dsl.decompile.main import init_decompile_context +from calm.dsl.api.util import vm_power_action_target_map from .utils import ( get_name_query, @@ -51,6 +52,7 @@ from calm.dsl.constants import CACHE, DSL_CONFIG from calm.dsl.log import get_logging_handle from calm.dsl.builtins.models.calm_ref import Ref +from calm.dsl.decompile.ref_dependency import update_power_action_target_substrate LOG = get_logging_handle(__name__) @@ -574,13 +576,31 @@ def decompile_bp_from_server(name, with_secrets=False, prefix="", bp_dir=None): blueprint = get_blueprint(name) bp_uuid = blueprint["metadata"]["uuid"] - res, err = client.blueprint.export_file(bp_uuid) + exported_bp_res, err = client.blueprint.export_file(bp_uuid) if err: raise Exception("[{}] - {}".format(err["code"], err["error"])) - res = res.json() + exported_bp_res_payload = exported_bp_res.json() + + # adding metadata payload from 'blueprint' payload to response of 'export blueprint' + # because 'export blueprint' response doesn't have project reference, while 'blueprint' has project reference info. + exported_bp_res_payload["metadata"] = blueprint["metadata"] + + # Filter out system created tasks in patch config + filter_patch_config_tasks(exported_bp_res_payload["spec"]["resources"]) + + kwargs = { + "reference_runbook_to_substrate_map": vm_power_action_target_map( + blueprint, exported_bp_res_payload + ) + } + _decompile_bp( - bp_payload=res, with_secrets=with_secrets, prefix=prefix, bp_dir=bp_dir + bp_payload=exported_bp_res_payload, + with_secrets=with_secrets, + prefix=prefix, + bp_dir=bp_dir, + **kwargs, ) @@ -593,18 +613,32 @@ def decompile_bp_from_server_with_secrets( blueprint = get_blueprint(name) bp_uuid = blueprint["metadata"]["uuid"] - res, err = client.blueprint.export_file(bp_uuid, passphrase) + exported_bp_res, err = client.blueprint.export_file(bp_uuid, passphrase) if err: raise Exception("[{}] - {}".format(err["code"], err["error"])) - res = res.json() + exported_bp_res_payload = exported_bp_res.json() + + # adding metadata payload from 'blueprint' payload to response of 'export blueprint' + # because 'export blueprint' response doesn't have project reference, while 'blueprint' has project reference info. 
+ exported_bp_res_payload["metadata"] = blueprint["metadata"] + + # Filter out system created tasks in patch config + filter_patch_config_tasks(exported_bp_res_payload["spec"]["resources"]) + + kwargs = { + "reference_runbook_to_substrate_map": vm_power_action_target_map( + blueprint, exported_bp_res_payload + ) + } _decompile_bp( - bp_payload=res, + bp_payload=exported_bp_res_payload, with_secrets=with_secrets, prefix=prefix, bp_dir=bp_dir, contains_encrypted_secrets=True, + **kwargs, ) @@ -625,11 +659,21 @@ def _decompile_bp( prefix="", bp_dir=None, contains_encrypted_secrets=False, + **kwargs, ): """decompiles the blueprint from payload""" init_decompile_context() + # reference_runbook_to_substrate_map will be used to update vm power action to it's substrate + reference_runbook_to_substrate_map = kwargs.get( + "reference_runbook_to_substrate_map", {} + ) + + if reference_runbook_to_substrate_map: + for rb_name, substrate_name in reference_runbook_to_substrate_map.items(): + update_power_action_target_substrate(rb_name, substrate_name) + blueprint = bp_payload["spec"]["resources"] blueprint_name = bp_payload["spec"].get("name", "DslBlueprint") blueprint_description = bp_payload["spec"].get("description", "") @@ -1096,7 +1140,7 @@ def get_protection_policy_rule( subnet_cluster_map, substrate_list, ): - """returns protection policy, protection rule tuple from cli_prompt""" + """returns protection policy, protection rule tuple from cli_prompt for ahv""" snapshot_config = next( ( @@ -1332,6 +1376,160 @@ def get_target_cluster_name(target_cluster_idx): return selected_policy, rule_choices[selected_rule]["rule"] +def get_protection_policy_rule_vmware( + protection_policy_uuid, + protection_rule_uuid, + snapshot_config_uuid, + app_profile, + protection_policies, + substrate_list, + vmware_account, +): + """returns protection policy, protection rule tuple from cli_prompt for vmware""" + + snapshot_config = next( + ( + config + for config in app_profile["snapshot_config_list"] + if config["uuid"] == snapshot_config_uuid + ), + None, + ) + if not snapshot_config: + LOG.err( + "No snapshot config with uuid {} found in App Profile {}".format( + snapshot_config_uuid, app_profile["name"] + ) + ) + sys.exit("Snapshot config {} not found".format(snapshot_config_uuid)) + is_local_snapshot = True + config_target = snapshot_config["attrs_list"][0]["target_any_local_reference"] + target_substrate_reference = next( + ( + deployment["substrate_local_reference"] + for deployment in app_profile["deployment_create_list"] + if deployment["uuid"] == config_target["uuid"] + ), + None, + ) + if not target_substrate_reference: + LOG.error( + "No deployment with uuid {} found under app profile {}".format( + config_target, app_profile["name"] + ) + ) + sys.exit("Deployment {} not found".format(config_target)) + + host_uuids = set() + for substrate in substrate_list: + host_uuids.add(substrate.get("create_spec", {}).get("host")) + + default_policy = "" + policy_choices = {} + for policy in protection_policies: + if ( + is_local_snapshot and policy["resources"]["rule_type"].lower() != "remote" + ) or ( + not is_local_snapshot + and policy["resources"]["rule_type"].lower() != "local" + ): + policy_choices[policy["name"]] = policy + if (not default_policy and policy["resources"]["is_default"]) or ( + protection_policy_uuid == policy["uuid"] + ): + default_policy = policy["name"] + if not policy_choices: + LOG.error( + "No protection policy found under this project. 
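Both decompile entry points forward the same runbook-to-substrate map through `**kwargs`; a toy reduction of that plumbing (names match the diff, bodies are stubs):

```
def update_power_action_target_substrate(rb_name, substrate_name):
    print("retarget {} -> {}".format(rb_name, substrate_name))  # stub

def _decompile_bp(bp_payload, **kwargs):
    for rb_name, substrate_name in kwargs.get(
        "reference_runbook_to_substrate_map", {}
    ).items():
        update_power_action_target_substrate(rb_name, substrate_name)

kwargs = {"reference_runbook_to_substrate_map": {"power_on_rb": "MySubstrate"}}
_decompile_bp({}, **kwargs)
```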
Please add one from the UI" + ) + sys.exit("No protection policy found") + if not default_policy or default_policy not in policy_choices: + default_policy = list(policy_choices.keys())[0] + click.echo("") + click.echo("Choose from given choices: ") + for choice in policy_choices.keys(): + click.echo("\t{}".format(highlight_text(repr(choice)))) + + selected_policy_name = click.prompt( + "Protection Policy for '{}' [{}]".format( + snapshot_config["name"], highlight_text(repr(default_policy)) + ), + default=default_policy, + show_default=False, + ) + if selected_policy_name not in policy_choices: + LOG.error( + "Invalid value '{}' for protection policy".format(selected_policy_name) + ) + sys.exit("Invalid protection policy") + + selected_policy = policy_choices[selected_policy_name] + ordered_site_list = selected_policy["resources"]["ordered_availability_site_list"] + account_uuid = vmware_account.get("account_reference", {}).get("uuid") + account_name = vmware_account.get("account_reference", {}).get("name") + + # reading vmware clusters associated with given account + VMWProvider = get_provider("VMWARE_VM") + VMWObj = VMWProvider.get_api_obj() + clusters = VMWObj.clusters(account_uuid) + + if not clusters: + LOG.error( + "Cannot find the cluster associated with account {} (uuid={})".format( + account_name, account_uuid + ) + ) + sys.exit("Cluster not found") + + cluster = clusters[0] + cluster_idx = -1 + for i, site in enumerate(ordered_site_list): + if site["infra_inclusion_list"]["cluster_references"][0]["name"] == cluster: + cluster_idx = i + break + if cluster_idx < 0: + LOG.error( + "Unable to find cluster {} in protection policy {}".format( + cluster, selected_policy_name + ) + ) + sys.exit("Cluster not found") + + default_rule_idx, i = 1, 1 + rule_choices = {} + label = "Snapshot no expiry. Target cluster: {}".format(cluster) + + for rule in selected_policy["resources"]["app_protection_rule_list"]: + source_cluster_idx = rule["first_availability_site_index"] + if source_cluster_idx == cluster_idx: + rule_choices[i] = {"label": label, "rule": rule} + i += 1 + + if not rule_choices: + LOG.error( + "No matching protection rules found under protection policy {}. Please add the rules using UI to continue".format( + selected_policy_name + ) + ) + sys.exit("No protection rules found") + click.echo("") + click.echo("Choose from given choices: ") + for choice in rule_choices.values(): + click.echo("\t{}".format(highlight_text(repr(choice["label"])))) + + selected_rule = click.prompt( + "Protection Rule for '{}' [{}]".format( + snapshot_config["name"], highlight_text(repr(default_rule_idx)) + ), + default=default_rule_idx, + show_default=False, + ) + if selected_rule not in rule_choices: + LOG.error("Invalid value '{}' for protection rule".format(selected_rule)) + sys.exit("Invalid protection rule") + return selected_policy, rule_choices[selected_rule]["rule"] + + def get_app(app_name): """ This routine checks if app with give name exists or not. 
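The policy prompt above prefers a stored default only while it is still a valid choice; a minimal sketch of that fallback pattern (policy names and uuids are illustrative):

```
policy_choices = {"gold": {"uuid": "g-1"}, "silver": {"uuid": "s-1"}}
default_policy = "bronze"  # e.g. a stale reference stored on the blueprint

# Keep the stored default only if it is still offered; otherwise fall back
# to the first available policy.
if not default_policy or default_policy not in policy_choices:
    default_policy = list(policy_choices.keys())[0]
print(default_policy)  # gold
```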
@@ -1710,28 +1908,16 @@ def launch_blueprint_simple( ntnx_acc = next( (acc for acc in infra_list if acc["type"] == "nutanix_pc"), None ) - if not ntnx_acc: + vmware_acc = next( + (acc for acc in infra_list if acc["type"] == "vmware"), None + ) + + if not (ntnx_acc or vmware_acc): LOG.error( - "No nutanix account found associated with the environment" + "No nutanix/vmware account found associated with the environment" ) - sys.exit("No nutanix account found in environment") - ahv_new = AhvNew(client.connection) - filter_query = "_entity_id_=in={}".format( - "|".join(subnet["uuid"] for subnet in ntnx_acc["subnet_references"]) - ) - subnets = ahv_new.subnets( - filter_query=filter_query, - account_uuid=ntnx_acc["account_reference"]["uuid"], - ) - subnet_cluster_map = [ - { - "cluster_name": subnet["status"]["cluster_reference"]["name"], - "cluster_uuid": subnet["status"]["cluster_reference"]["uuid"], - "subnet_name": subnet["status"]["name"], - "subnet_uuid": subnet["metadata"]["uuid"], - } - for subnet in subnets["entities"] - ] + sys.exit("No nutanix/vmware account found in environment") + protection_policy = snapshot_config["value"]["attrs_list"][0][ "app_protection_policy_reference" ] @@ -1739,15 +1925,54 @@ def launch_blueprint_simple( "app_protection_rule_reference" ] - protection_policy, protection_rule = get_protection_policy_rule( - protection_policy, - protection_rule, - snapshot_config["uuid"], - app_profile, - protection_policies, - subnet_cluster_map, - substrate_list, - ) + if ntnx_acc: + ahv_new = AhvNew(client.connection) + filter_query = "_entity_id_=in={}".format( + "|".join( + subnet["uuid"] for subnet in ntnx_acc["subnet_references"] + ) + ) + subnets = ahv_new.subnets( + filter_query=filter_query, + account_uuid=ntnx_acc["account_reference"]["uuid"], + ) + subnet_cluster_map = [ + { + "cluster_name": subnet["status"]["cluster_reference"][ + "name" + ], + "cluster_uuid": subnet["status"]["cluster_reference"][ + "uuid" + ], + "subnet_name": subnet["status"]["name"], + "subnet_uuid": subnet["metadata"]["uuid"], + } + for subnet in subnets["entities"] + if subnet["status"].get("cluster_reference", {}) + ] + protection_policy, protection_rule = get_protection_policy_rule( + protection_policy, + protection_rule, + snapshot_config["uuid"], + app_profile, + protection_policies, + subnet_cluster_map, + substrate_list, + ) + + elif vmware_acc: + ( + protection_policy, + protection_rule, + ) = get_protection_policy_rule_vmware( + protection_policy, + protection_rule, + snapshot_config["uuid"], + app_profile, + protection_policies, + substrate_list, + vmware_acc, + ) snapshot_config_obj = next( ( @@ -1816,11 +2041,12 @@ def launch_blueprint_simple( response = res.json() launch_req_id = response["status"]["request_id"] - poll_launch_status(client, blueprint_uuid, launch_req_id) + return poll_launch_status(client, blueprint_uuid, launch_req_id) def poll_launch_status(client, blueprint_uuid, launch_req_id): # Poll every 10 seconds on the app status, for 5 mins + # Return True for sucess and False for failure, as watch option depends on success or failure of application create option maxWait = 5 * 60 count = 0 while count < maxWait: @@ -1845,16 +2071,19 @@ def poll_launch_status(client, blueprint_uuid, launch_req_id): pc_ip, pc_port, app_uuid ) ) + return True break elif app_state == "failure": LOG.debug("API response: {}".format(response)) LOG.error("Failed to launch blueprint. 
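The `subnet_cluster_map` comprehension in this hunk gained a trailing guard so subnets without a `cluster_reference` (e.g. overlay subnets) are skipped rather than raising a `KeyError`; a self-contained sketch with assumed sample data:

```
subnets = {
    "entities": [
        {
            "status": {
                "name": "vlan0",
                "cluster_reference": {"name": "c1", "uuid": "c1-uuid"},
            },
            "metadata": {"uuid": "s1-uuid"},
        },
        # Overlay subnets can lack a cluster_reference; the guard skips them.
        {"status": {"name": "overlay0"}, "metadata": {"uuid": "s2-uuid"}},
    ]
}

subnet_cluster_map = [
    {
        "cluster_name": s["status"]["cluster_reference"]["name"],
        "cluster_uuid": s["status"]["cluster_reference"]["uuid"],
        "subnet_name": s["status"]["name"],
        "subnet_uuid": s["metadata"]["uuid"],
    }
    for s in subnets["entities"]
    if s["status"].get("cluster_reference", {})
]
print(subnet_cluster_map)  # only vlan0 survives
```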
Check API response above.") + return False break elif err: raise Exception("[{}] - {}".format(err["code"], err["error"])) LOG.info(app_state) count += 10 time.sleep(10) + return False def delete_blueprint(blueprint_names): @@ -1955,3 +2184,67 @@ def patch_bp_if_required( return new_blueprint["metadata"]["name"], new_blueprint return blueprint_name, None + + +def filter_patch_config_tasks(bp_payload): + """ + Removes system tasks that are not required in decompiled patch configs + """ + + for profile in bp_payload.get("app_profile_list", []): + for patch_config in profile.get("patch_list", []): + rb_payload = patch_config.get("runbook", None) + if not rb_payload: + continue + + system_dag_task = [ + _task + for _task in rb_payload["task_definition_list"] + if _task["type"] == "DAG" + ][0] + + patch_meta_task = [ + _task + for _task in rb_payload["task_definition_list"] + if _task["type"] == "PATCH_META" + ][0] + + # stores names of user defined tasks. Used to filter out system tasks. + runbook_child_names = set() + runbook_edges = [] # stores edges of user defined tasks + runbook_tasks = [] # stores list of user defined tasks + dag_childs = ( + [] + ) # stores child task local reference list of user defined tasks + user_dag_target = ( + {} + ) # stores target of user dag that will be created. It will be same target as of user defined task. + + # system dag task contains reference of user defined tasks and patch meta task in + # it's edges. So, to extract user defined tasks we will exclude patch meta task. + for _edge in system_dag_task.get("attrs", {}).get("edges", []): + if _edge["from_task_reference"]["name"] != patch_meta_task["name"]: + runbook_edges.append(deepcopy(_edge)) + runbook_child_names.add(_edge["to_task_reference"]["name"]) + + for _task in rb_payload["task_definition_list"]: + if _task["name"] in runbook_child_names: + dag_childs.append({"kind": "app_task", "name": _task["name"]}) + runbook_tasks.append(_task) + # Target of user dag and user defined task will be same. + user_dag_target = _task["target_any_local_reference"] + + # updating runbook with user defined tasks if present + if runbook_tasks: + # using system dag task to create user defined task + user_dag = system_dag_task + user_dag["child_tasks_local_reference_list"] = dag_childs + user_dag["target_any_local_reference"] = user_dag_target + user_dag["attrs"]["edges"] = runbook_edges + runbook_tasks.insert(0, user_dag) + rb_payload["task_definition_list"] = runbook_tasks + + # case when user defined tasks are absent then there is no need to + # decompile runbook hence popping it out. 
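`poll_launch_status` now returns a boolean (success, failure, or timeout-as-failure) so that `create_app` only enters watch mode after a successful launch. A generic restatement of that contract; `poll_until` is a hypothetical helper, not the real function:

```
import time

def poll_until(check, timeout=300, interval=10):
    """Return True on success, False on failure or timeout."""
    waited = 0
    while waited < timeout:
        state = check()
        if state == "success":
            return True
        if state == "failure":
            return False
        time.sleep(interval)
        waited += interval
    return False  # timed out: the caller treats this the same as failure

print(poll_until(lambda: "success", timeout=10, interval=1))  # True
```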
+ else: + patch_config.pop("runbook", "") diff --git a/calm/dsl/cli/constants.py b/calm/dsl/cli/constants.py index 5e1636cb..c319cc1d 100644 --- a/calm/dsl/cli/constants.py +++ b/calm/dsl/cli/constants.py @@ -204,6 +204,8 @@ class SOURCES: GLOBAL = "GLOBAL_STORE" LOCAL = "LOCAL" + WARN_MSG = "Projects associated with MPI should have accounts attached in blueprint for deployment" + class TASKS: class TASK_TYPES: diff --git a/calm/dsl/cli/environment_commands.py b/calm/dsl/cli/environment_commands.py index d3955d52..74128de6 100644 --- a/calm/dsl/cli/environment_commands.py +++ b/calm/dsl/cli/environment_commands.py @@ -1,12 +1,13 @@ import click -from .main import get, delete, create, update, compile +from .main import get, delete, create, update, compile, decompile from .environments import ( create_environment_from_dsl_file, get_environment_list, delete_environment, update_environment_from_dsl_file, compile_environment_command, + decompile_environment_command, ) from calm.dsl.log import get_logging_handle @@ -23,6 +24,7 @@ default=None, help="Filter environments by this string", ) +@click.argument("name", required=False) @click.option("--limit", "-l", default=20, help="Number of results to return") @click.option( "--offset", "-s", default=0, help="Offset results by the specified amount" @@ -81,14 +83,21 @@ def _delete_environment(environment_name, project_name, no_cache_update): default=False, help="if true, cache is not updated for project", ) -def _create_environment(env_file, env_name, project_name, no_cache_update): +@click.option( + "--force", + "-fc", + is_flag=True, + default=False, + help="Deletes existing environment with the same name before create, if entities are not associated with it.", +) +def _create_environment(env_file, env_name, project_name, no_cache_update, force): """ Creates a environment to existing project. 
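The new environment decompile command accepts either a server-side name or a local file, but not both; the validation pattern, reduced to its core (the function name here is illustrative):

```
import sys

def decompile_environment_args(name=None, environment_file=None):
    # Exactly one source must be given: a server-side name or a local file.
    if name and environment_file:
        sys.exit("Both environment name and file location provided.")
    if not (name or environment_file):
        sys.exit("Environment name or file location not provided.")
    return "server" if name else "file"

print(decompile_environment_args(name="my-env"))  # server
```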
""" if env_file.endswith(".py"): create_environment_from_dsl_file( - env_file, env_name, project_name, no_cache_update + env_file, env_name, project_name, no_cache_update, force ) else: LOG.error("Unknown file format {}".format(env_file)) @@ -149,3 +158,34 @@ def _compile_environment_command(env_file, project_name, out): """Compiles a DSL (Python) environment into JSON or YAML""" compile_environment_command(env_file, project_name, out) + + +@decompile.command("environment", experimental=True) +@click.option( + "--name", + "-n", + "name", + default=None, + help="Environment name", +) +@click.option( + "--file", + "-f", + "environment_file", + type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True), + help="Path to Environment file", +) +@click.option("--project", "-p", "project_name", help="Project name") +@click.option( + "--dir", + "-d", + "environment_dir", + default=None, + help="Environment directory location used for placing decompiled entities", +) +def _decompile_environment_command( + name, environment_file, project_name, environment_dir +): + """Decompiles environment present on server or json file""" + + decompile_environment_command(name, environment_file, project_name, environment_dir) diff --git a/calm/dsl/cli/environments.py b/calm/dsl/cli/environments.py index 66a7466f..71cd248c 100644 --- a/calm/dsl/cli/environments.py +++ b/calm/dsl/cli/environments.py @@ -2,6 +2,7 @@ import uuid import click import json +import os import time import arrow from prettytable import PrettyTable @@ -9,12 +10,22 @@ from calm.dsl.config import get_context from calm.dsl.api import get_api_client -from calm.dsl.builtins import create_environment_payload, Environment +from calm.dsl.builtins import ( + create_environment_payload, + Environment, + get_valid_identifier, + MetadataType, + CredentialType, +) from calm.dsl.builtins.models.helper.common import get_project +from calm.dsl.decompile.main import init_decompile_context +from calm.dsl.decompile.decompile_render import create_environment_dir +from calm.dsl.decompile.file_handler import get_environment_dir from calm.dsl.tools import get_module_from_file from calm.dsl.store import Cache from calm.dsl.constants import CACHE from calm.dsl.log import get_logging_handle +from calm.dsl.builtins.models.environment import EnvironmentType from .utils import ( get_name_query, @@ -31,6 +42,21 @@ def create_environment(env_payload): env_payload.pop("status", None) env_name = env_payload["spec"]["name"] + + # Adding uuid to creds + cred_name_uuid_map = {} + for cred in env_payload["spec"]["resources"].get("credential_definition_list", []): + cred["uuid"] = str(uuid.uuid4()) + cred_name_uuid_map[cred["name"]] = cred["uuid"] + + # Adding uuid readiness-probe + for sub in env_payload["spec"]["resources"].get("substrate_definition_list", []): + try: + cred_ref_obj = sub["readiness_probe"]["login_credential_local_reference"] + cred_ref_obj["uuid"] = cred_name_uuid_map[cred_ref_obj["name"]] + except Exception: + pass + LOG.info("Creating environment '{}'".format(env_name)) res, err = client.environment.create(env_payload) if err: @@ -206,7 +232,7 @@ def get_env_class_from_module(user_env_module): def create_environment_from_dsl_file( - env_file, env_name, project_name, no_cache_update=False + env_file, env_name, project_name, no_cache_update=False, force=False ): """ Helper creates an environment from dsl file (for calm_version >= 3.2) @@ -217,6 +243,31 @@ def create_environment_from_dsl_file( Returns: response (object): Response object 
containing environment object details """ + if force: + env_exist, res = is_environment_exist(env_name, project_name) + if env_exist: + entities = get_environments_usage(res["metadata"]["uuid"], project_name) + if entities: + click.echo(highlight_text("\n-------- Environments usage --------\n")) + for entity in entities: + click.echo( + highlight_text(list(entity.keys())[0]) + + ": " + + highlight_text(list(entity.values())[0]) + ) + LOG.error( + f"\nEnvironment with name {env_name} has entities associated with it, environment creation with same name cannot be forced.\n" + ) + sys.exit(-1) + else: + LOG.info( + f"Forcing the environment create with name {env_name} by deleting the existing environment with same name" + ) + delete_environment(env_name, project_name) + else: + LOG.info( + f"Environment with same name {env_name} does not exist in system, no need of forcing the environment create" + ) # Update project on context ContextObj = get_context() @@ -509,3 +560,143 @@ def delete_environment(environment_name, project_name, no_cache_update=False): LOG.info("Updating environments cache ...") Cache.delete_one(entity_type=CACHE.ENTITY.ENVIRONMENT, uuid=environment_id) LOG.info("[Done]") + + +def decompile_environment_command( + name, environment_file, project, environment_dir=None +): + """helper to decompile environment""" + if name and environment_file: + LOG.error( + "Please provide either environment file location or server environment name" + ) + sys.exit("Both environment name and file location provided.") + init_decompile_context() + + if name: + decompile_environment_from_server( + name=name, environment_dir=environment_dir, project=project + ) + + elif environment_file: + decompile_environment_from_file( + filename=environment_file, environment_dir=environment_dir + ) + else: + LOG.error( + "Please provide either environment file location or server environment name" + ) + sys.exit("Environment name or file location not provided.") + + +def decompile_environment_from_server(name, environment_dir, project): + """decompiles the environment by fetching it from server""" + + client = get_api_client() + environment = get_environment(name, project) + environment_uuid = environment["status"]["uuid"] + res, err = client.environment.read(environment_uuid) + if err: + LOG.error(err) + sys.exit("Not able to decompile environment from server.") + + environment = res.json() + _decompile_environment( + environment_payload=environment, environment_dir=environment_dir + ) + + +def decompile_environment_from_file(filename, environment_dir): + """decompile environment from local environment file""" + + environment_payload = json.loads(open(filename).read()) + _decompile_environment( + environment_payload=environment_payload, environment_dir=environment_dir + ) + + +def _decompile_environment(environment_payload, environment_dir): + """decompiles the environment from payload""" + + environment_name = environment_payload["status"].get("name", "DslEnvironment") + environment_description = environment_payload["status"].get("description", "") + + environment_metadata = environment_payload["metadata"] + # POP unnecessary keys + environment_metadata.pop("creation_time", None) + environment_metadata.pop("last_update_time", None) + + metadata_obj = MetadataType.decompile(environment_metadata) + + LOG.info("Decompiling environment {}".format(environment_name)) + environment_cls = EnvironmentType.decompile( + environment_payload["status"]["resources"] + ) + + credentials = environment_cls.credentials + + 
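The `--force` path above only deletes an existing environment when nothing references it; a reduced control-flow sketch, with the three helpers stubbed out (the real ones are defined later in `environments.py`):

```
def force_create_environment(env_name, project_name):
    """Reduced control flow of the --force path; helpers below are stubs."""
    env_exists, env = is_environment_exist(env_name, project_name)
    if not env_exists:
        return "create"  # nothing to force
    if get_environments_usage(env["metadata"]["uuid"], project_name):
        raise SystemExit("environment is in use; refusing to force-create")
    delete_environment(env_name, project_name)
    return "create"

# Stubs standing in for the real helpers:
def is_environment_exist(name, project):
    return True, {"metadata": {"uuid": "env-1"}}

def get_environments_usage(env_uuid, project):
    return []  # no associated entities

def delete_environment(name, project):
    print("deleted", name)

print(force_create_environment("dev-env", "default"))  # deleted dev-env / create
```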
environment_cls.__name__ = get_valid_identifier(environment_name) + environment_cls.__doc__ = environment_description + + create_environment_dir( + environment_cls=environment_cls, + environment_dir=environment_dir, + metadata_obj=metadata_obj, + credentials=credentials, + ) + click.echo( + "\nSuccessfully decompiled. Directory location: {}. Environment location: {}".format( + highlight_text(get_environment_dir()), + highlight_text(os.path.join(get_environment_dir(), "environment.py")), + ) + ) + + +def is_environment_exist(env_name, project_name): + client = get_api_client() + payload = { + "length": 250, + "offset": 0, + "filter": "name=={}".format(env_name), + } + + if project_name: + project = get_project(project_name) + project_id = project["metadata"]["uuid"] + payload["filter"] += ";project_reference=={}".format(project_id) + + res, err = client.environment.list(payload) + if err: + raise Exception("[{}] - {}".format(err["code"], err["error"])) + + res = res.json() + if res["metadata"]["total_matches"] == 0: + return False, None + + return True, res["entities"][0] + + +def get_environments_usage(env_uuid, project_name): + filter = {"filter": {"environment_reference_list": [env_uuid]}} + client = get_api_client() + project_name_uuid_map = client.project.get_name_uuid_map() + project_id = project_name_uuid_map.get(project_name) + res, err = client.project.usage(project_id, filter) + if err: + LOG.error(err) + sys.exit(-1) + + entities = [] + + def collect_entities(usage): + for entity_name, count in usage.items(): + if entity_name not in ["environment", "marketplace_item"]: + if isinstance(count, dict): + collect_entities(count) + continue + if count > 0: + entities.append({entity_name: count}) + + res = res.json() + collect_entities(res["status"]["usage"]) + return entities diff --git a/calm/dsl/cli/library_tasks_commands.py b/calm/dsl/cli/library_tasks_commands.py index 7e41b3e6..2c122fc0 100644 --- a/calm/dsl/cli/library_tasks_commands.py +++ b/calm/dsl/cli/library_tasks_commands.py @@ -82,7 +82,7 @@ def _delete_task(task_names): "task_file", type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True), required=True, - help="Path of task file (.json, .sh, .escript, .escript.py2, .escript.py3, .ps1)", + help="Path of task file (.sh, .escript, .escript.py2, .escript.py3, .ps1)", ) @click.option("--name", "-n", default=None, help="Task Library item name (Optional)") @click.option( @@ -93,7 +93,7 @@ def _delete_task(task_names): "-v", "out_vars", default=None, - help="Set-variable output variables coma seperated (,) (Optional)", + help="Set-variable output variables comma seperated (,) (Optional)", ) @click.option( "--force", diff --git a/calm/dsl/cli/main.py b/calm/dsl/cli/main.py index 1a76dbb2..ac124a51 100644 --- a/calm/dsl/cli/main.py +++ b/calm/dsl/cli/main.py @@ -2,6 +2,7 @@ import click import json import copy +import os import click_completion import click_completion.core @@ -10,15 +11,19 @@ # TODO - move providers to separate file from calm.dsl.providers import get_provider, get_provider_types -from calm.dsl.api import get_api_client, get_resource_api +from calm.dsl.api import get_api_client, get_resource_api, reset_api_client_handle from calm.dsl.log import get_logging_handle -from calm.dsl.config import get_context +from calm.dsl.config import get_context, get_default_config_file +from calm.dsl.config.env_config import EnvConfig from calm.dsl.store import Cache from calm.dsl.constants import DSL_CONFIG +from calm.dsl.builtins.models.utils import 
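`get_environments_usage` walks a nested usage dict recursively; a standalone version that passes the accumulator explicitly instead of closing over it, with an assumed sample payload:

```
def collect_entities(usage, entities):
    # Descend into sub-dicts, record non-zero counts, and ignore the
    # environment/marketplace_item buckets themselves.
    for name, count in usage.items():
        if name in ("environment", "marketplace_item"):
            continue
        if isinstance(count, dict):
            collect_entities(count, entities)
        elif count > 0:
            entities.append({name: count})

entities = []
collect_entities({"app": 2, "blueprint": {"blueprint": 0}, "environment": 5}, entities)
print(entities)  # [{'app': 2}]
```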
set_compile_secrets_flag from .version_validator import validate_version from .click_options import simple_verbosity_option, show_trace_option from .utils import FeatureFlagGroup, highlight_text +from calm.dsl.store import Version +from calm.dsl.config.init_config import get_init_config_handle CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"]) @@ -69,13 +74,42 @@ def main(ctx, config_file, sync): ctx.ensure_object(dict) ctx.obj["verbose"] = True try: - validate_version() + ContextObj = get_context() + old_pc_ip = Version.get_version_data("PC").get("pc_ip", "") + if config_file: + if not os.path.exists(config_file): + raise ValueError("file not found {}".format(config_file)) + + if ctx.invoked_subcommand == "init": + raise ValueError("config file passing is not supported in init command") + + ContextObj.update_config_file_context(config_file=config_file) + + if ctx.invoked_subcommand != "init": + server_config = ContextObj.get_server_config() + + if old_pc_ip != server_config.get("pc_ip", ""): + LOG.warning("Host IP changed.") + + if not sync: + LOG.warning( + "Cache is outdated. Please pass `-s/--sync` if command fails." + ) + + # reset api client handle, so current context will be used while syncing version cache. + reset_api_client_handle() + Version.sync() # sync version cache so that correct version validation happens. + + # While initializing DSL version may not be present in cache, even if we validate version in this case then + # it will be version of previous context. Therefore, skipping version validation while initializing. + validate_version() + except Exception: LOG.debug("Could not validate version") pass - if config_file: - ContextObj = get_context() - ContextObj.update_config_file_context(config_file=config_file) + + # This is added to ensure non compile commands has secrets in the dictionary. 
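The config-file handling added to `main` enforces two guards: an explicitly passed file must exist, and `calm init` manages its own config so passing one there is rejected. A reduced sketch (`resolve_config_file` is a hypothetical helper, not in the diff):

```
import os

def resolve_config_file(config_file, subcommand):
    if not config_file:
        return None
    if not os.path.exists(config_file):
        raise ValueError("file not found {}".format(config_file))
    if subcommand == "init":
        raise ValueError("config file passing is not supported in init command")
    return config_file

print(resolve_config_file(None, "get"))  # None
```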
+ set_compile_secrets_flag(True) ContextObj = get_context() project_config = ContextObj.get_project_config() @@ -108,7 +142,7 @@ def validate(): "-t", "provider_type", type=click.Choice(get_provider_types()), - default="AHV_VM", + default=None, help="Provider type", ) def validate_provider_spec(spec_file, provider_type): @@ -117,6 +151,33 @@ def validate_provider_spec(spec_file, provider_type): with open(spec_file) as f: spec = yaml.safe_load(f.read()) + if provider_type == None: + spec_type = spec.get("type", None) + recommended_type = "AHV_VM" + + if spec_type == "PROVISION_AWS_VM": + recommended_type = "AWS_VM" + elif spec_type == "PROVISION_AZURE_VM": + recommended_type = "AZURE_VM" + elif spec_type == "PROVISION_GCP_VM": + recommended_type = "GCP_VM" + elif spec_type == "PROVISION_VMWARE_VM": + recommended_type = "VMWARE_VM" + + if spec_type == None: + LOG.warning( + "You haven't chosen a provider type, so we'll proceed with '{}'.".format( + recommended_type + ) + ) + else: + LOG.warning( + "You haven't chosen a provider type, and it should be '{}' according to your spec file, so we'll proceed with that.".format( + recommended_type + ) + ) + provider_type = recommended_type + try: Provider = get_provider(provider_type) Provider.validate_spec(spec) @@ -302,7 +363,9 @@ def format(): @main.group(cls=FeatureFlagGroup) def compile(): """Compile blueprint to json / yaml""" - pass + + # Setting this to make sure during compile secrets are not printed + set_compile_secrets_flag(EnvConfig.is_compile_secret()) @main.group(cls=FeatureFlagGroup) diff --git a/calm/dsl/cli/marketplace.py b/calm/dsl/cli/marketplace.py index 35f97936..64cb46ac 100644 --- a/calm/dsl/cli/marketplace.py +++ b/calm/dsl/cli/marketplace.py @@ -68,7 +68,6 @@ def get_group_data_value(data_list, field, value_list=False): entity_value = entity["values"] if not entity_value: return None - return ( entity_value[0]["values"] if value_list @@ -347,6 +346,28 @@ def get_mpi_latest_version(name, app_source=None, app_states=[], type=None): return entity_version +def get_mpi_all_versions(name, app_source=None, app_states=[], type=None): + + res = get_mpis_group_call( + name=name, + app_states=app_states, + group_member_count=20, + app_source=app_source, + type=type, + ) + group_results = res["group_results"] + + if not group_results: + LOG.error("No Marketplace Item found with name {}".format(name)) + sys.exit(-1) + + entity_results = group_results[0]["entity_results"] + all_versions = [] + for i in range(0, len(entity_results)): + all_versions.append(get_group_data_value(entity_results[i]["data"], "version")) + return all_versions + + def get_mpi_by_name_n_version(name, version, app_states=[], app_source=None, type=None): """ It will fetch marketplace item with particular version. 
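The provider-type inference above is an elif chain that amounts to a lookup table; the same mapping in dict form, falling back to `AHV_VM` exactly as the code does (`infer_provider_type` is illustrative):

```
SPEC_TO_PROVIDER = {
    "PROVISION_AWS_VM": "AWS_VM",
    "PROVISION_AZURE_VM": "AZURE_VM",
    "PROVISION_GCP_VM": "GCP_VM",
    "PROVISION_VMWARE_VM": "VMWARE_VM",
}

def infer_provider_type(spec):
    # Anything unrecognized (including a missing "type") falls back to AHV_VM.
    return SPEC_TO_PROVIDER.get(spec.get("type"), "AHV_VM")

print(infer_provider_type({"type": "PROVISION_GCP_VM"}))  # GCP_VM
print(infer_provider_type({}))                            # AHV_VM
```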
@@ -806,6 +827,8 @@ def publish_bp_to_marketplace_manager( app_group_uuid=None, icon_name=None, icon_file=None, + projects=[], + all_projects=False, ): client = get_api_client() @@ -878,6 +901,33 @@ def publish_bp_to_marketplace_manager( } ] + if not bp_template["spec"]["resources"].get("project_reference_list", {}): + bp_template["spec"]["resources"]["project_reference_list"] = [] + + project_name_uuid_map = client.project.get_name_uuid_map(params={"length": 250}) + if all_projects: + for k, v in project_name_uuid_map.items(): + bp_template["spec"]["resources"]["project_reference_list"].append( + { + "kind": "project", + "name": k, + "uuid": v, + } + ) + + else: + for _project in projects: + bp_template["spec"]["resources"]["project_reference_list"].append( + { + "kind": "project", + "name": _project, + "uuid": project_name_uuid_map[_project], + } + ) + + if bp_template["spec"]["resources"].get("project_reference_list"): + LOG.warning(MARKETPLACE_ITEM.WARN_MSG) + res, err = client.market_place.create(bp_template) LOG.debug("Api response: {}".format(res.json())) if err: @@ -932,6 +982,8 @@ def publish_bp_as_new_marketplace_bp( with_secrets=with_secrets, icon_name=icon_name, icon_file=icon_file, + projects=projects, + all_projects=all_projects, ) if publish_to_marketplace or auto_approve: @@ -1032,6 +1084,8 @@ def publish_bp_as_existing_marketplace_bp( app_group_uuid=app_group_uuid, icon_name=icon_name, icon_file=icon_file, + projects=projects, + all_projects=all_projects, ) if publish_to_marketplace or auto_approve: @@ -1063,6 +1117,7 @@ def approve_marketplace_item( category=None, all_projects=False, type=None, + remove_projects=[], ): client = get_api_client() @@ -1143,6 +1198,26 @@ def approve_marketplace_item( "uuid": project_name_uuid_map[_project], } ) + for _project in remove_projects: + project_valid = False + for index, project_detail in enumerate( + item_data["spec"]["resources"]["project_reference_list"] + ): + if _project == project_detail["name"]: + item_data["spec"]["resources"]["project_reference_list"].pop(index) + project_valid = True + break + + # Validating the project association with MPI + if not project_valid: + LOG.error( + "Project {} is not associated to Marketplace Item {}".format( + _project, name + ) + ) + + if item_type == MARKETPLACE_ITEM.TYPES.BLUEPRINT: + LOG.warning(MARKETPLACE_ITEM.WARN_MSG) res, err = client.market_place.update(uuid=item_uuid, payload=item_data) if err: @@ -1272,6 +1347,7 @@ def update_marketplace_item( projects=[], description=None, app_source=None, + all_projects=None, type=None, ): """ @@ -1314,23 +1390,41 @@ def update_marketplace_item( item_data["metadata"]["categories"] = {"AppFamily": category} - if projects: - # Clear all stored projects + if projects or all_projects: + # Clear the stored projects item_data["spec"]["resources"]["project_reference_list"] = [] - for project in projects: - project_data = get_project(project) + project_name_uuid_map = client.project.get_name_uuid_map(params={"length": 250}) - item_data["spec"]["resources"]["project_reference_list"].append( - { - "kind": "project", - "name": project, - "uuid": project_data["metadata"]["uuid"], - } - ) + if all_projects: + for k, v in project_name_uuid_map.items(): + item_data["spec"]["resources"]["project_reference_list"].append( + { + "kind": "project", + "name": k, + "uuid": v, + } + ) + else: + for _project in projects: + item_data["spec"]["resources"]["project_reference_list"].append( + { + "kind": "project", + "name": _project, + "uuid": 
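`get_mpi_all_versions` iterates `entity_results` by index; the same collection written as a comprehension, with `get_group_data_value` stubbed and sample data assumed:

```
def get_group_data_value(data_list, field):  # stub for the real helper
    return next(e["values"][0]["values"][0] for e in data_list if e["name"] == field)

entity_results = [
    {"data": [{"name": "version", "values": [{"values": ["1.0.0"]}]}]},
    {"data": [{"name": "version", "values": [{"values": ["2.0.0"]}]}]},
]

# Equivalent to the index-based loop, without range(0, len(...)).
all_versions = [
    get_group_data_value(entity["data"], "version") for entity in entity_results
]
print(all_versions)  # ['1.0.0', '2.0.0']
```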
project_name_uuid_map[_project], + } + ) if description: item_data["spec"]["description"] = description + item_type = MARKETPLACE_ITEM.TYPES.BLUEPRINT + CALM_VERSION = Version.get_version("Calm") + if LV(CALM_VERSION) >= LV("3.2.0"): + item_type = mpi_data["status"]["resources"]["type"] + + if item_type == MARKETPLACE_ITEM.TYPES.BLUEPRINT: + LOG.warning(MARKETPLACE_ITEM.WARN_MSG) + res, err = client.market_place.update(uuid=item_uuid, payload=item_data) if err: LOG.error("[{}] - {}".format(err["code"], err["error"])) @@ -1443,10 +1537,25 @@ def reject_marketplace_item(name, version, type=None): ) -def unpublish_marketplace_item(name, version, app_source=None, type=None): +def unpublish_marketplace_item( + name, version, app_source=None, projects=None, all_versions=None, type=None +): client = get_api_client() - if not version: + versions = [] + if version: + versions.append(version) + elif all_versions: + # Fecth all versions + all_versions = get_mpi_all_versions( + name=name, + app_states=[MARKETPLACE_ITEM.STATES.PUBLISHED], + app_source=app_source, + type=type, + ) + versions.extend(all_versions) + LOG.info(versions) + elif not version and not all_versions: # Search for published items, only those can be unpublished LOG.info( "Fetching latest version of published Marketplace Item {} ".format(name) @@ -1457,45 +1566,86 @@ def unpublish_marketplace_item(name, version, app_source=None, type=None): app_source=app_source, type=type, ) - LOG.info(version) + versions.append(version) - LOG.info( - "Fetching details of published marketplace item {} with version {}".format( - name, version + for version in versions: + LOG.info( + "Fetching details of published marketplace item {} with version {}".format( + name, version + ) ) - ) - item = get_mpi_by_name_n_version( - name=name, - version=version, - app_states=[MARKETPLACE_ITEM.STATES.PUBLISHED], - app_source=app_source, - type=type, - ) - item_uuid = item["metadata"]["uuid"] - - res, err = client.market_place.read(item_uuid) - if err: - LOG.error("[{}] - {}".format(err["code"], err["error"])) - sys.exit(-1) + item = get_mpi_by_name_n_version( + name=name, + version=version, + app_states=[MARKETPLACE_ITEM.STATES.PUBLISHED], + app_source=app_source, + type=type, + ) + item_uuid = item["metadata"]["uuid"] - item_data = res.json() - item_data.pop("status", None) - item_data["api_version"] = "3.0" - item_data["spec"]["resources"]["app_state"] = MARKETPLACE_ITEM.STATES.ACCEPTED + res, err = client.market_place.read(item_uuid) + if err: + LOG.error("[{}] - {}".format(err["code"], err["error"])) + sys.exit(-1) - res, err = client.market_place.update(uuid=item_uuid, payload=item_data) - if err: - LOG.error("[{}] - {}".format(err["code"], err["error"])) - sys.exit(-1) + item_data = res.json() + item_data.pop("status", None) + item_data["api_version"] = "3.0" + if projects: + project_name_uuid_map = client.project.get_name_uuid_map( + params={"length": 250} + ) + for project in projects: + # Validating the given projects + if project not in project_name_uuid_map: + LOG.error( + "Project {} does not exist in system to unpublish from MPI".format( + project + ) + ) + sys.exit(-1) + + project_valid = False + for index, project_detail in enumerate( + item_data["spec"]["resources"]["project_reference_list"] + ): + if project == project_detail["name"]: + item_data["spec"]["resources"]["project_reference_list"].pop( + index + ) + project_valid = True + break + + # Validating the project association with MPI + if not project_valid: + LOG.error( + "Project {} is not 
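Publish and update both build the same project reference list from either an explicit list or every known project; a hypothetical shared helper (not in the diff) that expresses the common intent:

```
def project_refs(project_name_uuid_map, projects=(), all_projects=False):
    """Hypothetical helper; both publish and update build this list."""
    names = project_name_uuid_map if all_projects else projects
    return [
        {"kind": "project", "name": n, "uuid": project_name_uuid_map[n]}
        for n in names
    ]

name_uuid = {"default": "u-1", "dev": "u-2"}
print(project_refs(name_uuid, all_projects=True))
print(project_refs(name_uuid, projects=["dev"]))
```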
associated with MPI {} to unpublish".format( + project, name + ) + ) + sys.exit(-1) + else: + item_data["spec"]["resources"][ + "app_state" + ] = MARKETPLACE_ITEM.STATES.ACCEPTED - LOG.info( - "Marketplace Item {} with version {} is unpublished successfully".format( - name, version + res, err = client.market_place.update(uuid=item_uuid, payload=item_data) + if err: + LOG.error("[{}] - {}".format(err["code"], err["error"])) + sys.exit(-1) + additional_log = ( + "from projects {}".format(", ".join(projects)) if projects else "" + ) + LOG.info( + "Marketplace Item {} with version {} is unpublished successfully {}".format( + name, version, additional_log + ) ) - ) -def unpublish_marketplace_bp(name, version, app_source=None): +def unpublish_marketplace_bp( + name, version, app_source=None, projects=None, all_versions=None +): """unpublishes marketplace blueprint""" if not version: @@ -1534,11 +1684,13 @@ def unpublish_marketplace_bp(name, version, app_source=None): "Marketplace blueprint {} with version {} not found".format(name, version) ) sys.exit(-1) - + version = None if all_versions else version unpublish_marketplace_item( name=name, version=version, app_source=app_source, + projects=projects, + all_versions=all_versions, type=MARKETPLACE_ITEM.TYPES.BLUEPRINT, ) @@ -1553,6 +1705,8 @@ def publish_runbook_to_marketplace_manager( app_group_uuid=None, icon_name=None, icon_file=None, + projects=[], + all_projects=False, ): client = get_api_client() @@ -1606,6 +1760,30 @@ def publish_runbook_to_marketplace_manager( } ] + if not mpi_spec["spec"]["resources"].get("project_reference_list", {}): + mpi_spec["spec"]["resources"]["project_reference_list"] = [] + + project_name_uuid_map = client.project.get_name_uuid_map(params={"length": 250}) + if all_projects: + for k, v in project_name_uuid_map.items(): + mpi_spec["spec"]["resources"]["project_reference_list"].append( + { + "kind": "project", + "name": k, + "uuid": v, + } + ) + + else: + for _project in projects: + mpi_spec["spec"]["resources"]["project_reference_list"].append( + { + "kind": "project", + "name": _project, + "uuid": project_name_uuid_map[_project], + } + ) + res, err = client.market_place.create(mpi_spec) LOG.debug("Api response: {}".format(res.json())) if err: @@ -1628,6 +1806,7 @@ def publish_runbook_as_new_marketplace_item( category=None, icon_name=None, icon_file=None, + all_projects=False, ): # Search whether this marketplace item exists or not @@ -1660,6 +1839,8 @@ def publish_runbook_as_new_marketplace_item( with_endpoints=with_endpoints, icon_name=icon_name, icon_file=icon_file, + projects=projects, + all_projects=all_projects, ) if publish_to_marketplace or auto_approve: @@ -1673,6 +1854,7 @@ def publish_runbook_as_new_marketplace_item( version=version, projects=projects, category=category, + all_projects=all_projects, ) if publish_to_marketplace: @@ -1696,6 +1878,7 @@ def publish_runbook_as_existing_marketplace_item( category=None, icon_name=None, icon_file=None, + all_projects=False, ): LOG.info( @@ -1757,9 +1940,12 @@ def publish_runbook_as_existing_marketplace_item( version=version, description=description, with_secrets=with_secrets, + with_endpoints=with_endpoints, app_group_uuid=app_group_uuid, icon_name=icon_name, icon_file=icon_file, + projects=projects, + all_projects=all_projects, ) if publish_to_marketplace or auto_approve: @@ -1773,6 +1959,7 @@ def publish_runbook_as_existing_marketplace_item( version=version, projects=projects, category=category, + all_projects=all_projects, ) if publish_to_marketplace: diff 
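The per-project unpublish loop above pops the first matching reference and breaks, which is safe since iteration stops immediately; a mutation-free equivalent that also validates the association first (sample data assumed):

```
refs = [{"name": "default"}, {"name": "dev"}, {"name": "qa"}]

to_remove = "dev"
if not any(r["name"] == to_remove for r in refs):
    raise SystemExit("Project {} is not associated with the MPI".format(to_remove))
# Filtering rebuilds the list instead of popping while enumerating.
refs = [r for r in refs if r["name"] != to_remove]
print([r["name"] for r in refs])  # ['default', 'qa']
```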
--git a/calm/dsl/cli/marketplace_bp_commands.py b/calm/dsl/cli/marketplace_bp_commands.py index 4ea5a1b9..203aff7d 100644 --- a/calm/dsl/cli/marketplace_bp_commands.py +++ b/calm/dsl/cli/marketplace_bp_commands.py @@ -418,7 +418,14 @@ def publish_bp( "-p", "projects", multiple=True, - help="Projects for marketplace blueprint", + help="Add projects to marketplace blueprint", +) +@click.option( + "--remove-project", + "-rp", + "remove_projects", + multiple=True, + help="Remove projects from marketplace blueprint", ) @click.option( "--all_projects", @@ -427,7 +434,7 @@ def publish_bp( default=False, help="Approve bp to all projects", ) -def approve_bp(name, version, category, all_projects, projects=[]): +def approve_bp(name, version, category, all_projects, projects=[], remove_projects=[]): """Approves a marketplace manager blueprint""" approve_marketplace_item( @@ -437,6 +444,7 @@ def approve_bp(name, version, category, all_projects, projects=[]): category=category, all_projects=all_projects, type=MARKETPLACE_ITEM.TYPES.BLUEPRINT, + remove_projects=remove_projects, ) @@ -504,7 +512,16 @@ def _publish_marketplace_bp(name, version, category, source, all_projects, proje type=click.Choice(APP_SOURCES), help="App Source for marketplace blueprint", ) -def _update_marketplace_bp(name, version, category, projects, description, source): +@click.option( + "--all_projects", + "-ap", + is_flag=True, + default=False, + help="Update marketplace blueprints with all projects", +) +def _update_marketplace_bp( + name, version, category, projects, description, source, all_projects +): """Update a marketplace manager blueprint""" update_marketplace_item( @@ -514,6 +531,7 @@ def _update_marketplace_bp(name, version, category, projects, description, sourc projects=projects, description=description, app_source=source, + all_projects=all_projects, type=MARKETPLACE_ITEM.TYPES.BLUEPRINT, ) @@ -565,7 +583,7 @@ def _reject_marketplace_bp(name, version): @marketplace_unpublish.command("bp") @click.argument("name") @click.option( - "--version", "-v", required=True, help="Version of marketplace blueprint" + "--version", "-v", default=None, help="Version of marketplace blueprint" ) # Required to prevent unwanted unpublish of unknown mpi @click.option( "--source", @@ -574,7 +592,27 @@ def _reject_marketplace_bp(name, version): type=click.Choice(APP_SOURCES), help="App Source of marketplace blueprint", ) -def _unpublish_marketplace_bp(name, version, source): +@click.option( + "--project", + "-p", + "projects", + multiple=True, + help="Unpublishes bp from specific project", +) +@click.option( + "--all_versions", + "-av", + is_flag=True, + default=False, + help="Unpublishes bp from all version", +) +def _unpublish_marketplace_bp(name, version, source, all_versions, projects=[]): """Unpublish marketplace store blueprint""" - unpublish_marketplace_bp(name=name, version=version, app_source=source) + unpublish_marketplace_bp( + name=name, + version=version, + app_source=source, + projects=projects, + all_versions=all_versions, + ) diff --git a/calm/dsl/cli/marketplace_runbook_commands.py b/calm/dsl/cli/marketplace_runbook_commands.py index cb84c1a8..8d57c1a0 100644 --- a/calm/dsl/cli/marketplace_runbook_commands.py +++ b/calm/dsl/cli/marketplace_runbook_commands.py @@ -125,7 +125,14 @@ def _describe_marketplace_runbook(name, out, version, source, app_state): "-p", "projects", multiple=True, - help="Projects for marketplace runbook", + help="Add projects to marketplace runbook", +) +@click.option( + "--remove-project", + "-rp", + 
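The new `--project`/`--remove-project` options rely on click's `multiple=True`, which collects every repeated flag into a tuple; a self-contained illustration (the command name is illustrative):

```
import click

@click.command()
@click.option("--project", "-p", "projects", multiple=True)
@click.option("--remove-project", "-rp", "remove_projects", multiple=True)
def approve(projects, remove_projects):
    # `-p a -p b -rp c` yields projects=('a', 'b'), remove_projects=('c',).
    click.echo("add={} remove={}".format(list(projects), list(remove_projects)))

if __name__ == "__main__":
    approve()
```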
"remove_projects", + multiple=True, + help="Remove projects from marketplace runbook", ) @click.option( "--all_projects", @@ -134,7 +141,9 @@ def _describe_marketplace_runbook(name, out, version, source, app_state): default=False, help="Approve runbook to all runbook", ) -def approve_runbook(name, version, category, all_projects, projects=[]): +def approve_runbook( + name, version, category, all_projects, projects=[], remove_projects=[] +): """Approves a marketplace manager runbook""" approve_marketplace_item( @@ -144,6 +153,7 @@ def approve_runbook(name, version, category, all_projects, projects=[]): category=category, all_projects=all_projects, type=MARKETPLACE_ITEM.TYPES.RUNBOOK, + remove_projects=remove_projects, ) @@ -209,7 +219,16 @@ def _publish_marketplace_runbook( type=click.Choice(APP_SOURCES), help="App Source for marketplace runbook", ) -def _update_marketplace_runbook(name, version, category, projects, description, source): +@click.option( + "--all_projects", + "-ap", + is_flag=True, + default=False, + help="Update marketplace runbook with all projects", +) +def _update_marketplace_runbook( + name, version, category, projects, description, source, all_projects +): """Update a marketplace manager runbook""" update_marketplace_item( @@ -219,6 +238,7 @@ def _update_marketplace_runbook(name, version, category, projects, description, projects=projects, description=description, app_source=source, + all_projects=all_projects, type=MARKETPLACE_ITEM.TYPES.RUNBOOK, ) @@ -289,7 +309,7 @@ def _reject_marketplace_runbook(name, version): help="Preserve endpoints publishing runbooks to marketplace", ) @click.option( - "--existing_markeplace_runbook", + "--existing_marketplace_runbook", "-e", is_flag=True, default=False, @@ -333,6 +353,13 @@ def _reject_marketplace_runbook(name, version): @click.option( "--icon_name", "-i", default=None, help="App icon name for marketplace runbook" ) +@click.option( + "--all_projects", + "-ap", + is_flag=True, + default=False, + help="Publishes runbook to all projects", +) def publish_runbook( runbook_name, name, @@ -340,13 +367,14 @@ def publish_runbook( description, with_secrets, with_endpoints, - existing_markeplace_runbook, + existing_marketplace_runbook, publish_to_marketplace, projects=[], category=None, auto_approve=False, icon_name=False, icon_file=None, + all_projects=False, ): """Publish a runbook to marketplace manager""" @@ -354,19 +382,21 @@ def publish_runbook( # Using runbook name as the marketplace runbook name if no name provided name = runbook_name - if not existing_markeplace_runbook: + if not existing_marketplace_runbook: publish_runbook_as_new_marketplace_item( runbook_name=runbook_name, marketplace_item_name=name, version=version, description=description, with_secrets=with_secrets, + with_endpoints=with_endpoints, publish_to_marketplace=publish_to_marketplace, projects=projects, category=category, auto_approve=auto_approve, icon_name=icon_name, icon_file=icon_file, + all_projects=all_projects, ) else: @@ -376,12 +406,14 @@ def publish_runbook( version=version, description=description, with_secrets=with_secrets, + with_endpoints=with_endpoints, publish_to_marketplace=publish_to_marketplace, projects=projects, category=category, auto_approve=auto_approve, icon_name=icon_name, icon_file=icon_file, + all_projects=all_projects, ) diff --git a/calm/dsl/cli/network_group.py b/calm/dsl/cli/network_group.py index 63499813..64a3d5a8 100644 --- a/calm/dsl/cli/network_group.py +++ b/calm/dsl/cli/network_group.py @@ -631,12 +631,17 @@ def 
get_network_group_by_name( return network_group_json -def create_network_group_tunnel_vm(tunnel_vm_payload, tunnel_name): +def reset_network_group_tunnel_vm(tunnel_vm_payload, tunnel_name, delete_old_app=True): client = get_api_client() network_group_json = get_network_group_by_tunnel_name(tunnel_name) + # keep app_uuid for cleanup + app_uuid = ( + network_group_json.get("status", {}).get("resources", {}).get("app_uuid", "") + ) + network_group_uuid = network_group_json.get("metadata", {}).get("uuid") # Update tunnel reference in tunnel_vm_payload @@ -661,6 +666,19 @@ def create_network_group_tunnel_vm(tunnel_vm_payload, tunnel_name): watch_tunnel_creation(network_group_json, create_response) + LOG.info("Tunnel VM created successfully") + + if delete_old_app and app_uuid: + res, err = client.application.delete(app_uuid) + if err: + LOG.error("Failed deleting old app after tunnel reset.") + else: + LOG.info( + "Triggered delete of old app with uuid {0}, previously used for tunnel".format( + app_uuid + ) + ) + def create_network_group_tunnel(payload): @@ -789,7 +807,7 @@ def create_network_group_tunnel_vm_from_dsl( ) LOG.debug("Payload: {}".format(network_group_tunnel_vm_payload)) - network_group_json = create_network_group_tunnel_vm( + network_group_json = reset_network_group_tunnel_vm( network_group_tunnel_vm_payload, network_group_tunnel_name ) diff --git a/calm/dsl/cli/network_group_commands.py b/calm/dsl/cli/network_group_commands.py index 6933968a..cf1ddac6 100644 --- a/calm/dsl/cli/network_group_commands.py +++ b/calm/dsl/cli/network_group_commands.py @@ -157,7 +157,10 @@ def _delete_network_group_tunnel(network_group_tunnel_names): def _reset_network_group_tunnel_vm( network_group_tunnel_vm_file, network_group_tunnel_name ): - """Deploy a new Tunnel VM for a Network Group Tunnel""" + """ + Deploy a new Tunnel VM for a Network Group Tunnel. + Note: Orphan app will be deleted post reset. 
+ """ if network_group_tunnel_vm_file.endswith(".py"): create_network_group_tunnel_vm_from_dsl( diff --git a/calm/dsl/cli/project_commands.py b/calm/dsl/cli/project_commands.py index 7553a515..ff7a4d3c 100644 --- a/calm/dsl/cli/project_commands.py +++ b/calm/dsl/cli/project_commands.py @@ -9,8 +9,9 @@ delete_project, update_project_from_dsl, update_project_using_cli_switches, + decompile_project_command, ) -from .main import create, get, update, delete, describe, compile +from .main import create, get, update, delete, describe, compile, decompile from calm.dsl.log import get_logging_handle LOG = get_logging_handle(__name__) @@ -64,6 +65,27 @@ def _compile_project_command(project_file, out): compile_project_command(project_file, out) +@decompile.command("project", experimental=True) +@click.argument("name", required=False) +@click.option( + "--file", + "-f", + "project_file", + type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True), + help="Path to Project file", +) +@click.option( + "--dir", + "-d", + "project_dir", + default=None, + help="Project directory location used for placing decompiled entities", +) +def _decompile_project_command(name, project_file, project_dir): + """Decompiles project present on server or json file""" + decompile_project_command(name, project_file, project_dir) + + @create.command("project") @click.option( "--file", @@ -86,12 +108,19 @@ def _compile_project_command(project_file, out): default=False, help="if true, cache is not updated for project", ) -def _create_project(project_file, project_name, description, no_cache_update): +@click.option( + "--force", + "-fc", + is_flag=True, + default=False, + help="Deletes existing project with the same name before create, if entities are not associated with it.", +) +def _create_project(project_file, project_name, description, no_cache_update, force): """Creates a project""" if project_file.endswith(".py"): create_project_from_dsl( - project_file, project_name, description, no_cache_update + project_file, project_name, description, no_cache_update, force ) else: LOG.error("Unknown file format") diff --git a/calm/dsl/cli/projects.py b/calm/dsl/cli/projects.py index 1dfe0404..f2d355ea 100644 --- a/calm/dsl/cli/projects.py +++ b/calm/dsl/cli/projects.py @@ -1,4 +1,5 @@ from inspect import getargs +import os import time import click import arrow @@ -19,14 +20,23 @@ from calm.dsl.log import get_logging_handle from calm.dsl.providers import get_provider from calm.dsl.builtins.models.helper.common import get_project -from calm.dsl.cli.quotas import ( +from calm.dsl.builtins import ( + get_valid_identifier, + MetadataType, +) +from calm.dsl.decompile.main import init_decompile_context +from calm.dsl.decompile.decompile_render import create_project_dir +from calm.dsl.decompile.file_handler import get_project_dir +from calm.dsl.builtins.models.helper.quotas import ( _set_quota_state, get_quota_uuid_at_project, create_quota_at_project, set_quota_at_project, + read_quota_resources, ) from calm.dsl.store import Cache, Version from calm.dsl.constants import CACHE, PROJECT_TASK, QUOTA +from calm.dsl.builtins.models.project import ProjectType LOG = get_logging_handle(__name__) @@ -391,7 +401,7 @@ def update_project(project_uuid, project_payload): def create_project_from_dsl( - project_file, project_name, description="", no_cache_update=False + project_file, project_name, description="", no_cache_update=False, force=False ): """Steps: 1. 
Creation of project without env @@ -400,6 +410,34 @@ def create_project_from_dsl( """ client = get_api_client() + if force: + project_name_uuid_map = client.project.get_name_uuid_map() + project_id = project_name_uuid_map.get(project_name) + if project_id: + entities = get_projects_usage(project_name) + if entities: + click.echo(highlight_text("\n-------- Projects usage --------\n")) + for entity in entities: + click.echo( + highlight_text(list(entity.keys())[0]) + + ": " + + highlight_text(list(entity.values())[0]) + ) + click.echo( + highlight_text( + f"\nProject with name {project_name} has entities associated with it, project creation with the same name cannot be forced.\n" + ) + ) + sys.exit(-1) + else: + LOG.info( + f"Forcing the project creation with name {project_name} by deleting the existing project with the same name" + ) + delete_project([project_name]) + else: + LOG.info( + f"Project with name {project_name} does not exist in the system, no need to force the project creation" + ) user_project_module = get_project_module_from_file(project_file) UserProject = get_project_class_from_module(user_project_module) @@ -692,17 +730,30 @@ def describe_project(project_name, out): if not accounts: click.echo(highlight_text("No provider's account registered")) - quota_resources = project_resources.get("resource_domain", {}).get("resources", []) + project_uuid = project["metadata"]["uuid"] + quota_entities = {"project": project_uuid} + + context_obj = get_context() + policy_config = context_obj.get_policy_config() + + # Project level quota values are migrated to Quota API when policy engine is enabled + quota_resources = {} + if policy_config.get("policy_status", "False") == "False": + LOG.info("No Quota Values fetched as policy engine is disabled.") + + # Reading project level quota from quota api if policy engine is enabled + else: + quota_resources = read_quota_resources(client, project_name, quota_entities) + if quota_resources: click.echo("\nQuotas: \n-------") - for qr in quota_resources: - qk = qr["resource_type"] - qv = qr["limit"] - if qr["units"] == "BYTES": + for qk, qv in quota_resources.items(): + qv = qv if qv != -1 else "NA" + if qv != "NA" and qk in QUOTA.RESOURCES_WITH_BYTES_UNIT: qv = qv // 1073741824 qv = str(qv) + " (GiB)" - click.echo("\t{} : {}".format(qk, highlight_text(qv))) + click.echo("\t{} : {}".format(QUOTA.RESOURCES[qk], highlight_text(qv))) def delete_project(project_names, no_cache_update=False): @@ -1503,3 +1554,90 @@ def get_project_usage_payload(project_payload, old_project_payload): } return project_usage_payload + + +def decompile_project_command(name, project_file, project_dir=None): + """helper to decompile project""" + if name and project_file: + LOG.error("Please provide either project file location or server project name") + sys.exit("Both project name and file location provided.") + init_decompile_context() + + if name: + decompile_project_from_server(name=name, project_dir=project_dir) + + elif project_file: + decompile_project_from_file(filename=project_file, project_dir=project_dir) + else: + LOG.error("Please provide either project file location or server project name") + sys.exit("Project name or file location not provided.") + + +def decompile_project_from_server(name, project_dir): + """decompiles the project by fetching it from server""" + + client = get_api_client() + + LOG.info("Fetching project '{}' details".format(name)) + project = get_project(name) + + _decompile_project(project_payload=project, project_dir=project_dir) + + +def 
decompile_project_from_file(filename, project_dir): + """decompile project from local project file""" + + with open(filename) as fd: + project_payload = json.loads(fd.read()) + _decompile_project(project_payload=project_payload, project_dir=project_dir) + + +def _decompile_project(project_payload, project_dir): + """decompiles the project from payload""" + + try: + project_name = project_payload["status"].get("name", "DslProject") + except Exception: + project_name = "DslProject" + LOG.debug("Failed to get default project name.") + project_description = project_payload["status"].get("description", "") + + LOG.info("Decompiling project {}".format(project_name)) + project_cls = ProjectType.decompile(project_payload) + + project_cls.__name__ = get_valid_identifier(project_name) + project_cls.__doc__ = project_description + + create_project_dir( + project_cls=project_cls, + project_dir=project_dir, + ) + click.echo( + "\nSuccessfully decompiled. Directory location: {}. Project location: {}".format( + highlight_text(get_project_dir()), + highlight_text(os.path.join(get_project_dir(), "project.py")), + ) + ) + + +def get_projects_usage(project_name, filter={"filter": {}}): + client = get_api_client() + project_name_uuid_map = client.project.get_name_uuid_map() + project_id = project_name_uuid_map.get(project_name) + res, err = client.project.usage(project_id, filter) + if err: + LOG.error(err) + sys.exit(-1) + + entities = [] + + def collect_entities(usage): + for entity_name, count in usage.items(): + if entity_name not in ["environment", "marketplace_item"]: + if isinstance(count, dict): + collect_entities(count) + continue + if count > 0: + entities.append({entity_name: count}) + + res = res.json() + collect_entities(res["status"]["usage"]) + return entities diff --git a/calm/dsl/cli/scheduler.py b/calm/dsl/cli/scheduler.py index c1d62b0d..d32780ff 100644 --- a/calm/dsl/cli/scheduler.py +++ b/calm/dsl/cli/scheduler.py @@ -376,6 +376,7 @@ def describe_job_command(job_name, out): if expiry_time == "": click.echo("Ends: {}".format(highlight_text("Never"))) else: + expiry_time = int(expiry_time) past = arrow.get(expiry_time).humanize() click.echo( "Ends On: {} ({})".format( diff --git a/calm/dsl/constants.py b/calm/dsl/constants.py index 89b5ac6c..2969e61e 100644 --- a/calm/dsl/constants.py +++ b/calm/dsl/constants.py @@ -27,6 +27,7 @@ class ENTITY: POLICY_EVENT = "policy_event" POLICY_ACTION_TYPE = "policy_action_type" POLICY_ATTRIBUTES = "policy_attributes" + PROTECTION_POLICY = "app_protection_policy" API_ENTITY_KIND_MAP = { "cluster": ENTITY.AHV_CLUSTER, @@ -172,6 +173,14 @@ class TYPE: class AHV: VLAN_1211 = "vlan1211" + NAME = { + TYPE.AHV: "Nutanix", + TYPE.VMWARE: "Vmware", + TYPE.AWS: "Aws", + TYPE.AZURE: "Azure", + TYPE.GCP: "Gcp", + } + class QUOTA(object): class STATE(object): @@ -183,6 +192,10 @@ class ENTITY(object): CLUSTER = "cluster" PROJECT = "project" + RESOURCES = {"disk": "STORAGE", "vcpu": "VCPUS", "memory": "MEMORY"} + + RESOURCES_WITH_BYTES_UNIT = ["disk", "memory"] + class ENTITY: "Entity constants" @@ -203,3 +216,77 @@ class OPENAPI_TYPE: class DSL_CONFIG: EMPTY_PROJECT_NAME = "-" EMPTY_PROJECT_MESSAGE = "Project configuration not available. Use command `calm set config -pj ` to set it." 
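+# A brief illustration, grounded in the values defined just below: VM_POWER_ACTIONS
+# resolves DSL power-action aliases to platform action names, VM_POWER_ACTIONS_REV
+# inverts that mapping, and POWER_ACTION_CAMEL_CASE gives the display form, e.g.
+#   SUBSTRATE.VM_POWER_ACTIONS["__vm_power_on__"]         # -> "action_poweron"
+#   SUBSTRATE.VM_POWER_ACTIONS_REV["action_poweron"]      # -> "__vm_power_on__"
+#   SUBSTRATE.POWER_ACTION_CAMEL_CASE["action_poweron"]   # -> "PowerOn"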
+ + +class SUBSTRATE: + POWER_ON = "action_poweron" + POWER_OFF = "action_poweroff" + RESTART = "action_restart" + CHECK_LOGIN = "action_check_login" + VM_POWER_ACTIONS = { + "__vm_power_on__": POWER_ON, + "__vm_power_off__": POWER_OFF, + "__vm_restart__": RESTART, + "__vm_check_login__": CHECK_LOGIN, + } + VM_POWER_ACTIONS_REV = dict((v, k) for k, v in VM_POWER_ACTIONS.items()) + POWER_ACTION_CAMEL_CASE = { + POWER_ON: "PowerOn", + POWER_OFF: "PowerOff", + RESTART: "Restart", + } + + +class TASKS: + class TASK_TYPES: + GENERIC_OPERATION = "GENERIC_OPERATION" + VMOPERATION_NUTANIX = "VMOPERATION_NUTANIX" + VMOPERATION_VCENTER = "VMOPERATION_VCENTER" + VMOPERATION_AWS_VM = "VMOPERATION_AWS_VM" + VMOPERATION_AZURE_VM = "VMOPERATION_AZURE_VM" + VMOPERATION_GCP_VM = "VMOPERATION_GCP_VM" + PROVISION_NUTANIX = "PROVISION_NUTANIX" + PROVISION_VCENTER = "PROVISION_VCENTER" + PROVISION_AWS_VM = "PROVISION_AWS_VM" + PROVISION_GCP_VM = "PROVISION_GCP_VM" + + UPDATE_NUTANIX = "UPDATE_NUTANIX" + CHECK_LOGIN = "CHECK_LOGIN" + + VM_OPERATION = { + PROVIDER.TYPE.AHV: VMOPERATION_NUTANIX, + PROVIDER.TYPE.VMWARE: VMOPERATION_VCENTER, + PROVIDER.TYPE.AWS: VMOPERATION_AWS_VM, + PROVIDER.TYPE.AZURE: VMOPERATION_AZURE_VM, + PROVIDER.TYPE.GCP: VMOPERATION_GCP_VM, + } + + +class READINESS_PROBE: + ADDRESS = { + PROVIDER.TYPE.AHV: "@@{platform.status.resources.nic_list[0].ip_endpoint_list[0].ip}@@", + PROVIDER.TYPE.VMWARE: "@@{platform.ipAddressList[0]}@@", + PROVIDER.TYPE.AWS: "@@{public_ip_address}@@", + PROVIDER.TYPE.AZURE: "@@{platform.publicIPAddressList[0]}@@", + PROVIDER.TYPE.GCP: "@@{platform.networkInterfaces[0].accessConfigs[0].natIP}@@", + } + + +class CONFIG_TYPE: + class SNAPSHOT: + AHV = "AHV_SNAPSHOT" + VMWARE = "VMWARE_SNAPSHOT" + TYPE = [AHV, VMWARE] + + class RESTORE: + AHV = "AHV_RESTORE" + VMWARE = "VMWARE_RESTORE" + TYPE = [AHV, VMWARE] + + CONFIG_TYPE_MAP = { + "AHV_VM_snapshot": SNAPSHOT.AHV, + "VMWARE_VM_snapshot": SNAPSHOT.VMWARE, + "AHV_VM_restore": RESTORE.AHV, + "VMWARE_VM_restore": RESTORE.VMWARE, + "patch": "PATCH", + } diff --git a/calm/dsl/db/table_config.py b/calm/dsl/db/table_config.py index cb44ec50..f38e5240 100644 --- a/calm/dsl/db/table_config.py +++ b/calm/dsl/db/table_config.py @@ -83,6 +83,22 @@ def get_detail_dict(self): ) ) + @classmethod + def get_detail_dict_list(cls, query_obj): + """ + This helper returns multiple matching instances for a given query + Args: + query_obj (peewee.ModelSelect object): containing multiple matching instance + objects for a query + Returns: + entity_details (list): list of dicts containing each entity's data fetched from the database + """ + raise NotImplementedError( + "'get_detail_dict_list' helper not implemented for {} table".format( + cls.get_cache_type() + ) + ) + @classmethod def get_provider_plugin(self, provider_type="AHV_VM"): """returns the provider plugin""" @@ -2885,7 +2901,7 @@ class Meta: class AppProtectionPolicyCache(CacheTableBase): - __cache_type__ = "app_protection_policy" + __cache_type__ = CACHE.ENTITY.PROTECTION_POLICY feature_min_version = "3.3.0" is_policy_required = False name = CharField() @@ -2909,6 +2925,13 @@ def get_detail_dict(self, *args, **kwargs): "last_update_time": self.last_update_time, } + @classmethod + def get_detail_dict_list(cls, query_obj, *args, **kwargs): + entity_details = [] + for entity in query_obj: + entity_details.append(entity.get_detail_dict()) + return entity_details + @classmethod def clear(cls): """removes entire data from table""" @@ -3039,6 +3062,15 @@ def get_entity_data(cls, name, **kwargs): 
except DoesNotExist: return None + @classmethod + def get_entity_data_using_uuid(cls, uuid, **kwargs): + try: + query_obj = super().select().where(cls.uuid == uuid) + return cls.get_detail_dict_list(query_obj) + + except DoesNotExist: + return [] + class Meta: database = dsl_database primary_key = CompositeKey("name", "uuid", "rule_uuid") @@ -4684,10 +4716,27 @@ class Meta: class VersionTable(BaseModel): name = CharField() version = CharField() + pc_ip = CharField() last_update_time = DateTimeField(default=datetime.datetime.now()) + @classmethod + def clear(cls): + """removes entire data from table""" + for db_entity in cls.select(): + db_entity.delete_instance() + + @classmethod + def get_entity_data(cls, name): + query_obj = {"name": name} + + try: + entity = super().get(**query_obj) + return entity.get_detail_dict() + except DoesNotExist: + return dict() + def get_detail_dict(self): - return {"name": self.name, "version": self.version} + return {"name": self.name, "pc_ip": self.pc_ip, "version": self.version} def highlight_text(text, **kwargs): diff --git a/calm/dsl/decompile/action.py b/calm/dsl/decompile/action.py index c2ac9834..5058dd1d 100644 --- a/calm/dsl/decompile/action.py +++ b/calm/dsl/decompile/action.py @@ -4,16 +4,25 @@ from calm.dsl.decompile.task import render_task_template from calm.dsl.decompile.parallel_task import render_parallel_task_template from calm.dsl.decompile.task_tree import render_task_tree_template +from calm.dsl.decompile.endpoint import render_endpoint from calm.dsl.decompile.variable import render_variable_template from calm.dsl.builtins import action, ActionType +from calm.dsl.constants import SUBSTRATE from calm.dsl.log import get_logging_handle +from calm.dsl.decompile.ref_dependency import get_power_action_substrate_map LOG = get_logging_handle(__name__) RUNBOOK_ACTION_MAP = {} def render_action_template( - cls, entity_context="", CONFIG_SPEC_MAP={}, context="", secrets_dict=[] + cls, + entity_context="", + CONFIG_SPEC_MAP={}, + context="", + secrets_dict=[], + endpoints=[], + ep_list=[], ): global RUNBOOK_ACTION_MAP @@ -64,14 +73,52 @@ def render_action_template( ) ) - if not (variables or tasks): + # not returning vm power actions, even if they don't have tasks, so they can be + # included in the substrate class after decompilation; this is required to give + # a valid reference to custom actions which use them at profile/service level + if not (variables or tasks) and ( + cls.name not in list(SUBSTRATE.VM_POWER_ACTIONS.keys()) + ): return "" + """ + Brief: + 1. yields the following in the substrate class if any custom action uses power actions: + def __vm_power_on__(): + pass + 2. yields "" if no custom action uses power actions. + + Detail: + Only vm power actions skip the previous check, to include empty-definition power actions + in the substrate class. e.g. + def __vm_power_on__(): + pass + This is necessary to give a valid reference to custom actions using these power + actions at profile/service level. But if there are no custom actions using + power actions at any level then we don't need to include empty-definition power actions. + Therefore, we return without making the power action definitions in this case by using + get_power_action_substrate_map(), which returns an empty dict if there are no custom actions using + power actions. 
+ """ + if cls.name in list(SUBSTRATE.VM_POWER_ACTIONS.keys()) and ( + not get_power_action_substrate_map() + ): + return "" + + # get rendered endpoints to be rendered by blueprint + for ind, task in enumerate(runbook.tasks): + ep = task.exec_target_reference + if ep: + if ep.name in ep_list: + continue + endpoints.append(render_endpoint(ep)) + ep_list.append(ep.name) user_attrs = { "name": cls.__name__, "description": cls.__doc__ or "", "tasks": tasks, "variables": variables, + "endpoints": endpoints, } gui_display_name = getattr(cls, "name", "") or cls.__name__ diff --git a/calm/dsl/decompile/ahv_vm_nic.py b/calm/dsl/decompile/ahv_vm_nic.py index 9a786a86..0004ae35 100644 --- a/calm/dsl/decompile/ahv_vm_nic.py +++ b/calm/dsl/decompile/ahv_vm_nic.py @@ -20,6 +20,11 @@ def render_ahv_vm_nic(cls): network_function_nic_type = cls.network_function_nic_type user_attrs = {} + ip_endpoints = [] + for ip in cls.ip_endpoint_list: + ip_endpoints.append((ip.get_dict()["ip"])) + if ip_endpoints: + user_attrs["ip_endpoint_list"] = ip_endpoints subnet_uuid = subnet_ref.get("uuid", "") if subnet_uuid.startswith("@@{") and subnet_uuid.endswith("}@@"): user_attrs["subnet_name"] = subnet_uuid diff --git a/calm/dsl/decompile/ahv_vm_resources.py b/calm/dsl/decompile/ahv_vm_resources.py index 49f5f0b3..624d784e 100644 --- a/calm/dsl/decompile/ahv_vm_resources.py +++ b/calm/dsl/decompile/ahv_vm_resources.py @@ -46,5 +46,9 @@ def render_ahv_vm_resources(cls, boot_config, vm_name_prefix=""): cls.guest_customization, vm_name_prefix=vm_name_prefix ) + user_attrs["boot_type"] = "LEGACY" # default boot type is legacy + if user_attrs.get("boot_config", {}): + user_attrs["boot_type"] = user_attrs["boot_config"].get("boot_type", None) + text = render_template(schema_file="ahv_vm_resources.py.jinja2", obj=user_attrs) return text.strip() diff --git a/calm/dsl/decompile/bp_file_helper.py b/calm/dsl/decompile/bp_file_helper.py index acda165e..be770698 100644 --- a/calm/dsl/decompile/bp_file_helper.py +++ b/calm/dsl/decompile/bp_file_helper.py @@ -1,4 +1,6 @@ import click +import sys +import traceback import os from io import StringIO import json @@ -18,11 +20,14 @@ from calm.dsl.decompile.blueprint import render_blueprint_template from calm.dsl.decompile.metadata import render_metadata_template from calm.dsl.decompile.variable import get_secret_variable_files +from calm.dsl.decompile.ref_dependency import update_entity_gui_dsl_name from calm.dsl.decompile.file_handler import get_local_dir from calm.dsl.builtins import BlueprintType, ServiceType, PackageType from calm.dsl.builtins import DeploymentType, ProfileType, SubstrateType from calm.dsl.builtins import get_valid_identifier from calm.dsl.log import get_logging_handle +from calm.dsl.builtins import ConfigAttrs +from calm.dsl.decompile.config_spec import render_config_attr_template LOG = get_logging_handle(__name__) @@ -44,7 +49,9 @@ def render_bp_file_template( user_attrs["description"] = cls.__doc__ secrets_dict = [] - + # endpoints contains rendered endpoints, and ep_list contains the names in a list to avoid duplication + endpoints = [] + ep_list = [] # Find default cred default_cred = cls.default_cred default_cred_name = getattr(default_cred, "name", "") or getattr( @@ -112,6 +119,10 @@ def render_bp_file_template( deployments.extend(profile.deployments) for dep in deployments: add_edges(entity_edges, dep.get_ref().name, profile.get_ref().name) + for patch_config_attr in profile.patch_list: + entity_name_text_map[ + 
get_valid_identifier(patch_config_attr.patch_attrs[0].__name__) + ] = patch_config_attr.patch_attrs[0] for deployment in deployments: entity_name_text_map[deployment.get_ref().name] = deployment @@ -142,10 +153,10 @@ def render_bp_file_template( if contains_encrypted_secrets: try: secret_val = cred_file_dict[file_name] - except Exception: - import pdb - - pdb.set_trace() + except Exception as exp: + LOG.debug("Got traceback\n{}".format(traceback.format_exc())) + LOG.error("Secret value not found due to {}".format(exp)) + sys.exit(-1) else: secret_val = click.prompt( "\nValue for {}".format(file_name), @@ -160,23 +171,70 @@ def render_bp_file_template( dependepent_entities = [] dependepent_entities = get_ordered_entities(entity_name_text_map, entity_edges) + # Constructing map of patch attribute class name to update config name + patch_attr_update_config_map = {} + for k, v in enumerate(dependepent_entities): + if isinstance(v, ProfileType): + if not v.patch_list: + continue + for update_config in v.patch_list: + patch_attr_name = update_config.patch_attrs[0].__name__ + update_config_name = get_valid_identifier(update_config.__name__) + patch_attr_update_config_map[patch_attr_name] = update_config_name + + # Constructing reverse map of above + update_config_patch_attr_map = dict( + (v, k) for k, v in patch_attr_update_config_map.items() + ) + + # Setting dsl class and gui display name of each entity at the beginning. + # Case: when vm power actions are used at service level, the dsl class name of the substrate is needed. + # As the service class is rendered before the substrate, we need to explicitly create the substrate ui dsl map initially. + # This will help in targeting the correct substrate for vm power actions + # TODO move all gui to dsl class mapping to entity.py + for k, v in enumerate(dependepent_entities): + update_entity_gui_dsl_name(v.get_gui_name(), v.__name__) + # Rendering templates for k, v in enumerate(dependepent_entities): if isinstance(v, ServiceType): - dependepent_entities[k] = render_service_template(v, secrets_dict) + dependepent_entities[k] = render_service_template( + v, secrets_dict, endpoints=endpoints, ep_list=ep_list + ) + + elif isinstance(v, ConfigAttrs): + dependepent_entities[k] = render_config_attr_template( + v, + patch_attr_update_config_map, + secrets_dict, + endpoints=endpoints, + ep_list=ep_list, + ) elif isinstance(v, PackageType): - dependepent_entities[k] = render_package_template(v, secrets_dict) + dependepent_entities[k] = render_package_template( + v, secrets_dict, endpoints=endpoints, ep_list=ep_list + ) elif isinstance(v, ProfileType): - dependepent_entities[k] = render_profile_template(v, secrets_dict) + dependepent_entities[k] = render_profile_template( + v, + update_config_patch_attr_map, + secrets_dict, + endpoints=endpoints, + ep_list=ep_list, + ) elif isinstance(v, DeploymentType): dependepent_entities[k] = render_deployment_template(v) elif isinstance(v, SubstrateType): dependepent_entities[k] = render_substrate_template( - v, vm_images=vm_images, secrets_dict=secrets_dict + v, + vm_images=vm_images, + secrets_dict=secrets_dict, + endpoints=endpoints, + ep_list=ep_list, ) is_any_secret_value_available = False @@ -191,7 +249,7 @@ def render_bp_file_template( blueprint = render_blueprint_template(cls) - # Rendere blueprint metadata + # Render blueprint metadata metadata_str = render_metadata_template(metadata_obj) user_attrs.update( @@ -203,6 +261,7 @@ def render_bp_file_template( "blueprint": blueprint, "metadata": metadata_str, "contains_encrypted_secrets": 
contains_encrypted_secrets, + "endpoints": endpoints, + } ) diff --git a/calm/dsl/decompile/config_spec.py b/calm/dsl/decompile/config_spec.py index e8ceab85..f47ef8f7 100644 --- a/calm/dsl/decompile/config_spec.py +++ b/calm/dsl/decompile/config_spec.py @@ -1,9 +1,20 @@ +import re +import json + from calm.dsl.decompile.render import render_template from calm.dsl.builtins import ConfigSpecType, get_valid_identifier +from calm.dsl.constants import PROVIDER from calm.dsl.log import get_logging_handle +from calm.dsl.builtins import ConfigAttrs +from calm.dsl.decompile.action import render_action_template +from calm.dsl.decompile.ahv_vm_disk import render_ahv_vm_disk +from calm.dsl.decompile.ahv_vm_nic import render_ahv_vm_nic +from calm.dsl.decompile.ref_dependency import get_entity_gui_dsl_name LOG = get_logging_handle(__name__) +CONFIG_SPEC_MAP = {} + def render_restore_config_template(cls, entity_context): LOG.debug("Rendering {} restore config template".format(cls.__name__)) @@ -14,10 +25,22 @@ def render_restore_config_template(cls, entity_context): user_attrs = dict() user_attrs["name"] = _user_attrs["name"] or cls.__name__ attrs = _user_attrs["attrs_list"][0] - user_attrs["target"] = get_valid_identifier( - attrs["target_any_local_reference"]["name"] - ) - user_attrs["delete_vm_post_restore"] = attrs["delete_vm_post_restore"] + user_attrs["target"] = attrs["target_any_local_reference"]["name"] + + # Mapping target to its corresponding dsl class + user_attrs["target"] = get_entity_gui_dsl_name(user_attrs["target"]) + + user_attrs["description"] = attrs.get("snapshot_description", "") + user_attrs["delete_vm_post_restore"] = attrs.get("delete_vm_post_restore", None) + + if _user_attrs["type"] == "AHV_RESTORE": + user_attrs["provider"] = "Ahv" + user_attrs["delete_vm_post_restore"] = attrs["delete_vm_post_restore"] + elif _user_attrs["type"] == "VMWARE_RESTORE": + user_attrs["provider"] = "Vmware" + else: + LOG.warning("Given restore type not supported for decompilation") + text = render_template(schema_file="restore_config.py.jinja2", obj=user_attrs) return text.strip() @@ -34,15 +57,184 @@ def render_snapshot_config_template(cls, entity_context, CONFIG_SPEC_MAP): _user_attrs["config_references"][0].name ]["local_name"] attrs = _user_attrs["attrs_list"][0] - user_attrs["target"] = get_valid_identifier( - attrs["target_any_local_reference"]["name"] - ) + user_attrs["target"] = attrs["target_any_local_reference"]["name"] + + # Mapping target to its corresponding dsl class + user_attrs["target"] = get_entity_gui_dsl_name(user_attrs["target"]) + user_attrs["num_of_replicas"] = attrs["num_of_replicas"] + user_attrs["description"] = attrs.get("snapshot_description", "") + user_attrs["snapshot_location_type"] = attrs.get("snapshot_location_type", None) + + if _user_attrs["type"] == "AHV_SNAPSHOT": + user_attrs["provider"] = "Ahv" + elif _user_attrs["type"] == "VMWARE_SNAPSHOT": + user_attrs["provider"] = "Vmware" + else: + LOG.warning("Given snapshot type not supported for decompilation") - # TODO fix App Protection policy model, decompilation is wrong and in compilation also metadata project is not considered - # if attrs.get("app_protection_policy_reference", {}).get("name", {}): - # user_attrs["policy"] = attrs["app_protection_policy_reference"]["name"] - # if attrs.get("app_protection_rule_reference", {}).get("name", {}): - # user_attrs["rule"] = attrs["app_protection_rule_reference"]["name"] + if attrs.get("app_protection_policy_reference", {}).get("name", {}): + user_attrs["policy"] = 
attrs["app_protection_policy_reference"]["name"] + if attrs.get("app_protection_rule_reference", {}).get("name", {}): + user_attrs["rule"] = attrs["app_protection_rule_reference"]["name"] text = render_template(schema_file="snapshot_config.py.jinja2", obj=user_attrs) return text.strip() + + +def render_patch_field_ahv_nic(cls): + LOG.debug("Rendering patch field ahv nic template") + + _user_attrs = cls.get_user_attrs() + nic_value = cls.nic_value + + if cls.operation == "add": + _user_attrs["nic_data"] = render_ahv_vm_nic(nic_value) + + text = render_template(schema_file="patch_field_ahv_nic.py.jinja2", obj=_user_attrs) + return text.strip() + + +def render_patch_field_ahv_disk(cls): + LOG.debug("Rendering patch field ahv disk template") + + _user_attrs = cls.get_user_attrs() + disk_value = cls.disk_value + + if cls.disk_operation == "add": + _user_attrs["disk_data"] = render_ahv_vm_disk(disk_value, {}) + + # converting values from miB to giB + _user_attrs["value"] = ( + str(int(_user_attrs["value"]) // 1024) if _user_attrs["value"] else "" + ) + _user_attrs["max_val"] = ( + str(int(_user_attrs["max_value"]) // 1024) if _user_attrs["max_value"] else "" + ) + _user_attrs["min_val"] = ( + str(int(_user_attrs["min_value"]) // 1024) if _user_attrs["min_value"] else "" + ) + + text = render_template( + schema_file="patch_field_ahv_disk.py.jinja2", obj=_user_attrs + ) + return text.strip() + + +def render_patch_field_category_template(cls): + LOG.debug("Rendering patch field category template") + category_value = cls["value"] + + # convert category_value string ('TemplateType:Vm') to dictionary {'TemplateType': 'Vm'} using regex + pattern = r"(\w+):(\w+)" + category_value = re.sub(pattern, r'{"\1": "\2"}', category_value) + cls["value"] = json.loads(category_value) + + text = render_template(schema_file="patch_field_category.py.jinja2", obj=cls) + return text.strip() + + +def render_update_config_template(cls, patch_attr_name): + LOG.debug("Rendering {} patch config template".format(cls.__name__)) + if not isinstance(cls, ConfigSpecType): + raise TypeError("{} is not of type {}".format(cls, ConfigSpecType)) + + _user_attrs = cls.get_user_attrs() + user_attrs = dict() + user_attrs["name"] = _user_attrs["name"] or cls.__name__ + attrs = _user_attrs["attrs_list"][0] + user_attrs["target"] = attrs["target_any_local_reference"]["name"] + + # Mapping target to it's corresponding dsl class + user_attrs["target"] = get_entity_gui_dsl_name(user_attrs["target"]) + user_attrs["patch_attr"] = patch_attr_name + + text = render_template(schema_file="update_config.py.jinja2", obj=user_attrs) + return text.strip() + + +def render_config_attr_template( + cls, patch_attr_update_config_map, secrets_dict=[], endpoints=[], ep_list=[] +): + + LOG.debug("Rendering {} Update Config Attr template".format(cls.__name__)) + if not isinstance(cls, ConfigAttrs): + raise TypeError("{} is not of type {}".format(cls, ConfigAttrs)) + + # Entity context + entity_context = "UpdateConfigAttr_" + cls.__name__ + context = "update_config_attr." + (getattr(cls, "name", "") or cls.__name__) + "." 
+ user_attrs = cls.get_user_attrs() + user_attrs["name"] = get_valid_identifier( + patch_attr_update_config_map[cls.__name__] + "_Update" + cls.__name__ + ) + user_attrs["description"] = cls.__doc__ or "" + user_attrs["disk_delete"] = cls.disk_delete + user_attrs["categories_delete"] = cls.categories_delete + user_attrs["nic_delete"] = cls.nic_delete + user_attrs["categories_add"] = cls.categories_add + memory_obj = user_attrs.get("memory", None) + + if memory_obj: + # max_value, min_value are strictly of type int + memory_obj.max_value = int(memory_obj.max_value / 1024) + memory_obj.min_value = int(memory_obj.min_value / 1024) + user_attrs["memory"] = memory_obj.get_dict() + + # Converting memory values from MiB to GiB + value = user_attrs["memory"]["value"] + + if value: + value = int(value) / 1024.0 + # Handling case with trailing zero decimal. Converts 2.0 to "2", 2.5 to "2.5" + user_attrs["memory"]["value"] = ( + str(int(value)) if value.is_integer() else str(value) + ) + + vcpu_obj = user_attrs.get("vcpu", None) + if vcpu_obj: + user_attrs["vcpu"] = vcpu_obj.get_dict() + + numsocket_obj = user_attrs.get("numsocket", None) + if numsocket_obj: + user_attrs["numsocket"] = numsocket_obj.get_dict() + + category_list = [] + for _, entity in enumerate(user_attrs.get("categories", [])): + if entity: + category_list.append(render_patch_field_category_template(entity)) + + disk_list = [] + for _, entity in enumerate(user_attrs.get("disks", [])): + if entity: + disk_list.append(render_patch_field_ahv_disk(entity)) + + nic_list = [] + for _, entity in enumerate(user_attrs.get("nics", [])): + if entity: + nic_list.append(render_patch_field_ahv_nic(entity)) + + action_list = [] + for action in user_attrs.get("actions", []): + action_list.append( + render_action_template( + action, + entity_context, + CONFIG_SPEC_MAP, + secrets_dict=secrets_dict, + context=context, + endpoints=endpoints, + ep_list=ep_list, + ) + ) + + user_attrs["actions"] = action_list + user_attrs["category_list"] = ", ".join( + category for category in category_list if category + ) + user_attrs["disk_list"] = ", ".join(disk_list) + user_attrs["nic_list"] = ", ".join(nic for nic in nic_list if nic) + + text = render_template("update_config_attr.py.jinja2", obj=user_attrs) + + return text.strip() diff --git a/calm/dsl/decompile/decompile_render.py b/calm/dsl/decompile/decompile_render.py index 197bd75a..1415068a 100644 --- a/calm/dsl/decompile/decompile_render.py +++ b/calm/dsl/decompile/decompile_render.py @@ -4,7 +4,14 @@ from calm.dsl.log import get_logging_handle from calm.dsl.decompile.bp_file_helper import render_bp_file_template from calm.dsl.decompile.runbook import render_runbook_template -from calm.dsl.decompile.file_handler import init_bp_dir, init_runbook_dir +from calm.dsl.decompile.file_handler import ( + init_bp_dir, + init_runbook_dir, + init_environment_dir, + init_project_dir, +) +from calm.dsl.decompile.environments import render_environment_template +from calm.dsl.decompile.projects import render_project_template LOG = get_logging_handle(__name__) @@ -23,6 +30,20 @@ def create_runbook_file(dir_name, runbook_data): fd.write(runbook_data) +def create_project_file(dir_name, project_data): + + project_path = os.path.join(dir_name, "project.py") + with open(project_path, "w") as fd: + fd.write(project_data) + + +def create_environment_file(dir_name, environment_data): + + environment_path = os.path.join(dir_name, "environment.py") + with open(environment_path, "w") as fd: + fd.write(environment_data) + + +def 
create_bp_dir( bp_cls=None, bp_dir=None, @@ -73,3 +94,49 @@ def create_runbook_dir( runbook_data = format_str(runbook_data, mode=FileMode()) LOG.info("Creating runbook file") create_runbook_file(runbook_dir, runbook_data) + + +def create_project_dir( + project_cls=None, + project_dir=None, + credentials=None, +): + if not project_dir: + project_dir = os.path.join(os.getcwd(), project_cls.__name__) + + LOG.info("Creating project directory") + _, _, _, _ = init_project_dir(project_dir) + LOG.info("Rendering project file template") + project_data = render_project_template( + project_cls=project_cls, + credentials=credentials, + ) + + LOG.info("Formatting project file using black") + project_data = format_str(project_data, mode=FileMode()) + LOG.info("Creating project file") + create_project_file(project_dir, project_data) + + +def create_environment_dir( + environment_cls=None, + environment_dir=None, + metadata_obj=None, + credentials=None, +): + if not environment_dir: + environment_dir = os.path.join(os.getcwd(), environment_cls.__name__) + + LOG.info("Creating environment directory") + _, _, _, _ = init_environment_dir(environment_dir) + LOG.info("Rendering environment file template") + environment_data = render_environment_template( + environment_cls=environment_cls, + credentials=credentials, + metadata_obj=metadata_obj, + ) + + LOG.info("Formatting environment file using black") + environment_data = format_str(environment_data, mode=FileMode()) + LOG.info("Creating environment file") + create_environment_file(environment_dir, environment_data) diff --git a/calm/dsl/decompile/environments.py b/calm/dsl/decompile/environments.py new file mode 100644 index 00000000..94d9e89d --- /dev/null +++ b/calm/dsl/decompile/environments.py @@ -0,0 +1,94 @@ +from calm.dsl.decompile.render import render_template +from calm.dsl.decompile.ref import render_ref_template +from calm.dsl.decompile.provider import render_provider_template +from calm.dsl.decompile.substrate import render_substrate_template +from calm.dsl.builtins.models.environment import EnvironmentType +from .decompile_helpers import process_variable_name +from calm.dsl.decompile.variable import get_secret_variable_files +from calm.dsl.builtins import get_valid_identifier + +from calm.dsl.decompile.credential import ( + render_credential_template, + get_cred_files, + get_cred_var_name, +) + +from calm.dsl.log import get_logging_handle + +LOG = get_logging_handle(__name__) + + +def render_environment_template( + environment_cls, + metadata_obj=None, + entity_context="", + CONFIG_SPEC_MAP={}, + credentials=[], +): + LOG.debug("Rendering {} environment template".format(environment_cls.__name__)) + if not isinstance(environment_cls, EnvironmentType): + raise TypeError("{} is not of type {}".format(environment_cls, EnvironmentType)) + + # Update entity context + entity_context = entity_context + "_Environment_" + environment_cls.__name__ + + environment_name = getattr(environment_cls, "name", "") or environment_cls.__name__ + + rendered_credential_list = [] + credentials_list = [] + for cred in credentials: + rendered_credential_list.append(render_credential_template(cred)) + credentials_list.append(get_cred_var_name(cred.name)) + + # Getting the local files used for secrets + secret_files = get_secret_variable_files() + secret_files.extend(get_cred_files()) + + class_name = "ENV_{}".format(get_valid_identifier(environment_cls.__name__)) + + user_attrs = { + "name": class_name, + "credentials": rendered_credential_list, + "credentials_list": 
credentials_list, + "secret_files": secret_files, + } + + rendered_substrates_list = [] + + # holds substrate class names to include in decompiled 'Environment' class + substrates_list = [] + + substrate_name_counter = 1 + + if environment_cls.substrates: + for substrate in environment_cls.substrates: + if substrate.__name__ in substrates_list: + new_name = "{}_{}".format(substrate.name, str(substrate_name_counter)) + substrate.__name__ = get_valid_identifier( + new_name + ) # creating valid python class name for substrate + rendered_substrates_list.append(render_substrate_template(substrate)) + substrates_list.append(substrate.__name__) + substrate_name_counter += 1 + else: + substrate.__name__ = get_valid_identifier( + substrate.name + ) # creating valid python class name for substrate + rendered_substrates_list.append(render_substrate_template(substrate)) + substrates_list.append(substrate.__name__) + + user_attrs["substrates"] = rendered_substrates_list + user_attrs["substrates_list"] = substrates_list + + rendered_providers_list = [] + if environment_cls.providers: + for provider in environment_cls.providers: + rendered_providers_list.append(render_provider_template(provider)) + user_attrs["providers"] = rendered_providers_list + + gui_display_name = getattr(environment_cls, "name", "") or environment_cls.__name__ + if gui_display_name != environment_cls.__name__: + user_attrs["gui_display_name"] = gui_display_name + + text = render_template(schema_file="environments.py.jinja2", obj=user_attrs) + return text.strip() diff --git a/calm/dsl/decompile/file_handler.py b/calm/dsl/decompile/file_handler.py index ef277d02..a70492ae 100644 --- a/calm/dsl/decompile/file_handler.py +++ b/calm/dsl/decompile/file_handler.py @@ -46,6 +46,46 @@ def make_runbook_dirs(runbook_dir): return (runbook_dir, local_dir, scripts_dir) +def make_project_dirs(project_dir): + + if not os.path.isdir(project_dir): + os.makedirs(project_dir) + + local_dir = os.path.join(project_dir, LOCAL_DIR_KEY) + if not os.path.isdir(local_dir): + os.makedirs(local_dir) + + spec_dir = os.path.join(project_dir, SPECS_DIR_KEY) + if not os.path.isdir(spec_dir): + os.makedirs(spec_dir) + + scripts_dir = os.path.join(project_dir, SCRIPTS_DIR_KEY) + if not os.path.isdir(scripts_dir): + os.makedirs(scripts_dir) + + return (project_dir, local_dir, spec_dir, scripts_dir) + + +def make_environment_dirs(environment_dir): + + if not os.path.isdir(environment_dir): + os.makedirs(environment_dir) + + local_dir = os.path.join(environment_dir, LOCAL_DIR_KEY) + if not os.path.isdir(local_dir): + os.makedirs(local_dir) + + spec_dir = os.path.join(environment_dir, SPECS_DIR_KEY) + if not os.path.isdir(spec_dir): + os.makedirs(spec_dir) + + scripts_dir = os.path.join(environment_dir, SCRIPTS_DIR_KEY) + if not os.path.isdir(scripts_dir): + os.makedirs(scripts_dir) + + return (environment_dir, local_dir, spec_dir, scripts_dir) + + def init_bp_dir(bp_dir): global LOCAL_DIR, SCRIPTS_DIR, SPECS_DIR, BP_DIR @@ -62,6 +102,24 @@ def init_runbook_dir(runbook_dir): return (RUNBOOK_DIR, LOCAL_DIR, SCRIPTS_DIR) +def init_project_dir(project_dir): + + global LOCAL_DIR, SCRIPTS_DIR, SPECS_DIR, PROJECT_DIR + PROJECT_DIR, LOCAL_DIR, SPECS_DIR, SCRIPTS_DIR = make_project_dirs(project_dir) + + return (PROJECT_DIR, LOCAL_DIR, SPECS_DIR, SCRIPTS_DIR) + + +def init_environment_dir(environment_dir): + + global LOCAL_DIR, SCRIPTS_DIR, SPECS_DIR, ENVIRONMENT_DIR + ENVIRONMENT_DIR, LOCAL_DIR, SPECS_DIR, SCRIPTS_DIR = make_environment_dirs( + environment_dir + ) + + return 
(ENVIRONMENT_DIR, LOCAL_DIR, SPECS_DIR, SCRIPTS_DIR) + + def get_bp_dir(): return BP_DIR @@ -70,6 +128,18 @@ def get_runbook_dir(): return RUNBOOK_DIR + +def get_project_dir(): + return PROJECT_DIR + + +def get_environment_dir(): + return ENVIRONMENT_DIR + + def get_local_dir(): return LOCAL_DIR diff --git a/calm/dsl/decompile/main.py b/calm/dsl/decompile/main.py index 891839f9..68702430 100644 --- a/calm/dsl/decompile/main.py +++ b/calm/dsl/decompile/main.py @@ -1,5 +1,6 @@ from calm.dsl.decompile.action import init_action_globals from calm.dsl.decompile.credential import init_cred_globals +from calm.dsl.decompile.ndb import init_NDB_globals from calm.dsl.decompile.variable import init_variable_globals from calm.dsl.decompile.ref_dependency import init_ref_dependency_globals from calm.dsl.decompile.file_handler import init_file_globals @@ -13,3 +14,4 @@ def init_decompile_context(): init_file_globals() init_ref_dependency_globals() init_variable_globals() + init_NDB_globals() diff --git a/calm/dsl/decompile/ndb.py b/calm/dsl/decompile/ndb.py index 4182d532..cde98ba3 100644 --- a/calm/dsl/decompile/ndb.py +++ b/calm/dsl/decompile/ndb.py @@ -1,5 +1,6 @@ import sys import json +import os from calm.dsl.db.table_config import ResourceTypeCache from calm.dsl.builtins.models.ndb import ( @@ -16,11 +17,14 @@ from calm.dsl.builtins.models.helper import common as common_helper from calm.dsl.constants import CACHE from calm.dsl.store import Cache +from calm.dsl.tools import get_escaped_quotes_string +from calm.dsl.decompile.file_handler import get_local_dir from calm.dsl.log import get_logging_handle LOG = get_logging_handle(__name__) KEY_SEPERATOR = "_" +NDB_FILES = [] entities_map = { NutanixDBConst.Attrs.DATABASE: { @@ -83,7 +87,10 @@ } -def set_ndb_calm_reference(inarg_var_name, inarg_var_value): +def set_ndb_calm_reference(inarg_var_name, inarg_var_value, secret_file_name=""): + + # Adding backslash if quotes present in string + inarg_var_value = get_escaped_quotes_string(inarg_var_value) if not common_helper.is_not_macro(inarg_var_value): return {"value": inarg_var_value, "type": "Non_Ref"} @@ -129,7 +136,12 @@ def set_ndb_calm_reference(inarg_var_name, inarg_var_value): ) return {"value": [], "type": "Non_Ref"} else: - return {"value": inarg_var_value, "type": "Non_Ref"} + _type = "Non_Ref" + if secret_file_name: + create_file_from_file_name(secret_file_name) + inarg_var_value = secret_file_name + _type = "Non_Ref_Secret" + return {"value": inarg_var_value, "type": _type} def create_ndb_task_user_attrs( @@ -167,6 +179,8 @@ def create_ndb_task_user_attrs( for inarg in inarg_list: modified_var_name = "" + secret_file_name = "" + modified_task_name = task_name.lower().replace(" ", "_") if len(inarg["name"]) > len(rt_task) + 2: modified_var_name = inarg["name"][len(rt_task) + 2 :] @@ -176,10 +190,19 @@ HIDDEN_SUFFIX ) ): + if inarg.get("type", "") == "SECRET": + secret_file_name = "{}_{}_{}_{}".format( + NutanixDBConst.NDB, + modified_task_name, + DatabaseServer.name, + database_server_reverse_field_map[modified_var_name], + ) database_server_attrs[ database_server_reverse_field_map[modified_var_name] ] = set_ndb_calm_reference( - database_server_reverse_field_map[modified_var_name], inarg["value"] + database_server_reverse_field_map[modified_var_name], + inarg["value"], + secret_file_name, ) elif ( modified_var_name in database_reverse_field_map @@ -187,10 +210,19 @@ 
HIDDEN_SUFFIX ) ): + if inarg.get("type", "") == "SECRET": + secret_file_name = "{}_{}_{}_{}".format( + NutanixDBConst.NDB, + modified_task_name, + Database.name, + database_reverse_field_map[modified_var_name], + ) database_attrs[ database_reverse_field_map[modified_var_name] ] = set_ndb_calm_reference( - database_reverse_field_map[modified_var_name], inarg["value"] + database_reverse_field_map[modified_var_name], + inarg["value"], + secret_file_name, ) elif ( modified_var_name in time_machine_reverse_field_map @@ -198,10 +230,19 @@ def create_ndb_task_user_attrs( HIDDEN_SUFFIX ) ): + if inarg.get("type", "") == "SECRET": + secret_file_name = "{}_{}_{}_{}".format( + NutanixDBConst.NDB, + modified_task_name, + TimeMachine.name, + time_machine_reverse_field_map[modified_var_name], + ) time_machine_attrs[ time_machine_reverse_field_map[modified_var_name] ] = set_ndb_calm_reference( - time_machine_reverse_field_map[modified_var_name], inarg["value"] + time_machine_reverse_field_map[modified_var_name], + inarg["value"], + secret_file_name, ) elif modified_var_name in tag_reverse_field_map and not tag_reverse_field_map[ modified_var_name @@ -364,11 +405,13 @@ def get_schema_file_and_user_attrs_for_postgres_create_snapshot( def get_schema_file_and_user_attrs(task_name, attrs, account_name): + resource_type_name = attrs.get("resource_type_reference", {}).get("name", "") action_name = attrs.get("action_reference", {}).get("name", "") resource_type_cached_data = ResourceTypeCache.get_entity_data( name=resource_type_name, provider_name="NDB" ) + if not resource_type_cached_data: LOG.error("resource_type not found in NDB provider") sys.exit( @@ -409,3 +452,28 @@ def get_schema_file_and_user_attrs(task_name, attrs, account_name): action_name, resource_type_cached_data["name"] ) ) + + +def create_file_from_file_name(file_name): + """create a file on local directory and add to global file stack for given file name""" + file_loc = os.path.join(get_local_dir(), file_name) + + # Storing empty value in the file + with open(file_loc, "w+") as fd: + fd.write("") + + NDB_FILES.append(file_name) + + +def get_NDB_files(): + """Returns the NDB files created for NDB secrets value""" + + global NDB_FILES + return NDB_FILES + + +def init_NDB_globals(): + """Reinitialises global vars used for NDB secrets value""" + + global NDB_FILES + NDB_FILES = [] diff --git a/calm/dsl/decompile/package.py b/calm/dsl/decompile/package.py index 3160f8f8..9552b720 100644 --- a/calm/dsl/decompile/package.py +++ b/calm/dsl/decompile/package.py @@ -9,7 +9,7 @@ LOG = get_logging_handle(__name__) -def render_package_template(cls, secrets_dict=[]): +def render_package_template(cls, secrets_dict=[], endpoints=[], ep_list=[]): LOG.debug("Rendering {} package template".format(cls.__name__)) if not isinstance(cls, PackageType): @@ -53,6 +53,8 @@ def render_package_template(cls, secrets_dict=[]): entity_context, secrets_dict=secrets_dict, context=context, + endpoints=endpoints, + ep_list=ep_list, ) ) @@ -65,6 +67,8 @@ def render_package_template(cls, secrets_dict=[]): entity_context, secrets_dict=secrets_dict, context=context, + endpoints=endpoints, + ep_list=ep_list, ) ) diff --git a/calm/dsl/decompile/profile.py b/calm/dsl/decompile/profile.py index 71c83d23..89fa9416 100644 --- a/calm/dsl/decompile/profile.py +++ b/calm/dsl/decompile/profile.py @@ -1,11 +1,12 @@ from calm.dsl.decompile.render import render_template -from calm.dsl.builtins import ProfileType +from calm.dsl.builtins import ProfileType, get_valid_identifier from 
calm.dsl.decompile.action import render_action_template from calm.dsl.decompile.variable import render_variable_template from calm.dsl.decompile.ref_dependency import update_profile_name from calm.dsl.decompile.config_spec import ( render_snapshot_config_template, render_restore_config_template, + render_update_config_template, ) from calm.dsl.log import get_logging_handle @@ -14,7 +15,9 @@ CONFIG_SPEC_MAP = {} -def render_profile_template(cls, secrets_dict=[]): +def render_profile_template( + cls, update_config_patch_attr_map, secrets_dict=[], endpoints=[], ep_list=[] +): LOG.debug("Rendering {} profile template".format(cls.__name__)) if not isinstance(cls, ProfileType): @@ -56,12 +59,20 @@ def render_profile_template(cls, secrets_dict=[]): render_snapshot_config_template(entity, entity_context, CONFIG_SPEC_MAP) ) update_config_list = [] - for idx, entity in enumerate(user_attrs.get("update_configs", [])): + for idx, entity in enumerate(user_attrs.get("patch_list", [])): CONFIG_SPEC_MAP[entity.name] = { "global_name": "{}.update_configs[{}]".format(cls.__name__, idx), "local_name": "update_configs[{}]".format(idx), } - update_config_list.append(render_update_config_template(entity, entity_context)) + + entity_name = get_valid_identifier(entity.name) + patch_attr_name = update_config_patch_attr_map[entity_name] + patch_attr_name = get_valid_identifier( + entity_name + "_Update" + patch_attr_name + ) + update_config_list.append( + render_update_config_template(entity, patch_attr_name) + ) action_list = [] for action in user_attrs.get("actions", []): @@ -72,6 +83,8 @@ def render_profile_template(cls, secrets_dict=[]): CONFIG_SPEC_MAP, secrets_dict=secrets_dict, context=context, + endpoints=endpoints, + ep_list=ep_list, ) ) @@ -90,6 +103,7 @@ def render_profile_template(cls, secrets_dict=[]): user_attrs["variables"] = variable_list user_attrs["deployments"] = ", ".join(deployment_list) user_attrs["actions"] = action_list + user_attrs["patch_list"] = ", ".join(update_config_list) user_attrs["restore_configs"] = ", ".join(restore_config_list) user_attrs["snapshot_configs"] = ", ".join(snapshot_config_list) diff --git a/calm/dsl/decompile/projects.py b/calm/dsl/decompile/projects.py new file mode 100644 index 00000000..21e45625 --- /dev/null +++ b/calm/dsl/decompile/projects.py @@ -0,0 +1,61 @@ +import sys + +from calm.dsl.decompile.render import render_template +from calm.dsl.decompile.ref import render_ref_template +from calm.dsl.decompile.provider import render_provider_template +from calm.dsl.decompile.user import render_user_template +from calm.dsl.decompile.quotas import render_quotas_template +from calm.dsl.builtins.models.project import ProjectType +from .decompile_helpers import process_variable_name +from calm.dsl.builtins import get_valid_identifier +from calm.dsl.store import Cache + +from calm.dsl.decompile.credential import ( + render_credential_template, + get_cred_files, + get_cred_var_name, +) + +from calm.dsl.log import get_logging_handle + +LOG = get_logging_handle(__name__) + + +def render_project_template( + project_cls, + entity_context="", + CONFIG_SPEC_MAP={}, + credentials=[], +): + LOG.debug("Rendering {} project template".format(project_cls.__name__)) + if not isinstance(project_cls, ProjectType): + sys.exit("{} is not of type {}".format(project_cls, ProjectType)) + # Update entity context + entity_context = entity_context + "_Project_" + project_cls.__name__ + + project_name = getattr(project_cls, "name", "") or project_cls.__name__ + + user_attrs = { + "name": project_cls.__name__, + 
} + + rendered_providers_list = [] + if project_cls.providers: + for provider in project_cls.providers: + rendered_providers_list.append(render_provider_template(provider)) + user_attrs["providers"] = rendered_providers_list + + rendered_users_list = [] + if project_cls.users: + for user in project_cls.users: + rendered_users_list.append(render_user_template(user)) + user_attrs["users"] = rendered_users_list + + if project_cls.quotas: + quotas = render_quotas_template(project_cls.quotas) + user_attrs["quotas"] = quotas + + gui_display_name = getattr(project_cls, "name", "") or project_cls.__name__ + if gui_display_name != project_cls.__name__: + user_attrs["gui_display_name"] = gui_display_name + + text = render_template(schema_file="projects.py.jinja2", obj=user_attrs) + return text.strip() diff --git a/calm/dsl/decompile/provider.py b/calm/dsl/decompile/provider.py new file mode 100644 index 00000000..c3e8af1b --- /dev/null +++ b/calm/dsl/decompile/provider.py @@ -0,0 +1,21 @@ +from calm.dsl.decompile.render import render_template +from calm.dsl.log import get_logging_handle + +LOG = get_logging_handle(__name__) + + +def render_provider_template(cls): + LOG.debug("Rendering {} provider template".format(cls.type)) + + if cls.type == "nutanix_pc": + schema_file = "provider_ntnx.py.jinja2" + elif cls.type == "aws": + schema_file = "provider_aws.py.jinja2" + elif cls.type == "gcp": + schema_file = "provider_gcp.py.jinja2" + elif cls.type == "vmware": + schema_file = "provider_vmware.py.jinja2" + + user_attrs = cls.get_user_attrs() + text = render_template(schema_file=schema_file, obj=user_attrs) + return text.strip() diff --git a/calm/dsl/decompile/quotas.py b/calm/dsl/decompile/quotas.py new file mode 100644 index 00000000..481a55fc --- /dev/null +++ b/calm/dsl/decompile/quotas.py @@ -0,0 +1,17 @@ +from calm.dsl.decompile.render import render_template +from calm.dsl.log import get_logging_handle + +LOG = get_logging_handle(__name__) + + +def render_quotas_template(cls): + LOG.debug("Rendering quotas template") + + schema_file = "quotas.py.jinja2" + user_attrs = { + "vcpus": cls["vcpus"], + "storage": cls["storage"], + "memory": cls["memory"], + } + text = render_template(schema_file=schema_file, obj=user_attrs) + return text.strip() diff --git a/calm/dsl/decompile/ref.py b/calm/dsl/decompile/ref.py index a535e631..78605e4c 100644 --- a/calm/dsl/decompile/ref.py +++ b/calm/dsl/decompile/ref.py @@ -5,7 +5,7 @@ get_service_name, get_endpoint_name, get_profile_name, - get_substrate_name, + get_entity_gui_dsl_name, ) from calm.dsl.decompile.ref_dependency import get_package_name, get_deployment_name @@ -35,7 +35,7 @@ def render_ref_template(cls): if cls_name: user_attrs["name"] = cls_name elif kind == "app_substrate": - cls_name = get_substrate_name(user_attrs["name"]) + cls_name = get_entity_gui_dsl_name(user_attrs["name"]) if cls_name: user_attrs["name"] = cls_name elif kind == "app_blueprint_deployment": diff --git a/calm/dsl/decompile/ref_dependency.py b/calm/dsl/decompile/ref_dependency.py index a822afc4..8e58b361 100644 --- a/calm/dsl/decompile/ref_dependency.py +++ b/calm/dsl/decompile/ref_dependency.py @@ -1,10 +1,12 @@ +import types from .decompile_helpers import process_variable_name SERVICE_NAME_MAP = {} PROFILE_NAME_MAP = {} -SUBSTRATE_NAME_MAP = {} +ENTITY_GUI_DSL_NAME_MAP = {} PACKAGE_NAME_MAP = {} DEPLOYMENT_NAME_MAP = {} +POWER_ACTION_SUBSTRATE_MAP = {} def get_service_name(name): @@ -34,17 +36,16 @@ def update_profile_name(ui_name, dsl_name): PROFILE_NAME_MAP[ui_name] = dsl_name 
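Note on the new project decompile modules above (projects.py, provider.py, quotas.py): render_project_template stitches the provider, user, and quota renderers into one projects.py.jinja2 render, while render_quotas_template is a thin wrapper over quotas.py.jinja2. A minimal sketch of driving the quota renderer directly, assuming the calm.dsl package (with its bundled schema files) is importable; the quota values are illustrative:

```
# Minimal sketch; assumes the calm.dsl package and its jinja2 schemas
# are importable. Quota values are illustrative, not from a real project.
from calm.dsl.decompile.quotas import render_quotas_template

quotas = {"vcpus": 4, "storage": 1073741824, "memory": 2147483648}
print(render_quotas_template(quotas))
# roughly: {"vcpus": 4, "storage": 1073741824, "memory": 2147483648}
```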
diff --git a/calm/dsl/decompile/ref_dependency.py b/calm/dsl/decompile/ref_dependency.py
index a822afc4..8e58b361 100644
--- a/calm/dsl/decompile/ref_dependency.py
+++ b/calm/dsl/decompile/ref_dependency.py
@@ -1,10 +1,12 @@
+import types
 from .decompile_helpers import process_variable_name

 SERVICE_NAME_MAP = {}
 PROFILE_NAME_MAP = {}
-SUBSTRATE_NAME_MAP = {}
+ENTITY_GUI_DSL_NAME_MAP = {}
 PACKAGE_NAME_MAP = {}
 DEPLOYMENT_NAME_MAP = {}
+POWER_ACTION_SUBSTRATE_MAP = {}


 def get_service_name(name):
@@ -34,17 +36,16 @@ def update_profile_name(ui_name, dsl_name):
     PROFILE_NAME_MAP[ui_name] = dsl_name


-def get_substrate_name(name):
-    """returns the class name used for entity ref"""
+def get_entity_gui_dsl_name(ui_name):

-    global SUBSTRATE_NAME_MAP
-    return SUBSTRATE_NAME_MAP.get(name, None)
+    global ENTITY_GUI_DSL_NAME_MAP
+    return ENTITY_GUI_DSL_NAME_MAP.get(ui_name, None)


-def update_substrate_name(ui_name, dsl_name):
+def update_entity_gui_dsl_name(ui_name, dsl_name):

-    global SUBSTRATE_NAME_MAP
-    SUBSTRATE_NAME_MAP[ui_name] = dsl_name
+    global ENTITY_GUI_DSL_NAME_MAP
+    ENTITY_GUI_DSL_NAME_MAP[ui_name] = dsl_name


 def get_package_name(name):
@@ -77,12 +78,34 @@ def update_deployment_name(ui_name, dsl_name):
     DEPLOYMENT_NAME_MAP[ui_name] = dsl_name


+def get_power_action_target_substrate(runbook_name):
+    """returns the substrate name used for power action runbook"""
+
+    global POWER_ACTION_SUBSTRATE_MAP
+    return POWER_ACTION_SUBSTRATE_MAP.get(runbook_name, None)
+
+
+def get_power_action_substrate_map():
+    global POWER_ACTION_SUBSTRATE_MAP
+
+    # returning immutable proxy of dict to prevent modification of original dict.
+    return types.MappingProxyType(POWER_ACTION_SUBSTRATE_MAP)
+
+
+def update_power_action_target_substrate(runbook_name, substrate_name):
+    """updates power action runbook name to substrate name mapping"""
+
+    global POWER_ACTION_SUBSTRATE_MAP
+    POWER_ACTION_SUBSTRATE_MAP[runbook_name] = substrate_name
+
+
 def init_ref_dependency_globals():
-    global SERVICE_NAME_MAP, PROFILE_NAME_MAP, SUBSTRATE_NAME_MAP, PACKAGE_NAME_MAP, DEPLOYMENT_NAME_MAP
+    global SERVICE_NAME_MAP, PROFILE_NAME_MAP, ENTITY_GUI_DSL_NAME_MAP, PACKAGE_NAME_MAP, DEPLOYMENT_NAME_MAP, POWER_ACTION_SUBSTRATE_MAP

     SERVICE_NAME_MAP = {}
     PROFILE_NAME_MAP = {}
-    SUBSTRATE_NAME_MAP = {}
+    ENTITY_GUI_DSL_NAME_MAP = {}
     PACKAGE_NAME_MAP = {}
     DEPLOYMENT_NAME_MAP = {}
+    POWER_ACTION_SUBSTRATE_MAP = {}
diff --git a/calm/dsl/decompile/runbook.py b/calm/dsl/decompile/runbook.py
index 117ac047..8f55d7b8 100644
--- a/calm/dsl/decompile/runbook.py
+++ b/calm/dsl/decompile/runbook.py
@@ -12,6 +12,8 @@
     get_cred_files,
     get_cred_var_name,
 )
+from calm.dsl.decompile.ndb import get_NDB_files
+
 from calm.dsl.decompile.decompile_helpers import process_variable_name
 from calm.dsl.builtins import CalmEndpoint as Endpoint
 from calm.dsl.builtins.models.runbook import RunbookType, runbook
@@ -85,9 +87,17 @@ def render_runbook_template(
     )
     variables = []
     for variable in runbook_cls.variables:
-        variables.append(render_variable_template(variable, entity_context))
+        variables.append(
+            render_variable_template(
+                variable,
+                entity_context,
+                credentials_list=credentials_list,
+                rendered_credential_list=rendered_credential_list,
+            )
+        )
     secret_files = get_secret_variable_files()
     secret_files.extend(get_cred_files())
+    secret_files.extend(get_NDB_files())
     if not (variables or tasks):
         return ""
     import_status = False
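Note: get_power_action_substrate_map deliberately hands out a types.MappingProxyType so callers can read the runbook-to-substrate mapping without mutating it; all writes go through update_power_action_target_substrate. A small standard-library-only sketch of the behaviour this buys (map contents are illustrative):

```
import types

# Read-only view over a plain dict, mirroring the decompiler's pattern.
POWER_ACTION_SUBSTRATE_MAP = {"start_runbook": "Substrate_1"}
proxy = types.MappingProxyType(POWER_ACTION_SUBSTRATE_MAP)

print(proxy["start_runbook"])  # reads pass through: Substrate_1
try:
    proxy["start_runbook"] = "Substrate_2"
except TypeError as err:
    print(err)  # 'mappingproxy' object does not support item assignment

# Writes to the underlying dict stay visible through the proxy.
POWER_ACTION_SUBSTRATE_MAP["stop_runbook"] = "Substrate_1"
print(dict(proxy))
```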
diff --git a/calm/dsl/decompile/schemas/ahv_direct_egress_nic.py.jinja2 b/calm/dsl/decompile/schemas/ahv_direct_egress_nic.py.jinja2
index 9e26a7cc..529bae90 100644
--- a/calm/dsl/decompile/schemas/ahv_direct_egress_nic.py.jinja2
+++ b/calm/dsl/decompile/schemas/ahv_direct_egress_nic.py.jinja2
@@ -1,5 +1,5 @@
 {%- macro ahv_direct_egress_nic(obj) -%}
-AhvVmNic.DirectNic.egress("{{obj.subnet_name}}" {%- if obj.cluster_name %}, cluster="{{obj.cluster_name}}"{%- elif obj.vpc_name %}, vpc="{{obj.vpc_name}}"{%- endif %})
+AhvVmNic.DirectNic.egress("{{obj.subnet_name}}" {%- if obj.cluster_name %}, cluster="{{obj.cluster_name}}"{%- elif obj.vpc_name %}, vpc="{{obj.vpc_name}}"{%- endif %} {%- if obj.ip_endpoint_list %}, ip_endpoints={{obj.ip_endpoint_list}}{%- endif %})
 {% endmacro %}
 {{ ahv_direct_egress_nic(obj) }}
diff --git a/calm/dsl/decompile/schemas/ahv_direct_ingress_nic.py.jinja2 b/calm/dsl/decompile/schemas/ahv_direct_ingress_nic.py.jinja2
index 51009bb5..048cabd1 100644
--- a/calm/dsl/decompile/schemas/ahv_direct_ingress_nic.py.jinja2
+++ b/calm/dsl/decompile/schemas/ahv_direct_ingress_nic.py.jinja2
@@ -1,5 +1,5 @@
 {%- macro ahv_direct_ingress_nic(obj) -%}
-AhvVmNic.DirectNic.ingress("{{obj.subnet_name}}" {%- if obj.cluster_name %}, cluster="{{obj.cluster_name}}"{%- elif obj.vpc_name %}, vpc="{{obj.vpc_name}}"{%- endif %})
+AhvVmNic.DirectNic.ingress("{{obj.subnet_name}}" {%- if obj.cluster_name %}, cluster="{{obj.cluster_name}}"{%- elif obj.vpc_name %}, vpc="{{obj.vpc_name}}"{%- endif %} {%- if obj.ip_endpoint_list %}, ip_endpoints={{obj.ip_endpoint_list}}{%- endif %})
 {% endmacro %}
 {{ ahv_direct_ingress_nic(obj) }}
diff --git a/calm/dsl/decompile/schemas/ahv_direct_tap_nic.py.jinja2 b/calm/dsl/decompile/schemas/ahv_direct_tap_nic.py.jinja2
index 59a876bc..c7695ece 100644
--- a/calm/dsl/decompile/schemas/ahv_direct_tap_nic.py.jinja2
+++ b/calm/dsl/decompile/schemas/ahv_direct_tap_nic.py.jinja2
@@ -1,5 +1,5 @@
 {%- macro ahv_direct_tap_nic(obj) -%}
-AhvVmNic.DirectNic.tap("{{obj.subnet_name}}" {%- if obj.cluster_name %}, cluster="{{obj.cluster_name}}"{%- elif obj.vpc_name %}, vpc="{{obj.vpc_name}}"{%- endif %})
+AhvVmNic.DirectNic.tap("{{obj.subnet_name}}" {%- if obj.cluster_name %}, cluster="{{obj.cluster_name}}"{%- elif obj.vpc_name %}, vpc="{{obj.vpc_name}}"{%- endif %} {%- if obj.ip_endpoint_list %}, ip_endpoints={{obj.ip_endpoint_list}}{%- endif %})
 {% endmacro %}
 {{ ahv_direct_tap_nic(obj) }}
diff --git a/calm/dsl/decompile/schemas/ahv_normal_egress_nic.py.jinja2 b/calm/dsl/decompile/schemas/ahv_normal_egress_nic.py.jinja2
index f0d1ee59..36080244 100644
--- a/calm/dsl/decompile/schemas/ahv_normal_egress_nic.py.jinja2
+++ b/calm/dsl/decompile/schemas/ahv_normal_egress_nic.py.jinja2
@@ -1,5 +1,5 @@
 {%- macro ahv_normal_egress_nic(obj) -%}
-AhvVmNic.NormalNic.egress("{{obj.subnet_name}}" {%- if obj.cluster_name %}, cluster="{{obj.cluster_name}}"{%- elif obj.vpc_name %}, vpc="{{obj.vpc_name}}"{%- endif %})
+AhvVmNic.NormalNic.egress("{{obj.subnet_name}}" {%- if obj.cluster_name %}, cluster="{{obj.cluster_name}}"{%- elif obj.vpc_name %}, vpc="{{obj.vpc_name}}"{%- endif %} {%- if obj.ip_endpoint_list %}, ip_endpoints={{obj.ip_endpoint_list}}{%- endif %})
 {% endmacro %}
 {{ ahv_normal_egress_nic(obj) }}
diff --git a/calm/dsl/decompile/schemas/ahv_normal_ingress_nic.py.jinja2 b/calm/dsl/decompile/schemas/ahv_normal_ingress_nic.py.jinja2
index a9743c5f..2b18f421 100644
--- a/calm/dsl/decompile/schemas/ahv_normal_ingress_nic.py.jinja2
+++ b/calm/dsl/decompile/schemas/ahv_normal_ingress_nic.py.jinja2
@@ -1,5 +1,5 @@
 {%- macro ahv_normal_ingress_nic(obj) -%}
-AhvVmNic.NormalNic.ingress("{{obj.subnet_name}}" {%- if obj.cluster_name %}, cluster="{{obj.cluster_name}}"{%- elif obj.vpc_name %}, vpc="{{obj.vpc_name}}"{%- endif %})
+AhvVmNic.NormalNic.ingress("{{obj.subnet_name}}" {%- if obj.cluster_name %}, cluster="{{obj.cluster_name}}"{%- elif obj.vpc_name %}, vpc="{{obj.vpc_name}}"{%- endif %} {%- if obj.ip_endpoint_list %}, ip_endpoints={{obj.ip_endpoint_list}}{%- endif %})
 {% endmacro %}
 {{ ahv_normal_ingress_nic(obj) }}
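Note: every NIC macro in this change (the five above and the tap variant below) gains the same optional ip_endpoint_list branch, so a decompiled NIC only grows an ip_endpoints=... argument when the spec actually carries one and existing output is unchanged. A trimmed, runnable rendition of the changed macro; subnet and cluster names are illustrative:

```
from jinja2 import Template

# Trimmed version of the modified NIC macro from this diff.
t = Template(
    'AhvVmNic.NormalNic.ingress("{{obj.subnet_name}}"'
    '{%- if obj.cluster_name %}, cluster="{{obj.cluster_name}}"{%- endif %}'
    ' {%- if obj.ip_endpoint_list %}, ip_endpoints={{obj.ip_endpoint_list}}{%- endif %})'
)
print(t.render(obj={"subnet_name": "vlan.0", "cluster_name": "c1"}))
# AhvVmNic.NormalNic.ingress("vlan.0", cluster="c1")
print(t.render(obj={"subnet_name": "vlan.0", "ip_endpoint_list": ["10.44.76.141"]}))
# AhvVmNic.NormalNic.ingress("vlan.0", ip_endpoints=['10.44.76.141'])
```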
diff --git a/calm/dsl/decompile/schemas/ahv_normal_tap_nic.py.jinja2 b/calm/dsl/decompile/schemas/ahv_normal_tap_nic.py.jinja2
index ae18bef6..7d6bfd8c 100644
--- a/calm/dsl/decompile/schemas/ahv_normal_tap_nic.py.jinja2
+++ b/calm/dsl/decompile/schemas/ahv_normal_tap_nic.py.jinja2
@@ -1,5 +1,5 @@
 {%- macro ahv_normal_tap_nic(obj) -%}
-AhvVmNic.NormalNic.tap("{{obj.subnet_name}}" {%- if obj.cluster_name %}, cluster="{{obj.cluster_name}}"{%- elif obj.vpc_name %}, vpc="{{obj.vpc_name}}"{%- endif %})
+AhvVmNic.NormalNic.tap("{{obj.subnet_name}}" {%- if obj.cluster_name %}, cluster="{{obj.cluster_name}}"{%- elif obj.vpc_name %}, vpc="{{obj.vpc_name}}"{%- endif %} {%- if obj.ip_endpoint_list %}, ip_endpoints={{obj.ip_endpoint_list}}{%- endif %})
 {% endmacro %}
 {{ ahv_normal_tap_nic(obj) }}
diff --git a/calm/dsl/decompile/schemas/ahv_vm_resources.py.jinja2 b/calm/dsl/decompile/schemas/ahv_vm_resources.py.jinja2
index ac077671..850f8924 100644
--- a/calm/dsl/decompile/schemas/ahv_vm_resources.py.jinja2
+++ b/calm/dsl/decompile/schemas/ahv_vm_resources.py.jinja2
@@ -9,6 +9,8 @@ class {{obj.name}}(AhvVmResources):
     {% if obj.gpus %}gpus = [{{obj.gpus}}]{% endif %}
     {% if obj.guest_customization %}guest_customization = {{obj.guest_customization}}{% endif %}
     {% if obj.serial_ports %}serial_ports = {{obj.serial_ports}}{% endif %}
+    {% if obj.power_state %}power_state = "{{obj.power_state}}"{% endif %}
+    {% if obj.boot_type %}boot_type = "{{obj.boot_type}}"{% endif %}

 {% endmacro %}
 {{ ahv_vm_resources(obj) }}
diff --git a/calm/dsl/decompile/schemas/bp_file_helper.py.jinja2 b/calm/dsl/decompile/schemas/bp_file_helper.py.jinja2
index 2dfce84e..eb55337f 100644
--- a/calm/dsl/decompile/schemas/bp_file_helper.py.jinja2
+++ b/calm/dsl/decompile/schemas/bp_file_helper.py.jinja2
@@ -9,7 +9,7 @@
 import json  #no_qa
 import os  #no_qa
 from calm.dsl.builtins import *  #no_qa
-
+from calm.dsl.runbooks import CalmEndpoint as Endpoint
 # Secret Variables
 {% if obj.contains_encrypted_secrets %}
 # Note: Don't modify file data, as it is the encoded secrets fetched from the server
@@ -23,6 +23,13 @@
 {{cred}}
 {%- endfor %}

+{%- if obj.endpoints %}
+# Endpoints
+{%- for endpoint in obj.endpoints %}
+{{endpoint}}
+{%- endfor %}
+{% endif %}
+
 {% for vm_image in obj.vm_images %}
 {{vm_image}}
 {% endfor %}
diff --git a/calm/dsl/decompile/schemas/environments.py.jinja2 b/calm/dsl/decompile/schemas/environments.py.jinja2
new file mode 100644
index 00000000..b5141f41
--- /dev/null
+++ b/calm/dsl/decompile/schemas/environments.py.jinja2
@@ -0,0 +1,33 @@
+{% macro environment(obj) %}
+# THIS FILE IS AUTOMATICALLY GENERATED.
+# Disclaimer: Please test this file before using in production.
+""" +Generated environment DSL (.py) +""" +import os + +from calm.dsl.builtins import * +{% if obj.credentials %} +# Secret Variables +{%- for var_file in obj.secret_files %} +{{var_file}} = read_local_file('{{var_file}}') +{%- endfor %} +{%- endif %} +{%- for cred in obj.credentials %} +{{cred}} +{%- endfor %} +{% for entity in obj.substrates %} +{{entity}} +{% endfor %} +class {{obj.name}}(Environment): + {% if obj.substrates_list %}substrates = {{obj.substrates_list | replace("'","")}} {% endif %} + {% if obj.credentials_list %}credentials = {{obj.credentials_list | replace("'","")}}{% endif %} + {% if obj.providers %} + providers = [ + {% for entity in obj.providers %} + {{entity}} + {% endfor %} + ] + {% endif %} +{% endmacro %} +{{ environment(obj) }} \ No newline at end of file diff --git a/calm/dsl/decompile/schemas/ndb_postgres_clone.py.jinja2 b/calm/dsl/decompile/schemas/ndb_postgres_clone.py.jinja2 index 4ea490cc..7546dffd 100644 --- a/calm/dsl/decompile/schemas/ndb_postgres_clone.py.jinja2 +++ b/calm/dsl/decompile/schemas/ndb_postgres_clone.py.jinja2 @@ -1,7 +1,7 @@ {% macro database_server_clone(obj) %} DatabaseServer.Postgres.Clone( {%- for key, val in obj.items() %} - {{ key }}={% if val.type == "Ref" %}{{val.ref}}("{{val.value}}"){% else %} "{{val.value}}" {% endif %}, + {{ key }}={% if val.type == "Ref" %}{{val.ref}}("{{val.value}}"){% elif val.type == "Non_Ref_Secret" %}{{val.value}}{% else %} "{{val.value}}" {% endif %}, {%- endfor %} ) {% endmacro %} @@ -9,7 +9,7 @@ DatabaseServer.Postgres.Clone( {% macro database_clone(obj) %} Database.Postgres.Clone( {%- for key, val in obj.items() %} - {{ key }}={% if val.type == "Ref" %}{{val.ref}}("{{val.value}}"){% else %} "{{val.value}}" {% endif %}, + {{ key }}={% if val.type == "Ref" %}{{val.ref}}("{{val.value}}"){% elif val.type == "Non_Ref_Secret" %}{{val.value}}{% else %} "{{val.value}}" {% endif %}, {%- endfor %} ) {% endmacro %} @@ -17,7 +17,7 @@ Database.Postgres.Clone( {% macro time_machine_clone(obj) %} TimeMachine.Postgres.Clone( {%- for key, val in obj.items() %} - {{ key }}={% if val.type == "Ref" %}{{val.ref}}("{{val.value}}"){% else %} "{{val.value}}" {% endif %}, + {{ key }}={% if val.type == "Ref" %}{{val.ref}}("{{val.value}}"){% elif val.type == "Non_Ref_Secret" %}{{val.value}}{% else %} "{{val.value}}" {% endif %}, {%- endfor %} ) {% endmacro %} diff --git a/calm/dsl/decompile/schemas/ndb_postgres_create.py.jinja2 b/calm/dsl/decompile/schemas/ndb_postgres_create.py.jinja2 index 1b6ba6f5..774f22fc 100644 --- a/calm/dsl/decompile/schemas/ndb_postgres_create.py.jinja2 +++ b/calm/dsl/decompile/schemas/ndb_postgres_create.py.jinja2 @@ -1,7 +1,7 @@ {% macro database_server_create(obj) %} DatabaseServer.Postgres.Create( {%- for key, val in obj.items() %} - {{ key }}={% if val.type == "Ref" %}{{val.ref}}("{{val.value}}"){% else %} "{{val.value}}" {% endif %}, + {{ key }}={% if val.type == "Ref" %}{{val.ref}}("{{val.value}}"){% elif val.type == "Non_Ref_Secret" %}{{val.value}}{% else %} "{{val.value}}" {% endif %}, {%- endfor %} ) {% endmacro %} @@ -9,7 +9,7 @@ DatabaseServer.Postgres.Create( {% macro database_create(obj) %} Database.Postgres.Create( {%- for key, val in obj.items() %} - {{ key }}={% if val.type == "Ref" %}{{val.ref}}("{{val.value}}"){% else %} "{{val.value}}" {% endif %}, + {{ key }}={% if val.type == "Ref" %}{{val.ref}}("{{val.value}}"){% elif val.type == "Non_Ref_Secret" %}{{val.value}}{% else %} "{{val.value}}" {% endif %}, {%- endfor %} ) {% endmacro %} @@ -17,7 +17,7 @@ Database.Postgres.Create( 
diff --git a/calm/dsl/decompile/schemas/ndb_postgres_create.py.jinja2 b/calm/dsl/decompile/schemas/ndb_postgres_create.py.jinja2
index 1b6ba6f5..774f22fc 100644
--- a/calm/dsl/decompile/schemas/ndb_postgres_create.py.jinja2
+++ b/calm/dsl/decompile/schemas/ndb_postgres_create.py.jinja2
@@ -1,7 +1,7 @@
 {% macro database_server_create(obj) %}
 DatabaseServer.Postgres.Create(
 {%- for key, val in obj.items() %}
-    {{ key }}={% if val.type == "Ref" %}{{val.ref}}("{{val.value}}"){% else %} "{{val.value}}" {% endif %},
+    {{ key }}={% if val.type == "Ref" %}{{val.ref}}("{{val.value}}"){% elif val.type == "Non_Ref_Secret" %}{{val.value}}{% else %} "{{val.value}}" {% endif %},
 {%- endfor %}
 )
 {% endmacro %}
@@ -9,7 +9,7 @@ DatabaseServer.Postgres.Create(
 {% macro database_create(obj) %}
 Database.Postgres.Create(
 {%- for key, val in obj.items() %}
-    {{ key }}={% if val.type == "Ref" %}{{val.ref}}("{{val.value}}"){% else %} "{{val.value}}" {% endif %},
+    {{ key }}={% if val.type == "Ref" %}{{val.ref}}("{{val.value}}"){% elif val.type == "Non_Ref_Secret" %}{{val.value}}{% else %} "{{val.value}}" {% endif %},
 {%- endfor %}
 )
 {% endmacro %}
@@ -17,7 +17,7 @@ Database.Postgres.Create(
 {% macro time_machine_create(obj) %}
 TimeMachine.Postgres.Create(
 {%- for key, val in obj.items() %}
-    {{ key }}={% if val.type == "Ref" %}{{val.ref}}("{{val.value}}"){% else %} "{{val.value}}" {% endif %},
+    {{ key }}={% if val.type == "Ref" %}{{val.ref}}("{{val.value}}"){% elif val.type == "Non_Ref_Secret" %}{{val.value}}{% else %} "{{val.value}}" {% endif %},
 {%- endfor %}
 )
 {% endmacro %}
diff --git a/calm/dsl/decompile/schemas/ndb_postgres_create_snapshot.py.jinja2 b/calm/dsl/decompile/schemas/ndb_postgres_create_snapshot.py.jinja2
index 24102d83..82a5d2b8 100644
--- a/calm/dsl/decompile/schemas/ndb_postgres_create_snapshot.py.jinja2
+++ b/calm/dsl/decompile/schemas/ndb_postgres_create_snapshot.py.jinja2
@@ -1,7 +1,7 @@
 {% macro database_create_snapshot(obj) %}
 Database.Postgres.CreateSnapshot(
 {%- for key, val in obj.items() %}
-    {{ key }}={% if val.type == "Ref" %}{{val.ref}}("{{val.value}}"){% else %} "{{val.value}}" {% endif %},
+    {{ key }}={% if val.type == "Ref" %}{{val.ref}}("{{val.value}}"){% elif val.type == "Non_Ref_Secret" %}{{val.value}}{% else %} "{{val.value}}" {% endif %},
 {%- endfor %}
 )
 {% endmacro %}
diff --git a/calm/dsl/decompile/schemas/ndb_postgres_delete.py.jinja2 b/calm/dsl/decompile/schemas/ndb_postgres_delete.py.jinja2
index b1f10284..af95a898 100644
--- a/calm/dsl/decompile/schemas/ndb_postgres_delete.py.jinja2
+++ b/calm/dsl/decompile/schemas/ndb_postgres_delete.py.jinja2
@@ -1,7 +1,7 @@
 {% macro database_delete(obj) %}
 Database.Postgres.Delete(
 {%- for key, val in obj.items() %}
-    {{ key }}={% if val.type == "Ref" %}{{val.ref}}("{{val.value}}"){% else %} "{{val.value}}" {% endif %},
+    {{ key }}={% if val.type == "Ref" %}{{val.ref}}("{{val.value}}"){% elif val.type == "Non_Ref_Secret" %}{{val.value}}{% else %} "{{val.value}}" {% endif %},
 {%- endfor %}
 )
 {% endmacro %}
diff --git a/calm/dsl/decompile/schemas/ndb_postgres_restore.py.jinja2 b/calm/dsl/decompile/schemas/ndb_postgres_restore.py.jinja2
index 70f16e84..645fb880 100644
--- a/calm/dsl/decompile/schemas/ndb_postgres_restore.py.jinja2
+++ b/calm/dsl/decompile/schemas/ndb_postgres_restore.py.jinja2
@@ -1,7 +1,7 @@
 {% macro database_restore(obj) %}
 Database.Postgres.RestoreFromTimeMachine(
 {%- for key, val in obj.items() %}
-    {{ key }}={% if val.type == "Ref" %}{{val.ref}}("{{val.value}}"){% else %} "{{val.value}}" {% endif %},
+    {{ key }}={% if val.type == "Ref" %}{{val.ref}}("{{val.value}}"){% elif val.type == "Non_Ref_Secret" %}{{val.value}}{% else %} "{{val.value}}" {% endif %},
 {%- endfor %}
 )
 {% endmacro %}
diff --git a/calm/dsl/decompile/schemas/patch_field_ahv_disk.py.jinja2 b/calm/dsl/decompile/schemas/patch_field_ahv_disk.py.jinja2
new file mode 100644
index 00000000..6b3fe124
--- /dev/null
+++ b/calm/dsl/decompile/schemas/patch_field_ahv_disk.py.jinja2
@@ -0,0 +1,7 @@
+{%- macro patch_field_ahv_disk(obj) %}
+{% if obj.disk_operation == "delete" %}PatchField.Ahv.Disks.{{obj.disk_operation}}(index={{obj.index}}){% endif %}
+{% if obj.disk_operation == "modify" %}PatchField.Ahv.Disks.{{obj.disk_operation}}(index={{obj.index}}, editable={{obj.editable}}{% if obj.operation %}, operation="{{obj.operation}}"{% endif %}{% if obj.value %}, value='{{obj.value}}'{% endif %}{% if obj.max_val %}, max_val={{obj.max_val}}{% endif %}{% if obj.min_val %}, min_val={{obj.min_val}}{% endif %}){% endif %}
+{% if obj.disk_operation == "add" %}PatchField.Ahv.Disks.{{obj.disk_operation}}({{obj.disk_data}}){% endif %}
+{%- endmacro %}
+
+{{ patch_field_ahv_disk(obj) }}
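Note: patch_field_ahv_disk.py.jinja2 above emits one of three PatchField.Ahv.Disks call shapes depending on disk_operation (delete, modify, add). A trimmed, runnable rendition of the modify branch; the index and limits are illustrative:

```
from jinja2 import Template

# Trimmed "modify" branch of patch_field_ahv_disk.py.jinja2.
t = Template(
    'PatchField.Ahv.Disks.{{obj.disk_operation}}(index={{obj.index}}, '
    'editable={{obj.editable}}'
    '{% if obj.operation %}, operation="{{obj.operation}}"{% endif %}'
    '{% if obj.max_val %}, max_val={{obj.max_val}}{% endif %})'
)
obj = {"disk_operation": "modify", "index": 1, "editable": True,
       "operation": "equal", "max_val": 51200}
print(t.render(obj=obj))
# PatchField.Ahv.Disks.modify(index=1, editable=True, operation="equal", max_val=51200)
```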
diff --git a/calm/dsl/decompile/schemas/patch_field_ahv_nic.py.jinja2 b/calm/dsl/decompile/schemas/patch_field_ahv_nic.py.jinja2
new file mode 100644
index 00000000..e4dbdeee
--- /dev/null
+++ b/calm/dsl/decompile/schemas/patch_field_ahv_nic.py.jinja2
@@ -0,0 +1,6 @@
+{%- macro patch_field_ahv_nic(obj) %}
+{% if obj.operation == "delete" %}PatchField.Ahv.Nics.{{obj.operation}}(index={{obj.index}}){% endif %}
+{% if obj.operation == "add" %}PatchField.Ahv.Nics.{{obj.operation}}({{obj.nic_data}}){% endif %}
+{%- endmacro %}
+
+{{ patch_field_ahv_nic(obj) }}
diff --git a/calm/dsl/decompile/schemas/patch_field_category.py.jinja2 b/calm/dsl/decompile/schemas/patch_field_category.py.jinja2
new file mode 100644
index 00000000..a1a61ca4
--- /dev/null
+++ b/calm/dsl/decompile/schemas/patch_field_category.py.jinja2
@@ -0,0 +1,5 @@
+{%- macro patch_field_category(obj) %}
+{%if obj.operation != "modify" %}PatchField.Ahv.Category.{{obj.operation}}({{obj.value}}){% endif %}
+{%- endmacro %}
+
+{{ patch_field_category(obj) }}
diff --git a/calm/dsl/decompile/schemas/profile.py.jinja2 b/calm/dsl/decompile/schemas/profile.py.jinja2
index 333a4744..d299b6f6 100644
--- a/calm/dsl/decompile/schemas/profile.py.jinja2
+++ b/calm/dsl/decompile/schemas/profile.py.jinja2
@@ -5,6 +5,7 @@ class {{obj.name}}(Profile):
     {% if obj.gui_display_name %}name="{{obj.gui_display_name}}"{% endif %}
     {% if obj.environments %}environments = [Ref.Environment(name="{{obj.environments[0]}}")]{% endif %}
     {% if obj.deployments %}deployments = [{{obj.deployments}}]{%- endif %}
+    {% if obj.patch_list %}patch_list = [{{obj.patch_list}}]{%- endif %}
     {% if obj.restore_configs %}restore_configs = [{{obj.restore_configs}}]{%- endif %}
     {% if obj.snapshot_configs %}snapshot_configs = [{{obj.snapshot_configs}}]{%- endif %}
     {% for variable in obj.variables %}
+""" +from calm.dsl.builtins import Project +from calm.dsl.builtins import Provider, Ref + +class {{obj.name}}(Project): + {% if obj.providers %} + providers = [ + {% for entity in obj.providers %} + {{entity}} + {% endfor %} + ] + {% endif %} + {% if obj.users %} + users = [{%- for user in obj.users%}{{user}}, {%- endfor %}] + {% endif %} + {% if obj.quotas %} + quotas = {{obj.quotas}} + {% endif %} +{% endmacro %} +{{ project(obj) }} \ No newline at end of file diff --git a/calm/dsl/decompile/schemas/provider_aws.py.jinja2 b/calm/dsl/decompile/schemas/provider_aws.py.jinja2 new file mode 100644 index 00000000..a04fee58 --- /dev/null +++ b/calm/dsl/decompile/schemas/provider_aws.py.jinja2 @@ -0,0 +1,6 @@ +{% macro aws_provider(obj) %} + Provider.Aws( + {% if obj.account_reference %}account=Ref.Account("{{obj.account_reference}}"),{% endif %} + ), +{% endmacro %} +{{ aws_provider(obj) }} \ No newline at end of file diff --git a/calm/dsl/decompile/schemas/provider_gcp.py.jinja2 b/calm/dsl/decompile/schemas/provider_gcp.py.jinja2 new file mode 100644 index 00000000..9f4858d7 --- /dev/null +++ b/calm/dsl/decompile/schemas/provider_gcp.py.jinja2 @@ -0,0 +1,6 @@ +{% macro gcp_provider(obj) %} + Provider.Gcp( + {% if obj.account_reference %}account=Ref.Account("{{obj.account_reference}}"),{% endif %} + ), +{% endmacro %} +{{ gcp_provider(obj) }} \ No newline at end of file diff --git a/calm/dsl/decompile/schemas/provider_ntnx.py.jinja2 b/calm/dsl/decompile/schemas/provider_ntnx.py.jinja2 new file mode 100644 index 00000000..1d8768ca --- /dev/null +++ b/calm/dsl/decompile/schemas/provider_ntnx.py.jinja2 @@ -0,0 +1,13 @@ +{% macro ntnx_provider(obj) %} + Provider.Ntnx( + {% if obj.account_reference %}account=Ref.Account("{{obj.account_reference}}"),{% endif %} + {% if obj.subnet_reference_list %}subnets=[ + {%- for subnet in obj.subnet_reference_list %} + Ref.Subnet( + {% if subnet.name %}name="{{subnet.name}}"{% endif %} + {% if subnet.cluster %}, cluster="{{subnet.cluster}}"{% endif %} + ),{%- endfor %} + ],{% endif %} + ), +{% endmacro %} +{{ ntnx_provider(obj) }} \ No newline at end of file diff --git a/calm/dsl/decompile/schemas/provider_vmware.py.jinja2 b/calm/dsl/decompile/schemas/provider_vmware.py.jinja2 new file mode 100644 index 00000000..ba2bdb4b --- /dev/null +++ b/calm/dsl/decompile/schemas/provider_vmware.py.jinja2 @@ -0,0 +1,6 @@ +{% macro vmware_provider(obj) %} + Provider.Vmware( + {% if obj.account_reference %}account=Ref.Account("{{obj.account_reference}}"),{% endif %} + ), +{% endmacro %} +{{ vmware_provider(obj) }} \ No newline at end of file diff --git a/calm/dsl/decompile/schemas/quotas.py.jinja2 b/calm/dsl/decompile/schemas/quotas.py.jinja2 new file mode 100644 index 00000000..84809924 --- /dev/null +++ b/calm/dsl/decompile/schemas/quotas.py.jinja2 @@ -0,0 +1,4 @@ +{% macro quotas(obj) %} + {"vcpus": {{obj.vcpus}}, "storage": {{obj.storage}}, "memory": {{obj.memory}}} +{% endmacro %} +{{ quotas(obj) }} \ No newline at end of file diff --git a/calm/dsl/decompile/schemas/restore_config.py.jinja2 b/calm/dsl/decompile/schemas/restore_config.py.jinja2 index 9415ab42..345f80b2 100644 --- a/calm/dsl/decompile/schemas/restore_config.py.jinja2 +++ b/calm/dsl/decompile/schemas/restore_config.py.jinja2 @@ -1,5 +1,9 @@ {%- macro restore_config(obj) %} -AppProtection.RestoreConfig(name='{{obj.name}}', target=ref({{obj.target}}), delete_vm_post_restore={{obj.delete_vm_post_restore}}) +{% if obj.provider == "Ahv" %} +AppProtection.RestoreConfig{% if obj.provider %}.{{obj.provider}}{% 
endif %}(name='{{obj.name}}', target=ref({{obj.target}}), delete_vm_post_restore={{obj.delete_vm_post_restore}}{% if obj.description %}, description="{{obj.description}}"{% endif %}) +{% else %} +AppProtection.RestoreConfig{% if obj.provider %}.{{obj.provider}}{% endif %}(name='{{obj.name}}', target=ref({{obj.target}}){% if obj.description %}, description="{{obj.description}}"{% endif %}) +{% endif %} {%- endmacro %} {{ restore_config(obj) }} diff --git a/calm/dsl/decompile/schemas/snapshot_config.py.jinja2 b/calm/dsl/decompile/schemas/snapshot_config.py.jinja2 index 1008b6cf..721cf258 100644 --- a/calm/dsl/decompile/schemas/snapshot_config.py.jinja2 +++ b/calm/dsl/decompile/schemas/snapshot_config.py.jinja2 @@ -1,12 +1,12 @@ {%- macro snapshot_config(obj) %} {% if obj.policy %} {% if obj.rule %} -AppProtection.SnapshotConfig(name='{{obj.name}}', target=ref({{obj.target}}), restore_config=ref({{obj.restore_config}}), policy=AppProtection.ProtectionPolicy('{{obj.policy}}', rule='{{obj.rule}}')) +AppProtection.SnapshotConfig{% if obj.provider %}.{{obj.provider}}{% endif %}(name='{{obj.name}}', target=ref({{obj.target}}), num_of_replicas="{{obj.num_of_replicas}}", restore_config=ref({{obj.restore_config}}), policy=AppProtection.ProtectionPolicy('{{obj.policy}}', rule='{{obj.rule}}'){% if obj.snapshot_location_type %}, snapshot_location_type="{{obj.snapshot_location_type}}"{% endif %}{% if obj.description %}, description="{{obj.description}}"{% endif %}) {% else %} -AppProtection.SnapshotConfig(name='{{obj.name}}', target=ref({{obj.target}}), restore_config=ref({{obj.restore_config}}), policy=AppProtection.ProtectionPolicy('{{obj.policy}}')) +AppProtection.SnapshotConfig{% if obj.provider %}.{{obj.provider}}{% endif %}(name='{{obj.name}}', target=ref({{obj.target}}), num_of_replicas="{{obj.num_of_replicas}}", restore_config=ref({{obj.restore_config}}), policy=AppProtection.ProtectionPolicy('{{obj.policy}}'){% if obj.snapshot_location_type %}, snapshot_location_type="{{obj.snapshot_location_type}}"{% endif %}{% if obj.description %}, description="{{obj.description}}"{% endif %}) {% endif %} {% else %} -AppProtection.SnapshotConfig(name='{{obj.name}}', target=ref({{obj.target}}), restore_config=ref({{obj.restore_config}})) +AppProtection.SnapshotConfig{% if obj.provider %}.{{obj.provider}}{% endif %}(name='{{obj.name}}', target=ref({{obj.target}}), num_of_replicas="{{obj.num_of_replicas}}", restore_config=ref({{obj.restore_config}}){% if obj.snapshot_location_type %}, snapshot_location_type="{{obj.snapshot_location_type}}"{% endif %}{% if obj.description %}, description="{{obj.description}}"{% endif %}) {% endif %} {%- endmacro %} diff --git a/calm/dsl/decompile/schemas/task_exec_powershell.py.jinja2 b/calm/dsl/decompile/schemas/task_exec_powershell.py.jinja2 index b8118901..0de7de08 100644 --- a/calm/dsl/decompile/schemas/task_exec_powershell.py.jinja2 +++ b/calm/dsl/decompile/schemas/task_exec_powershell.py.jinja2 @@ -1,12 +1,12 @@ {%- macro exec_powershell_task(obj) -%} {%- if obj.cred is not defined and obj.target is not defined %} -CalmTask.Exec.powershell(name='{{obj.name}}', filename={{obj.attrs.script_file}}) +CalmTask.Exec.powershell(name='{{obj.name}}', filename={{obj.attrs.script_file}} {%- if obj.target_endpoint %}, target_endpoint={{obj.target_endpoint}} {%- endif %}) {%- elif obj.cred is not defined %} -CalmTask.Exec.powershell(name='{{obj.name}}', filename={{obj.attrs.script_file}}, target={{obj.target}}) +CalmTask.Exec.powershell(name='{{obj.name}}', 
filename={{obj.attrs.script_file}}, target={{obj.target}} {%- if obj.target_endpoint %}, target_endpoint={{obj.target_endpoint}} {%- endif %}) {%- elif obj.target is not defined %} -CalmTask.Exec.powershell(name='{{obj.name}}', filename={{obj.attrs.script_file}}, cred={{obj.cred}}) +CalmTask.Exec.powershell(name='{{obj.name}}', filename={{obj.attrs.script_file}}, cred={{obj.cred}} {%- if obj.target_endpoint %}, target_endpoint={{obj.target_endpoint}} {%- endif %}) {%- else %} -CalmTask.Exec.powershell(name='{{obj.name}}', filename={{obj.attrs.script_file}}, cred={{obj.cred}}, target={{obj.target}}) +CalmTask.Exec.powershell(name='{{obj.name}}', filename={{obj.attrs.script_file}}, cred={{obj.cred}}, target={{obj.target}} {%- if obj.target_endpoint %}, target_endpoint={{obj.target_endpoint}} {%- endif %}) {%- endif %} {%- endmacro %} diff --git a/calm/dsl/decompile/schemas/task_exec_ssh.py.jinja2 b/calm/dsl/decompile/schemas/task_exec_ssh.py.jinja2 index dd58ce2e..c8ecbce7 100644 --- a/calm/dsl/decompile/schemas/task_exec_ssh.py.jinja2 +++ b/calm/dsl/decompile/schemas/task_exec_ssh.py.jinja2 @@ -1,12 +1,12 @@ {%- macro exec_ssh_task(obj) -%} {%- if obj.cred is not defined and obj.target is not defined %} -CalmTask.Exec.ssh(name='{{obj.name}}', filename={{obj.attrs.script_file}}) +CalmTask.Exec.ssh(name='{{obj.name}}', filename={{obj.attrs.script_file}} {%- if obj.target_endpoint %}, target_endpoint={{obj.target_endpoint}} {%- endif %} ) {%- elif obj.cred is not defined %} -CalmTask.Exec.ssh(name='{{obj.name}}', filename={{obj.attrs.script_file}}, target={{obj.target}}) +CalmTask.Exec.ssh(name='{{obj.name}}', filename={{obj.attrs.script_file}}, target={{obj.target}} {%- if obj.target_endpoint %}, target_endpoint={{obj.target_endpoint}} {%- endif %}) {%- elif obj.target is not defined %} -CalmTask.Exec.ssh(name='{{obj.name}}', filename={{obj.attrs.script_file}}, cred={{obj.cred}}) +CalmTask.Exec.ssh(name='{{obj.name}}', filename={{obj.attrs.script_file}}, cred={{obj.cred}} {%- if obj.target_endpoint %}, target_endpoint={{obj.target_endpoint}} {%- endif %}) {%- else %} -CalmTask.Exec.ssh(name='{{obj.name}}', filename={{obj.attrs.script_file}}, cred={{obj.cred}}, target={{obj.target}}) +CalmTask.Exec.ssh(name='{{obj.name}}', filename={{obj.attrs.script_file}}, cred={{obj.cred}}, target={{obj.target}} {%- if obj.target_endpoint %}, target_endpoint={{obj.target_endpoint}} {%- endif %}) {%- endif %} {%- endmacro %} diff --git a/calm/dsl/decompile/schemas/task_http_delete.py.jinja2 b/calm/dsl/decompile/schemas/task_http_delete.py.jinja2 index 21e863de..2d9c60f0 100644 --- a/calm/dsl/decompile/schemas/task_http_delete.py.jinja2 +++ b/calm/dsl/decompile/schemas/task_http_delete.py.jinja2 @@ -1,12 +1,12 @@ {%- macro http_delete_task(obj) -%} {%- if obj.target is not defined and obj.attrs.request_body is not defined %} -{%- if obj.calm_var_task %}CalmVarTask{%- else %}CalmTask{%- endif %}.HTTP.delete({%- if obj.relative_url %}relative_url={%- endif %}{%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}'{%- if obj.cred %}, cred={{obj.cred}}{%- endif %} {%- if obj.attrs.tunnel_reference%}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %}) +{%- if 
obj.calm_var_task %}CalmVarTask{%- else %}CalmTask{%- endif %}.HTTP.delete({%- if obj.relative_url %}relative_url={%- endif %}{%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}'{%- if obj.cred %}, cred={{obj.cred}}{%- endif %}{% if obj.credentials_list %}, credential={{obj.credentials_list | replace("'","")}}{% endif %} {%- if obj.attrs.tunnel_reference%}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %}) {%- elif obj.target is not defined %} -{%- if obj.calm_var_task %}CalmVarTask{%- else %}CalmTask{%- endif %}.HTTP.delete({%- if obj.relative_url %}relative_url={%- endif %}{%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}body=json.dumps({{obj.attrs.request_body}}), headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}'{%- if obj.cred %}, cred={{obj.cred}}{%- endif %} {%- if obj.attrs.tunnel_reference%}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %}) +{%- if obj.calm_var_task %}CalmVarTask{%- else %}CalmTask{%- endif %}.HTTP.delete({%- if obj.relative_url %}relative_url={%- endif %}{%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}body=json.dumps({{obj.attrs.request_body}}), headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}'{%- if obj.cred %}, cred={{obj.cred}}{%- endif %}{% if obj.credentials_list %}, credential={{obj.credentials_list | replace("'","")}}{% endif %} {%- if obj.attrs.tunnel_reference%}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %}) {%- elif obj.attrs.request_body is not defined %} -{%- if obj.calm_var_task %}CalmVarTask{%- else %}CalmTask{%- endif %}.HTTP.delete({%- if obj.relative_url %}relative_url={%- endif %}{%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}', target={{obj.target}}{%- if obj.cred %}, cred={{obj.cred}}{%- endif %} {%- if obj.attrs.tunnel_reference%}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %}) +{%- if obj.calm_var_task %}CalmVarTask{%- else %}CalmTask{%- endif %}.HTTP.delete({%- if obj.relative_url %}relative_url={%- endif %}{%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}', 
target={{obj.target}}{%- if obj.cred %}, cred={{obj.cred}}{%- endif %}{% if obj.credentials_list %}, credential={{obj.credentials_list | replace("'","")}}{% endif %} {%- if obj.attrs.tunnel_reference%}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %}) {%- else %} -{%- if obj.calm_var_task %}CalmVarTask{%- else %}CalmTask{%- endif %}.HTTP.delete({%- if obj.relative_url %}relative_url={%- endif %}{%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}body=json.dumps({{obj.attrs.request_body}}), headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}', target={{obj.target}}{%- if obj.cred %}, cred={{obj.cred}}{%- endif %} {%- if obj.attrs.tunnel_reference%}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %}) +{%- if obj.calm_var_task %}CalmVarTask{%- else %}CalmTask{%- endif %}.HTTP.delete({%- if obj.relative_url %}relative_url={%- endif %}{%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}body=json.dumps({{obj.attrs.request_body}}), headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}', target={{obj.target}}{%- if obj.cred %}, cred={{obj.cred}}{%- endif %}{% if obj.credentials_list %}, credential={{obj.credentials_list | replace("'","")}}{% endif %} {%- if obj.attrs.tunnel_reference%}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %}) {%- endif %} {%- endmacro %} diff --git a/calm/dsl/decompile/schemas/task_http_get.py.jinja2 b/calm/dsl/decompile/schemas/task_http_get.py.jinja2 index 91a3719a..a391069f 100644 --- a/calm/dsl/decompile/schemas/task_http_get.py.jinja2 +++ b/calm/dsl/decompile/schemas/task_http_get.py.jinja2 @@ -1,8 +1,8 @@ {%- macro http_get_task(obj) -%} {%- if obj.target is not defined %} -{%- if obj.calm_var_task %}CalmVarTask{%- else %}CalmTask{%- endif %}.HTTP.get({%- if obj.relative_url %}relative_url={%- endif %}{%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}'{%- if obj.cred %}, cred={{obj.cred}}{%- endif %} {%- if obj.attrs.tunnel_reference %}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %}) +{%- if obj.calm_var_task %}CalmVarTask{%- else %}CalmTask{%- endif %}.HTTP.get({%- if obj.relative_url %}relative_url={%- endif %}{%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}'{%- if obj.cred %}, cred={{obj.cred}}{%- endif %} {% if obj.credentials_list %}, 
credential={{obj.credentials_list | replace("'","")}}{% endif %} {%- if obj.attrs.tunnel_reference %}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %}) {%- else %} -{%- if obj.calm_var_task %}CalmVarTask{%- else %}CalmTask{%- endif %}.HTTP.get({%- if obj.relative_url %}relative_url={%- endif %}{%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}', target={{obj.target}}{%- if obj.cred %}, cred={{obj.cred}}{%- endif %} {%- if obj.attrs.tunnel_reference %}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %}) +{%- if obj.calm_var_task %}CalmVarTask{%- else %}CalmTask{%- endif %}.HTTP.get({%- if obj.relative_url %}relative_url={%- endif %}{%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}', target={{obj.target}}{%- if obj.cred %}, cred={{obj.cred}}{%- endif %}{% if obj.credentials_list %}, credential={{obj.credentials_list | replace("'","")}}{% endif %} {%- if obj.attrs.tunnel_reference %}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %}) {%- endif %} {%- endmacro %} diff --git a/calm/dsl/decompile/schemas/task_http_post.py.jinja2 b/calm/dsl/decompile/schemas/task_http_post.py.jinja2 index 39b50e4a..79ac5296 100644 --- a/calm/dsl/decompile/schemas/task_http_post.py.jinja2 +++ b/calm/dsl/decompile/schemas/task_http_post.py.jinja2 @@ -1,12 +1,12 @@ {%- macro http_post_task(obj) -%} {%- if obj.target is not defined and obj.attrs.request_body is not defined %} -{%- if obj.calm_var_task %}CalmVarTask{%- else %}CalmTask{%- endif %}.HTTP.post({%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}'{%- if obj.cred %}, cred={{obj.cred}}{%- endif %} {%- if obj.attrs.tunnel_reference%}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %}) +{%- if obj.calm_var_task %}CalmVarTask{%- else %}CalmTask{%- endif %}.HTTP.post({%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}'{%- if obj.cred %}, cred={{obj.cred}}{%- endif %} {% if obj.credentials_list %}, credential={{obj.credentials_list | replace("'","")}}{% endif %} {%- if obj.attrs.tunnel_reference%}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %}) 
{%- elif obj.target is not defined %} -{%- if obj.calm_var_task %}CalmVarTask{%- else %}CalmTask{%- endif %}.HTTP.post({%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}body=json.dumps({{obj.attrs.request_body}}), headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}'{%- if obj.cred %}, cred={{obj.cred}}{%- endif %} {%- if obj.attrs.tunnel_reference%}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %}) +{%- if obj.calm_var_task %}CalmVarTask{%- else %}CalmTask{%- endif %}.HTTP.post({%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}body=json.dumps({{obj.attrs.request_body}}), headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}'{%- if obj.cred %}, cred={{obj.cred}}{%- endif %} {% if obj.credentials_list %}, credential={{obj.credentials_list | replace("'","")}}{% endif %} {%- if obj.attrs.tunnel_reference%}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %}) {%- elif obj.attrs.request_body is not defined %} -{%- if obj.calm_var_task %}CalmVarTask{%- else %}CalmTask{%- endif %}.HTTP.post({%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}', target={{obj.target}}{%- if obj.cred %}, cred={{obj.cred}}{%- endif %} {%- if obj.attrs.tunnel_reference%}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %}) +{%- if obj.calm_var_task %}CalmVarTask{%- else %}CalmTask{%- endif %}.HTTP.post({%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}', target={{obj.target}}{%- if obj.cred %}, cred={{obj.cred}}{%- endif %} {% if obj.credentials_list %}, credential={{obj.credentials_list | replace("'","")}}{% endif %} {%- if obj.attrs.tunnel_reference%}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %}) {%- else %} -{%- if obj.calm_var_task %}CalmVarTask{%- else %}CalmTask{%- endif %}.HTTP.post({%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}body=json.dumps({{obj.attrs.request_body}}), headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}', target={{obj.target}}{%- if obj.cred %}, cred={{obj.cred}}{%- endif %} {%- if obj.attrs.tunnel_reference%}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if 
obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %}) +{%- if obj.calm_var_task %}CalmVarTask{%- else %}CalmTask{%- endif %}.HTTP.post({%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}body=json.dumps({{obj.attrs.request_body}}), headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}', target={{obj.target}}{%- if obj.cred %}, cred={{obj.cred}}{%- endif %} {% if obj.credentials_list %}, credential={{obj.credentials_list | replace("'","")}}{% endif %} {%- if obj.attrs.tunnel_reference%}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %}) {%- endif %} {%- endmacro %} diff --git a/calm/dsl/decompile/schemas/task_http_put.py.jinja2 b/calm/dsl/decompile/schemas/task_http_put.py.jinja2 index 4557927a..c414408c 100644 --- a/calm/dsl/decompile/schemas/task_http_put.py.jinja2 +++ b/calm/dsl/decompile/schemas/task_http_put.py.jinja2 @@ -1,12 +1,12 @@ {%- macro http_put_task(obj) -%} {%- if obj.target is not defined and obj.attrs.request_body is not defined %} -{%- if obj.calm_var_task %}CalmVarTask{%- else %}CalmTask{%- endif %}.HTTP.put({%- if obj.relative_url %}relative_url={%- endif %}{%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}'{%- if obj.cred %}, cred={{obj.cred}}{%- endif %} {%- if obj.attrs.tunnel_reference%}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %}) +{%- if obj.calm_var_task %}CalmVarTask{%- else %}CalmTask{%- endif %}.HTTP.put({%- if obj.relative_url %}relative_url={%- endif %}{%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}'{%- if obj.cred %}, cred={{obj.cred}}{%- endif %} {% if obj.credentials_list %}, credential={{obj.credentials_list | replace("'","")}}{% endif %} {%- if obj.attrs.tunnel_reference%}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %}) {%- elif obj.target is not defined %} -{%- if obj.calm_var_task %}CalmVarTask{%- else %}CalmTask{%- endif %}.HTTP.put({%- if obj.relative_url %}relative_url={%- endif %}{%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}body=json.dumps({{obj.attrs.request_body}}), headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}'{%- if obj.cred %}, cred={{obj.cred}}{%- endif %} {%- if obj.attrs.tunnel_reference%}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %}) +{%- if obj.calm_var_task %}CalmVarTask{%- else 
%}CalmTask{%- endif %}.HTTP.put({%- if obj.relative_url %}relative_url={%- endif %}{%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}body=json.dumps({{obj.attrs.request_body}}), headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}'{%- if obj.cred %}, cred={{obj.cred}}{%- endif %} {% if obj.credentials_list %}, credential={{obj.credentials_list | replace("'","")}}{% endif %} {%- if obj.attrs.tunnel_reference%}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %}) {%- elif obj.attrs.request_body is not defined %} -{%- if obj.calm_var_task %}CalmVarTask{%- else %}CalmTask{%- endif %}.HTTP.put({%- if obj.relative_url %}relative_url={%- endif %}{%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}', target={{obj.target}}{%- if obj.cred %}, cred={{obj.cred}}{%- endif %} {%- if obj.attrs.tunnel_reference%}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %}) +{%- if obj.calm_var_task %}CalmVarTask{%- else %}CalmTask{%- endif %}.HTTP.put({%- if obj.relative_url %}relative_url={%- endif %}{%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}', target={{obj.target}}{%- if obj.cred %}, cred={{obj.cred}}{%- endif %} {% if obj.credentials_list %}, credential={{obj.credentials_list | replace("'","")}}{% endif %} {%- if obj.attrs.tunnel_reference%}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %}) {%- else %} -{%- if obj.calm_var_task %}CalmVarTask{%- else %}CalmTask{%- endif %}.HTTP.put({%- if obj.relative_url %}relative_url={%- endif %}{%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}body=json.dumps({{obj.attrs.request_body}}), headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}', target={{obj.target}}{%- if obj.cred %}, cred={{obj.cred}}{%- endif %} {%- if obj.attrs.tunnel_reference%}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %}) +{%- if obj.calm_var_task %}CalmVarTask{%- else %}CalmTask{%- endif %}.HTTP.put({%- if obj.relative_url %}relative_url={%- endif %}{%- if obj.attrs.url %}'{{obj.attrs.url}}', {%- endif %}body=json.dumps({{obj.attrs.request_body}}), headers={{obj.headers}}, secret_headers={{obj.secret_headers}} , content_type='{{obj.attrs.content_type}}', verify={{obj.attrs.tls_verify}}, status_mapping={{obj.status_mapping}}, response_paths={{obj.response_paths}}, name='{{obj.name}}', 
target={{obj.target}}{%- if obj.cred %}, cred={{obj.cred}}{%- endif %} {% if obj.credentials_list %}, credential={{obj.credentials_list | replace("'","")}}{% endif %} {%- if obj.attrs.tunnel_reference%}, tunnel=Ref.Tunnel(name="{{ obj.attrs.tunnel_reference.name }}"){%- endif %} {%- if obj.attrs.relative_url %}, relative_url='{{obj.attrs.relative_url}}'{%- endif %})
 {%- endif %}
 {%- endmacro %}
diff --git a/calm/dsl/decompile/schemas/task_power_action_call_runbook.py.jinja2 b/calm/dsl/decompile/schemas/task_power_action_call_runbook.py.jinja2
new file mode 100644
index 00000000..09198f3a
--- /dev/null
+++ b/calm/dsl/decompile/schemas/task_power_action_call_runbook.py.jinja2
@@ -0,0 +1,5 @@
+{%- macro power_action_runbook_task(obj) -%}
+{{obj.target_substrate}}.{{obj.action}}(name='{{obj.name}}', target=ref({{obj.target}}))
+{%- endmacro %}
+
+{{ power_action_runbook_task(obj) }}
\ No newline at end of file
diff --git a/calm/dsl/decompile/schemas/task_setvariable_powershell.py.jinja2 b/calm/dsl/decompile/schemas/task_setvariable_powershell.py.jinja2
index f464b439..44b8e6a4 100644
--- a/calm/dsl/decompile/schemas/task_setvariable_powershell.py.jinja2
+++ b/calm/dsl/decompile/schemas/task_setvariable_powershell.py.jinja2
@@ -1,14 +1,14 @@
 {%- macro setvariable_powershell_task(obj) -%}
 {%- if obj.cred is not defined and obj.target and obj.variables is not defined %}
-CalmTask.SetVariable.powershell(name='{{obj.name}}', filename={{obj.attrs.script_file}})
+CalmTask.SetVariable.powershell(name='{{obj.name}}', filename={{obj.attrs.script_file}} {%- if obj.target_endpoint %}, target_endpoint={{obj.target_endpoint}} {%- endif %})
 {%- elif obj.cred is not defined %}
-CalmTask.SetVariable.powershell(name='{{obj.name}}', filename={{obj.attrs.script_file}} {%- if obj.target %}, target={{obj.target}}{% endif %}, variables={{obj.variables}})
+CalmTask.SetVariable.powershell(name='{{obj.name}}', filename={{obj.attrs.script_file}} {%- if obj.target %}, target={{obj.target}}{% endif %}, variables={{obj.variables}} {%- if obj.target_endpoint %}, target_endpoint={{obj.target_endpoint}} {%- endif %})
 {%- elif obj.target is not defined %}
-CalmTask.SetVariable.powershell(name='{{obj.name}}', filename={{obj.attrs.script_file}}, cred={{obj.cred}}, variables={{obj.variables}})
+CalmTask.SetVariable.powershell(name='{{obj.name}}', filename={{obj.attrs.script_file}}, cred={{obj.cred}}, variables={{obj.variables}} {%- if obj.target_endpoint %}, target_endpoint={{obj.target_endpoint}} {%- endif %})
 {%- elif obj.variables is not defined %}
-CalmTask.SetVariable.powershell(name='{{obj.name}}', filename={{obj.attrs.script_file}}, cred={{obj.cred}}, target={{obj.target}})
+CalmTask.SetVariable.powershell(name='{{obj.name}}', filename={{obj.attrs.script_file}}, cred={{obj.cred}}, target={{obj.target}} {%- if obj.target_endpoint %}, target_endpoint={{obj.target_endpoint}} {%- endif %})
 {%- else %}
-CalmTask.SetVariable.powershell(name='{{obj.name}}', filename={{obj.attrs.script_file}}, cred={{obj.cred}}, target={{obj.target}}, variables={{obj.variables}})
+CalmTask.SetVariable.powershell(name='{{obj.name}}', filename={{obj.attrs.script_file}}, cred={{obj.cred}}, target={{obj.target}}, variables={{obj.variables}} {%- if obj.target_endpoint %}, target_endpoint={{obj.target_endpoint}} {%- endif %})
 {%- endif %}
 {%- endmacro %}
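Note: the same optional target_endpoint branch is threaded through every Exec and SetVariable template in this change (powershell above, ssh below), so runbook tasks that pin an endpoint survive a decompile round-trip. A trimmed, runnable rendition of the shared pattern; the task, script, and endpoint names are illustrative:

```
from jinja2 import Template

# Trimmed version of the shared target_endpoint handling.
t = Template(
    "CalmTask.Exec.ssh(name='{{obj.name}}', filename={{obj.attrs.script_file}}"
    "{%- if obj.target_endpoint %}, target_endpoint={{obj.target_endpoint}}{%- endif %})"
)
obj = {"name": "Install", "attrs": {"script_file": "'scripts/install.sh'"},
       "target_endpoint": "ref(ep_1)"}
print(t.render(obj=obj))
# CalmTask.Exec.ssh(name='Install', filename='scripts/install.sh', target_endpoint=ref(ep_1))
```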
a/calm/dsl/decompile/schemas/task_setvariable_ssh.py.jinja2 +++ b/calm/dsl/decompile/schemas/task_setvariable_ssh.py.jinja2 @@ -1,14 +1,14 @@ {%- macro setvariable_ssh_task(obj) -%} {%- if obj.cred is not defined and obj.target and obj.variables is not defined %} -CalmTask.SetVariable.ssh(name='{{obj.name}}', filename={{obj.attrs.script_file}}) +CalmTask.SetVariable.ssh(name='{{obj.name}}', filename={{obj.attrs.script_file}} {%- if obj.target_endpoint %}, target_endpoint={{obj.target_endpoint}} {%- endif %}) {%- elif obj.cred is not defined %} -CalmTask.SetVariable.ssh(name='{{obj.name}}', filename={{obj.attrs.script_file}} {%- if obj.target %}, target={{obj.target}}{% endif %}, variables={{obj.variables}}) +CalmTask.SetVariable.ssh(name='{{obj.name}}', filename={{obj.attrs.script_file}} {%- if obj.target %}, target={{obj.target}}{% endif %}, variables={{obj.variables}} {%- if obj.target_endpoint %}, target_endpoint={{obj.target_endpoint}} {%- endif %}) {%- elif obj.target is not defined %} -CalmTask.SetVariable.ssh(name='{{obj.name}}', filename={{obj.attrs.script_file}}, cred={{obj.cred}}, variables={{obj.variables}}) +CalmTask.SetVariable.ssh(name='{{obj.name}}', filename={{obj.attrs.script_file}}, cred={{obj.cred}}, variables={{obj.variables}} {%- if obj.target_endpoint %}, target_endpoint={{obj.target_endpoint}} {%- endif %}) {%- elif obj.variables is not defined %} -CalmTask.SetVariable.ssh(name='{{obj.name}}', filename={{obj.attrs.script_file}}, cred={{obj.cred}}, target={{obj.target}}) +CalmTask.SetVariable.ssh(name='{{obj.name}}', filename={{obj.attrs.script_file}}, cred={{obj.cred}}, target={{obj.target}} {%- if obj.target_endpoint %}, target_endpoint={{obj.target_endpoint}} {%- endif %}) {%- else %} -CalmTask.SetVariable.ssh(name='{{obj.name}}', filename={{obj.attrs.script_file}}, cred={{obj.cred}}, target={{obj.target}}, variables={{obj.variables}}) +CalmTask.SetVariable.ssh(name='{{obj.name}}', filename={{obj.attrs.script_file}}, cred={{obj.cred}}, target={{obj.target}}, variables={{obj.variables}} {%- if obj.target_endpoint %}, target_endpoint={{obj.target_endpoint}} {%- endif %}) {%- endif %} {%- endmacro %} diff --git a/calm/dsl/decompile/schemas/task_while_loop.py.jinja2 b/calm/dsl/decompile/schemas/task_while_loop.py.jinja2 index 84a7ab00..5a2de13c 100644 --- a/calm/dsl/decompile/schemas/task_while_loop.py.jinja2 +++ b/calm/dsl/decompile/schemas/task_while_loop.py.jinja2 @@ -1,5 +1,5 @@ {%- macro while_task(obj) -%} -CalmTask.Loop(name='{{obj.name}}', iterations={{obj.attrs.iterations}}, exit_condition={{obj.attrs.exit_condition_type}}, loop_variable='{{obj.attrs.loop_variable}}') +CalmTask.Loop(name='{{obj.name}}', iterations="{{obj.attrs.iterations}}", exit_condition={{obj.attrs.exit_condition_type}}, loop_variable='{{obj.attrs.loop_variable}}') {%- endmacro %} {{ while_task(obj) }} diff --git a/calm/dsl/decompile/schemas/update_config.py.jinja2 b/calm/dsl/decompile/schemas/update_config.py.jinja2 new file mode 100644 index 00000000..20dff713 --- /dev/null +++ b/calm/dsl/decompile/schemas/update_config.py.jinja2 @@ -0,0 +1,5 @@ +{%- macro patch_config(obj) %} +AppEdit.UpdateConfig(name='{{obj.name}}', target=ref({{obj.target}}), patch_attrs={{obj.patch_attr}}) +{%- endmacro %} + +{{ patch_config(obj) }} diff --git a/calm/dsl/decompile/schemas/update_config_attr.py.jinja2 b/calm/dsl/decompile/schemas/update_config_attr.py.jinja2 new file mode 100644 index 00000000..44a0bfa9 --- /dev/null +++ b/calm/dsl/decompile/schemas/update_config_attr.py.jinja2 @@ -0,0 +1,21 @@ +{%- 
macro update_config_attr(obj) %} +class {{obj.name}}(AhvUpdateConfigAttrs): + {% if obj.description %}"""{{obj.description}}"""{% endif %} + + {% if obj.memory and obj.memory.operation %}memory = PatchField.Ahv.memory(value="{{obj.memory.value}}", operation="{{obj.memory.operation}}"{% if obj.memory.max_value %}, max_val={{obj.memory.max_value}}{% endif %}{% if obj.memory.min_value %}, min_val={{obj.memory.min_value}}{% endif %}, editable={{obj.memory.editable}}){% endif %} + {% if obj.vcpu and obj.vcpu.operation %}vcpu = PatchField.Ahv.vcpu(value="{{obj.vcpu.value}}", operation="{{obj.vcpu.operation}}"{% if obj.vcpu.max_value %}, max_val={{obj.vcpu.max_value}}{% endif %}{% if obj.vcpu.min_value %}, min_val={{obj.vcpu.min_value}}{% endif %}, editable={{obj.vcpu.editable}}){% endif %} + {% if obj.numsocket and obj.numsocket.operation %}numsocket = PatchField.Ahv.numsocket(value="{{obj.numsocket.value}}", operation="{{obj.numsocket.operation}}"{% if obj.numsocket.max_value %}, max_val={{obj.numsocket.max_value}}{% endif %}{% if obj.numsocket.min_value%}, min_val={{obj.numsocket.min_value}}{% endif %}, editable={{obj.numsocket.editable}}){% endif %} + {% if obj.disk_delete %}disk_delete = {{obj.disk_delete}}{% endif %} + {% if obj.nic_delete %}nic_delete = {{obj.nic_delete}}{% endif %} + {% if obj.categories_delete %}categories_delete = {{obj.categories_delete}}{% endif %} + {% if obj.categories_add %}categories_add = {{obj.categories_add}}{% endif %} + {% if obj.nic_list %}nics = [{{obj.nic_list}}]{% endif %} + {% if obj.disk_list %}disks = [{{obj.disk_list}}]{% endif %} + {% if obj.category_list %}categories = [{{obj.category_list}}]{% endif %} + +{% for action in obj.actions %} +{{action | indent( width=4, first=True)}} +{%- endfor %} +{%- endmacro %} + +{{ update_config_attr(obj) }} diff --git a/calm/dsl/decompile/schemas/user.py.jinja2 b/calm/dsl/decompile/schemas/user.py.jinja2 new file mode 100644 index 00000000..d6eaca15 --- /dev/null +++ b/calm/dsl/decompile/schemas/user.py.jinja2 @@ -0,0 +1,4 @@ +{% macro user(obj) %} + {% if obj.name %}Ref.User(name="{{obj.name}}"){% endif %} +{% endmacro %} +{{ user(obj) }} \ No newline at end of file diff --git a/calm/dsl/decompile/service.py b/calm/dsl/decompile/service.py index 1404c71c..f8575aff 100644 --- a/calm/dsl/decompile/service.py +++ b/calm/dsl/decompile/service.py @@ -9,7 +9,7 @@ LOG = get_logging_handle(__name__) -def render_service_template(cls, secrets_dict=[]): +def render_service_template(cls, secrets_dict=[], endpoints=[], ep_list=[]): LOG.debug("Rendering {} service template".format(cls.__name__)) if not isinstance(cls, ServiceType): @@ -66,7 +66,12 @@ def render_service_template(cls, secrets_dict=[]): for entity in user_attrs.get("actions", []): rendered_txt = render_action_template( - entity, entity_context, context=context, secrets_dict=secrets_dict + entity, + entity_context, + context=context, + secrets_dict=secrets_dict, + endpoints=endpoints, + ep_list=ep_list, ) if rendered_txt: action_list.append(rendered_txt) diff --git a/calm/dsl/decompile/substrate.py b/calm/dsl/decompile/substrate.py index 277b3919..33fea278 100644 --- a/calm/dsl/decompile/substrate.py +++ b/calm/dsl/decompile/substrate.py @@ -7,14 +7,15 @@ from calm.dsl.decompile.file_handler import get_specs_dir, get_specs_dir_key from calm.dsl.builtins import SubstrateType, get_valid_identifier from calm.dsl.decompile.ahv_vm import render_ahv_vm -from calm.dsl.decompile.ref_dependency import update_substrate_name from calm.dsl.store import Cache from 
calm.dsl.log import get_logging_handle LOG = get_logging_handle(__name__) -def render_substrate_template(cls, vm_images=[], secrets_dict=[]): +def render_substrate_template( + cls, vm_images=[], secrets_dict=[], endpoints=[], ep_list=[] +): LOG.debug("Rendering {} substrate template".format(cls.__name__)) if not isinstance(cls, SubstrateType): @@ -35,9 +36,6 @@ def render_substrate_template(cls, vm_images=[], secrets_dict=[]): if gui_display_name != cls.__name__: user_attrs["gui_display_name"] = gui_display_name - # updating ui and dsl name mapping - update_substrate_name(gui_display_name, cls.__name__) - provider_spec_editables = user_attrs.get("provider_spec_editables", {}) create_spec_editables = provider_spec_editables.get("create_spec", {}) readiness_probe_editables = provider_spec_editables.get("readiness_probe", {}) @@ -113,13 +111,22 @@ def render_substrate_template(cls, vm_images=[], secrets_dict=[]): # Actions action_list = [] system_actions = {v: k for k, v in SubstrateType.ALLOWED_FRAGMENT_ACTIONS.items()} + power_actions = {v: k for k, v in SubstrateType.ALLOWED_SYSTEM_ACTIONS.items()} for action in user_attrs.get("actions", []): if action.__name__ in list(system_actions.keys()): action.name = system_actions[action.__name__] action.__name__ = system_actions[action.__name__] + elif action.__name__ in list(power_actions.keys()): + action.name = power_actions[action.__name__] + action.__name__ = power_actions[action.__name__] action_list.append( render_action_template( - action, entity_context, context=context, secrets_dict=secrets_dict + action, + entity_context, + context=context, + secrets_dict=secrets_dict, + endpoints=endpoints, + ep_list=ep_list, ) ) diff --git a/calm/dsl/decompile/task.py b/calm/dsl/decompile/task.py index 20300c01..61264cae 100644 --- a/calm/dsl/decompile/task.py +++ b/calm/dsl/decompile/task.py @@ -4,10 +4,19 @@ from calm.dsl.decompile.render import render_template from calm.dsl.decompile.ndb import get_schema_file_and_user_attrs from calm.dsl.decompile.ref import render_ref_template -from calm.dsl.decompile.credential import get_cred_var_name +from calm.dsl.builtins import CredentialType +from calm.dsl.decompile.credential import ( + get_cred_var_name, + render_credential_template, +) from calm.dsl.decompile.file_handler import get_scripts_dir, get_scripts_dir_key from calm.dsl.builtins import TaskType from calm.dsl.db.table_config import AccountCache +from calm.dsl.constants import SUBSTRATE +from calm.dsl.decompile.ref_dependency import ( + get_entity_gui_dsl_name, + get_power_action_target_substrate, +) from calm.dsl.builtins.models.task import EXIT_CONDITION_MAP from calm.dsl.log import get_logging_handle @@ -22,6 +31,8 @@ def render_task_template( CONFIG_SPEC_MAP={}, context="", secrets_dict=[], + credentials_list=[], + rendered_credential_list=[], ): LOG.debug("Rendering {} task template".format(cls.name)) if not isinstance(cls, TaskType): @@ -36,7 +47,10 @@ def render_task_template( user_attrs = cls.get_user_attrs() user_attrs["name"] = cls.name target = getattr(cls, "target_any_local_reference", None) - if target: # target will be modified to have correct name(DSL name) + endpoint_target = getattr(cls, "exec_target_reference", None) + if endpoint_target: # if target is endpoint target then use that + user_attrs["target_endpoint"] = render_ref_template(endpoint_target) + elif target: # target will be modified to have correct name(DSL name) user_attrs["target"] = render_ref_template(target) cred = cls.attrs.get("login_credential_local_reference", 
None) @@ -145,6 +159,17 @@ def render_task_template( getattr(auth_cred, "name", "") or auth_cred.__name__ ) ) + elif auth_type == "basic": + cred_dict = { + "username": auth_obj["username"], + "password": auth_obj["password"], + "type": "PASSWORD", + } + cred = CredentialType.decompile(cred_dict) + rendered_credential_list.append(render_credential_template(cred)) + cred = get_cred_var_name(cred.name) + user_attrs["credentials_list"] = cred + credentials_list.append(cred) user_attrs["response_paths"] = attrs.get("response_paths", {}) method = attrs["method"] @@ -168,14 +193,40 @@ def render_task_template( schema_file = "task_http_delete.py.jinja2" elif cls.type == "CALL_RUNBOOK": + is_power_action = False runbook = cls.attrs["runbook_reference"] runbook_name = getattr(runbook, "name", "") or runbook.__name__ - user_attrs = { - "name": cls.name, - "action": RUNBOOK_ACTION_MAP[runbook_name], - "target": target.name, - } - schema_file = "task_call_runbook.py.jinja2" + + # constructing user_attrs for power action runbooks of substrate + for action_name in list(SUBSTRATE.VM_POWER_ACTIONS_REV.keys()): + if action_name in runbook_name and "substrate" in runbook_name: + gui_substrate_name = get_power_action_target_substrate(runbook_name) + + # mapping correct dsl class name using gui name found above + substrate = get_entity_gui_dsl_name(gui_substrate_name) + if not substrate: + raise ValueError("Target substrate not found") + user_attrs = { + "name": cls.name, + "action": SUBSTRATE.VM_POWER_ACTIONS_REV[action_name], + "target_substrate": substrate, + "target": target.name, + } + is_power_action = True + break + + # fallback to default user_attrs for backward compatibility + if not is_power_action: + user_attrs = { + "name": cls.name, + "action": RUNBOOK_ACTION_MAP[runbook_name], + "target": target.name, + } + + if is_power_action: + schema_file = "task_power_action_call_runbook.py.jinja2" + else: + schema_file = "task_call_runbook.py.jinja2" elif cls.type == "CALL_CONFIG": config_name = cls.attrs["config_spec_reference"] diff --git a/calm/dsl/decompile/user.py b/calm/dsl/decompile/user.py new file mode 100644 index 00000000..3ec41c0f --- /dev/null +++ b/calm/dsl/decompile/user.py @@ -0,0 +1,13 @@ +from calm.dsl.decompile.render import render_template +from calm.dsl.log import get_logging_handle + +LOG = get_logging_handle(__name__) + + +def render_user_template(cls): + LOG.debug("Rendering {} user template".format(cls["name"])) + + schema_file = "user.py.jinja2" + user_attrs = {"name": cls["name"]} + text = render_template(schema_file=schema_file, obj=user_attrs) + return text.strip() diff --git a/calm/dsl/decompile/variable.py b/calm/dsl/decompile/variable.py index d3aaccfd..58b64ee6 100644 --- a/calm/dsl/decompile/variable.py +++ b/calm/dsl/decompile/variable.py @@ -10,7 +10,14 @@ SECRET_VAR_FILES = [] -def render_variable_template(cls, entity_context, context="", secrets_dict=[]): +def render_variable_template( + cls, + entity_context, + context="", + secrets_dict=[], + credentials_list=[], + rendered_credential_list=[], +): LOG.debug("Rendering {} variable template".format(cls.__name__)) if not isinstance(cls, VariableType): @@ -44,6 +51,8 @@ def render_variable_template(cls, entity_context, context="", secrets_dict=[]): if cls.regex: regex = cls.regex.get_dict() user_attrs["regex"] = regex.get("value", None) + if user_attrs["regex"]: + user_attrs["regex"] = repr(user_attrs["regex"]).strip("'") user_attrs["validate_regex"] = regex.get("should_validate", False) else: @@ -147,7 +156,10 @@ def 
render_variable_template(cls, entity_context, context="", secrets_dict=[]): task = TaskType.decompile(options) task.__name__ = "SampleTask" user_attrs["value"] = render_task_template( - task, entity_context=entity_context + task, + entity_context=entity_context, + credentials_list=credentials_list, + rendered_credential_list=rendered_credential_list, ) if data_type == "BASE": diff --git a/calm/dsl/init/blueprint/ahv_blueprint.py.jinja2 b/calm/dsl/init/blueprint/ahv_blueprint.py.jinja2 index 6609d163..8cd85b17 100644 --- a/calm/dsl/init/blueprint/ahv_blueprint.py.jinja2 +++ b/calm/dsl/init/blueprint/ahv_blueprint.py.jinja2 @@ -1,4 +1,4 @@ -{% macro BlueprintTemplate(bp_name, subnet_name, cluster_name) -%} +{% macro BlueprintTemplate(bp_name, subnet_name, cluster_name, vpc_name) -%} # THIS FILE IS AUTOMATICALLY GENERATED. """ Sample Calm DSL for {{bp_name}} blueprint @@ -50,6 +50,7 @@ from calm.dsl.builtins import action, parallel, ref, basic_cred from calm.dsl.builtins import read_local_file from calm.dsl.builtins import vm_disk_package, AhvVmDisk, AhvVmNic from calm.dsl.builtins import AhvVmGC, AhvVmResources, AhvVm +from calm.dsl.builtins import AhvVmGC, AhvVmResources, AhvVm, Ref # SSH Credentials @@ -154,7 +155,10 @@ class {{bp_name}}VmResources(AhvVmResources): disks = [ AhvVmDisk.Disk.Scsi.cloneFromVMDiskPackage(CentosPackage, bootable=True), ] - nics = [AhvVmNic.DirectNic.ingress(subnet="{{subnet_name}}", cluster="{{cluster_name}}")] + {% if vpc_name != "" %}nics = [AhvVmNic.NormalNic.ingress(subnet="{{subnet_name}}", vpc="{{vpc_name}}")] + {% else %}nics = [AhvVmNic.DirectNic.ingress(subnet="{{subnet_name}}", cluster="{{cluster_name}}")] + {% endif %} + guest_customization = AhvVmGC.CloudInit( config={ @@ -173,6 +177,7 @@ class {{bp_name}}Vm(AhvVm): resources = {{bp_name}}VmResources categories = {"AppFamily": "Demo", "AppType": "Default"} + cluster = Ref.Cluster(name="{{cluster_name}}") class {{bp_name}}Substrate(Substrate): @@ -246,4 +251,4 @@ class {{bp_name}}(Blueprint): {%- endmacro %} -{{BlueprintTemplate(bp_name, subnet_name, cluster_name)}} +{{BlueprintTemplate(bp_name, subnet_name, cluster_name, vpc_name)}} diff --git a/calm/dsl/init/blueprint/ahv_single_vm_blueprint.py.jinja2 b/calm/dsl/init/blueprint/ahv_single_vm_blueprint.py.jinja2 index a6ca1f40..a6f4cf8b 100644 --- a/calm/dsl/init/blueprint/ahv_single_vm_blueprint.py.jinja2 +++ b/calm/dsl/init/blueprint/ahv_single_vm_blueprint.py.jinja2 @@ -1,4 +1,4 @@ -{% macro BlueprintTemplate(bp_name, subnet_name, cluster_name, vm_image) -%} +{% macro BlueprintTemplate(bp_name, subnet_name, cluster_name, vm_image, vpc_name) -%} # THIS FILE IS AUTOMATICALLY GENERATED. 
""" Sample Calm DSL for {{bp_name}} blueprint @@ -50,6 +50,7 @@ from calm.dsl.builtins import AhvVmResources, ahv_vm from calm.dsl.builtins import AhvVmDisk, AhvVmNic, AhvVmGC from calm.dsl.builtins import VmProfile, VmBlueprint from calm.dsl.builtins import Metadata +from calm.dsl.builtins import Ref # SSH Credentials @@ -69,7 +70,9 @@ class {{bp_name}}VmResources(AhvVmResources): disks = [ AhvVmDisk.Disk.Scsi.cloneFromImageService("{{vm_image}}", bootable=True), ] - nics = [AhvVmNic.DirectNic.ingress(subnet="{{subnet_name}}", cluster="{{cluster_name}}")] + {% if vpc_name != "" %}nics = [AhvVmNic.NormalNic.ingress(subnet="{{subnet_name}}", vpc="{{vpc_name}}")] + {% else %}nics = [AhvVmNic.DirectNic.ingress(subnet="{{subnet_name}}", cluster="{{cluster_name}}")] + {% endif %} guest_customization = AhvVmGC.CloudInit( config={ @@ -91,7 +94,7 @@ class {{bp_name}}Profile(VmProfile): var2 = Variable.Simple("sample_val2", runtime=True) # Vm Spec for Substrate - provider_spec = ahv_vm(resources={{bp_name}}VmResources, name="{{bp_name}}Vm") + provider_spec = ahv_vm(resources={{bp_name}}VmResources, name="{{bp_name}}Vm", cluster=Ref.Cluster(name="{{cluster_name}}")) # Package Actions @action @@ -148,4 +151,4 @@ class {{bp_name}}Metadata(Metadata): {%- endmacro %} -{{BlueprintTemplate(bp_name, subnet_name, cluster_name, vm_image)}} +{{BlueprintTemplate(bp_name, subnet_name, cluster_name, vm_image, vpc_name)}} diff --git a/calm/dsl/init/blueprint/render.py b/calm/dsl/init/blueprint/render.py index acc22592..57a0c084 100644 --- a/calm/dsl/init/blueprint/render.py +++ b/calm/dsl/init/blueprint/render.py @@ -86,16 +86,39 @@ def render_ahv_template(template, bp_name): ) sys.exit(-1) - cluster_name = subnet_cache_data["cluster_name"] default_subnet = subnet_cache_data["name"] - LOG.info( - "Using Nutanix PC account {}, cluster {}, subnet {}".format( - account_name, cluster_name, default_subnet + + if subnet_cache_data["subnet_type"] == "OVERLAY": + vpc_name = subnet_cache_data["vpc_name"] + cluster_data = account_cache_data.get("data", {}).get("clusters", {}) + if cluster_data: + cluster_name = list({v: k for k, v in cluster_data.items()}.keys())[0] + else: + LOG.error( + "No clusters attached to {}(uuid = {})".format( + account_name, account_uuid + ) + ) + sys.exit(-1) + LOG.info( + "Using Nutanix PC account: {}, vpc: {}, overlay_subnet: {}".format( + account_name, vpc_name, default_subnet + ) + ) + else: + vpc_name = "" + cluster_name = subnet_cache_data["cluster_name"] + LOG.info( + "Using Nutanix PC account: {}, cluster: {}, subnet: {}".format( + account_name, cluster_name, default_subnet + ) ) - ) LOG.info("Rendering ahv template") text = template.render( - bp_name=bp_name, subnet_name=default_subnet, cluster_name=cluster_name + bp_name=bp_name, + subnet_name=default_subnet, + cluster_name=cluster_name, + vpc_name=vpc_name, ) return text.strip() + os.linesep @@ -173,9 +196,34 @@ def render_single_vm_bp_ahv_template(template, bp_name): ) sys.exit(-1) - cluster_name = subnet_cache_data["cluster_name"] default_subnet = subnet_cache_data["name"] + if subnet_cache_data["subnet_type"] == "OVERLAY": + vpc_name = subnet_cache_data["vpc_name"] + cluster_data = account_cache_data.get("data", {}).get("clusters", {}) + if cluster_data: + cluster_name = list({v: k for k, v in cluster_data.items()}.keys())[0] + else: + LOG.error( + "No clusters attached to {}(uuid = {})".format( + account_name, account_uuid + ) + ) + sys.exit(-1) + LOG.info( + "Using Nutanix PC account: {}, vpc: {}, overlay_subnet: {}".format( + 
account_name, vpc_name, default_subnet + ) + ) + else: + vpc_name = "" + cluster_name = subnet_cache_data["cluster_name"] + LOG.info( + "Using Nutanix PC account: {}, cluster: {}, subnet: {}".format( + account_name, cluster_name, default_subnet + ) + ) + # Fetch image for vm AhvVmProvider = get_provider("AHV_VM") AhvObj = AhvVmProvider.get_api_obj() @@ -203,17 +251,13 @@ def render_single_vm_bp_ahv_template(template, bp_name): LOG.error("No Disk image found on account(uuid='{}')".format(account_uuid)) sys.exit(-1) - LOG.info( - "Using Nutanix PC account {}, cluster {}, subnet {}".format( - account_name, cluster_name, default_subnet - ) - ) LOG.info("Rendering ahv template") text = template.render( bp_name=bp_name, subnet_name=default_subnet, cluster_name=cluster_name, vm_image=vm_image, + vpc_name=vpc_name, ) return text.strip() + os.linesep diff --git a/calm/dsl/providers/plugins/ahv_vm/ahv_vm_provider_spec.yaml.jinja2 b/calm/dsl/providers/plugins/ahv_vm/ahv_vm_provider_spec.yaml.jinja2 index 938086cb..97f79d5a 100644 --- a/calm/dsl/providers/plugins/ahv_vm/ahv_vm_provider_spec.yaml.jinja2 +++ b/calm/dsl/providers/plugins/ahv_vm/ahv_vm_provider_spec.yaml.jinja2 @@ -410,6 +410,7 @@ properties: power_state: type: string enum: [ON, OFF] + default: ON gpu_list: type: array items: diff --git a/calm/dsl/providers/plugins/ahv_vm/constants.py b/calm/dsl/providers/plugins/ahv_vm/constants.py index 7a3fa29d..5a1ea1e2 100644 --- a/calm/dsl/providers/plugins/ahv_vm/constants.py +++ b/calm/dsl/providers/plugins/ahv_vm/constants.py @@ -10,7 +10,11 @@ class AHV: GUEST_CUSTOMIZATION_SCRIPT_TYPES = ["cloud_init", "sysprep"] SYS_PREP_INSTALL_TYPES = ["FRESH", "PREPARED"] - BOOT_TYPES = {"Legacy BIOS": "LEGACY", "UEFI": "UEFI"} + BOOT_TYPES = { + "Legacy BIOS": "LEGACY", + "UEFI": "UEFI", + "UEFI SECURE BOOT": "SECURE_BOOT", + } OPERATION_TYPES = { "DISK": ["CLONE_FROM_IMAGE", "ALLOCATE_STORAGE_CONTAINER"], "CDROM": ["CLONE_FROM_IMAGE", "EMPTY_CDROM"], diff --git a/calm/dsl/providers/plugins/ahv_vm/main.py b/calm/dsl/providers/plugins/ahv_vm/main.py index bc7ee220..db9fb25d 100644 --- a/calm/dsl/providers/plugins/ahv_vm/main.py +++ b/calm/dsl/providers/plugins/ahv_vm/main.py @@ -15,6 +15,9 @@ from .constants import AHV as AhvConstants +from calm.dsl.store import Cache +from calm.dsl.constants import CACHE + LOG = get_logging_handle(__name__) Provider = get_provider_interface() @@ -1261,6 +1264,7 @@ def create_spec(client): # As account_uuid is required for versions>2.9.0 account_uuid = "" + accounts = [] is_host_pc = True payload = {"length": 250, "filter": "type==nutanix_pc"} res, err = client.account.list(payload) @@ -1271,8 +1275,29 @@ def create_spec(client): for entity in res["entities"]: entity_id = entity["metadata"]["uuid"] if entity_id in reg_accounts: - account_uuid = entity_id - break + accounts.append( + {"name": entity["metadata"]["name"], "uuid": entity["metadata"]["uuid"]} + ) + + if len(accounts) > 1: + click.echo("Choose from given accounts:") + for ind, account in enumerate(accounts): + click.echo( + "\t {}. {}".format(str(ind + 1), highlight_text(account["name"])) + ) + + while True: + ind = click.prompt("\nEnter the index of account", default=1) + if (ind > len(accounts)) or (ind <= 0): + click.echo("Invalid index !!! 
") + else: + account_uuid = accounts[ind - 1]["uuid"] + click.echo( + "{} selected".format(highlight_text(accounts[ind - 1]["name"])) + ) + break + elif len(accounts) == 1: + account_uuid = accounts[0]["uuid"] # TODO Host PC dependency for categories call due to bug https://jira.nutanix.com/browse/CALM-17213 if account_uuid: @@ -1513,6 +1538,8 @@ def create_spec(client): boot_type = AhvConstants.BOOT_TYPES[boot_types[res - 1]] if boot_type == AhvConstants.BOOT_TYPES["UEFI"]: spec["resources"]["boot_config"]["boot_type"] = boot_type + elif boot_type == AhvConstants.BOOT_TYPES["UEFI SECURE BOOT"]: + spec["resources"]["boot_config"]["boot_type"] = boot_type click.echo("{} selected".format(highlight_text(boot_type))) break @@ -1615,13 +1642,29 @@ def create_spec(client): nics = nics["entities"] click.echo("\nChoose from given subnets:") for ind, nic in enumerate(nics): - click.echo( - "\t {}. {} ({})".format( - str(ind + 1), - highlight_text(nic["status"]["name"]), - highlight_text(nic["status"]["cluster_reference"]["name"]), + name = Cache.get_entity_data_using_uuid + if nic["status"]["resources"]["subnet_type"] == "VLAN": + click.echo( + "\t {}. {} ({})".format( + str(ind + 1), + highlight_text(nic["status"]["name"]), + highlight_text(nic["status"]["cluster_reference"]["name"]), + ) + ) + else: + uuid_for_vpc_name = nic["status"]["resources"]["vpc_reference"][ + "uuid" + ] + vpc_name_for_overlay = Cache.get_entity_data_using_uuid( + entity_type=CACHE.ENTITY.AHV_VPC, uuid=uuid_for_vpc_name + ) + click.echo( + "\t {}. {} ({})".format( + str(ind + 1), + highlight_text(nic["status"]["name"]), + highlight_text(vpc_name_for_overlay["name"]), + ) ) - ) spec["resources"]["nic_list"] = [] while True: diff --git a/calm/dsl/providers/plugins/vmware_vm/vmware_vm_provider_spec.yaml.jinja2 b/calm/dsl/providers/plugins/vmware_vm/vmware_vm_provider_spec.yaml.jinja2 index 5e5ab831..c49d77c2 100644 --- a/calm/dsl/providers/plugins/vmware_vm/vmware_vm_provider_spec.yaml.jinja2 +++ b/calm/dsl/providers/plugins/vmware_vm/vmware_vm_provider_spec.yaml.jinja2 @@ -412,6 +412,7 @@ properties: default: 0 power_state: type: string + enum: [ON, OFF, poweron, poweroff] default: poweron type: type: string diff --git a/calm/dsl/store/version.py b/calm/dsl/store/version.py index 2efaeb90..e2b4acd0 100644 --- a/calm/dsl/store/version.py +++ b/calm/dsl/store/version.py @@ -2,53 +2,74 @@ from calm.dsl.db import get_db_handle from calm.dsl.api import get_api_client +from calm.dsl.config import get_context +from calm.dsl.db.table_config import VersionTable +from calm.dsl.log import get_logging_handle + +LOG = get_logging_handle(__name__) class Version: """Version class Implementation""" @classmethod - def create(cls, name="", version=""): + def create(cls, name="", pc_ip="", version=""): """Store the uuid of entity in cache""" db = get_db_handle() - db.version_table.create(name=name, version=version) + db.version_table.create(name=name, pc_ip=pc_ip, version=version) @classmethod def get_version(cls, name): """Returns the version of entity present""" - db = get_db_handle() + db_handle = get_db_handle() try: - entity = db.version_table.get(db.version_table.name == name) + entity = db_handle.version_table.get(db_handle.version_table.name == name) return entity.version - except peewee.DoesNotExist: + except (peewee.OperationalError, peewee.DoesNotExist): return None @classmethod - def sync(cls): + def get_version_data(cls, name): + """Returns the data stored in version cache for name supplied""" db = get_db_handle() - for entity in 
db.version_table.select(): - query = db.version_table.delete().where( - db.version_table.name == entity.name - ) - query.execute() - - client = get_api_client() - - # Update calm version - res, err = client.version.get_calm_version() - calm_version = res.content.decode("utf-8") - cls.create("Calm", calm_version) - - # Update pc_version of PC(if host exist) - res, err = client.version.get_pc_version() - if not err: - res = res.json() - pc_version = res["version"] - cls.create("PC", pc_version) + try: + entity = db.version_table.get_entity_data(name) + return entity + except: + return {} + + @classmethod + def sync(cls): + try: + db = get_db_handle() + db.version_table.clear() + + client = get_api_client() + ContextObj = get_context() + server_config = ContextObj.get_server_config() + pc_ip = server_config["pc_ip"] + + # Update calm version + res, err = client.version.get_calm_version() + calm_version = res.content.decode("utf-8") + cls.create("Calm", pc_ip, calm_version) + + # Update pc_version of PC(if host exist) + res, err = client.version.get_pc_version() + if not err: + res = res.json() + pc_version = res["version"] + cls.create("PC", pc_ip, pc_version) + + except (peewee.OperationalError, peewee.IntegrityError): + db_handle = get_db_handle() + db_handle.db.drop_tables([VersionTable]) + db_handle.db.create_tables([VersionTable]) + cls.sync() @classmethod def get_cache_type(cls): diff --git a/calm/dsl/tools/__init__.py b/calm/dsl/tools/__init__.py index 250700e6..571d2ace 100644 --- a/calm/dsl/tools/__init__.py +++ b/calm/dsl/tools/__init__.py @@ -1,6 +1,6 @@ from .ping import ping from .validator import StrictDraft7Validator -from .utils import get_module_from_file, make_file_dir +from .utils import get_module_from_file, make_file_dir, get_escaped_quotes_string __all__ = [ @@ -9,4 +9,5 @@ "StrictDraft7Validator", "get_module_from_file", "make_file_dir", + "get_escaped_quotes_string", ] diff --git a/calm/dsl/tools/utils.py b/calm/dsl/tools/utils.py index 21dd26fe..f8996b2a 100644 --- a/calm/dsl/tools/utils.py +++ b/calm/dsl/tools/utils.py @@ -36,3 +36,14 @@ def get_module_from_file(module_name, file): sys.exit(-1) return user_module + + +def get_escaped_quotes_string(val): + """Returns the string with single and double quotes escaped using backslashes""" + + if not isinstance(val, str): + return val + + val = val.replace('"', '\\"') + val = val.replace("'", "\\'") + return val diff --git a/docs/Blueprints/ahv_update_config/README.md b/docs/Blueprints/ahv_update_config/README.md new file mode 100644 index 00000000..93c97901 --- /dev/null +++ b/docs/Blueprints/ahv_update_config/README.md @@ -0,0 +1,44 @@ +Supports adding actions in patch configs (update configs) for AHV using the `AhvUpdateConfigAttrs` class.
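In short, an `AhvUpdateConfigAttrs` patch config bundles VM resource modifications (memory, vCPU, sockets, disks, NICs, and categories) together with an optional set of sequential tasks that run when the update config is applied, as the example below shows.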
For further details about this class refer [here](../../../release-notes/3.3.0/README.md#ahvupdateconfigattrs) + +#### Example +```python +class AhvUpdateAttrs(AhvUpdateConfigAttrs): + memory = PatchField.Ahv.memory(value="2", operation="equal", max_val=0, min_val=0, editable=False) + vcpu = PatchField.Ahv.vcpu(value="2", operation="equal", max_val=0, min_val=0) + numsocket = PatchField.Ahv.numsocket(value="2", operation="equal", max_val=0, min_val=0) + disk_delete = True + categories_delete = True + nic_delete = True + categories_add = True + nics = [ + PatchField.Ahv.Nics.delete(index=1, editable=True), + PatchField.Ahv.Nics.add( + AhvVmNic.DirectNic.ingress( + subnet="nested_vms", cluster="auto_cluster_prod_1a5e1b6769ad" + ), + editable=False, + ), + ] + disks = [ + PatchField.Ahv.Disks.delete(index=1), + PatchField.Ahv.Disks.modify( + index=2, editable=True, value="2", operation="equal", max_val=4, min_val=1 + ), + PatchField.Ahv.Disks.add( + AhvVmDisk.Disk.Pci.allocateOnStorageContainer(10), + editable=False, + ), + ] + categories = [ + PatchField.Ahv.Category.add({"TemplateType": "Vm"}), + PatchField.Ahv.Category.delete({"AppFamily": "Demo", "AppType": "Default"}), + ] + + @action + def app_edit_action_first(): + Task.Exec.escript(name="Task1", script="print 'Hello!'") + Task.Exec.escript(name="Task2", script="print 'Hello2!'") + Task.Exec.escript(name="Task3", script="print 'Hello3!'") +``` + +Note: Only sequential tasks are supported in an action. \ No newline at end of file diff --git a/docs/Blueprints/downloadable_images/README.md b/docs/Blueprints/downloadable_images/README.md new file mode 100644 index 00000000..213ade00 --- /dev/null +++ b/docs/Blueprints/downloadable_images/README.md @@ -0,0 +1,19 @@ +For details on Simple Blueprint Models refer [here](../../../release-notes/3.2.0/README.md#simpleblueprint-model) + +Allows you to add a `packages` attribute that specifies downloadable images to be used in the blueprint. See the following example of using downloadable images in the simple blueprint model: + +``` +DISK_IMAGE_SOURCE = "" +DownloadableDiskPackage = vm_disk_package( + name="", + config={"image": {"source": DISK_IMAGE_SOURCE}}, +) + +class SampleSimpleBlueprint(SimpleBlueprint): + """SimpleBlueprint configuration""" + + deployments = [MySqlDeployment] + environments = [Ref.Environment(name=ENV_NAME)] + packages = [DownloadableDiskPackage] # add downloadable image packages here + +``` \ No newline at end of file diff --git a/docs/Blueprints/snapshot_restore/README.md b/docs/Blueprints/snapshot_restore/README.md new file mode 100644 index 00000000..0d2d3c5d --- /dev/null +++ b/docs/Blueprints/snapshot_restore/README.md @@ -0,0 +1,24 @@ +For AHV snapshot restore refer [here](../../../release-notes/3.3.0/README.md) + +For VMware snapshot restore refer below: + +1. `AppProtection.SnapshotConfig`: Contains classes `AppProtection.SnapshotConfig.Ahv` and `AppProtection.SnapshotConfig.Vmware` for the Nutanix and VMware providers, respectively. `AppProtection.SnapshotConfig` defaults to the Ahv class for backward compatibility. + +2. `AppProtection.RestoreConfig`: Contains classes `AppProtection.RestoreConfig.Ahv` and `AppProtection.RestoreConfig.Vmware` for the Nutanix and VMware providers, respectively. `AppProtection.RestoreConfig` defaults to the Ahv class for backward compatibility. + +Sample Profile class containing snapshot/restore configs for the VMWARE provider.
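The snapshot config references its restore config through `ref(restore_configs[0])` and attaches a protection policy by name, as shown below.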
+```python +from calm.dsl.builtins import AppProtection +class VmwareProfile(Profile): + + deployments = [VmwDeployment] + restore_configs = [ + AppProtection.RestoreConfig.Vmware(name="r1", + target=ref(VmwDeployment)) + ] + snapshot_configs = [ + AppProtection.SnapshotConfig.Vmware(name="s1", + restore_config=ref(restore_configs[0]), + policy=AppProtection.ProtectionPolicy("policy1", rule_name="rule_name")) + ] +``` \ No newline at end of file diff --git a/docs/Job_scheduler/README.md b/docs/Job_scheduler/README.md new file mode 100644 index 00000000..2dd3c3a1 --- /dev/null +++ b/docs/Job_scheduler/README.md @@ -0,0 +1,51 @@ +For further details on Job scheduler refer [here](../../release-notes/3.4.0/README.md) + +#### Recurring Job in Scheduler with no expiration, Application Action as the executable + +- The job `test_no_expiration_app_job` will execute the `Start` action on the app from `2022-03-01 23:17:15` with no expiry time. +- Skip passing the `expiry_time` parameter to set no expiration on the job. + + from calm.dsl.builtins import Job, JobScheduler + + start_date = "2022-03-01 23:17:15" + cron = "50 23 * * *" + time_zone = "Asia/Calcutta" + + APP_NAME = "job_recurring_no_expiration_app_action" + + class JobRecurring(Job): + """ + Recurring Job with no expiration to Start action on app. + Note: Skip passing expiry_time parameter to set no expiration in job. + """ + + name = "test_no_expiration_app_job" + schedule_info = JobScheduler.ScheduleInfo.recurring( + cron, start_date, time_zone=time_zone + ) + executable = JobScheduler.Exec.app_action(APP_NAME, "Start") + +#### Recurring Job in Scheduler with no expiration, Runbook as the executable + +- The runbook `job_recurring_no_expiration_runbook` will be executed from `2022-03-08 19:14:00` (Asia/Calcutta) with no expiry time. +- Skip passing the `expiry_time` parameter to set no expiration on the job. + + from calm.dsl.builtins import Job, JobScheduler + + start_date = "2022-03-08 19:14:00" + cron = "50 23 * * *" + time_zone = "Asia/Calcutta" + + RUNBOOK_NAME = "job_recurring_no_expiration_runbook" + + class JobRecurring(Job): + """ + Recurring job with no expiration to execute runbook. + Note: Skip passing expiry_time parameter to set no expiration in job. + """ + + name = "test_no_expiration_rb_job" + schedule_info = JobScheduler.ScheduleInfo.recurring( + cron, start_date, time_zone=time_zone + ) + executable = JobScheduler.Exec.runbook(RUNBOOK_NAME, False) \ No newline at end of file diff --git a/docs/Power-Actions-in-Blueprint/README.md b/docs/Power-Actions-in-Blueprint/README.md new file mode 100644 index 00000000..fbde436f --- /dev/null +++ b/docs/Power-Actions-in-Blueprint/README.md @@ -0,0 +1,152 @@ +# VM Power actions support in DSL + +1. Allows you to provide the Power On, Power Off option in the config for AHV, VMWARE providers +2. Allows you to create post create actions in a substrate +3. Allows you to add VM Power ON/ Power Off/ Check Login actions in custom actions for AHV, VMWARE, AZURE, AWS, GCP at service/profile level +4. Allows you to create VM Power ON/ Power Off/ Check Login actions in post create tasks. + +## Support to provide the Power On, Power Off option in the config for AHV, VMWARE providers + +To enable the power state in an AHV or VMware config similar to the one below, use the `power_state` attribute of the `AhvVmResources` class (AHV only) or specify it in the YAML spec (AHV and VMware).
+ +![power_state](images/power_state.png "power_state_image") + +```python + +class ResourceClass(AhvVmResources): + memory = 1 + vCPUs = 1 + cores_per_vCPU = 1 + disks = [AhvVmDisk.Disk.Scsi.cloneFromImageService(CENTOS_CI, bootable=True)] + nics = [AhvVmNic.NormalNic.ingress(SUBNET_NAME, cluster=CLUSTER)] + power_state = "ON" # specify it here + +``` + +```yaml +resources: + account_uuid: f87469b4-cc06-4f0e-b51b-99b9b3fa8b41 + power_state: poweron # specify it here + controller_list: [] + cpu_hot_add: false + disk_list: [] + memory_hot_plug: false + memory_size_mib: 3072 + nic_list: + - net_name: key-vim.host.PortGroup-vlan.112 + nic_type: e1000 + type: '' + num_sockets: 1 + num_vcpus_per_socket: 1 + +``` + +Note: +- Valid power state values for VMware: [ON, OFF, poweron, poweroff] +- Valid power state values for AHV: [ON, OFF] + +These reflect the power state fields supported by the Calm backend. + +## Support to create post create actions in substrate + +Added the `__post_create__` function in the Substrate class. This allows users to create tasks that will be executed after the VM is created. To add tasks in post create, follow the example below: + +```python +class VM1(Substrate): + """AHV VM Substrate""" + + provider_type = "AHV_VM" + provider_spec = AhvVm1 + + @action + def __post_create__(): + Task.Exec.escript( + name="Task1", script="print 'Post create task runs after VM is created'" + ) +``` + +## Support to add VM Power ON/ Power Off/ Check Login actions in Custom actions for AHV, VMWARE, AZURE, AWS, GCP at service/profile level + +Supports `__vm_power_on__`, `__vm_power_off__`, `__vm_restart__`, and `__vm_check_login__` functions in the Substrate class to create actions containing runbooks for action_poweron, action_poweroff, action_restart, and action_check_login. These are default system-level actions that are referenced as runbooks when vm power actions are inserted into custom actions at profile/service level. This support is provided for the AHV, VMWARE, AZURE, AWS, GCP cloud providers. + +Use the following example to create vm power actions at service/profile level: + +```python + +class VM1(Substrate): + + provider_type = "AHV_VM" + provider_spec = AhvVm1 + + @action + def __vm_power_on__(): + pass + + @action + def __vm_power_off__(): + pass + + @action + def __vm_restart__(): + pass + + @action + def __vm_check_login__(): + pass + + +class Profile1(Profile): + + deployments = [Deployment1] + + @action + def custom_profile_action_1(): + VM1.__vm_power_off__(name="VM1PowerOffTask") + VM1.__vm_power_on__(name="VM1PowerOnTask") + VM1.__vm_restart__(name="VM1PowerRestart") + VM1.__vm_check_login__(name="VM1CheckLogin") +``` + +- `__vm_power_on__` and the other vm power actions must be defined in the Substrate class before they can be used in profile/service level actions. +- If these vm power actions are not defined in the Substrate class, they can't be used in other classes to create custom actions. +- The default definition of vm power actions is given above and should not be modified. Any overriding of these methods will raise an error stating that overriding is not allowed. +- The default definition itself takes care of adding the tasks needed for proper functioning.
+- Use the following rule to create a custom action at service/profile level (angle brackets denote placeholders): + +```python +@action +def <action_name>(): + <substrate_class_name>.<__power_action_name__>(name="Task Name", target=ref(<target_class_name>)) + +# For example: +@action +def action1(): + Substrate1.__vm_power_on__(name="Task1", target=ref(Service1)) + +``` + +- `action_name` is a valid name for the action that contains the task calling the vm power action. This mimics the behaviour of the UI. +- If `target` is omitted, the service coupled with the substrate is used by default. + +- Valid targets are: + - Service class name for a substrate at service/profile level actions + - Deployment class name for a substrate at profile level actions + - Profile level actions can have both deployment and service targets. By default the service target is used. + +## Support to use vm power actions in post create + +Post create actions of a substrate, as defined above, can also contain vm power actions. For example: + +```python +class VM1(Substrate): + + provider_type = "AHV_VM" + provider_spec = AhvVm1 + + @action + def __post_create__(): + VM1.__vm_power_off__(name="PowerOffTask", target=ref(VM1)) +``` + +Note: The target for this should always be the Substrate class containing it. + diff --git a/docs/Power-Actions-in-Blueprint/images/power_state.png b/docs/Power-Actions-in-Blueprint/images/power_state.png new file mode 100644 index 00000000..aab5ea35 Binary files /dev/null and b/docs/Power-Actions-in-Blueprint/images/power_state.png differ diff --git a/examples/NetworkGroupTunnelReset/network_group_tunnel.py b/examples/NetworkGroupTunnelReset/network_group_tunnel.py new file mode 100644 index 00000000..aa335e83 --- /dev/null +++ b/examples/NetworkGroupTunnelReset/network_group_tunnel.py @@ -0,0 +1,12 @@ +""" +Spec file to be used for resetting (recreating) the tunnel VM in a given tunnel +Command: `calm reset network-group-tunnel-vm -f <spec_file> -n <tunnel_name>` +""" + +from calm.dsl.builtins import NetworkGroupTunnelVMSpec + +class NewNetworkGroupTunnel2(NetworkGroupTunnelVMSpec): + """Network group tunnel spec for reset""" + cluster = "auto_cluster_prod_4f4d4cfae296" + subnet = "test3" + type = "AHV" diff --git a/examples/Simple_blueprint/simple_bp_with_downloadable_image.py b/examples/Simple_blueprint/simple_bp_with_downloadable_image.py new file mode 100644 index 00000000..c278eab4 --- /dev/null +++ b/examples/Simple_blueprint/simple_bp_with_downloadable_image.py @@ -0,0 +1,62 @@ +import json + +from calm.dsl.builtins import SimpleDeployment, SimpleBlueprint +from calm.dsl.builtins import Ref, Metadata +from calm.dsl.builtins import read_local_file, basic_cred +from calm.dsl.builtins import vm_disk_package, AhvVmDisk, AhvVmNic +from calm.dsl.builtins import AhvVmResources, AhvVm + +DSL_CONFIG = json.loads(read_local_file(".tests/config.json")) + +CRED_USERNAME = read_local_file(".tests/username") +CRED_PASSWORD = read_local_file(".tests/password") + +# OS Image details for VM +CENTOS_IMAGE_SOURCE = "http://download.nutanix.com/calm/CentOS-7-x86_64-1810.qcow2" +CentosPackage = vm_disk_package( + name="centos_disk", + config={"image": {"source": CENTOS_IMAGE_SOURCE}}, +) + +# project constants +PROJECT = DSL_CONFIG["PROJECTS"]["PROJECT1"] +PROJECT_NAME = PROJECT["NAME"] +NTNX_LOCAL_ACCOUNT = DSL_CONFIG["ACCOUNTS"]["NTNX_LOCAL_AZ"] +SUBNET_UUID = NTNX_LOCAL_ACCOUNT["SUBNETS"][0]["UUID"] + +Centos = basic_cred(CRED_USERNAME, CRED_PASSWORD, name="default cred", default=True) + +class HelloVmResources(AhvVmResources): + + memory = 4 + vCPUs = 2 + cores_per_vCPU = 1 + disks = [
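+        # bootable root disk cloned from the CentosPackage downloadable image defined above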
AhvVmDisk.Disk.Scsi.cloneFromVMDiskPackage(CentosPackage, bootable=True), + ] + nics = [AhvVmNic.DirectNic.ingress(subnet="subnet_name", cluster="cluster_name")] + + +class HelloVm(AhvVm): + + resources = HelloVmResources + categories = {"AppFamily": "Demo", "AppType": "Default"} + + +class VmDeployment(SimpleDeployment): + """Single VM service""" + + provider_spec = HelloVm + + +class SimpleLampBlueprint(SimpleBlueprint): + """Simple blueprint Spec""" + + credentials = [Centos] + deployments = [VmDeployment] + packages = [CentosPackage] # add downloadable image packages here + + +class BpMetadata(Metadata): + + project = Ref.Project(PROJECT_NAME) \ No newline at end of file diff --git a/release-notes/3.7.2.2/README.md b/release-notes/3.7.2.2/README.md new file mode 100644 index 00000000..790fcebb --- /dev/null +++ b/release-notes/3.7.2.2/README.md @@ -0,0 +1,57 @@ + +# Major Feats + +1. Added support for **project decompilation** [[see details](../../README.md#projects)] and **environment decompilation** [[see details](../../README.md#environments)] + +2. Added support to **create vm power actions** in a blueprint. [see details](../../docs/Power-Actions-in-Blueprint/README.md) + +3. Added support to create actions in the `AhvUpdateConfigAttrs` class (patch config actions). [see details](../../docs/Blueprints/ahv_update_config/README.md) + +4. Added support to **create vmware snapshot configs**. [see details](../../docs/Blueprints/snapshot_restore/README.md) + +5. Added support for **downloadable images in the simple bp model**. [see details](../../docs/Blueprints/downloadable_images/README.md) + +6. [#252](https://github.com/nutanix/calm-dsl/issues/252) Added the `calm unpublish marketplace` command to support unpublishing from all projects, specific projects, or all versions of marketplace items. + +7. [#264](https://github.com/nutanix/calm-dsl/issues/264) Added no expiration as an option for recurring jobs. [see details](../../docs/Job_scheduler/README.md) + +# Bug Fixes +- [#291](https://github.com/nutanix/calm-dsl/issues/291) **Fixed decompilation of regex strings with backslashes**. Multiline-type profile variables with a regex validation string including tokens such as \r, \n, \t will now be properly escaped during decompile. +- [#289](https://github.com/nutanix/calm-dsl/issues/289) Added support to **decompile UEFI boot_type in blueprint**. +- [#283](https://github.com/nutanix/calm-dsl/issues/283) Fixed the **cluster macro decompile** issue. +- [#273](https://github.com/ideadevice/calm-dsl/issues/273) Added support to **decompile blueprints with vm power actions.** +- [#144](https://github.com/nutanix/calm-dsl/issues/144) Updates the version cache when config_file is supplied in CLI options. + +- [#213](https://github.com/nutanix/calm-dsl/issues/213) Fixed static IP address decompilation associated with NICs in the schema. +- [#255](https://github.com/nutanix/calm-dsl/issues/255) Fixed the `--with_endpoints` option when publishing a runbook to retain the endpoints as expected. +- [#177](https://github.com/nutanix/calm-dsl/issues/177) Fixed endpoint target decompile in blueprint tasks. +- Fixed a decompile issue when package_element is of type CUSTOM and has call_runbook_tasks. +- Fixed decompile failure for while loop tasks. +- Added support to decompile, compile and create dynamic variables that use an HTTP task with basic auth. +- Added the `-fc/--force` flag to create projects and environments.
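+ +    For example (an illustrative invocation; the file name is a placeholder): `calm create project -f project.py --force`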
+- Fixes for [#192](https://github.com/nutanix/calm-dsl/issues/192), [#226](https://github.com/nutanix/calm-dsl/issues/226), [#150](https://github.com/nutanix/calm-dsl/issues/150), [#50](https://github.com/nutanix/calm-dsl/issues/50) +- Added support for the `--all_projects` option in the `calm update marketplace` command +- Fixed the `calm get apps` command to list apps in deleted state. +- Fixed the `calm describe project` command to list the correct quotas assigned to a project. +- Added support to publish a bp/runbook to the marketplace attached to all projects and in approval-pending state through the `--all_projects/-ap` flag. + +    `calm publish bp <bp_name> -v <version> --all_projects` + +    `calm publish runbook <runbook_name> -v <version> --all_projects` + +- Added the `--remove-project/rp` flag to remove projects while approving an MPI. + +    `calm approve marketplace bp <mpi_name> -v <version> --remove-project <project_name>` + +    `calm approve marketplace runbook <mpi_name> -v <version> --remove-project <project_name>` + +- Added an option to choose among multiple Nutanix accounts associated with the project, if present, while creating an AHV provider spec. +- Added `nutanix_pc` as an account type to list Nutanix accounts, e.g. `calm get accounts --type nutanix_pc` + + diff --git a/test_decompile_env/assets/style.css b/test_decompile_env/assets/style.css new file mode 100644 index 00000000..3edac88e --- /dev/null +++ b/test_decompile_env/assets/style.css @@ -0,0 +1,186 @@ +body { + font-family: Helvetica, Arial, sans-serif; + font-size: 12px; + /* do not increase min-width as some may use split screens */ + min-width: 800px; + color: #999; +} + +h1 { + font-size: 24px; + color: black; +} + +h2 { + font-size: 16px; + color: black; +} + +p { + color: black; +} + +a { + color: #999; +} + +table { + border-collapse: collapse; +} + +/****************************** + * SUMMARY INFORMATION + ******************************/ +#environment td { + padding: 5px; + border: 1px solid #E6E6E6; +} +#environment tr:nth-child(odd) { + background-color: #f6f6f6; +} + +/****************************** + * TEST RESULT COLORS + ******************************/ +span.passed, +.passed .col-result { + color: green; +} + +span.skipped, +span.xfailed, +span.rerun, +.skipped .col-result, +.xfailed .col-result, +.rerun .col-result { + color: orange; +} + +span.error, +span.failed, +span.xpassed, +.error .col-result, +.failed .col-result, +.xpassed .col-result { + color: red; +} + +/****************************** + * RESULTS TABLE + * + * 1. Table Layout + * 2. Extra + * 3. Sorting items + * + ******************************/ +/*------------------ + * 1. Table Layout + *------------------*/ +#results-table { + border: 1px solid #e6e6e6; + color: #999; + font-size: 12px; + width: 100%; +} +#results-table th, +#results-table td { + padding: 5px; + border: 1px solid #E6E6E6; + text-align: left; +} +#results-table th { + font-weight: bold; +} + +/*------------------ + * 2.
Extra + *------------------*/ +.log { + background-color: #e6e6e6; + border: 1px solid #e6e6e6; + color: black; + display: block; + font-family: "Courier New", Courier, monospace; + height: 230px; + overflow-y: scroll; + padding: 5px; + white-space: pre-wrap; +} +.log:only-child { + height: inherit; +} + +div.image { + border: 1px solid #e6e6e6; + float: right; + height: 240px; + margin-left: 5px; + overflow: hidden; + width: 320px; +} +div.image img { + width: 320px; +} + +div.video { + border: 1px solid #e6e6e6; + float: right; + height: 240px; + margin-left: 5px; + overflow: hidden; + width: 320px; +} +div.video video { + overflow: hidden; + width: 320px; + height: 240px; +} + +.collapsed { + display: none; +} + +.expander::after { + content: " (show details)"; + color: #BBB; + font-style: italic; + cursor: pointer; +} + +.collapser::after { + content: " (hide details)"; + color: #BBB; + font-style: italic; + cursor: pointer; +} + +/*------------------ + * 3. Sorting items + *------------------*/ +.sortable { + cursor: pointer; +} + +.sort-icon { + font-size: 0px; + float: left; + margin-right: 5px; + margin-top: 5px; + /*triangle*/ + width: 0; + height: 0; + border-left: 8px solid transparent; + border-right: 8px solid transparent; +} +.inactive .sort-icon { + /*finish triangle*/ + border-top: 8px solid #E6E6E6; +} +.asc.active .sort-icon { + /*finish triangle*/ + border-bottom: 8px solid #999; +} +.desc.active .sort-icon { + /*finish triangle*/ + border-top: 8px solid #999; +} diff --git a/test_decompile_env/environment.py b/test_decompile_env/environment.py new file mode 100644 index 00000000..dfc9dad6 --- /dev/null +++ b/test_decompile_env/environment.py @@ -0,0 +1,66 @@ +# THIS FILE IS AUTOMATICALLY GENERATED. +# Disclaimer: Please test this file before using in production. +""" +Generated environment DSL (.py) +""" + +from calm.dsl.builtins import * + +# Secret Variables +BP_CRED_test_creds_PASSWORD = read_local_file("BP_CRED_test_creds_PASSWORD") +BP_CRED_test_creds = basic_cred( + "admin", + BP_CRED_test_creds_PASSWORD, + name="test_creds", + type="PASSWORD", +) + + +class vmcalm_array_indexcalm_timeResources(AhvVmResources): + + memory = 1 + vCPUs = 1 + cores_per_vCPU = 1 + disks = [ + AhvVmDisk.Disk.Scsi.cloneFromImageService("Centos7HadoopMaster", bootable=True) + ] + + +class vmcalm_array_indexcalm_time(AhvVm): + + name = "vm-@@{calm_array_index}@@-@@{calm_time}@@" + resources = vmcalm_array_indexcalm_timeResources + cluster = Ref.Cluster(name="auto_cluster_prod_4faf4699cdea") + + +class Untitled(Substrate): + + account = Ref.Account("NTNX_LOCAL_AZ") + os_type = "Linux" + provider_type = "AHV_VM" + provider_spec = vmcalm_array_indexcalm_time + + readiness_probe = readiness_probe( + connection_type="SSH", + disabled=True, + retries="5", + connection_port=22, + delay_secs="0", + ) + + +class ENV_test_decompile_env(Environment): + substrates = [Untitled] + credentials = [BP_CRED_test_creds] + + providers = [ + Provider.Ntnx( + account=Ref.Account("NTNX_LOCAL_AZ"), + subnets=[ + Ref.Subnet( + name="vlan.154", + cluster="auto_cluster_prod_4faf4699cdea", + ) + ], + ), + ] diff --git a/test_decompile_env/test-result.html b/test_decompile_env/test-result.html new file mode 100644 index 00000000..ac76878e --- /dev/null +++ b/test_decompile_env/test-result.html @@ -0,0 +1,273 @@ + + + + + Test Report + + + +

[test-result.html markup stripped during extraction — recoverable content: pytest-html v3.1.1 report titled "test-result.html", generated on 02-Nov-2023 at 15:18:32; Environment: Python 3.6.8, pytest 5.3.5 on Linux (CentOS 7.9) with plugins (cov, csv, flake8, html, xdist, and others); Summary: 0 tests ran in 0.02 seconds — 0 passed, 0 skipped, 0 failed, 0 errors, 0 expected failures, 0 unexpected passes, 0 rerun; Results table (Result / Test / Duration / Links) empty]
\ No newline at end of file diff --git a/test_decompile_env_aws/environment.py b/test_decompile_env_aws/environment.py new file mode 100644 index 00000000..c546cece --- /dev/null +++ b/test_decompile_env_aws/environment.py @@ -0,0 +1,65 @@ +# THIS FILE IS AUTOMATICALLY GENERATED. +# Disclaimer: Please test this file before using in production. +""" +Generated environment DSL (.py) +""" + +from calm.dsl.builtins import * + +# Secret Variables +BP_CRED_test_creds_PASSWORD = read_local_file("BP_CRED_test_creds_PASSWORD") +BP_CRED_test_creds = basic_cred( + "admin", + BP_CRED_test_creds_PASSWORD, + name="test_creds", + type="PASSWORD", +) + + +class Untitled(Substrate): + + os_type = "Windows" + provider_type = "AWS_VM" + provider_spec = read_provider_spec( + os.path.join("specs", "Untitled_provider_spec.yaml") + ) + + readiness_probe = readiness_probe( + connection_type="POWERSHELL", + disabled=False, + retries="5", + connection_port=5985, + address="@@{public_ip_address}@@", + delay_secs="60", + ) + + +class Untitled_1(Substrate): + + name = "Untitled" + + os_type = "Linux" + provider_type = "AWS_VM" + provider_spec = read_provider_spec( + os.path.join("specs", "Untitled_1_provider_spec.yaml") + ) + + readiness_probe = readiness_probe( + connection_type="SSH", + disabled=True, + retries="5", + connection_port=22, + address="@@{public_ip_address}@@", + delay_secs="60", + ) + + +class ENV_test_decompile_env_aws(Environment): + substrates = [Untitled, Untitled_1] + credentials = [BP_CRED_test_creds] + + providers = [ + Provider.Aws( + account=Ref.Account("primary"), + ), + ] diff --git a/test_decompile_env_aws/specs/Untitled_1_provider_spec.yaml b/test_decompile_env_aws/specs/Untitled_1_provider_spec.yaml new file mode 100644 index 00000000..38bdbf53 --- /dev/null +++ b/test_decompile_env_aws/specs/Untitled_1_provider_spec.yaml @@ -0,0 +1,34 @@ +availability_zone_reference: null +backup_policy: null +cluster_reference: null +name: vm-@@{calm_array_index}@@-@@{calm_time}@@ +resources: + account_uuid: e8c72001-e53b-f749-7254-e84f4abba73f + associate_public_ip_address: true + availability_zone: us-east-1a + block_device_map: + data_disk_list: [] + root_disk: + delete_on_termination: true + device_name: /dev/sda1 + iops: 100 + size_gb: 8 + snapshot_id: '' + type: '' + volume_type: GP2 + type: '' + image_id: ami-021d9d94f93a07a43 + instance_initiated_shutdown_behavior: '' + instance_profile_name: aws-controltower-AdministratorExecutionRole + instance_type: t3a.small + key_name: piu + private_ip_address: '' + region: us-east-1 + security_group_list: [] + state: RUNNING + subnet_id: subnet-9a9ea2b0 + tag_list: [] + type: '' + user_data: '' + vpc_id: vpc-dcd149bb +type: PROVISION_AWS_VM diff --git a/test_decompile_env_aws/specs/Untitled_provider_spec.yaml b/test_decompile_env_aws/specs/Untitled_provider_spec.yaml new file mode 100644 index 00000000..26cfb527 --- /dev/null +++ b/test_decompile_env_aws/specs/Untitled_provider_spec.yaml @@ -0,0 +1,34 @@ +availability_zone_reference: null +backup_policy: null +cluster_reference: null +name: vm-@@{calm_array_index}@@-@@{calm_time}@@ +resources: + account_uuid: e8c72001-e53b-f749-7254-e84f4abba73f + associate_public_ip_address: true + availability_zone: '' + block_device_map: + data_disk_list: [] + root_disk: + delete_on_termination: true + device_name: '' + iops: 100 + size_gb: 8 + snapshot_id: '' + type: '' + volume_type: GP2 + type: '' + image_id: '' + instance_initiated_shutdown_behavior: '' + instance_profile_name: '' + instance_type: '' + key_name: '' 
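+  # NOTE: fields with empty-string values in this decompiled provider spec were left unset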
+ private_ip_address: '' + region: '' + security_group_list: [] + state: RUNNING + subnet_id: '' + tag_list: [] + type: '' + user_data: '' + vpc_id: '' +type: PROVISION_AWS_VM diff --git a/test_decompile_env_gcp/environment.py b/test_decompile_env_gcp/environment.py new file mode 100644 index 00000000..0195226a --- /dev/null +++ b/test_decompile_env_gcp/environment.py @@ -0,0 +1,65 @@ +# THIS FILE IS AUTOMATICALLY GENERATED. +# Disclaimer: Please test this file before using in production. +""" +Generated environment DSL (.py) +""" + +from calm.dsl.builtins import * + +# Secret Variables +BP_CRED_test_cred_PASSWORD = read_local_file("BP_CRED_test_cred_PASSWORD") +BP_CRED_test_cred = basic_cred( + "admin", + BP_CRED_test_cred_PASSWORD, + name="test_cred", + type="PASSWORD", +) + + +class Untitled(Substrate): + + os_type = "Linux" + provider_type = "GCP_VM" + provider_spec = read_provider_spec( + os.path.join("specs", "Untitled_provider_spec.yaml") + ) + + readiness_probe = readiness_probe( + connection_type="SSH", + disabled=True, + retries="5", + connection_port=22, + address="@@{platform.networkInterfaces[0].accessConfigs[0].natIP}@@", + delay_secs="60", + ) + + +class Untitled_1(Substrate): + + name = "Untitled" + + os_type = "Windows" + provider_type = "GCP_VM" + provider_spec = read_provider_spec( + os.path.join("specs", "Untitled_1_provider_spec.yaml") + ) + + readiness_probe = readiness_probe( + connection_type="POWERSHELL", + disabled=True, + retries="5", + connection_port=5985, + address="@@{platform.networkInterfaces[0].accessConfigs[0].natIP}@@", + delay_secs="60", + ) + + +class ENV_test_decompile_env_gcp(Environment): + substrates = [Untitled, Untitled_1] + credentials = [BP_CRED_test_cred] + + providers = [ + Provider.Gcp( + account=Ref.Account("GCP"), + ), + ] diff --git a/test_decompile_env_gcp/specs/Untitled_1_provider_spec.yaml b/test_decompile_env_gcp/specs/Untitled_1_provider_spec.yaml new file mode 100644 index 00000000..658d2848 --- /dev/null +++ b/test_decompile_env_gcp/specs/Untitled_1_provider_spec.yaml @@ -0,0 +1,68 @@ +resources: + account_uuid: 1423621a-99ee-35be-e6d1-8f51d6986f41 + blankDisks: [] + canIpForward: false + description: '' + disks: + - autoDelete: true + boot: true + deviceName: '' + diskEncryptionKey: {} + disk_type: PERSISTENT + initializeParams: + diskName: '' + diskSizeGb: -1 + diskType: https://www.googleapis.com/compute/v1/projects/nucalm-devopos/zones/us-central1-c/diskTypes/hyperdisk-throughput + sourceImage: https://www.googleapis.com/compute/v1/projects/nucalm-devopos/global/images/centos-7 + sourceImageEncryptionKey: {} + type: '' + interface: '' + mode: '' + source: https://www.googleapis.com/compute/v1/projects/nucalm-devopos/zones/us-central1-c/disks/bdisk2-0-231103-044630 + type: '' + guestCustomization: {} + labelFingerprint: '' + labels: [] + machineType: https://www.googleapis.com/compute/v1/projects/nucalm-devopos/zones/us-central1-c/machineTypes/a2-highgpu-1g + metadata: + fingerprint: '' + items: [] + type: '' + minCpuPlatform: '' + name: test-vm-1 + networkInterfaces: + - accessConfigs: + - config_type: ONE_TO_ONE_NAT + name: test + natIP: '' + type: '' + aliasIpRanges: [] + associatePublicIP: true + network: https://www.googleapis.com/compute/v1/projects/nucalm-devopos/global/networks/default + networkIP: '' + subnetwork: https://www.googleapis.com/compute/v1/projects/nucalm-devopos/regions/us-central1/subnetworks/default + type: '' + scheduling: + automaticRestart: true + onHostMaintenance: TERMINATE + preemptible: false + 
type: '' + serviceAccounts: + - email: 108048128720-compute@developer.gserviceaccount.com + scopes: + - https://www.googleapis.com/auth/devstorage.read_only + - https://www.googleapis.com/auth/logging.write + - https://www.googleapis.com/auth/monitoring.write + - https://www.googleapis.com/auth/servicecontrol + - https://www.googleapis.com/auth/service.management.readonly + - https://www.googleapis.com/auth/trace.append + type: '' + sshKeys: [] + tags: + fingerprint: '' + items: + - ldap + type: '' + type: '' + zone: us-central1-c +type: PROVISION_GCP_VM diff --git a/test_decompile_env_gcp/specs/Untitled_provider_spec.yaml b/test_decompile_env_gcp/specs/Untitled_provider_spec.yaml new file mode 100644 index 00000000..5a10fbe8 --- /dev/null +++ b/test_decompile_env_gcp/specs/Untitled_provider_spec.yaml @@ -0,0 +1,68 @@ +resources: + account_uuid: 1423621a-99ee-35be-e6d1-8f51d6986f41 + blankDisks: [] + canIpForward: false + description: '' + disks: + - autoDelete: true + boot: true + deviceName: '' + diskEncryptionKey: {} + disk_type: PERSISTENT + initializeParams: + diskName: '' + diskSizeGb: -1 + diskType: https://www.googleapis.com/compute/v1/projects/nucalm-devopos/zones/us-central1-c/diskTypes/hyperdisk-extreme + sourceImage: https://www.googleapis.com/compute/v1/projects/nucalm-devopos/global/snapshots/postgres-image + sourceImageEncryptionKey: {} + type: '' + interface: '' + mode: '' + source: https://www.googleapis.com/compute/v1/projects/nucalm-devopos/zones/us-central1-c/disks/bdisk-delete-0-54a65a75 + type: '' + guestCustomization: {} + labelFingerprint: '' + labels: [] + machineType: https://www.googleapis.com/compute/v1/projects/nucalm-devopos/zones/us-central1-c/machineTypes/a2-highgpu-1g + metadata: + fingerprint: '' + items: [] + type: '' + minCpuPlatform: '' + name: test-vm-2 + networkInterfaces: + - accessConfigs: + - config_type: ONE_TO_ONE_NAT + name: test + natIP: '' + type: '' + aliasIpRanges: [] + associatePublicIP: true + network: https://www.googleapis.com/compute/v1/projects/nucalm-devopos/global/networks/default + networkIP: '' + subnetwork: https://www.googleapis.com/compute/v1/projects/nucalm-devopos/regions/us-central1/subnetworks/default + type: '' + scheduling: + automaticRestart: true + onHostMaintenance: TERMINATE + preemptible: false + type: '' + serviceAccounts: + - email: 108048128720-compute@developer.gserviceaccount.com + scopes: + - https://www.googleapis.com/auth/devstorage.read_only + - https://www.googleapis.com/auth/logging.write + - https://www.googleapis.com/auth/monitoring.write + - https://www.googleapis.com/auth/servicecontrol + - https://www.googleapis.com/auth/service.management.readonly + - https://www.googleapis.com/auth/trace.append + type: '' + sshKeys: [] + tags: + fingerprint: '' + items: + - ldap + type: '' + type: '' + zone: us-central1-c +type: PROVISION_GCP_VM diff --git a/test_decompile_env_vmware/environment.py b/test_decompile_env_vmware/environment.py new file mode 100644 index 00000000..9bebef90 --- /dev/null +++ b/test_decompile_env_vmware/environment.py @@ -0,0 +1,45 @@ +# THIS FILE IS AUTOMATICALLY GENERATED. +# Disclaimer: Please test this file before using in production. 
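+# Decompile fixture: a single Linux VMware substrate whose provider spec and
+# editable create-spec fields are read from the YAML files under specs/.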
+""" +Generated environment DSL (.py) +""" + +from calm.dsl.builtins import * + +# Secret Variables +BP_CRED_test_creds_PASSWORD = read_local_file("BP_CRED_test_creds_PASSWORD") +BP_CRED_test_creds = basic_cred( + "admin", + BP_CRED_test_creds_PASSWORD, + name="test_creds", + type="PASSWORD", +) + + +class Untitled(Substrate): + + os_type = "Linux" + provider_type = "VMWARE_VM" + provider_spec = read_vmw_spec(os.path.join("specs", "Untitled_provider_spec.yaml")) + provider_spec_editables = read_spec( + os.path.join("specs", "Untitled_create_spec_editables.yaml") + ) + readiness_probe = readiness_probe( + connection_type="SSH", + disabled=True, + retries="5", + connection_port=22, + address="@@{platform.ipAddressList[0]}@@", + delay_secs="60", + ) + + +class ENV_test_decompile_env_vmware(Environment): + substrates = [Untitled] + credentials = [BP_CRED_test_creds] + + providers = [ + Provider.Vmware( + account=Ref.Account("vmware_second"), + ), + ] diff --git a/test_decompile_env_vmware/specs/Untitled_create_spec_editables.yaml b/test_decompile_env_vmware/specs/Untitled_create_spec_editables.yaml new file mode 100644 index 00000000..5b7a6dfb --- /dev/null +++ b/test_decompile_env_vmware/specs/Untitled_create_spec_editables.yaml @@ -0,0 +1 @@ +resources: {} diff --git a/test_decompile_env_vmware/specs/Untitled_provider_spec.yaml b/test_decompile_env_vmware/specs/Untitled_provider_spec.yaml new file mode 100644 index 00000000..84de38a3 --- /dev/null +++ b/test_decompile_env_vmware/specs/Untitled_provider_spec.yaml @@ -0,0 +1,77 @@ +clone_is_template: false +cluster: '' +compute_drs_mode: false +datastore: ds:///vmfs/volumes/210932e4-ce99bf5c/ +drs_mode: false +folder: null +host: 00000000-0000-0000-0000-0cc47ac3fcb0 +library: null +name: vm-@@{calm_array_index}@@-@@{calm_time}@@ +resources: + account_uuid: 0b323c83-a649-7265-0176-14e3ae109f15 + controller_list: [] + cpu_hot_add: false + disk_list: [] + guest_customization: + cloud_init: '' + customization_name: '' + customization_type: GUEST_OS_LINUX + type: '' + memory_hot_plug: false + memory_size_mib: 2048 + nic_list: [] + num_sockets: 2 + num_vcpus_per_socket: 1 + power_state: poweron + tag_list: [] + template_controller_list: + - bus_sharing: noSharing + controller_type: VirtualLsiLogicSASController + is_deleted: false + key: 1000 + type: '' + template_disk_list: + - adapter_type: IDE + controller_key: 200 + device_slot: 0 + disk_mode: persistent + disk_size_mb: -1 + disk_type: cdrom + is_deleted: false + iso_path: '' + key: 3000 + location: '' + type: '' + - adapter_type: IDE + controller_key: 200 + device_slot: 1 + disk_mode: persistent + disk_size_mb: -1 + disk_type: cdrom + is_deleted: false + iso_path: '' + key: 3001 + location: '' + type: '' + - adapter_type: SCSI + controller_key: 1000 + device_slot: 0 + disk_mode: persistent + disk_size_mb: 40960 + disk_type: disk + is_deleted: false + iso_path: '' + key: 2000 + location: ds:///vmfs/volumes/99c8246a-966b6b35/ + type: '' + template_nic_list: + - is_deleted: false + key: 4000 + net_name: key-vim.host.PortGroup-Backplane Network + nic_type: e1000 + type: '' + type: '' +storage_drs_mode: false +storage_pod: '' +template: 503dbadd-2c28-deaf-201d-f1882d69a2fb +type: PROVISION_VMWARE_VM diff --git a/test_dsl_decompile/project.py b/test_dsl_decompile/project.py new file mode 100644 index 00000000..23aa817a --- /dev/null +++ b/test_dsl_decompile/project.py @@ -0,0 +1,38 @@ +# THIS FILE IS AUTOMATICALLY GENERATED. +# Disclaimer: Please test this file before using in production. 
+""" +Generated project DSL (.py) +Decompiles project's providers, users and quotas if available. +""" +from calm.dsl.builtins import Project +from calm.dsl.builtins import Provider, Ref + + +class test_dsl_decompile(Project): + + providers = [ + Provider.Vmware( + account=Ref.Account("vmware_second"), + ), + Provider.Gcp( + account=Ref.Account("GCP"), + ), + Provider.Ntnx( + account=Ref.Account("NTNX_LOCAL_AZ"), + subnets=[ + Ref.Subnet( + name="vlan.154", + cluster="auto_cluster_prod_4faf4699cdea", + ) + ], + ), + Provider.Aws( + account=Ref.Account("primary"), + ), + ] + + users = [ + Ref.User(name="local_ad_user_admin@adfs19.com"), + ] + + quotas = {"vcpus": 5, "storage": 5, "memory": 5} diff --git a/tests/ahv_vm_overlay_subnet/test_overlay_subnet_blueprint.py b/tests/ahv_vm_overlay_subnet/test_overlay_subnet_blueprint.py index d10c70be..c76c39a2 100644 --- a/tests/ahv_vm_overlay_subnet/test_overlay_subnet_blueprint.py +++ b/tests/ahv_vm_overlay_subnet/test_overlay_subnet_blueprint.py @@ -68,7 +68,7 @@ class HelloVmResources(AhvVmResources): AhvVmDisk.Disk.Scsi.cloneFromVMDiskPackage(CentosPackage, bootable=True), ] # TODO replace vpc, nic name from config - nics = [AhvVmNic.DirectNic.ingress(NETWORK1, vpc=VPC1)] + nics = [AhvVmNic.NormalNic.ingress(NETWORK1, vpc=VPC1)] # nics = [AhvVmNic.DirectNic.ingress(subnet="vlan.800", cluster="auto_cluster_prod_1a619308826b")] guest_customization = AhvVmGC.CloudInit( diff --git a/tests/api_interface/test_runbooks/test_files/decision_task.py b/tests/api_interface/test_runbooks/test_files/decision_task.py index 7e4d32f8..6140f6c2 100644 --- a/tests/api_interface/test_runbooks/test_files/decision_task.py +++ b/tests/api_interface/test_runbooks/test_files/decision_task.py @@ -72,18 +72,18 @@ def DecisionTask(endpoints=[linux_endpoint, windows_endpoint], default=False): name="FAILURE4", script="echo 'FAILURE'", target=endpoints[1] ) - with Task.Decision.escript(script="exit(0)") as d: + with Task.Decision.escript.py3(script="exit(0)") as d: if d.ok: - Task.Exec.escript(name="SUCCESS5", script="print 'SUCCESS'") + Task.Exec.escript.py3(name="SUCCESS5", script="print('SUCCESS')") else: - Task.Exec.escript(name="FAILURE5", script="print 'FAILURE'") + Task.Exec.escript.py3(name="FAILURE5", script="print('FAILURE')") - with Task.Decision.escript(script="exit(1)") as d: + with Task.Decision.escript.py3(script="exit(1)") as d: if d.ok: - Task.Exec.escript(name="SUCCESS6", script="print 'SUCCESS'") + Task.Exec.escript.py3(name="SUCCESS6", script="print('SUCCESS')") else: - Task.Exec.escript(name="FAILURE6", script="print 'FAILURE'") + Task.Exec.escript.py3(name="FAILURE6", script="print('FAILURE')") diff --git a/tests/api_interface/test_runbooks/test_files/decision_task_with_multiple_target.py b/tests/api_interface/test_runbooks/test_files/decision_task_with_multiple_target.py index ebcaacf2..f0c12bd9 100644 --- a/tests/api_interface/test_runbooks/test_files/decision_task_with_multiple_target.py +++ b/tests/api_interface/test_runbooks/test_files/decision_task_with_multiple_target.py @@ -46,9 +46,9 @@ escript = """url = "@@{endpoint.base_url}@@" if not "runbook" in url: - print "Failing" + print("Failing") exit(1) -print "Passing" +print("Passing") exit(0)""" linux_ip_pass_script = shell_script.replace("target_ip", linux_ip) @@ -83,7 +83,7 @@ def DecisionWithMultipleIpTarget( name="FAILURE1_D2", script="date", inherit_target=True ) - with Task.Decision.escript(script=escript, target=endpoints[1]) as d3: + with Task.Decision.escript.py3(script=escript, 
target=endpoints[1]) as d3: if d3.ok: Task.HTTP.post( name="HTTPTask1", diff --git a/tests/api_interface/test_runbooks/test_files/exec_task.py b/tests/api_interface/test_runbooks/test_files/exec_task.py index 13b716d7..3515a342 100644 --- a/tests/api_interface/test_runbooks/test_files/exec_task.py +++ b/tests/api_interface/test_runbooks/test_files/exec_task.py @@ -114,28 +114,30 @@ @runbook def EscriptTask(): - Task.Exec.escript(name="ExecTask", script='''print "Task is Successful"''') + Task.Exec.escript.py3(name="ExecTask", script="""print("Task is Successful")""") @runbook def EscriptMacroTask(): - Task.Exec.escript(name="EscriptMacroTask", filename="macro_escript.py") + Task.Exec.escript.py3(name="EscriptMacroTask", filename="macro_escript.py") @runbook def SetVariableOnEscript(): - Task.SetVariable.escript( + Task.SetVariable.escript.py3( name="SetVariableTask", - script='''print "task_state=Successful"''', + script="""print("task_state=Successful")""", variables=["task_state"], ) - Task.Exec.escript(name="ExecTask", script='''print "Task is @@{task_state}@@"''') + Task.Exec.escript.py3( + name="ExecTask", script="""print("Task is @@{task_state}@@")""" + ) @runbook def EscriptOnEndpoint(endpoints=[multiple_linux_endpoint]): - Task.Exec.escript( - name="ExecTask", script='''print "Task is Successful"''', target=endpoints[0] + Task.Exec.escript.py3( + name="ExecTask", script="""print("Task is Successful")""", target=endpoints[0] ) @@ -337,9 +339,9 @@ def MacroOnPowershell(endpoints=[windows_endpoint]): @runbook def MacroOnEscript(): - Task.Exec.escript( + Task.Exec.escript.py3( name="ExecTask", - script='''print "@@{calm_runbook_name}@@, @@{calm_runbook_uuid}@@ @@{calm_project_name}@@ @@{calm_jwt}@@ @@{calm_date}@@"''', + script="""print("@@{calm_runbook_name}@@, @@{calm_runbook_uuid}@@ @@{calm_project_name}@@ @@{calm_jwt}@@ @@{calm_date}@@")""", ) @@ -362,28 +364,28 @@ def EndpointMacroOnPowershell(endpoints=[windows_endpoint]): @runbook def WindowsEndpointMacroOnEscript(endpoints=[windows_endpoint], default=False): - Task.Exec.escript( + Task.Exec.escript.py3( name="ExecTask", target=endpoints[0], - script='''print "@@{endpoint.name}@@, @@{endpoint.type}@@, @@{endpoint.address}@@, @@{endpoint.port}@@,\ - @@{endpoint.connection_protocol}@@, @@{endpoint.credential.username}@@"''', + script="""print("@@{endpoint.name}@@, @@{endpoint.type}@@, @@{endpoint.address}@@, @@{endpoint.port}@@,\ + @@{endpoint.connection_protocol}@@, @@{endpoint.credential.username}@@")""", ) @runbook def LinuxEndpointMacroOnEscript(endpoints=[linux_endpoint], default=False): - Task.Exec.escript( + Task.Exec.escript.py3( name="ExecTask", target=endpoints[0], - script='''print "@@{endpoint.name}@@, @@{endpoint.type}@@, @@{endpoint.address}@@, @@{endpoint.port}@@, @@{endpoint.credential.username}@@"''', + script="""print("@@{endpoint.name}@@, @@{endpoint.type}@@, @@{endpoint.address}@@, @@{endpoint.port}@@, @@{endpoint.credential.username}@@")""", ) @runbook def HttpEndpointMacroOnEscript(endpoints=[http_endpoint], default=False): - Task.Exec.escript( + Task.Exec.escript.py3( name="ExecTask", target=endpoints[0], - script='''print "@@{endpoint.name}@@, @@{endpoint.type}@@, @@{endpoint.base_url}@@, @@{endpoint.retry_count}@@, \ - @@{endpoint.retry_interval}@@, @@{endpoint.tls_verify}@@, @@{endpoint.connection_timeout}@@"''', + script="""print("@@{endpoint.name}@@, @@{endpoint.type}@@, @@{endpoint.base_url}@@, @@{endpoint.retry_count}@@, \ + @@{endpoint.retry_interval}@@, @@{endpoint.tls_verify}@@, 
@@{endpoint.connection_timeout}@@")""", ) diff --git a/tests/api_interface/test_runbooks/test_files/http_task.py b/tests/api_interface/test_runbooks/test_files/http_task.py index d84c3ba4..de173a4b 100644 --- a/tests/api_interface/test_runbooks/test_files/http_task.py +++ b/tests/api_interface/test_runbooks/test_files/http_task.py @@ -87,7 +87,7 @@ def HTTPTask(endpoints=[endpoint]): target=endpoints[0], ) - Task.Exec.escript(name="ExecTask", script='''print "@@{ep_type}@@"''') + Task.Exec.escript.py3(name="ExecTask", script="""print("@@{ep_type}@@")""") return HTTPTask diff --git a/tests/api_interface/test_runbooks/test_files/marketplace_runbook.py b/tests/api_interface/test_runbooks/test_files/marketplace_runbook.py index 768e40f2..5d86172e 100644 --- a/tests/api_interface/test_runbooks/test_files/marketplace_runbook.py +++ b/tests/api_interface/test_runbooks/test_files/marketplace_runbook.py @@ -38,17 +38,17 @@ escript_code = """ -print "@@{var1}@@" +print("@@{var1}@@") if "@@{var1}@@" == "test": - print "yes" + print("yes") else: - print "no" -print "@@{var2}@@" + print("no") +print("@@{var2}@@") if "@@{var2}@@" == "test": - print "yes" + print("yes") else: - print "no" -print "Hello @@{firstname}@@ @@{lastname}@@" + print("no") +print("Hello @@{firstname}@@ @@{lastname}@@") """ ssh_code = """ @@ -73,7 +73,7 @@ def DslRunbookForMPI(endpoints=[windows_endpoint, linux_endpoint, http_endpoint] target=endpoints[2], ) - Task.Exec.escript(name="ES_Task", script=escript_code) + Task.Exec.escript.py3(name="ES_Task", script=escript_code) Task.Exec.ssh(name="SSH_Task", script=ssh_code, target=endpoints[1]) @@ -85,9 +85,9 @@ def DslRunbookDynamicVariable(): """Runbook example with dynamic variables""" escript_var = Variable.WithOptions.FromTask.int( # NOQA - Task.Exec.escript(script="print '123'") + Task.Exec.escript.py3(script="print('123')") ) - Task.Exec.escript(name="ES_Task", script='print "Hello @@{escript_var}@@"') + Task.Exec.escript.py3(name="ES_Task", script='print("Hello @@{escript_var}@@")') @runbook @@ -95,15 +95,15 @@ def DslWhileDecisionRunbookForMPI(): "Runbook Service example" var = Variable.Simple("3") # noqa with Task.Loop("@@{var}@@", name="WhileTask", loop_variable="iteration"): - Task.Exec.escript(name="Exec", script='''print "test"''') + Task.Exec.escript.py3(name="Exec", script="""print("test")""") - with Task.Decision.escript(script="exit(0)") as d: + with Task.Decision.escript.py3(script="exit(0)") as d: if d.ok: - Task.Exec.escript(name="SUCCESS", script="print 'SUCCESS'") + Task.Exec.escript.py3(name="SUCCESS", script="print('SUCCESS')") else: - Task.Exec.escript(name="FAILURE", script="print 'FAILURE'") + Task.Exec.escript.py3(name="FAILURE", script="print('FAILURE')") def create_project_endpoints(client, project_name=RBAC_PROJECT): diff --git a/tests/api_interface/test_runbooks/test_files/updated_runbook.py b/tests/api_interface/test_runbooks/test_files/updated_runbook.py index dfbf4912..dd82ec32 100644 --- a/tests/api_interface/test_runbooks/test_files/updated_runbook.py +++ b/tests/api_interface/test_runbooks/test_files/updated_runbook.py @@ -8,9 +8,9 @@ from calm.dsl.runbooks import read_local_file, basic_cred -code = '''print "Start" +code = """print("Start") sleep(20) -print "End"''' +print("End")""" CRED_USERNAME = read_local_file(".tests/runbook_tests/username") CRED_PASSWORD = read_local_file(".tests/runbook_tests/password") @@ -21,7 +21,7 @@ @runbook def DslUpdatedRunbook(credentials=[LinuxCred]): "Runbook Service example" - Task.Exec.escript(name="Task2", 
script=code) - Task.Exec.escript(name="Task3", script=code) - Task.Exec.escript(name="Task4", script=code) - Task.Exec.escript(name="Task5", script=code) + Task.Exec.escript.py3(name="Task2", script=code) + Task.Exec.escript.py3(name="Task3", script=code) + Task.Exec.escript.py3(name="Task4", script=code) + Task.Exec.escript.py3(name="Task5", script=code) diff --git a/tests/api_interface/test_runbooks/test_files/vm_endpoints_warning.py b/tests/api_interface/test_runbooks/test_files/vm_endpoints_warning.py index 667fd765..862708f9 100644 --- a/tests/api_interface/test_runbooks/test_files/vm_endpoints_warning.py +++ b/tests/api_interface/test_runbooks/test_files/vm_endpoints_warning.py @@ -46,9 +46,9 @@ @runbook def VMEndpointWithIncorrectID(endpoints=[EndpointWithIncorrectId]): - Task.Exec.escript( + Task.Exec.escript.py3( name="EscriptTask", - script='''print "Escript Task is Successful"''', + script="""print("Escript Task is Successful")""", target=endpoints[0], ) Task.Exec.ssh( @@ -60,9 +60,9 @@ def VMEndpointWithIncorrectID(endpoints=[EndpointWithIncorrectId]): @runbook def VMEndpointWithNoIP(endpoints=[EndpointWithNoIP]): - Task.Exec.escript( + Task.Exec.escript.py3( name="EscriptTask", - script='''print "Escript Task is Successful"''', + script="""print("Escript Task is Successful")""", target=endpoints[0], ) Task.Exec.ssh( @@ -74,9 +74,9 @@ def VMEndpointWithNoIP(endpoints=[EndpointWithNoIP]): @runbook def VMEndpointWithIPOutsideSubnet(endpoints=[EndpointWithIPOutsideSubnet]): - Task.Exec.escript( + Task.Exec.escript.py3( name="EscriptTask", - script='''print "Escript Task is Successful"''', + script="""print("Escript Task is Successful")""", target=endpoints[0], ) Task.Exec.ssh( @@ -88,9 +88,9 @@ def VMEndpointWithIPOutsideSubnet(endpoints=[EndpointWithIPOutsideSubnet]): @runbook def VMEndpointWithOffState(endpoints=[EndpointWithOffState]): - Task.Exec.escript( + Task.Exec.escript.py3( name="EscriptTask", - script='''print "Escript Task is Successful"''', + script="""print("Escript Task is Successful")""", target=endpoints[0], ) Task.Exec.ssh( diff --git a/tests/api_interface/test_runbooks/test_files/while_task.py b/tests/api_interface/test_runbooks/test_files/while_task.py index 679d395e..79ea9f29 100644 --- a/tests/api_interface/test_runbooks/test_files/while_task.py +++ b/tests/api_interface/test_runbooks/test_files/while_task.py @@ -79,13 +79,13 @@ def WhileTask(endpoints=[linux_endpoint, windows_endpoint, http_endpoint]): with Task.Loop( 10, name="Task7", loop_variable="iteration1", exit_condition=Status.SUCCESS ): - Task.Exec.escript(script="print 'test'") + Task.Exec.escript.py3(script="print('test')") with Task.Loop( 10, name="Task8", loop_variable="iteration2", exit_condition=Status.SUCCESS ): - Task.Exec.escript(script="print 'test'") - Task.Exec.escript(script="print 'test'", name="Task9") - Task.Exec.escript(script="print 'test'", name="Task10") + Task.Exec.escript.py3(script="print('test')") + Task.Exec.escript.py3(script="print('test')", name="Task9") + Task.Exec.escript.py3(script="print('test')", name="Task10") Task.Exec.ssh(script="echo 'test'", name="Task11", target=endpoints[0]) Task.Exec.ssh(script="echo 'test'", name="Task12", target=endpoints[0]) Task.Exec.powershell(script="echo 'test'", name="Task13", target=endpoints[1]) @@ -96,9 +96,9 @@ def WhileTask(endpoints=[linux_endpoint, windows_endpoint, http_endpoint]): def WhileTaskLoopVariable(endpoints=[http_endpoint]): "Runbook Service example" with Task.Loop(10, name="Task1", loop_variable="iteration"): - 
Task.SetVariable.escript( + Task.SetVariable.escript.py3( name="SetVariableTask", - script='''print "iteration=random"''', + script="""print("iteration=random")""", variables=["iteration"], ) with Task.Loop(10, name="Task2", loop_variable="iteration"): @@ -118,4 +118,4 @@ def WhileTaskMacro(): "Runbook Service example" var = Variable.Simple("3") # noqa with Task.Loop("@@{var}@@", name="WhileTask", loop_variable="iteration"): - Task.Exec.escript(name="Exec", script='''print "test"''') + Task.Exec.escript.py3(name="Exec", script="""print("test")""") diff --git a/tests/api_interface/test_runbooks/test_http_task.py b/tests/api_interface/test_runbooks/test_http_task.py index ff4270d9..81b464b6 100644 --- a/tests/api_interface/test_runbooks/test_http_task.py +++ b/tests/api_interface/test_runbooks/test_http_task.py @@ -293,6 +293,9 @@ def test_http_incorrect_response_code(self): (HTTPTaskWithIncorrectAuth, "AUTHENTICATION_REQUIRED"), ], ) + @pytest.mark.skip( + reason="ENG-626272, v3 pc api returns diff payload between release versions" + ) def test_http_failure_scenarios(self, Helper): """test_http_task_failure_status_code_check, test_unsupported_payload_json, diff --git a/tests/cli/test_account_commands.py b/tests/cli/test_account_commands.py index 2b345508..e6ef4a65 100644 --- a/tests/cli/test_account_commands.py +++ b/tests/cli/test_account_commands.py @@ -59,7 +59,7 @@ "tests/example_accounts/test_k8s_vanilla_account_service_account_auth.py" ) DSL_VMWARE_ACCOUNT_FILEPATH = "tests/example_accounts/test_vmware_account.py" -DSL_NDB_ACCOUNT_FILEPATH = "tests/example_accounts/test_ndb_account.py" +DSL_NDB_ACCOUNT_FILEPATH = "tests/example_accounts/ndb_account.py" DSL_CREDENTIAL_PROVIDER_ACCOUNT_FILEPATH = ( "tests/example_accounts/test_credential_provider_account.py" ) @@ -87,7 +87,7 @@ "tests/example_accounts/updated_accounts/test_update_vmware_account.py" ) DSL_NDB_UPDATE_ACCOUNT_FILEPATH = ( - "tests/example_accounts/updated_accounts/test_update_ndb_account.py" + "tests/example_accounts/updated_accounts/update_ndb_account.py" ) DSL_CREDENTIAL_PROVIDER_UPDATE_ACCOUNT_FILEPATH = ( "tests/example_accounts/updated_accounts/test_update_credential_provider_account.py" diff --git a/tests/cli/test_marketplace_bp_commands.py b/tests/cli/test_marketplace_bp_commands.py index 137db639..0207372d 100644 --- a/tests/cli/test_marketplace_bp_commands.py +++ b/tests/cli/test_marketplace_bp_commands.py @@ -1296,36 +1296,7 @@ def test_mpi_launch(self, BP_PATH): ) LOG.info("Success") - # Delete the marketplace blueprint - LOG.info( - "Deleting marketplace blueprint {} with version {}".format( - self.marketplace_bp_name, self.mpi1_version - ) - ) - command = [ - "delete", - "marketplace", - "bp", - self.marketplace_bp_name, - "--version", - self.mpi1_version, - ] - - result = runner.invoke(cli, command) - if result.exit_code: - cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} - LOG.debug( - "Cli Response: {}".format( - json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) - ) - ) - LOG.debug( - "Traceback: \n{}".format( - "".join(traceback.format_tb(result.exc_info[2])) - ) - ) - pytest.fail("Deletion of marketplace blueprint in ACCEPTED state failed") - LOG.info("Success") + self._delete_mpi() def test_publish_to_marketplace_flag(self): """Test for publish_to_marketplace_flag for publsh command""" @@ -1408,34 +1379,7 @@ def test_publish_to_marketplace_flag(self): ) pytest.fail("Unpublishing of marketplace blueprint failed") - LOG.info( - "Deleting marketplace blueprint {} with 
version {} in ACCEPTED state".format( - self.marketplace_bp_name, self.mpi1_version - ) - ) - command = [ - "delete", - "marketplace", - "bp", - self.marketplace_bp_name, - "--version", - self.mpi1_version, - ] - - result = runner.invoke(cli, command) - if result.exit_code: - cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} - LOG.debug( - "Cli Response: {}".format( - json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) - ) - ) - LOG.debug( - "Traceback: \n{}".format( - "".join(traceback.format_tb(result.exc_info[2])) - ) - ) - pytest.fail("Deletion of marketplace blueprint in ACCEPTED state failed") + self._delete_mpi() def test_auto_approve_flag(self): """Test for auto_approve flag in publish command""" @@ -1447,7 +1391,6 @@ def test_auto_approve_flag(self): ) self.mpi1_version = "1.0.0" - # Publish Bp directly to marketplace using --publish_to_marketplace flag LOG.info( "Publishing Bp {} as new marketplace blueprint {}".format( self.created_dsl_bp_name, self.marketplace_bp_name @@ -1489,34 +1432,7 @@ def test_auto_approve_flag(self): bp_state = mpi_data["status"]["resources"]["app_state"] assert bp_state == MARKETPLACE_ITEM.STATES.ACCEPTED - LOG.info( - "Deleting marketplace blueprint {} with version {} in ACCEPTED state".format( - self.marketplace_bp_name, self.mpi1_version - ) - ) - command = [ - "delete", - "marketplace", - "bp", - self.marketplace_bp_name, - "--version", - self.mpi1_version, - ] - - result = runner.invoke(cli, command) - if result.exit_code: - cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} - LOG.debug( - "Cli Response: {}".format( - json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) - ) - ) - LOG.debug( - "Traceback: \n{}".format( - "".join(traceback.format_tb(result.exc_info[2])) - ) - ) - pytest.fail("Deletion of marketplace blueprint in ACCEPTED state failed") + self._delete_mpi() def test_publish_bp_with_icon(self): self._create_bp() @@ -1525,7 +1441,7 @@ def test_publish_bp_with_icon(self): self.marketplace_bp_name = "Test_Marketplace_Bp_{}".format( str(uuid.uuid4())[-10:] ) - self.mpi_version = "1.0.0" + self.mpi1_version = "1.0.0" self.icon_name = "test_icon{}".format(str(uuid.uuid4())[:10]) LOG.info("Publishing the blueprint to marketplace manager") @@ -1534,7 +1450,7 @@ def test_publish_bp_with_icon(self): "bp", self.created_dsl_bp_name, "--version", - self.mpi_version, + self.mpi1_version, "--name", self.marketplace_bp_name, "-f", @@ -1565,43 +1481,17 @@ def test_publish_bp_with_icon(self): app_icon_uuid = app_icon_name_uuid_map.get(self.icon_name) bp_data = get_mpi_by_name_n_version( - name=self.marketplace_bp_name, version=self.mpi_version + name=self.marketplace_bp_name, version=self.mpi1_version ) icon_reference = bp_data["status"]["resources"]["icon_reference_list"][0][ "icon_reference" ] assert icon_reference["uuid"] == app_icon_uuid, "App icon not used for the bp" - LOG.info( - "Deleting the marketplace blueprint {}".format(self.marketplace_bp_name) - ) - result = runner.invoke( - cli, - [ - "delete", - "marketplace", - "bp", - self.marketplace_bp_name, - "--version", - self.mpi_version, - ], - ) - if result.exit_code: - cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} - LOG.debug( - "Cli Response: {}".format( - json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) - ) - ) - LOG.debug( - "Traceback: \n{}".format( - "".join(traceback.format_tb(result.exc_info[2])) - ) - ) - pytest.fail("Deleting of marketplace blueprint failed") + 
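# Delete the marketplace blueprint via the shared _delete_mpi helper +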
self._delete_mpi() def test_all_projects_flag_on_publising_bp_with_auto_approve_flag(self): - """Tests `--all_projects` flag to publish bp to marketplace manager""" + """Tests `--all_projects` flag to publish bp to marketplace manager with auto approval flag""" client = get_api_client() self._create_bp() @@ -1661,34 +1551,7 @@ def test_all_projects_flag_on_publising_bp_with_auto_approve_flag(self): for _proj in project_name_uuid_map.keys(): assert _proj in bp_projects - LOG.info( - "Deleting marketplace blueprint {} with version {} in ACCEPTED state".format( - self.marketplace_bp_name, self.mpi1_version - ) - ) - command = [ - "delete", - "marketplace", - "bp", - self.marketplace_bp_name, - "--version", - self.mpi1_version, - ] - - result = runner.invoke(cli, command) - if result.exit_code: - cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} - LOG.debug( - "Cli Response: {}".format( - json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) - ) - ) - LOG.debug( - "Traceback: \n{}".format( - "".join(traceback.format_tb(result.exc_info[2])) - ) - ) - pytest.fail("Deletion of marketplace blueprint in ACCEPTED state failed") + self._delete_mpi() def test_all_projects_flag_on_approving_marketplace_bp(self): """Tests `--all_projects` flag to approving bp to marketplace manager""" @@ -1785,34 +1648,7 @@ def test_all_projects_flag_on_approving_marketplace_bp(self): for _proj in project_name_uuid_map.keys(): assert _proj in bp_projects - LOG.info( - "Deleting marketplace blueprint {} with version {} in ACCEPTED state".format( - self.marketplace_bp_name, self.mpi1_version - ) - ) - command = [ - "delete", - "marketplace", - "bp", - self.marketplace_bp_name, - "--version", - self.mpi1_version, - ] - - result = runner.invoke(cli, command) - if result.exit_code: - cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} - LOG.debug( - "Cli Response: {}".format( - json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) - ) - ) - LOG.debug( - "Traceback: \n{}".format( - "".join(traceback.format_tb(result.exc_info[2])) - ) - ) - pytest.fail("Deletion of marketplace blueprint in ACCEPTED state failed") + self._delete_mpi() def test_all_projects_flag_on_publishing_marketplace_bp(self): """Tests `--all_projects` flag for publishing bp to marketplace store""" @@ -1937,7 +1773,31 @@ def test_all_projects_flag_on_publishing_marketplace_bp(self): pytest.fail("Unpublishing of marketplace blueprint to marketplace failed") LOG.info("Success") - # Delete the marketplace blueprint + self._delete_mpi() + + def _test_app_delete(self, app_name): + + runner = CliRunner() + self.app_helper._wait_for_non_busy_state(app_name) + LOG.info("Deleting App {} ".format(app_name)) + result = runner.invoke(cli, ["delete", "app", app_name]) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail("Deletion of application '{}' failed".format(app_name)) + LOG.info("App is deleted successfully") + + def _delete_mpi(self): + runner = CliRunner() LOG.info( "Deleting marketplace blueprint {} with version {}".format( self.marketplace_bp_name, self.mpi1_version @@ -1966,14 +1826,39 @@ def test_all_projects_flag_on_publishing_marketplace_bp(self): ) ) pytest.fail("Deletion of marketplace blueprint failed") + 
LOG.info("Success") - def _test_app_delete(self, app_name): + def test_all_projects_flag_on_publising_bp_without_auto_approve_flag(self): + """Tests `--all_projects` flag to publish bp to marketplace manager without auto approval flag""" + + client = get_api_client() + self._create_bp() + self.created_bp_list.append(self.created_dsl_bp_name) + self.marketplace_bp_name = "Test_Marketplace_Bp_{}".format( + str(uuid.uuid4())[-10:] + ) + self.mpi1_version = "1.0.0" + # Publish Bp marketplace using --all_projects flag + LOG.info( + "Publishing Bp {} as new marketplace blueprint {} with 'all_projects' flag".format( + self.created_dsl_bp_name, self.marketplace_bp_name + ) + ) + command = [ + "publish", + "bp", + self.created_dsl_bp_name, + "--version", + self.mpi1_version, + "--name", + self.marketplace_bp_name, + "--all_projects", + ] runner = CliRunner() - self.app_helper._wait_for_non_busy_state(app_name) - LOG.info("Deleting App {} ".format(app_name)) - result = runner.invoke(cli, ["delete", "app", app_name]) + + result = runner.invoke(cli, command) if result.exit_code: cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} LOG.debug( @@ -1986,5 +1871,118 @@ def _test_app_delete(self, app_name): "".join(traceback.format_tb(result.exc_info[2])) ) ) - pytest.fail("Deletion of application '{}' failed".format(app_name)) - LOG.info("App is deleted successfully") + pytest.fail("Publishing Bp using all_projects flag failed") + LOG.info("Success") + + mpi_data = get_mpi_by_name_n_version( + name=self.marketplace_bp_name, + version=self.mpi1_version, + app_source=MARKETPLACE_ITEM.SOURCES.LOCAL, + ) + + project_name_uuid_map = client.project.get_name_uuid_map({"length": 250}) + + bp_projects = [] + for _proj in mpi_data["spec"]["resources"]["project_reference_list"]: + bp_projects.append(_proj["name"]) + + for _proj in project_name_uuid_map.keys(): + assert _proj in bp_projects + + self._delete_mpi() + + def test_project_removal_flag_on_approving_marketplace_bp(self): + """Tests `--remove-project` flag on approving bp to marketplace manager""" + + project_name = "default" # This project will be removed while approving + self._create_bp() + self.created_bp_list.append(self.created_dsl_bp_name) + self.marketplace_bp_name = "Test_Marketplace_Bp_{}".format( + str(uuid.uuid4())[-10:] + ) + self.mpi1_version = "1.0.0" + + # Publish Bp to marketplace manager as new marketplace blueprint with all projects + LOG.info( + "Publishing Bp {} as new marketplace blueprint {}".format( + self.created_dsl_bp_name, self.marketplace_bp_name + ) + ) + command = [ + "publish", + "bp", + self.created_dsl_bp_name, + "--version", + self.mpi1_version, + "--name", + self.marketplace_bp_name, + "--all_projects", + "--with_secrets", + ] + runner = CliRunner() + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail( + "Publishing of marketplace blueprint as new marketplace item failed" + ) + + # Approve the blueprint + LOG.info( + "Approving marketplace blueprint {} with version {} and removing {} project".format( + self.marketplace_bp_name, self.mpi1_version, project_name + ) + ) + command = [ + "approve", + "marketplace", + "bp", + self.marketplace_bp_name, + "--version", + self.mpi1_version, + 
"--remove-project", + project_name, + ] + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail( + "Approving of marketplace blueprint using remove-project flag failed" + ) + LOG.info("Success") + + mpi_data = get_mpi_by_name_n_version( + name=self.marketplace_bp_name, + version=self.mpi1_version, + app_source=MARKETPLACE_ITEM.SOURCES.LOCAL, + ) + + bp_projects = [] + for _proj in mpi_data["spec"]["resources"]["project_reference_list"]: + bp_projects.append(_proj["name"]) + + assert project_name not in bp_projects + + self._delete_mpi() diff --git a/tests/cli/test_marketplace_runbook_commands.py b/tests/cli/test_marketplace_runbook_commands.py new file mode 100644 index 00000000..aae5b1c1 --- /dev/null +++ b/tests/cli/test_marketplace_runbook_commands.py @@ -0,0 +1,1466 @@ +import pytest +from click.testing import CliRunner +from itertools import combinations +import uuid +import json +import traceback +from distutils.version import LooseVersion as LV +from calm.dsl.cli import main as cli +from calm.dsl.api import get_api_client +from calm.dsl.cli.marketplace import ( + get_app_family_list, + get_mpi_by_name_n_version, +) +from calm.dsl.cli.utils import get_states_filter +from calm.dsl.builtins import read_local_file +from calm.dsl.cli.constants import MARKETPLACE_ITEM +from calm.dsl.log import get_logging_handle +from calm.dsl.store import Version + +LOG = get_logging_handle(__name__) +CALM_VERSION = Version.get_version("Calm") +APP_ICON_IMAGE_PATH = "tests/cli/images/test_app_icon.jpg" +DSL_RB_FILEPATH = "tests/sample_runbooks/simple_runbook.py" + +APP_STATES = [ + MARKETPLACE_ITEM.STATES.PENDING, + MARKETPLACE_ITEM.STATES.ACCEPTED, + MARKETPLACE_ITEM.STATES.REJECTED, + MARKETPLACE_ITEM.STATES.PUBLISHED, +] +APP_SOURCES = [ + MARKETPLACE_ITEM.SOURCES.GLOBAL, + MARKETPLACE_ITEM.SOURCES.LOCAL, +] + +DSL_CONFIG = json.loads(read_local_file(".tests/config.json")) + +PROJECT = DSL_CONFIG["PROJECTS"]["PROJECT1"] +PROJECT_NAME = PROJECT["NAME"] + + +class TestMarketplaceRunbookCommands: + def setup_method(self): + """Method to instantiate to created_rb_list""" + + self.created_rb_list = [] + + def teardown_method(self): + """Method to delete creates runbooks during tests""" + + for rb_name in self.created_rb_list: + LOG.info("Deleting Runbook {}".format(rb_name)) + runner = CliRunner() + result = runner.invoke(cli, ["delete", "runbook", rb_name]) + assert result.exit_code == 0 + + self.created_rb_list = [] + + def _create_runbook(self, RB_PATH): + + self.created_dsl_rb_name = "Test_Runbook_for_MPI_{}".format( + str(uuid.uuid4())[-10:] + ) + LOG.info("Creating Runbook {}".format(self.created_dsl_rb_name)) + + runner = CliRunner() + result = runner.invoke( + cli, + [ + "create", + "runbook", + "--file={}".format(RB_PATH), + "--name={}".format(self.created_dsl_rb_name), + "--description='Test DSL Runbook; to delete'", + ], + ) + + LOG.debug("Response: {}".format(result.output)) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + 
pytest.fail("Runbook creation failed") + + LOG.info("Success") + + def _delete_mpi(self): + runner = CliRunner() + LOG.info( + "Deleting marketplace runboook {} with version {}".format( + self.marketplace_rb_name, self.mpi1_version + ) + ) + command = [ + "delete", + "marketplace", + "runbook", + self.marketplace_rb_name, + "--version", + self.mpi1_version, + ] + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail("Deletion of marketplace runboook failed") + LOG.info("Success") + + def test_get_marketplace_runbooks(self): + """Tests 'calm get marketplace runbooks command'""" + + runner = CliRunner() + + LOG.info("Running 'calm get marketplace runbooks' command") + result = runner.invoke(cli, ["get", "marketplace", "runbooks"]) + if result.exit_code: + LOG.error(result.output) + pytest.fail("Failed to fetch marketplace runboooks") + LOG.info("Success") + + # Test quiet flag + LOG.info("Running 'calm get marketplace runbooks --quiet' command") + result = runner.invoke(cli, ["get", "marketplace", "bps", "--quiet"]) + if result.exit_code: + LOG.error(result.output) + pytest.fail("Failed to fetch marketplace runbooks with quiet flag") + LOG.info("Success") + + # Test app states option + LOG.info( + "Testing app_state option for 'calm get marketplace runbooks' command" + ) + app_states = APP_STATES + app_states = sum( + [ + list(map(list, combinations(app_states, i))) + for i in range(len(app_states) + 1) + ], + [], + ) + for app_state_list in app_states: + input = ["get", "marketplace", "runbooks"] + for app_state in app_state_list: + input.append("--app_state") + input.append(app_state) + result = runner.invoke(cli, input) + if result.exit_code: + LOG.error(result.ouput) + pytest.fail( + "Failed to fetch marketplace runbooks with app_state option" + ) + LOG.info("Success") + + # Test app_family attribute + LOG.info( + "Testing app_family option for 'calm get marketplace runbooks' command" + ) + app_family_list = get_app_family_list() + input = ["get", "marketplace", "runbooks", "--app_family", ""] + for app_family in app_family_list: + input[4] = app_family + result = runner.invoke(cli, input) + if result.exit_code: + LOG.error(result.output) + pytest.fail( + "Failed to fetch marketplace runbooks with app_family option" + ) + LOG.info("Success") + + # Test filter attribute + LOG.info("Running 'calm get marketplace runbooks --filter' command") + result = runner.invoke( + cli, ["get", "marketplace", "runbooks", "--filter", "version==1.0.0"] + ) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail("Failed to fetch marketplace runbooks with 'filter' cli option") + LOG.info("Success") + + def test_describe_marketplace_runbook(self): + """To describe marketplace runbook""" + + payload = {"length": 250} + + client = get_api_client() + runner = CliRunner() + + # test source option and app state action + app_states = APP_STATES + app_sources = APP_SOURCES + + LOG.info("Testing 'calm describe marketplace runbook command'") + 
for app_state in app_states: + for app_source in app_sources: + filter_query = "" + if app_state: + filter_query += get_states_filter( + state_key="app_state", states=[app_state] + ) + + if app_source: + filter_query += ";app_source=={}".format(app_source) + + if filter_query.startswith(";"): + filter_query = filter_query[1:] + + if filter_query: + payload["filter"] = filter_query + + res, err = client.market_place.list(params=payload) + if err: + raise Exception("[{}] - {}".format(err["code"], err["error"])) + + res = res.json() + total_matches = res["metadata"]["total_matches"] + if not total_matches: + continue + + entity = res["entities"][0] + mpi_name = entity["metadata"]["name"] + mpi_version = entity["status"]["version"] + + command = [ + "describe", + "marketplace", + "runbook", + mpi_name, + "--version", + mpi_version, + "--app_state", + app_state, + "--source", + app_source, + ] + result = runner.invoke(cli, command) + + if result.exit_code: + cli_res_dict = { + "Output": result.output, + "Exception": str(result.exception), + } + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail("Describe marketplace runbook command failed") + + LOG.info("Success") + + def test_mpi_basic_commands(self): + """ + Steps: + 1. Create a runbook + 2. Publish this runbook as new marketplace runbook i.e. mp_rb1 + 3. Publish the same runbook as existing marketplace runbook i.e. mp_rb2 + 4. Publish the same runbook with secrets as existing marketplace runbook i.e. mp_rb3 + 5. Negative Test: Publish the same runbook with mp_rb2's version + 6. Approve the mp_rb1 + 7. Publish the mp_rb1 + 8. Negative Test: Delete the published runbook + 9. Unpublish the runbook mp_rb1 + 10. Delete the runbook in ACCEPTED state i.e. mp_rb1 + 11. Reject the runbook i.e. mp_rb2 + 12. Delete the runbook in REJECTED state i.e. mp_rb2 + 13. Delete the runbook in PENDING states i.e. 
mp_rb3 + + """ + + self._create_runbook(DSL_RB_FILEPATH) + self.created_rb_list.append(self.created_dsl_rb_name) + self.marketplace_rb_name = "Test_Marketplace_Runbook_{}".format( + str(uuid.uuid4())[-10:] + ) + self.mpi1_version = "1.0.0" + self.mpi2_version = "2.0.0" + self.mpi3_with_secrets_version = "3.0.0" + + # Publish Runbook to marketplace manager as new marketplace runbook + LOG.info( + "Publishing Runbook {} as new marketplace runbook {}".format( + self.created_dsl_rb_name, self.marketplace_rb_name + ) + ) + command = [ + "publish", + "runbook", + self.created_dsl_rb_name, + "--version", + self.mpi1_version, + "--name", + self.marketplace_rb_name, + ] + runner = CliRunner() + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail("Publishing of Runbook as new marketplace item failed") + LOG.info("Success") + + LOG.info( + "Publishing Runbook {} as existing marketplace runbook {}".format( + self.created_dsl_rb_name, self.marketplace_rb_name + ) + ) + command = [ + "publish", + "runbook", + self.created_dsl_rb_name, + "--version", + self.mpi2_version, + "--name", + self.marketplace_rb_name, + "--existing_marketplace_runbook", + ] + runner = CliRunner() + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail("Publishing of runbook as existing marketplace item failed") + LOG.info("Success") + + LOG.info( + "Publishing Runbook {} with secrets as existing marketplace runbook {}".format( + self.created_dsl_rb_name, self.marketplace_rb_name + ) + ) + command = [ + "publish", + "runbook", + self.created_dsl_rb_name, + "--version", + self.mpi3_with_secrets_version, + "--name", + self.marketplace_rb_name, + "--existing_marketplace_runbook", + "--with_secrets", + ] + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail( + "Publishing of runbook with secrets as existing marketplace item failed" + ) + LOG.info("Success") + + LOG.info( + "Negative Test: Publishing runbook {} as existing marketplace runbook {}".format( + self.created_dsl_rb_name, self.marketplace_rb_name + ) + ) + command = [ + "publish", + "runbook", + self.created_dsl_rb_name, + "--version", + self.mpi2_version, + "--name", + self.marketplace_rb_name, + "--existing_marketplace_runbook", + ] + + result = runner.invoke(cli, command) + if result.exit_code == 0: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail( + "Publishing runbook with version that already exists in 
marketplace should not be successful" + ) + LOG.info("Success") + + # Approve the runbook + LOG.info( + "Approving marketplace runbook {} with version {}".format( + self.marketplace_rb_name, self.mpi1_version + ) + ) + command = [ + "approve", + "marketplace", + "runbook", + self.marketplace_rb_name, + "--version", + self.mpi1_version, + ] + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail("Approving of marketplace runbook failed") + LOG.info("Success") + + # Update the runbook + LOG.info( + "Updating marketplace runbook {} with version {}".format( + self.marketplace_rb_name, self.mpi1_version + ) + ) + command = [ + "update", + "marketplace", + "runbook", + self.marketplace_rb_name, + "--version", + self.mpi1_version, + "--description", + "Sample description", + ] + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail("Updating of marketplace runbook failed") + LOG.info("Success") + + # Publishing runbook without projects + LOG.info( + "Negative Test: Publishing marketplace runbook {} with version {} to marketplace without projects".format( + self.marketplace_rb_name, self.mpi1_version + ) + ) + command = [ + "publish", + "marketplace", + "runbook", + self.marketplace_rb_name, + "--version", + self.mpi1_version, + ] + + result = runner.invoke(cli, command) + if result.exit_code == 0: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail( + "Publishing of marketplace runbook without projects should fail" + ) + LOG.info("Success") + + # Publishing marketplace runbook + LOG.info( + "Publishing marketplace runbook {} with version {} to marketplace".format( + self.marketplace_rb_name, self.mpi1_version + ) + ) + command = [ + "publish", + "marketplace", + "runbook", + self.marketplace_rb_name, + "--version", + self.mpi1_version, + "--project", + PROJECT_NAME, + ] + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail("Publishing of marketplace runbook to marketplace failed") + LOG.info("Success") + + # Try to delete the published runbook + LOG.info("Negative Test: Deleting marketplace runbook in PUBLISHED state") + command = [ + "delete", + "marketplace", + "runbook", + self.marketplace_rb_name, + "--version", + self.mpi1_version, + ] + + result = runner.invoke(cli, command) + if result.exit_code == 0: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + 
json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail( + "Deleting of marketplace runbook should fail if runbook is in published state" + ) + LOG.info("Success") + + # Unpublish marketplace runbook from marketplace + LOG.info( + "Unpublishing marketplace runbook {} with version {}".format( + self.marketplace_rb_name, self.mpi1_version + ) + ) + command = [ + "unpublish", + "marketplace", + "item", + self.marketplace_rb_name, + "--version", + self.mpi1_version, + ] + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail("Unpublishing of marketplace runbook failed") + LOG.info("Success") + + # Deleting the Accepted marketplace runbook + LOG.info( + "Deleting marketplace runbook {} with version {} in ACCEPTED state".format( + self.marketplace_rb_name, self.mpi1_version + ) + ) + command = [ + "delete", + "marketplace", + "runbook", + self.marketplace_rb_name, + "--version", + self.mpi1_version, + ] + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail("Deletion of marketplace runbook in ACCEPTED state failed") + LOG.info("Success") + + # Reject the marketplace runbook + LOG.info( + "Rejecting marketplace runbook {} with version {}".format( + self.marketplace_rb_name, self.mpi2_version + ) + ) + command = [ + "reject", + "marketplace", + "runbook", + self.marketplace_rb_name, + "--version", + self.mpi2_version, + ] + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail("Rejection of marketplace runbook failed") + LOG.info("Success") + + # Delete the rejected runbook + LOG.info("Deleting marketplace runbook in REJECTED state") + command = [ + "delete", + "marketplace", + "runbook", + self.marketplace_rb_name, + "--version", + self.mpi2_version, + ] + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail("Deletion of marketplace runbook in REJECTED state failed") + LOG.info("Success") + + # Delete the pending runbook + LOG.info("Deleting marketplace runbook in PENDING state") + command = [ + "delete", + "marketplace", + "runbook", + self.marketplace_rb_name, + "--version", + self.mpi3_with_secrets_version, + ] + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": 
str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail("Deletion of marketplace runbook in PENDING state failed") + LOG.info("Success") + + def test_publish_to_marketplace_flag(self): + """Test for publish_to_marketplace_flag for publish command""" + + self._create_runbook(DSL_RB_FILEPATH) + self.created_rb_list.append(self.created_dsl_rb_name) + self.marketplace_rb_name = "Test_Marketplace_Bp_{}".format( + str(uuid.uuid4())[-10:] + ) + self.mpi1_version = "1.0.0" + + # Publish runbook directly to marketplace using --publish_to_marketplace flag + LOG.info( + "Publishing runbook {} as new marketplace runbook {}".format( + self.created_dsl_rb_name, self.marketplace_rb_name + ) + ) + command = [ + "publish", + "runbook", + self.created_dsl_rb_name, + "--version", + self.mpi1_version, + "--name", + self.marketplace_rb_name, + "--publish_to_marketplace", + ] + runner = CliRunner() + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail("Publishing runbook using publish_to_marketplace flag failed") + LOG.info("Success") + + mpi_data = get_mpi_by_name_n_version( + name=self.marketplace_rb_name, + version=self.mpi1_version, + app_source=MARKETPLACE_ITEM.SOURCES.LOCAL, + ) + runbook_state = mpi_data["status"]["resources"]["app_state"] + assert runbook_state == MARKETPLACE_ITEM.STATES.PUBLISHED + + LOG.info( + "Unpublishing marketplace runbook {} with version {}".format( + self.marketplace_rb_name, self.mpi1_version + ) + ) + command = [ + "unpublish", + "marketplace", + "item", + self.marketplace_rb_name, + "--version", + self.mpi1_version, + ] + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail("Unpublishing of marketplace runbook failed") + + LOG.info( + "Deleting marketplace runbook {} with version {} in ACCEPTED state".format( + self.marketplace_rb_name, self.mpi1_version + ) + ) + command = [ + "delete", + "marketplace", + "runbook", + self.marketplace_rb_name, + "--version", + self.mpi1_version, + ] + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail("Deletion of marketplace runbook in ACCEPTED state failed") + + def test_auto_approve_flag(self): + """Test for auto_approve flag in publish command""" + + self._create_runbook(DSL_RB_FILEPATH) + self.created_rb_list.append(self.created_dsl_rb_name) + self.marketplace_rb_name = "Test_Marketplace_Bp_{}".format( + str(uuid.uuid4())[-10:] + ) + self.mpi1_version = "1.0.0" + + LOG.info( + "Publishing runbook {} as new marketplace
runboook {}".format( + self.created_dsl_rb_name, self.marketplace_rb_name + ) + ) + command = [ + "publish", + "runbook", + self.created_dsl_rb_name, + "--version", + self.mpi1_version, + "--name", + self.marketplace_rb_name, + "--auto_approve", + ] + runner = CliRunner() + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail("Publishing runbook using auto_approve flag failed") + LOG.info("Success") + + mpi_data = get_mpi_by_name_n_version( + name=self.marketplace_rb_name, + version=self.mpi1_version, + app_source=MARKETPLACE_ITEM.SOURCES.LOCAL, + ) + runbook_state = mpi_data["status"]["resources"]["app_state"] + assert runbook_state == MARKETPLACE_ITEM.STATES.ACCEPTED + + LOG.info( + "Deleting marketplace runboook {} with version {} in ACCEPTED state".format( + self.marketplace_rb_name, self.mpi1_version + ) + ) + command = [ + "delete", + "marketplace", + "runbook", + self.marketplace_rb_name, + "--version", + self.mpi1_version, + ] + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail("Deletion of marketplace runboook in ACCEPTED state failed") + + def test_publish_runbook_with_icon(self): + self._create_runbook(DSL_RB_FILEPATH) + self.created_rb_list.append(self.created_dsl_rb_name) + + self.marketplace_rb_name = "Test_Marketplace_Bp_{}".format( + str(uuid.uuid4())[-10:] + ) + self.mpi_version = "1.0.0" + self.icon_name = "test_icon{}".format(str(uuid.uuid4())[:10]) + + LOG.info("Publishing the runboook to marketplace manager") + command = [ + "publish", + "runbook", + self.created_dsl_rb_name, + "--version", + self.mpi_version, + "--name", + self.marketplace_rb_name, + "-f", + APP_ICON_IMAGE_PATH, + "-i", + self.icon_name, + ] + runner = CliRunner() + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail("Publishing of runboook as new marketplace item failed") + LOG.info("Success") + + client = get_api_client() + app_icon_name_uuid_map = client.app_icon.get_name_uuid_map() + app_icon_uuid = app_icon_name_uuid_map.get(self.icon_name) + + runbook_data = get_mpi_by_name_n_version( + name=self.marketplace_rb_name, version=self.mpi_version + ) + icon_reference = runbook_data["status"]["resources"]["icon_reference_list"][0][ + "icon_reference" + ] + assert ( + icon_reference["uuid"] == app_icon_uuid + ), "App icon not used for the runbook" + + LOG.info( + "Deleting the marketplace runboook {}".format(self.marketplace_rb_name) + ) + result = runner.invoke( + cli, + [ + "delete", + "marketplace", + "runbook", + self.marketplace_rb_name, + "--version", + self.mpi_version, + ], + ) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": 
str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail("Deletion of marketplace runbook failed") + + def test_all_projects_flag_on_publishing_runbook_with_auto_approve_flag(self): + """Tests `--all_projects` flag to publish runbook to marketplace manager with auto approval flag""" + + client = get_api_client() + self._create_runbook(DSL_RB_FILEPATH) + self.created_rb_list.append(self.created_dsl_rb_name) + self.marketplace_rb_name = "Test_Marketplace_Bp_{}".format( + str(uuid.uuid4())[-10:] + ) + self.mpi1_version = "1.0.0" + + # Publish runbook to marketplace using --all_projects flag + LOG.info( + "Publishing runbook {} as new marketplace runbook {} with 'all_projects' flag".format( + self.created_dsl_rb_name, self.marketplace_rb_name + ) + ) + command = [ + "publish", + "runbook", + self.created_dsl_rb_name, + "--version", + self.mpi1_version, + "--name", + self.marketplace_rb_name, + "--all_projects", + "--auto_approve", + ] + runner = CliRunner() + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail("Publishing runbook using all_projects flag failed") + LOG.info("Success") + + mpi_data = get_mpi_by_name_n_version( + name=self.marketplace_rb_name, + version=self.mpi1_version, + app_source=MARKETPLACE_ITEM.SOURCES.LOCAL, + ) + + project_name_uuid_map = client.project.get_name_uuid_map({"length": 250}) + + runbook_projects = [] + for _proj in mpi_data["spec"]["resources"]["project_reference_list"]: + runbook_projects.append(_proj["name"]) + + for _proj in project_name_uuid_map.keys(): + assert _proj in runbook_projects + + self._delete_mpi() + + def test_all_projects_flag_on_approving_marketplace_bp(self): + """Tests `--all_projects` flag for approving runbook to marketplace manager""" + + client = get_api_client() + self._create_runbook(DSL_RB_FILEPATH) + self.created_rb_list.append(self.created_dsl_rb_name) + self.marketplace_rb_name = "Test_Marketplace_Bp_{}".format( + str(uuid.uuid4())[-10:] + ) + self.mpi1_version = "1.0.0" + + # Publish runbook to marketplace manager as new marketplace runbook + LOG.info( + "Publishing runbook {} as new marketplace runbook {}".format( + self.created_dsl_rb_name, self.marketplace_rb_name + ) + ) + command = [ + "publish", + "runbook", + self.created_dsl_rb_name, + "--version", + self.mpi1_version, + "--name", + self.marketplace_rb_name, + "--with_secrets", + ] + runner = CliRunner() + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail( + "Publishing of marketplace runbook as new marketplace item failed" + ) + + # Approve the runbook + LOG.info( + "Approving marketplace runbook {} with version {} with 'all_projects' flag".format( + self.marketplace_rb_name, self.mpi1_version + ) + ) + command = [ + "approve", + "marketplace", + "runbook", +
self.marketplace_rb_name, + "--version", + self.mpi1_version, + "--all_projects", + ] + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail( + "Approving of marketplace runbook using all_projects flag failed" + ) + LOG.info("Success") + + mpi_data = get_mpi_by_name_n_version( + name=self.marketplace_rb_name, + version=self.mpi1_version, + app_source=MARKETPLACE_ITEM.SOURCES.LOCAL, + ) + + project_name_uuid_map = client.project.get_name_uuid_map({"length": 250}) + + runbook_projects = [] + for _proj in mpi_data["spec"]["resources"]["project_reference_list"]: + runbook_projects.append(_proj["name"]) + + for _proj in project_name_uuid_map.keys(): + assert _proj in runbook_projects + + self._delete_mpi() + + def test_all_projects_flag_on_publishing_marketplace_bp(self): + """Tests `--all_projects` flag for publishing runbook to marketplace store""" + + client = get_api_client() + self._create_runbook(DSL_RB_FILEPATH) + self.created_rb_list.append(self.created_dsl_rb_name) + self.marketplace_rb_name = "Test_Marketplace_Bp_{}".format( + str(uuid.uuid4())[-10:] + ) + self.mpi1_version = "1.0.0" + + # Publish runbook directly to marketplace using --auto_approve flag + LOG.info( + "Publishing runbook {} as new marketplace runbook {}".format( + self.created_dsl_rb_name, self.marketplace_rb_name + ) + ) + command = [ + "publish", + "runbook", + self.created_dsl_rb_name, + "--version", + self.mpi1_version, + "--name", + self.marketplace_rb_name, + "--auto_approve", + ] + runner = CliRunner() + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail("Publishing runbook using auto_approve flag failed") + LOG.info("Success") + + # Publish runbook to marketplace + LOG.info( + "Publishing marketplace runbook {} with version {} to marketplace with 'all_projects' flag".format( + self.marketplace_rb_name, self.mpi1_version + ) + ) + command = [ + "publish", + "marketplace", + "runbook", + self.marketplace_rb_name, + "--version", + self.mpi1_version, + "--all_projects", + ] + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail("Publishing of marketplace runbook to marketplace failed") + LOG.info("Success") + + mpi_data = get_mpi_by_name_n_version( + name=self.marketplace_rb_name, + version=self.mpi1_version, + app_source=MARKETPLACE_ITEM.SOURCES.LOCAL, + ) + + project_name_uuid_map = client.project.get_name_uuid_map({"length": 250}) + + runbook_projects = [] + for _proj in mpi_data["spec"]["resources"]["project_reference_list"]: + runbook_projects.append(_proj["name"]) + + for _proj in project_name_uuid_map.keys(): + assert _proj in runbook_projects + + # Unpublish marketplace runbook
from marketplace + LOG.info( + "Unpublishing marketplace runbook {} with version {}".format( + self.marketplace_rb_name, self.mpi1_version + ) + ) + command = [ + "unpublish", + "marketplace", + "item", + self.marketplace_rb_name, + "--version", + self.mpi1_version, + ] + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail("Unpublishing of marketplace runbook to marketplace failed") + LOG.info("Success") + + def test_all_projects_flag_on_publishing_bp_without_auto_approve_flag(self): + """Tests `--all_projects` flag to publish runbook to marketplace manager without auto approval flag""" + + client = get_api_client() + self._create_runbook(DSL_RB_FILEPATH) + self.created_rb_list.append(self.created_dsl_rb_name) + self.marketplace_rb_name = "Test_Marketplace_Bp_{}".format( + str(uuid.uuid4())[-10:] + ) + self.mpi1_version = "1.0.0" + + # Publish runbook to marketplace using --all_projects flag + LOG.info( + "Publishing runbook {} as new marketplace runbook {} with 'all_projects' flag".format( + self.created_dsl_rb_name, self.marketplace_rb_name + ) + ) + command = [ + "publish", + "runbook", + self.created_dsl_rb_name, + "--version", + self.mpi1_version, + "--name", + self.marketplace_rb_name, + "--all_projects", + ] + runner = CliRunner() + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail("Publishing runbook using all_projects flag failed") + LOG.info("Success") + + mpi_data = get_mpi_by_name_n_version( + name=self.marketplace_rb_name, + version=self.mpi1_version, + app_source=MARKETPLACE_ITEM.SOURCES.LOCAL, + ) + + project_name_uuid_map = client.project.get_name_uuid_map({"length": 250}) + + runbook_projects = [] + for _proj in mpi_data["spec"]["resources"]["project_reference_list"]: + runbook_projects.append(_proj["name"]) + + for _proj in project_name_uuid_map.keys(): + assert _proj in runbook_projects + + self._delete_mpi() + + def test_project_removal_flag_on_approving_marketplace_runbook(self): + """Tests `--remove-project` flag on approving runbook to marketplace manager""" + + project_name = "default" # This project will be removed while approving + self._create_runbook(DSL_RB_FILEPATH) + self.created_rb_list.append(self.created_dsl_rb_name) + self.marketplace_rb_name = "Test_Marketplace_Bp_{}".format( + str(uuid.uuid4())[-10:] + ) + self.mpi1_version = "1.0.0" + + # Publish runbook to marketplace manager as new marketplace runbook with all projects + LOG.info( + "Publishing runbook {} as new marketplace runbook {}".format( + self.created_dsl_rb_name, self.marketplace_rb_name + ) + ) + command = [ + "publish", + "runbook", + self.created_dsl_rb_name, + "--version", + self.mpi1_version, + "--name", + self.marketplace_rb_name, + "--all_projects", + "--with_secrets", + ] + runner = CliRunner() + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response:
{}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail( + "Publishing of marketplace runbook as new marketplace item failed" + ) + + # Approve the runbook + LOG.info( + "Approving marketplace runbook {} with version {} and removing {} project".format( + self.marketplace_rb_name, self.mpi1_version, project_name + ) + ) + command = [ + "approve", + "marketplace", + "runbook", + self.marketplace_rb_name, + "--version", + self.mpi1_version, + "--remove-project", + project_name, + ] + + result = runner.invoke(cli, command) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + pytest.fail( + "Approving of marketplace runbook using remove-project flag failed" + ) + LOG.info("Success") + + mpi_data = get_mpi_by_name_n_version( + name=self.marketplace_rb_name, + version=self.mpi1_version, + app_source=MARKETPLACE_ITEM.SOURCES.LOCAL, + ) + + runbook_projects = [] + for _proj in mpi_data["spec"]["resources"]["project_reference_list"]: + runbook_projects.append(_proj["name"]) + + assert project_name not in runbook_projects + + self._delete_mpi() diff --git a/tests/cli/test_project_commands.py b/tests/cli/test_project_commands.py index ccfdbcfb..ff31fbfb 100644 --- a/tests/cli/test_project_commands.py +++ b/tests/cli/test_project_commands.py @@ -6,7 +6,7 @@ from distutils.version import LooseVersion as LV from click.testing import CliRunner from calm.dsl.api import get_api_client -from calm.dsl.cli.quotas import _get_quota +from calm.dsl.builtins.models.helper.quotas import _get_quota from calm.dsl.builtins.models.helper.common import get_project from calm.dsl.cli import main as cli from calm.dsl.builtins.models.metadata_payload import reset_metadata_obj diff --git a/tests/cli/test_scheduler_commands.py b/tests/cli/test_scheduler_commands.py index c6572035..82b800e0 100644 --- a/tests/cli/test_scheduler_commands.py +++ b/tests/cli/test_scheduler_commands.py @@ -252,6 +252,8 @@ def test_job_create_duplicate_name(self, dsl_file): assert "job with name '{}' already exists".format(jobname) in msgs + scheduler.delete_job([jobname]) + @pytest.mark.scheduler @pytest.mark.parametrize("dsl_file", ["job_create_blank_name.py"]) def test_job_name_blank(self, dsl_file): @@ -328,6 +330,8 @@ def test_job_list(self, dsl_file): "Job List API did not return the job which was created as part of the test" ) + scheduler.delete_job([job_name]) + @pytest.mark.scheduler @pytest.mark.parametrize("dsl_file", ["job_describe.py"]) def test_job_describe(self, dsl_file): @@ -366,6 +370,8 @@ def test_job_describe(self, dsl_file): LOG.info(job_response) assert job_response["resources"]["name"] == job_name + scheduler.delete_job([job_name]) + @pytest.mark.scheduler @pytest.mark.parametrize( "dsl_file, dsl_runbook_file", @@ -422,6 +428,103 @@ def test_job_scheduler(self, dsl_file, dsl_runbook_file): for record in result: assert record["resources"]["state"] != JOBINSTANCES.STATES.FAILED + scheduler.delete_job([jobname]) + + @pytest.mark.scheduler + @pytest.mark.parametrize( + "dsl_file, dsl_runbook_file", + [("job_recurring_no_expiration_runbook.py", "runbook_variables.py")], + ) + def test_recurring_rb_job_with_no_expiration(self, dsl_file, 
dsl_runbook_file): + """ + Test for recurring runbook job scheduler with no expiration + """ + current_path = os_lib.path.dirname(os_lib.path.realpath(__file__)) + dsl_file = os_lib.path.dirname(current_path) + "/scheduler/" + dsl_file + LOG.info("Scheduler py file used {}".format(dsl_file)) + + # Create runbook + current_path = os_lib.path.dirname(os_lib.path.realpath(__file__)) + dsl_file_name_for_runbook = dsl_file[dsl_file.rfind("/") :].replace("/", "") + runbook_name = dsl_file_name_for_runbook[: dsl_file_name_for_runbook.find(".")] + runbook_name_suffixed = "{}_{}".format(runbook_name, suffix()) + runbook_file = ( + os_lib.path.dirname(current_path) + "/scheduler/" + dsl_runbook_file + ) + + runbooks.create_runbook_command( + runbook_file, runbook_name_suffixed, description="", force=True + ) + jobname = "test_job_scheduler" + suffix() + file_replace( + dsl_file, + r'"{}.*\n'.format(runbook_name), + r'"{}"\n'.format(runbook_name_suffixed), + ) + result = scheduler.create_job_command(dsl_file, jobname, None, False) + assert result.get("resources").get("state") == "ACTIVE" + + client = get_api_client() + job_get_res = scheduler.get_job(client, jobname, all=True) + res, err = client.job.read(job_get_res["metadata"]["uuid"]) + job_response = res.json() + LOG.info(job_response) + schedule_info = job_response["resources"].get("schedule_info") + + assert job_response["resources"]["name"] == jobname + assert job_response["resources"]["type"] == "RECURRING" + assert ( + "expiry_time" not in schedule_info.keys() + ), "expiry_time should not be set for a job with no expiration" + + scheduler.delete_job([jobname]) + + @pytest.mark.scheduler + @pytest.mark.parametrize("dsl_file", ["job_recurring_no_expiration_app_action.py"]) + def test_recurring_app_job_with_no_expiration(self, dsl_file): + """ + Test for recurring app job scheduler with no expiration + """ + current_path = os_lib.path.dirname(os_lib.path.realpath(__file__)) + dsl_file = os_lib.path.dirname(current_path) + "/scheduler/" + dsl_file + LOG.info("Scheduler py file used {}".format(dsl_file)) + + # Create blueprint + current_path = os_lib.path.dirname(os_lib.path.realpath(__file__)) + dsl_file_name = dsl_file[dsl_file.rfind("/") :].replace("/", "") + bp_name = "{}_{}".format(dsl_file_name[: dsl_file_name.find(".")], suffix()) + + bp_file = os_lib.path.dirname(current_path) + "/scheduler/" + DSL_BP_FILE + client = get_api_client() + bps.create_blueprint_from_dsl(client, bp_file, bp_name, force_create=True) + app_name = "{}_{}".format(bp_name, suffix()) + # Launch Blueprint + bps.launch_blueprint_simple(bp_name, app_name=app_name, patch_editables=False) + + jobname = "test_job_scheduler" + suffix() + file_replace( + dsl_file, + r'"{}.*\n'.format(dsl_file_name[: dsl_file_name.find(".")]), + r'"{}"\n'.format(app_name), + ) + result = scheduler.create_job_command(dsl_file, jobname, None, False) + assert result.get("resources").get("state") == "ACTIVE" + + client = get_api_client() + job_get_res = scheduler.get_job(client, jobname, all=True) + res, err = client.job.read(job_get_res["metadata"]["uuid"]) + job_response = res.json() + LOG.info(job_response) + schedule_info = job_response["resources"].get("schedule_info") + + assert job_response["resources"]["name"] == jobname + assert job_response["resources"]["type"] == "RECURRING" + assert ( + "expiry_time" not in schedule_info.keys() + ), "expiry_time should not be set for a job with no expiration" + + scheduler.delete_job([jobname]) + # To create job by passing a custom job name def _create_job_with_custom_name(dsl_file, name=""):
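Aside: every CLI check in the test files above repeats the same invoke/log/fail boilerplate after `runner.invoke(cli, command)`. A shared helper along the following lines could collapse each of those blocks into a single call. This is only a sketch, not part of this patch: the `_assert_cli_success` name is an assumption, and `LOG` stands for the module-level logging handle these test modules already create via `get_logging_handle(__name__)`.

import json
import traceback

import pytest

from calm.dsl.log import get_logging_handle

# Same handle the surrounding test modules already use (assumed import path).
LOG = get_logging_handle(__name__)


def _assert_cli_success(result, fail_msg):
    """Fail the test with fail_msg if a click CliRunner result has a non-zero exit code."""
    if result.exit_code:
        # Mirror the existing debug output: dump the CLI response, then the traceback.
        cli_res_dict = {"Output": result.output, "Exception": str(result.exception)}
        LOG.debug(
            "Cli Response: {}".format(
                json.dumps(cli_res_dict, indent=4, separators=(",", ": "))
            )
        )
        LOG.debug(
            "Traceback: \n{}".format(
                "".join(traceback.format_tb(result.exc_info[2]))
            )
        )
        pytest.fail(fail_msg)

With that helper, each `if result.exit_code:` block above would reduce to a call such as `_assert_cli_success(result, "Approving of marketplace runbook failed")`.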
diff --git a/tests/escript/test_escript.py b/tests/escript/test_escript.py index 9d0e9e10..8fea7e9b 100644 --- a/tests/escript/test_escript.py +++ b/tests/escript/test_escript.py @@ -104,6 +104,7 @@ def get_actual_script_status(client, runlog_uuid, script_name): @pytest.mark.escript +@pytest.mark.nightly_380 class TestEscript: def setup_class(self): """setup class method""" diff --git a/tests/escript/test_parallel_escript.py b/tests/escript/test_parallel_escript.py index 3b6c41c2..f400ea54 100644 --- a/tests/escript/test_parallel_escript.py +++ b/tests/escript/test_parallel_escript.py @@ -83,6 +83,7 @@ def get_escript_version_status(escript): @pytest.mark.escript +@pytest.mark.nightly_380 class TestEscript: @pytest.mark.parametrize( "escript, parallel_count", diff --git a/tests/example_accounts/test_ndb_account.py b/tests/example_accounts/ndb_account.py similarity index 100% rename from tests/example_accounts/test_ndb_account.py rename to tests/example_accounts/ndb_account.py diff --git a/tests/example_accounts/updated_accounts/test_update_ndb_account.py b/tests/example_accounts/updated_accounts/update_ndb_account.py similarity index 100% rename from tests/example_accounts/updated_accounts/test_update_ndb_account.py rename to tests/example_accounts/updated_accounts/update_ndb_account.py diff --git a/tests/multivm_migrate/test_multivm_migrate.py b/tests/multivm_migrate/test_multivm_migrate.py index 25f1afa6..a17436e5 100644 --- a/tests/multivm_migrate/test_multivm_migrate.py +++ b/tests/multivm_migrate/test_multivm_migrate.py @@ -35,6 +35,7 @@ PROJECT_NAME = PROJECT["NAME"] +@pytest.mark.nightly_380 class TestMultiVmMigrate: app_helper = ApplicationHelper() diff --git a/tests/sample_runbooks/confirm_task.py b/tests/sample_runbooks/confirm_task.py index 93fec347..9672c4b2 100644 --- a/tests/sample_runbooks/confirm_task.py +++ b/tests/sample_runbooks/confirm_task.py @@ -7,15 +7,15 @@ from calm.dsl.runbooks import RunbookTask as Task -code = '''print "Hello" -print "Bye"''' +code = """print("Hello") +print("Bye")""" @runbook def DslConfirmRunbook(): "Runbook Service example" Task.Confirm(name="Confirm_Task") - Task.Exec.escript(name="Exec_Task", script=code) + Task.Exec.escript.py3(name="Exec_Task", script=code) def main(): diff --git a/tests/sample_runbooks/decision_task.py b/tests/sample_runbooks/decision_task.py index 7ba0cd3c..cfb5519c 100644 --- a/tests/sample_runbooks/decision_task.py +++ b/tests/sample_runbooks/decision_task.py @@ -32,12 +32,12 @@ def DslDecisionRunbook(): ) as d: if d.ok: - Task.Exec.escript( - name="Task1", script="print 'Decision Task is Successful'" + Task.Exec.escript.py3( + name="Task1", script="print('Decision Task is Successful')" ) else: - Task.Exec.escript(name="Task2", script="print 'Decision Task Failed'") + Task.Exec.escript.py3(name="Task2", script="print('Decision Task Failed')") def main(): diff --git a/tests/sample_runbooks/inherit_target_runbook.py b/tests/sample_runbooks/inherit_target_runbook.py index d1c6949c..568c28a3 100644 --- a/tests/sample_runbooks/inherit_target_runbook.py +++ b/tests/sample_runbooks/inherit_target_runbook.py @@ -21,9 +21,9 @@ def DslInheritTargetRunbook(): ) as d: if d.ok: - Task.Exec.escript( + Task.Exec.escript.py3( name="Task1", - script="print 'Decision Task is Successful'", + script="print('Decision Task is Successful')", inherit_target=True, ) diff --git a/tests/sample_runbooks/input_task.py b/tests/sample_runbooks/input_task.py index 7768076b..a8eabcc4 100644 --- a/tests/sample_runbooks/input_task.py +++ 
b/tests/sample_runbooks/input_task.py @@ -7,11 +7,11 @@ from calm.dsl.runbooks import RunbookTask as Task, RunbookVariable as Variable -code = """print "Hello @@{user_name}@@" -print "Your Password is @@{password}@@" -print "Date you selected is @@{date}@@" -print "Time selected is @@{time}@@" -print "User selected is @@{user}@@" +code = """print("Hello @@{user_name}@@") +print("Your Password is @@{password}@@") +print("Date you selected is @@{date}@@") +print("Time selected is @@{time}@@") +print("User selected is @@{user}@@") """ @@ -30,7 +30,7 @@ def DslInputRunbook(): ), ], ) - Task.Exec.escript(name="Exec_Task", script=code) + Task.Exec.escript.py3(name="Exec_Task", script=code) def main(): diff --git a/tests/sample_runbooks/parallel.py b/tests/sample_runbooks/parallel.py index 58734602..355d3161 100644 --- a/tests/sample_runbooks/parallel.py +++ b/tests/sample_runbooks/parallel.py @@ -8,28 +8,28 @@ from calm.dsl.runbooks import RunbookTask as Task -code = '''print "Start" +code = """print("Start") sleep(20) -print "End"''' +print("End")""" @runbook def DslParallelRunbook(): "Runbook example for running tasks in parallel" - Task.Exec.escript(name="root", script=code) + Task.Exec.escript.py3(name="root", script=code) with parallel() as p: with branch(p): - Task.Exec.escript(name="Task1", script=code) + Task.Exec.escript.py3(name="Task1", script=code) with branch(p): - Task.Exec.escript(name="Task2", script=code) - Task.Exec.escript(name="Task3", script=code) + Task.Exec.escript.py3(name="Task2", script=code) + Task.Exec.escript.py3(name="Task3", script=code) with branch(p): - Task.Exec.escript(name="Task4", script=code) - Task.Exec.escript(name="Task5", script=code) + Task.Exec.escript.py3(name="Task4", script=code) + Task.Exec.escript.py3(name="Task5", script=code) def main(): diff --git a/tests/sample_runbooks/runbook_variables.py b/tests/sample_runbooks/runbook_variables.py index 297f8527..1d2d0ea3 100644 --- a/tests/sample_runbooks/runbook_variables.py +++ b/tests/sample_runbooks/runbook_variables.py @@ -8,17 +8,17 @@ code = """ -print "@@{var1}@@" +print("@@{var1}@@") if "@@{var1}@@" == "test": - print "yes" + print("yes") else: - print "no" -print "@@{var2}@@" + print("no") +print("@@{var2}@@") if "@@{var2}@@" == "test": - print "yes" + print("yes") else: - print "no" -print "Hello @@{firstname}@@ @@{lastname}@@" + print("no") +print("Hello @@{firstname}@@ @@{lastname}@@") """ @@ -30,7 +30,7 @@ def DslRunbookWithVariables(): var2 = Variable.Simple.Secret("test", runtime=True) # noqa firstname = Variable.Simple("FIRSTNAME", runtime=True) # noqa lastname = Variable.Simple("LASTNAME") # noqa - Task.Exec.escript(name="Exec_Task", script=code) + Task.Exec.escript.py3(name="Exec_Task", script=code) def main(): diff --git a/tests/sample_runbooks/set_variable.py b/tests/sample_runbooks/set_variable.py index a8782932..0ea67af3 100644 --- a/tests/sample_runbooks/set_variable.py +++ b/tests/sample_runbooks/set_variable.py @@ -19,11 +19,11 @@ def DslSetVariableTask(endpoints=[endpoint], default=False): "Runbook example with Set Variable Tasks" - Task.SetVariable.escript(script="print 'var1=test'", variables=["var1"]) + Task.SetVariable.escript.py3(script="print('var1=test')", variables=["var1"]) Task.SetVariable.ssh( filename="scripts/sample_script.sh", variables=["var2"], target=endpoints[0] ) - Task.Exec.escript(script="print '@@{var1}@@ @@{var2}@@'") + Task.Exec.escript.py3(script="print('@@{var1}@@ @@{var2}@@')") def main(): diff --git a/tests/sample_runbooks/simple_runbook.py 
b/tests/sample_runbooks/simple_runbook.py index 3fc6ea62..b4e71497 100644 --- a/tests/sample_runbooks/simple_runbook.py +++ b/tests/sample_runbooks/simple_runbook.py @@ -7,20 +7,20 @@ from calm.dsl.runbooks import RunbookTask as Task -code = '''print "Start" +code = """print("Start") sleep(20) -print "End"''' +print("End")""" @runbook def DslSimpleRunbook(): "Runbook example" - Task.Exec.escript(name="Task1", script=code) - Task.Exec.escript(name="Task2", script=code) - Task.Exec.escript(name="Task3", script=code) - Task.Exec.escript(name="Task4", script=code) - Task.Exec.escript(name="Task5", script=code) + Task.Exec.escript.py3(name="Task1", script=code) + Task.Exec.escript.py3(name="Task2", script=code) + Task.Exec.escript.py3(name="Task3", script=code) + Task.Exec.escript.py3(name="Task4", script=code) + Task.Exec.escript.py3(name="Task5", script=code) def main(): diff --git a/tests/sample_runbooks/test_decision_task.json b/tests/sample_runbooks/test_decision_task.json index 51416215..78231feb 100644 --- a/tests/sample_runbooks/test_decision_task.json +++ b/tests/sample_runbooks/test_decision_task.json @@ -47,8 +47,8 @@ "description": "", "type": "EXEC", "attrs": { - "script_type": "static", - "script": "print 'Decision Task is Successful'" + "script_type": "static_py3", + "script": "print('Decision Task is Successful')" }, "child_tasks_local_reference_list": [], "variable_list": [], @@ -75,8 +75,8 @@ "description": "", "type": "EXEC", "attrs": { - "script_type": "static", - "script": "print 'Decision Task Failed'" + "script_type": "static_py3", + "script": "print('Decision Task Failed')" }, "child_tasks_local_reference_list": [], "variable_list": [], diff --git a/tests/sample_runbooks/test_inherit_target_runbook.json b/tests/sample_runbooks/test_inherit_target_runbook.json index eb172e12..24b86ce2 100644 --- a/tests/sample_runbooks/test_inherit_target_runbook.json +++ b/tests/sample_runbooks/test_inherit_target_runbook.json @@ -47,8 +47,8 @@ "description": "", "type": "EXEC", "attrs": { - "script_type": "static", - "script": "print 'Decision Task is Successful'" + "script_type": "static_py3", + "script": "print('Decision Task is Successful')" }, "child_tasks_local_reference_list": [], "variable_list": [], diff --git a/tests/sample_runbooks/test_parallel.json b/tests/sample_runbooks/test_parallel.json index d786ed3e..5fbd0314 100644 --- a/tests/sample_runbooks/test_parallel.json +++ b/tests/sample_runbooks/test_parallel.json @@ -103,8 +103,8 @@ "description": "", "type": "EXEC", "attrs": { - "script_type": "static", - "script": "print \"Start\"\nsleep(20)\nprint \"End\"" + "script_type": "static_py3", + "script": "print(\"Start\")\nsleep(20)\nprint(\"End\")" }, "child_tasks_local_reference_list": [], "variable_list": [], @@ -116,8 +116,8 @@ "description": "", "type": "EXEC", "attrs": { - "script_type": "static", - "script": "print \"Start\"\nsleep(20)\nprint \"End\"" + "script_type": "static_py3", + "script": "print(\"Start\")\nsleep(20)\nprint(\"End\")" }, "child_tasks_local_reference_list": [], "variable_list": [], @@ -129,8 +129,8 @@ "description": "", "type": "EXEC", "attrs": { - "script_type": "static", - "script": "print \"Start\"\nsleep(20)\nprint \"End\"" + "script_type": "static_py3", + "script": "print(\"Start\")\nsleep(20)\nprint(\"End\")" }, "child_tasks_local_reference_list": [], "variable_list": [], @@ -142,8 +142,8 @@ "description": "", "type": "EXEC", "attrs": { - "script_type": "static", - "script": "print \"Start\"\nsleep(20)\nprint \"End\"" + "script_type": 
"static_py3", + "script": "print(\"Start\")\nsleep(20)\nprint(\"End\")" }, "child_tasks_local_reference_list": [], "variable_list": [], @@ -155,8 +155,8 @@ "description": "", "type": "EXEC", "attrs": { - "script_type": "static", - "script": "print \"Start\"\nsleep(20)\nprint \"End\"" + "script_type": "static_py3", + "script": "print(\"Start\")\nsleep(20)\nprint(\"End\")" }, "child_tasks_local_reference_list": [], "variable_list": [], @@ -168,8 +168,8 @@ "description": "", "type": "EXEC", "attrs": { - "script_type": "static", - "script": "print \"Start\"\nsleep(20)\nprint \"End\"" + "script_type": "static_py3", + "script": "print(\"Start\")\nsleep(20)\nprint(\"End\")" }, "child_tasks_local_reference_list": [], "variable_list": [], diff --git a/tests/sample_runbooks/test_runbook_variables.json b/tests/sample_runbooks/test_runbook_variables.json index ac94eb56..a8d33f57 100644 --- a/tests/sample_runbooks/test_runbook_variables.json +++ b/tests/sample_runbooks/test_runbook_variables.json @@ -32,8 +32,8 @@ "description": "", "type": "EXEC", "attrs": { - "script_type": "static", - "script": "\nprint \"@@{var1}@@\"\nif \"@@{var1}@@\" == \"test\":\n print \"yes\"\nelse:\n print \"no\"\nprint \"@@{var2}@@\"\nif \"@@{var2}@@\" == \"test\":\n print \"yes\"\nelse:\n print \"no\"\nprint \"Hello @@{firstname}@@ @@{lastname}@@\"\n" + "script_type": "static_py3", + "script": "\nprint(\"@@{var1}@@\")\nif \"@@{var1}@@\" == \"test\":\n print(\"yes\")\nelse:\n print(\"no\")\nprint(\"@@{var2}@@\")\nif \"@@{var2}@@\" == \"test\":\n print(\"yes\")\nelse:\n print(\"no\")\nprint(\"Hello @@{firstname}@@ @@{lastname}@@\")\n" }, "child_tasks_local_reference_list": [], "variable_list": [], diff --git a/tests/sample_runbooks/test_simple_runbook.json b/tests/sample_runbooks/test_simple_runbook.json index 3073d62b..df6540e0 100644 --- a/tests/sample_runbooks/test_simple_runbook.json +++ b/tests/sample_runbooks/test_simple_runbook.json @@ -89,8 +89,8 @@ "description": "", "type": "EXEC", "attrs": { - "script_type": "static", - "script": "print \"Start\"\nsleep(20)\nprint \"End\"" + "script_type": "static_py3", + "script": "print(\"Start\")\nsleep(20)\nprint(\"End\")" }, "child_tasks_local_reference_list": [], "variable_list": [], @@ -102,8 +102,8 @@ "description": "", "type": "EXEC", "attrs": { - "script_type": "static", - "script": "print \"Start\"\nsleep(20)\nprint \"End\"" + "script_type": "static_py3", + "script": "print(\"Start\")\nsleep(20)\nprint(\"End\")" }, "child_tasks_local_reference_list": [], "variable_list": [], @@ -115,8 +115,8 @@ "description": "", "type": "EXEC", "attrs": { - "script_type": "static", - "script": "print \"Start\"\nsleep(20)\nprint \"End\"" + "script_type": "static_py3", + "script": "print(\"Start\")\nsleep(20)\nprint(\"End\")" }, "child_tasks_local_reference_list": [], "variable_list": [], @@ -128,8 +128,8 @@ "description": "", "type": "EXEC", "attrs": { - "script_type": "static", - "script": "print \"Start\"\nsleep(20)\nprint \"End\"" + "script_type": "static_py3", + "script": "print(\"Start\")\nsleep(20)\nprint(\"End\")" }, "child_tasks_local_reference_list": [], "variable_list": [], @@ -141,8 +141,8 @@ "description": "", "type": "EXEC", "attrs": { - "script_type": "static", - "script": "print \"Start\"\nsleep(20)\nprint \"End\"" + "script_type": "static_py3", + "script": "print(\"Start\")\nsleep(20)\nprint(\"End\")" }, "child_tasks_local_reference_list": [], "variable_list": [], diff --git a/tests/sample_runbooks/test_while_loop.json b/tests/sample_runbooks/test_while_loop.json index 
5a95d9b7..d75d8fec 100644 --- a/tests/sample_runbooks/test_while_loop.json +++ b/tests/sample_runbooks/test_while_loop.json @@ -62,8 +62,8 @@ "description": "", "type": "EXEC", "attrs": { - "script_type": "static", - "script": "print 'Inside loop1 @@{loop_var}@@'" + "script_type": "static_py3", + "script": "print('Inside loop1 @@{loop_var}@@')" }, "child_tasks_local_reference_list": [], "variable_list": [], @@ -109,8 +109,8 @@ "description": "", "type": "EXEC", "attrs": { - "script_type": "static", - "script": "print 'Inside loop2 @@{iteration}@@'" + "script_type": "static_py3", + "script": "print('Inside loop2 @@{iteration}@@')" }, "child_tasks_local_reference_list": [], "variable_list": [], diff --git a/tests/sample_runbooks/while_loop.py b/tests/sample_runbooks/while_loop.py index 9cd675ab..8970ea06 100644 --- a/tests/sample_runbooks/while_loop.py +++ b/tests/sample_runbooks/while_loop.py @@ -17,10 +17,14 @@ def DslWhileLoopRunbook(): exit_condition=Status.SUCCESS, loop_variable="loop_var", ): - Task.Exec.escript(name="Task1", script="print 'Inside loop1 @@{loop_var}@@'") + Task.Exec.escript.py3( + name="Task1", script="print('Inside loop1 @@{loop_var}@@')" + ) with Task.Loop(iterations=2, name="WhileTask2"): - Task.Exec.escript(name="Task2", script="print 'Inside loop2 @@{iteration}@@'") + Task.Exec.escript.py3( + name="Task2", script="print('Inside loop2 @@{iteration}@@')" + ) def main(): diff --git a/tests/scheduler/job_recurring_no_expiration_app_action.py b/tests/scheduler/job_recurring_no_expiration_app_action.py new file mode 100644 index 00000000..8ace067e --- /dev/null +++ b/tests/scheduler/job_recurring_no_expiration_app_action.py @@ -0,0 +1,26 @@ +from datetime import datetime, timedelta + +from calm.dsl.builtins import Job, JobScheduler + +start_date = datetime.now() + timedelta(seconds=10) +start_date = ( + str(start_date.strftime("%Y-%m-%dT%H:%M:%SZ")).replace("T", " ").replace("Z", "") +) +cron = "50 23 * * *" +time_zone = "Asia/Calcutta" + +APP_NAME = "job_recurring_no_expiration_app_action" + + +class JobRecurring(Job): + """ + Recurring job with no expiration that runs the Start action on an app. + Note: omit the expiry_time parameter to create a job with no expiration. + """ + + name = "test_no_expiration_app_job" + schedule_info = JobScheduler.ScheduleInfo.recurring( + cron, start_date, time_zone=time_zone + ) + + executable = JobScheduler.Exec.app_action(APP_NAME, "Start") diff --git a/tests/scheduler/job_recurring_no_expiration_runbook.py b/tests/scheduler/job_recurring_no_expiration_runbook.py new file mode 100644 index 00000000..32fae8e6 --- /dev/null +++ b/tests/scheduler/job_recurring_no_expiration_runbook.py @@ -0,0 +1,26 @@ +from datetime import datetime, timedelta + +from calm.dsl.builtins import Job, JobScheduler + +start_date = datetime.now() + timedelta(seconds=10) +start_date = ( + str(start_date.strftime("%Y-%m-%dT%H:%M:%SZ")).replace("T", " ").replace("Z", "") +) +cron = "50 23 * * *" +time_zone = "Asia/Calcutta" + +RUNBOOK_NAME = "job_recurring_no_expiration_runbook" + + +class JobRecurring(Job): + """ + Recurring job with no expiration that executes a runbook. + Note: omit the expiry_time parameter to create a job with no expiration.
+ """ + + name = "test_no_expiration_rb_job" + schedule_info = JobScheduler.ScheduleInfo.recurring( + cron, start_date, time_zone=time_zone + ) + + executable = JobScheduler.Exec.runbook(RUNBOOK_NAME, False) diff --git a/tests/simple_blueprint/test_simple_bp_create.py b/tests/simple_blueprint/test_simple_bp_create.py index aedd7be1..4bad1dec 100644 --- a/tests/simple_blueprint/test_simple_bp_create.py +++ b/tests/simple_blueprint/test_simple_bp_create.py @@ -19,7 +19,13 @@ LOG = get_logging_handle(__name__) SIMPLE_BP_FILE_PATH = "tests/simple_blueprint/test_simple_blueprint.py" +SIMPLE_BP_FILE_PATH2 = ( + "tests/simple_blueprint/test_simple_bp_with_downloadable_image.py" +) SIMPLE_BP_OUT_PATH = "tests/simple_blueprint/test_simple_blueprint.json" +SIMPLE_BP_OUT_PATH2 = ( + "tests/simple_blueprint/test_simple_bp_with_downloadable_image.json" +) DSL_CONFIG = json.loads(read_local_file(".tests/config.json")) NTNX_LOCAL_ACCOUNT = DSL_CONFIG["ACCOUNTS"]["NTNX_LOCAL_AZ"] @@ -43,7 +49,11 @@ def teardown_method(self): self.created_bp_list = [] - def test_create_bp(self): + @pytest.mark.parametrize( + "bp_file_path", + [SIMPLE_BP_FILE_PATH, SIMPLE_BP_FILE_PATH2], + ) + def test_create_bp(self, bp_file_path): runner = CliRunner() created_dsl_bp_name = "Test_Simple_DSL_BP_{}".format(int(time.time())) @@ -53,7 +63,7 @@ def test_create_bp(self): [ "create", "bp", - "--file={}".format(SIMPLE_BP_FILE_PATH), + "--file={}".format(bp_file_path), "--name={}".format(created_dsl_bp_name), "--description='Test DSL Blueprint; to delete'", ], @@ -78,12 +88,19 @@ def test_create_bp(self): LOG.info("Success") - def test_compile(self): + @pytest.mark.parametrize( + "bp_file_path, json_file_path", + [ + (SIMPLE_BP_FILE_PATH, SIMPLE_BP_OUT_PATH), + (SIMPLE_BP_FILE_PATH2, SIMPLE_BP_OUT_PATH2), + ], + ) + def test_compile(self, bp_file_path, json_file_path): runner = CliRunner() - LOG.info("Compiling bp at {}".format(SIMPLE_BP_FILE_PATH)) + LOG.info("Compiling bp at {}".format(bp_file_path)) result = runner.invoke( - cli, ["-vv", "compile", "bp", "--file={}".format(SIMPLE_BP_FILE_PATH)] + cli, ["-vv", "compile", "bp", "--file={}".format(bp_file_path)] ) if result.exit_code: cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} @@ -109,7 +126,7 @@ def test_compile(self): generated_json["spec"]["resources"]["app_profile_list"][0].pop( "patch_list", None ) - known_json = json.loads(open(SIMPLE_BP_OUT_PATH).read()) + known_json = json.loads(open(json_file_path).read()) # Change dynamic values in known json and remove account_uuid from generated_json for _sd in known_json["spec"]["resources"]["substrate_definition_list"]: diff --git a/tests/simple_blueprint/test_simple_bp_with_downloadable_image.json b/tests/simple_blueprint/test_simple_bp_with_downloadable_image.json new file mode 100644 index 00000000..7b089314 --- /dev/null +++ b/tests/simple_blueprint/test_simple_bp_with_downloadable_image.json @@ -0,0 +1,439 @@ +{ + "metadata": { + "spec_version": 1, + "kind": "blueprint", + "name": "SimpleLampBlueprint", + "categories": {} + }, + "spec": { + "name": "SimpleLampBlueprint", + "description": "Simple blueprint Spec", + "resources": { + "service_definition_list": [ + { + "name": "VmDeploymentService", + "description": "Single VM service", + "port_list": [], + "singleton": false, + "tier": "", + "depends_on_list": [], + "variable_list": [], + "container_spec": {}, + "action_list": [ + { + "name": "action_create", + "description": "", + "type": "system", + "critical": true, + "runbook": { + "name": 
"Runbook_for_Service_VmDeploymentService_action_create", + "description": "", + "main_task_local_reference": { + "kind": "app_task", + "name": "DAG_Task_for_Service_VmDeploymentService_action_create" + }, + "task_definition_list": [ + { + "name": "DAG_Task_for_Service_VmDeploymentService_action_create", + "description": "", + "type": "DAG", + "target_any_local_reference": { + "kind": "app_service", + "name": "VmDeploymentService" + }, + "attrs": { + "edges": [] + }, + "child_tasks_local_reference_list": [], + "variable_list": [], + "retries": "", + "timeout_secs": "" + } + ], + "variable_list": [] + } + }, + { + "name": "action_delete", + "description": "", + "type": "system", + "critical": true, + "runbook": { + "name": "Runbook_for_Service_VmDeploymentService_action_delete", + "description": "", + "main_task_local_reference": { + "kind": "app_task", + "name": "DAG_Task_for_Service_VmDeploymentService_action_delete" + }, + "task_definition_list": [ + { + "name": "DAG_Task_for_Service_VmDeploymentService_action_delete", + "description": "", + "type": "DAG", + "target_any_local_reference": { + "kind": "app_service", + "name": "VmDeploymentService" + }, + "attrs": { + "edges": [] + }, + "child_tasks_local_reference_list": [], + "variable_list": [], + "retries": "", + "timeout_secs": "" + } + ], + "variable_list": [] + } + }, + { + "name": "action_start", + "description": "", + "type": "system", + "critical": true, + "runbook": { + "name": "Runbook_for_Service_VmDeploymentService_action_start", + "description": "", + "main_task_local_reference": { + "kind": "app_task", + "name": "DAG_Task_for_Service_VmDeploymentService_action_start" + }, + "task_definition_list": [ + { + "name": "DAG_Task_for_Service_VmDeploymentService_action_start", + "description": "", + "type": "DAG", + "target_any_local_reference": { + "kind": "app_service", + "name": "VmDeploymentService" + }, + "attrs": { + "edges": [] + }, + "child_tasks_local_reference_list": [], + "variable_list": [], + "retries": "", + "timeout_secs": "" + } + ], + "variable_list": [] + } + }, + { + "name": "action_stop", + "description": "", + "type": "system", + "critical": true, + "runbook": { + "name": "Runbook_for_Service_VmDeploymentService_action_stop", + "description": "", + "main_task_local_reference": { + "kind": "app_task", + "name": "DAG_Task_for_Service_VmDeploymentService_action_stop" + }, + "task_definition_list": [ + { + "name": "DAG_Task_for_Service_VmDeploymentService_action_stop", + "description": "", + "type": "DAG", + "target_any_local_reference": { + "kind": "app_service", + "name": "VmDeploymentService" + }, + "attrs": { + "edges": [] + }, + "child_tasks_local_reference_list": [], + "variable_list": [], + "retries": "", + "timeout_secs": "" + } + ], + "variable_list": [] + } + }, + { + "name": "action_restart", + "description": "", + "type": "system", + "critical": true, + "runbook": { + "name": "Runbook_for_Service_VmDeploymentService_action_restart", + "description": "", + "main_task_local_reference": { + "kind": "app_task", + "name": "DAG_Task_for_Service_VmDeploymentService_action_restart" + }, + "task_definition_list": [ + { + "name": "DAG_Task_for_Service_VmDeploymentService_action_restart", + "description": "", + "type": "DAG", + "target_any_local_reference": { + "kind": "app_service", + "name": "VmDeploymentService" + }, + "attrs": { + "edges": [] + }, + "child_tasks_local_reference_list": [], + "variable_list": [], + "retries": "", + "timeout_secs": "" + } + ], + "variable_list": [] + } + }, + { + "name": 
"action_soft_delete", + "description": "", + "type": "system", + "critical": true, + "runbook": { + "name": "Runbook_for_Service_VmDeploymentService_action_soft_delete", + "description": "", + "main_task_local_reference": { + "kind": "app_task", + "name": "DAG_Task_for_Service_VmDeploymentService_action_soft_delete" + }, + "task_definition_list": [ + { + "name": "DAG_Task_for_Service_VmDeploymentService_action_soft_delete", + "description": "", + "type": "DAG", + "target_any_local_reference": { + "kind": "app_service", + "name": "VmDeploymentService" + }, + "attrs": { + "edges": [] + }, + "child_tasks_local_reference_list": [], + "variable_list": [], + "retries": "", + "timeout_secs": "" + } + ], + "variable_list": [] + } + } + ] + } + ], + "package_definition_list": [ + { + "name": "VmDeploymentPackage", + "description": "", + "type": "CUSTOM", + "options": { + "install_runbook": { + "name": "Runbook_for_Package_VmDeploymentPackage_action_install", + "description": "", + "main_task_local_reference": { + "kind": "app_task", + "name": "DAG_Task_for_Package_VmDeploymentPackage_action_install" + }, + "task_definition_list": [ + { + "name": "DAG_Task_for_Package_VmDeploymentPackage_action_install", + "description": "", + "type": "DAG", + "target_any_local_reference": { + "kind": "app_service", + "name": "VmDeploymentService" + }, + "attrs": { + "edges": [] + }, + "child_tasks_local_reference_list": [], + "variable_list": [], + "retries": "", + "timeout_secs": "" + } + ], + "variable_list": [] + }, + "uninstall_runbook": { + "name": "Runbook_for_Package_VmDeploymentPackage_action_uninstall", + "description": "", + "main_task_local_reference": { + "kind": "app_task", + "name": "DAG_Task_for_Package_VmDeploymentPackage_action_uninstall" + }, + "task_definition_list": [ + { + "name": "DAG_Task_for_Package_VmDeploymentPackage_action_uninstall", + "description": "", + "type": "DAG", + "target_any_local_reference": { + "kind": "app_service", + "name": "VmDeploymentService" + }, + "attrs": { + "edges": [] + }, + "child_tasks_local_reference_list": [], + "variable_list": [], + "retries": "", + "timeout_secs": "" + } + ], + "variable_list": [] + } + }, + "service_local_reference_list": [ + { + "kind": "app_service", + "name": "VmDeploymentService" + } + ], + "variable_list": [], + "version": "" + }, + { + "name": "centos_disk", + "description": "", + "type": "SUBSTRATE_IMAGE", + "options": { + "name": "centos_disk", + "description": "", + "resources": { + "image_type": "DISK_IMAGE", + "source_uri": "http://download.nutanix.com/calm/CentOS-7-x86_64-1810.qcow2", + "architecture": "X86_64", + "version": { + "product_version": "1.0", + "product_name": "centos_disk" + }, + "checksum": {} + } + }, + "service_local_reference_list": [], + "variable_list": [], + "action_list": [], + "version": "" + } + ], + "substrate_definition_list": [ + { + "name": "VmDeploymentSubstrate", + "description": "", + "type": "AHV_VM", + "os_type": "Linux", + "create_spec": { + "name": "@@{calm_application_name}@@-@@{calm_array_index}@@", + "resources": { + "boot_config": { + "boot_device": { + "disk_address": { + "adapter_type": "SCSI", + "device_index": 0 + } + } + }, + "disk_list": [ + { + "data_source_reference": { + "kind": "image", + "name": "Centos7", + "uuid": "294fa133-be65-4393-aae8-e3b10a0b4293" + }, + "device_properties": { + "device_type": "DISK", + "disk_address": { + "adapter_type": "SCSI", + "device_index": 0 + } + }, + "disk_size_mib": 0 + } + ], + "memory_size_mib": 1024, + "nic_list": [ + { + 
"network_function_nic_type": "INGRESS", + "nic_type": "NORMAL_NIC", + "subnet_reference": { + "kind": "subnet", + "uuid": "fbc15051-dfc0-4f37-a4ef-8bd761257f44" + } + } + ], + "num_sockets": 1, + "num_vcpus_per_socket": 1, + "power_state": "ON" + } + }, + "variable_list": [], + "action_list": [], + "readiness_probe": { + "connection_type": "SSH", + "connection_port": 22, + "connection_protocol": "", + "timeout_secs": "", + "delay_secs": "60", + "retries": "5", + "address": "@@{platform.status.resources.nic_list[0].ip_endpoint_list[0].ip}@@", + "disable_readiness_probe": true + }, + "editables": {} + } + ], + "credential_definition_list": [ + { + "name": "default cred", + "description": "", + "type": "PASSWORD", + "username": "root", + "secret": { + "attrs": { + "is_secret_modified": true + }, + "value": "passwd" + }, + "cred_class": "static", + "editables": {} + } + ], + "app_profile_list": [ + { + "name": "SimpleLampBlueprintProfile", + "deployment_create_list": [ + { + "published_service_local_reference_list": [], + "package_local_reference_list": [ + { + "kind": "app_package", + "name": "VmDeploymentPackage" + } + ], + "substrate_local_reference": { + "kind": "app_substrate", + "name": "VmDeploymentSubstrate" + }, + "depends_on_list": [], + "variable_list": [], + "action_list": [], + "min_replicas": "1", + "default_replicas": "", + "max_replicas": "1", + "type": "GREENFIELD", + "name": "VmDeployment", + "options": {}, + "description": "", + "editables": {} + } + ], + "variable_list": [], + "action_list": [] + } + ], + "published_service_definition_list": [], + "default_credential_local_reference": { + "kind": "app_credential", + "name": "default cred" + } + } + } +} \ No newline at end of file diff --git a/tests/simple_blueprint/test_simple_bp_with_downloadable_image.py b/tests/simple_blueprint/test_simple_bp_with_downloadable_image.py new file mode 100644 index 00000000..5f04e3d4 --- /dev/null +++ b/tests/simple_blueprint/test_simple_bp_with_downloadable_image.py @@ -0,0 +1,49 @@ +import json + +from calm.dsl.builtins import SimpleDeployment, SimpleBlueprint +from calm.dsl.builtins import read_provider_spec, Ref, Metadata +from calm.dsl.builtins import read_local_file, basic_cred +from calm.dsl.builtins import vm_disk_package + +DSL_CONFIG = json.loads(read_local_file(".tests/config.json")) + +CRED_USERNAME = read_local_file(".tests/username") +CRED_PASSWORD = read_local_file(".tests/password") + +# OS Image details for VM +CENTOS_IMAGE_SOURCE = "http://download.nutanix.com/calm/CentOS-7-x86_64-1810.qcow2" +CentosPackage = vm_disk_package( + name="centos_disk", + config={"image": {"source": CENTOS_IMAGE_SOURCE}}, +) + +# project constants +PROJECT = DSL_CONFIG["PROJECTS"]["PROJECT1"] +PROJECT_NAME = PROJECT["NAME"] +NTNX_LOCAL_ACCOUNT = DSL_CONFIG["ACCOUNTS"]["NTNX_LOCAL_AZ"] +SUBNET_UUID = NTNX_LOCAL_ACCOUNT["SUBNETS"][0]["UUID"] + +Centos = basic_cred(CRED_USERNAME, CRED_PASSWORD, name="default cred", default=True) + + +class VmDeployment(SimpleDeployment): + """Single VM service""" + + # VM Spec + provider_spec = read_provider_spec("specs/ahv_provider_spec.yaml") + provider_spec.spec["resources"]["nic_list"][0]["subnet_reference"][ + "uuid" + ] = SUBNET_UUID + + +class SimpleLampBlueprint(SimpleBlueprint): + """Simple blueprint Spec""" + + credentials = [Centos] + deployments = [VmDeployment] + packages = [CentosPackage] # add downloadable image packages here + + +class BpMetadata(Metadata): + + project = Ref.Project(PROJECT_NAME) diff --git 
a/tests/snapshot_restore_blueprint/test_snapshot_restore_blueprint.json b/tests/snapshot_restore_blueprint/test_snapshot_restore_blueprint.json deleted file mode 100644 index 95640941..00000000 --- a/tests/snapshot_restore_blueprint/test_snapshot_restore_blueprint.json +++ /dev/null @@ -1,897 +0,0 @@ -{ - "name": "Hello", - "description": "", - "resources": { - "type": "USER", - "service_definition_list": [ - { - "name": "HelloService", - "description": "Sample Service", - "port_list": [], - "singleton": false, - "tier": "", - "depends_on_list": [], - "variable_list": [], - "action_list": [ - { - "name": "action_create", - "description": "", - "type": "system", - "critical": true, - "runbook": { - "name": "HelloService___create___runbook", - "description": "", - "main_task_local_reference": { - "kind": "app_task", - "name": "HelloService___create___dag" - }, - "task_definition_list": [ - { - "name": "HelloService___create___dag", - "description": "", - "type": "DAG", - "target_any_local_reference": { - "kind": "app_service", - "name": "HelloService" - }, - "attrs": { - "edges": [] - }, - "child_tasks_local_reference_list": [ - { - "kind": "app_task", - "name": "Task1" - } - ], - "variable_list": [], - "retries": "", - "timeout_secs": "" - }, - { - "name": "Task1", - "description": "", - "type": "EXEC", - "target_any_local_reference": { - "kind": "app_service", - "name": "HelloService" - }, - "attrs": { - "script_type": "sh", - "script": "echo 'Service create '" - }, - "child_tasks_local_reference_list": [], - "variable_list": [], - "retries": "", - "timeout_secs": "" - } - ], - "variable_list": [] - } - }, - { - "name": "action_start", - "description": "", - "type": "system", - "critical": true, - "runbook": { - "name": "HelloService___start___runbook", - "description": "", - "main_task_local_reference": { - "kind": "app_task", - "name": "HelloService___start___dag" - }, - "task_definition_list": [ - { - "name": "HelloService___start___dag", - "description": "", - "type": "DAG", - "target_any_local_reference": { - "kind": "app_service", - "name": "HelloService" - }, - "attrs": { - "edges": [] - }, - "child_tasks_local_reference_list": [ - { - "kind": "app_task", - "name": "Task1" - } - ], - "variable_list": [], - "retries": "", - "timeout_secs": "" - }, - { - "name": "Task1", - "description": "", - "type": "EXEC", - "target_any_local_reference": { - "kind": "app_service", - "name": "HelloService" - }, - "attrs": { - "script_type": "sh", - "script": "echo 'Service start '" - }, - "child_tasks_local_reference_list": [], - "variable_list": [], - "retries": "", - "timeout_secs": "" - } - ], - "variable_list": [] - } - }, - { - "name": "action_stop", - "description": "", - "type": "system", - "critical": true, - "runbook": { - "name": "HelloService___stop___runbook", - "description": "", - "main_task_local_reference": { - "kind": "app_task", - "name": "HelloService___stop___dag" - }, - "task_definition_list": [ - { - "name": "HelloService___stop___dag", - "description": "", - "type": "DAG", - "target_any_local_reference": { - "kind": "app_service", - "name": "HelloService" - }, - "attrs": { - "edges": [] - }, - "child_tasks_local_reference_list": [ - { - "kind": "app_task", - "name": "Task1" - } - ], - "variable_list": [], - "retries": "", - "timeout_secs": "" - }, - { - "name": "Task1", - "description": "", - "type": "EXEC", - "target_any_local_reference": { - "kind": "app_service", - "name": "HelloService" - }, - "attrs": { - "script_type": "sh", - "script": "echo 'Service stop '" - }, - 
"child_tasks_local_reference_list": [], - "variable_list": [], - "retries": "", - "timeout_secs": "" - } - ], - "variable_list": [] - } - }, - { - "name": "action_delete", - "description": "", - "type": "system", - "critical": true, - "runbook": { - "name": "HelloService___delete___runbook", - "description": "", - "main_task_local_reference": { - "kind": "app_task", - "name": "HelloService___delete___dag" - }, - "task_definition_list": [ - { - "name": "HelloService___delete___dag", - "description": "", - "type": "DAG", - "target_any_local_reference": { - "kind": "app_service", - "name": "HelloService" - }, - "attrs": { - "edges": [] - }, - "child_tasks_local_reference_list": [ - { - "kind": "app_task", - "name": "Task1" - } - ], - "variable_list": [], - "retries": "", - "timeout_secs": "" - }, - { - "name": "Task1", - "description": "", - "type": "EXEC", - "target_any_local_reference": { - "kind": "app_service", - "name": "HelloService" - }, - "attrs": { - "script_type": "sh", - "script": "echo 'Service delete'" - }, - "child_tasks_local_reference_list": [], - "variable_list": [], - "retries": "", - "timeout_secs": "" - } - ], - "variable_list": [] - } - }, - { - "name": "action_restart", - "description": "", - "type": "system", - "critical": true, - "runbook": { - "name": "HelloService___restart___runbook", - "description": "", - "main_task_local_reference": { - "kind": "app_task", - "name": "HelloService___restart___dag" - }, - "task_definition_list": [ - { - "name": "HelloService___restart___dag", - "description": "", - "type": "DAG", - "target_any_local_reference": { - "kind": "app_service", - "name": "HelloService" - }, - "attrs": { - "edges": [] - }, - "child_tasks_local_reference_list": [], - "variable_list": [], - "retries": "", - "timeout_secs": "" - } - ], - "variable_list": [] - } - }, - { - "name": "action_soft_delete", - "description": "", - "type": "system", - "critical": true, - "runbook": { - "name": "HelloService___soft_delete___runbook", - "description": "", - "main_task_local_reference": { - "kind": "app_task", - "name": "HelloService___soft_delete___dag" - }, - "task_definition_list": [ - { - "name": "HelloService___soft_delete___dag", - "description": "", - "type": "DAG", - "target_any_local_reference": { - "kind": "app_service", - "name": "HelloService" - }, - "attrs": { - "edges": [] - }, - "child_tasks_local_reference_list": [], - "variable_list": [], - "retries": "", - "timeout_secs": "" - } - ], - "variable_list": [] - } - } - ], - "container_spec": {} - } - ], - "package_definition_list": [ - { - "name": "HelloPackage", - "description": "Sample Package", - "type": "CUSTOM", - "options": { - "install_runbook": { - "name": "HelloPackage___install___runbook", - "description": "", - "main_task_local_reference": { - "kind": "app_task", - "name": "HelloPackage___install___dag" - }, - "task_definition_list": [ - { - "name": "HelloPackage___install___dag", - "description": "", - "type": "DAG", - "target_any_local_reference": { - "kind": "app_service", - "name": "HelloService" - }, - "attrs": { - "edges": [] - }, - "child_tasks_local_reference_list": [ - { - "kind": "app_task", - "name": "Task1" - } - ], - "variable_list": [], - "retries": "", - "timeout_secs": "" - }, - { - "name": "Task1", - "description": "", - "type": "EXEC", - "target_any_local_reference": { - "kind": "app_service", - "name": "HelloService" - }, - "attrs": { - "script_type": "sh", - "script": "echo 'Package install'" - }, - "child_tasks_local_reference_list": [], - "variable_list": [], - "retries": 
"", - "timeout_secs": "" - } - ], - "variable_list": [] - }, - "uninstall_runbook": { - "name": "HelloPackage___uninstall___runbook", - "description": "", - "main_task_local_reference": { - "kind": "app_task", - "name": "HelloPackage___uninstall___dag" - }, - "task_definition_list": [ - { - "name": "HelloPackage___uninstall___dag", - "description": "", - "type": "DAG", - "target_any_local_reference": { - "kind": "app_service", - "name": "HelloService" - }, - "attrs": { - "edges": [] - }, - "child_tasks_local_reference_list": [ - { - "kind": "app_task", - "name": "Task1" - } - ], - "variable_list": [], - "retries": "", - "timeout_secs": "" - }, - { - "name": "Task1", - "description": "", - "type": "EXEC", - "target_any_local_reference": { - "kind": "app_service", - "name": "HelloService" - }, - "attrs": { - "script_type": "sh", - "script": "echo 'Package uninstall'" - }, - "child_tasks_local_reference_list": [], - "variable_list": [], - "retries": "", - "timeout_secs": "" - } - ], - "variable_list": [] - } - }, - "service_local_reference_list": [ - { - "kind": "app_service", - "name": "HelloService" - } - ], - "variable_list": [], - "version": "" - }, - { - "name": "centos_disk", - "description": "", - "type": "SUBSTRATE_IMAGE", - "options": { - "name": "centos_disk", - "description": "", - "resources": { - "image_type": "DISK_IMAGE", - "source_uri": "http://download.nutanix.com/calm/CentOS-7-x86_64-1810.qcow2", - "architecture": "X86_64", - "version": { - "product_version": "1.0", - "product_name": "centos_disk" - }, - "checksum": {} - } - }, - "service_local_reference_list": [], - "variable_list": [], - "version": "" - } - ], - "published_service_definition_list": [], - "substrate_definition_list": [ - { - "name": "HelloSubstrate", - "description": "AHV VM Substrate", - "type": "AHV_VM", - "os_type": "Linux", - "create_spec": { - "name": "HelloVm", - "categories": { - "AppFamily": "Demo", - "AppType": "Default" - }, - "resources": { - "nic_list": [ - { - "network_function_nic_type": "INGRESS", - "nic_type": "NORMAL_NIC", - "network_function_chain_reference": null, - "mac_address": "", - "ip_endpoint_list": [] - } - ], - "num_vcpus_per_socket": 1, - "num_sockets": 2, - "memory_size_mib": 4096, - "power_state": "ON", - "account_uuid": "", - "gpu_list": [], - "disk_list": [ - { - "data_source_reference": { - "name": "centos_disk", - "kind": "app_package", - "uuid": "" - }, - "device_properties": { - "device_type": "DISK", - "disk_address": { - "device_index": 0, - "adapter_type": "SCSI" - } - }, - "disk_size_mib": 0 - } - ], - "guest_customization": { - "sysprep": null, - "cloud_init": { - "user_data": "#cloud-config\nusers:\n- name: centos\n ssh-authorized-keys:\n - ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDN58B138h7FqAKU0EXuL+vbzDluVlkYu1Qr1Y6XBfeRt01DgxssXIIUbGEKAP9P4RrAHqRcCjaMS4wqUUYxOLvT1UCxIGulb3Cfu2L9i+V1NWN/4j3j20elQwhlvYD0sqm5iCHsVhiHlLroapEScBDjo4MUUQ1tTwE/p2gHAhu45Vy28x6N8GC7sh9AvTVIO7HcsQNU/71YSjUmQTmYBIomV0+DCMzYekHWvGjHLiWlP6MIhuiunXTS8PjNj4h9hD36YlCTxWGuX9niX9XeyUAcD7n92ZFiR95WZ7OxX+JruCMMAMo8DiXhhssXZvxzqBmALGPk2ssdDMi2qzBSlRj\n sudo:\n - ALL=(ALL) NOPASSWD:ALL\n" - } - }, - "serial_port_list": [], - "boot_config": { - "boot_device": { - "disk_address": { - "device_index": 0, - "adapter_type": "SCSI" - } - } - } - } - }, - "variable_list": [], - "action_list": [ - { - "name": "pre_action_create", - "description": "", - "type": "fragment", - "critical": false, - "runbook": { - "name": "HelloSubstrate___pre_create___runbook", - "description": "", - "main_task_local_reference": { 
- "kind": "app_task", - "name": "HelloSubstrate___pre_create___dag" - }, - "task_definition_list": [ - { - "name": "HelloSubstrate___pre_create___dag", - "description": "", - "type": "DAG", - "target_any_local_reference": { - "kind": "app_substrate", - "name": "HelloSubstrate" - }, - "attrs": { - "edges": [] - }, - "child_tasks_local_reference_list": [ - { - "kind": "app_task", - "name": "Task1" - } - ], - "variable_list": [], - "retries": "", - "timeout_secs": "" - }, - { - "name": "Task1", - "description": "", - "type": "EXEC", - "target_any_local_reference": { - "kind": "app_substrate", - "name": "HelloSubstrate" - }, - "attrs": { - "script_type": "static", - "script": "print 'Pre Create task runs before VM is created'" - }, - "child_tasks_local_reference_list": [], - "variable_list": [], - "retries": "", - "timeout_secs": "" - } - ], - "variable_list": [] - } - }, - { - "name": "post_action_delete", - "description": "", - "type": "fragment", - "critical": false, - "runbook": { - "name": "HelloSubstrate___post_delete___runbook", - "description": "", - "main_task_local_reference": { - "kind": "app_task", - "name": "HelloSubstrate___post_delete___dag" - }, - "task_definition_list": [ - { - "name": "HelloSubstrate___post_delete___dag", - "description": "", - "type": "DAG", - "target_any_local_reference": { - "kind": "app_substrate", - "name": "HelloSubstrate" - }, - "attrs": { - "edges": [] - }, - "child_tasks_local_reference_list": [ - { - "kind": "app_task", - "name": "Task1" - } - ], - "variable_list": [], - "retries": "", - "timeout_secs": "" - }, - { - "name": "Task1", - "description": "", - "type": "EXEC", - "target_any_local_reference": { - "kind": "app_substrate", - "name": "HelloSubstrate" - }, - "attrs": { - "script_type": "static", - "script": "print 'Post delete task runs after VM is deleted'" - }, - "child_tasks_local_reference_list": [], - "variable_list": [], - "retries": "", - "timeout_secs": "" - } - ], - "variable_list": [] - } - } - ], - "readiness_probe": { - "connection_type": "SSH", - "connection_port": 22, - "connection_protocol": "", - "timeout_secs": "", - "delay_secs": "60", - "retries": "5", - "address": "@@{platform.status.resources.nic_list[0].ip_endpoint_list[0].ip}@@", - "disable_readiness_probe": true - }, - "editables": {} - } - ], - "credential_definition_list": [ - { - "name": "Centos", - "description": "", - "type": "KEY", - "username": "centos", - "secret": { - "attrs": { - "is_secret_modified": true - }, - "value": "-----BEGIN RSA PRIVATE 
KEY-----\nMIIEogIBAAKCAQEAzefAdd/IexagClNBF7i/r28w5blZZGLtUK9WOlwX3kbdNQ4M\nbLFyCFGxhCgD/T+EawB6kXAo2jEuMKlFGMTi709VAsSBrpW9wn7ti/YvldTVjf+I\n949tHpUMIZb2A9LKpuYgh7FYYh5S66GqREnAQ46ODFFENbU8BP6doBwIbuOVctvM\nejfBgu7IfQL01SDux3LEDVP+9WEo1JkE5mASKJldPgwjM2HpB1rxoxy4lpT+jCIb\norp100vD4zY+IfYQ9+mJQk8Vhrl/Z4l/V3slAHA+5/dmRYkfeVmezsV/ia7gjDAD\nKPA4l4YbLF2b8c6gZgCxj5NrLHQzItqswUpUYwIDAQABAoIBAALbyKEPohsMsDHE\ngHRXBRjnPHC2rIYyFg+sh3rC7t/lRZ/oGdsOcZdU+a0XLcr/wpOxC4KRDI15k6XA\nS9ccBfctjPAUu09/AQXouN0xu0Jjad2RTRz8DuyW7swD6lbDlTr7meWtsXJB79kZ\nQvcKERMUW7t8YKvgNWeaWKTcOW+/Tw1cXwYhL4+1nUowQHKACUVIojvI1MWbS2eK\nLI9X6OjTwKs8Zh/BR3UGRUA7kvjJBEWAeuVElWkKtHYw3oSnwhQEfFAz5aNp8yMt\nTuf0zJowtwpHEZARgzyWtsNb2AINMpF05kCH707K52PLK7wwF83C1CnbaCXC8D58\nwjbFFoUCgYEA5C2CUsr8qXQcMH1s/MJ0cX5CNNbQhrQqIi74uhoQ4e5NzW0g1dne\nryQNkesCjfw3+NzRmuwqwE9FWuLlVmhyeGVdCANU0F7uawsTI0W5bKghL9lbmCo/\nrn9IqpYPLHlVe3RoG4gv9WMvQKBhBzHR9QwYjBgA93u2utaFbgnKtF8CgYEA5wMD\nvJp9uZgI8f+e+0jjm9F+WnqiQ4/u2fSrSNLWFMY7Mk+ERCls8C1hFgvP68GeJLlf\nfbxh16q6WG4BE/CVub5Us1LlTk5o9fACaWgsY8vQ2Kkw4eeWW2xpBIVDAArN/TlR\n57HEq0taIcWOgqQuAVnSmIrHfSmogZqxnlPm/n0CgYBJEPGchXem45FrTZ6qybHz\n7dRRi0A3AlZDn/rxzQr+7gpn77wF/sA6ITdfNHwa71sVC9Dq5RlDf57b3bBGnSea\nugqyWC6+kseR63fzYY5g5u3Jnraf4VeHDyc7yNWPDkN7RulXd6Kz11D4RuMLJL7w\n0zDBG6ajmTAIYE1V0TjhzQKBgAHXCk6URHAX/+o9iPyjZfy1vHXoCCbVnKdrxXl9\nBGuB8LLD41/PnKiFFALi8TMFhcbxGwubVoQrPfS6W5PXFLo4A+CxxlXLVhavZLWG\nOORKmU1Ae70w5QexY1y/2vrDG0TrEfXweoqKvRsLwcjUmfE0z/KbGF+s10TBAK0s\nKejZAoGAVSR1iD62vawranCbe9+ZyQdRD2oKOYNpwgVzcc5i+NCKjXTatabjN2i+\nbFOsvlBIJyEwbYRdycv9ZXnm65y55l96JtqNM2mQDgRNRMCMlgz0gNyUs0zLgp6X\nIkxCuZ8PUq91AcI83W9pdl4XHXBxmin2RUPGC2n/+lFSSrrrksM=\n-----END RSA PRIVATE KEY-----" - }, - "editables": {} - } - ], - "app_profile_list": [ - { - "name": "HelloProfile", - "deployment_create_list": [ - { - "published_service_local_reference_list": [], - "package_local_reference_list": [ - { - "kind": "app_package", - "name": "HelloPackage" - } - ], - "substrate_local_reference": { - "kind": "app_substrate", - "name": "HelloSubstrate" - }, - "depends_on_list": [], - "variable_list": [], - "action_list": [], - "min_replicas": "1", - "default_replicas": "", - "max_replicas": "1", - "type": "GREENFIELD", - "name": "HelloDeployment", - "options": {}, - "description": "Sample Deployment", - "editables": {} - } - ], - "variable_list": [], - "action_list": [ - { - "name": "Restore_test_r", - "description": "", - "type": "user", - "critical": true, - "runbook": { - "name": "Restore_test_r_runbook", - "description": "", - "main_task_local_reference": { - "kind": "app_task", - "name": "DAG_Task_Restore_test_r" - }, - "task_definition_list": [ - { - "name": "DAG_Task_Restore_test_r", - "description": "", - "type": "DAG", - "attrs": { - "edges": [] - }, - "child_tasks_local_reference_list": [ - { - "kind": "app_task", - "name": "Call_Config_Task_Restore_test_r" - } - ], - "variable_list": [], - "retries": "", - "timeout_secs": "" - }, - { - "name": "Call_Config_Task_Restore_test_r", - "description": "", - "type": "CALL_CONFIG", - "target_any_local_reference": { - "kind": "app_blueprint_deployment", - "name": "HelloDeployment" - }, - "attrs": { - "config_spec_reference": { - "kind": "app_config_spec", - "name": "test_r" - } - }, - "child_tasks_local_reference_list": [], - "variable_list": [], - "retries": "", - "timeout_secs": "" - } - ], - "variable_list": [] - } - }, - { - "name": "Snapshot_test_s", - "description": "", - "type": "user", - "critical": true, - "runbook": { - "name": "Snapshot_test_s_runbook", - "description": "", - 
"main_task_local_reference": { - "kind": "app_task", - "name": "DAG_Task_Snapshot_test_s" - }, - "task_definition_list": [ - { - "name": "DAG_Task_Snapshot_test_s", - "description": "", - "type": "DAG", - "attrs": { - "edges": [] - }, - "child_tasks_local_reference_list": [ - { - "kind": "app_task", - "name": "Call_Config_Task_Snapshot_test_s" - } - ], - "variable_list": [], - "retries": "", - "timeout_secs": "" - }, - { - "name": "Call_Config_Task_Snapshot_test_s", - "description": "", - "type": "CALL_CONFIG", - "target_any_local_reference": { - "kind": "app_blueprint_deployment", - "name": "HelloDeployment" - }, - "attrs": { - "config_spec_reference": { - "kind": "app_config_spec", - "name": "test_s" - } - }, - "child_tasks_local_reference_list": [], - "variable_list": [], - "retries": "", - "timeout_secs": "" - } - ], - "variable_list": [] - } - } - ], - "snapshot_config_list": [ - { - "name": "test_s", - "description": "", - "type": "AHV_SNAPSHOT", - "variable_list": [ - { - "name": "snapshot_name", - "description": "", - "type": "LOCAL", - "label": "", - "attrs": {}, - "val_type": "STRING", - "value": "test_s", - "data_type": "BASE", - "editables": { - "value": true - }, - "is_hidden": false, - "is_mandatory": true - }, - { - "name": "snapshot_type", - "description": "", - "type": "LOCAL", - "label": "", - "attrs": {}, - "val_type": "STRING", - "value": "CRASH_CONSISTENT", - "data_type": "BASE", - "editables": { - "value": true - }, - "is_hidden": false, - "is_mandatory": true - } - ], - "config_reference_list": [ - { - "kind": "app_config_spec", - "name": "test_r" - } - ], - "attrs_list": [ - { - "target_any_local_reference": { - "kind": "app_blueprint_deployment", - "name": "HelloDeployment" - }, - "snapshot_location_type": "LOCAL", - "num_of_replicas": "ONE" - } - ] - } - ], - "restore_config_list": [ - { - "name": "test_r", - "description": "", - "type": "AHV_RESTORE", - "variable_list": [ - { - "name": "snapshot_uuids", - "description": "", - "type": "LOCAL", - "label": "", - "attrs": {}, - "val_type": "STRING", - "value": "", - "data_type": "BASE", - "editables": { - "value": true - }, - "is_hidden": false, - "is_mandatory": true - }, - { - "name": "delete_vm_post_restore", - "description": "", - "type": "LOCAL", - "label": "", - "attrs": {}, - "val_type": "STRING", - "value": "false", - "data_type": "BASE", - "editables": { - "value": true - }, - "is_hidden": false, - "is_mandatory": true - } - ], - "config_reference_list": [], - "attrs_list": [ - { - "target_any_local_reference": { - "kind": "app_blueprint_deployment", - "name": "HelloDeployment" - }, - "delete_vm_post_restore": false, - "snapshot_location_type": "LOCAL" - } - ] - } - ] - } - ], - "default_credential_local_reference": { - "kind": "app_credential", - "name": "Centos" - } - } -} diff --git a/tests/snapshot_restore_blueprint/test_snapshot_restore_blueprint.py b/tests/snapshot_restore_blueprint/test_snapshot_restore_blueprint.py deleted file mode 100644 index 629d01cb..00000000 --- a/tests/snapshot_restore_blueprint/test_snapshot_restore_blueprint.py +++ /dev/null @@ -1,157 +0,0 @@ -import json -import os - -from calm.dsl.builtins import Service, Package, Substrate -from calm.dsl.builtins import Deployment, Profile, Blueprint -from calm.dsl.builtins import CalmTask as Task -from calm.dsl.builtins import action, ref, basic_cred -from calm.dsl.builtins import read_local_file -from calm.dsl.builtins import vm_disk_package, AhvVmDisk, AhvVmNic -from calm.dsl.builtins import AhvVmGC, AhvVmResources, AhvVm -from 
calm.dsl.builtins import ref, Ref -from calm.dsl.builtins import AppProtection - -DSL_CONFIG = json.loads(read_local_file(".tests/config.json")) -NETWORK1 = DSL_CONFIG["AHV"]["NETWORK"]["VLAN1211"] - -# SSH Credentials -CENTOS_USER = "centos" -CENTOS_KEY = read_local_file(os.path.join(".tests", "keys", "centos")) -CENTOS_PUBLIC_KEY = read_local_file(os.path.join(".tests", "keys", "centos_pub")) -CentosCred = basic_cred( - CENTOS_USER, - CENTOS_KEY, - name="Centos", - type="KEY", - default=True, -) - -# OS Image details for VM -CENTOS_IMAGE_SOURCE = "http://download.nutanix.com/calm/CentOS-7-x86_64-1810.qcow2" -CentosPackage = vm_disk_package( - name="centos_disk", - config={"image": {"source": CENTOS_IMAGE_SOURCE}}, -) - - -class HelloService(Service): - """Sample Service""" - - # Service Actions - @action - def __create__(): - # Step 1 - Task.Exec.ssh(name="Task1", script="echo 'Service create '") - - @action - def __start__(): - # Step 1 - Task.Exec.ssh(name="Task1", script="echo 'Service start '") - - @action - def __stop__(): - # Step 1 - Task.Exec.ssh(name="Task1", script="echo 'Service stop '") - - @action - def __delete__(): - # Step 1 - Task.Exec.ssh(name="Task1", script="echo 'Service delete'") - - -class HelloPackage(Package): - """Sample Package""" - - # Services created by installing this Package - services = [ref(HelloService)] - - # Package Actions - @action - def __install__(): - - # Step 1 - Task.Exec.ssh(name="Task1", script="echo 'Package install'") - - @action - def __uninstall__(): - - # Step 1 - Task.Exec.ssh(name="Task1", script="echo 'Package uninstall'") - - -class HelloVmResources(AhvVmResources): - - memory = 4 - vCPUs = 2 - cores_per_vCPU = 1 - disks = [ - AhvVmDisk.Disk.Scsi.cloneFromVMDiskPackage(CentosPackage, bootable=True), - ] - nics = [AhvVmNic(subnet=NETWORK1)] - - guest_customization = AhvVmGC.CloudInit( - config={ - "users": [ - { - "name": CENTOS_USER, - "ssh-authorized-keys": [CENTOS_PUBLIC_KEY], - "sudo": ["ALL=(ALL) NOPASSWD:ALL"], - } - ] - } - ) - - -class HelloVm(AhvVm): - - resources = HelloVmResources - categories = {"AppFamily": "Demo", "AppType": "Default"} - - -class HelloSubstrate(Substrate): - """AHV VM Substrate""" - - provider_type = "AHV_VM" - provider_spec = HelloVm - - # Substrate Actions - @action - def __pre_create__(): - - # Step 1 - Task.Exec.escript( - name="Task1", script="print 'Pre Create task runs before VM is created'" - ) - - @action - def __post_delete__(): - - # Step 1 - Task.Exec.escript( - name="Task1", script="print 'Post delete task runs after VM is deleted'" - ) - - -class HelloDeployment(Deployment): - """Sample Deployment""" - - packages = [ref(HelloPackage)] - substrate = ref(HelloSubstrate) - - -class HelloProfile(Profile): - - # Deployments under this profile - deployments = [HelloDeployment] - - snapshot_configs = [AppProtection.SnapshotConfig("test_s")] - restore_configs = [AppProtection.RestoreConfig("test_r")] - - -class Hello(Blueprint): - - credentials = [CentosCred] - services = [HelloService] - packages = [HelloPackage, CentosPackage] - substrates = [HelloSubstrate] - profiles = [HelloProfile] diff --git a/tests/snapshot_restore_blueprint/test_snapshot_restore_bp_create.py b/tests/snapshot_restore_blueprint/test_snapshot_restore_bp_create.py deleted file mode 100644 index 11ac04e9..00000000 --- a/tests/snapshot_restore_blueprint/test_snapshot_restore_bp_create.py +++ /dev/null @@ -1,127 +0,0 @@ -import pytest -import sys -import json -import time -import traceback -from click.testing import CliRunner 
-from distutils.version import LooseVersion as LV - -from calm.dsl.store import Version -from calm.dsl.cli import main as cli -from calm.dsl.log import get_logging_handle - -# Setting the recursion limit to max for -sys.setrecursionlimit(100000) - -LOG = get_logging_handle(__name__) - -BP_FILE_PATH = "tests/snapshot_restore_blueprint/test_snapshot_restore_blueprint.py" -BP_OUT_PATH = "tests/snapshot_restore_blueprint/test_snapshot_restore_blueprint.json" - -# calm_version -CALM_VERSION = Version.get_version("Calm") - - -@pytest.mark.skipif( - LV(CALM_VERSION) < LV("3.3.0"), - reason="Snapshot Restore feature is available from Calm v3.3.0", -) -class TestSnapshotRestoreBlueprint: - def setup_method(self): - """Method to instantiate to created_bp_list""" - - self.created_bp_list = [] - - def teardown_method(self): - """Method to delete creates bps and apps during tests""" - - for bp_name in self.created_bp_list: - LOG.info("Deleting Blueprint {}".format(bp_name)) - runner = CliRunner() - result = runner.invoke(cli, ["delete", "bp", bp_name]) - assert result.exit_code == 0 - - self.created_bp_list = [] - - def test_create_bp(self): - - runner = CliRunner() - created_dsl_bp_name = "Test_Snapshot_Restore_DSL_BP_{}".format(int(time.time())) - LOG.info("Creating Bp {}".format(created_dsl_bp_name)) - result = runner.invoke( - cli, - [ - "create", - "bp", - "--file={}".format(BP_FILE_PATH), - "--name={}".format(created_dsl_bp_name), - "--description='Test DSL Blueprint; to delete'", - ], - ) - - self.created_bp_list.append(created_dsl_bp_name) - if result.exit_code: - cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} - LOG.debug( - "Cli Response: {}".format( - json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) - ) - ) - LOG.debug( - "Traceback: \n{}".format( - "".join(traceback.format_tb(result.exc_info[2])) - ) - ) - pytest.fail("BP creation from python file failed") - - assert '"state": "ACTIVE"' in result.output - - LOG.info("Success") - - def test_compile(self): - - runner = CliRunner() - LOG.info("Compiling bp at {}".format(BP_FILE_PATH)) - result = runner.invoke( - cli, ["-vv", "compile", "bp", "--file={}".format(BP_FILE_PATH)] - ) - if result.exit_code: - cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} - LOG.debug( - "Cli Response: {}".format( - json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) - ) - ) - LOG.debug( - "Traceback: \n{}".format( - "".join(traceback.format_tb(result.exc_info[2])) - ) - ) - pytest.fail("BP compile command failed") - - generated_json = json.loads(result.output) - generated_json = generated_json["spec"] - generated_json["resources"].pop("client_attrs", None) - generated_json["resources"]["substrate_definition_list"][0]["create_spec"][ - "resources" - ]["nic_list"][0].pop("subnet_reference", None) - generated_json["resources"]["app_profile_list"][0].pop("patch_list", None) - - # Assert whether account_uuid is present in generated_json - sub_account_uuid = generated_json["resources"]["substrate_definition_list"][0][ - "create_spec" - ]["resources"]["account_uuid"] - assert sub_account_uuid != "" - - # Replace correct account uuid in known_json - known_json = json.loads(open(BP_OUT_PATH).read()) - known_json["resources"]["substrate_definition_list"][0]["create_spec"][ - "resources" - ]["account_uuid"] = sub_account_uuid - - # For versions > 3.4, cred_class is needed to cred-payload - if LV(CALM_VERSION) >= LV("3.4.0"): - for cred in known_json["resources"]["credential_definition_list"]: - 
cred["cred_class"] = "static" - - assert sorted(known_json.items()) == sorted(generated_json.items()) diff --git a/tests/testprep.py b/tests/testprep.py index db49c2d9..86b21051 100644 --- a/tests/testprep.py +++ b/tests/testprep.py @@ -11,6 +11,7 @@ from calm.dsl.constants import STRATOS from calm.dsl.log import get_logging_handle from calm.dsl.tools.utils import make_file_dir +from calm.dsl.builtins.models.helper.common import get_project LOG = get_logging_handle(__name__) VPC_LINUX_EP_PATH = "tests/tunnel_endpoints/linux_endpoint.py" @@ -406,6 +407,55 @@ def add_vpc_details(config): add_tunnel_details(config) +def add_protection_policy_details(config, config_header, project_name): + """Adds protection policy details of vmware/ahv snapshot projects for tests""" + + client = get_api_client() + LOG.info("Fetching protection policies") + params = {"length": 20, "offset": 0} + if project_name: + project = get_project(project_name) + params["filter"] = "project_reference=={}".format(project["metadata"]["uuid"]) + res, err = client.app_protection_policy.list(params) + if err: + LOG.error(err) + return + res = res.json()["entities"] + if not res: + LOG.warning("No protection policy found for project {}".format(project_name)) + return + + config[config_header]["PROJECT1"]["SNAPSHOT_POLICY"] = [] + for entity in res: + name = entity["status"]["name"] + for rule in entity["status"]["resources"]["app_protection_rule_list"]: + rule_name = rule["name"] + snapshot_policy_details = {"NAME": name, "RULE": rule_name} + config[config_header]["PROJECT1"]["SNAPSHOT_POLICY"].append( + snapshot_policy_details + ) + + +def add_vmw_snapshot_policy(config): + """Adds vmware snapshot policy project if it exists for tests""" + + project_name = "test_vmw_snapshot_policy_project" + project_exists = check_project_exists(project_name) + if project_exists: + add_project_details(config, "VMW_SNAPSHOT_PROJECTS", project_name) + add_protection_policy_details(config, "VMW_SNAPSHOT_PROJECTS", project_name) + + +def add_ahv_snapshot_policy(config): + """Adds ahv snapshot policy project if it exists for tests""" + + project_name = "test_snapshot_policy_project" + project_exists = check_project_exists(project_name) + if project_exists: + add_project_details(config, "AHV_SNAPSHOT_PROJECTS", project_name) + add_protection_policy_details(config, "AHV_SNAPSHOT_PROJECTS", project_name) + + def add_rerun_report_portal(config): config["reportportal"] = { "run_name": "runname", @@ -542,6 +592,8 @@ def add_provider_constants(config): add_directory_service_user_groups(config) add_project_details(config) add_vpc_details(config) +add_ahv_snapshot_policy(config) +add_vmw_snapshot_policy(config) add_rerun_report_portal(config) add_vpc_endpoints(config) add_approval_details(config) diff --git a/tests/unit/jsons/action_with_endpoint.json b/tests/unit/jsons/action_with_endpoint.json new file mode 100644 index 00000000..9ec10104 --- /dev/null +++ b/tests/unit/jsons/action_with_endpoint.json @@ -0,0 +1,56 @@ + { + "critical": false, + "type": "user", + "uuid": "bbcf9a71-54bf-8fcf-3c3a-29f57fc80a3a", + "runbook": { + "main_task_local_reference": { + "kind": "app_task", + "uuid": "876c5e1c-91be-2e8a-feca-b99d7799365c" + }, + "task_definition_list": [ + { + "target_any_local_reference": { + "kind": "app_service", + "uuid": "647a0917-e340-76a0-7fd4-9a8c5f1d8262" + }, + "uuid": "876c5e1c-91be-2e8a-feca-b99d7799365c", + "child_tasks_local_reference_list": [ + { + "kind": "app_task", + "uuid": "d3604aea-d534-42ad-7629-0ff7e851a038" + } + ], + "attrs": 
{ + "edges": [] + }, + "type": "DAG", + "variable_list": [], + "name": "2902ea52_dag" + }, + { + "target_any_local_reference": { + "kind": "app_service", + "uuid": "647a0917-e340-76a0-7fd4-9a8c5f1d8262" + }, + "uuid": "d3604aea-d534-42ad-7629-0ff7e851a038", + "exec_target_reference": { + "kind": "app_endpoint", + "uuid": "c1b409e3-3135-42d6-615c-2d4c0361f43b", + "name": "ep2" + }, + "child_tasks_local_reference_list": [], + "attrs": { + "script_type": "sh", + "script": "echo \"Hello\"" + }, + "type": "EXEC", + "variable_list": [], + "name": "Task1" + } + ], + "name": "ed5765cd_runbook", + "variable_list": [], + "uuid": "5e0b93bd-b871-cafb-8750-15378cd2d18d" + }, + "name": "action1" + } diff --git a/tests/unit/jsons/environment.json b/tests/unit/jsons/environment.json new file mode 100644 index 00000000..8c85dc84 --- /dev/null +++ b/tests/unit/jsons/environment.json @@ -0,0 +1,242 @@ +{ + "status": { + "description": "", + "uuid": "", + "state": "ACTIVE", + "message_list": [], + "resources": { + "infra_inclusion_list": [ + { + "subnet_references": [ + { "uuid": "" } + ], + "type": "nutanix_pc", + "cluster_references": [ + { "uuid": "" } + ], + "account_reference": { + "kind": "account", + "name": "", + "uuid": "" + }, + "default_subnet_reference": { + "uuid": "" + } + } + ], + "substrate_definition_list": [ + { + "description": "", + "action_list": [], + "message_list": [], + "uuid": "", + "state": "ACTIVE", + "readiness_probe": { + "connection_type": "SSH", + "retries": "5", + "connection_protocol": "", + "disable_readiness_probe": true, + "address": "", + "delay_secs": "0", + "connection_port": 22 + }, + "editables": {}, + "os_type": "Linux", + "type": "AHV_VM", + "create_spec": { + "name": "vm-@@{calm_array_index}@@-@@{calm_time}@@", + "categories": {}, + "availability_zone_reference": null, + "backup_policy": null, + "type": "", + "cluster_reference": { + "kind": "cluster", + "type": "", + "name": "", + "uuid": "" + }, + "resources": { + "nic_list": [], + "parent_reference": null, + "guest_tools": null, + "num_vcpus_per_socket": 1, + "num_sockets": 1, + "serial_port_list": [], + "gpu_list": [], + "memory_size_mib": 1024, + "power_state": "ON", + "hardware_clock_timezone": "", + "guest_customization": null, + "type": "", + "account_uuid": "", + "boot_config": { + "boot_device": { + "type": "", + "disk_address": { + "adapter_type": "SCSI", + "device_index": 0, + "type": "" + } + }, + "type": "", + "boot_type": "LEGACY", + "mac_address": "" + }, + "disk_list": [ + { + "data_source_reference": { + "kind": "image", + "type": "", + "name": "Centos7HadoopMaster", + "uuid": "" + }, + "type": "", + "disk_size_mib": 0, + "volume_group_reference": null, + "device_properties": { + "type": "", + "device_type": "DISK", + "disk_address": { + "adapter_type": "SCSI", + "device_index": 0, + "type": "" + } + } + } + ] + } + }, + "variable_list": [], + "name": "Untitled" + } + ], + "credential_definition_list": [ + { + "username": "admin", + "description": "", + "state": "ACTIVE", + "message_list": [], + "uuid": "", + "secret": { + "attrs": { "is_secret_modified": false, "secret_reference": {} } + }, + "editables": {}, + "cred_class": "static", + "type": "PASSWORD", + "name": "test_creds" + } + ] + }, + "name": "test_decompile_env" + }, + "spec": { + "name": "test_decompile_env", + "resources": { + "infra_inclusion_list": [ + { + "cluster_references": [ + { "uuid": "" } + ], + "default_subnet_reference": { + "uuid": "" + }, + "account_reference": { + "kind": "account", + "uuid": "" + }, + "vpc_references": 
[], + "subnet_references": [ + { "uuid": "" } + ], + "type": "nutanix_pc" + } + ], + "substrate_definition_list": [ + { + "uuid": "", + "action_list": [], + "readiness_probe": { + "connection_type": "SSH", + "retries": "5", + "connection_protocol": "", + "connection_port": 22, + "disable_readiness_probe": true + }, + "os_type": "Linux", + "type": "AHV_VM", + "create_spec": { + "resources": { + "nic_list": [], + "power_state": "ON", + "num_vcpus_per_socket": 1, + "num_sockets": 1, + "gpu_list": [], + "memory_size_mib": 1024, + "boot_config": { + "boot_device": { + "disk_address": { "device_index": 0, "adapter_type": "SCSI" } + }, + "boot_type": "LEGACY" + }, + "account_uuid": "", + "disk_list": [ + { + "data_source_reference": { + "kind": "image", + "name": "Centos7HadoopMaster", + "uuid": "" + }, + "device_properties": { + "disk_address": { + "device_index": 0, + "adapter_type": "SCSI" + }, + "device_type": "DISK" + } + } + ] + }, + "name": "vm-@@{calm_array_index}@@-@@{calm_time}@@", + "categories": {}, + "cluster_reference": { + "name": "", + "uuid": "" + } + }, + "variable_list": [], + "name": "Untitled" + } + ], + "credential_definition_list": [ + { + "username": "admin", + "uuid": "", + "secret": { "attrs": { "is_secret_modified": false } }, + "cred_class": "static", + "type": "PASSWORD", + "name": "test_creds" + } + ] + }, + "description": "" + }, + "api_version": "3.0", + "metadata": { + "owner_reference": { + "kind": "user", + "uuid": "", + "name": "admin" + }, + "kind": "environment", + "uuid": "", + "project_reference": { + "kind": "project", + "name": "test_dsl_decompile", + "uuid": "" + }, + "spec_version": 2, + "name": "test_decompile_env", + "__name__": "test_decompile_env", + "__doc__": "" + } +} diff --git a/tests/unit/jsons/project.json b/tests/unit/jsons/project.json new file mode 100644 index 00000000..2e3d234b --- /dev/null +++ b/tests/unit/jsons/project.json @@ -0,0 +1,154 @@ +{ + "status": { + "state": "COMPLETE", + "execution_context": { + "task_uuid": [""] + }, + "name": "test_dsl_decompile", + "resources": { + "account_reference_list": [ + { "kind": "account", "uuid": "" } + ], + "resource_domain": { "resources": [] }, + "directory_reference_list": [], + "enable_directory_and_identity_provider_shortlist": false, + "identity_providers_reference_list": [], + "user_reference_list": [ + { + "kind": "user", + "name": "", + "uuid": "" + } + ], + "default_subnet_reference": { + "kind": "subnet", + "uuid": "" + }, + "environment_reference_list": [ + { + "kind": "environment", + "uuid": "" + }, + { + "kind": "environment", + "uuid": "" + }, + { + "kind": "environment", + "uuid": "" + }, + { + "kind": "environment", + "uuid": "" + }, + { + "kind": "environment", + "uuid": "" + } + ], + "is_default": false, + "tunnel_reference_list": [], + "external_user_group_reference_list": [], + "default_environment_reference": { + "kind": "environment", + "uuid": "" + }, + "subnet_reference_list": [ + { + "kind": "subnet", + "name": "", + "uuid": "" + } + ], + "cluster_reference_list": [ + { "kind": "cluster", "uuid": "" } + ], + "vpc_reference_list": [], + "external_network_list": [] + }, + "description": "" + }, + "spec": { + "name": "test_dsl_decompile", + "resources": { + "account_reference_list": [ + { "kind": "account", "uuid": "" } + ], + "resource_domain": { "resources": [] }, + "directory_reference_list": [], + "enable_directory_and_identity_provider_shortlist": false, + "default_subnet_reference": { + "kind": "subnet", + "uuid": "" + }, + "user_reference_list": [ + { + "kind": 
"user", + "name": "", + "uuid": "" + } + ], + "identity_providers_reference_list": [], + "environment_reference_list": [ + { + "kind": "environment", + "uuid": "" + }, + { + "kind": "environment", + "uuid": "" + }, + { + "kind": "environment", + "uuid": "" + }, + { + "kind": "environment", + "uuid": "" + }, + { + "kind": "environment", + "uuid": "" + } + ], + "tunnel_reference_list": [], + "external_user_group_reference_list": [], + "cluster_reference_list": [ + { "kind": "cluster", "uuid": "" } + ], + "subnet_reference_list": [ + { + "kind": "subnet", + "name": "", + "uuid": "" + } + ], + "vpc_reference_list": [], + "external_network_list": [], + "default_environment_reference": { + "kind": "environment", + "uuid": "" + } + } + }, + "api_version": "3.1", + "metadata": { + "last_update_time": "2023-11-24T11:48:20Z", + "kind": "project", + "uuid": "", + "project_reference": { + "kind": "project", + "name": "test_dsl_decompile", + "uuid": "" + }, + "creation_time": "2023-11-03T11:00:05Z", + "spec_version": 14, + "categories_mapping": {}, + "owner_reference": { + "kind": "user", + "uuid": "", + "name": "admin" + }, + "categories": {} + } +} diff --git a/tests/unit/jsons/runbook_http_var.json b/tests/unit/jsons/runbook_http_var.json new file mode 100644 index 00000000..8bf2e5a1 --- /dev/null +++ b/tests/unit/jsons/runbook_http_var.json @@ -0,0 +1,292 @@ +{ + "status": { + "description": "", + "message_list": [], + "name": "test_var_decompile", + "state": "ACTIVE", + "critical": false, + "attrs": {}, + "resources": { + "endpoints_information": [], + "endpoint_definition_list": [], + "credential_definition_list": [ + { + "username": "admin", + "description": "", + "state": "ACTIVE", + "message_list": [], + "uuid": "4e0f475c-acf9-5e00-e073-d03e3b7770b6", + "secret": { + "attrs": { + "is_secret_modified": false, + "secret_reference": {} + } + }, + "editables": {}, + "cred_class": "static", + "type": "PASSWORD", + "name": "c111" + } + ], + "runbook": { + "task_definition_list": [ + { + "retries": "0", + "description": "", + "type": "DAG", + "uuid": "9c21fd89-d2c7-084a-a1b5-f40ee8fc275f", + "child_tasks_local_reference_list": [ + { + "kind": "app_task", + "name": "Task 1", + "uuid": "2f3d09c3-7100-a908-b4ac-66c2d76c4315" + } + ], + "state": "ACTIVE", + "attrs": { + "edges": [], + "type": "" + }, + "timeout_secs": "0", + "inherit_target": false, + "message_list": [], + "variable_list": [], + "name": "a8d5a418_dag" + }, + { + "target_any_local_reference": { + "kind": "app_endpoint", + "name": "ep2", + "uuid": "c1b409e3-3135-42d6-615c-2d4c0361f43b" + }, + "retries": "0", + "description": "", + "type": "EXEC", + "uuid": "2f3d09c3-7100-a908-b4ac-66c2d76c4315", + "child_tasks_local_reference_list": [], + "state": "ACTIVE", + "attrs": { + "script_type": "sh", + "type": "", + "command_line_args": "", + "exit_status": [], + "script": "echo \"hello\"" + }, + "timeout_secs": "0", + "inherit_target": false, + "message_list": [], + "variable_list": [], + "name": "Task 1" + } + ], + "description": "", + "message_list": [], + "uuid": "ed37a255-f657-8a76-affc-672903587318", + "state": "ACTIVE", + "variable_list": [ + { + "regex": {}, + "val_type": "STRING", + "is_mandatory": false, + "description": "", + "data_type": "BASE", + "message_list": [], + "uuid": "4689ecc1-3a79-b4da-9616-0904187e19f8", + "value": "", + "label": "", + "state": "ACTIVE", + "attrs": { + "type": "" + }, + "editables": { + "value": true + }, + "is_hidden": false, + "type": "HTTP_LOCAL", + "options": { + "type": "HTTP", + "attrs": { + 
"expected_response_params": [ + { + "status": "SUCCESS", + "code": 200, + "type": "" + } + ], + "request_body": "", + "retry_count": 1, + "url": "/a/b/c", + "response_paths": { + "http_dyn_var_get": "$.x.y" + }, + "retry_interval": 1, + "proxy_type": "", + "headers": [], + "authentication": { + "username": "admin", + "password": { + "attrs": { + "is_secret_modified": false, + "secret_reference": {} + } + }, + "type": "basic" + }, + "tls_verify": false, + "content_type": "application/json", + "connection_timeout": 120, + "type": "HTTP", + "method": "GET" + } + }, + "name": "http_dyn_var_get" + } + ], + "main_task_local_reference": { + "kind": "app_task", + "name": "a8d5a418_dag", + "uuid": "9c21fd89-d2c7-084a-a1b5-f40ee8fc275f" + }, + "name": "3d01474d_runbook" + }, + "type": "workflow", + "default_target_reference": { + "kind": "app_endpoint", + "name": "ep2", + "uuid": "c1b409e3-3135-42d6-615c-2d4c0361f43b" + } + } + }, + "spec": { + "name": "test_var_decompile", + "resources": { + "runbook": { + "main_task_local_reference": { + "kind": "app_task", + "uuid": "9c21fd89-d2c7-084a-a1b5-f40ee8fc275f" + }, + "task_definition_list": [ + { + "attrs": { + "edges": [] + }, + "type": "DAG", + "name": "a8d5a418_dag", + "child_tasks_local_reference_list": [ + { + "kind": "app_task", + "uuid": "2f3d09c3-7100-a908-b4ac-66c2d76c4315" + } + ], + "uuid": "9c21fd89-d2c7-084a-a1b5-f40ee8fc275f" + }, + { + "target_any_local_reference": { + "kind": "app_endpoint", + "uuid": "c1b409e3-3135-42d6-615c-2d4c0361f43b", + "name": "ep2" + }, + "uuid": "2f3d09c3-7100-a908-b4ac-66c2d76c4315", + "inherit_target": false, + "child_tasks_local_reference_list": [], + "attrs": { + "script_type": "sh", + "script": "echo \"hello\"" + }, + "type": "EXEC", + "variable_list": [], + "name": "Task 1" + } + ], + "name": "3d01474d_runbook", + "variable_list": [ + { + "val_type": "STRING", + "name": "http_dyn_var_get", + "data_type": "BASE", + "uuid": "4689ecc1-3a79-b4da-9616-0904187e19f8", + "label": "", + "attrs": {}, + "editables": { + "value": true + }, + "is_hidden": false, + "type": "HTTP_LOCAL", + "options": { + "type": "HTTP", + "attrs": { + "expected_response_params": [ + { + "status": "SUCCESS", + "code": "200" + } + ], + "headers": [], + "url": "/a/b/c", + "response_paths": { + "http_dyn_var_get": "$.x.y" + }, + "retry_interval": 1, + "retry_count": 1, + "authentication": { + "username": "admin", + "password": { + "attrs": { + "is_secret_modified": false + } + }, + "type": "basic" + }, + "content_type": "application/json", + "connection_timeout": 120, + "type": "HTTP", + "method": "GET" + } + }, + "description": "" + } + ], + "uuid": "ed37a255-f657-8a76-affc-672903587318" + }, + "endpoint_definition_list": [], + "credential_definition_list": [ + { + "username": "admin", + "uuid": "4e0f475c-acf9-5e00-e073-d03e3b7770b6", + "secret": { + "attrs": { + "is_secret_modified": false + } + }, + "cred_class": "static", + "type": "PASSWORD", + "name": "c111" + } + ], + "default_target_reference": { + "kind": "app_endpoint", + "uuid": "c1b409e3-3135-42d6-615c-2d4c0361f43b", + "name": "ep2" + } + } + }, + "api_version": "3.0", + "metadata": { + "last_update_time": "1696930012521107", + "owner_reference": { + "kind": "user", + "uuid": "00000000-0000-0000-0000-000000000000", + "name": "admin" + }, + "kind": "runbook", + "uuid": "16d5478a-ce86-5393-7f6f-b6c5abd94b25", + "project_reference": { + "kind": "project", + "name": "default", + "uuid": "7702830e-e0c0-430b-91ed-f8259c5d72fe" + }, + "spec_version": 7, + "creation_time": 
"1695970830674979", + "name": "test_var_decompile" + } +} \ No newline at end of file diff --git a/tests/unit/test_action_with_endpoint_decompile.py b/tests/unit/test_action_with_endpoint_decompile.py new file mode 100644 index 00000000..97c14b93 --- /dev/null +++ b/tests/unit/test_action_with_endpoint_decompile.py @@ -0,0 +1,18 @@ +import os +import json +import shutil +from calm.dsl.builtins import ActionType +from calm.dsl.decompile.file_handler import init_bp_dir, get_bp_dir +from calm.dsl.decompile.action import render_action_template + + +def test_action_with_endpoint_decompile(): + _, _, _, scripts_dir = init_bp_dir("./tests/test_action_with_endpoint") + dir_path = os.path.dirname(os.path.realpath(__file__)) + file_path = os.path.join(dir_path, "./jsons/action_with_endpoint.json") + bp_dict = json.loads(open(file_path).read()) + cls = ActionType.decompile(bp_dict) + assert "target=ref(ep2)" in render_action_template( + cls + ), "expected endpoint ep2 to be decompiled in task" + shutil.rmtree(scripts_dir) diff --git a/tests/unit/test_decompile_http_var_with_basic_auth.py b/tests/unit/test_decompile_http_var_with_basic_auth.py new file mode 100644 index 00000000..3c2c44d2 --- /dev/null +++ b/tests/unit/test_decompile_http_var_with_basic_auth.py @@ -0,0 +1,34 @@ +import os +import json +import shutil +from calm.dsl.runbooks import RunbookType +from calm.dsl.decompile.variable import render_variable_template +from calm.dsl.decompile.file_handler import init_runbook_dir + + +def test_runbook_var_with_basic_creds(): + _, _, script_dir = init_runbook_dir( + "./tests/test_runbook_http_var_with_basic_auth/" + ) + dir_path = os.path.dirname(os.path.realpath(__file__)) + file_path = os.path.join(dir_path, "jsons/runbook_http_var.json") + rb_dict = json.loads(open(file_path).read()) + rb_cls = RunbookType.decompile(rb_dict["status"]["resources"]["runbook"]) + credentials_list = [] + rendered_credential_list = [] + variables = [] + for variable in rb_cls.variables: + variables.append( + render_variable_template( + variable, + "", + credentials_list=credentials_list, + rendered_credential_list=rendered_credential_list, + ) + ) + assert len(credentials_list) == 1, "Basic auth in variable was not decompiled" + shutil.rmtree(script_dir) + + +if __name__ == "__main__": + test_runbook_var_with_basic_creds() diff --git a/tests/unit/test_environment_decompile.py b/tests/unit/test_environment_decompile.py new file mode 100644 index 00000000..f22ddc0c --- /dev/null +++ b/tests/unit/test_environment_decompile.py @@ -0,0 +1,68 @@ +import os +import json +import shutil +from calm.dsl.builtins.models.environment import EnvironmentType +from calm.dsl.builtins import read_local_file +from calm.dsl.decompile.file_handler import init_environment_dir, get_environment_dir +from calm.dsl.decompile.environments import render_environment_template +from black import format_str, FileMode + +DSL_CONFIG = json.loads(read_local_file(".tests/config.json")) +NTNX_LOCAL_AZ = DSL_CONFIG["ACCOUNTS"]["NTNX_LOCAL_AZ"] + + +def test_environment_decompile(): + _, _, _, scripts_dir = init_environment_dir("./tests/test_environment_decompile") + dir_path = os.path.dirname(os.path.realpath(__file__)) + file_path = os.path.join(dir_path, "./jsons/environment.json") + environment_dict = json.loads(open(file_path).read()) + + environment_dict["status"]["resources"]["infra_inclusion_list"][0][ + "account_reference" + ]["uuid"] = NTNX_LOCAL_AZ["UUID"] + environment_dict["status"]["resources"]["infra_inclusion_list"][0][ + 
"account_reference" + ]["name"] = NTNX_LOCAL_AZ["NAME"] + + environment_dict["status"]["resources"]["infra_inclusion_list"][0][ + "subnet_references" + ][0]["name"] = NTNX_LOCAL_AZ["SUBNETS"][0]["NAME"] + + environment_dict["status"]["resources"]["infra_inclusion_list"][0][ + "subnet_references" + ][0]["uuid"] = NTNX_LOCAL_AZ["SUBNETS"][0]["UUID"] + + environment_dict["status"]["resources"]["infra_inclusion_list"][0][ + "default_subnet_reference" + ]["uuid"] = NTNX_LOCAL_AZ["SUBNETS"][0]["UUID"] + + environment_dict["status"]["resources"]["infra_inclusion_list"][0][ + "cluster_references" + ][0]["uuid"] = NTNX_LOCAL_AZ["SUBNETS"][0]["CLUSTER_UUID"] + + environment_dict["status"]["resources"]["infra_inclusion_list"][0][ + "cluster_references" + ][0]["name"] = NTNX_LOCAL_AZ["SUBNETS"][0]["CLUSTER"] + + environment_dict["status"]["resources"]["substrate_definition_list"][0][ + "create_spec" + ]["cluster_reference"]["uuid"] = NTNX_LOCAL_AZ["SUBNETS"][0]["CLUSTER_UUID"] + + environment_dict["status"]["resources"]["substrate_definition_list"][0][ + "create_spec" + ]["cluster_reference"]["name"] = NTNX_LOCAL_AZ["SUBNETS"][0]["CLUSTER"] + + cls = EnvironmentType.decompile(environment_dict["status"]["resources"]) + + assert str(cls.providers[0].account_reference) == NTNX_LOCAL_AZ["NAME"] + assert str(cls.substrates) == "[Untitled]" + assert ( + str(cls.providers[0].default_subnet_reference) + == NTNX_LOCAL_AZ["SUBNETS"][0]["NAME"] + ) + assert ( + str(cls.providers[0].cluster_reference_list[0]) + == NTNX_LOCAL_AZ["SUBNETS"][0]["CLUSTER"] + ) + + shutil.rmtree(scripts_dir) diff --git a/tests/unit/test_project_decompile.py b/tests/unit/test_project_decompile.py new file mode 100644 index 00000000..8434450f --- /dev/null +++ b/tests/unit/test_project_decompile.py @@ -0,0 +1,59 @@ +import os +import json +import shutil +from calm.dsl.builtins.models.project import ProjectType +from calm.dsl.builtins import read_local_file +from calm.dsl.decompile.file_handler import init_project_dir, get_project_dir +from calm.dsl.decompile.projects import render_project_template +from black import format_str, FileMode + +DSL_CONFIG = json.loads(read_local_file(".tests/config.json")) +NTNX_LOCAL_AZ = DSL_CONFIG["ACCOUNTS"]["NTNX_LOCAL_AZ"] + + +def test_project_decompile(): + _, _, _, scripts_dir = init_project_dir("./tests/test_project_decompile") + dir_path = os.path.dirname(os.path.realpath(__file__)) + file_path = os.path.join(dir_path, "./jsons/project.json") + project_dict = json.loads(open(file_path).read()) + + project_dict["status"]["resources"]["user_reference_list"][0]["name"] = DSL_CONFIG[ + "USERS" + ][0]["NAME"] + project_dict["status"]["resources"]["user_reference_list"][0]["uuid"] = DSL_CONFIG[ + "USERS" + ][0]["UUID"] + + project_dict["status"]["resources"]["default_subnet_reference"][ + "uuid" + ] = NTNX_LOCAL_AZ["SUBNETS"][0]["UUID"] + project_dict["status"]["resources"]["subnet_reference_list"][0][ + "name" + ] = NTNX_LOCAL_AZ["SUBNETS"][0]["NAME"] + project_dict["status"]["resources"]["subnet_reference_list"][0][ + "uuid" + ] = NTNX_LOCAL_AZ["SUBNETS"][0]["UUID"] + + project_dict["status"]["resources"]["account_reference_list"][0][ + "uuid" + ] = NTNX_LOCAL_AZ["UUID"] + project_dict["spec"]["resources"]["account_reference_list"][0][ + "uuid" + ] = NTNX_LOCAL_AZ["UUID"] + project_dict["metadata"]["uuid"] = DSL_CONFIG["PROJECTS"]["PROJECT1"]["UUID"] + project_dict["metadata"]["project_reference"]["uuid"] = DSL_CONFIG["PROJECTS"][ + "PROJECT1" + ]["UUID"] + + cls = ProjectType.decompile(project_dict) 
+ + data = render_project_template(cls) + + assert cls.users[0]["name"] == DSL_CONFIG["USERS"][0]["NAME"] + assert str(cls.providers[0].account_reference) == NTNX_LOCAL_AZ["NAME"] + assert ( + str(cls.providers[0].subnet_reference_list[0]) + == NTNX_LOCAL_AZ["SUBNETS"][0]["NAME"] + ) + + shutil.rmtree(scripts_dir)