From 8477573e87f92377d75cb78bc8e09db0f7d51d2f Mon Sep 17 00:00:00 2001 From: kapoor-nimish Date: Thu, 9 Nov 2023 17:05:18 +0530 Subject: [PATCH] Made initial changes. #CALM-39459 (#300) Environment decoupling support in DSL. Added provider compilation templates for ntnx, vmware, gcp, aws. Use command: calm decompile environment -n -p --------- Co-authored-by: Abhijeet Kaurav Co-authored-by: Utkarsh Bairolia (cherry picked from commit 70896ec9005e3ee5c2a4a5fd7c9d824c1c1c85ab) --- Documentation/docs/models/runbook/ndb_task.md | 401 ++++++++++++++++++ calm/dsl/builtins/models/providers.py | 16 + calm/dsl/cli/environment_commands.py | 34 +- calm/dsl/cli/environments.py | 103 ++++- calm/dsl/decompile/decompile_render.py | 38 +- calm/dsl/decompile/environments.py | 86 ++++ calm/dsl/decompile/file_handler.py | 34 ++ calm/dsl/decompile/provider.py | 21 + .../decompile/schemas/environments.py.jinja2 | 32 ++ .../decompile/schemas/provider_aws.py.jinja2 | 6 + .../decompile/schemas/provider_gcp.py.jinja2 | 6 + .../decompile/schemas/provider_ntnx.py.jinja2 | 13 + .../schemas/provider_vmware.py.jinja2 | 6 + test_decompile_env/assets/style.css | 186 ++++++++ test_decompile_env/environment.py | 66 +++ test_decompile_env/test-result.html | 273 ++++++++++++ test_decompile_env_aws/environment.py | 65 +++ .../specs/Untitled_1_provider_spec.yaml | 34 ++ .../specs/Untitled_provider_spec.yaml | 34 ++ test_decompile_env_gcp/environment.py | 65 +++ .../specs/Untitled_1_provider_spec.yaml | 68 +++ .../specs/Untitled_provider_spec.yaml | 68 +++ test_decompile_env_vmware/environment.py | 45 ++ .../specs/Untitled_create_spec_editables.yaml | 1 + .../specs/Untitled_provider_spec.yaml | 77 ++++ tests/unit/jsons/environment.json | 242 +++++++++++ tests/unit/test_environment_decompile.py | 22 + 27 files changed, 2039 insertions(+), 3 deletions(-) create mode 100644 Documentation/docs/models/runbook/ndb_task.md create mode 100644 calm/dsl/decompile/environments.py create mode 100644 calm/dsl/decompile/provider.py create mode 100644 calm/dsl/decompile/schemas/environments.py.jinja2 create mode 100644 calm/dsl/decompile/schemas/provider_aws.py.jinja2 create mode 100644 calm/dsl/decompile/schemas/provider_gcp.py.jinja2 create mode 100644 calm/dsl/decompile/schemas/provider_ntnx.py.jinja2 create mode 100644 calm/dsl/decompile/schemas/provider_vmware.py.jinja2 create mode 100644 test_decompile_env/assets/style.css create mode 100644 test_decompile_env/environment.py create mode 100644 test_decompile_env/test-result.html create mode 100644 test_decompile_env_aws/environment.py create mode 100644 test_decompile_env_aws/specs/Untitled_1_provider_spec.yaml create mode 100644 test_decompile_env_aws/specs/Untitled_provider_spec.yaml create mode 100644 test_decompile_env_gcp/environment.py create mode 100644 test_decompile_env_gcp/specs/Untitled_1_provider_spec.yaml create mode 100644 test_decompile_env_gcp/specs/Untitled_provider_spec.yaml create mode 100644 test_decompile_env_vmware/environment.py create mode 100644 test_decompile_env_vmware/specs/Untitled_create_spec_editables.yaml create mode 100644 test_decompile_env_vmware/specs/Untitled_provider_spec.yaml create mode 100644 tests/unit/jsons/environment.json create mode 100644 tests/unit/test_environment_decompile.py diff --git a/Documentation/docs/models/runbook/ndb_task.md b/Documentation/docs/models/runbook/ndb_task.md new file mode 100644 index 00000000..ac838924 --- /dev/null +++ b/Documentation/docs/models/runbook/ndb_task.md @@ -0,0 +1,401 @@ +# Nutanix Database (NDB) + 
+- NDB models represent the attributes required for performing various database operations.
+- NDB models available in DSL are Database, DatabaseServer, TimeMachine and Tag. They are further divided into different subclasses based on specific operations.
+- NDB models also include an OutputVariable model, which lets users set task variables for the output variables of an operation.
+
+## DatabaseServer:
+
+This model provides attributes for the server-related information. It is divided into different subclasses that inherit from Postgres DatabaseServer, which is itself a subclass of DatabaseServer.
+
+- Postgres:
+
+    This model provides attributes for performing various operations on Postgres Databases.
+
+    - Create:
+        - Model Definition:
+        ```
+        from calm.dsl.builtins.models.ndb import DatabaseServer
+
+        DatabaseServer.Postgres.Create(
+            name="db_server_name",
+            password="db_server_password",
+            cluster=Ref.NutanixDB.Cluster(name="EraCluster"),
+            software_profile=Ref.NutanixDB.Profile.Software(name="POSTGRES_10.4_OOB"),
+            software_profile_version=Ref.NutanixDB.Profile.Software_Version(
+                name="POSTGRES_10.4_OOB (1.0)"
+            ),
+            compute_profile=Ref.NutanixDB.Profile.Compute(name="DEFAULT_OOB_COMPUTE"),
+            network_profile=Ref.NutanixDB.Profile.Network(
+                name="DEFAULT_OOB_POSTGRESQL_NETWORK"
+            ),
+            ip_address="10.44.76.141",
+            ssh_public_key="ssh_key for the server",
+            description="Sample description of db server",
+        )
+        ```
+
+        - Attributes supported for this class:
+            1. **name**: (String) Name of the database server
+            2. **password**: (String) Password of the database server
+            3. **cluster**: (NDB Ref/ Macro) Cluster to use for the database server
+            4. **software_profile**: (NDB Ref/ Macro) Software Profile to use for the database server
+            5. **software_profile_version**: (NDB Ref/ Macro) Version of the Software Profile to use for the database server
+            6. **compute_profile**: (NDB Ref/ Macro) Compute Profile to use for the database server
+            7. **network_profile**: (NDB Ref/ Macro) Network Profile to use for the database server
+            8. **ip_address**: (String) Static IP address for the database server, used when a static network profile is provided
+            9. **ssh_public_key**: (String) RSA-based public key to use for the database server
+            10. **description**: (String) Description of the database server
+
+    - Clone:
+        - Model Definition:
+        ```
+        from calm.dsl.builtins.models.ndb import DatabaseServer
+
+        DatabaseServer.Postgres.Clone(
+            name="new_db_@@{calm_time}@@",
+            password="abc123",
+            cluster=Ref.NutanixDB.Cluster(name="EraCluster"),
+            compute_profile=Ref.NutanixDB.Profile.Compute(name="DEFAULT_OOB_COMPUTE"),
+            network_profile=Ref.NutanixDB.Profile.Network(
+                name="DEFAULT_OOB_POSTGRESQL_NETWORK"
+            ),
+            ssh_public_key="ssh-key",
+            description="Sample description of db server",
+        )
+        ```
+
+        - Attributes supported for this class:
+            1. **name**: (String) Name of the Postgres Instance
+            2. **password**: (String) Password of the database server
+            3. **cluster**: (NDB Ref/ Macro) Cluster to use for the database server
+            4. **compute_profile**: (NDB Ref/ Macro) Compute Profile to use for the database server
+            5. **network_profile**: (NDB Ref/ Macro) Network Profile to use for the database server
+            6. **ssh_public_key**: (String) RSA-based public key to use for the database server
+            7. **description**: (String) Description of the database server
+
+## Database:
+
+This model provides attributes for the database instance related information. It is divided into different subclasses that inherit from Postgres Database, which is itself a subclass of Database.
+ +- Postgres: + + This model provides attributes for performing various operations on Postgres Databases. + + - Create: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import Database + + Database.Postgres.Create( + name="post_inst_@@{calm_time}@@", + description="Sample description of postgres instances", + database_parameter_profile=Ref.NutanixDB.Profile.Database_Parameter( + name="DEFAULT_POSTGRES_PARAMS" + ), + initial_database_name="TEST_DB_01", + initial_database_password="DB_PASS", + listener_port="5432", + size="200", + pre_create_script="", + post_create_script="", + ) + ``` + + - Attributes supported for this class: + 1. **name**: (String) Name of the Postgres Instance + 2. **description**: (String) Description of the Postgres Instance + 3. **database_parameter_profile**: (NDB Ref/ Macro) Database Parameter Profile to use for Postgres Instance + 4. **initial_database_name**: (String) Intial Database name to use for Postgres Instance + 5. **initial_database_password**: (String) Intial Database password to use for Postgres Instance + 6. **listener_port**: (Integer) Listener Port to use for Postgres Instance + 7. **size**: (Integer) Size of the Postgres Instance + 8. **pre_create_script**: (String) Script to run before creating the Postgres Instance + 9. **post_create_script**: (String) Script to run after creating the Postgres Instance + + - Delete: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import Database + + Database.Postgres.Delete( + name=Ref.NutanixDB.Database(name="bekkam-pg-dnd") + ) + ``` + + - Attributes supported for this class: + 1. **database**: (NDB Ref/ Macro) Ref of the Postgres Instance + + - Create Snapshot: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import Database + + Database.Postgres.CreateSnapshot( + snapshot_name="snap-from-dsl", + remove_schedule_in_days=2, + # time_machine="@@{tm_uuid}@@", + time_machine=Ref.NutanixDB.TimeMachine(name="dnd-pg_TM"), + database=Ref.NutanixDB.Database(name="dnd-pg"), + ) + ``` + + - Attributes supported for this class: + 1. **snapshot_name**: (String) Snapshot Name + 2. **remove_schedule_in_days**: (Integer) Removal Schedule + 3. **time_machine**: (NDB Ref/ Macro) Time Machine Name + 4. **database**: (NDB Ref/ Macro) Database Name + + - Restore From Time Machine: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import Database + + Database.Postgres.RestoreFromTimeMachine( + database=Ref.NutanixDB.Database("test-pg-inst"), + snapshot_with_timeStamp=Ref.NutanixDB.Snapshot( + "era_auto_snapshot (2023-03-01 14:46:17)" + ), + time_zone="America/Resolute", + #point_in_time="2023-02-12 10:01:40", + ) + ``` + + - Attributes supported for this class: + 1. **database**: (NDB Ref/ Macro) Name of the Postgres Instance + 2. **snapshot_with_timeStamp**: (NDB Ref/ Macro) Name of the snapshot along with TimeStamp (yyyy-mm-dd hh:mm:ss) Eg-> "era_auto_snapshot (2023-02-12 10:01:40)" + 3. **point_in_time**: (String) point in Time to Restore yyyy-mm-dd hh:mm:ss Eg -> "2023-02-12 10:01:40" + 4. 
**time_zone**: (String) Time Zone of the snapshot/point in time (If not given defaults to system timezone) + + - Note: Either of snapshot_with_timeStamp and point_in_time can be specified + + - Clone: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import Database + + Database.Postgres.Clone( + name="post_inst_@@{calm_time}@@", + database_parameter_profile=Ref.NutanixDB.Profile.Database_Parameter( + name="DEFAULT_POSTGRES_PARAMS" + ), + password="Nutanix.123", + pre_clone_cmd="", + post_clone_cmd="", + ) + ``` + + - Attributes supported for this class: + 1. **name**: (String) Name of the Postgres Instance + 2. **description**: (String) Description of the Postgres Instance + 3. **password**: (String) Password of the Postgres Instance + 4. **database_parameter_profile**: (NDB Ref/ Macro) Database Parameter Profile to use for Postgres Instance + 5. **pre_clone_cmd**: (String) Script to run before creating the Postgres Instance + 6. **post_clone_cmd**: (String) Script to run after creating the Postgres Instance + +## TimeMachine: + +This model provides attributes for the timeMachine related information. This is divided into different subclasses which inherits from Postgres TimeMachine, a subclass to TimeMachine. + +- Postgres: + + This model provides attributes for performing various operations on Postgres Databases. + + - Create: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import TimeMachine + + TimeMachine.Postgres.Create( + name="inst_@@{calm_time}@@_TM", + description="This is time machine's description", + sla=Ref.NutanixDB.SLA(name="DEFAULT_OOB_GOLD_SLA"), + snapshottimeofday__hours=12, + snapshottimeofday__minutes=0, + snapshottimeofday__seconds=0, + snapshots_perday=1, + logbackup_interval=60, + weeklyschedule__dayofweek="WEDNESDAY", + monthlyschedule__dayofmonth=17, + quartelyschedule__startmonth="FEBRUARY", + ) + ``` + + - Attributes supported for this class: + 1. **name**: (String) Name of the Time Machine + 2. **description**: (String) Description of the Time Machine + 3. **sla**: (NDB Ref/ Macro) SLA to use for the Time Machine + 4. **snapshottimeofday__hours**: (Integer) Hour of the day to take Snapshot + 5. **snapshottimeofday__minutes**: (Integer) Minute of the day to take Snapshot + 6. **snapshottimeofday__seconds**: (Integer) Second of the day to take Snapshot + 7. **snapshots_perday**: (Integer) Snapshots to take Per day + 8. **logbackup_interval**: (Integer) Log Backup Interval in minutes + 9. **weeklyschedule__dayofweek**: (String) Weekly Snapshot day of the week + 10. **monthlyschedule__dayofmonth**: (Integer) Monthly Snaphot day of the month + 11. **quartelyschedule__startmonth**: (String) Quarterly Snapshot start of the month + + - Clone: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import TimeMachine + + TimeMachine.Postgres.Clone( + time_machine_name=Ref.NutanixDB.TimeMachine("dnd-tm2"), + point_in_time="2023-02-12 10:01:40", + + time_zone="UTC", + ) + ``` + + - Attributes supported for this class: + 1. **time_machine**: (NDB Ref/ Macro) Name of the Time Machine + 2. **snapshot_with_timeStamp**: (NDB Ref/ Macro) Name of the snapshot along with TimeStamp (yyyy-mm-dd hh:mm:ss) Eg-> "era_auto_snapshot (2023-02-12 10:01:40)" + 3. **point in time**: (String) point in Time to Restore yyyy-mm-dd hh:mm:ss Eg -> "2023-02-12 10:01:40" + 4. **time_zone**: (String) Time Zone of the snapshot/point in time (If not given defaults to system timezone) + 5. **expiry_days**: (Integer) Number of days to expire + 6. 
**expiry_date_timezone** : (String) Timezone to be used for expiry date + 7. **delete_database**: (Boolean) Boolean input for deletion of database + 8. **refresh_in_days**: (Integer) Number of days to refresh + 9. **refresh_time**: (String) Time at which refresh should trigger + 10. **refresh_date_timezone**: (String) Timezone for the refresh time + + - Note: Either of snapshot_with_timeStamp and point_in_time can be specified + +## Tag: + +This model provides attributes for the tag related information. This is divided into different subclasses wrt to the Actions. + +- Create: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import Tag + + Tag.Create( + database_tags=[ + DatabaseTag("prod_database", "true"), + DatabaseTag("database_type", "Postgres"), + ], + time_machine_tags=[ + TimemachineTag("type", "gold"), + ], + ) + ``` + + - Attributes supported for this class: + 1. **database**: ([NDB Ref]) array of NDB Database Tag Ref. Eg -> [ Ref.NutanixDB.Tag.Database(name1, value1), Ref.NutanixDB.Tag.Database(name=name2, value=value2) ] + 2. **time_machine**: ([NDB Ref]) array of NDB TimeMachine Tag Ref. Eg -> [ Ref.NutanixDB.Tag.TimeMachine(name=name1, value=value1), Ref.NutanixDB.Tag.TimeMachine(name2, value2) ] + +- Clone: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import Tag + + Tag.Clone(tags=[CloneTag("tag name", "")]) + ``` + + - Attributes supported for this class: + 1. **clone**: ([NDB Ref]) array of NDB Clone Tag Ref. Eg -> [ Ref.NutanixDB.Tag.Clone(name1, value1), Ref.NutanixDB.Tag.Clone(name=name2, value=value2) ] + +## PostgresDatabaseOutputVariables: + +This model provides information about the output variables associated to postgres actions. + +- Create: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import PostgresDatabaseOutputVariables + + PostgresDatabaseOutputVariables.Create( + database_name="postgres_database_name", + database_instance_id="", + tags="", + properties="", + time_machine="postgres_time_machine", + time_machine_id="", + metric="", + type="", + platform_data="", + ) + ``` + + - Attributes supported for this class: + 1. **database_name**: (String) Name of the database instance + 2. **database_instance_id**: (String) ID of database instance created + 3. **tags**: ([Dict]) A tag is a label consisting of a user-defined name and a value that makes it easier to manage, search for, and filter entities + 4. **properties**: ([Dict]) Properties of the entity, Eg -> Database instance, database, profiles + 5. **time_machine**: (Dict) Time machine details when an instance is created + 6. **time_machine_id**:(String) UUID of time machine + 7. **metric**: (Dict) Stores storage info regarding size, allocatedSize, usedSize and unit of calculation that have been fetched from PRISM + 8. **type**: (String) The type of the database created i.e., postgres_database + 9. **platform_data**: (Dict) Platform data is the aggregate data of all the output variables supported + +- Create Snapshot: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import PostgresDatabaseOutputVariables + + PostgresDatabaseOutputVariables.CreateSnapshot( + platform_data='myplatformdata' + ) + ``` + + - Attributes supported for this class: + 1. **database_snapshot**: (Dict) Snapshot of the database + 2. **properties**: (Dict) Properties of the entity, Eg -> Database instance, database, profiles + 3. **dbserver_name**: (String) Name of the database server VM + 4. **type**: (String) The type of the database created i.e., postgres_database + 5. 
**dbserver_ip**: (String) IP address of the database server VM + 6. **id**: (String) ID of database instance created + 7. **parent_snapshot**: (Dict) Snapshot used to clone the database + 8. **snapshot_uuid**: (String) Uuid of the Snapshot + 9. **platform_data**: (Dict) Platform data is the aggregate data of all the output variables supported + +- Restore From Time Machine: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import PostgresDatabaseOutputVariables + + PostgresDatabaseOutputVariables.RestoreFromTimeMachine( + database_name="postgres_database_name", + database_instance_id="", + tags="", + properties="", + time_machine="postgres_time_machine", + time_machine_id="", + metric="", + type="", + platform_data="", + ) + ``` + + - Attributes supported for this class: + 1. **database_name**: (String) Name of the database instance + 2. **database_instance_id**: (String) ID of database instance created + 3. **tags**: ([Dict]) A tag is a label consisting of a user-defined name and a value that makes it easier to manage, search for, and filter entities + 4. **properties**: (Dict) Properties of the entity, Eg -> Database instance, database, profiles + 5. **time_machine**: (Dict) Time machine details when an instance is created + 6. **time_machine_id**: (String) UUID of time machine + 7. **metric**: (Dict) Stores storage info regarding size, allocatedSize, usedSize and unit of calculation that seems to have been fetched from PRISM + 8. **type**: (String) The type of the database created i.e., postgres_database + 9. **platform_data**: (Dict) Platform data is the aggregate data of all the output variables supported + +- Clone: + - Model Definition: + ``` + from calm.dsl.builtins.models.ndb import PostgresDatabaseOutputVariables + + PostgresDatabaseOutputVariables.Clone( + id="postgres_Clone_id" + ) + ``` + + - Attributes supported for this class: + 1. **type**: (String) The type of the database created i.e., postgres_database + 2. **id**: (String) ID of database instance created + 3. **time_machine**: (Dict) Time machine details when an instance is created + 4. **linked_databases**: ([String]) These are databases which are created as a part of the instance + 5. **database_name**: (String) Name of the database instance + 6. **database_nodes**: ([Dict]) Info of nodes of databases + 7. 
**platform_data**: (Dict) Platform data is the aggregate data of all the output variables supported diff --git a/calm/dsl/builtins/models/providers.py b/calm/dsl/builtins/models/providers.py index c08a31cd..f48babfe 100644 --- a/calm/dsl/builtins/models/providers.py +++ b/calm/dsl/builtins/models/providers.py @@ -11,6 +11,22 @@ class AccountProviderType(EntityType): __schema_name__ = "AccountProvider" __openapi_type__ = "app_account_provider" + @classmethod + def pre_decompile(mcls, cdict, context=[], prefix=""): + + for _i in cdict.get("subnet_references", []): + _i["kind"] = "subnet" + cdict["subnet_reference_list"] = cdict.pop("subnet_references", []) + + for _i in cdict.get("cluster_references", []): + _i["kind"] = "cluster" + cdict["cluster_reference_list"] = cdict.pop("cluster_references", []) + + if cdict.get("default_subnet_reference", {}): + cdict["default_subnet_reference"]["kind"] = "subnet" + + return cdict + class AccountProviderValidator(PropertyValidator, openapi_type="app_account_provider"): __default__ = None diff --git a/calm/dsl/cli/environment_commands.py b/calm/dsl/cli/environment_commands.py index 6c6ed614..c0b433fd 100644 --- a/calm/dsl/cli/environment_commands.py +++ b/calm/dsl/cli/environment_commands.py @@ -1,12 +1,13 @@ import click -from .main import get, delete, create, update, compile +from .main import get, delete, create, update, compile, decompile from .environments import ( create_environment_from_dsl_file, get_environment_list, delete_environment, update_environment_from_dsl_file, compile_environment_command, + decompile_environment_command, ) from calm.dsl.log import get_logging_handle @@ -156,3 +157,34 @@ def _compile_environment_command(env_file, project_name, out): """Compiles a DSL (Python) environment into JSON or YAML""" compile_environment_command(env_file, project_name, out) + + +@decompile.command("environment", experimental=True) +@click.option( + "--name", + "-n", + "name", + default=None, + help="Environment name", +) +@click.option( + "--file", + "-f", + "environment_file", + type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True), + help="Path to Environment file", +) +@click.option("--project", "-p", "project_name", help="Project name") +@click.option( + "--dir", + "-d", + "environment_dir", + default=None, + help="Environment directory location used for placing decompiled entities", +) +def _decompile_environment_command( + name, environment_file, project_name, environment_dir +): + """Decompiles environment present on server or json file""" + + decompile_environment_command(name, environment_file, project_name, environment_dir) diff --git a/calm/dsl/cli/environments.py b/calm/dsl/cli/environments.py index e9f7b193..71cd248c 100644 --- a/calm/dsl/cli/environments.py +++ b/calm/dsl/cli/environments.py @@ -2,6 +2,7 @@ import uuid import click import json +import os import time import arrow from prettytable import PrettyTable @@ -9,12 +10,22 @@ from calm.dsl.config import get_context from calm.dsl.api import get_api_client -from calm.dsl.builtins import create_environment_payload, Environment +from calm.dsl.builtins import ( + create_environment_payload, + Environment, + get_valid_identifier, + MetadataType, + CredentialType, +) from calm.dsl.builtins.models.helper.common import get_project +from calm.dsl.decompile.main import init_decompile_context +from calm.dsl.decompile.decompile_render import create_environment_dir +from calm.dsl.decompile.file_handler import get_environment_dir from calm.dsl.tools import 
get_module_from_file from calm.dsl.store import Cache from calm.dsl.constants import CACHE from calm.dsl.log import get_logging_handle +from calm.dsl.builtins.models.environment import EnvironmentType from .utils import ( get_name_query, @@ -551,6 +562,96 @@ def delete_environment(environment_name, project_name, no_cache_update=False): LOG.info("[Done]") +def decompile_environment_command( + name, environment_file, project, environment_dir=None +): + """helper to decompile environment""" + if name and environment_file: + LOG.error( + "Please provide either environment file location or server environment name" + ) + sys.exit("Both environment name and file location provided.") + init_decompile_context() + + if name: + decompile_environment_from_server( + name=name, environment_dir=environment_dir, project=project + ) + + elif environment_file: + decompile_environment_from_file( + filename=environment_file, environment_dir=environment_dir + ) + else: + LOG.error( + "Please provide either environment file location or server environment name" + ) + sys.exit("Environment name or file location not provided.") + + +def decompile_environment_from_server(name, environment_dir, project): + """decompiles the environment by fetching it from server""" + + client = get_api_client() + environment = get_environment(name, project) + environment_uuid = environment["status"]["uuid"] + res, err = client.environment.read(environment_uuid) + if err: + LOG.error(err) + sys.exit("Not able to decompile environment from server.") + + environment = res.json() + _decompile_environment( + environment_payload=environment, environment_dir=environment_dir + ) + + +def decompile_environment_from_file(filename, environment_dir): + """decompile environment from local environment file""" + + environment_payload = json.loads(open(filename).read()) + _decompile_environment( + environment_payload=environment_payload, environment_dir=environment_dir + ) + + +def _decompile_environment(environment_payload, environment_dir): + """decompiles the environment from payload""" + + environment_name = environment_payload["status"].get("name", "DslEnvironment") + environment_description = environment_payload["status"].get("description", "") + + environment_metadata = environment_payload["metadata"] + # POP unnecessary keys + environment_metadata.pop("creation_time", None) + environment_metadata.pop("last_update_time", None) + + metadata_obj = MetadataType.decompile(environment_metadata) + + LOG.info("Decompiling environment {}".format(environment_name)) + environment_cls = EnvironmentType.decompile( + environment_payload["status"]["resources"] + ) + + credentials = environment_cls.credentials + + environment_cls.__name__ = get_valid_identifier(environment_name) + environment_cls.__doc__ = environment_description + + create_environment_dir( + environment_cls=environment_cls, + environment_dir=environment_dir, + metadata_obj=metadata_obj, + credentials=credentials, + ) + click.echo( + "\nSuccessfully decompiled. Directory location: {}. 
Environment location: {}".format( + highlight_text(get_environment_dir()), + highlight_text(os.path.join(get_environment_dir(), "environment.py")), + ) + ) + + def is_environment_exist(env_name, project_name): client = get_api_client() payload = { diff --git a/calm/dsl/decompile/decompile_render.py b/calm/dsl/decompile/decompile_render.py index 197bd75a..4e6552d1 100644 --- a/calm/dsl/decompile/decompile_render.py +++ b/calm/dsl/decompile/decompile_render.py @@ -4,7 +4,12 @@ from calm.dsl.log import get_logging_handle from calm.dsl.decompile.bp_file_helper import render_bp_file_template from calm.dsl.decompile.runbook import render_runbook_template -from calm.dsl.decompile.file_handler import init_bp_dir, init_runbook_dir +from calm.dsl.decompile.file_handler import ( + init_bp_dir, + init_runbook_dir, + init_environment_dir, +) +from calm.dsl.decompile.environments import render_environment_template LOG = get_logging_handle(__name__) @@ -23,6 +28,13 @@ def create_runbook_file(dir_name, runbook_data): fd.write(runbook_data) +def create_environment_file(dir_name, environment_data): + + environment_path = os.path.join(dir_name, "environment.py") + with open(environment_path, "w") as fd: + fd.write(environment_data) + + def create_bp_dir( bp_cls=None, bp_dir=None, @@ -73,3 +85,27 @@ def create_runbook_dir( runbook_data = format_str(runbook_data, mode=FileMode()) LOG.info("Creating runbook file") create_runbook_file(runbook_dir, runbook_data) + + +def create_environment_dir( + environment_cls=None, + environment_dir=None, + metadata_obj=None, + credentials=None, +): + if not environment_dir: + environment_dir = os.path.join(os.getcwd(), environment_cls.__name__) + + LOG.info("Creating environment directory") + _, _, _, _ = init_environment_dir(environment_dir) + LOG.info("Rendering environment file template") + environment_data = render_environment_template( + environment_cls=environment_cls, + credentials=credentials, + metadata_obj=metadata_obj, + ) + + LOG.info("Formatting environment file using black") + environment_data = format_str(environment_data, mode=FileMode()) + LOG.info("Creating environment file") + create_environment_file(environment_dir, environment_data) diff --git a/calm/dsl/decompile/environments.py b/calm/dsl/decompile/environments.py new file mode 100644 index 00000000..3d2cb12a --- /dev/null +++ b/calm/dsl/decompile/environments.py @@ -0,0 +1,86 @@ +from calm.dsl.decompile.render import render_template +from calm.dsl.decompile.ref import render_ref_template +from calm.dsl.decompile.provider import render_provider_template +from calm.dsl.decompile.substrate import render_substrate_template +from calm.dsl.builtins.models.environment import EnvironmentType +from .decompile_helpers import process_variable_name +from calm.dsl.decompile.variable import get_secret_variable_files +from calm.dsl.builtins import get_valid_identifier + +from calm.dsl.decompile.credential import ( + render_credential_template, + get_cred_files, + get_cred_var_name, +) + +from calm.dsl.log import get_logging_handle + +LOG = get_logging_handle(__name__) + + +def render_environment_template( + environment_cls, + metadata_obj=None, + entity_context="", + CONFIG_SPEC_MAP={}, + credentials=[], +): + LOG.debug("Rendering {} environment template".format(environment_cls.__name__)) + if not isinstance(environment_cls, EnvironmentType): + raise TypeError("{} is not of type {}".format(environment_cls, environment)) + + # Update entity context + entity_context = entity_context + "_Environment_" + 
environment_cls.__name__ + + environment_name = getattr(environment_cls, "name", "") or environment_cls.__name__ + + rendered_credential_list = [] + credentials_list = [] + for cred in credentials: + rendered_credential_list.append(render_credential_template(cred)) + credentials_list.append(get_cred_var_name(cred.name)) + + # Getting the local files used for secrets + secret_files = get_secret_variable_files() + secret_files.extend(get_cred_files()) + + class_name = "ENV_{}".format(get_valid_identifier(environment_cls.__name__)) + + user_attrs = { + "name": class_name, + "credentials": rendered_credential_list, + "credentials_list": credentials_list, + "secret_files": secret_files, + } + + rendered_substrates_list = [] + substrates_list = [] + substrate_name_counter = 1 + if environment_cls.substrates: + for substrate in environment_cls.substrates: + if substrate.name in substrates_list: + new_name = "{}_{}".format(substrate.name, str(substrate_name_counter)) + substrates_list.append(new_name) + + substrate.__name__ = new_name + rendered_substrates_list.append(render_substrate_template(substrate)) + + substrate_name_counter += 1 + else: + rendered_substrates_list.append(render_substrate_template(substrate)) + substrates_list.append(substrate.name) + user_attrs["substrates"] = rendered_substrates_list + user_attrs["substrates_list"] = substrates_list + + rendered_providers_list = [] + if environment_cls.providers: + for provider in environment_cls.providers: + rendered_providers_list.append(render_provider_template(provider)) + user_attrs["providers"] = rendered_providers_list + + gui_display_name = getattr(environment_cls, "name", "") or environment_cls.__name__ + if gui_display_name != environment_cls.__name__: + user_attrs["gui_display_name"] = gui_display_name + + text = render_template(schema_file="environments.py.jinja2", obj=user_attrs) + return text.strip() diff --git a/calm/dsl/decompile/file_handler.py b/calm/dsl/decompile/file_handler.py index ef277d02..4304ffc7 100644 --- a/calm/dsl/decompile/file_handler.py +++ b/calm/dsl/decompile/file_handler.py @@ -46,6 +46,26 @@ def make_runbook_dirs(runbook_dir): return (runbook_dir, local_dir, scripts_dir) +def make_environment_dirs(environment_dir): + + if not os.path.isdir(environment_dir): + os.makedirs(environment_dir) + + local_dir = os.path.join(environment_dir, LOCAL_DIR_KEY) + if not os.path.isdir(local_dir): + os.makedirs(local_dir) + + spec_dir = os.path.join(environment_dir, SPECS_DIR_KEY) + if not os.path.isdir(spec_dir): + os.makedirs(spec_dir) + + scripts_dir = os.path.join(environment_dir, SCRIPTS_DIR_KEY) + if not os.path.isdir(scripts_dir): + os.makedirs(scripts_dir) + + return (environment_dir, local_dir, spec_dir, scripts_dir) + + def init_bp_dir(bp_dir): global LOCAL_DIR, SCRIPTS_DIR, SPECS_DIR, BP_DIR @@ -62,6 +82,16 @@ def init_runbook_dir(runbook_dir): return (RUNBOOK_DIR, LOCAL_DIR, SCRIPTS_DIR) +def init_environment_dir(environment_dir): + + global LOCAL_DIR, SCRIPTS_DIR, SPECS_DIR, ENVIRONMENT_DIR + ENVIRONMENT_DIR, LOCAL_DIR, SPECS_DIR, SCRIPTS_DIR = make_environment_dirs( + environment_dir + ) + + return (ENVIRONMENT_DIR, LOCAL_DIR, SPECS_DIR, SCRIPTS_DIR) + + def get_bp_dir(): return BP_DIR @@ -70,6 +100,10 @@ def get_runbook_dir(): return RUNBOOK_DIR +def get_environment_dir(): + return ENVIRONMENT_DIR + + def get_local_dir(): return LOCAL_DIR diff --git a/calm/dsl/decompile/provider.py b/calm/dsl/decompile/provider.py new file mode 100644 index 00000000..c3e8af1b --- /dev/null +++ 
b/calm/dsl/decompile/provider.py @@ -0,0 +1,21 @@ +from calm.dsl.decompile.render import render_template +from calm.dsl.log import get_logging_handle + +LOG = get_logging_handle(__name__) + + +def render_provider_template(cls): + LOG.debug("Rendering {} provider template".format(cls.type)) + + if cls.type == "nutanix_pc": + schema_file = "provider_ntnx.py.jinja2" + elif cls.type == "aws": + schema_file = "provider_aws.py.jinja2" + elif cls.type == "gcp": + schema_file = "provider_gcp.py.jinja2" + elif cls.type == "vmware": + schema_file = "provider_vmware.py.jinja2" + + user_attrs = cls.get_user_attrs() + text = render_template(schema_file=schema_file, obj=user_attrs) + return text.strip() diff --git a/calm/dsl/decompile/schemas/environments.py.jinja2 b/calm/dsl/decompile/schemas/environments.py.jinja2 new file mode 100644 index 00000000..36532ec7 --- /dev/null +++ b/calm/dsl/decompile/schemas/environments.py.jinja2 @@ -0,0 +1,32 @@ +{% macro environment(obj) %} +# THIS FILE IS AUTOMATICALLY GENERATED. +# Disclaimer: Please test this file before using in production. +""" +Generated environment DSL (.py) +""" + +from calm.dsl.builtins import * +{% if obj.credentials %} +# Secret Variables +{%- for var_file in obj.secret_files %} +{{var_file}} = read_local_file('{{var_file}}') +{%- endfor %} +{%- endif %} +{%- for cred in obj.credentials %} +{{cred}} +{%- endfor %} +{% for entity in obj.substrates %} +{{entity}} +{% endfor %} +class {{obj.name}}(Environment): + {% if obj.substrates_list %}substrates = {{obj.substrates_list | replace("'","")}} {% endif %} + {% if obj.credentials_list %}credentials = {{obj.credentials_list | replace("'","")}}{% endif %} + {% if obj.providers %} + providers = [ + {% for entity in obj.providers %} + {{entity}} + {% endfor %} + ] + {% endif %} +{% endmacro %} +{{ environment(obj) }} \ No newline at end of file diff --git a/calm/dsl/decompile/schemas/provider_aws.py.jinja2 b/calm/dsl/decompile/schemas/provider_aws.py.jinja2 new file mode 100644 index 00000000..a04fee58 --- /dev/null +++ b/calm/dsl/decompile/schemas/provider_aws.py.jinja2 @@ -0,0 +1,6 @@ +{% macro aws_provider(obj) %} + Provider.Aws( + {% if obj.account_reference %}account=Ref.Account("{{obj.account_reference}}"),{% endif %} + ), +{% endmacro %} +{{ aws_provider(obj) }} \ No newline at end of file diff --git a/calm/dsl/decompile/schemas/provider_gcp.py.jinja2 b/calm/dsl/decompile/schemas/provider_gcp.py.jinja2 new file mode 100644 index 00000000..9f4858d7 --- /dev/null +++ b/calm/dsl/decompile/schemas/provider_gcp.py.jinja2 @@ -0,0 +1,6 @@ +{% macro gcp_provider(obj) %} + Provider.Gcp( + {% if obj.account_reference %}account=Ref.Account("{{obj.account_reference}}"),{% endif %} + ), +{% endmacro %} +{{ gcp_provider(obj) }} \ No newline at end of file diff --git a/calm/dsl/decompile/schemas/provider_ntnx.py.jinja2 b/calm/dsl/decompile/schemas/provider_ntnx.py.jinja2 new file mode 100644 index 00000000..51fa0281 --- /dev/null +++ b/calm/dsl/decompile/schemas/provider_ntnx.py.jinja2 @@ -0,0 +1,13 @@ +{% macro ntnx_provider(obj) %} + Provider.Ntnx( + {% if obj.account_reference %}account=Ref.Account("{{obj.account_reference}}"),{% endif %} + {% if obj.subnet_reference_list %}subnets=[ + {%- for subnet in obj.subnet_reference_list %} + Ref.Subnet( + {% if subnet.name %}name="{{subnet.name}}",{% endif %} + {% if subnet.cluster %}cluster="{{subnet.cluster}}",{% endif %} + ){%- endfor %} + ],{% endif %} + ), +{% endmacro %} +{{ ntnx_provider(obj) }} \ No newline at end of file diff --git 
a/calm/dsl/decompile/schemas/provider_vmware.py.jinja2 b/calm/dsl/decompile/schemas/provider_vmware.py.jinja2 new file mode 100644 index 00000000..ba2bdb4b --- /dev/null +++ b/calm/dsl/decompile/schemas/provider_vmware.py.jinja2 @@ -0,0 +1,6 @@ +{% macro vmware_provider(obj) %} + Provider.Vmware( + {% if obj.account_reference %}account=Ref.Account("{{obj.account_reference}}"),{% endif %} + ), +{% endmacro %} +{{ vmware_provider(obj) }} \ No newline at end of file diff --git a/test_decompile_env/assets/style.css b/test_decompile_env/assets/style.css new file mode 100644 index 00000000..3edac88e --- /dev/null +++ b/test_decompile_env/assets/style.css @@ -0,0 +1,186 @@ +body { + font-family: Helvetica, Arial, sans-serif; + font-size: 12px; + /* do not increase min-width as some may use split screens */ + min-width: 800px; + color: #999; +} + +h1 { + font-size: 24px; + color: black; +} + +h2 { + font-size: 16px; + color: black; +} + +p { + color: black; +} + +a { + color: #999; +} + +table { + border-collapse: collapse; +} + +/****************************** + * SUMMARY INFORMATION + ******************************/ +#environment td { + padding: 5px; + border: 1px solid #E6E6E6; +} +#environment tr:nth-child(odd) { + background-color: #f6f6f6; +} + +/****************************** + * TEST RESULT COLORS + ******************************/ +span.passed, +.passed .col-result { + color: green; +} + +span.skipped, +span.xfailed, +span.rerun, +.skipped .col-result, +.xfailed .col-result, +.rerun .col-result { + color: orange; +} + +span.error, +span.failed, +span.xpassed, +.error .col-result, +.failed .col-result, +.xpassed .col-result { + color: red; +} + +/****************************** + * RESULTS TABLE + * + * 1. Table Layout + * 2. Extra + * 3. Sorting items + * + ******************************/ +/*------------------ + * 1. Table Layout + *------------------*/ +#results-table { + border: 1px solid #e6e6e6; + color: #999; + font-size: 12px; + width: 100%; +} +#results-table th, +#results-table td { + padding: 5px; + border: 1px solid #E6E6E6; + text-align: left; +} +#results-table th { + font-weight: bold; +} + +/*------------------ + * 2. Extra + *------------------*/ +.log { + background-color: #e6e6e6; + border: 1px solid #e6e6e6; + color: black; + display: block; + font-family: "Courier New", Courier, monospace; + height: 230px; + overflow-y: scroll; + padding: 5px; + white-space: pre-wrap; +} +.log:only-child { + height: inherit; +} + +div.image { + border: 1px solid #e6e6e6; + float: right; + height: 240px; + margin-left: 5px; + overflow: hidden; + width: 320px; +} +div.image img { + width: 320px; +} + +div.video { + border: 1px solid #e6e6e6; + float: right; + height: 240px; + margin-left: 5px; + overflow: hidden; + width: 320px; +} +div.video video { + overflow: hidden; + width: 320px; + height: 240px; +} + +.collapsed { + display: none; +} + +.expander::after { + content: " (show details)"; + color: #BBB; + font-style: italic; + cursor: pointer; +} + +.collapser::after { + content: " (hide details)"; + color: #BBB; + font-style: italic; + cursor: pointer; +} + +/*------------------ + * 3. 
Sorting items + *------------------*/ +.sortable { + cursor: pointer; +} + +.sort-icon { + font-size: 0px; + float: left; + margin-right: 5px; + margin-top: 5px; + /*triangle*/ + width: 0; + height: 0; + border-left: 8px solid transparent; + border-right: 8px solid transparent; +} +.inactive .sort-icon { + /*finish triangle*/ + border-top: 8px solid #E6E6E6; +} +.asc.active .sort-icon { + /*finish triangle*/ + border-bottom: 8px solid #999; +} +.desc.active .sort-icon { + /*finish triangle*/ + border-top: 8px solid #999; +} diff --git a/test_decompile_env/environment.py b/test_decompile_env/environment.py new file mode 100644 index 00000000..dfc9dad6 --- /dev/null +++ b/test_decompile_env/environment.py @@ -0,0 +1,66 @@ +# THIS FILE IS AUTOMATICALLY GENERATED. +# Disclaimer: Please test this file before using in production. +""" +Generated environment DSL (.py) +""" + +from calm.dsl.builtins import * + +# Secret Variables +BP_CRED_test_creds_PASSWORD = read_local_file("BP_CRED_test_creds_PASSWORD") +BP_CRED_test_creds = basic_cred( + "admin", + BP_CRED_test_creds_PASSWORD, + name="test_creds", + type="PASSWORD", +) + + +class vmcalm_array_indexcalm_timeResources(AhvVmResources): + + memory = 1 + vCPUs = 1 + cores_per_vCPU = 1 + disks = [ + AhvVmDisk.Disk.Scsi.cloneFromImageService("Centos7HadoopMaster", bootable=True) + ] + + +class vmcalm_array_indexcalm_time(AhvVm): + + name = "vm-@@{calm_array_index}@@-@@{calm_time}@@" + resources = vmcalm_array_indexcalm_timeResources + cluster = Ref.Cluster(name="auto_cluster_prod_4faf4699cdea") + + +class Untitled(Substrate): + + account = Ref.Account("NTNX_LOCAL_AZ") + os_type = "Linux" + provider_type = "AHV_VM" + provider_spec = vmcalm_array_indexcalm_time + + readiness_probe = readiness_probe( + connection_type="SSH", + disabled=True, + retries="5", + connection_port=22, + delay_secs="0", + ) + + +class ENV_test_decompile_env(Environment): + substrates = [Untitled] + credentials = [BP_CRED_test_creds] + + providers = [ + Provider.Ntnx( + account=Ref.Account("NTNX_LOCAL_AZ"), + subnets=[ + Ref.Subnet( + name="vlan.154", + cluster="auto_cluster_prod_4faf4699cdea", + ) + ], + ), + ] diff --git a/test_decompile_env/test-result.html b/test_decompile_env/test-result.html new file mode 100644 index 00000000..ac76878e --- /dev/null +++ b/test_decompile_env/test-result.html @@ -0,0 +1,273 @@ + + + + + Test Report + + + +

+ test-result.html: pytest-html v3.1.1 report, generated on 02-Nov-2023 at 15:18:32 (Python 3.6.8, pytest 5.3.5, Linux 3.10.0-1160.71.1.el7.x86_64, CentOS 7.9).
+ Summary: 0 tests ran in 0.02 seconds; 0 passed, 0 skipped, 0 failed, 0 errors, 0 expected failures, 0 unexpected passes, 0 rerun.
+ Results table (Result / Test / Duration / Links): empty.
\ No newline at end of file diff --git a/test_decompile_env_aws/environment.py b/test_decompile_env_aws/environment.py new file mode 100644 index 00000000..c546cece --- /dev/null +++ b/test_decompile_env_aws/environment.py @@ -0,0 +1,65 @@ +# THIS FILE IS AUTOMATICALLY GENERATED. +# Disclaimer: Please test this file before using in production. +""" +Generated environment DSL (.py) +""" + +from calm.dsl.builtins import * + +# Secret Variables +BP_CRED_test_creds_PASSWORD = read_local_file("BP_CRED_test_creds_PASSWORD") +BP_CRED_test_creds = basic_cred( + "admin", + BP_CRED_test_creds_PASSWORD, + name="test_creds", + type="PASSWORD", +) + + +class Untitled(Substrate): + + os_type = "Windows" + provider_type = "AWS_VM" + provider_spec = read_provider_spec( + os.path.join("specs", "Untitled_provider_spec.yaml") + ) + + readiness_probe = readiness_probe( + connection_type="POWERSHELL", + disabled=False, + retries="5", + connection_port=5985, + address="@@{public_ip_address}@@", + delay_secs="60", + ) + + +class Untitled_1(Substrate): + + name = "Untitled" + + os_type = "Linux" + provider_type = "AWS_VM" + provider_spec = read_provider_spec( + os.path.join("specs", "Untitled_1_provider_spec.yaml") + ) + + readiness_probe = readiness_probe( + connection_type="SSH", + disabled=True, + retries="5", + connection_port=22, + address="@@{public_ip_address}@@", + delay_secs="60", + ) + + +class ENV_test_decompile_env_aws(Environment): + substrates = [Untitled, Untitled_1] + credentials = [BP_CRED_test_creds] + + providers = [ + Provider.Aws( + account=Ref.Account("primary"), + ), + ] diff --git a/test_decompile_env_aws/specs/Untitled_1_provider_spec.yaml b/test_decompile_env_aws/specs/Untitled_1_provider_spec.yaml new file mode 100644 index 00000000..38bdbf53 --- /dev/null +++ b/test_decompile_env_aws/specs/Untitled_1_provider_spec.yaml @@ -0,0 +1,34 @@ +availability_zone_reference: null +backup_policy: null +cluster_reference: null +name: vm-@@{calm_array_index}@@-@@{calm_time}@@ +resources: + account_uuid: e8c72001-e53b-f749-7254-e84f4abba73f + associate_public_ip_address: true + availability_zone: us-east-1a + block_device_map: + data_disk_list: [] + root_disk: + delete_on_termination: true + device_name: /dev/sda1 + iops: 100 + size_gb: 8 + snapshot_id: '' + type: '' + volume_type: GP2 + type: '' + image_id: ami-021d9d94f93a07a43 + instance_initiated_shutdown_behavior: '' + instance_profile_name: aws-controltower-AdministratorExecutionRole + instance_type: t3a.small + key_name: piu + private_ip_address: '' + region: us-east-1 + security_group_list: [] + state: RUNNING + subnet_id: subnet-9a9ea2b0 + tag_list: [] + type: '' + user_data: '' + vpc_id: vpc-dcd149bb +type: PROVISION_AWS_VM diff --git a/test_decompile_env_aws/specs/Untitled_provider_spec.yaml b/test_decompile_env_aws/specs/Untitled_provider_spec.yaml new file mode 100644 index 00000000..26cfb527 --- /dev/null +++ b/test_decompile_env_aws/specs/Untitled_provider_spec.yaml @@ -0,0 +1,34 @@ +availability_zone_reference: null +backup_policy: null +cluster_reference: null +name: vm-@@{calm_array_index}@@-@@{calm_time}@@ +resources: + account_uuid: e8c72001-e53b-f749-7254-e84f4abba73f + associate_public_ip_address: true + availability_zone: '' + block_device_map: + data_disk_list: [] + root_disk: + delete_on_termination: true + device_name: '' + iops: 100 + size_gb: 8 + snapshot_id: '' + type: '' + volume_type: GP2 + type: '' + image_id: '' + instance_initiated_shutdown_behavior: '' + instance_profile_name: '' + instance_type: '' + key_name: '' 
+ private_ip_address: '' + region: '' + security_group_list: [] + state: RUNNING + subnet_id: '' + tag_list: [] + type: '' + user_data: '' + vpc_id: '' +type: PROVISION_AWS_VM diff --git a/test_decompile_env_gcp/environment.py b/test_decompile_env_gcp/environment.py new file mode 100644 index 00000000..0195226a --- /dev/null +++ b/test_decompile_env_gcp/environment.py @@ -0,0 +1,65 @@ +# THIS FILE IS AUTOMATICALLY GENERATED. +# Disclaimer: Please test this file before using in production. +""" +Generated environment DSL (.py) +""" + +from calm.dsl.builtins import * + +# Secret Variables +BP_CRED_test_cred_PASSWORD = read_local_file("BP_CRED_test_cred_PASSWORD") +BP_CRED_test_cred = basic_cred( + "admin", + BP_CRED_test_cred_PASSWORD, + name="test_cred", + type="PASSWORD", +) + + +class Untitled(Substrate): + + os_type = "Linux" + provider_type = "GCP_VM" + provider_spec = read_provider_spec( + os.path.join("specs", "Untitled_provider_spec.yaml") + ) + + readiness_probe = readiness_probe( + connection_type="SSH", + disabled=True, + retries="5", + connection_port=22, + address="@@{platform.networkInterfaces[0].accessConfigs[0].natIP}@@", + delay_secs="60", + ) + + +class Untitled_1(Substrate): + + name = "Untitled" + + os_type = "Windows" + provider_type = "GCP_VM" + provider_spec = read_provider_spec( + os.path.join("specs", "Untitled_1_provider_spec.yaml") + ) + + readiness_probe = readiness_probe( + connection_type="POWERSHELL", + disabled=True, + retries="5", + connection_port=5985, + address="@@{platform.networkInterfaces[0].accessConfigs[0].natIP}@@", + delay_secs="60", + ) + + +class ENV_test_decompile_env_gcp(Environment): + substrates = [Untitled, Untitled_1] + credentials = [BP_CRED_test_cred] + + providers = [ + Provider.Gcp( + account=Ref.Account("GCP"), + ), + ] diff --git a/test_decompile_env_gcp/specs/Untitled_1_provider_spec.yaml b/test_decompile_env_gcp/specs/Untitled_1_provider_spec.yaml new file mode 100644 index 00000000..658d2848 --- /dev/null +++ b/test_decompile_env_gcp/specs/Untitled_1_provider_spec.yaml @@ -0,0 +1,68 @@ +resources: + account_uuid: 1423621a-99ee-35be-e6d1-8f51d6986f41 + blankDisks: [] + canIpForward: false + description: '' + disks: + - autoDelete: true + boot: true + deviceName: '' + diskEncryptionKey: {} + disk_type: PERSISTENT + initializeParams: + diskName: '' + diskSizeGb: -1 + diskType: https://www.googleapis.com/compute/v1/projects/nucalm-devopos/zones/us-central1-c/diskTypes/hyperdisk-throughput + sourceImage: https://www.googleapis.com/compute/v1/projects/nucalm-devopos/global/images/centos-7 + sourceImageEncryptionKey: {} + type: '' + interface: '' + mode: '' + source: https://www.googleapis.com/compute/v1/projects/nucalm-devopos/zones/us-central1-c/disks/bdisk2-0-231103-044630 + type: '' + guestCustomization: {} + labelFingerprint: '' + labels: [] + machineType: https://www.googleapis.com/compute/v1/projects/nucalm-devopos/zones/us-central1-c/machineTypes/a2-highgpu-1g + metadata: + fingerprint: '' + items: [] + type: '' + minCpuPlatform: '' + name: test-vm-1 + networkInterfaces: + - accessConfigs: + - config_type: ONE_TO_ONE_NAT + name: test + natIP: '' + type: '' + aliasIpRanges: [] + associatePublicIP: true + network: https://www.googleapis.com/compute/v1/projects/nucalm-devopos/global/networks/default + networkIP: '' + subnetwork: https://www.googleapis.com/compute/v1/projects/nucalm-devopos/regions/us-central1/subnetworks/default + type: '' + scheduling: + automaticRestart: true + onHostMaintenance: TERMINATE + preemptible: false + 
type: '' + serviceAccounts: + - email: 108048128720-compute@developer.gserviceaccount.com + scopes: + - https://www.googleapis.com/auth/devstorage.read_only + - https://www.googleapis.com/auth/logging.write + - https://www.googleapis.com/auth/monitoring.write + - https://www.googleapis.com/auth/servicecontrol + - https://www.googleapis.com/auth/service.management.readonly + - https://www.googleapis.com/auth/trace.append + type: '' + sshKeys: [] + tags: + fingerprint: '' + items: + - ldap + type: '' + type: '' + zone: us-central1-c +type: PROVISION_GCP_VM diff --git a/test_decompile_env_gcp/specs/Untitled_provider_spec.yaml b/test_decompile_env_gcp/specs/Untitled_provider_spec.yaml new file mode 100644 index 00000000..5a10fbe8 --- /dev/null +++ b/test_decompile_env_gcp/specs/Untitled_provider_spec.yaml @@ -0,0 +1,68 @@ +resources: + account_uuid: 1423621a-99ee-35be-e6d1-8f51d6986f41 + blankDisks: [] + canIpForward: false + description: '' + disks: + - autoDelete: true + boot: true + deviceName: '' + diskEncryptionKey: {} + disk_type: PERSISTENT + initializeParams: + diskName: '' + diskSizeGb: -1 + diskType: https://www.googleapis.com/compute/v1/projects/nucalm-devopos/zones/us-central1-c/diskTypes/hyperdisk-extreme + sourceImage: https://www.googleapis.com/compute/v1/projects/nucalm-devopos/global/snapshots/postgres-image + sourceImageEncryptionKey: {} + type: '' + interface: '' + mode: '' + source: https://www.googleapis.com/compute/v1/projects/nucalm-devopos/zones/us-central1-c/disks/bdisk-delete-0-54a65a75 + type: '' + guestCustomization: {} + labelFingerprint: '' + labels: [] + machineType: https://www.googleapis.com/compute/v1/projects/nucalm-devopos/zones/us-central1-c/machineTypes/a2-highgpu-1g + metadata: + fingerprint: '' + items: [] + type: '' + minCpuPlatform: '' + name: test-vm-2 + networkInterfaces: + - accessConfigs: + - config_type: ONE_TO_ONE_NAT + name: test + natIP: '' + type: '' + aliasIpRanges: [] + associatePublicIP: true + network: https://www.googleapis.com/compute/v1/projects/nucalm-devopos/global/networks/default + networkIP: '' + subnetwork: https://www.googleapis.com/compute/v1/projects/nucalm-devopos/regions/us-central1/subnetworks/default + type: '' + scheduling: + automaticRestart: true + onHostMaintenance: TERMINATE + preemptible: false + type: '' + serviceAccounts: + - email: 108048128720-compute@developer.gserviceaccount.com + scopes: + - https://www.googleapis.com/auth/devstorage.read_only + - https://www.googleapis.com/auth/logging.write + - https://www.googleapis.com/auth/monitoring.write + - https://www.googleapis.com/auth/servicecontrol + - https://www.googleapis.com/auth/service.management.readonly + - https://www.googleapis.com/auth/trace.append + type: '' + sshKeys: [] + tags: + fingerprint: '' + items: + - ldap + type: '' + type: '' + zone: us-central1-c +type: PROVISION_GCP_VM diff --git a/test_decompile_env_vmware/environment.py b/test_decompile_env_vmware/environment.py new file mode 100644 index 00000000..9bebef90 --- /dev/null +++ b/test_decompile_env_vmware/environment.py @@ -0,0 +1,45 @@ +# THIS FILE IS AUTOMATICALLY GENERATED. +# Disclaimer: Please test this file before using in production. 
+""" +Generated environment DSL (.py) +""" + +from calm.dsl.builtins import * + +# Secret Variables +BP_CRED_test_creds_PASSWORD = read_local_file("BP_CRED_test_creds_PASSWORD") +BP_CRED_test_creds = basic_cred( + "admin", + BP_CRED_test_creds_PASSWORD, + name="test_creds", + type="PASSWORD", +) + + +class Untitled(Substrate): + + os_type = "Linux" + provider_type = "VMWARE_VM" + provider_spec = read_vmw_spec(os.path.join("specs", "Untitled_provider_spec.yaml")) + provider_spec_editables = read_spec( + os.path.join("specs", "Untitled_create_spec_editables.yaml") + ) + readiness_probe = readiness_probe( + connection_type="SSH", + disabled=True, + retries="5", + connection_port=22, + address="@@{platform.ipAddressList[0]}@@", + delay_secs="60", + ) + + +class ENV_test_decompile_env_vmware(Environment): + substrates = [Untitled] + credentials = [BP_CRED_test_creds] + + providers = [ + Provider.Vmware( + account=Ref.Account("vmware_second"), + ), + ] diff --git a/test_decompile_env_vmware/specs/Untitled_create_spec_editables.yaml b/test_decompile_env_vmware/specs/Untitled_create_spec_editables.yaml new file mode 100644 index 00000000..5b7a6dfb --- /dev/null +++ b/test_decompile_env_vmware/specs/Untitled_create_spec_editables.yaml @@ -0,0 +1 @@ +resources: {} diff --git a/test_decompile_env_vmware/specs/Untitled_provider_spec.yaml b/test_decompile_env_vmware/specs/Untitled_provider_spec.yaml new file mode 100644 index 00000000..84de38a3 --- /dev/null +++ b/test_decompile_env_vmware/specs/Untitled_provider_spec.yaml @@ -0,0 +1,77 @@ +clone_is_template: false +cluster: '' +compute_drs_mode: false +datastore: ds:///vmfs/volumes/210932e4-ce99bf5c/ +drs_mode: false +folder: null +host: 00000000-0000-0000-0000-0cc47ac3fcb0 +library: null +name: vm-@@{calm_array_index}@@-@@{calm_time}@@ +resources: + account_uuid: 0b323c83-a649-7265-0176-14e3ae109f15 + controller_list: [] + cpu_hot_add: false + disk_list: [] + guest_customization: + cloud_init: '' + customization_name: '' + customization_type: GUEST_OS_LINUX + type: '' + memory_hot_plug: false + memory_size_mib: 2048 + nic_list: [] + num_sockets: 2 + num_vcpus_per_socket: 1 + power_state: poweron + tag_list: [] + template_controller_list: + - bus_sharing: noSharing + controller_type: VirtualLsiLogicSASController + is_deleted: false + key: 1000 + type: '' + template_disk_list: + - adapter_type: IDE + controller_key: 200 + device_slot: 0 + disk_mode: persistent + disk_size_mb: -1 + disk_type: cdrom + is_deleted: false + iso_path: '' + key: 3000 + location: '' + type: '' + - adapter_type: IDE + controller_key: 200 + device_slot: 1 + disk_mode: persistent + disk_size_mb: -1 + disk_type: cdrom + is_deleted: false + iso_path: '' + key: 3001 + location: '' + type: '' + - adapter_type: SCSI + controller_key: 1000 + device_slot: 0 + disk_mode: persistent + disk_size_mb: 40960 + disk_type: disk + is_deleted: false + iso_path: '' + key: 2000 + location: ds:///vmfs/volumes/99c8246a-966b6b35/ + type: '' + template_nic_list: + - is_deleted: false + key: 4000 + net_name: key-vim.host.PortGroup-Backplane Network + nic_type: e1000 + type: '' + type: '' +storage_drs_mode: false +storage_pod: '' +template: 503dbadd-2c28-deaf-201d-f1882d69a2fb +type: PROVISION_VMWARE_VM diff --git a/tests/unit/jsons/environment.json b/tests/unit/jsons/environment.json new file mode 100644 index 00000000..73705835 --- /dev/null +++ b/tests/unit/jsons/environment.json @@ -0,0 +1,242 @@ +{ + "status": { + "description": "", + "uuid": "9ebe0d43-dab6-76c0-cf28-662d0c9134f2", + "state": 
"ACTIVE", + "message_list": [], + "resources": { + "infra_inclusion_list": [ + { + "subnet_references": [ + { "uuid": "7829d1ee-8523-4b24-b14b-173a01fe6e6f" } + ], + "type": "nutanix_pc", + "cluster_references": [ + { "uuid": "00060786-eb2b-206b-1fc3-ac1f6b6029c1" } + ], + "account_reference": { + "kind": "account", + "name": "NTNX_LOCAL_AZ", + "uuid": "d267012b-3c2b-40a1-b457-b682e79c26ad" + }, + "default_subnet_reference": { + "uuid": "7829d1ee-8523-4b24-b14b-173a01fe6e6f" + } + } + ], + "substrate_definition_list": [ + { + "description": "", + "action_list": [], + "message_list": [], + "uuid": "eb13dac3-a010-dc36-ec6b-e6246bd93020", + "state": "ACTIVE", + "readiness_probe": { + "connection_type": "SSH", + "retries": "5", + "connection_protocol": "", + "disable_readiness_probe": true, + "address": "", + "delay_secs": "0", + "connection_port": 22 + }, + "editables": {}, + "os_type": "Linux", + "type": "AHV_VM", + "create_spec": { + "name": "vm-@@{calm_array_index}@@-@@{calm_time}@@", + "categories": {}, + "availability_zone_reference": null, + "backup_policy": null, + "type": "", + "cluster_reference": { + "kind": "cluster", + "type": "", + "name": "auto_cluster_prod_4faf4699cdea", + "uuid": "00060786-eb2b-206b-1fc3-ac1f6b6029c1" + }, + "resources": { + "nic_list": [], + "parent_reference": null, + "guest_tools": null, + "num_vcpus_per_socket": 1, + "num_sockets": 1, + "serial_port_list": [], + "gpu_list": [], + "memory_size_mib": 1024, + "power_state": "ON", + "hardware_clock_timezone": "", + "guest_customization": null, + "type": "", + "account_uuid": "923d1552-07ef-44c5-8322-4b5bb6d23948", + "boot_config": { + "boot_device": { + "type": "", + "disk_address": { + "adapter_type": "SCSI", + "device_index": 0, + "type": "" + } + }, + "type": "", + "boot_type": "LEGACY", + "mac_address": "" + }, + "disk_list": [ + { + "data_source_reference": { + "kind": "image", + "type": "", + "name": "Centos7HadoopMaster", + "uuid": "9c7fa711-cb07-4c6d-a96f-640012454864" + }, + "type": "", + "disk_size_mib": 0, + "volume_group_reference": null, + "device_properties": { + "type": "", + "device_type": "DISK", + "disk_address": { + "adapter_type": "SCSI", + "device_index": 0, + "type": "" + } + } + } + ] + } + }, + "variable_list": [], + "name": "Untitled" + } + ], + "credential_definition_list": [ + { + "username": "admin", + "description": "", + "state": "ACTIVE", + "message_list": [], + "uuid": "6e95d39a-55d8-7674-de31-c714e2f1f9b7", + "secret": { + "attrs": { "is_secret_modified": false, "secret_reference": {} } + }, + "editables": {}, + "cred_class": "static", + "type": "PASSWORD", + "name": "test_creds" + } + ] + }, + "name": "test_decompile_env" + }, + "spec": { + "name": "test_decompile_env", + "resources": { + "infra_inclusion_list": [ + { + "cluster_references": [ + { "uuid": "00060786-eb2b-206b-1fc3-ac1f6b6029c1" } + ], + "default_subnet_reference": { + "uuid": "7829d1ee-8523-4b24-b14b-173a01fe6e6f" + }, + "account_reference": { + "kind": "account", + "uuid": "d267012b-3c2b-40a1-b457-b682e79c26ad" + }, + "vpc_references": [], + "subnet_references": [ + { "uuid": "7829d1ee-8523-4b24-b14b-173a01fe6e6f" } + ], + "type": "nutanix_pc" + } + ], + "substrate_definition_list": [ + { + "uuid": "eb13dac3-a010-dc36-ec6b-e6246bd93020", + "action_list": [], + "readiness_probe": { + "connection_type": "SSH", + "retries": "5", + "connection_protocol": "", + "connection_port": 22, + "disable_readiness_probe": true + }, + "os_type": "Linux", + "type": "AHV_VM", + "create_spec": { + "resources": { + "nic_list": 
[], + "power_state": "ON", + "num_vcpus_per_socket": 1, + "num_sockets": 1, + "gpu_list": [], + "memory_size_mib": 1024, + "boot_config": { + "boot_device": { + "disk_address": { "device_index": 0, "adapter_type": "SCSI" } + }, + "boot_type": "LEGACY" + }, + "account_uuid": "923d1552-07ef-44c5-8322-4b5bb6d23948", + "disk_list": [ + { + "data_source_reference": { + "kind": "image", + "name": "Centos7HadoopMaster", + "uuid": "9c7fa711-cb07-4c6d-a96f-640012454864" + }, + "device_properties": { + "disk_address": { + "device_index": 0, + "adapter_type": "SCSI" + }, + "device_type": "DISK" + } + } + ] + }, + "name": "vm-@@{calm_array_index}@@-@@{calm_time}@@", + "categories": {}, + "cluster_reference": { + "name": "auto_cluster_prod_4faf4699cdea", + "uuid": "00060786-eb2b-206b-1fc3-ac1f6b6029c1" + } + }, + "variable_list": [], + "name": "Untitled" + } + ], + "credential_definition_list": [ + { + "username": "admin", + "uuid": "6e95d39a-55d8-7674-de31-c714e2f1f9b7", + "secret": { "attrs": { "is_secret_modified": false } }, + "cred_class": "static", + "type": "PASSWORD", + "name": "test_creds" + } + ] + }, + "description": "" + }, + "api_version": "3.0", + "metadata": { + "owner_reference": { + "kind": "user", + "uuid": "00000000-0000-0000-0000-000000000000", + "name": "admin" + }, + "kind": "environment", + "uuid": "9ebe0d43-dab6-76c0-cf28-662d0c9134f2", + "project_reference": { + "kind": "project", + "name": "test_dsl_decompile", + "uuid": "08c85b81-65fd-4b5d-b674-6ace2b885280" + }, + "spec_version": 2, + "name": "test_decompile_env", + "__name__": "test_decompile_env", + "__doc__": "" + } +} diff --git a/tests/unit/test_environment_decompile.py b/tests/unit/test_environment_decompile.py new file mode 100644 index 00000000..b066e183 --- /dev/null +++ b/tests/unit/test_environment_decompile.py @@ -0,0 +1,22 @@ +import os +import json +import shutil +from calm.dsl.builtins.models.environment import EnvironmentType +from calm.dsl.decompile.file_handler import init_environment_dir, get_environment_dir +from calm.dsl.decompile.environments import render_environment_template +from black import format_str, FileMode + + +def test_environment_decompile(): + _, _, _, scripts_dir = init_environment_dir("./tests/test_environment_decompile") + dir_path = os.path.dirname(os.path.realpath(__file__)) + file_path = os.path.join(dir_path, "./jsons/environment.json") + environment_dict = json.loads(open(file_path).read()) + cls = EnvironmentType.decompile(environment_dict["status"]["resources"]) + data = render_environment_template(cls) + data = format_str(data, mode=FileMode()) + assert "substrates = [Untitled]" in data, "expected substrate to be decompiled" + assert ( + 'account=Ref.Account("NTNX_LOCAL_AZ"),' in data + ), "expected provider account to be decompiled" + shutil.rmtree(scripts_dir)
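
For quick reference, the `calm decompile environment` command introduced by this change can be invoked as sketched below. The flags (-n/--name, -p/--project, -f/--file, -d/--dir) come from the CLI definition in this patch; the environment name, project name, JSON file, and output directory shown here are illustrative placeholders, not values taken from the patch.

```sh
# Decompile an environment that exists on the server
# (output defaults to ./<EnvironmentName>/ when -d is not given)
calm decompile environment -n MyEnvironment -p MyProject

# Or decompile a locally saved environment payload (JSON) into a chosen directory
calm decompile environment -f environment.json -d ./decompiled_env
```

When decompiling from a file, the project flag is not needed; passing both -n and -f, or neither, is rejected by the command.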