Feature 11073: Enhanced OSM declarative modelling for applications. App as a first-class citizen
Change-Id: I6b750f4d862692ab885e98afe3771ba817dd6535
Signed-off-by: garciadeblas <gerardo.garciadeblas@telefonica.com>
diff --git a/osm_lcm/odu_libs/app.py b/osm_lcm/odu_libs/app.py
new file mode 100644
index 0000000..b23fb2a
--- /dev/null
+++ b/osm_lcm/odu_libs/app.py
@@ -0,0 +1,303 @@
+#######################################################################################
+# Copyright ETSI Contributors and Others.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#######################################################################################
+
+
+import yaml
+import tempfile
+import os
+
+
# Map from the profile_type identifier stored in the app DB record to the
# profile-type name used by the launcher workflow (passed as PROFILE_TYPE
# and as the workflow's profile_type parameter in launch_app).
MAP_PROFILE = {
    "infra_controller_profiles": "infra-controller-profiles",
    "infra_config_profiles": "infra-config-profiles",
    "resource_profiles": "managed-resources",
    "app_profiles": "app-profiles",
}
+
+
def merge_model(base, override):
    """Merge an *override* app model into a deep copy of *base*.

    Both arguments are app-model dictionaries shaped like
    ``{"spec": {..., "ksus": [{"name": ..., "patterns": [...]}, ...]}}``.
    Scalar keys of ``override["spec"]`` replace those of the base spec,
    while the nested named lists (``ksus`` -> ``patterns`` -> ``bricks`` ->
    ``hrset-values``) are merged element-wise by their ``name`` key:
    matching elements are merged recursively and unmatched override
    elements are appended.

    Unlike a shallow copy, the deep copy guarantees *base* is never
    mutated.  Missing intermediate keys in the base model are created on
    demand instead of raising ``KeyError``.

    :param base: base app model (e.g. the SW-catalog model)
    :param override: partial model whose values take precedence
    :return: a new, merged model dictionary
    """
    import copy

    merged = copy.deepcopy(base)
    merged_spec = merged.setdefault("spec", {})
    override_spec = override.get("spec", {})
    # Scalar (non-"ksus") spec keys: override wins.
    for key, value in override_spec.items():
        if key != "ksus":
            merged_spec[key] = value
    _merge_named_list(merged_spec, override_spec, 0)
    return merged


# Nesting chain of the named lists inside an app model spec.  Each level is
# a list of dicts matched by their "name" key; elements of the last level
# ("hrset-values") are plain dicts merged with a shallow update.
_MODEL_LIST_KEYS = ("ksus", "patterns", "bricks", "hrset-values")


def _merge_named_item(base_item, override_item, level):
    """Merge one named element of ``_MODEL_LIST_KEYS[level]`` into its base twin."""
    if level == len(_MODEL_LIST_KEYS) - 1:
        # "hrset-values" leaf: plain dict update, override wins.
        base_item.update(override_item)
        return
    child_key = _MODEL_LIST_KEYS[level + 1]
    # Scalar keys of this element: override wins; the child list is merged below.
    for key, value in override_item.items():
        if key != child_key:
            base_item[key] = value
    _merge_named_list(base_item, override_item, level + 1)


def _merge_named_list(base_parent, override_parent, level):
    """Merge the ``_MODEL_LIST_KEYS[level]`` list of *override_parent* into the
    same list of *base_parent*, matching elements by their "name" key."""
    key = _MODEL_LIST_KEYS[level]
    override_items = override_parent.get(key, [])
    if not override_items:
        return
    base_list = base_parent.setdefault(key, [])
    for override_item in override_items:
        for base_item in base_list:
            if base_item.get("name") == override_item.get("name"):
                _merge_named_item(base_item, override_item, level)
                break
        else:
            # No element with the same name: append the override element as-is.
            base_list.append(override_item)
+
+
async def launch_app(self, op_id, op_params, workflow_content, operation_type):
    """Render and submit the Argo workflow that creates/updates/deletes an app.

    Builds the app instantiation model (the SW-catalog model merged with the
    model from the operation params), writes the model and the clear
    parameters into a temporary PVC, stores the secret parameters in a
    Kubernetes secret, then renders the "launcher-app.j2" template and
    submits the resulting Workflow to the "osm-workflows" namespace.

    :param op_id: ID of the LCM operation
    :param op_params: operation parameters; may carry "model", "params" and
        "secret_params" overrides
    :param workflow_content: dict with the "app" record, its "profile", the
        SW-catalog "model" and optionally "project_name"
    :param operation_type: one of "create", "update" or "delete"
    :return: (True, workflow_name, workflow_resources) on success, or
        (False, error_message, None) on failure
    """
    self.logger.info(
        f"launch_app Enter. Operation {op_id}. Operation Type: {operation_type}"
    )
    # self.logger.debug(f"Operation Params: {op_params}")
    # self.logger.debug(f"Content: {workflow_content}")

    db_app = workflow_content["app"]
    # NOTE(review): db_profile may be None if "profile" is absent; the .get()
    # calls below would then raise AttributeError - confirm callers always
    # provide it.
    db_profile = workflow_content.get("profile")

    profile_t = db_app.get("profile_type")
    # Raises KeyError if profile_type is not one of the MAP_PROFILE keys.
    profile_type = MAP_PROFILE[profile_t]
    profile_name = db_profile.get("git_name").lower()
    app_name = db_app["git_name"].lower()
    # "$environment" is expanded later, inside the workflow container.
    app_command = f"app {operation_type} $environment"
    age_public_key = db_profile.get("age_pubkey")

    sw_catalog_model = workflow_content.get("model")
    self.logger.debug(f"SW catalog model: {sw_catalog_model}")

    # Update the app model, extending it also with the model from op_params.
    # On "update", fall back to the model stored in the app DB record.
    if operation_type == "update":
        model = op_params.get("model", db_app.get("app_model", {}))
    else:
        model = op_params.get("model", {})
    app_model = merge_model(sw_catalog_model, model)

    app_model["kind"] = "AppInstantiation"
    app_model["metadata"]["name"] = app_name
    # Propagate the profile age public key down to every brick.
    for ksu in app_model.get("spec", {}).get("ksus", []):
        for pattern in ksu.get("patterns", []):
            for brick in pattern.get("bricks", []):
                brick["public-age-key"] = age_public_key
    self.logger.debug(f"App model: {app_model}")

    # Clear (non-secret) parameters; on "update" fall back to the stored ones.
    if operation_type == "update":
        params = op_params.get("params", db_app.get("params", {}))
    else:
        params = op_params.get("params", {})
    params["PROFILE_TYPE"] = profile_type
    params["PROFILE_NAME"] = profile_name
    params["APPNAME"] = app_name
    self.logger.debug(f"Params: {params}")

    # Secret parameters; on "update" fall back to the stored ones.
    if operation_type == "update":
        secret_params = op_params.get("secret_params", db_app.get("secret_params", {}))
    else:
        secret_params = op_params.get("secret_params", {})
    self.logger.debug(f"Secret Params: {secret_params}")

    # Create temporary folder for the app model and the parameters
    temp_dir = tempfile.mkdtemp(prefix=f"app-{operation_type}-{op_id}-")
    self.logger.debug(f"Temporary dir created: {temp_dir}")
    with open(f"{temp_dir}/app_instance_model.yaml", "w") as f:
        yaml.safe_dump(
            app_model, f, indent=2, default_flow_style=False, sort_keys=False
        )

    os.makedirs(f"{temp_dir}/parameters/clear", exist_ok=True)
    with open(f"{temp_dir}/parameters/clear/environment.yaml", "w") as f:
        yaml.safe_dump(params, f, indent=2, default_flow_style=False, sort_keys=False)

    # Create PVC and copy app model and parameters to PVC; the launcher
    # template expects exactly these destination paths inside the volume.
    app_model_pvc = f"temp-pvc-app-{op_id}"
    src_files = [
        f"{temp_dir}/app_instance_model.yaml",
        f"{temp_dir}/parameters/clear/environment.yaml",
    ]
    dest_files = [
        "app_instance_model.yaml",
        "parameters/clear/environment.yaml",
    ]
    self.logger.debug(
        f"Copying files to PVC {app_model_pvc}: {src_files} -> {dest_files}"
    )
    await self._kubectl.create_pvc_with_content(
        name=app_model_pvc,
        namespace="osm-workflows",
        src_files=src_files,
        dest_files=dest_files,
    )

    # Create secret with secret_params
    secret_name = f"secret-app-{op_id}"
    secret_namespace = "osm-workflows"
    secret_key = "environment.yaml"
    secret_value = yaml.safe_dump(
        secret_params, indent=2, default_flow_style=False, sort_keys=False
    )
    try:
        self.logger.debug(f"Testing kubectl: {self._kubectl}")
        self.logger.debug(
            f"Testing kubectl configuration: {self._kubectl.configuration}"
        )
        self.logger.debug(
            f"Testing kubectl configuration Host: {self._kubectl.configuration.host}"
        )
        self.logger.debug(
            f"Creating secret {secret_name} in namespace {secret_namespace}"
        )
        await self.create_secret(
            secret_name,
            secret_namespace,
            secret_key,
            secret_value,
        )
    except Exception as e:
        self.logger.info(
            f"Cannot create secret {secret_name} in namespace {secret_namespace}: {e}"
        )
        # Third element (resources) is None on failure, consistent with the
        # (ok, message, resources) convention used by the other odu_libs
        # launchers (cluster_mgmt, ksu, nodegroup, oka).
        return (
            False,
            f"Cannot create secret {secret_name} in namespace {secret_namespace}: {e}",
            None,
        )

    # Create workflow to launch the app
    workflow_template = "launcher-app.j2"
    workflow_name = f"{operation_type}-app-{op_id}"
    # Additional params for the workflow
    osm_project_name = workflow_content.get("project_name", "osm_admin")

    # Render workflow
    manifest = self.render_jinja_template(
        workflow_template,
        output_file=None,
        workflow_name=workflow_name,
        app_command=app_command,
        app_model_pvc=app_model_pvc,
        app_secret_name=secret_name,
        git_fleet_url=self._repo_fleet_url,
        git_sw_catalogs_url=self._repo_sw_catalogs_url,
        app_name=app_name,
        profile_name=profile_name,
        profile_type=profile_type,
        osm_project_name=osm_project_name,
        workflow_debug=self._workflow_debug,
        workflow_dry_run=self._workflow_dry_run,
    )
    self.logger.debug(f"Workflow manifest: {manifest}")

    # Submit workflow
    # NOTE(review): the other kubectl helpers above are awaited; if
    # create_generic_object is a coroutine this call is missing an "await"
    # and the workflow would never be submitted - confirm.
    self._kubectl.create_generic_object(
        namespace="osm-workflows",
        manifest_dict=yaml.safe_load(manifest),
        api_group="argoproj.io",
        api_plural="workflows",
        api_version="v1alpha1",
    )
    # Resources to be stored alongside the operation for later cleanup/update.
    workflow_resources = {
        "app_model": app_model,
        "secret_params": secret_params,
        "params": params,
    }
    return True, workflow_name, workflow_resources
+
+
async def create_app(self, op_id, op_params, content):
    """Trigger the app-creation workflow; thin wrapper over launch_app."""
    self.logger.info(f"create_app Enter. Operation {op_id}")
    result = await self.launch_app(op_id, op_params, content, "create")
    return result
+
+
async def update_app(self, op_id, op_params, content):
    """Trigger the app-update workflow; thin wrapper over launch_app."""
    self.logger.info(f"update_app Enter. Operation {op_id}")
    result = await self.launch_app(op_id, op_params, content, "update")
    return result
+
+
async def delete_app(self, op_id, op_params, content):
    """Trigger the app-deletion workflow; thin wrapper over launch_app."""
    self.logger.info(f"delete_app Enter. Operation {op_id}")
    result = await self.launch_app(op_id, op_params, content, "delete")
    return result
+
+
async def clean_items_app_launch(self, op_id, op_params, workflow_content):
    """Remove the temporary Secret and PVC created for an app launch workflow.

    :param op_id: ID of the LCM operation whose temporary items are removed
    :param op_params: operation parameters (unused, kept for interface parity)
    :param workflow_content: workflow content (unused, kept for interface parity)
    :return: (True, "OK") on success, (False, error_message) otherwise
    """
    self.logger.info(f"clean_items_app_launch Enter. Operation {op_id}")
    namespace = "osm-workflows"
    items_to_clean = {
        "secrets": [{"name": f"secret-app-{op_id}", "namespace": namespace}],
        "pvcs": [{"name": f"temp-pvc-app-{op_id}", "namespace": namespace}],
    }
    try:
        await self.clean_items(items_to_clean)
    except Exception as e:
        return False, f"Error while cleaning items: {e}"
    return True, "OK"
diff --git a/osm_lcm/odu_libs/cluster_mgmt.py b/osm_lcm/odu_libs/cluster_mgmt.py
index 7f35c9d..9c0cbaf 100644
--- a/osm_lcm/odu_libs/cluster_mgmt.py
+++ b/osm_lcm/odu_libs/cluster_mgmt.py
@@ -73,7 +73,7 @@
)
except Exception as e:
self.logger.info(f"Cannot create secret {secret_name}: {e}")
- return False, f"Cannot create secret {secret_name}: {e}"
+ return False, f"Cannot create secret {secret_name}: {e}", None
# Additional params for the workflow
cluster_kustomization_name = cluster_name
@@ -119,7 +119,7 @@
)
except Exception as e:
self.logger.info(f"Cannot create configmap {configmap_name}: {e}")
- return False, f"Cannot create configmap {configmap_name}: {e}"
+ return False, f"Cannot create configmap {configmap_name}: {e}", None
# Render workflow
# workflow_kwargs = {
@@ -174,7 +174,7 @@
api_plural="workflows",
api_version="v1alpha1",
)
- return True, workflow_name
+ return True, workflow_name, None
async def update_cluster(self, op_id, op_params, content):
@@ -215,7 +215,7 @@
)
except Exception as e:
self.logger.info(f"Cannot create secret {secret_name}: {e}")
- return False, f"Cannot create secret {secret_name}: {e}"
+ return False, f"Cannot create secret {secret_name}: {e}", None
# Additional params for the workflow
cluster_kustomization_name = cluster_name
@@ -274,7 +274,7 @@
api_plural="workflows",
api_version="v1alpha1",
)
- return True, workflow_name
+ return True, workflow_name, None
async def delete_cluster(self, op_id, op_params, content):
@@ -320,7 +320,7 @@
api_plural="workflows",
api_version="v1alpha1",
)
- return True, workflow_name
+ return True, workflow_name, None
async def register_cluster(self, op_id, op_params, content):
@@ -364,6 +364,7 @@
return (
False,
f"Cannot create secret {secret_name} in namespace {secret_namespace}: {e}",
+ None,
)
# Create secret with kubeconfig
@@ -394,6 +395,7 @@
return (
False,
f"Cannot create secret {secret_name} in namespace {secret_namespace}: {e}",
+ None,
)
# Additional params for the workflow
@@ -441,7 +443,7 @@
api_plural="workflows",
api_version="v1alpha1",
)
- return True, workflow_name
+ return True, workflow_name, None
async def deregister_cluster(self, op_id, op_params, content):
@@ -486,7 +488,7 @@
api_plural="workflows",
api_version="v1alpha1",
)
- return True, workflow_name
+ return True, workflow_name, None
async def get_cluster_credentials(self, db_cluster):
diff --git a/osm_lcm/odu_libs/ksu.py b/osm_lcm/odu_libs/ksu.py
index e7c2f82..c7fd50d 100644
--- a/osm_lcm/odu_libs/ksu.py
+++ b/osm_lcm/odu_libs/ksu.py
@@ -177,7 +177,7 @@
api_plural="workflows",
api_version="v1alpha1",
)
- return True, workflow_name
+ return True, workflow_name, None
async def update_ksus(self, op_id, op_params_list, content_list):
@@ -328,7 +328,7 @@
api_plural="workflows",
api_version="v1alpha1",
)
- return True, workflow_name
+ return True, workflow_name, None
async def delete_ksus(self, op_id, op_params_list, content_list):
@@ -374,21 +374,21 @@
api_plural="workflows",
api_version="v1alpha1",
)
- return True, workflow_name
+ return True, workflow_name, None
async def clone_ksu(self, op_id, op_params, content):
self.logger.info(f"clone_ksu Enter. Operation {op_id}. Params: {op_params}")
# self.logger.debug(f"Content: {content}")
workflow_name = f"clone-ksu-{content['_id']}"
- return True, workflow_name
+ return True, workflow_name, None
async def move_ksu(self, op_id, op_params, content):
self.logger.info(f"move_ksu Enter. Operation {op_id}. Params: {op_params}")
# self.logger.debug(f"Content: {content}")
workflow_name = f"move-ksu-{content['_id']}"
- return True, workflow_name
+ return True, workflow_name, None
async def clean_items_ksu_create(self, op_id, op_params_list, content_list):
diff --git a/osm_lcm/odu_libs/nodegroup.py b/osm_lcm/odu_libs/nodegroup.py
index ff40f9c..d703858 100644
--- a/osm_lcm/odu_libs/nodegroup.py
+++ b/osm_lcm/odu_libs/nodegroup.py
@@ -60,7 +60,7 @@
)
except Exception as e:
self.logger.info(f"Cannot create secret {secret_name}: {e}")
- return False, f"Cannot create secret {secret_name}: {e}"
+ return False, f"Cannot create secret {secret_name}: {e}", None
private_subnet = op_params.get("private_subnet", [])
public_subnet = op_params.get("public_subnet", [])
@@ -78,7 +78,7 @@
)
except Exception as e:
self.logger.info(f"Cannot create configmap {configmap_name}: {e}")
- return False, f"Cannot create configmap {configmap_name}: {e}"
+ return False, f"Cannot create configmap {configmap_name}: {e}", None
# Additional params for the workflow
nodegroup_kustomization_name = nodegroup_name
@@ -135,7 +135,7 @@
api_plural="workflows",
api_version="v1alpha1",
)
- return True, workflow_name
+ return True, workflow_name, None
async def scale_nodegroup(self, op_id, op_params, content):
@@ -170,7 +170,7 @@
)
except Exception as e:
self.logger.info(f"Cannot create secret {secret_name}: {e}")
- return False, f"Cannot create secret {secret_name}: {e}"
+ return False, f"Cannot create secret {secret_name}: {e}", None
# Additional params for the workflow
nodegroup_kustomization_name = nodegroup_name
@@ -213,7 +213,7 @@
api_plural="workflows",
api_version="v1alpha1",
)
- return True, workflow_name
+ return True, workflow_name, None
async def delete_nodegroup(self, op_id, op_params, content):
@@ -254,7 +254,7 @@
api_plural="workflows",
api_version="v1alpha1",
)
- return True, workflow_name
+ return True, workflow_name, None
async def clean_items_nodegroup_add(self, op_id, op_params, content):
diff --git a/osm_lcm/odu_libs/oka.py b/osm_lcm/odu_libs/oka.py
index 63c4d36..564acbe 100644
--- a/osm_lcm/odu_libs/oka.py
+++ b/osm_lcm/odu_libs/oka.py
@@ -55,8 +55,8 @@
await self._kubectl.create_pvc_with_content(
name=temp_volume_name,
namespace="osm-workflows",
- src_file=f"{oka_folder}/{oka_filename}",
- dest_filename=f"{oka_name}.tar.gz",
+ src_files=[f"{oka_folder}/{oka_filename}"],
+ dest_files=[f"{oka_name}.tar.gz"],
)
# Render workflow
@@ -83,7 +83,7 @@
api_plural="workflows",
api_version="v1alpha1",
)
- return True, workflow_name
+ return True, workflow_name, None
async def update_oka(self, op_id, op_params, content):
@@ -111,8 +111,8 @@
await self._kubectl.create_pvc_with_content(
name=temp_volume_name,
namespace="osm-workflows",
- src_folder=oka_folder,
- filename=oka_filename,
+ src_files=[f"{oka_folder}/{oka_filename}"],
+ dest_files=[f"{oka_name}.tar.gz"],
)
# Render workflow
@@ -139,7 +139,7 @@
api_plural="workflows",
api_version="v1alpha1",
)
- return True, workflow_name
+ return True, workflow_name, None
async def delete_oka(self, op_id, op_params, content):
@@ -178,7 +178,7 @@
api_plural="workflows",
api_version="v1alpha1",
)
- return True, workflow_name
+ return True, workflow_name, None
async def clean_items_oka_create(self, op_id, op_params_list, content_list):
diff --git a/osm_lcm/odu_libs/templates/launcher-app.j2 b/osm_lcm/odu_libs/templates/launcher-app.j2
new file mode 100644
index 0000000..87b52d7
--- /dev/null
+++ b/osm_lcm/odu_libs/templates/launcher-app.j2
@@ -0,0 +1,87 @@
+#######################################################################################
+# Copyright ETSI Contributors and Others.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#######################################################################################
+
+apiVersion: argoproj.io/v1alpha1
+kind: Workflow
+metadata:
+ name: {{ workflow_name }}
+spec:
+ arguments:
+ parameters:
+ # Full OSM SDK command to execute
+ - name: command
+ description: |
+ Full command string to execute with the OSM SDK.
+ Examples:
+ - "app create $environment": Deploy new application instance
+ - "app update $environment": Update existing application instance
+ - "app delete $environment": Remove application instance
+ This parameter accepts any valid OSM SDK command for maximum flexibility.
+      value: "{{ app_command }}"
+ # Volume reference for application models and parameters
+ # NOTE: The PVC must be created and populated with the following structure:
+ # /model/app_instance_model.yaml
+ # /model/parameters/clear/environment.yaml
+ - name: model_volume_name
+ value: "{{ app_model_pvc }}"
+ # Secret reference for mounting sensitive parameters
+ # This secret will be mounted as a file at:
+ # /model/parameters/secret/environment.yaml
+ - name: secret_name
+ value: "{{ app_secret_name }}"
+ # Fleet repo configuration
+ - name: git_fleet_url
+ value: "{{ git_fleet_url }}"
+ - name: fleet_destination_folder
+ value: "/fleet/fleet-osm"
+ - name: git_fleet_cred_secret
+ value: fleet-repo
+ # SW-Catalogs repo configuration
+ - name: git_sw_catalogs_url
+ value: "{{ git_sw_catalogs_url }}"
+ - name: sw_catalogs_destination_folder
+ value: "/sw-catalogs/sw-catalogs-osm"
+ - name: git_sw_catalogs_cred_secret
+ value: sw-catalogs
+ # Target deployment information
+ - name: app_name
+ value: "{{ app_name }}"
+ - name: profile_name
+ value: "{{ profile_name }}"
+ - name: profile_type
+ value: "{{ profile_type }}"
+ - name: project_name
+ value: "{{ osm_project_name }}"
+ # OSM SDK container configuration
+ - name: osm_sdk_image_repository
+ value: "opensourcemano/osm-nushell-krm-functions"
+ - name: osm_sdk_image_tag
+ value: "testing-daily"
+ # Debug and dry-run flags
+ - name: debug
+ value: "{{ workflow_debug }}"
+ - name: dry_run
+ value: "{{ workflow_dry_run }}"
+
+ # Cleanup policy
+ ttlStrategy:
+ secondsAfterCompletion: 1800 # Time to live after workflow is completed
+ secondsAfterSuccess: 1800 # Time to live after workflow is successful
+ secondsAfterFailure: 1800 # Time to live after workflow fails
+
+ workflowTemplateRef:
+ name: full-app-management-wft