X-Git-Url: https://osm.etsi.org/gitweb/?p=osm%2FN2VC.git;a=blobdiff_plain;f=n2vc%2Fk8s_helm_conn.py;h=fdae32f39859784c41097f006bdba19b7140c78c;hp=9364d931e2646922c8b0aa5555df0f37406fcbff;hb=83e558987289d47f338ecd989ee0997987673f4a;hpb=1c83f2e4d061ad37ba898e114cb42e70fdee5145;ds=sidebyside

diff --git a/n2vc/k8s_helm_conn.py b/n2vc/k8s_helm_conn.py
index 9364d93..fdae32f 100644
--- a/n2vc/k8s_helm_conn.py
+++ b/n2vc/k8s_helm_conn.py
@@ -43,6 +43,7 @@ class K8sHelmConnector(K8sHelmBaseConnector):
         helm_command: str = "/usr/bin/helm",
         log: object = None,
         on_update_db=None,
+        vca_config: dict = None,
     ):
         """
         Initializes helm connector for helm v2
@@ -64,13 +65,15 @@ class K8sHelmConnector(K8sHelmBaseConnector):
             kubectl_command=kubectl_command,
             helm_command=helm_command,
             on_update_db=on_update_db,
+            vca_config=vca_config,
         )
 
         self.log.info("Initializing K8S Helm2 connector")
 
         # initialize helm client-only
         self.log.debug("Initializing helm client-only...")
-        command = "{} init --client-only".format(self._helm_command)
+        command = "{} init --client-only --stable-repo-url {}".format(
+            self._helm_command, self._stable_repo_url)
         try:
             asyncio.ensure_future(
                 self._local_async_exec(command=command, raise_exception_on_error=False)
@@ -85,6 +88,45 @@ class K8sHelmConnector(K8sHelmBaseConnector):
 
         self.log.info("K8S Helm2 connector initialized")
 
+    async def install(
+        self,
+        cluster_uuid: str,
+        kdu_model: str,
+        atomic: bool = True,
+        timeout: float = 300,
+        params: dict = None,
+        db_dict: dict = None,
+        kdu_name: str = None,
+        namespace: str = None,
+    ):
+        _, cluster_id = self._get_namespace_cluster_id(cluster_uuid)
+        self.log.debug("installing {} in cluster {}".format(kdu_model, cluster_id))
+
+        # sync local dir
+        self.fs.sync(from_path=cluster_id)
+
+        # init env, paths
+        paths, env = self._init_paths_env(
+            cluster_name=cluster_id, create_if_not_exist=True
+        )
+
+        kdu_instance = await self._install_impl(cluster_id,
+                                                kdu_model,
+                                                paths,
+                                                env,
+                                                atomic=atomic,
+                                                timeout=timeout,
+                                                params=params,
+                                                db_dict=db_dict,
+                                                kdu_name=kdu_name,
+                                                namespace=namespace)
+
+        # sync fs
+        self.fs.reverse_sync(from_path=cluster_id)
+
+        self.log.debug("Returning kdu_instance {}".format(kdu_instance))
+        return kdu_instance
+
     async def inspect_kdu(self, kdu_model: str, repo_url: str = None) -> str:
 
         self.log.debug(
@@ -222,13 +264,14 @@ class K8sHelmConnector(K8sHelmBaseConnector):
 
             command = (
                 "{} --kubeconfig={} --tiller-namespace={} --home={} --service-account {} "
-                "init"
+                " --stable-repo-url {} init"
             ).format(
                 self._helm_command,
                 paths["kube_config"],
                 namespace,
                 paths["helm_dir"],
                 self.service_account,
+                self._stable_repo_url,
             )
             _, _rc = await self._local_async_exec(
                 command=command, raise_exception_on_error=True, env=env
@@ -243,12 +286,13 @@ class K8sHelmConnector(K8sHelmBaseConnector):
             self.log.info("Initializing helm in client: {}".format(cluster_id))
             command = (
                 "{} --kubeconfig={} --tiller-namespace={} "
-                "--home={} init --client-only"
+                "--home={} init --client-only --stable-repo-url {} "
            ).format(
                 self._helm_command,
                 paths["kube_config"],
                 namespace,
                 paths["helm_dir"],
+                self._stable_repo_url,
             )
             output, _rc = await self._local_async_exec(
                 command=command, raise_exception_on_error=True, env=env
@@ -256,6 +300,19 @@ class K8sHelmConnector(K8sHelmBaseConnector):
         else:
             self.log.info("Helm client already initialized")
 
+        # remove old stable repo and add new one
+        cluster_uuid = "{}:{}".format(namespace, cluster_id)
+        repo_list = await self.repo_list(cluster_uuid)
+        for repo in repo_list:
+            if repo["name"] == "stable" and repo["url"] != self._stable_repo_url:
+                self.log.debug("Add new stable repo url: {}".format(self._stable_repo_url))
+                await self.repo_remove(cluster_uuid,
+                                       "stable")
+                await self.repo_add(cluster_uuid,
+                                    "stable",
+                                    self._stable_repo_url)
+                break
+
         return n2vc_installed_sw
 
     async def _uninstall_sw(self, cluster_id: str, namespace: str):
@@ -406,6 +463,18 @@ class K8sHelmConnector(K8sHelmBaseConnector):
 
         return data
 
+    def _get_helm_chart_repos_ids(self, cluster_uuid) -> list:
+        repo_ids = []
+        cluster_filter = {"_admin.helm-chart.id": cluster_uuid}
+        cluster = self.db.get_one("k8sclusters", cluster_filter)
+        if cluster:
+            repo_ids = cluster.get("_admin").get("helm_chart_repos") or []
+            return repo_ids
+        else:
+            raise K8sException(
+                "k8scluster with helm-id: {} not found".format(cluster_uuid)
+            )
+
     async def _is_install_completed(self, cluster_id: str, kdu_instance: str) -> bool:
 
         status = await self._status_kdu(