projects
/
osm
/
N2VC.git
/ blobdiff
commit
grep
author
committer
pickaxe
?
search:
re
summary
|
shortlog
|
log
|
commit
|
commitdiff
|
tree
raw
|
inline
| side by side
Fix bug 2099: Modify helm init command so that all parameters are at the end
[osm/N2VC.git]
/
n2vc
/
k8s_helm_conn.py
diff --git a/n2vc/k8s_helm_conn.py b/n2vc/k8s_helm_conn.py
index ff5bab7..b20ee87 100644 (file)
--- a/n2vc/k8s_helm_conn.py
+++ b/n2vc/k8s_helm_conn.py
@@ -20,6 +20,7 @@
# contact with: nfvlabs@tid.es
##
import asyncio
# contact with: nfvlabs@tid.es
##
import asyncio
+from typing import Union
import os
import yaml
import os
import yaml
@@ -131,18 +132,19 @@ class K8sHelmConnector(K8sHelmBaseConnector):
:param kwargs: Additional parameters (None yet)
:return: True if successful
"""
:param kwargs: Additional parameters (None yet)
:return: True if successful
"""
- self.log.debug("installing {} in cluster {}".format(kdu_model, cluster_uuid))
+ _, cluster_id = self._get_namespace_cluster_id(cluster_uuid)
+ self.log.debug("installing {} in cluster {}".format(kdu_model, cluster_id))
# sync local dir
# sync local dir
- self.fs.sync(from_path=cluster_uuid)
+ self.fs.sync(from_path=cluster_id)
# init env, paths
paths, env = self._init_paths_env(
# init env, paths
paths, env = self._init_paths_env(
- cluster_name=cluster_uuid, create_if_not_exist=True
+ cluster_name=cluster_id, create_if_not_exist=True
)
await self._install_impl(
)
await self._install_impl(
- cluster_uuid,
+ cluster_id,
kdu_model,
paths,
env,
kdu_model,
paths,
env,
@@ -156,7 +158,7 @@ class K8sHelmConnector(K8sHelmBaseConnector):
)
# sync fs
)
# sync fs
- self.fs.reverse_sync(from_path=cluster_uuid)
+ self.fs.reverse_sync(from_path=cluster_id)
self.log.debug("Returning kdu_instance {}".format(kdu_instance))
return True
self.log.debug("Returning kdu_instance {}".format(kdu_instance))
return True
@@ -300,8 +302,8 @@ class K8sHelmConnector(K8sHelmBaseConnector):
)
command = (
)
command = (
- "{} --kubeconfig={} --tiller-namespace={} --home={} --service-account {} "
- " {} init"
+ "{} init --kubeconfig={} --tiller-namespace={} --home={} --service-account {} "
+ " {}"
).format(
self._helm_command,
paths["kube_config"],
).format(
self._helm_command,
paths["kube_config"],
@@ -324,8 +326,8 @@ class K8sHelmConnector(K8sHelmBaseConnector):
):
self.log.info("Initializing helm in client: {}".format(cluster_id))
command = (
):
self.log.info("Initializing helm in client: {}".format(cluster_id))
command = (
- "{} --kubeconfig={} --tiller-namespace={} "
- "--home={} init --client-only {} "
+ "{} init --kubeconfig={} --tiller-namespace={} "
+ "--home={} --client-only {} "
).format(
self._helm_command,
paths["kube_config"],
).format(
self._helm_command,
paths["kube_config"],
@@ -341,13 +343,15 @@ class K8sHelmConnector(K8sHelmBaseConnector):
else:
self.log.info("Helm client already initialized")
else:
self.log.info("Helm client already initialized")
- repo_list = await self.repo_list(cluster_id)
+ # remove old stable repo and add new one
+ cluster_uuid = "{}:{}".format(namespace, cluster_id)
+ repo_list = await self.repo_list(cluster_uuid)
for repo in repo_list:
if repo["name"] == "stable" and repo["url"] != self._stable_repo_url:
self.log.debug("Add new stable repo url: {}")
for repo in repo_list:
if repo["name"] == "stable" and repo["url"] != self._stable_repo_url:
self.log.debug("Add new stable repo url: {}")
- await self.repo_remove(cluster_id, "stable")
+ await self.repo_remove(cluster_uuid, "stable")
if self._stable_repo_url:
if self._stable_repo_url:
- await self.repo_add(cluster_id, "stable", self._stable_repo_url)
+ await self.repo_add(cluster_uuid, "stable", self._stable_repo_url)
break
return n2vc_installed_sw
break
return n2vc_installed_sw
@@ -404,13 +408,8 @@ class K8sHelmConnector(K8sHelmBaseConnector):
output, _rc = await self._local_async_exec(
command=command, raise_exception_on_error=False, env=env
)
output, _rc = await self._local_async_exec(
command=command, raise_exception_on_error=False, env=env
)
- command = (
- "{} --kubeconfig={} --namespace {} delete serviceaccount/{}".format(
- self.kubectl_command,
- paths["kube_config"],
- namespace,
- self.service_account,
- )
+ command = "{} --kubeconfig={} --namespace kube-system delete serviceaccount/{}".format(
+ self.kubectl_command, paths["kube_config"], self.service_account
)
output, _rc = await self._local_async_exec(
command=command, raise_exception_on_error=False, env=env
)
output, _rc = await self._local_async_exec(
command=command, raise_exception_on_error=False, env=env
@@ -456,9 +455,9 @@ class K8sHelmConnector(K8sHelmBaseConnector):
cluster_id: str,
kdu_instance: str,
namespace: str = None,
cluster_id: str,
kdu_instance: str,
namespace: str = None,
+ yaml_format: bool = False,
show_error_log: bool = False,
show_error_log: bool = False,
- return_text: bool = False,
- ):
+ ) -> Union[str, dict]:
self.log.debug(
"status of kdu_instance: {}, namespace: {} ".format(kdu_instance, namespace)
self.log.debug(
"status of kdu_instance: {}, namespace: {} ".format(kdu_instance, namespace)
@@ -478,7 +477,7 @@ class K8sHelmConnector(K8sHelmBaseConnector):
env=env,
)
env=env,
)
- if return_text:
+ if yaml_format:
return str(output)
if rc != 0:
return str(output)
if rc != 0:
@@ -536,7 +535,7 @@ class K8sHelmConnector(K8sHelmBaseConnector):
)
status = await self._status_kdu(
)
status = await self._status_kdu(
- cluster_id=cluster_id, kdu_instance=kdu_instance, return_text=False
+ cluster_id=cluster_id, kdu_instance=kdu_instance, yaml_format=False
)
# extract info.status.resources-> str
)
# extract info.status.resources-> str