Merge remote-tracking branch 'origin/master' into paas
Change-Id: I1eb78ad5a4f1e6d32741ef12a0231726083c4ae6
Signed-off-by: Mark Beierl <mark.beierl@canonical.com>
diff --git a/n2vc/k8s_conn.py b/n2vc/k8s_conn.py
index 1c88653..3a1a5ef 100644
--- a/n2vc/k8s_conn.py
+++ b/n2vc/k8s_conn.py
@@ -484,7 +484,6 @@
self.log.debug("status={}".format(status))
try:
-
the_table = db_dict["collection"]
the_filter = db_dict["filter"]
the_path = db_dict["path"]
diff --git a/n2vc/k8s_helm3_conn.py b/n2vc/k8s_helm3_conn.py
index 3d7e3b2..037ed66 100644
--- a/n2vc/k8s_helm3_conn.py
+++ b/n2vc/k8s_helm3_conn.py
@@ -149,7 +149,6 @@
return True
async def inspect_kdu(self, kdu_model: str, repo_url: str = None) -> str:
-
self.log.debug(
"inspect kdu_model {} from (optional) repo: {}".format(kdu_model, repo_url)
)
@@ -250,7 +249,6 @@
return namespace in namespaces if namespaces else False
async def _get_namespaces(self, cluster_id: str):
-
self.log.debug("get namespaces cluster_id {}".format(cluster_id))
# init config, env
@@ -272,7 +270,6 @@
return namespaces
async def _create_namespace(self, cluster_id: str, namespace: str):
-
self.log.debug(f"create namespace: {cluster_id} for cluster_id: {namespace}")
# init config, env
@@ -293,7 +290,6 @@
async def _get_services(
self, cluster_id: str, kdu_instance: str, namespace: str, kubeconfig: str
):
-
# init config, env
paths, env = self._init_paths_env(
cluster_name=cluster_id, create_if_not_exist=True
@@ -333,7 +329,6 @@
pass
async def _instances_list(self, cluster_id: str):
-
# init paths, env
paths, env = self._init_paths_env(
cluster_name=cluster_id, create_if_not_exist=True
@@ -389,7 +384,6 @@
yaml_format: bool = False,
show_error_log: bool = False,
) -> Union[str, dict]:
-
self.log.debug(
"status of kdu_instance: {}, namespace: {} ".format(kdu_instance, namespace)
)
@@ -448,7 +442,6 @@
timeout: float,
kubeconfig: str,
) -> str:
-
timeout_str = ""
if timeout:
timeout_str = "--timeout {}s".format(timeout)
@@ -614,7 +607,6 @@
def _get_uninstall_command(
self, kdu_instance: str, namespace: str, kubeconfig: str
) -> str:
-
return "env KUBECONFIG={} {} uninstall {} --namespace={}".format(
kubeconfig, self._helm_command, kdu_instance, namespace
)
diff --git a/n2vc/k8s_helm_base_conn.py b/n2vc/k8s_helm_base_conn.py
index 65f898c..5588c3d 100644
--- a/n2vc/k8s_helm_base_conn.py
+++ b/n2vc/k8s_helm_base_conn.py
@@ -450,7 +450,6 @@
output, rc = exec_task.result()
else:
-
output, rc = await self._local_async_exec(
command=command, raise_exception_on_error=False, env=env
)
@@ -534,7 +533,6 @@
self.log.debug("upgrading: {}".format(command))
if atomic:
-
# exec helm in a task
exec_task = asyncio.ensure_future(
coro_or_future=self._local_async_exec(
@@ -560,7 +558,6 @@
output, rc = exec_task.result()
else:
-
output, rc = await self._local_async_exec(
command=command, raise_exception_on_error=False, env=env
)
@@ -1048,7 +1045,6 @@
async def get_service(
self, cluster_uuid: str, service_name: str, namespace: str
) -> object:
-
self.log.debug(
"get service, service_name: {}, namespace: {}, cluster_uuid: {}".format(
service_name, namespace, cluster_uuid
@@ -1131,7 +1127,6 @@
async def get_values_kdu(
self, kdu_instance: str, namespace: str, kubeconfig: str
) -> str:
-
self.log.debug("get kdu_instance values {}".format(kdu_instance))
return await self._exec_get_command(
@@ -1163,7 +1158,6 @@
)
async def help_kdu(self, kdu_model: str, repo_url: str = None) -> str:
-
self.log.debug(
"inspect kdu_model {} readme.md from repo: {}".format(kdu_model, repo_url)
)
@@ -1173,7 +1167,6 @@
)
async def synchronize_repos(self, cluster_uuid: str):
-
self.log.debug("synchronize repos for cluster helm-id: {}".format(cluster_uuid))
try:
db_repo_ids = self._get_helm_chart_repos_ids(cluster_uuid)
@@ -1546,7 +1539,6 @@
encode_utf8: bool = False,
env: dict = None,
) -> (str, int):
-
command = K8sHelmBaseConnector._remove_multiple_spaces(command)
self.log.debug(
"Executing async local command: {}, env: {}".format(command, env)
@@ -1621,7 +1613,6 @@
encode_utf8: bool = False,
env: dict = None,
):
-
command1 = K8sHelmBaseConnector._remove_multiple_spaces(command1)
command2 = K8sHelmBaseConnector._remove_multiple_spaces(command2)
command = "{} | {}".format(command1, command2)
@@ -1967,7 +1958,6 @@
# params for use in -f file
# returns values file option and filename (in order to delete it at the end)
def _params_to_file_option(self, cluster_id: str, params: dict) -> (str, str):
-
if params and len(params) > 0:
self._init_paths_env(cluster_name=cluster_id, create_if_not_exist=True)
diff --git a/n2vc/k8s_helm_conn.py b/n2vc/k8s_helm_conn.py
index 0ea8920..84879c8 100644
--- a/n2vc/k8s_helm_conn.py
+++ b/n2vc/k8s_helm_conn.py
@@ -164,7 +164,6 @@
return True
async def inspect_kdu(self, kdu_model: str, repo_url: str = None) -> str:
-
self.log.debug(
"inspect kdu_model {} from (optional) repo: {}".format(kdu_model, repo_url)
)
@@ -235,7 +234,6 @@
return paths, env
async def _get_services(self, cluster_id, kdu_instance, namespace, kubeconfig):
-
# init config, env
paths, env = self._init_paths_env(
cluster_name=cluster_id, create_if_not_exist=True
@@ -422,7 +420,6 @@
self.log.debug("namespace not found")
async def _instances_list(self, cluster_id):
-
# init paths, env
paths, env = self._init_paths_env(
cluster_name=cluster_id, create_if_not_exist=True
@@ -469,7 +466,6 @@
yaml_format: bool = False,
show_error_log: bool = False,
) -> Union[str, dict]:
-
self.log.debug(
"status of kdu_instance: {}, namespace: {} ".format(kdu_instance, namespace)
)
@@ -606,7 +602,6 @@
timeout,
kubeconfig,
) -> str:
-
timeout_str = ""
if timeout:
timeout_str = "--timeout {}".format(timeout)
diff --git a/n2vc/k8s_juju_conn.py b/n2vc/k8s_juju_conn.py
index eabc619..babe239 100644
--- a/n2vc/k8s_juju_conn.py
+++ b/n2vc/k8s_juju_conn.py
@@ -330,7 +330,14 @@
previous_workdir = "/app/storage"
self.log.debug("[install] deploying {}".format(bundle))
- await libjuju.deploy(bundle, model_name=namespace, wait=atomic, timeout=timeout)
+ instantiation_params = params.get("overlay") if params else None
+ await libjuju.deploy(
+ bundle,
+ model_name=namespace,
+ wait=atomic,
+ timeout=timeout,
+ instantiation_params=instantiation_params,
+ )
os.chdir(previous_workdir)
# update information in the database (first, the VCA status, and then, the namespace)
diff --git a/n2vc/libjuju.py b/n2vc/libjuju.py
index 053aaa8..55ca859 100644
--- a/n2vc/libjuju.py
+++ b/n2vc/libjuju.py
@@ -14,15 +14,20 @@
import asyncio
import logging
+import os
import typing
+import yaml
import time
import juju.errors
+from juju.bundle import BundleHandler
from juju.model import Model
from juju.machine import Machine
from juju.application import Application
from juju.unit import Unit
+from juju.url import URL
+from juju.version import DEFAULT_ARCHITECTURE
from juju.client._definitions import (
FullStatus,
QueryApplicationOffersResults,
@@ -549,27 +554,122 @@
return machine_id
async def deploy(
- self, uri: str, model_name: str, wait: bool = True, timeout: float = 3600
+ self,
+ uri: str,
+ model_name: str,
+ wait: bool = True,
+ timeout: float = 3600,
+ instantiation_params: dict = None,
):
"""
Deploy bundle or charm: Similar to the juju CLI command `juju deploy`
- :param: uri: Path or Charm Store uri in which the charm or bundle can be found
- :param: model_name: Model name
- :param: wait: Indicates whether to wait or not until all applications are active
- :param: timeout: Time in seconds to wait until all applications are active
+ :param uri: Path or Charm Store uri in which the charm or bundle can be found
+ :param model_name: Model name
+ :param wait: Indicates whether to wait or not until all applications are active
+ :param timeout: Time in seconds to wait until all applications are active
+ :param instantiation_params: To be applied as overlay bundle over primary bundle.
"""
controller = await self.get_controller()
model = await self.get_model(controller, model_name)
+ overlays = []
try:
- await model.deploy(uri, trust=True)
+ await self._validate_instantiation_params(uri, model, instantiation_params)
+ overlays = self._get_overlays(model_name, instantiation_params)
+ await model.deploy(uri, trust=True, overlays=overlays)
if wait:
await JujuModelWatcher.wait_for_model(model, timeout=timeout)
self.log.debug("All units active in model {}".format(model_name))
finally:
+ self._remove_overlay_file(overlays)
await self.disconnect_model(model)
await self.disconnect_controller(controller)
+ async def _validate_instantiation_params(
+ self, uri: str, model, instantiation_params: dict
+ ) -> None:
+ """Checks if all the applications in instantiation_params
+ exist in the original bundle.
+
+ Raises:
+ JujuApplicationNotFound if there is an invalid app in
+ the instantiation params.
+ """
+ overlay_apps = self._get_apps_in_instantiation_params(instantiation_params)
+ if not overlay_apps:
+ return
+ original_apps = await self._get_apps_in_original_bundle(uri, model)
+ if not all(app in original_apps for app in overlay_apps):
+ raise JujuApplicationNotFound(
+ "Cannot find application {} in original bundle {}".format(
+ overlay_apps, original_apps
+ )
+ )
+
+ async def _get_apps_in_original_bundle(self, uri: str, model) -> set:
+ """Bundle is downloaded in BundleHandler.fetch_plan.
+ That method takes care of opening and exception handling.
+
+ Resolve method gets all the information regarding the channel,
+ track, revision, type, source.
+
+ Returns:
+ Set with the names of the applications in original bundle.
+ """
+ url = URL.parse(uri)
+ architecture = DEFAULT_ARCHITECTURE # only AMD64 is allowed
+ res = await model.deploy_types[str(url.schema)].resolve(
+ url, architecture, entity_url=uri
+ )
+ handler = BundleHandler(model, trusted=True, forced=False)
+ await handler.fetch_plan(url, res.origin)
+ return handler.applications
+
+ def _get_apps_in_instantiation_params(self, instantiation_params: dict) -> list:
+ """Extract applications key in instantiation params.
+
+ Returns:
+ List with the names of the applications in instantiation params.
+
+ Raises:
+ JujuError if applications key is not found.
+ """
+ if not instantiation_params:
+ return []
+ try:
+ return [key for key in instantiation_params.get("applications")]
+ except Exception as e:
+ raise JujuError("Invalid overlay format. {}".format(str(e)))
+
+ def _get_overlays(self, model_name: str, instantiation_params: dict) -> list:
+ """Creates a temporary overlay file which includes the instantiation params.
+ Only one overlay file is created.
+
+ Returns:
+ List with one overlay filename. Empty list if there are no instantiation params.
+ """
+ if not instantiation_params:
+ return []
+ file_name = model_name + "-overlay.yaml"
+ self._write_overlay_file(file_name, instantiation_params)
+ return [file_name]
+
+ def _write_overlay_file(self, file_name: str, instantiation_params: dict) -> None:
+ with open(file_name, "w") as file:
+ yaml.dump(instantiation_params, file)
+
+ def _remove_overlay_file(self, overlay: list) -> None:
+ """Overlay contains either one or zero file names."""
+ if not overlay:
+ return
+ try:
+ filename = overlay[0]
+ os.remove(filename)
+ except OSError as e:
+ self.log.warning(
+ "Overlay file {} could not be removed: {}".format(filename, e)
+ )
+
async def add_unit(
self,
application_name: str,
@@ -598,7 +698,6 @@
application = self._get_application(model, application_name)
if application is not None:
-
# Checks if the given machine id in the model,
# otherwise function raises an error
_machine, _series = self._get_machine_info(model, machine_id)
@@ -753,7 +852,6 @@
try:
if application_name not in model.applications:
-
if machine_id is not None:
machine, series = self._get_machine_info(model, machine_id)
@@ -893,7 +991,6 @@
return application
async def resolve_application(self, model_name: str, application_name: str):
-
controller = await self.get_controller()
model = await self.get_model(controller, model_name)
@@ -927,7 +1024,6 @@
await self.disconnect_controller(controller)
async def resolve(self, model_name: str):
-
controller = await self.get_controller()
model = await self.get_model(controller, model_name)
all_units_active = False
diff --git a/n2vc/loggable.py b/n2vc/loggable.py
index d588a1d..cbaa116 100644
--- a/n2vc/loggable.py
+++ b/n2vc/loggable.py
@@ -31,7 +31,6 @@
class Loggable:
def __init__(self, log, log_to_console: bool = False, prefix: str = ""):
-
self._last_log_time = None # used for time increment in logging
self._log_to_console = log_to_console
self._prefix = prefix
@@ -93,7 +92,6 @@
include_thread: bool = False,
include_coroutine: bool = True,
) -> str:
-
# time increment from last log
now = time.perf_counter()
if self._last_log_time is None:
diff --git a/n2vc/n2vc_conn.py b/n2vc/n2vc_conn.py
index d38bcad..5752da7 100644
--- a/n2vc/n2vc_conn.py
+++ b/n2vc/n2vc_conn.py
@@ -456,7 +456,6 @@
# .format(str(status.value), detailed_status, vca_status, entity_type))
try:
-
the_table = db_dict["collection"]
the_filter = db_dict["filter"]
the_path = db_dict["path"]
diff --git a/n2vc/n2vc_juju_conn.py b/n2vc/n2vc_juju_conn.py
index 2c2f6af..cbca396 100644
--- a/n2vc/n2vc_juju_conn.py
+++ b/n2vc/n2vc_juju_conn.py
@@ -1096,7 +1096,6 @@
)
try:
-
await libjuju.upgrade_charm(
application_name=application_name,
path=path,
@@ -1156,7 +1155,6 @@
return Libjuju(vca_connection, loop=self.loop, log=self.log, n2vc=self)
def _write_ee_id_db(self, db_dict: dict, ee_id: str):
-
# write ee_id to database: _admin.deployed.VCA.x
try:
the_table = db_dict["collection"]
diff --git a/n2vc/store.py b/n2vc/store.py
index cd6c6fb..e9586d7 100644
--- a/n2vc/store.py
+++ b/n2vc/store.py
@@ -14,15 +14,14 @@
import abc
import asyncio
-from base64 import b64decode
-import re
import typing
-from Crypto.Cipher import AES
from motor.motor_asyncio import AsyncIOMotorClient
from n2vc.config import EnvironConfig
from n2vc.vca.connection_data import ConnectionData
from osm_common.dbmongo import DbMongo, DbException
+from osm_common.dbbase import Encryption
+
DB_NAME = "osm"
@@ -195,6 +194,13 @@
self.loop = loop or asyncio.get_event_loop()
self._secret_key = None
self._config = EnvironConfig(prefixes=["OSMLCM_", "OSMMON_"])
+ self.encryption = Encryption(
+ uri=uri,
+ config=self._config,
+ encoding_type="utf-8",
+ loop=self.loop,
+ logger_name="db",
+ )
@property
def _database(self):
@@ -223,7 +229,7 @@
data = await self._vca_collection.find_one({"_id": vca_id})
if not data:
raise Exception("vca with id {} not found".format(vca_id))
- await self.decrypt_fields(
+ await self.encryption.decrypt_fields(
data,
["secret", "cacert"],
schema_version=data["schema_version"],
@@ -294,114 +300,3 @@
async def _get_juju_info(self):
"""Get Juju information (the default VCA) from the admin collection"""
return await self._admin_collection.find_one({"_id": "juju"})
-
- # DECRYPT METHODS
- async def decrypt_fields(
- self,
- item: dict,
- fields: typing.List[str],
- schema_version: str = None,
- salt: str = None,
- ):
- """
- Decrypt fields
-
- Decrypt fields from a dictionary. Follows the same logic as in osm_common.
-
- :param: item: Dictionary with the keys to be decrypted
- :param: fields: List of keys to decrypt
- :param: schema version: Schema version. (i.e. 1.11)
- :param: salt: Salt for the decryption
- """
- flags = re.I
-
- async def process(_item):
- if isinstance(_item, list):
- for elem in _item:
- await process(elem)
- elif isinstance(_item, dict):
- for key, val in _item.items():
- if isinstance(val, str):
- if any(re.search(f, key, flags) for f in fields):
- _item[key] = await self.decrypt(val, schema_version, salt)
- else:
- await process(val)
-
- await process(item)
-
- async def decrypt(self, value, schema_version=None, salt=None):
- """
- Decrypt an encrypted value
- :param value: value to be decrypted. It is a base64 string
- :param schema_version: used for known encryption method used. If None or '1.0' no encryption has been done.
- If '1.1' symmetric AES encryption has been done
- :param salt: optional salt to be used
- :return: Plain content of value
- """
- await self.get_secret_key()
- if not self.secret_key or not schema_version or schema_version == "1.0":
- return value
- else:
- secret_key = self._join_secret_key(salt)
- encrypted_msg = b64decode(value)
- cipher = AES.new(secret_key)
- decrypted_msg = cipher.decrypt(encrypted_msg)
- try:
- unpadded_private_msg = decrypted_msg.decode().rstrip("\0")
- except UnicodeDecodeError:
- raise DbException(
- "Cannot decrypt information. Are you using same COMMONKEY in all OSM components?",
- http_code=500,
- )
- return unpadded_private_msg
-
- def _join_secret_key(self, update_key: typing.Any) -> bytes:
- """
- Join key with secret key
-
- :param: update_key: str or bytes with the to update
-
- :return: Joined key
- """
- return self._join_keys(update_key, self.secret_key)
-
- def _join_keys(self, key: typing.Any, secret_key: bytes) -> bytes:
- """
- Join key with secret_key
-
- :param: key: str or bytesof the key to update
- :param: secret_key: bytes of the secret key
-
- :return: Joined key
- """
- if isinstance(key, str):
- update_key_bytes = key.encode()
- else:
- update_key_bytes = key
- new_secret_key = bytearray(secret_key) if secret_key else bytearray(32)
- for i, b in enumerate(update_key_bytes):
- new_secret_key[i % 32] ^= b
- return bytes(new_secret_key)
-
- @property
- def secret_key(self):
- return self._secret_key
-
- async def get_secret_key(self):
- """
- Get secret key using the database key and the serial key in the DB
- The key is populated in the property self.secret_key
- """
- if self.secret_key:
- return
- secret_key = None
- if self.database_key:
- secret_key = self._join_keys(self.database_key, None)
- version_data = await self._admin_collection.find_one({"_id": "version"})
- if version_data and version_data.get("serial"):
- secret_key = self._join_keys(b64decode(version_data["serial"]), secret_key)
- self._secret_key = secret_key
-
- @property
- def database_key(self):
- return self._config["database_commonkey"]
diff --git a/n2vc/tests/unit/test_juju_watcher.py b/n2vc/tests/unit/test_juju_watcher.py
index 07b2127..b9e9e36 100644
--- a/n2vc/tests/unit/test_juju_watcher.py
+++ b/n2vc/tests/unit/test_juju_watcher.py
@@ -181,7 +181,6 @@
os.path.join(os.path.dirname(__file__), "testdata", filename),
"r",
) as self.upgrade_file:
-
all_changes = AsyncMock()
all_changes.Next.side_effect = self._fetch_next_delta
mock_all_watcher.return_value = all_changes
diff --git a/n2vc/tests/unit/test_k8s_helm3_conn.py b/n2vc/tests/unit/test_k8s_helm3_conn.py
index 33add05..a2e75e1 100644
--- a/n2vc/tests/unit/test_k8s_helm3_conn.py
+++ b/n2vc/tests/unit/test_k8s_helm3_conn.py
@@ -134,7 +134,6 @@
@asynctest.fail_on(active_handles=True)
async def test_repo_list(self):
-
self.helm_conn._local_async_exec = asynctest.CoroutineMock(return_value=("", 0))
await self.helm_conn.repo_list(self.cluster_uuid)
@@ -150,7 +149,6 @@
@asynctest.fail_on(active_handles=True)
async def test_repo_remove(self):
-
self.helm_conn._local_async_exec = asynctest.CoroutineMock(return_value=("", 0))
repo_name = "bitnami"
await self.helm_conn.repo_remove(self.cluster_uuid, repo_name)
diff --git a/n2vc/tests/unit/test_k8s_juju_conn.py b/n2vc/tests/unit/test_k8s_juju_conn.py
index ead7b53..1cc0809 100644
--- a/n2vc/tests/unit/test_k8s_juju_conn.py
+++ b/n2vc/tests/unit/test_k8s_juju_conn.py
@@ -227,6 +227,7 @@
kdu_name=self.kdu_name,
db_dict=self.db_dict,
timeout=1800,
+ params=None,
)
)
self.assertEqual(mock_chdir.call_count, 2)
@@ -236,6 +237,7 @@
model_name=self.default_namespace,
wait=True,
timeout=1800,
+ instantiation_params=None,
)
def test_success_cs(self, mock_chdir):
@@ -248,14 +250,20 @@
kdu_name=self.kdu_name,
db_dict=self.db_dict,
timeout=1800,
+ params={},
)
)
self.k8s_juju_conn.libjuju.add_model.assert_called_once()
self.k8s_juju_conn.libjuju.deploy.assert_called_once_with(
- self.cs_bundle, model_name=self.default_namespace, wait=True, timeout=1800
+ self.cs_bundle,
+ model_name=self.default_namespace,
+ wait=True,
+ timeout=1800,
+ instantiation_params=None,
)
def test_success_http(self, mock_chdir):
+ params = {"overlay": {"applications": {"squid": {"scale": 2}}}}
self.loop.run_until_complete(
self.k8s_juju_conn.install(
self.cluster_uuid,
@@ -265,14 +273,20 @@
kdu_name=self.kdu_name,
db_dict=self.db_dict,
timeout=1800,
+ params=params,
)
)
self.k8s_juju_conn.libjuju.add_model.assert_called_once()
self.k8s_juju_conn.libjuju.deploy.assert_called_once_with(
- self.http_bundle, model_name=self.default_namespace, wait=True, timeout=1800
+ self.http_bundle,
+ model_name=self.default_namespace,
+ wait=True,
+ timeout=1800,
+ instantiation_params=params.get("overlay"),
)
def test_success_not_kdu_name(self, mock_chdir):
+ params = {"some_key": {"applications": {"squid": {"scale": 2}}}}
self.loop.run_until_complete(
self.k8s_juju_conn.install(
self.cluster_uuid,
@@ -281,11 +295,16 @@
atomic=True,
db_dict=self.db_dict,
timeout=1800,
+ params=params,
)
)
self.k8s_juju_conn.libjuju.add_model.assert_called_once()
self.k8s_juju_conn.libjuju.deploy.assert_called_once_with(
- self.cs_bundle, model_name=self.default_namespace, wait=True, timeout=1800
+ self.cs_bundle,
+ model_name=self.default_namespace,
+ wait=True,
+ timeout=1800,
+ instantiation_params=None,
)
def test_missing_db_dict(self, mock_chdir):
@@ -321,7 +340,11 @@
)
self.k8s_juju_conn.libjuju.add_model.assert_called_once()
self.k8s_juju_conn.libjuju.deploy.assert_called_once_with(
- self.cs_bundle, model_name=self.default_namespace, wait=True, timeout=1800
+ self.cs_bundle,
+ model_name=self.default_namespace,
+ wait=True,
+ timeout=1800,
+ instantiation_params=None,
)
def test_missing_bundle(self, mock_chdir):
@@ -360,6 +383,7 @@
model_name=self.default_namespace,
wait=True,
timeout=1800,
+ instantiation_params=None,
)
diff --git a/n2vc/tests/unit/test_libjuju.py b/n2vc/tests/unit/test_libjuju.py
index 1bbe556..9f21bc6 100644
--- a/n2vc/tests/unit/test_libjuju.py
+++ b/n2vc/tests/unit/test_libjuju.py
@@ -496,70 +496,408 @@
# TODO test provision machine
+@asynctest.mock.patch("os.remove")
+@asynctest.mock.patch("n2vc.libjuju.yaml.dump")
+@asynctest.mock.patch("builtins.open", create=True)
@asynctest.mock.patch("n2vc.libjuju.Libjuju.get_controller")
@asynctest.mock.patch("n2vc.libjuju.Libjuju.get_model")
@asynctest.mock.patch("n2vc.libjuju.Libjuju.disconnect_model")
@asynctest.mock.patch("n2vc.libjuju.Libjuju.disconnect_controller")
@asynctest.mock.patch("n2vc.juju_watcher.JujuModelWatcher.wait_for_model")
@asynctest.mock.patch("juju.model.Model.deploy")
+@asynctest.mock.patch("juju.model.CharmhubDeployType.resolve")
+@asynctest.mock.patch("n2vc.libjuju.BundleHandler")
+@asynctest.mock.patch("juju.url.URL.parse")
class DeployTest(LibjujuTestCase):
def setUp(self):
super(DeployTest, self).setUp()
+ self.instantiation_params = {"applications": {"squid": {"scale": 2}}}
+ self.architecture = "amd64"
+ self.uri = "cs:osm"
+ self.url = AsyncMock()
+ self.url.schema = juju.url.Schema.CHARM_HUB
+ self.bundle_instance = None
+
+ def setup_bundle_download_mocks(
+ self, mock_url_parse, mock_bundle, mock_resolve, mock_get_model
+ ):
+ mock_url_parse.return_value = self.url
+ mock_bundle.return_value = AsyncMock()
+ mock_resolve.return_value = AsyncMock()
+ mock_resolve.origin = AsyncMock()
+ mock_get_model.return_value = juju.model.Model()
+ self.bundle_instance = mock_bundle.return_value
+ self.bundle_instance.applications = {"squid"}
+
+ def assert_overlay_file_is_written(self, filename, mocked_file, mock_yaml, mock_os):
+ mocked_file.assert_called_once_with(filename, "w")
+ mock_yaml.assert_called_once_with(
+ self.instantiation_params, mocked_file.return_value.__enter__.return_value
+ )
+ mock_os.assert_called_once_with(filename)
+
+ def assert_overlay_file_is_not_written(self, mocked_file, mock_yaml, mock_os):
+ mocked_file.assert_not_called()
+ mock_yaml.assert_not_called()
+ mock_os.assert_not_called()
+
+ def assert_bundle_is_downloaded(self, mock_resolve, mock_url_parse):
+ mock_resolve.assert_called_once_with(
+ self.url, self.architecture, entity_url=self.uri
+ )
+ mock_url_parse.assert_called_once_with(self.uri)
+ self.bundle_instance.fetch_plan.assert_called_once_with(
+ self.url, mock_resolve.origin
+ )
+
+ def assert_bundle_is_not_downloaded(self, mock_resolve, mock_url_parse):
+ mock_resolve.assert_not_called()
+ mock_url_parse.assert_not_called()
+ self.bundle_instance.fetch_plan.assert_not_called()
def test_deploy(
self,
+ mock_url_parse,
+ mock_bundle,
+ mock_resolve,
mock_deploy,
mock_wait_for_model,
mock_disconnect_controller,
mock_disconnect_model,
mock_get_model,
mock_get_controller,
+ mocked_file,
+ mock_yaml,
+ mock_os,
):
- mock_get_model.return_value = juju.model.Model()
- self.loop.run_until_complete(
- self.libjuju.deploy("cs:osm", "model", wait=True, timeout=0)
+ self.setup_bundle_download_mocks(
+ mock_url_parse, mock_bundle, mock_resolve, mock_get_model
)
- mock_deploy.assert_called_once()
+ model_name = "model1"
+
+ self.loop.run_until_complete(
+ self.libjuju.deploy(
+ "cs:osm",
+ model_name,
+ wait=True,
+ timeout=0,
+ instantiation_params=None,
+ )
+ )
+ self.assert_overlay_file_is_not_written(mocked_file, mock_yaml, mock_os)
+ self.assert_bundle_is_not_downloaded(mock_resolve, mock_url_parse)
+ mock_deploy.assert_called_once_with("cs:osm", trust=True, overlays=[])
mock_wait_for_model.assert_called_once()
mock_disconnect_controller.assert_called_once()
mock_disconnect_model.assert_called_once()
def test_deploy_no_wait(
self,
+ mock_url_parse,
+ mock_bundle,
+ mock_resolve,
mock_deploy,
mock_wait_for_model,
mock_disconnect_controller,
mock_disconnect_model,
mock_get_model,
mock_get_controller,
+ mocked_file,
+ mock_yaml,
+ mock_os,
):
- mock_get_model.return_value = juju.model.Model()
- self.loop.run_until_complete(
- self.libjuju.deploy("cs:osm", "model", wait=False, timeout=0)
+ self.setup_bundle_download_mocks(
+ mock_url_parse, mock_bundle, mock_resolve, mock_get_model
)
- mock_deploy.assert_called_once()
+ self.loop.run_until_complete(
+ self.libjuju.deploy(
+ "cs:osm", "model", wait=False, timeout=0, instantiation_params={}
+ )
+ )
+ self.assert_overlay_file_is_not_written(mocked_file, mock_yaml, mock_os)
+ self.assert_bundle_is_not_downloaded(mock_resolve, mock_url_parse)
+ mock_deploy.assert_called_once_with("cs:osm", trust=True, overlays=[])
mock_wait_for_model.assert_not_called()
mock_disconnect_controller.assert_called_once()
mock_disconnect_model.assert_called_once()
def test_deploy_exception(
self,
+ mock_url_parse,
+ mock_bundle,
+ mock_resolve,
mock_deploy,
mock_wait_for_model,
mock_disconnect_controller,
mock_disconnect_model,
mock_get_model,
mock_get_controller,
+ mocked_file,
+ mock_yaml,
+ mock_os,
):
+ self.setup_bundle_download_mocks(
+ mock_url_parse, mock_bundle, mock_resolve, mock_get_model
+ )
mock_deploy.side_effect = Exception()
- mock_get_model.return_value = juju.model.Model()
with self.assertRaises(Exception):
self.loop.run_until_complete(self.libjuju.deploy("cs:osm", "model"))
+ self.assert_overlay_file_is_not_written(mocked_file, mock_yaml, mock_os)
+ self.assert_bundle_is_not_downloaded(mock_resolve, mock_url_parse)
mock_deploy.assert_called_once()
mock_wait_for_model.assert_not_called()
mock_disconnect_controller.assert_called_once()
mock_disconnect_model.assert_called_once()
+ def test_deploy_with_instantiation_params(
+ self,
+ mock_url_parse,
+ mock_bundle,
+ mock_resolve,
+ mock_deploy,
+ mock_wait_for_model,
+ mock_disconnect_controller,
+ mock_disconnect_model,
+ mock_get_model,
+ mock_get_controller,
+ mocked_file,
+ mock_yaml,
+ mock_os,
+ ):
+ self.setup_bundle_download_mocks(
+ mock_url_parse, mock_bundle, mock_resolve, mock_get_model
+ )
+ model_name = "model1"
+ expected_filename = "{}-overlay.yaml".format(model_name)
+ self.loop.run_until_complete(
+ self.libjuju.deploy(
+ self.uri,
+ model_name,
+ wait=True,
+ timeout=0,
+ instantiation_params=self.instantiation_params,
+ )
+ )
+ self.assert_overlay_file_is_written(
+ expected_filename, mocked_file, mock_yaml, mock_os
+ )
+ self.assert_bundle_is_downloaded(mock_resolve, mock_url_parse)
+ mock_deploy.assert_called_once_with(
+ self.uri, trust=True, overlays=[expected_filename]
+ )
+ mock_wait_for_model.assert_called_once()
+ mock_disconnect_controller.assert_called_once()
+ mock_disconnect_model.assert_called_once()
+
+ def test_deploy_with_instantiation_params_no_applications(
+ self,
+ mock_url_parse,
+ mock_bundle,
+ mock_resolve,
+ mock_deploy,
+ mock_wait_for_model,
+ mock_disconnect_controller,
+ mock_disconnect_model,
+ mock_get_model,
+ mock_get_controller,
+ mocked_file,
+ mock_yaml,
+ mock_os,
+ ):
+ self.instantiation_params = {"applications": {}}
+ self.setup_bundle_download_mocks(
+ mock_url_parse, mock_bundle, mock_resolve, mock_get_model
+ )
+
+ model_name = "model3"
+ expected_filename = "{}-overlay.yaml".format(model_name)
+ self.loop.run_until_complete(
+ self.libjuju.deploy(
+ self.uri,
+ model_name,
+ wait=False,
+ timeout=0,
+ instantiation_params=self.instantiation_params,
+ )
+ )
+
+ self.assert_overlay_file_is_written(
+ expected_filename, mocked_file, mock_yaml, mock_os
+ )
+ self.assert_bundle_is_not_downloaded(mock_resolve, mock_url_parse)
+ mock_deploy.assert_called_once_with(
+ self.uri, trust=True, overlays=[expected_filename]
+ )
+ mock_wait_for_model.assert_not_called()
+ mock_disconnect_controller.assert_called_once()
+ mock_disconnect_model.assert_called_once()
+
+ def test_deploy_with_instantiation_params_applications_not_found(
+ self,
+ mock_url_parse,
+ mock_bundle,
+ mock_resolve,
+ mock_deploy,
+ mock_wait_for_model,
+ mock_disconnect_controller,
+ mock_disconnect_model,
+ mock_get_model,
+ mock_get_controller,
+ mocked_file,
+ mock_yaml,
+ mock_os,
+ ):
+ self.instantiation_params = {"some_key": {"squid": {"scale": 2}}}
+ self.setup_bundle_download_mocks(
+ mock_url_parse, mock_bundle, mock_resolve, mock_get_model
+ )
+
+ with self.assertRaises(JujuError):
+ self.loop.run_until_complete(
+ self.libjuju.deploy(
+ self.uri,
+ "model1",
+ wait=True,
+ timeout=0,
+ instantiation_params=self.instantiation_params,
+ )
+ )
+
+ self.assert_overlay_file_is_not_written(mocked_file, mock_yaml, mock_os)
+ self.assert_bundle_is_not_downloaded(mock_resolve, mock_url_parse)
+ mock_deploy.assert_not_called()
+ mock_wait_for_model.assert_not_called()
+ mock_disconnect_controller.assert_called_once()
+ mock_disconnect_model.assert_called_once()
+
+ def test_deploy_overlay_contains_invalid_app(
+ self,
+ mock_url_parse,
+ mock_bundle,
+ mock_resolve,
+ mock_deploy,
+ mock_wait_for_model,
+ mock_disconnect_controller,
+ mock_disconnect_model,
+ mock_get_model,
+ mock_get_controller,
+ mocked_file,
+ mock_yaml,
+ mock_os,
+ ):
+ self.setup_bundle_download_mocks(
+ mock_url_parse, mock_bundle, mock_resolve, mock_get_model
+ )
+ self.bundle_instance.applications = {"new_app"}
+
+ with self.assertRaises(JujuApplicationNotFound) as error:
+ self.loop.run_until_complete(
+ self.libjuju.deploy(
+ self.uri,
+ "model2",
+ wait=True,
+ timeout=0,
+ instantiation_params=self.instantiation_params,
+ )
+ )
+ error_msg = "Cannot find application ['squid'] in original bundle {'new_app'}"
+ self.assertEqual(str(error.exception), error_msg)
+
+ self.assert_overlay_file_is_not_written(mocked_file, mock_yaml, mock_os)
+ self.assert_bundle_is_downloaded(mock_resolve, mock_url_parse)
+ mock_deploy.assert_not_called()
+ mock_wait_for_model.assert_not_called()
+ mock_disconnect_controller.assert_called_once()
+ mock_disconnect_model.assert_called_once()
+
+ def test_deploy_exception_with_instantiation_params(
+ self,
+ mock_url_parse,
+ mock_bundle,
+ mock_resolve,
+ mock_deploy,
+ mock_wait_for_model,
+ mock_disconnect_controller,
+ mock_disconnect_model,
+ mock_get_model,
+ mock_get_controller,
+ mocked_file,
+ mock_yaml,
+ mock_os,
+ ):
+ self.setup_bundle_download_mocks(
+ mock_url_parse, mock_bundle, mock_resolve, mock_get_model
+ )
+
+ mock_deploy.side_effect = Exception()
+ model_name = "model2"
+ expected_filename = "{}-overlay.yaml".format(model_name)
+ with self.assertRaises(Exception):
+ self.loop.run_until_complete(
+ self.libjuju.deploy(
+ self.uri,
+ model_name,
+ instantiation_params=self.instantiation_params,
+ )
+ )
+
+ self.assert_overlay_file_is_written(
+ expected_filename, mocked_file, mock_yaml, mock_os
+ )
+ self.assert_bundle_is_downloaded(mock_resolve, mock_url_parse)
+ mock_deploy.assert_called_once_with(
+ self.uri, trust=True, overlays=[expected_filename]
+ )
+ mock_wait_for_model.assert_not_called()
+ mock_disconnect_controller.assert_called_once()
+ mock_disconnect_model.assert_called_once()
+
+ @asynctest.mock.patch("logging.Logger.warning")
+ def test_deploy_exception_when_deleting_file_is_not_propagated(
+ self,
+ mock_warning,
+ mock_url_parse,
+ mock_bundle,
+ mock_resolve,
+ mock_deploy,
+ mock_wait_for_model,
+ mock_disconnect_controller,
+ mock_disconnect_model,
+ mock_get_model,
+ mock_get_controller,
+ mocked_file,
+ mock_yaml,
+ mock_os,
+ ):
+ self.setup_bundle_download_mocks(
+ mock_url_parse, mock_bundle, mock_resolve, mock_get_model
+ )
+
+ mock_os.side_effect = OSError("Error")
+ model_name = "model2"
+ expected_filename = "{}-overlay.yaml".format(model_name)
+ self.loop.run_until_complete(
+ self.libjuju.deploy(
+ self.uri,
+ model_name,
+ instantiation_params=self.instantiation_params,
+ )
+ )
+
+ self.assert_overlay_file_is_written(
+ expected_filename, mocked_file, mock_yaml, mock_os
+ )
+ self.assert_bundle_is_downloaded(mock_resolve, mock_url_parse)
+ mock_deploy.assert_called_once_with(
+ self.uri, trust=True, overlays=[expected_filename]
+ )
+ mock_wait_for_model.assert_called_once()
+ mock_disconnect_controller.assert_called_once()
+ mock_disconnect_model.assert_called_once()
+ mock_warning.assert_called_with(
+ "Overlay file {} could not be removed: Error".format(expected_filename)
+ )
+
@asynctest.mock.patch("n2vc.libjuju.Libjuju.get_controller")
@asynctest.mock.patch("n2vc.libjuju.Libjuju.get_model")
@@ -778,7 +1116,6 @@
mock_get_model,
mock_get_controller,
):
-
mock_get_model.return_value = juju.model.Model()
mock__get_application.return_value = FakeApplication()
output = None
@@ -1295,7 +1632,6 @@
mock_get_model,
mock_get_controller,
):
-
mock_get_application.return_value = FakeApplication()
self.loop.run_until_complete(
@@ -1317,7 +1653,6 @@
mock_get_model,
mock_get_controller,
):
-
mock_get_application.side_effect = Exception()
with self.assertRaises(Exception):
@@ -1339,7 +1674,6 @@
mock_get_model,
mock_get_controller,
):
-
result = {"error": "not found", "response": "response", "request-id": 1}
mock_get_controller.side_effect = JujuAPIError(result)
@@ -1364,7 +1698,6 @@
mock_get_model,
mock_get_controller,
):
-
result = {"error": "not found", "response": "response", "request-id": 1}
mock_get_model.side_effect = JujuAPIError(result)
diff --git a/n2vc/tests/unit/test_n2vc_juju_conn.py b/n2vc/tests/unit/test_n2vc_juju_conn.py
index df7be50..deb98ce 100644
--- a/n2vc/tests/unit/test_n2vc_juju_conn.py
+++ b/n2vc/tests/unit/test_n2vc_juju_conn.py
@@ -397,7 +397,6 @@
class GenerateApplicationNameTest(N2VCJujuConnTestCase):
-
vnf_id = "dbfbd751-3de4-4e68-bd40-ec5ae0a53898"
def setUp(self):
diff --git a/n2vc/tests/unit/test_store.py b/n2vc/tests/unit/test_store.py
index c7aa2d6..abc5e13 100644
--- a/n2vc/tests/unit/test_store.py
+++ b/n2vc/tests/unit/test_store.py
@@ -138,12 +138,20 @@
self.vca_collection.find_one = AsyncMock()
self.vca_collection.insert_one = AsyncMock()
self.vca_collection.replace_one = AsyncMock()
+ self.encryption = Mock()
+ self.encryption.admin_collection = Mock()
+ self.encryption.admin_collection.find_one = AsyncMock()
self.admin_collection = Mock()
self.admin_collection.find_one = AsyncMock()
self.admin_collection.insert_one = AsyncMock()
self.admin_collection.replace_one = AsyncMock()
self.vim_accounts_collection = Mock()
self.vim_accounts_collection.find_one = AsyncMock()
+ self.store.encryption._client = {
+ "osm": {
+ "admin": self.encryption.admin_collection,
+ }
+ }
self.store._client = {
"osm": {
"vca": self.vca_collection,
@@ -152,7 +160,7 @@
}
}
self.store._config = {"database_commonkey": "osm"}
- # self.store.decrypt_fields = Mock()
+ self.store.encryption._config = {"database_commonkey": "osm"}
self.loop = asyncio.get_event_loop()
@patch("n2vc.vca.connection_data.base64_to_cacert")
@@ -174,7 +182,7 @@
db_find_one = conn_data.copy()
db_find_one.update({"schema_version": "1.1", "_id": "id"})
self.vca_collection.find_one.return_value = db_find_one
- self.store.decrypt_fields = AsyncMock()
+ self.store.encryption.decrypt_fields = AsyncMock()
connection_data = self.loop.run_until_complete(
self.store.get_vca_connection_data("vca_id")
)
@@ -207,7 +215,6 @@
encrypted_secret = "kI46kRJh828ExSNpr16OG/q5a5/qTsE0bsHrv/W/2/g="
cacert = "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUQ4ekNDQWx1Z0F3SUJBZ0lVRWlzTTBoQWxiYzQ0Z1ZhZWh6bS80ZUsyNnRZd0RRWUpLb1pJaHZjTkFRRUwKQlFBd0lURU5NQXNHQTFVRUNoTUVTblZxZFRFUU1BNEdBMVVFQXhNSGFuVnFkUzFqWVRBZUZ3MHlNVEEwTWpNeApNRFV3TXpSYUZ3MHpNVEEwTWpNeE1EVTFNelJhTUNFeERUQUxCZ05WQkFvVEJFcDFhblV4RURBT0JnTlZCQU1UCkIycDFhblV0WTJFd2dnR2lNQTBHQ1NxR1NJYjNEUUVCQVFVQUE0SUJqd0F3Z2dHS0FvSUJnUUNhTmFvNGZab2gKTDJWYThtdy9LdCs3RG9tMHBYTlIvbEUxSHJyVmZvbmZqZFVQV01zSHpTSjJZZXlXcUNSd3BiaHlLaE82N1c1dgpUY2RsV3Y3WGFLTGtsdVkraDBZY3BQT3BFTmZZYmxrNGk0QkV1L0wzYVY5MFFkUFFrMG94S01CS2R5QlBNZVNNCkJmS2pPWXdyOGgzM0ZWUWhmVkJnMXVGZ2tGaDdTamNuNHczUFdvc1BCMjNiVHBCbGR3VE9zemN4Qm9TaDNSVTkKTzZjb3lQdDdEN0drOCtHRlA3RGRUQTdoV1RkaUM4cDBkeHp2RUNmY0psMXNFeFEyZVprS1QvVzZyelNtVDhUTApCM0ErM1FDRDhEOEVsQU1IVy9zS25SeHphYU8welpNVmVlQnRnNlFGZ1F3M0dJMGo2ZTY0K2w3VExoOW8wSkZVCjdpUitPY01xUzVDY0NROGpWV3JPSk9Xc2dEbDZ4T2FFREczYnR5SVJHY29jbVcvcEZFQjNZd1A2S1BRTUIrNXkKWDdnZExEWmFGRFBVakZmblhkMnhHdUZlMnpRTDNVbXZEUkZuUlBBaW02QlpQbWo1OFh2emFhZXROa3lyaUZLZwp4Z0Z1dVpTcDUwV2JWdjF0MkdzOTMrRE53NlhFZHRFYnlWWUNBa28xTTY0MkozczFnN3NoQnRFQ0F3RUFBYU1qCk1DRXdEZ1lEVlIwUEFRSC9CQVFEQWdLa01BOEdBMVVkRXdFQi93UUZNQU1CQWY4d0RRWUpLb1pJaHZjTkFRRUwKQlFBRGdnR0JBRXYxM2o2ZGFVbDBqeERPSnNTV1ZJZS9JdXNXVTRpN2ZXSWlqMHAwRU1GNS9LTE8yemRndTR5SQoreVd2T3N5aVFPanEzMlRYVlo2bTRDSnBkR1dGVE5HK2lLdXVOU3M0N3g3Q3dmVUNBWm5VVzhyamd3ZWJyS3BmCkJMNEVQcTZTcW0rSmltN0VPankyMWJkY2cyUXdZb3A3eUhvaHcveWEvL0l6RTMzVzZxNHlJeEFvNDBVYUhPTEMKTGtGbnNVYitjcFZBeFlPZGp6bjFzNWhnclpuWXlETEl3WmtIdFdEWm94alUzeC9jdnZzZ1FzLytzTWYrRFU4RgpZMkJKRHJjQ1VQM2xzclc0QVpFMFplZkEwOTlncFEvb3dSN0REYnMwSjZUeFM4NGt6Tldjc1FuWnRraXZheHJNClkyVHNnaWVndFExVFdGRWpxLy9sUFV4emJCdmpnd1FBZm5CQXZGeVNKejdTa0VuVm5rUXJGaUlUQVArTHljQVIKMlg4UFI2ZGI1bEt0SitBSENDM3kvZmNQS2k0ZzNTL3djeXRRdmdvOXJ6ODRFalp5YUNTaGJXNG9jNzNrMS9RcAowQWtHRDU0ZGVDWWVPYVJNbW96c0w3ZzdxWkpFekhtODdOcVBYSy9EZFoweWNxaVFhMXY2T3QxNjdXNUlzMUkzCjBWb0IzUzloSlE9PQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCgo=" # noqa: E501
encrypted_cacert = "QeV4evTLXzcKwZZvmXQ/OvSHToXH3ISwfoLmU+Q9JlQWAFUHSJ9IhO0ewaQrJmx3NkfFb7NCxsQhh+wE57zDW4rWgn4w/SWkzvwSi1h2xYOO3ECEHzzVqgUm15Sk0xaj1Fv9Ed4hipf6PRijeOZ7A1G9zekr1w9WIvebMyJZrK+f6QJ8AP20NUZqG/3k+MeJr3kjrl+8uwU5aPOrHAexSQGAqSKTkWzW7glmlyMWTjwkuSgNVgFg0ctdWTZ5JnNwxXbpjwIKrC4E4sIHcxko2vsTeLF8pZFPk+3QUZIg8BrgtyM3lJC2kO1g3emPQhCIk3VDb5GBgssc/GyFyRXNS651d5BNgcABOKZ4Rv/gGnprB35zP7TKJKkST44XJTEBiugWMkSZg+T9H98/l3eE34O6thfTZXgIyG+ZM6uGlW2XOce0OoEIyJiEL039WJe3izjbD3b9sCCdgQc0MgS+hTaayJI6oCUWPsJLmRji19jLi/wjOsU5gPItCFWw3pBye/A4Zf8Hxm+hShvqBnk8R2yx1fPTiyw/Zx4Jn8m49XQJyjDSZnhIck0PVHR9xWzKCr++PKljLMLdkdFxVRVPFQk/FBbesqofjSXsq9DASY6ACTL3Jmignx2OXD6ac4SlBqCTjV2dIM0yEgZF7zwMNCtppRdXTV8S29JP4W2mfaiqXCUSRTggv8EYU+9diCE+8sPB6HjuLrsfiySbFlYR2m4ysDGXjsVx5CDAf0Nh4IRfcSceYnnBGIQ2sfgGcJFOZoJqr/QeE2NWz6jlWYbWT7MjS/0decpKxP7L88qrR+F48WXQvfsvjWgKjlMKw7lHmFF8FeY836VWWICTRZx+y6IlY1Ys2ML4kySF27Hal4OPhOOoBljMNMVwUEvBulOnKUWw4BGz8eGCl8Hw6tlyJdC7kcBj/aCyNCR/NnuDk4Wck6e//He8L6mS83OJi/hIFc8vYQxnCJMXj9Ou7wr5hxtBnvxXzZM3kFHxCDO24Cd5UyBV9GD8TiQJfBGAy7a2BCBMb5ESVX8NOkyyv2hXMHOjpnKhUM9yP3Ke4CBImO7mCKJNHdFVtAmuyVKJ+jT6ooAAArkX2xwEAvBEpvGNmW2jgs6wxSuKY0h5aUm0rA4v/s8fqSZhzdInB54sMldyAnt9G+9e+g933DfyA/tkc56Ed0vZ/XEvTkThVHyUbfYR/Gjsoab1RpnDBi4aZ2E7iceoBshy+L6NXdL0jlWEs4ZubiWlbVNWlN/MqJcjV/quLU7q4HtkG0MDEFm6To3o48x7xpv8otih6YBduNqBFnwQ6Qz9rM2chFgOR4IgNSZKPxHO0AGCi1gnK/CeCvrSfWYAMn+2rmw0hMZybqKMStG28+rXsKDdqmy6vAwL/+dJwkAW+ix68rWRXpeqHlWidu4SkIBELuwEkFIC/GJU/DRvcN2GG9uP1m+VFifCIS2UdiO4OVrP6PVoW1O+jBJvFH3K1YT7CRqevb9OzjS9fO1wjkOff0W8zZyJK9Mp25aynpf0k3oMpZDpjnlOsFXFUb3N6SvXD1Yi95szIlmsr5yRYaeGUJH7/SAmMr8R6RqsCR0ANptL2dtRoGPi/qcDQE15vnjJ+QMYCg9KbCdV+Qq5di93XAjmwPj6tKZv0aXQuaTZgYR7bdLmAnJaFLbHWcQG1k6F/vdKNEb7llLsoAD9KuKXPZT/LErIyKcI0RZySy9yvhTZb4jQWn17b83yfvqfd5/2NpcyaY4gNERhDRJHw7VhoS5Leai5ZnFaO3C1vU9tIJ85XgCUASTsBLoQWVCKPSQZGxzF7PVLnHui3YA5OsOQpVqAPtgGZ12tP9XkEKj+u2/Atj2bgYrqBF7zUL64X/AQpwr/UElWDhJLSD/KStVeDOUx3AwAVVi9eTUJr6NiNMutCE1sqUf9XVIddgZ/BaG5t3NV2L+T+11QzAl+Xrh8wH/XeUCTmnU3NGkvCz/9Y7PMS+qQL7T7
WeGdYmEhb5s/5p/yjSYeqybr5sANOHs83OdeSXbop9cLWW+JksHmS//rHHcrrJhZgCb3P0EOpEoEMCarT6sJq0V1Hwf/YNFdJ9V7Ac654ALS+a9ffNthMUEJeY21QMtNOrEg3QH5RWBPn+yOYN/f38tzwlT1k6Ec94y/sBmeQVv8rRzkkiMSXeAL5ATdJntq8NQq5JbvLQDNnZnHQthZt+uhcUf08mWlRrxxBUaE6xLppgMqFdYSjLGvgn/d8FZ9y7UCg5ZBhgP1rrRQL1COpNKKlJLf5laqwiGAucIDmzSbhO+MidSauDLWuv+fsdd2QYk98PHxqNrPYLrlAlABFi3JEApBm4IlrGbHxKg6dRiy7L1c9xWnAD7E3XrZrSc6DXvGRsjMXWoQdlp4CX5H3cdH9sjIE6akWqiwwrOP6QTbJcxmJGv/MVhsDVrVKmrKSn2H0/Us1fyYCHCOyCSc2L96uId8i9wQO1NXj+1PJmUq3tJ8U0TUwTblOEQdYej99xEI8EzsXLjNJHCgbDygtHBYd/SHToXH3ISwfoLmU+Q9JlS1woaUpVa5sdvbsr4BXR6J" # noqa: E501
-
self.vca_collection.find_one.return_value = {
"_id": "2ade7f0e-9b58-4dbd-93a3-4ec076185d39",
"schema_version": "1.11",
@@ -216,7 +223,7 @@
"secret": encrypted_secret,
"cacert": encrypted_cacert,
}
- self.admin_collection.find_one.return_value = {
+ self.encryption.admin_collection.find_one.return_value = {
"serial": b"l+U3HDp9td+UjQ+AN+Ypj/Uh7n3C+rMJueQNNxkIpWI="
}
connection_data = self.loop.run_until_complete(
diff --git a/n2vc/tests/unit/utils.py b/n2vc/tests/unit/utils.py
index b2d5c60..807c892 100644
--- a/n2vc/tests/unit/utils.py
+++ b/n2vc/tests/unit/utils.py
@@ -130,7 +130,6 @@
class FakeWatcher(AsyncMock):
-
delta_to_return = None
async def Next(self):
diff --git a/requirements-dev.in b/requirements-dev.in
index 279709a..51cb825 100644
--- a/requirements-dev.in
+++ b/requirements-dev.in
@@ -12,5 +12,5 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-git+https://osm.etsi.org/gerrit/osm/common.git@master#egg=osm-common
--r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=master
+git+https://osm.etsi.org/gerrit/osm/common.git@paas#egg=osm-common
+-r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=paas
diff --git a/requirements-dev.txt b/requirements-dev.txt
index ed7864e..0304e4f 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -14,19 +14,31 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#######################################################################################
-aiokafka==0.7.2
+aiokafka==0.8.0
# via -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=paas
+async-timeout==4.0.2
+ # via
+ # -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=paas
+ # aiokafka
dataclasses==0.6
# via -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=paas
kafka-python==2.0.2
# via
# -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=paas
# aiokafka
+motor==1.3.1
+ # via -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=paas
osm-common @ git+https://osm.etsi.org/gerrit/osm/common.git@paas
# via -r requirements-dev.in
-pycrypto==2.6.1
+packaging==23.0
+ # via
+ # -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=paas
+ # aiokafka
+pycryptodome==3.17
# via -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=paas
-pymongo==3.12.3
- # via -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=paas
+pymongo==3.13.0
+ # via
+ # -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=paas
+ # motor
pyyaml==5.4.1
# via -r https://osm.etsi.org/gitweb/?p=osm/common.git;a=blob_plain;f=requirements.txt;hb=paas
diff --git a/requirements-test.in b/requirements-test.in
index 1b33834..9d355d8 100644
--- a/requirements-test.in
+++ b/requirements-test.in
@@ -13,6 +13,7 @@
# limitations under the License.
asynctest
+charset-normalizer<3 # Required by aiohttp in LCM
coverage
flake8<5.0.0
mock
diff --git a/requirements-test.txt b/requirements-test.txt
index bac6eac..3d3325c 100644
--- a/requirements-test.txt
+++ b/requirements-test.txt
@@ -16,11 +16,13 @@
#######################################################################################
asynctest==0.13.0
# via -r requirements-test.in
-certifi==2022.9.24
+certifi==2022.12.7
# via requests
charset-normalizer==2.1.1
- # via requests
-coverage==6.5.0
+ # via
+ # -r requirements-test.in
+ # requests
+coverage==7.1.0
# via -r requirements-test.in
flake8==4.0.1
# via -r requirements-test.in
@@ -28,7 +30,7 @@
# via requests
mccabe==0.6.1
# via flake8
-mock==4.0.3
+mock==5.0.1
# via -r requirements-test.in
nose2==0.12.0
# via -r requirements-test.in
@@ -36,11 +38,11 @@
# via flake8
pyflakes==2.4.0
# via flake8
-requests==2.28.1
+requests==2.28.2
# via requests-mock
requests-mock==1.10.0
# via -r requirements-test.in
six==1.16.0
# via requests-mock
-urllib3==1.26.12
+urllib3==1.26.14
# via requests
diff --git a/requirements.in b/requirements.in
index 03898e9..661da61 100644
--- a/requirements.in
+++ b/requirements.in
@@ -11,12 +11,10 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-async-timeout<4
+charset-normalizer<3 # Required by aiohttp in LCM
juju==3.0.0
kubernetes
motor==1.3.1
pyasn1
-pyyaml<6
+pyyaml==5.4.1
retrying-async
-certifi==2022.9.24
diff --git a/requirements.txt b/requirements.txt
index 31621dd..c539a67 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -14,17 +14,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#######################################################################################
-async-timeout==3.0.1
- # via
- # -r requirements.in
- # retrying-async
+async-timeout==4.0.2
+ # via retrying-async
bcrypt==4.0.1
# via paramiko
-cachetools==5.2.0
+cachetools==5.3.0
# via google-auth
-certifi==2022.9.24
+certifi==2022.12.7
# via
- # -r requirements.in
# kubernetes
# requests
cffi==1.15.1
@@ -32,10 +29,12 @@
# cryptography
# pynacl
charset-normalizer==2.1.1
- # via requests
-cryptography==38.0.1
+ # via
+ # -r requirements.in
+ # requests
+cryptography==39.0.0
# via paramiko
-google-auth==2.12.0
+google-auth==2.16.0
# via kubernetes
idna==3.4
# via requests
@@ -43,7 +42,7 @@
# via -r requirements.in
jujubundlelib==0.5.7
# via theblues
-kubernetes==24.2.0
+kubernetes==25.3.0
# via
# -r requirements.in
# juju
@@ -55,9 +54,9 @@
# via -r requirements.in
mypy-extensions==0.4.3
# via typing-inspect
-oauthlib==3.2.1
+oauthlib==3.2.2
# via requests-oauthlib
-paramiko==2.11.0
+paramiko==2.12.0
# via juju
protobuf==3.20.3
# via macaroonbakery
@@ -73,7 +72,7 @@
# via cffi
pymacaroons==0.13.0
# via macaroonbakery
-pymongo==3.12.3
+pymongo==3.13.0
# via motor
pynacl==1.5.0
# via
@@ -86,7 +85,7 @@
# macaroonbakery
python-dateutil==2.8.2
# via kubernetes
-pytz==2022.4
+pytz==2022.7.1
# via pyrfc3339
pyyaml==5.4.1
# via
@@ -94,7 +93,7 @@
# juju
# jujubundlelib
# kubernetes
-requests==2.28.1
+requests==2.28.2
# via
# kubernetes
# macaroonbakery
@@ -116,17 +115,17 @@
# python-dateutil
theblues==0.5.2
# via juju
-toposort==1.7
+toposort==1.9
# via juju
typing-extensions==4.4.0
# via typing-inspect
typing-inspect==0.8.0
# via juju
-urllib3==1.26.12
+urllib3==1.26.14
# via
# kubernetes
# requests
-websocket-client==1.4.1
+websocket-client==1.5.0
# via kubernetes
websockets==7.0
# via juju