import os
import shutil
import functools
+import re
# import logging
from deepdiff import DeepDiff
from uuid import uuid4
from re import fullmatch
from zipfile import ZipFile
+from urllib.parse import urlparse
from osm_nbi.validation import (
ValidationError,
pdu_new_schema,
validate_input,
vnfpkgop_new_schema,
)
-from osm_nbi.base_topic import BaseTopic, EngineException, get_iterable
+from osm_nbi.base_topic import (
+ BaseTopic,
+ EngineException,
+ get_iterable,
+ detect_descriptor_usage,
+)
from osm_im import etsi_nfv_vnfd, etsi_nfv_nsd
from osm_im.nst import nst as nst_im
from pyangbind.lib.serialise import pybindJSONDecoder
__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
# Accepts Helm chart references of the form "[repo/]chart-name" built from
# lowercase alphanumerics with inner dashes (URLs are handled separately by
# VnfdTopic.validate_helm_chart).
# NOTE(review): the trailing "*" makes the chart-name part optional after a
# "repo/" prefix — confirm this matches the intended Helm naming rules.
valid_helm_chart_re = re.compile(
    r"^[a-z0-9]([-a-z0-9]*[a-z0-9]/)?([a-z0-9]([-a-z0-9]*[a-z0-9])?)*$"
)
+
class DescriptorTopic(BaseTopic):
    def __init__(self, db, fs, msg, auth):
        """Create the topic, forwarding the backend handlers unchanged.

        :param db: database handler, passed to BaseTopic.__init__
        :param fs: filesystem/storage handler, passed to BaseTopic.__init__
        :param msg: message-bus handler, passed to BaseTopic.__init__
        :param auth: authentication handler, passed to BaseTopic.__init__
        """
        super().__init__(db, fs, msg, auth)
+ def _validate_input_new(self, indata, storage_params, force=False):
+ return indata
def check_conflict_on_edit(self, session, final_content, edit_content, _id):
final_content = super().check_conflict_on_edit(
if self.db.get_one(self.topic, _filter, fail_on_empty=False):
raise EngineException(
"{} with id '{}' already exists for this project".format(
- self.topic[:-1], final_content["id"]
+ (str(self.topic))[:-1], final_content["id"]
),
HTTPStatus.CONFLICT,
)
self.fs.file_delete(_id + ":" + str(revision), ignore_non_exist=True)
revision = revision - 1
-
@staticmethod
def get_one_by_id(db, session, topic, id):
# find owned by this project
# Avoid override in this case as the target is userDefinedData, but not vnfd,nsd descriptors
# indata = DescriptorTopic._validate_input_new(self, indata, project_id=session["force"])
- content = {"_admin": {
- "userDefinedData": indata,
- "revision": 0
- }}
+ content = {"_admin": {"userDefinedData": indata, "revision": 0}}
self.format_on_new(
content, session["project_id"], make_public=session["public"]
or "application/x-gzip" in content_type
):
compressed = "gzip"
- if (
- content_type
- and "application/zip" in content_type
- ):
+ if content_type and "application/zip" in content_type:
compressed = "zip"
filename = headers.get("Content-Filename")
if not filename and compressed:
# TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
file_pkg = None
error_text = ""
+ fs_rollback = []
+
try:
if content_range_text:
content_range = (
else:
self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
self.fs.mkdir(proposed_revision_path)
+ fs_rollback.append(proposed_revision_path)
storage = self.fs.get_params()
- storage["folder"] = _id
+ storage["folder"] = proposed_revision_path
file_path = (proposed_revision_path, filename)
if self.fs.file_exists(file_path, "file"):
)
if (
- (
- zipfilename.endswith(".yaml")
- or zipfilename.endswith(".json")
- or zipfilename.endswith(".yml")
- ) and (
- zipfilename.find("/") < 0
- or zipfilename.find("Definitions") >= 0
- )
+ zipfilename.endswith(".yaml")
+ or zipfilename.endswith(".json")
+ or zipfilename.endswith(".yml")
+ ) and (
+ zipfilename.find("/") < 0
+ or zipfilename.find("Definitions") >= 0
):
storage["pkg-dir"] = ""
if descriptor_file_name:
indata = json.load(content)
else:
error_text = "Invalid yaml format "
- indata = yaml.load(content, Loader=yaml.SafeLoader)
+ indata = yaml.safe_load(content)
# Need to close the file package here so it can be copied from the
# revision to the current, unrevisioned record
if revision > 1:
try:
self._validate_descriptor_changes(
+ _id,
descriptor_file_name,
current_revision_path,
- proposed_revision_path)
+ proposed_revision_path,
+ )
except Exception as e:
- shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
- shutil.rmtree(self.fs.path + proposed_revision_path, ignore_errors=True)
+ shutil.rmtree(
+ self.fs.path + current_revision_path, ignore_errors=True
+ )
+ shutil.rmtree(
+ self.fs.path + proposed_revision_path, ignore_errors=True
+ )
# Only delete the new revision. We need to keep the original version in place
# as it has not been changed.
self.fs.file_delete(proposed_revision_path, ignore_non_exist=True)
raise e
- # Copy the revision to the active package name by its original id
- shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
- os.rename(self.fs.path + proposed_revision_path, self.fs.path + current_revision_path)
- self.fs.file_delete(current_revision_path, ignore_non_exist=True)
- self.fs.mkdir(current_revision_path)
- self.fs.reverse_sync(from_path=current_revision_path)
- shutil.rmtree(self.fs.path + _id)
-
- current_desc["_admin"]["storage"] = storage
- current_desc["_admin"]["onboardingState"] = "ONBOARDED"
- current_desc["_admin"]["operationalState"] = "ENABLED"
-
indata = self._remove_envelop(indata)
# Override descriptor with query string kwargs
if kwargs:
self._update_input_with_kwargs(indata, kwargs)
+ current_desc["_admin"]["storage"] = storage
+ current_desc["_admin"]["onboardingState"] = "ONBOARDED"
+ current_desc["_admin"]["operationalState"] = "ENABLED"
+ current_desc["_admin"]["modified"] = time()
+ current_desc["_admin"]["revision"] = revision
+
deep_update_rfc7396(current_desc, indata)
current_desc = self.check_conflict_on_edit(
session, current_desc, indata, _id=_id
)
- current_desc["_admin"]["modified"] = time()
- current_desc["_admin"]["revision"] = revision
+
+ # Copy the revision to the active package name by its original id
+ shutil.rmtree(self.fs.path + current_revision_path, ignore_errors=True)
+ os.rename(
+ self.fs.path + proposed_revision_path,
+ self.fs.path + current_revision_path,
+ )
+ self.fs.file_delete(current_revision_path, ignore_non_exist=True)
+ self.fs.mkdir(current_revision_path)
+ self.fs.reverse_sync(from_path=current_revision_path)
+
+ shutil.rmtree(self.fs.path + _id)
+
self.db.replace(self.topic, _id, current_desc)
# Store a copy of the package as a point in time revision
revision_desc = dict(current_desc)
revision_desc["_id"] = _id + ":" + str(revision_desc["_admin"]["revision"])
self.db.create(self.topic + "_revisions", revision_desc)
+ fs_rollback = []
indata["_id"] = _id
self._send_msg("edited", indata)
finally:
if file_pkg:
file_pkg.close()
+ for file in fs_rollback:
+ self.fs.file_delete(file, ignore_non_exist=True)
def get_file(self, session, _id, path=None, accept_header=None):
"""
)
storage = content["_admin"]["storage"]
if path is not None and path != "$DESCRIPTOR": # artifacts
- if not storage.get("pkg-dir"):
+ if not storage.get("pkg-dir") and not storage.get("folder"):
raise EngineException(
"Packages does not contains artifacts",
http_code=HTTPStatus.BAD_REQUEST,
# to preserve current expected behaviour
if "userDefinedData" in indata:
data = indata.pop("userDefinedData")
- if type(data) == dict:
+ if isinstance(data, dict):
indata["_admin"]["userDefinedData"] = data
else:
raise EngineException(
return indata
    def _validate_descriptor_changes(
        self,
        descriptor_id,
        descriptor_file_name,
        old_descriptor_directory,
        new_descriptor_directory,
    ):
        """Hook to veto disallowed edits between two descriptor revisions.

        The base implementation accepts every change; subclasses
        (VnfdTopic, NsdTopic) override it to diff the old and new
        descriptor contents and reject disallowed modifications.

        :param descriptor_id: _id of the descriptor being updated
        :param descriptor_file_name: descriptor file name inside the package
        :param old_descriptor_directory: storage path of the revision in use
        :param new_descriptor_directory: storage path of the proposed revision
        :raises EngineException: (in overrides) when a disallowed change is found
        """
        # Example:
        # raise EngineException(
        #     "Error in validating new descriptor: <NODE> cannot be modified",
        # )
        pass
+
class VnfdTopic(DescriptorTopic):
topic = "vnfds"
topic_msg = "vnfd"
self.validate_internal_virtual_links(indata)
self.validate_monitoring_params(indata)
self.validate_scaling_group_descriptor(indata)
+ self.validate_helm_chart(indata)
return indata
+ @staticmethod
+ def validate_helm_chart(indata):
+ def is_url(url):
+ result = urlparse(url)
+ return all([result.scheme, result.netloc])
+
+ kdus = indata.get("kdu", [])
+ for kdu in kdus:
+ helm_chart_value = kdu.get("helm-chart")
+ if not helm_chart_value:
+ continue
+ if not (
+ valid_helm_chart_re.match(helm_chart_value) or is_url(helm_chart_value)
+ ):
+ raise EngineException(
+ "helm-chart '{}' is not valid".format(helm_chart_value),
+ http_code=HTTPStatus.UNPROCESSABLE_ENTITY,
+ )
+
@staticmethod
def validate_mgmt_interface_connection_point(indata):
if not indata.get("vdu"):
return False
elif not storage_params.get("pkg-dir"):
if self.fs.file_exists("{}_".format(storage_params["folder"]), "dir"):
- f = "{}_/{}".format(
- storage_params["folder"], folder
- )
+ f = "{}_/{}".format(storage_params["folder"], folder)
else:
- f = "{}/{}".format(
- storage_params["folder"], folder
- )
+ f = "{}/{}".format(storage_params["folder"], folder)
if file:
return self.fs.file_exists("{}/{}".format(f, file), "file")
else:
"""
super().delete_extra(session, _id, db_content, not_send_msg)
self.db.del_list("vnfpkgops", {"vnfPkgId": _id})
- self.db.del_list(self.topic+"_revisions", {"_id": {"$regex": _id}})
+ self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})
def sol005_projection(self, data):
data["onboardingState"] = data["_admin"]["onboardingState"]
Returns:
vnfd (dict): VNFD which does not include policies
"""
- # TODO: Extract the policy related parts from the VNFD
+ for df in vnfd.get("df", {}):
+ for policy in ["scaling-aspect", "healing-aspect"]:
+ if df.get(policy, {}):
+ df.pop(policy)
+ for vdu in vnfd.get("vdu", {}):
+ for alarm_policy in ["alarm", "monitoring-parameter"]:
+ if vdu.get(alarm_policy, {}):
+ vdu.pop(alarm_policy)
return vnfd
@staticmethod
def _validate_descriptor_changes(
self,
+ descriptor_id: str,
descriptor_file_name: str,
old_descriptor_directory: str,
new_descriptor_directory: str,
Args:
old_descriptor_directory (str): Directory of descriptor which is in-use
- new_descriptor_directory (str): Directory of directory which is proposed to update (new revision)
+ new_descriptor_directory (str): Directory of descriptor which is proposed to update (new revision)
Returns:
None
EngineException: In case of error when there are unallowed changes
"""
try:
+ # If VNFD does not exist in DB or it is not in use by any NS,
+ # validation is not required.
+ vnfd = self.db.get_one("vnfds", {"_id": descriptor_id})
+ if not vnfd or not detect_descriptor_usage(vnfd, "vnfds", self.db):
+ return
+
+ # Get the old and new descriptor contents in order to compare them.
with self.fs.file_open(
(old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
) as old_descriptor_file:
with self.fs.file_open(
- (new_descriptor_directory, descriptor_file_name), "r"
+ (new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
) as new_descriptor_file:
- old_content = yaml.load(
- old_descriptor_file.read(), Loader=yaml.SafeLoader
- )
- new_content = yaml.load(
- new_descriptor_file.read(), Loader=yaml.SafeLoader
- )
+ old_content = yaml.safe_load(old_descriptor_file.read())
+ new_content = yaml.safe_load(new_descriptor_file.read())
+
+ # If software version has changed, we do not need to validate
+ # the differences anymore.
if old_content and new_content:
if self.find_software_version(
old_content
) != self.find_software_version(new_content):
return
+
disallowed_change = DeepDiff(
self.remove_modifiable_items(old_content),
self.remove_modifiable_items(new_content),
)
+
if disallowed_change:
changed_nodes = functools.reduce(
lambda a, b: a + " , " + b,
).keys()
],
)
+
raise EngineException(
f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
"there are disallowed changes in the vnf descriptor.",
topic_msg = "nsd"
    def __init__(self, db, fs, msg, auth):
        """Create the NSD topic, forwarding the backend handlers unchanged.

        :param db: database handler, passed to the parent topic
        :param fs: filesystem/storage handler, passed to the parent topic
        :param msg: message-bus handler, passed to the parent topic
        :param auth: authentication handler, passed to the parent topic
        """
        super().__init__(db, fs, msg, auth)
def pyangbind_validation(self, item, data, force=False):
if self._descriptor_data_is_in_old_format(data):
# to preserve current expected behaviour
if "userDefinedData" in indata:
data = indata.pop("userDefinedData")
- if type(data) == dict:
+ if isinstance(data, dict):
indata["_admin"]["userDefinedData"] = data
else:
raise EngineException(
:raises: FsException in case of error while deleting associated storage
"""
super().delete_extra(session, _id, db_content, not_send_msg)
- self.db.del_list(self.topic+"_revisions", { "_id": { "$regex": _id}})
+ self.db.del_list(self.topic + "_revisions", {"_id": {"$regex": _id}})
@staticmethod
def extract_day12_primitives(nsd: dict) -> dict:
def _validate_descriptor_changes(
self,
+ descriptor_id: str,
descriptor_file_name: str,
old_descriptor_directory: str,
new_descriptor_directory: str,
Args:
old_descriptor_directory: Directory of descriptor which is in-use
- new_descriptor_directory: Directory of directory which is proposed to update (new revision)
+ new_descriptor_directory: Directory of descriptor which is proposed to update (new revision)
Returns:
None
"""
try:
+ # If NSD does not exist in DB, or it is not in use by any NS,
+ # validation is not required.
+ nsd = self.db.get_one("nsds", {"_id": descriptor_id}, fail_on_empty=False)
+ if not nsd or not detect_descriptor_usage(nsd, "nsds", self.db):
+ return
+
+ # Get the old and new descriptor contents in order to compare them.
with self.fs.file_open(
- (old_descriptor_directory, descriptor_file_name), "r"
+ (old_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
) as old_descriptor_file:
with self.fs.file_open(
(new_descriptor_directory.rstrip("/"), descriptor_file_name), "r"
) as new_descriptor_file:
- old_content = yaml.load(
- old_descriptor_file.read(), Loader=yaml.SafeLoader
- )
- new_content = yaml.load(
- new_descriptor_file.read(), Loader=yaml.SafeLoader
- )
+ old_content = yaml.safe_load(old_descriptor_file.read())
+ new_content = yaml.safe_load(new_descriptor_file.read())
+
if old_content and new_content:
disallowed_change = DeepDiff(
self.remove_modifiable_items(old_content),
self.remove_modifiable_items(new_content),
)
+
if disallowed_change:
changed_nodes = functools.reduce(
lambda a, b: a + ", " + b,
).keys()
],
)
+
raise EngineException(
f"Error in validating new descriptor: {changed_nodes} cannot be modified, "
"there are disallowed changes in the ns descriptor. ",