From 722a3209e07362fc2de8b9a75d3938bef1c78783 Mon Sep 17 00:00:00 2001
From: bravof
Date: Fri, 15 Jan 2021 11:54:45 -0300
Subject: [PATCH 01/16] feat(dbmongo): upsert option for set_one

Change-Id: I3dd38738e7ef8c1b70df7742d08eb8f8d52529e1
Signed-off-by: bravof
---
 osm_common/dbmongo.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/osm_common/dbmongo.py b/osm_common/dbmongo.py
index 56f387b..7fc29dc 100644
--- a/osm_common/dbmongo.py
+++ b/osm_common/dbmongo.py
@@ -363,13 +363,13 @@ class DbMongo(DbBase):
             raise DbException(e)
 
     def set_one(self, table, q_filter, update_dict, fail_on_empty=True, unset=None, pull=None, push=None,
-                push_list=None, pull_list=None):
+                push_list=None, pull_list=None, upsert=False):
         """
         Modifies an entry at database
         :param table: collection or table
         :param q_filter: Filter
         :param update_dict: Plain dictionary with the content to be updated. It is a dot separated keys and a value
-        :param fail_on_empty: If nothing matches filter it returns None unless this flag is set tu True, in which case
+        :param fail_on_empty: If nothing matches filter it returns None unless this flag is set to True, in which case
            it raises a DbException
        :param unset: Plain dictionary with the content to be removed if exist. It is a dot separated keys, value is
            ignored. If not exist, it is ignored
        :param pull: Plain dictionary with the content to be removed from an array. It is a dot separated keys and
            value if it is an array is removed one item of the array
@@ -380,6 +380,8 @@ class DbMongo(DbBase):
            is appended to the end of the array
        :param push_list: Same as push but values are arrays where each item is appended instead of appending the
            whole array
+        :param upsert: If this parameter is set to True and no document is found using 'q_filter' it will be created.
+            By default this is false.
        :return: Dict with the number of entries modified. None if no matching is found.
        """
        try:
@@ -399,7 +401,7 @@ class DbMongo(DbBase):
 
             with self.lock:
                 collection = self.db[table]
-                rows = collection.update_one(self._format_filter(q_filter), db_oper)
+                rows = collection.update_one(self._format_filter(q_filter), db_oper, upsert=upsert)
                 if rows.matched_count == 0:
                     if fail_on_empty:
                         raise DbException("Not found any {} with filter='{}'".format(table[:-1], q_filter),
-- 
2.25.1
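With this flag, set_one can create the document it was asked to modify when the
filter matches nothing. A minimal sketch of how a caller might use it, assuming
the usual DbMongo connection flow; the host/port/name values and the document
content below are illustrative, not part of the patch:

    from osm_common.dbmongo import DbMongo

    db = DbMongo()
    # illustrative config; the connect config also accepts "uri" and "replicaset"
    db.db_connect({"host": "mongo", "port": 27017, "name": "osm"})

    # Creates the "version" document if it does not exist yet, otherwise
    # applies update_dict to the matching document.
    db.set_one(
        "admin",
        q_filter={"_id": "version"},
        update_dict={"status": "ENABLED"},
        fail_on_empty=False,
        upsert=True,
    )

Note that pymongo reports matched_count == 0 when an upsert inserts a new
document, so the sketch keeps fail_on_empty=False: combining upsert=True with
the default fail_on_empty=True would still raise on the insert path.
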
From b483b920caa7737203e316cfd6f44fa0eb861e47 Mon Sep 17 00:00:00 2001
From: beierlm
Date: Thu, 11 Feb 2021 14:57:14 -0500
Subject: [PATCH 02/16] Feature 10296 Pip Standardization

Creates standard template for tox.ini
Introduces 'safety' for listing upstream CVEs
Pins all versions of all upstream modules
Removes Makefile step and places all build logic into tox.ini

Change-Id: I31129ce9de342595f7ad24603107697ddab831c4
Feature: 10296
Signed-off-by: beierlm
---
 .gitignore                         |  92 ++++++++++++++++++++
 Dockerfile                         |  27 +++++--
 MANIFEST.in                        |   2 +-
 debian/python3-osm-common.postinst |  31 --------
 devops-stages/stage-build.sh       |   5 +-
 devops-stages/stage-test.sh        |   5 +-
 Makefile => nose2.cfg              |  18 ++---
 requirements-dist.in               |  17 +++++
 requirements-dist.txt              |  23 ++++++
 requirements-test.in               |  18 +++++
 requirements-test.txt              |  40 ++++++++++
 requirements.in                    |  20 +++++
 requirements.txt                   |  28 +++++++
 setup.py                           |  24 ++----
 stdeb.cfg                          |  16 +++-
 tox.ini                            | 119 ++++++++++++++++---------
 16 files changed, 378 insertions(+), 107 deletions(-)
 create mode 100644 .gitignore
 delete mode 100755 debian/python3-osm-common.postinst
 rename Makefile => nose2.cfg (62%)
 create mode 100644 requirements-dist.in
 create mode 100644 requirements-dist.txt
 create mode 100644 requirements-test.in
 create mode 100644 requirements-test.txt
 create mode 100644 requirements.in
 create mode 100644 requirements.txt

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..a022457
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,92 @@
+# Copyright 2017 Intel Research and Development Ireland Limited
+# *************************************************************
+
+# This file is part of OSM Monitoring module
+# All Rights Reserved to Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: helena.mcgough@intel.com or adrian.hoban@intel.com
+##
+*.py[cod]
+
+# C extensions
+*.so
+
+# log files
+*.log
+
+# Packages
+*.egg
+*.egg-info
+dist
+build
+.eggs
+eggs
+parts
+bin
+var
+sdist
+develop-eggs
+.installed.cfg
+lib
+lib64
+nohup.out
+
+# Installer logs
+pip-log.txt
+
+# Unit test / coverage reports
+.coverage
+cover
+coverage.xml
+.tox
+nosetests.xml
+.testrepository
+.venv
+.cache
+
+# Translations
+*.mo
+
+# Complexity
+output/*.html
+output/*/index.html
+
+# Sphinx
+doc/build
+
+# pbr generates these
+AUTHORS
+ChangeLog
+
+# Editors
+*~
+.*.swp
+.*sw?
+.settings/
+__pycache__/
+.idea
+*.db
+.vscode/*
+!.vscode/settings.json
+!.vscode/tasks.json
+!.vscode/launch.json
+!.vscode/extensions.json
+*.code-workspace
+.vscode
+.project
+.pydevproject
+deb_dist
+*.tar.gz
diff --git a/Dockerfile b/Dockerfile
index 43afa1c..3fad212 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -10,16 +10,27 @@
 # implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-# This Dockerfile is intented for devops and deb package generation
+########################################################################################
+# This Dockerfile is intended for devops testing and deb package generation
+#
+# To run stage 2 locally:
+#
+# docker build -t stage2 .
+# docker run -ti -v `pwd`:/work -w /work --entrypoint /bin/bash stage2 +# devops-stages/stage-test.sh +# devops-stages/stage-build.sh # -# Use Dockerfile.local for running osm/LCM in a docker container from source FROM ubuntu:18.04 -RUN apt-get update && apt-get -y install git make python python3 \ - libcurl4-gnutls-dev libgnutls28-dev tox python3-dev python3-pip \ - debhelper python-setuptools python-all python3-all apt-utils \ - python-magic && \ - DEBIAN_FRONTEND=noninteractive pip3 install -U stdeb setuptools-version-command +RUN DEBIAN_FRONTEND=noninteractive apt-get update && \ + DEBIAN_FRONTEND=noninteractive apt-get -y install \ + debhelper \ + git \ + python3 \ + python3-all \ + python3-dev \ + python3-setuptools +RUN python3 -m easy_install pip==21.0.1 +RUN pip3 install tox==3.22.0 diff --git a/MANIFEST.in b/MANIFEST.in index cbc5d1e..e8fc479 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -17,5 +17,5 @@ ## include README.rst -recursive-include osm_common *.py *.xml *.sh +recursive-include osm_common *.py *.xml *.sh *.txt recursive-include devops-stages * \ No newline at end of file diff --git a/debian/python3-osm-common.postinst b/debian/python3-osm-common.postinst deleted file mode 100755 index fa43cbc..0000000 --- a/debian/python3-osm-common.postinst +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/bash - -## -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# -# For those usages not covered by the Apache License, Version 2.0 please -# contact with: OSM_TECH@list.etsi.org -## - -echo "POST INSTALL OSM-COMMON" -echo "Installing python dependencies via pip..." - -pip3 install dataclasses -pip3 install --upgrade aiokafka -pip3 install --upgrade pymongo -pip3 install --upgrade pyyaml==5.* -pip3 install pycrypto # added dependency python3-crypto seems not working - -#Creation of log folder -mkdir -p /var/log/osm - diff --git a/devops-stages/stage-build.sh b/devops-stages/stage-build.sh index dfb7978..05b232e 100755 --- a/devops-stages/stage-build.sh +++ b/devops-stages/stage-build.sh @@ -13,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -# moved to a Makefile in order to add post install. Needed for "pip3 install aiokafka", -# that is not available with a package +rm -rf dist deb_dist osm_common-*.tar.gz osm_common.egg-info .eggs .tox -make clean package +tox -e dist diff --git a/devops-stages/stage-test.sh b/devops-stages/stage-test.sh index 2e7f488..e5a8e85 100755 --- a/devops-stages/stage-test.sh +++ b/devops-stages/stage-test.sh @@ -13,6 +13,5 @@ # See the License for the specific language governing permissions and # limitations under the License. -rm -f nosetests.xml -tox # flake8 unittest pytest cover - +OUTPUT=$(TOX_PARALLEL_NO_SPINNER=1 tox --parallel=auto) +printf "$OUTPUT" diff --git a/Makefile b/nose2.cfg similarity index 62% rename from Makefile rename to nose2.cfg index ed4ad8d..6823267 100644 --- a/Makefile +++ b/nose2.cfg @@ -1,4 +1,5 @@ -# Copyright 2018 Telefonica S.A. 
+# -*- coding: utf-8 -*- +# Copyright ETSI Contributors and Others. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,12 +14,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -all: clean package +[unittest] +plugins = nose2.plugins.junitxml -clean: - rm -rf dist deb_dist osm_common-*.tar.gz osm_common.egg-info .eggs - -package: - python3 setup.py --command-packages=stdeb.command sdist_dsc - cp debian/python3-osm-common.postinst deb_dist/osm-common*/debian - cd deb_dist/osm-common*/ && dpkg-buildpackage -rfakeroot -uc -us +[junit-xml] +always-on = True +keep_restricted = False +path = nosetests.xml +test_fullname = False diff --git a/requirements-dist.in b/requirements-dist.in new file mode 100644 index 0000000..11f0a2a --- /dev/null +++ b/requirements-dist.in @@ -0,0 +1,17 @@ +# Copyright ETSI Contributors and Others. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +stdeb +setuptools-version-command \ No newline at end of file diff --git a/requirements-dist.txt b/requirements-dist.txt new file mode 100644 index 0000000..7393626 --- /dev/null +++ b/requirements-dist.txt @@ -0,0 +1,23 @@ +setuptools-version-command==2.2 + # via -r requirements-dist.in +stdeb==0.10.0 + # via -r requirements-dist.in + +# The following packages are considered to be unsafe in a requirements file: +# setuptools +####################################################################################### +# Copyright ETSI Contributors and Others. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +####################################################################################### diff --git a/requirements-test.in b/requirements-test.in new file mode 100644 index 0000000..0378739 --- /dev/null +++ b/requirements-test.in @@ -0,0 +1,18 @@ +# Copyright ETSI Contributors and Others. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +coverage +nose2 +pytest \ No newline at end of file diff --git a/requirements-test.txt b/requirements-test.txt new file mode 100644 index 0000000..58e185a --- /dev/null +++ b/requirements-test.txt @@ -0,0 +1,40 @@ +attrs==20.3.0 + # via pytest +coverage==5.5 + # via + # -r requirements-test.in + # nose2 +iniconfig==1.1.1 + # via pytest +nose2==0.10.0 + # via -r requirements-test.in +packaging==20.9 + # via pytest +pluggy==0.13.1 + # via pytest +py==1.10.0 + # via pytest +pyparsing==2.4.7 + # via packaging +pytest==6.2.2 + # via -r requirements-test.in +six==1.15.0 + # via nose2 +toml==0.10.2 + # via pytest +####################################################################################### +# Copyright ETSI Contributors and Others. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +####################################################################################### diff --git a/requirements.in b/requirements.in new file mode 100644 index 0000000..1ce6a65 --- /dev/null +++ b/requirements.in @@ -0,0 +1,20 @@ +# Copyright ETSI Contributors and Others. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +pymongo +aiokafka +pyyaml +pycrypto +dataclasses \ No newline at end of file diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..2ca19f5 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,28 @@ +aiokafka==0.7.0 + # via -r requirements.in +dataclasses==0.6 + # via -r requirements.in +kafka-python==2.0.2 + # via aiokafka +pycrypto==2.6.1 + # via -r requirements.in +pymongo==3.11.3 + # via -r requirements.in +pyyaml==5.4.1 + # via -r requirements.in +####################################################################################### +# Copyright ETSI Contributors and Others. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+####################################################################################### diff --git a/setup.py b/setup.py index 157ffec..f53c839 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2018 Telefonica S.A. +# Copyright ETSI OSM Contributors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -25,31 +25,21 @@ _name = "osm_common" README = open(os.path.join(here, 'README.rst')).read() setup( - # python_requires='>=3.5', name=_name, description='OSM common utilities', long_description=README, version_command=('git describe --tags --long --dirty --match v*', 'pep440-git-full'), - # version=VERSION, - # python_requires='>3.5', + author='ETSI OSM', - author_email='alfonso.tiernosepulveda@telefonica.com', - maintainer='Alfonso Tierno', - maintainer_email='alfonso.tiernosepulveda@telefonica.com', + author_email='osmsupport@etsi.com', + maintainer='ETSI OSM', + maintainer_email='osmsupport@etsi.com', + url='https://osm.etsi.org/gitweb/?p=osm/common.git;a=summary', license='Apache 2.0', setup_requires=['setuptools-version-command'], packages=[_name], include_package_data=True, - # scripts=['nbi.py'], - - # dataclasses is required by aiokafka for Python <3.7.0 - install_requires=[ - 'pymongo', - 'aiokafka', - 'PyYAML', - 'pycrypto', - 'dataclasses', - ], + ) diff --git a/stdeb.cfg b/stdeb.cfg index 5bd8819..c6a1b3a 100644 --- a/stdeb.cfg +++ b/stdeb.cfg @@ -1,3 +1,17 @@ +# Copyright ETSI Contributors and Others. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + [DEFAULT] X-Python3-Version : >= 3.5 -Depends3 : python3-pip, python3-crypto diff --git a/tox.ini b/tox.ini index a7cf7b4..c3341ea 100644 --- a/tox.ini +++ b/tox.ini @@ -1,4 +1,5 @@ -# Copyright 2018 Telefonica S.A. +####################################################################################### +# Copyright ETSI Contributors and Others. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,53 +13,103 @@ # implied. # See the License for the specific language governing permissions and # limitations under the License. 
+####################################################################################### [tox] -envlist = cover, flake8, unittest, pytest +envlist = black, cover, flake8, pylint, safety + +[tox:jenkins] +toxworkdir = /tmp/.tox [testenv] usedevelop = True basepython = python3 +setenv = VIRTUAL_ENV={envdir} + PYTHONDONTWRITEBYTECODE = 1 +deps = -r{toxinidir}/requirements.txt -[testenv:cover] -basepython = python3 -deps = - nose2 - nose2-cov - coverage - pytest - pytest-asyncio - pycrypto +####################################################################################### +[testenv:black] +deps = black +skip_install = true commands = - coverage erase - nose2 -C --coverage osm_common --plugin nose2.plugins.junitxml -s osm_common/tests - coverage report --omit='*tests*' - coverage html -d ./cover --omit='*tests*' - coverage xml -o coverage.xml --omit='*tests*' + - black --check --diff osm_common/ -[testenv:pytest] -basepython = python3 -deps = pytest - pytest-asyncio - pycrypto -commands = pytest osm_common +####################################################################################### +[testenv:cover] +deps = {[testenv]deps} + -r{toxinidir}/requirements-test.txt +commands = + sh -c 'rm -f nosetests.xml' + coverage erase + nose2 -C --coverage osm_common -s osm_common/tests + coverage report --omit='*tests*' + coverage html -d ./cover --omit='*tests*' + coverage xml -o coverage.xml --omit=*tests* +whitelist_externals = sh +####################################################################################### [testenv:flake8] -basepython = python3 deps = flake8 commands = - flake8 osm_common/ setup.py --max-line-length 120 --exclude .svn,CVS,.gz,.git,__pycache__,.tox,local,temp --ignore W291,W293,E226 + flake8 osm_common/ setup.py -[testenv:unittest] -basepython = python3 -deps = pycrypto - pytest -commands = python3 -m unittest discover osm_common.tests +####################################################################################### +[testenv:pylint] +deps = {[testenv]deps} + -r{toxinidir}/requirements-test.txt + pylint +commands = + pylint -E osm_common -[testenv:build] -basepython = python3 -deps = stdeb - setuptools-version-command -commands = python3 setup.py --command-packages=stdeb.command bdist_deb +####################################################################################### +[testenv:safety] +setenv = + LC_ALL=C.UTF-8 + LANG=C.UTF-8 +deps = {[testenv]deps} + safety +commands = + - safety check --full-report + +####################################################################################### +[testenv:pip-compile] +deps = pip-tools==5.5.0 +commands = + - sh -c 'for file in requirements*.in ; do pip-compile -rU --no-header $file ;\ + out=`echo $file | sed "s/.in/.txt/"` ; \ + head -16 tox.ini >> $out ;\ + done' +whitelist_externals = sh + +####################################################################################### +[testenv:dist] +deps = {[testenv]deps} + -r{toxinidir}/requirements-dist.txt + +# In the commands, we copy the requirements.txt to be presented as a source file (.py) +# so it gets included in the .deb package for others to consume +commands = + sh -c 'cp requirements.txt osm_common/requirements.txt' + python3 setup.py --command-packages=stdeb.command sdist_dsc + sh -c 'cd deb_dist/osm-common*/ && dpkg-buildpackage -rfakeroot -uc -us' + sh -c 'rm osm_common/requirements.txt' +whitelist_externals = sh +####################################################################################### +[flake8] +ignore = + W291, + W293, + 
E123,
+    E125,
+    E226,
+    E241
+exclude =
+    .git,
+    __pycache__,
+    .tox,
+max-line-length = 120
+show-source = True
+builtins = _
-- 
2.25.1
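With this patch the pinned requirements*.txt files become generated artifacts:
direct dependencies are declared unpinned in the *.in files, and the
pip-compile tox environment regenerates the pins (re-appending the license
header via the head -16 step above). A sketch of the resulting developer loop,
using only the entry points this patch defines; the dependency name is just an
example:

    # declare or change a direct dependency, unpinned
    echo "aiokafka" >> requirements.in

    # regenerate the pinned requirements*.txt files
    tox -e pip-compile

    # run the same gates CI runs, then build the .deb package
    devops-stages/stage-test.sh
    devops-stages/stage-build.sh
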
From fee977a304dbfadee797e25906df2db3cf51f1e3 Mon Sep 17 00:00:00 2001
From: beierlm
Date: Mon, 19 Apr 2021 15:15:31 -0400
Subject: [PATCH 03/16] Adding GitLab Scanner

Adds gitlab-ci yaml file to enable security scans in GitLab mirror

Change-Id: I1771c5fcd9d9fb77baaddd7b425054f5e5347dc6
Signed-off-by: beierlm
---
 .gitlab-ci.yml | 17 +++++++++++++++++
 1 file changed, 17 insertions(+)
 create mode 100644 .gitlab-ci.yml

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
new file mode 100644
index 0000000..eb9750e
--- /dev/null
+++ b/.gitlab-ci.yml
@@ -0,0 +1,17 @@
+# Copyright Contributors to ETSI OSM
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+include:
+  - project: 'devops/cicd'
+    file: '/templates/osm-base.yml'
-- 
2.25.1

From 2644b76248a1b96f7a47013b414e31b4e3feecf8 Mon Sep 17 00:00:00 2001
From: garciadeblas
Date: Wed, 24 Mar 2021 09:21:01 +0100
Subject: [PATCH 04/16] Reformat common to standardized format

Change-Id: I9ba54f3510e17de285e1e774d18ee5c7205818ad
Signed-off-by: garciadeblas
---
 Dockerfile                                   |    3 +
 osm_common/__init__.py                       |    5 +-
 osm_common/common_utils.py                   |    1 +
 osm_common/dbbase.py                         |  177 ++-
 osm_common/dbmemory.py                       |  262 +++-
 osm_common/dbmongo.py                        |  156 ++-
 osm_common/fsbase.py                         |    2 +-
 osm_common/fslocal.py                        |   37 +-
 osm_common/fsmongo.py                        |  191 +--
 osm_common/msgbase.py                        |   24 +-
 osm_common/msgkafka.py                       |   75 +-
 osm_common/msglocal.py                       |   24 +-
 osm_common/sol004_package.py                 |  107 +-
 .../Scripts/charms/simple/src/charm.py       |    1 -
 .../packages/invalid_package_vnf/manifest.mf |    2 +-
 .../Scripts/charms/simple/src/charm.py       |    1 -
 .../manifest.mf                              |    2 +-
 .../Scripts/charms/simple/src/charm.py       |    1 -
 .../native_charm_vnfd.mf                     |    2 +-
 osm_common/tests/test_dbbase.py              |  161 ++-
 osm_common/tests/test_dbmemory.py            | 1106 +++++++++++------
 osm_common/tests/test_fsbase.py              |   24 +-
 osm_common/tests/test_fslocal.py             |  323 +++--
 osm_common/tests/test_fsmongo.py             |  659 +++++-----
 osm_common/tests/test_msgbase.py             |   20 +-
 osm_common/tests/test_msglocal.py            |  366 +++---
 osm_common/tests/test_sol004_package.py      |  114 +-
 setup.py                                     |   27 +-
 tox.ini                                      |    7 +-
 29 files changed, 2486 insertions(+), 1394 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 3fad212..8230f42 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -34,3 +34,6 @@ RUN DEBIAN_FRONTEND=noninteractive apt-get update && \
 
 RUN python3 -m easy_install pip==21.0.1
 RUN pip3 install tox==3.22.0
+
+ENV LC_ALL C.UTF-8
+ENV LANG C.UTF-8
diff --git a/osm_common/__init__.py b/osm_common/__init__.py
index 96de80f..8bc5507 100644
--- a/osm_common/__init__.py
+++ b/osm_common/__init__.py
@@ -15,12 +15,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-version = '7.0.0.post4'
-date_version = '2019-01-21'
+version = "7.0.0.post4"
+date_version = "2019-01-21"
 
 # try to get version from installed package. Skip if fails
 try:
     from pkg_resources import get_distribution
+
     version = get_distribution("osm_common").version
 except Exception:
     pass
diff --git a/osm_common/common_utils.py b/osm_common/common_utils.py
index 4cb5857..ff7bc6d 100644
--- a/osm_common/common_utils.py
+++ b/osm_common/common_utils.py
@@ -21,6 +21,7 @@ __author__ = "Alfonso Tierno "
 
 class FakeLock:
     """Implements a fake lock that can be called with the "with" statement or acquire, release methods"""
+
     def __enter__(self):
         pass
 
diff --git a/osm_common/dbbase.py b/osm_common/dbbase.py
index 8d694a2..74378d0 100644
--- a/osm_common/dbbase.py
+++ b/osm_common/dbbase.py
@@ -29,15 +29,13 @@ __author__ = "Alfonso Tierno "
 
 
 class DbException(Exception):
-
     def __init__(self, message, http_code=HTTPStatus.NOT_FOUND):
         self.http_code = http_code
         Exception.__init__(self, "database exception " + str(message))
 
 
 class DbBase(object):
-
-    def __init__(self, logger_name='db', lock=False):
+    def __init__(self, logger_name="db", lock=False):
         """
         Constructor of dbBase
         :param logger_name: logging name
@@ -151,8 +149,18 @@ class DbBase(object):
         """
         raise DbException("Method 'create_list' not implemented")
 
-    def set_one(self, table, q_filter, update_dict, fail_on_empty=True, unset=None, pull=None, push=None,
-                push_list=None, pull_list=None):
+    def set_one(
+        self,
+        table,
+        q_filter,
+        update_dict,
+        fail_on_empty=True,
+        unset=None,
+        pull=None,
+        push=None,
+        push_list=None,
+        pull_list=None,
+    ):
         """
         Modifies an entry at database
         :param table: collection or table
@@ -173,7 +181,17 @@ class DbBase(object):
         """
         raise DbException("Method 'set_one' not implemented")
 
-    def set_list(self, table, q_filter, update_dict, unset=None, pull=None, push=None, push_list=None, pull_list=None):
+    def set_list(
+        self,
+        table,
+        q_filter,
+        update_dict,
+        unset=None,
+        pull=None,
+        push=None,
+        push_list=None,
+        pull_list=None,
+    ):
         """
         Modifies all matching entries at database
         :param table: collection or table
@@ -219,7 +237,9 @@ class DbBase(object):
         else:
             update_key_bytes = update_key
 
-        new_secret_key = bytearray(self.secret_key) if self.secret_key else bytearray(32)
+        new_secret_key = (
+            bytearray(self.secret_key) if self.secret_key else bytearray(32)
+        )
         for i, b in enumerate(update_key_bytes):
             new_secret_key[i % 32] ^= b
         return bytes(new_secret_key)
@@ -252,12 +272,12 @@ class DbBase(object):
         :return: Encrypted content of value
         """
         self.get_secret_key()
-        if not self.secret_key or not schema_version or schema_version == '1.0':
+        if not self.secret_key or not schema_version or schema_version == "1.0":
             return value
         else:
             secret_key = self._join_secret_key(salt)
             cipher = AES.new(secret_key)
-            padded_private_msg = value + ('\0' * ((16-len(value)) % 16))
+            padded_private_msg = value + ("\0" * ((16 - len(value)) % 16))
             encrypted_msg = cipher.encrypt(padded_private_msg)
             encoded_encrypted_msg = b64encode(encrypted_msg)
             return encoded_encrypted_msg.decode("ascii")
@@ -272,7 +292,7 @@ class DbBase(object):
         :return: Plain content of value
         """
         self.get_secret_key()
-        if not self.secret_key or not schema_version or schema_version == '1.0':
+        if not self.secret_key or not schema_version or schema_version == "1.0":
             return value
         else:
             secret_key = self._join_secret_key(salt)
             encrypted_msg = b64decode(value)
             cipher = AES.new(secret_key)
             decrypted_msg = cipher.decrypt(encrypted_msg)
             try:
-                unpadded_private_msg = decrypted_msg.decode().rstrip('\0')
+                unpadded_private_msg = decrypted_msg.decode().rstrip("\0")
             except UnicodeDecodeError:
-                raise DbException("Cannot
decrypt information. Are you using same COMMONKEY in all OSM components?", - http_code=HTTPStatus.INTERNAL_SERVER_ERROR) + raise DbException( + "Cannot decrypt information. Are you using same COMMONKEY in all OSM components?", + http_code=HTTPStatus.INTERNAL_SERVER_ERROR, + ) return unpadded_private_msg - def encrypt_decrypt_fields(self, item, action, fields=None, flags=None, schema_version=None, salt=None): + def encrypt_decrypt_fields( + self, item, action, fields=None, flags=None, schema_version=None, salt=None + ): if not fields: return self.get_secret_key() - actions = ['encrypt', 'decrypt'] + actions = ["encrypt", "decrypt"] if action.lower() not in actions: - raise DbException("Unknown action ({}): Must be one of {}".format(action, actions), - http_code=HTTPStatus.INTERNAL_SERVER_ERROR) - method = self.encrypt if action.lower() == 'encrypt' else self.decrypt + raise DbException( + "Unknown action ({}): Must be one of {}".format(action, actions), + http_code=HTTPStatus.INTERNAL_SERVER_ERROR, + ) + method = self.encrypt if action.lower() == "encrypt" else self.decrypt if flags is None: flags = re.I @@ -309,6 +335,7 @@ class DbBase(object): _item[key] = method(val, schema_version, salt) else: process(val) + process(item) @@ -341,6 +368,7 @@ def deep_update_rfc7396(dict_to_change, dict_reference, key_list=None): :param key_list: This is used internally for recursive calls. Do not fill this parameter. :return: none or raises and exception only at array modification when there is a bad format or conflict. """ + def _deep_update_array(array_to_change, _dict_reference, _key_list): to_append = {} to_insert_at_index = {} @@ -352,26 +380,33 @@ def deep_update_rfc7396(dict_to_change, dict_reference, key_list=None): _key_list[-1] = str(k) if not isinstance(k, str) or not k.startswith("$"): if array_edition is True: - raise DbException("Found array edition (keys starting with '$') and pure dictionary edition in the" - " same dict at '{}'".format(":".join(_key_list[:-1]))) + raise DbException( + "Found array edition (keys starting with '$') and pure dictionary edition in the" + " same dict at '{}'".format(":".join(_key_list[:-1])) + ) array_edition = False continue else: if array_edition is False: - raise DbException("Found array edition (keys starting with '$') and pure dictionary edition in the" - " same dict at '{}'".format(":".join(_key_list[:-1]))) + raise DbException( + "Found array edition (keys starting with '$') and pure dictionary edition in the" + " same dict at '{}'".format(":".join(_key_list[:-1])) + ) array_edition = True insert = False indexes = [] # indexes to edit or insert kitem = k[1:] - if kitem.startswith('+'): + if kitem.startswith("+"): insert = True kitem = kitem[1:] if _dict_reference[k] is None: - raise DbException("A value of None has not sense for insertions at '{}'".format( - ":".join(_key_list))) + raise DbException( + "A value of None has not sense for insertions at '{}'".format( + ":".join(_key_list) + ) + ) - if kitem.startswith('[') and kitem.endswith(']'): + if kitem.startswith("[") and kitem.endswith("]"): try: index = int(kitem[1:-1]) if index < 0: @@ -380,18 +415,29 @@ def deep_update_rfc7396(dict_to_change, dict_reference, key_list=None): index = 0 # skip outside index edition indexes.append(index) except Exception: - raise DbException("Wrong format at '{}'. Expecting integer index inside quotes".format( - ":".join(_key_list))) + raise DbException( + "Wrong format at '{}'. 
Expecting integer index inside quotes".format( + ":".join(_key_list) + ) + ) elif kitem: # match_found_skip = False try: filter_in = yaml.safe_load(kitem) except Exception: - raise DbException("Wrong format at '{}'. Expecting '$'".format(":".join(_key_list))) + raise DbException( + "Wrong format at '{}'. Expecting '$'".format( + ":".join(_key_list) + ) + ) if isinstance(filter_in, dict): for index, item in enumerate(array_to_change): for filter_k, filter_v in filter_in.items(): - if not isinstance(item, dict) or filter_k not in item or item[filter_k] != filter_v: + if ( + not isinstance(item, dict) + or filter_k not in item + or item[filter_k] != filter_v + ): break else: # match found if insert: @@ -417,20 +463,35 @@ def deep_update_rfc7396(dict_to_change, dict_reference, key_list=None): # if match_found_skip: # continue elif not insert: - raise DbException("Wrong format at '{}'. Expecting '$+', '$[]'".format( - ":".join(_key_list))) + raise DbException( + "Wrong format at '{}'. Expecting '$+', '$[]'".format( + ":".join(_key_list) + ) + ) for index in indexes: if insert: - if index in to_insert_at_index and to_insert_at_index[index] != _dict_reference[k]: + if ( + index in to_insert_at_index + and to_insert_at_index[index] != _dict_reference[k] + ): # Several different insertions on the same item of the array - raise DbException("Conflict at '{}'. Several insertions on same array index {}".format( - ":".join(_key_list), index)) + raise DbException( + "Conflict at '{}'. Several insertions on same array index {}".format( + ":".join(_key_list), index + ) + ) to_insert_at_index[index] = _dict_reference[k] else: - if index in indexes_to_edit_delete and values_to_edit_delete[index] != _dict_reference[k]: + if ( + index in indexes_to_edit_delete + and values_to_edit_delete[index] != _dict_reference[k] + ): # Several different editions on the same item of the array - raise DbException("Conflict at '{}'. Several editions on array index {}".format( - ":".join(_key_list), index)) + raise DbException( + "Conflict at '{}'. Several editions on array index {}".format( + ":".join(_key_list), index + ) + ) indexes_to_edit_delete.append(index) values_to_edit_delete[index] = _dict_reference[k] if not indexes: @@ -447,22 +508,38 @@ def deep_update_rfc7396(dict_to_change, dict_reference, key_list=None): try: if values_to_edit_delete[index] is None: # None->Anything try: - del (array_to_change[index]) + del array_to_change[index] except IndexError: pass # it is not consider an error if this index does not exist - elif not isinstance(values_to_edit_delete[index], dict): # NotDict->Anything + elif not isinstance( + values_to_edit_delete[index], dict + ): # NotDict->Anything array_to_change[index] = deepcopy(values_to_edit_delete[index]) elif isinstance(array_to_change[index], dict): # Dict->Dict - deep_update_rfc7396(array_to_change[index], values_to_edit_delete[index], _key_list) + deep_update_rfc7396( + array_to_change[index], values_to_edit_delete[index], _key_list + ) else: # Dict->NotDict - if isinstance(array_to_change[index], list): # Dict->List. Check extra array edition - if _deep_update_array(array_to_change[index], values_to_edit_delete[index], _key_list): + if isinstance( + array_to_change[index], list + ): # Dict->List. 
Check extra array edition + if _deep_update_array( + array_to_change[index], + values_to_edit_delete[index], + _key_list, + ): continue array_to_change[index] = deepcopy(values_to_edit_delete[index]) # calling deep_update_rfc7396 to delete the None values - deep_update_rfc7396(array_to_change[index], values_to_edit_delete[index], _key_list) + deep_update_rfc7396( + array_to_change[index], values_to_edit_delete[index], _key_list + ) except IndexError: - raise DbException("Array edition index out of range at '{}'".format(":".join(_key_list))) + raise DbException( + "Array edition index out of range at '{}'".format( + ":".join(_key_list) + ) + ) # insertion with indexes to_insert_indexes = list(to_insert_at_index.keys()) @@ -489,7 +566,7 @@ def deep_update_rfc7396(dict_to_change, dict_reference, key_list=None): key_list.append("") for k in dict_reference: key_list[-1] = str(k) - if dict_reference[k] is None: # None->Anything + if dict_reference[k] is None: # None->Anything if k in dict_to_change: del dict_to_change[k] elif not isinstance(dict_reference[k], dict): # NotDict->Anything @@ -500,8 +577,10 @@ def deep_update_rfc7396(dict_to_change, dict_reference, key_list=None): deep_update_rfc7396(dict_to_change[k], dict_reference[k], key_list) elif isinstance(dict_to_change[k], dict): # Dict->Dict deep_update_rfc7396(dict_to_change[k], dict_reference[k], key_list) - else: # Dict->NotDict - if isinstance(dict_to_change[k], list): # Dict->List. Check extra array edition + else: # Dict->NotDict + if isinstance( + dict_to_change[k], list + ): # Dict->List. Check extra array edition if _deep_update_array(dict_to_change[k], dict_reference[k], key_list): continue dict_to_change[k] = deepcopy(dict_reference[k]) @@ -511,5 +590,5 @@ def deep_update_rfc7396(dict_to_change, dict_reference, key_list=None): def deep_update(dict_to_change, dict_reference): - """ Maintained for backward compatibility. Use deep_update_rfc7396 instead""" + """Maintained for backward compatibility. Use deep_update_rfc7396 instead""" return deep_update_rfc7396(dict_to_change, dict_reference) diff --git a/osm_common/dbmemory.py b/osm_common/dbmemory.py index 51ae810..d089575 100644 --- a/osm_common/dbmemory.py +++ b/osm_common/dbmemory.py @@ -26,8 +26,7 @@ __author__ = "Alfonso Tierno " class DbMemory(DbBase): - - def __init__(self, logger_name='db', lock=False): + def __init__(self, logger_name="db", lock=False): super().__init__(logger_name, lock) self.db = {} @@ -61,14 +60,15 @@ class DbMemory(DbBase): return db_filter def _find(self, table, q_filter): - def recursive_find(key_list, key_next_index, content, oper, target): if key_next_index == len(key_list) or content is None: try: if oper in ("eq", "cont"): if isinstance(target, list): if isinstance(content, list): - return any(content_item in target for content_item in content) + return any( + content_item in target for content_item in content + ) return content in target elif isinstance(content, list): return target in content @@ -77,7 +77,10 @@ class DbMemory(DbBase): elif oper in ("neq", "ne", "ncont"): if isinstance(target, list): if isinstance(content, list): - return all(content_item not in target for content_item in content) + return all( + content_item not in target + for content_item in content + ) return content not in target elif isinstance(content, list): return target not in content @@ -92,18 +95,31 @@ class DbMemory(DbBase): elif oper == "lte": return content <= target else: - raise DbException("Unknown filter operator '{}' in key '{}'". 
- format(oper, ".".join(key_list)), http_code=HTTPStatus.BAD_REQUEST) + raise DbException( + "Unknown filter operator '{}' in key '{}'".format( + oper, ".".join(key_list) + ), + http_code=HTTPStatus.BAD_REQUEST, + ) except TypeError: return False elif isinstance(content, dict): - return recursive_find(key_list, key_next_index + 1, content.get(key_list[key_next_index]), oper, - target) + return recursive_find( + key_list, + key_next_index + 1, + content.get(key_list[key_next_index]), + oper, + target, + ) elif isinstance(content, list): look_for_match = True # when there is a match return immediately - if (target is None) != (oper in ("neq", "ne", "ncont")): # one True and other False (Xor) - look_for_match = False # when there is not a match return immediately + if (target is None) != ( + oper in ("neq", "ne", "ncont") + ): # one True and other False (Xor) + look_for_match = ( + False # when there is not a match return immediately + ) for content_item in content: if key_list[key_next_index] == "ANYINDEX" and isinstance(v, dict): @@ -111,19 +127,40 @@ class DbMemory(DbBase): for k2, v2 in target.items(): k_new_list = k2.split(".") new_operator = "eq" - if k_new_list[-1] in ("eq", "ne", "gt", "gte", "lt", "lte", "cont", "ncont", "neq"): + if k_new_list[-1] in ( + "eq", + "ne", + "gt", + "gte", + "lt", + "lte", + "cont", + "ncont", + "neq", + ): new_operator = k_new_list.pop() - if not recursive_find(k_new_list, 0, content_item, new_operator, v2): + if not recursive_find( + k_new_list, 0, content_item, new_operator, v2 + ): matches = False break else: - matches = recursive_find(key_list, key_next_index, content_item, oper, target) + matches = recursive_find( + key_list, key_next_index, content_item, oper, target + ) if matches == look_for_match: return matches - if key_list[key_next_index].isdecimal() and int(key_list[key_next_index]) < len(content): - matches = recursive_find(key_list, key_next_index + 1, content[int(key_list[key_next_index])], - oper, target) + if key_list[key_next_index].isdecimal() and int( + key_list[key_next_index] + ) < len(content): + matches = recursive_find( + key_list, + key_next_index + 1, + content[int(key_list[key_next_index])], + oper, + target, + ) if matches == look_for_match: return matches return not look_for_match @@ -138,7 +175,17 @@ class DbMemory(DbBase): for k, v in q_filter.items(): k_list = k.split(".") operator = "eq" - if k_list[-1] in ("eq", "ne", "gt", "gte", "lt", "lte", "cont", "ncont", "neq"): + if k_list[-1] in ( + "eq", + "ne", + "gt", + "gte", + "lt", + "lte", + "cont", + "ncont", + "neq", + ): operator = k_list.pop() matches = recursive_find(k_list, 0, row, operator, v) if not matches: @@ -199,11 +246,18 @@ class DbMemory(DbBase): if not fail_on_more: return deepcopy(row) if result: - raise DbException("Found more than one entry with filter='{}'".format(q_filter), - HTTPStatus.CONFLICT.value) + raise DbException( + "Found more than one entry with filter='{}'".format( + q_filter + ), + HTTPStatus.CONFLICT.value, + ) result = row if not result and fail_on_empty: - raise DbException("Not found entry with filter='{}'".format(q_filter), HTTPStatus.NOT_FOUND) + raise DbException( + "Not found entry with filter='{}'".format(q_filter), + HTTPStatus.NOT_FOUND, + ) return deepcopy(result) except Exception as e: # TODO refine raise DbException(str(e)) @@ -244,14 +298,26 @@ class DbMemory(DbBase): break else: if fail_on_empty: - raise DbException("Not found entry with filter='{}'".format(q_filter), HTTPStatus.NOT_FOUND) + raise DbException( + "Not 
found entry with filter='{}'".format(q_filter), + HTTPStatus.NOT_FOUND, + ) return None del self.db[table][i] return {"deleted": 1} except Exception as e: # TODO refine raise DbException(str(e)) - def _update(self, db_item, update_dict, unset=None, pull=None, push=None, push_list=None, pull_list=None): + def _update( + self, + db_item, + update_dict, + unset=None, + pull=None, + push=None, + push_list=None, + pull_list=None, + ): """ Modifies an entry at database :param db_item: entry of the table to update @@ -267,6 +333,7 @@ class DbMemory(DbBase): whole array :return: True if database has been changed, False if not; Exception on error """ + def _iterate_keys(k, db_nested, populate=True): k_list = k.split(".") k_item_prev = k_list[0] @@ -278,7 +345,9 @@ class DbMemory(DbBase): if isinstance(db_nested[k_item_prev], dict): if k_item not in db_nested[k_item_prev]: if not populate: - raise DbException("Cannot set '{}', not existing '{}'".format(k, k_item)) + raise DbException( + "Cannot set '{}', not existing '{}'".format(k, k_item) + ) populated = True db_nested[k_item_prev][k_item] = None elif isinstance(db_nested[k_item_prev], list) and k_item.isdigit(): @@ -286,17 +355,28 @@ class DbMemory(DbBase): k_item = int(k_item) if k_item >= len(db_nested[k_item_prev]): if not populate: - raise DbException("Cannot set '{}', index too large '{}'".format(k, k_item)) + raise DbException( + "Cannot set '{}', index too large '{}'".format( + k, k_item + ) + ) populated = True - db_nested[k_item_prev] += [None] * (k_item - len(db_nested[k_item_prev]) + 1) + db_nested[k_item_prev] += [None] * ( + k_item - len(db_nested[k_item_prev]) + 1 + ) elif db_nested[k_item_prev] is None: if not populate: - raise DbException("Cannot set '{}', not existing '{}'".format(k, k_item)) + raise DbException( + "Cannot set '{}', not existing '{}'".format(k, k_item) + ) populated = True db_nested[k_item_prev] = {k_item: None} else: # number, string, boolean, ... or list but with not integer key - raise DbException("Cannot set '{}' on existing '{}={}'".format(k, k_item_prev, - db_nested[k_item_prev])) + raise DbException( + "Cannot set '{}' on existing '{}={}'".format( + k, k_item_prev, db_nested[k_item_prev] + ) + ) db_nested = db_nested[k_item_prev] k_item_prev = k_item return db_nested, k_item_prev, populated @@ -311,7 +391,9 @@ class DbMemory(DbBase): if unset: for dot_k in unset: try: - dict_to_update, key_to_update, _ = _iterate_keys(dot_k, db_item, populate=False) + dict_to_update, key_to_update, _ = _iterate_keys( + dot_k, db_item, populate=False + ) del dict_to_update[key_to_update] updated = True except Exception: @@ -319,62 +401,92 @@ class DbMemory(DbBase): if pull: for dot_k, v in pull.items(): try: - dict_to_update, key_to_update, _ = _iterate_keys(dot_k, db_item, populate=False) + dict_to_update, key_to_update, _ = _iterate_keys( + dot_k, db_item, populate=False + ) except Exception: continue if key_to_update not in dict_to_update: continue if not isinstance(dict_to_update[key_to_update], list): - raise DbException("Cannot pull '{}'. Target is not a list".format(dot_k)) + raise DbException( + "Cannot pull '{}'. 
Target is not a list".format(dot_k) + ) while v in dict_to_update[key_to_update]: dict_to_update[key_to_update].remove(v) updated = True if pull_list: for dot_k, v in pull_list.items(): if not isinstance(v, list): - raise DbException("Invalid content at pull_list, '{}' must be an array".format(dot_k), - http_code=HTTPStatus.BAD_REQUEST) + raise DbException( + "Invalid content at pull_list, '{}' must be an array".format( + dot_k + ), + http_code=HTTPStatus.BAD_REQUEST, + ) try: - dict_to_update, key_to_update, _ = _iterate_keys(dot_k, db_item, populate=False) + dict_to_update, key_to_update, _ = _iterate_keys( + dot_k, db_item, populate=False + ) except Exception: continue if key_to_update not in dict_to_update: continue if not isinstance(dict_to_update[key_to_update], list): - raise DbException("Cannot pull_list '{}'. Target is not a list".format(dot_k)) + raise DbException( + "Cannot pull_list '{}'. Target is not a list".format(dot_k) + ) for single_v in v: while single_v in dict_to_update[key_to_update]: dict_to_update[key_to_update].remove(single_v) updated = True if push: for dot_k, v in push.items(): - dict_to_update, key_to_update, populated = _iterate_keys(dot_k, db_item) - if isinstance(dict_to_update, dict) and key_to_update not in dict_to_update: + dict_to_update, key_to_update, populated = _iterate_keys( + dot_k, db_item + ) + if ( + isinstance(dict_to_update, dict) + and key_to_update not in dict_to_update + ): dict_to_update[key_to_update] = [v] updated = True elif populated and dict_to_update[key_to_update] is None: dict_to_update[key_to_update] = [v] updated = True elif not isinstance(dict_to_update[key_to_update], list): - raise DbException("Cannot push '{}'. Target is not a list".format(dot_k)) + raise DbException( + "Cannot push '{}'. Target is not a list".format(dot_k) + ) else: dict_to_update[key_to_update].append(v) updated = True if push_list: for dot_k, v in push_list.items(): if not isinstance(v, list): - raise DbException("Invalid content at push_list, '{}' must be an array".format(dot_k), - http_code=HTTPStatus.BAD_REQUEST) - dict_to_update, key_to_update, populated = _iterate_keys(dot_k, db_item) - if isinstance(dict_to_update, dict) and key_to_update not in dict_to_update: + raise DbException( + "Invalid content at push_list, '{}' must be an array".format( + dot_k + ), + http_code=HTTPStatus.BAD_REQUEST, + ) + dict_to_update, key_to_update, populated = _iterate_keys( + dot_k, db_item + ) + if ( + isinstance(dict_to_update, dict) + and key_to_update not in dict_to_update + ): dict_to_update[key_to_update] = v.copy() updated = True elif populated and dict_to_update[key_to_update] is None: dict_to_update[key_to_update] = v.copy() updated = True elif not isinstance(dict_to_update[key_to_update], list): - raise DbException("Cannot push '{}'. Target is not a list".format(dot_k), - http_code=HTTPStatus.CONFLICT) + raise DbException( + "Cannot push '{}'. 
Target is not a list".format(dot_k), + http_code=HTTPStatus.CONFLICT, + ) else: dict_to_update[key_to_update] += v updated = True @@ -385,8 +497,18 @@ class DbMemory(DbBase): except Exception as e: # TODO refine raise DbException(str(e)) - def set_one(self, table, q_filter, update_dict, fail_on_empty=True, unset=None, pull=None, push=None, - push_list=None, pull_list=None): + def set_one( + self, + table, + q_filter, + update_dict, + fail_on_empty=True, + unset=None, + pull=None, + push=None, + push_list=None, + pull_list=None, + ): """ Modifies an entry at database :param table: collection or table @@ -407,23 +529,50 @@ class DbMemory(DbBase): """ with self.lock: for i, db_item in self._find(table, self._format_filter(q_filter)): - updated = self._update(db_item, update_dict, unset=unset, pull=pull, push=push, push_list=push_list, - pull_list=pull_list) + updated = self._update( + db_item, + update_dict, + unset=unset, + pull=pull, + push=push, + push_list=push_list, + pull_list=pull_list, + ) return {"updated": 1 if updated else 0} else: if fail_on_empty: - raise DbException("Not found entry with _id='{}'".format(q_filter), HTTPStatus.NOT_FOUND) + raise DbException( + "Not found entry with _id='{}'".format(q_filter), + HTTPStatus.NOT_FOUND, + ) return None - def set_list(self, table, q_filter, update_dict, unset=None, pull=None, push=None, push_list=None, pull_list=None): + def set_list( + self, + table, + q_filter, + update_dict, + unset=None, + pull=None, + push=None, + push_list=None, + pull_list=None, + ): """Modifies al matching entries at database. Same as push. Do not fail if nothing matches""" with self.lock: updated = 0 found = 0 for _, db_item in self._find(table, self._format_filter(q_filter)): found += 1 - if self._update(db_item, update_dict, unset=unset, pull=pull, push=push, push_list=push_list, - pull_list=pull_list): + if self._update( + db_item, + update_dict, + unset=unset, + pull=pull, + push=push, + push_list=push_list, + pull_list=pull_list, + ): updated += 1 # if not found and fail_on_empty: # raise DbException("Not found entry with '{}'".format(q_filter), HTTPStatus.NOT_FOUND) @@ -445,7 +594,10 @@ class DbMemory(DbBase): break else: if fail_on_empty: - raise DbException("Not found entry with _id='{}'".format(_id), HTTPStatus.NOT_FOUND) + raise DbException( + "Not found entry with _id='{}'".format(_id), + HTTPStatus.NOT_FOUND, + ) return None self.db[table][i] = deepcopy(indata) return {"updated": 1} @@ -499,7 +651,7 @@ class DbMemory(DbBase): raise DbException(str(e)) -if __name__ == '__main__': +if __name__ == "__main__": # some test code db = DbMemory() db.create("test", {"_id": 1, "data": 1}) diff --git a/osm_common/dbmongo.py b/osm_common/dbmongo.py index 7fc29dc..d8b373a 100644 --- a/osm_common/dbmongo.py +++ b/osm_common/dbmongo.py @@ -63,7 +63,7 @@ class DbMongo(DbBase): conn_initial_timout = 120 conn_timout = 10 - def __init__(self, logger_name='db', lock=False): + def __init__(self, logger_name="db", lock=False): super().__init__(logger_name, lock) self.client = None self.db = None @@ -79,7 +79,9 @@ class DbMongo(DbBase): self.secret_key = None if self.database_key: self.set_secret_key(self.database_key) - version_data = self.get_one("admin", {"_id": "version"}, fail_on_empty=False, fail_on_more=True) + version_data = self.get_one( + "admin", {"_id": "version"}, fail_on_empty=False, fail_on_more=True + ) if version_data and version_data.get("serial"): self.set_secret_key(b64decode(version_data["serial"])) self.secret_obtained = True @@ -99,32 +101,57 @@ 
class DbMongo(DbBase): self.database_key = master_key self.set_secret_key(master_key) if config.get("uri"): - self.client = MongoClient(config["uri"], replicaSet=config.get("replicaset", None)) + self.client = MongoClient( + config["uri"], replicaSet=config.get("replicaset", None) + ) else: - self.client = MongoClient(config["host"], config["port"], replicaSet=config.get("replicaset", None)) + self.client = MongoClient( + config["host"], + config["port"], + replicaSet=config.get("replicaset", None), + ) # TODO add as parameters also username=config.get("user"), password=config.get("password")) # when all modules are ready self.db = self.client[config["name"]] if "loglevel" in config: - self.logger.setLevel(getattr(logging, config['loglevel'])) + self.logger.setLevel(getattr(logging, config["loglevel"])) # get data to try a connection now = time() while True: try: - version_data = self.get_one("admin", {"_id": "version"}, fail_on_empty=False, fail_on_more=True) + version_data = self.get_one( + "admin", + {"_id": "version"}, + fail_on_empty=False, + fail_on_more=True, + ) # check database status is ok - if version_data and version_data.get("status") != 'ENABLED': - raise DbException("Wrong database status '{}'".format(version_data.get("status")), - http_code=HTTPStatus.INTERNAL_SERVER_ERROR) + if version_data and version_data.get("status") != "ENABLED": + raise DbException( + "Wrong database status '{}'".format( + version_data.get("status") + ), + http_code=HTTPStatus.INTERNAL_SERVER_ERROR, + ) # check version - db_version = None if not version_data else version_data.get("version") + db_version = ( + None if not version_data else version_data.get("version") + ) if target_version and target_version != db_version: - raise DbException("Invalid database version {}. Expected {}".format(db_version, target_version)) + raise DbException( + "Invalid database version {}. Expected {}".format( + db_version, target_version + ) + ) # get serial if version_data and version_data.get("serial"): self.secret_obtained = True self.set_secret_key(b64decode(version_data["serial"])) - self.logger.info("Connected to database {} version {}".format(config["name"], db_version)) + self.logger.info( + "Connected to database {} version {}".format( + config["name"], db_version + ) + ) return except errors.ConnectionFailure as e: if time() - now >= self.conn_initial_timout: @@ -173,9 +200,18 @@ class DbMongo(DbBase): return db_filter for query_k, query_v in q_filter.items(): dot_index = query_k.rfind(".") - if dot_index > 1 and query_k[dot_index+1:] in ("eq", "ne", "gt", "gte", "lt", "lte", "cont", - "ncont", "neq"): - operator = "$" + query_k[dot_index + 1:] + if dot_index > 1 and query_k[dot_index + 1 :] in ( + "eq", + "ne", + "gt", + "gte", + "lt", + "lte", + "cont", + "ncont", + "neq", + ): + operator = "$" + query_k[dot_index + 1 :] if operator == "$neq": operator = "$ne" k = query_k[:dot_index] @@ -216,8 +252,10 @@ class DbMongo(DbBase): return db_filter except Exception as e: - raise DbException("Invalid query string filter at {}:{}. Error: {}".format(query_k, v, e), - http_code=HTTPStatus.BAD_REQUEST) + raise DbException( + "Invalid query string filter at {}:{}. 
Error: {}".format(query_k, v, e), + http_code=HTTPStatus.BAD_REQUEST, + ) def get_list(self, table, q_filter=None): """ @@ -279,13 +317,21 @@ class DbMongo(DbBase): rows = collection.find(db_filter) if rows.count() == 0: if fail_on_empty: - raise DbException("Not found any {} with filter='{}'".format(table[:-1], q_filter), - HTTPStatus.NOT_FOUND) + raise DbException( + "Not found any {} with filter='{}'".format( + table[:-1], q_filter + ), + HTTPStatus.NOT_FOUND, + ) return None elif rows.count() > 1: if fail_on_more: - raise DbException("Found more than one {} with filter='{}'".format(table[:-1], q_filter), - HTTPStatus.CONFLICT) + raise DbException( + "Found more than one {} with filter='{}'".format( + table[:-1], q_filter + ), + HTTPStatus.CONFLICT, + ) return rows[0] except Exception as e: # TODO refine raise DbException(e) @@ -322,8 +368,12 @@ class DbMongo(DbBase): rows = collection.delete_one(self._format_filter(q_filter)) if rows.deleted_count == 0: if fail_on_empty: - raise DbException("Not found any {} with filter='{}'".format(table[:-1], q_filter), - HTTPStatus.NOT_FOUND) + raise DbException( + "Not found any {} with filter='{}'".format( + table[:-1], q_filter + ), + HTTPStatus.NOT_FOUND, + ) return None return {"deleted": rows.deleted_count} except Exception as e: # TODO refine @@ -362,8 +412,19 @@ class DbMongo(DbBase): except Exception as e: # TODO refine raise DbException(e) - def set_one(self, table, q_filter, update_dict, fail_on_empty=True, unset=None, pull=None, push=None, - push_list=None, pull_list=None, upsert=False): + def set_one( + self, + table, + q_filter, + update_dict, + fail_on_empty=True, + unset=None, + pull=None, + push=None, + push_list=None, + pull_list=None, + upsert=False, + ): """ Modifies an entry at database :param table: collection or table @@ -393,25 +454,45 @@ class DbMongo(DbBase): if pull or pull_list: db_oper["$pull"] = pull or {} if pull_list: - db_oper["$pull"].update({k: {"$in": v} for k, v in pull_list.items()}) + db_oper["$pull"].update( + {k: {"$in": v} for k, v in pull_list.items()} + ) if push or push_list: db_oper["$push"] = push or {} if push_list: - db_oper["$push"].update({k: {"$each": v} for k, v in push_list.items()}) + db_oper["$push"].update( + {k: {"$each": v} for k, v in push_list.items()} + ) with self.lock: collection = self.db[table] - rows = collection.update_one(self._format_filter(q_filter), db_oper, upsert=upsert) + rows = collection.update_one( + self._format_filter(q_filter), db_oper, upsert=upsert + ) if rows.matched_count == 0: if fail_on_empty: - raise DbException("Not found any {} with filter='{}'".format(table[:-1], q_filter), - HTTPStatus.NOT_FOUND) + raise DbException( + "Not found any {} with filter='{}'".format( + table[:-1], q_filter + ), + HTTPStatus.NOT_FOUND, + ) return None return {"modified": rows.modified_count} except Exception as e: # TODO refine raise DbException(e) - def set_list(self, table, q_filter, update_dict, unset=None, pull=None, push=None, push_list=None, pull_list=None): + def set_list( + self, + table, + q_filter, + update_dict, + unset=None, + pull=None, + push=None, + push_list=None, + pull_list=None, + ): """ Modifies al matching entries at database :param table: collection or table @@ -437,11 +518,15 @@ class DbMongo(DbBase): if pull or pull_list: db_oper["$pull"] = pull or {} if pull_list: - db_oper["$pull"].update({k: {"$in": v} for k, v in pull_list.items()}) + db_oper["$pull"].update( + {k: {"$in": v} for k, v in pull_list.items()} + ) if push or push_list: db_oper["$push"] = 
push or {} if push_list: - db_oper["$push"].update({k: {"$each": v} for k, v in push_list.items()}) + db_oper["$push"].update( + {k: {"$each": v} for k, v in push_list.items()} + ) with self.lock: collection = self.db[table] rows = collection.update_many(self._format_filter(q_filter), db_oper) @@ -466,7 +551,10 @@ class DbMongo(DbBase): rows = collection.replace_one(db_filter, indata) if rows.matched_count == 0: if fail_on_empty: - raise DbException("Not found any {} with _id='{}'".format(table[:-1], _id), HTTPStatus.NOT_FOUND) + raise DbException( + "Not found any {} with _id='{}'".format(table[:-1], _id), + HTTPStatus.NOT_FOUND, + ) return None return {"replaced": rows.modified_count} except Exception as e: # TODO refine diff --git a/osm_common/fsbase.py b/osm_common/fsbase.py index e15f7a5..219dbe1 100644 --- a/osm_common/fsbase.py +++ b/osm_common/fsbase.py @@ -31,7 +31,7 @@ class FsException(Exception): class FsBase(object): - def __init__(self, logger_name='fs', lock=False): + def __init__(self, logger_name="fs", lock=False): """ Constructor of FsBase :param logger_name: logging name diff --git a/osm_common/fslocal.py b/osm_common/fslocal.py index 3686b36..4a82c4f 100644 --- a/osm_common/fslocal.py +++ b/osm_common/fslocal.py @@ -17,6 +17,7 @@ import os import logging + # import tarfile from http import HTTPStatus from shutil import rmtree @@ -26,8 +27,7 @@ __author__ = "Alfonso Tierno " class FsLocal(FsBase): - - def __init__(self, logger_name='fs', lock=False): + def __init__(self, logger_name="fs", lock=False): super().__init__(logger_name, lock) self.path = None @@ -42,8 +42,11 @@ class FsLocal(FsBase): if not self.path.endswith("/"): self.path += "/" if not os.path.exists(self.path): - raise FsException("Invalid configuration param at '[storage]': path '{}' does not exist".format( - config["path"])) + raise FsException( + "Invalid configuration param at '[storage]': path '{}' does not exist".format( + config["path"] + ) + ) except FsException: raise except Exception as e: # TODO refine @@ -140,9 +143,13 @@ class FsLocal(FsBase): f = "/".join(storage) return open(self.path + f, mode) except FileNotFoundError: - raise FsException("File {} does not exist".format(f), http_code=HTTPStatus.NOT_FOUND) + raise FsException( + "File {} does not exist".format(f), http_code=HTTPStatus.NOT_FOUND + ) except IOError: - raise FsException("File {} cannot be opened".format(f), http_code=HTTPStatus.BAD_REQUEST) + raise FsException( + "File {} cannot be opened".format(f), http_code=HTTPStatus.BAD_REQUEST + ) def dir_ls(self, storage): """ @@ -157,9 +164,13 @@ class FsLocal(FsBase): f = "/".join(storage) return os.listdir(self.path + f) except NotADirectoryError: - raise FsException("File {} does not exist".format(f), http_code=HTTPStatus.NOT_FOUND) + raise FsException( + "File {} does not exist".format(f), http_code=HTTPStatus.NOT_FOUND + ) except IOError: - raise FsException("File {} cannot be opened".format(f), http_code=HTTPStatus.BAD_REQUEST) + raise FsException( + "File {} cannot be opened".format(f), http_code=HTTPStatus.BAD_REQUEST + ) def file_delete(self, storage, ignore_non_exist=False): """ @@ -176,9 +187,15 @@ class FsLocal(FsBase): if os.path.exists(f): rmtree(f) elif not ignore_non_exist: - raise FsException("File {} does not exist".format(storage), http_code=HTTPStatus.NOT_FOUND) + raise FsException( + "File {} does not exist".format(storage), + http_code=HTTPStatus.NOT_FOUND, + ) except (IOError, PermissionError) as e: - raise FsException("File {} cannot be deleted: {}".format(f, 
e), http_code=HTTPStatus.INTERNAL_SERVER_ERROR) + raise FsException( + "File {} cannot be deleted: {}".format(f, e), + http_code=HTTPStatus.INTERNAL_SERVER_ERROR, + ) def sync(self, from_path=None): pass # Not needed in fslocal diff --git a/osm_common/fsmongo.py b/osm_common/fsmongo.py index f43dca6..ff37c42 100644 --- a/osm_common/fsmongo.py +++ b/osm_common/fsmongo.py @@ -51,13 +51,17 @@ class GridByteStream(BytesIO): exception_file = next(cursor, None) if exception_file: - raise FsException("Multiple files found", http_code=HTTPStatus.INTERNAL_SERVER_ERROR) + raise FsException( + "Multiple files found", http_code=HTTPStatus.INTERNAL_SERVER_ERROR + ) if requested_file.metadata["type"] in ("file", "sym"): grid_file = requested_file self.file_type = requested_file.metadata["type"] else: - raise FsException("Type isn't file", http_code=HTTPStatus.INTERNAL_SERVER_ERROR) + raise FsException( + "Type isn't file", http_code=HTTPStatus.INTERNAL_SERVER_ERROR + ) if grid_file: self._id = grid_file._id @@ -74,29 +78,26 @@ class GridByteStream(BytesIO): if self._id: self.fs.delete(self._id) - cursor = self.fs.find({ - "filename": self.filename.split("/")[0], - "metadata": {"type": "dir"}}) + cursor = self.fs.find( + {"filename": self.filename.split("/")[0], "metadata": {"type": "dir"}} + ) parent_dir = next(cursor, None) if not parent_dir: parent_dir_name = self.filename.split("/")[0] - self.filename = self.filename.replace(parent_dir_name, parent_dir_name[:-1], 1) + self.filename = self.filename.replace( + parent_dir_name, parent_dir_name[:-1], 1 + ) self.seek(0, 0) if self._id: self.fs.upload_from_stream_with_id( - self._id, - self.filename, - self, - metadata={"type": self.file_type} + self._id, self.filename, self, metadata={"type": self.file_type} ) else: self.fs.upload_from_stream( - self.filename, - self, - metadata={"type": self.file_type} + self.filename, self, metadata={"type": self.file_type} ) super(GridByteStream, self).close() @@ -127,13 +128,17 @@ class GridStringStream(StringIO): exception_file = next(cursor, None) if exception_file: - raise FsException("Multiple files found", http_code=HTTPStatus.INTERNAL_SERVER_ERROR) + raise FsException( + "Multiple files found", http_code=HTTPStatus.INTERNAL_SERVER_ERROR + ) if requested_file.metadata["type"] in ("file", "dir"): grid_file = requested_file self.file_type = requested_file.metadata["type"] else: - raise FsException("File type isn't file", http_code=HTTPStatus.INTERNAL_SERVER_ERROR) + raise FsException( + "File type isn't file", http_code=HTTPStatus.INTERNAL_SERVER_ERROR + ) if grid_file: stream = BytesIO() @@ -154,15 +159,17 @@ class GridStringStream(StringIO): if self._id: self.fs.delete(self._id) - cursor = self.fs.find({ - "filename": self.filename.split("/")[0], - "metadata": {"type": "dir"}}) + cursor = self.fs.find( + {"filename": self.filename.split("/")[0], "metadata": {"type": "dir"}} + ) parent_dir = next(cursor, None) if not parent_dir: parent_dir_name = self.filename.split("/")[0] - self.filename = self.filename.replace(parent_dir_name, parent_dir_name[:-1], 1) + self.filename = self.filename.replace( + parent_dir_name, parent_dir_name[:-1], 1 + ) self.seek(0, 0) stream = BytesIO() @@ -170,16 +177,11 @@ class GridStringStream(StringIO): stream.seek(0, 0) if self._id: self.fs.upload_from_stream_with_id( - self._id, - self.filename, - stream, - metadata={"type": self.file_type} + self._id, self.filename, stream, metadata={"type": self.file_type} ) else: self.fs.upload_from_stream( - self.filename, - stream, - 
metadata={"type": self.file_type} + self.filename, stream, metadata={"type": self.file_type} ) stream.close() super(GridStringStream, self).close() @@ -192,8 +194,7 @@ class GridStringStream(StringIO): class FsMongo(FsBase): - - def __init__(self, logger_name='fs', lock=False): + def __init__(self, logger_name="fs", lock=False): super().__init__(logger_name, lock) self.path = None self.client = None @@ -207,7 +208,9 @@ class FsMongo(FsBase): continue os.makedirs(self.path + directory.filename, exist_ok=True) - file_cursor = self.fs.find({"metadata.type": {"$in": ["file", "sym"]}}, no_cursor_timeout=True) + file_cursor = self.fs.find( + {"metadata.type": {"$in": ["file", "sym"]}}, no_cursor_timeout=True + ) for writing_file in file_cursor: if from_path and not writing_file.filename.startswith(from_path): @@ -228,7 +231,7 @@ class FsMongo(FsBase): raise os.symlink(link, file_path) else: - with open(file_path, 'wb+') as file_stream: + with open(file_path, "wb+") as file_stream: self.fs.download_to_stream(writing_file._id, file_stream) if "permissions" in writing_file.metadata: os.chmod(file_path, writing_file.metadata["permissions"]) @@ -243,15 +246,21 @@ class FsMongo(FsBase): if "path" in config: self.path = config["path"] else: - raise FsException("Missing parameter \"path\"") + raise FsException('Missing parameter "path"') if not self.path.endswith("/"): self.path += "/" if not os.path.exists(self.path): - raise FsException("Invalid configuration param at '[storage]': path '{}' does not exist".format( - config["path"])) + raise FsException( + "Invalid configuration param at '[storage]': path '{}' does not exist".format( + config["path"] + ) + ) elif not os.access(self.path, os.W_OK): - raise FsException("Invalid configuration param at '[storage]': path '{}' is not writable".format( - config["path"])) + raise FsException( + "Invalid configuration param at '[storage]': path '{}' is not writable".format( + config["path"] + ) + ) if all(key in config.keys() for key in ["uri", "collection"]): self.client = MongoClient(config["uri"]) self.fs = GridFSBucket(self.client[config["collection"]]) @@ -260,9 +269,9 @@ class FsMongo(FsBase): self.fs = GridFSBucket(self.client[config["collection"]]) else: if "collection" not in config.keys(): - raise FsException("Missing parameter \"collection\"") + raise FsException('Missing parameter "collection"') else: - raise FsException("Missing parameters: \"uri\" or \"host\" + \"port\"") + raise FsException('Missing parameters: "uri" or "host" + "port"') except FsException: raise except Exception as e: # TODO refine @@ -278,8 +287,7 @@ class FsMongo(FsBase): :return: None or raises an exception """ try: - self.fs.upload_from_stream( - folder, BytesIO(), metadata={"type": "dir"}) + self.fs.upload_from_stream(folder, BytesIO(), metadata={"type": "dir"}) except errors.FileExists: # make it idempotent pass except Exception as e: @@ -294,15 +302,15 @@ class FsMongo(FsBase): """ try: dst_cursor = self.fs.find( - {"filename": {"$regex": "^{}(/|$)".format(dst)}}, - no_cursor_timeout=True) + {"filename": {"$regex": "^{}(/|$)".format(dst)}}, no_cursor_timeout=True + ) for dst_file in dst_cursor: self.fs.delete(dst_file._id) src_cursor = self.fs.find( - {"filename": {"$regex": "^{}(/|$)".format(src)}}, - no_cursor_timeout=True) + {"filename": {"$regex": "^{}(/|$)".format(src)}}, no_cursor_timeout=True + ) for src_file in src_cursor: self.fs.rename(src_file._id, src_file.filename.replace(src, dst, 1)) @@ -324,7 +332,9 @@ class FsMongo(FsBase): exception_file = next(cursor, 
None) if exception_file: - raise FsException("Multiple files found", http_code=HTTPStatus.INTERNAL_SERVER_ERROR) + raise FsException( + "Multiple files found", http_code=HTTPStatus.INTERNAL_SERVER_ERROR + ) # if no special mode is required just check it does exists if not mode: @@ -352,7 +362,9 @@ class FsMongo(FsBase): exception_file = next(cursor, None) if exception_file: - raise FsException("Multiple files found", http_code=HTTPStatus.INTERNAL_SERVER_ERROR) + raise FsException( + "Multiple files found", http_code=HTTPStatus.INTERNAL_SERVER_ERROR + ) return requested_file.length @@ -380,16 +392,9 @@ class FsMongo(FsBase): else: file_type = "dir" - metadata = { - "type": file_type, - "permissions": member.mode - } + metadata = {"type": file_type, "permissions": member.mode} - self.fs.upload_from_stream( - f + "/" + member.name, - stream, - metadata=metadata - ) + self.fs.upload_from_stream(f + "/" + member.name, stream, metadata=metadata) stream.close() @@ -408,9 +413,13 @@ class FsMongo(FsBase): else: return GridStringStream(f, self.fs, mode) except errors.NoFile: - raise FsException("File {} does not exist".format(f), http_code=HTTPStatus.NOT_FOUND) + raise FsException( + "File {} does not exist".format(f), http_code=HTTPStatus.NOT_FOUND + ) except IOError: - raise FsException("File {} cannot be opened".format(f), http_code=HTTPStatus.BAD_REQUEST) + raise FsException( + "File {} cannot be opened".format(f), http_code=HTTPStatus.BAD_REQUEST + ) def dir_ls(self, storage): """ @@ -427,18 +436,28 @@ class FsMongo(FsBase): exception_dir = next(dir_cursor, None) if exception_dir: - raise FsException("Multiple directories found", http_code=HTTPStatus.INTERNAL_SERVER_ERROR) + raise FsException( + "Multiple directories found", + http_code=HTTPStatus.INTERNAL_SERVER_ERROR, + ) if requested_dir.metadata["type"] != "dir": - raise FsException("File {} does not exist".format(f), http_code=HTTPStatus.NOT_FOUND) + raise FsException( + "File {} does not exist".format(f), + http_code=HTTPStatus.NOT_FOUND, + ) - files_cursor = self.fs.find({"filename": {"$regex": "^{}/([^/])*".format(f)}}) + files_cursor = self.fs.find( + {"filename": {"$regex": "^{}/([^/])*".format(f)}} + ) for children_file in files_cursor: - files += [children_file.filename.replace(f + '/', '', 1)] + files += [children_file.filename.replace(f + "/", "", 1)] return files except IOError: - raise FsException("File {} cannot be opened".format(f), http_code=HTTPStatus.BAD_REQUEST) + raise FsException( + "File {} cannot be opened".format(f), http_code=HTTPStatus.BAD_REQUEST + ) def file_delete(self, storage, ignore_non_exist=False): """ @@ -457,7 +476,10 @@ class FsMongo(FsBase): exception_file = next(file_cursor, None) if exception_file: - raise FsException("Multiple files found", http_code=HTTPStatus.INTERNAL_SERVER_ERROR) + raise FsException( + "Multiple files found", + http_code=HTTPStatus.INTERNAL_SERVER_ERROR, + ) if requested_file.metadata["type"] == "dir": dir_cursor = self.fs.find({"filename": {"$regex": "^{}".format(f)}}) @@ -467,9 +489,15 @@ class FsMongo(FsBase): else: self.fs.delete(requested_file._id) if not found and not ignore_non_exist: - raise FsException("File {} does not exist".format(storage), http_code=HTTPStatus.NOT_FOUND) + raise FsException( + "File {} does not exist".format(storage), + http_code=HTTPStatus.NOT_FOUND, + ) except IOError as e: - raise FsException("File {} cannot be deleted: {}".format(f, e), http_code=HTTPStatus.INTERNAL_SERVER_ERROR) + raise FsException( + "File {} cannot be deleted: {}".format(f, 
e), + http_code=HTTPStatus.INTERNAL_SERVER_ERROR, + ) def sync(self, from_path=None): """ @@ -490,10 +518,7 @@ class FsMongo(FsBase): members = [] for root, dirs, files in os.walk(os_path): for folder in dirs: - member = { - "filename": os.path.join(root, folder), - "type": "dir" - } + member = {"filename": os.path.join(root, folder), "type": "dir"} members.append(member) for file in files: filename = os.path.join(root, file) @@ -501,10 +526,7 @@ class FsMongo(FsBase): file_type = "sym" else: file_type = "file" - member = { - "filename": os.path.join(root, file), - "type": file_type - } + member = {"filename": os.path.join(root, file), "type": file_type} members.append(member) # Obtain files in mongo dict @@ -520,10 +542,14 @@ class FsMongo(FsBase): # convert to relative path rel_filename = os.path.relpath(member["filename"], self.path) - last_modified_date = datetime.datetime.fromtimestamp(os.path.getmtime(member["filename"])) + last_modified_date = datetime.datetime.fromtimestamp( + os.path.getmtime(member["filename"]) + ) remote_file = remote_files.get(rel_filename) - upload_date = remote_file[0].uploadDate if remote_file else datetime.datetime.min + upload_date = ( + remote_file[0].uploadDate if remote_file else datetime.datetime.min + ) # remove processed files from dict remote_files.pop(rel_filename, None) @@ -536,21 +562,16 @@ class FsMongo(FsBase): if file_type == "dir": stream = BytesIO() elif file_type == "sym": - stream = BytesIO(os.readlink(member["filename"]).encode("utf-8")) + stream = BytesIO( + os.readlink(member["filename"]).encode("utf-8") + ) else: fh = open(member["filename"], "rb") stream = BytesIO(fh.read()) - metadata = { - "type": file_type, - "permissions": mask - } + metadata = {"type": file_type, "permissions": mask} - self.fs.upload_from_stream( - rel_filename, - stream, - metadata=metadata - ) + self.fs.upload_from_stream(rel_filename, stream, metadata=metadata) # delete old files if remote_file: @@ -570,7 +591,7 @@ class FsMongo(FsBase): def _get_mongo_files(self, from_path=None): file_dict = {} - file_cursor = self.fs.find(no_cursor_timeout=True, sort=[('uploadDate', -1)]) + file_cursor = self.fs.find(no_cursor_timeout=True, sort=[("uploadDate", -1)]) for file in file_cursor: if from_path and not file.filename.startswith(from_path): continue diff --git a/osm_common/msgbase.py b/osm_common/msgbase.py index 7c27f64..49adcb8 100644 --- a/osm_common/msgbase.py +++ b/osm_common/msgbase.py @@ -43,7 +43,7 @@ class MsgBase(object): Base class for all msgXXXX classes """ - def __init__(self, logger_name='msg', lock=False): + def __init__(self, logger_name="msg", lock=False): """ Constructor of FsBase :param logger_name: logging name @@ -69,13 +69,25 @@ class MsgBase(object): pass def write(self, topic, key, msg): - raise MsgException("Method 'write' not implemented", http_code=HTTPStatus.INTERNAL_SERVER_ERROR) + raise MsgException( + "Method 'write' not implemented", http_code=HTTPStatus.INTERNAL_SERVER_ERROR + ) def read(self, topic): - raise MsgException("Method 'read' not implemented", http_code=HTTPStatus.INTERNAL_SERVER_ERROR) + raise MsgException( + "Method 'read' not implemented", http_code=HTTPStatus.INTERNAL_SERVER_ERROR + ) async def aiowrite(self, topic, key, msg, loop=None): - raise MsgException("Method 'aiowrite' not implemented", http_code=HTTPStatus.INTERNAL_SERVER_ERROR) + raise MsgException( + "Method 'aiowrite' not implemented", + http_code=HTTPStatus.INTERNAL_SERVER_ERROR, + ) - async def aioread(self, topic, loop=None, callback=None, 
aiocallback=None, group_id=None, **kwargs): - raise MsgException("Method 'aioread' not implemented", http_code=HTTPStatus.INTERNAL_SERVER_ERROR) + async def aioread( + self, topic, loop=None, callback=None, aiocallback=None, group_id=None, **kwargs + ): + raise MsgException( + "Method 'aioread' not implemented", + http_code=HTTPStatus.INTERNAL_SERVER_ERROR, + ) diff --git a/osm_common/msgkafka.py b/osm_common/msgkafka.py index 4d02024..5caa5b1 100644 --- a/osm_common/msgkafka.py +++ b/osm_common/msgkafka.py @@ -21,12 +21,14 @@ from aiokafka import AIOKafkaProducer from aiokafka.errors import KafkaError from osm_common.msgbase import MsgBase, MsgException -__author__ = "Alfonso Tierno , " \ - "Guillermo Calvino " +__author__ = ( + "Alfonso Tierno , " + "Guillermo Calvino " +) class MsgKafka(MsgBase): - def __init__(self, logger_name='msg', lock=False): + def __init__(self, logger_name="msg", lock=False): super().__init__(logger_name, lock) self.host = None self.port = None @@ -64,15 +66,19 @@ class MsgKafka(MsgBase): :param msg: message content, can be string or dictionary :return: None or raises MsgException on failing """ - retry = 2 # Try two times + retry = 2 # Try two times while retry: try: - self.loop.run_until_complete(self.aiowrite(topic=topic, key=key, msg=msg)) + self.loop.run_until_complete( + self.aiowrite(topic=topic, key=key, msg=msg) + ) break except Exception as e: retry -= 1 if retry == 0: - raise MsgException("Error writing {} topic: {}".format(topic, str(e))) + raise MsgException( + "Error writing {} topic: {}".format(topic, str(e)) + ) def read(self, topic): """ @@ -100,17 +106,33 @@ class MsgKafka(MsgBase): if not loop: loop = self.loop try: - self.producer = AIOKafkaProducer(loop=loop, key_serializer=str.encode, value_serializer=str.encode, - bootstrap_servers=self.broker) + self.producer = AIOKafkaProducer( + loop=loop, + key_serializer=str.encode, + value_serializer=str.encode, + bootstrap_servers=self.broker, + ) await self.producer.start() - await self.producer.send(topic=topic, key=key, value=yaml.safe_dump(msg, default_flow_style=True)) + await self.producer.send( + topic=topic, key=key, value=yaml.safe_dump(msg, default_flow_style=True) + ) except Exception as e: - raise MsgException("Error publishing topic '{}', key '{}': {}".format(topic, key, e)) + raise MsgException( + "Error publishing topic '{}', key '{}': {}".format(topic, key, e) + ) finally: await self.producer.stop() - async def aioread(self, topic, loop=None, callback=None, aiocallback=None, group_id=None, from_beginning=None, - **kwargs): + async def aioread( + self, + topic, + loop=None, + callback=None, + aiocallback=None, + group_id=None, + from_beginning=None, + **kwargs + ): """ Asyncio read from one or several topics. 
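        Example (illustrative sketch only, not part of this patch: the broker
        address, the "host"/"port" config keys, the topic name and the payload
        are all assumptions):

            import asyncio

            from osm_common.msgkafka import MsgKafka

            msg = MsgKafka()
            msg.connect({"host": "localhost", "port": 9092})
            loop = asyncio.get_event_loop()
            # Publish one message, then read it back; with no callback,
            # aioread returns (topic, key, value) for the first message seen.
            loop.run_until_complete(
                msg.aiowrite("alarms", "key1", {"status": "ok"}, loop=loop)
            )
            topic, key, value = loop.run_until_complete(
                msg.aioread("alarms", loop=loop, from_beginning=True)
            )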
:param topic: can be str: single topic; or str list: several topics @@ -137,19 +159,36 @@ class MsgKafka(MsgBase): topic_list = topic else: topic_list = (topic,) - self.consumer = AIOKafkaConsumer(loop=loop, bootstrap_servers=self.broker, group_id=group_id, - auto_offset_reset="earliest" if from_beginning else "latest") + self.consumer = AIOKafkaConsumer( + loop=loop, + bootstrap_servers=self.broker, + group_id=group_id, + auto_offset_reset="earliest" if from_beginning else "latest", + ) await self.consumer.start() self.consumer.subscribe(topic_list) async for message in self.consumer: if callback: - callback(message.topic, yaml.safe_load(message.key), yaml.safe_load(message.value), **kwargs) + callback( + message.topic, + yaml.safe_load(message.key), + yaml.safe_load(message.value), + **kwargs + ) elif aiocallback: - await aiocallback(message.topic, yaml.safe_load(message.key), yaml.safe_load(message.value), - **kwargs) + await aiocallback( + message.topic, + yaml.safe_load(message.key), + yaml.safe_load(message.value), + **kwargs + ) else: - return message.topic, yaml.safe_load(message.key), yaml.safe_load(message.value) + return ( + message.topic, + yaml.safe_load(message.key), + yaml.safe_load(message.value), + ) except KafkaError as e: raise MsgException(str(e)) finally: diff --git a/osm_common/msglocal.py b/osm_common/msglocal.py index 843b376..c10ff17 100644 --- a/osm_common/msglocal.py +++ b/osm_common/msglocal.py @@ -34,8 +34,7 @@ One text line per message is used in yaml format. class MsgLocal(MsgBase): - - def __init__(self, logger_name='msg', lock=False): + def __init__(self, logger_name="msg", lock=False): super().__init__(logger_name, lock) self.path = None # create a different file for each topic @@ -86,7 +85,12 @@ class MsgLocal(MsgBase): with self.lock: if topic not in self.files_write: self.files_write[topic] = open(self.path + topic, "a+") - yaml.safe_dump({key: msg}, self.files_write[topic], default_flow_style=True, width=20000) + yaml.safe_dump( + {key: msg}, + self.files_write[topic], + default_flow_style=True, + width=20000, + ) self.files_write[topic].flush() except Exception as e: # TODO refine raise MsgException(str(e), HTTPStatus.INTERNAL_SERVER_ERROR) @@ -102,14 +106,18 @@ class MsgLocal(MsgBase): if isinstance(topic, (list, tuple)): topic_list = topic else: - topic_list = (topic, ) + topic_list = (topic,) while True: for single_topic in topic_list: with self.lock: if single_topic not in self.files_read: - self.files_read[single_topic] = open(self.path + single_topic, "a+") + self.files_read[single_topic] = open( + self.path + single_topic, "a+" + ) self.buffer[single_topic] = "" - self.buffer[single_topic] += self.files_read[single_topic].readline() + self.buffer[single_topic] += self.files_read[ + single_topic + ].readline() if not self.buffer[single_topic].endswith("\n"): continue msg_dict = yaml.safe_load(self.buffer[single_topic]) @@ -123,7 +131,9 @@ class MsgLocal(MsgBase): except Exception as e: # TODO refine raise MsgException(str(e), HTTPStatus.INTERNAL_SERVER_ERROR) - async def aioread(self, topic, loop=None, callback=None, aiocallback=None, group_id=None, **kwargs): + async def aioread( + self, topic, loop=None, callback=None, aiocallback=None, group_id=None, **kwargs + ): """ Asyncio read from one or several topics. 
It blocks :param topic: can be str: single topic; or str list: several topics diff --git a/osm_common/sol004_package.py b/osm_common/sol004_package.py index 7d402f5..e6b40b4 100644 --- a/osm_common/sol004_package.py +++ b/osm_common/sol004_package.py @@ -60,16 +60,16 @@ import os import hashlib -_METADATA_FILE_PATH = 'TOSCA-Metadata/TOSCA.meta' -_METADATA_DESCRIPTOR_FIELD = 'Entry-Definitions' -_METADATA_MANIFEST_FIELD = 'ETSI-Entry-Manifest' -_METADATA_CHANGELOG_FIELD = 'ETSI-Entry-Change-Log' -_METADATA_LICENSES_FIELD = 'ETSI-Entry-Licenses' -_METADATA_DEFAULT_CHANGELOG_PATH = 'ChangeLog.txt' -_METADATA_DEFAULT_LICENSES_PATH = 'Licenses' -_MANIFEST_FILE_PATH_FIELD = 'Source' -_MANIFEST_FILE_HASH_ALGORITHM_FIELD = 'Algorithm' -_MANIFEST_FILE_HASH_DIGEST_FIELD = 'Hash' +_METADATA_FILE_PATH = "TOSCA-Metadata/TOSCA.meta" +_METADATA_DESCRIPTOR_FIELD = "Entry-Definitions" +_METADATA_MANIFEST_FIELD = "ETSI-Entry-Manifest" +_METADATA_CHANGELOG_FIELD = "ETSI-Entry-Change-Log" +_METADATA_LICENSES_FIELD = "ETSI-Entry-Licenses" +_METADATA_DEFAULT_CHANGELOG_PATH = "ChangeLog.txt" +_METADATA_DEFAULT_LICENSES_PATH = "Licenses" +_MANIFEST_FILE_PATH_FIELD = "Source" +_MANIFEST_FILE_HASH_ALGORITHM_FIELD = "Algorithm" +_MANIFEST_FILE_HASH_DIGEST_FIELD = "Hash" class SOL004PackageException(Exception): @@ -77,7 +77,7 @@ class SOL004PackageException(Exception): class SOL004Package: - def __init__(self, package_path=''): + def __init__(self, package_path=""): self._package_path = package_path self._package_metadata = self._parse_package_metadata() self._manifest_data = self._parse_manifest_data() @@ -94,21 +94,29 @@ class SOL004Package: except FileNotFoundError as e: raise e except (Exception, OSError) as e: - raise SOL004PackageException('Error parsing {}: {}'.format(_METADATA_FILE_PATH, e)) + raise SOL004PackageException( + "Error parsing {}: {}".format(_METADATA_FILE_PATH, e) + ) def _parse_package_metadata_without_metadata_dir(self): package_root_files = {f for f in os.listdir(self._package_path)} - package_root_yamls = [f for f in package_root_files if f.endswith('.yml') or f.endswith('.yaml')] + package_root_yamls = [ + f for f in package_root_files if f.endswith(".yml") or f.endswith(".yaml") + ] if len(package_root_yamls) != 1: - error_msg = 'Error parsing package metadata: there should be exactly 1 descriptor YAML, found {}' + error_msg = "Error parsing package metadata: there should be exactly 1 descriptor YAML, found {}" raise SOL004PackageException(error_msg.format(len(package_root_yamls))) # TODO: Parse extra metadata from descriptor YAML? 
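        # With no TOSCA-Metadata directory, the package root must hold exactly
        # one descriptor YAML; the metadata entries are synthesized from its
        # name. For a (hypothetical) "my_vnfd.yaml" the defaults below resolve
        # to:
        #
        #   Entry-Definitions:     my_vnfd.yaml
        #   ETSI-Entry-Manifest:   my_vnfd.mf
        #   ETSI-Entry-Change-Log: ChangeLog.txt
        #   ETSI-Entry-Licenses:   Licenses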
- return [{ - _METADATA_DESCRIPTOR_FIELD: package_root_yamls[0], - _METADATA_MANIFEST_FIELD: '{}.mf'.format(os.path.splitext(package_root_yamls[0])[0]), - _METADATA_CHANGELOG_FIELD: _METADATA_DEFAULT_CHANGELOG_PATH, - _METADATA_LICENSES_FIELD: _METADATA_DEFAULT_LICENSES_PATH - }] + return [ + { + _METADATA_DESCRIPTOR_FIELD: package_root_yamls[0], + _METADATA_MANIFEST_FIELD: "{}.mf".format( + os.path.splitext(package_root_yamls[0])[0] + ), + _METADATA_CHANGELOG_FIELD: _METADATA_DEFAULT_CHANGELOG_PATH, + _METADATA_LICENSES_FIELD: _METADATA_DEFAULT_LICENSES_PATH, + } + ] def _parse_manifest_data(self): manifest_path = None @@ -117,13 +125,17 @@ class SOL004Package: manifest_path = tosca_meta[_METADATA_MANIFEST_FIELD] break else: - error_msg = 'Error parsing {}: no {} field on path'.format(_METADATA_FILE_PATH, _METADATA_MANIFEST_FIELD) + error_msg = "Error parsing {}: no {} field on path".format( + _METADATA_FILE_PATH, _METADATA_MANIFEST_FIELD + ) raise SOL004PackageException(error_msg) try: return self._parse_file_in_blocks(manifest_path) except (Exception, OSError) as e: - raise SOL004PackageException('Error parsing {}: {}'.format(manifest_path, e)) + raise SOL004PackageException( + "Error parsing {}: {}".format(manifest_path, e) + ) def _get_package_file_full_path(self, file_relative_path): return os.path.join(self._package_path, file_relative_path) @@ -131,16 +143,20 @@ class SOL004Package: def _parse_file_in_blocks(self, file_relative_path): file_path = self._get_package_file_full_path(file_relative_path) with open(file_path) as f: - blocks = f.read().split('\n\n') + blocks = f.read().split("\n\n") parsed_blocks = map(yaml.safe_load, blocks) return [block for block in parsed_blocks if block is not None] def _get_package_file_manifest_data(self, file_relative_path): for file_data in self._manifest_data: - if file_data.get(_MANIFEST_FILE_PATH_FIELD, '') == file_relative_path: + if file_data.get(_MANIFEST_FILE_PATH_FIELD, "") == file_relative_path: return file_data - error_msg = 'Error parsing {} manifest data: file not found on manifest file'.format(file_relative_path) + error_msg = ( + "Error parsing {} manifest data: file not found on manifest file".format( + file_relative_path + ) + ) raise SOL004PackageException(error_msg) def get_package_file_hash_digest_from_manifest(self, file_relative_path): @@ -149,7 +165,9 @@ class SOL004Package: try: return file_manifest_data[_MANIFEST_FILE_HASH_DIGEST_FIELD] except Exception as e: - raise SOL004PackageException('Error parsing {} hash digest: {}'.format(file_relative_path, e)) + raise SOL004PackageException( + "Error parsing {} hash digest: {}".format(file_relative_path, e) + ) def get_package_file_hash_algorithm_from_manifest(self, file_relative_path): """Returns the hash algorithm of a file inside this package as specified on the manifest file.""" @@ -157,16 +175,19 @@ class SOL004Package: try: return file_manifest_data[_MANIFEST_FILE_HASH_ALGORITHM_FIELD] except Exception as e: - raise SOL004PackageException('Error parsing {} hash digest: {}'.format(file_relative_path, e)) + raise SOL004PackageException( + "Error parsing {} hash digest: {}".format(file_relative_path, e) + ) @staticmethod def _get_hash_function_from_hash_algorithm(hash_algorithm): - function_to_algorithm = { - 'SHA-256': hashlib.sha256, - 'SHA-512': hashlib.sha512 - } + function_to_algorithm = {"SHA-256": hashlib.sha256, "SHA-512": hashlib.sha512} if hash_algorithm not in function_to_algorithm: - error_msg = 'Error checking hash function: hash algorithm {} not 
supported'.format(hash_algorithm) + error_msg = ( + "Error checking hash function: hash algorithm {} not supported".format( + hash_algorithm + ) + ) raise SOL004PackageException(error_msg) return function_to_algorithm[hash_algorithm] @@ -177,16 +198,24 @@ class SOL004Package: with open(file_path, "rb") as f: return hash_function(f.read()).hexdigest() except Exception as e: - raise SOL004PackageException('Error hashing {}: {}'.format(file_relative_path, e)) + raise SOL004PackageException( + "Error hashing {}: {}".format(file_relative_path, e) + ) def validate_package_file_hash(self, file_relative_path): """Validates the integrity of a file using the hash algorithm and digest on the package manifest.""" - hash_algorithm = self.get_package_file_hash_algorithm_from_manifest(file_relative_path) + hash_algorithm = self.get_package_file_hash_algorithm_from_manifest( + file_relative_path + ) file_hash = self._calculate_file_hash(file_relative_path, hash_algorithm) - expected_file_hash = self.get_package_file_hash_digest_from_manifest(file_relative_path) + expected_file_hash = self.get_package_file_hash_digest_from_manifest( + file_relative_path + ) if file_hash != expected_file_hash: - error_msg = 'Error validating {} hash: calculated hash {} is different than manifest hash {}' - raise SOL004PackageException(error_msg.format(file_relative_path, file_hash, expected_file_hash)) + error_msg = "Error validating {} hash: calculated hash {} is different than manifest hash {}" + raise SOL004PackageException( + error_msg.format(file_relative_path, file_hash, expected_file_hash) + ) def validate_package_hashes(self): """Validates the integrity of all files listed on the package manifest.""" @@ -201,5 +230,7 @@ class SOL004Package: if _METADATA_DESCRIPTOR_FIELD in tosca_meta: return tosca_meta[_METADATA_DESCRIPTOR_FIELD] - error_msg = 'Error: no {} entry found on {}'.format(_METADATA_DESCRIPTOR_FIELD, _METADATA_FILE_PATH) + error_msg = "Error: no {} entry found on {}".format( + _METADATA_DESCRIPTOR_FIELD, _METADATA_FILE_PATH + ) raise SOL004PackageException(error_msg) diff --git a/osm_common/tests/packages/invalid_package_vnf/Scripts/charms/simple/src/charm.py b/osm_common/tests/packages/invalid_package_vnf/Scripts/charms/simple/src/charm.py index 409f286..54cefc4 100755 --- a/osm_common/tests/packages/invalid_package_vnf/Scripts/charms/simple/src/charm.py +++ b/osm_common/tests/packages/invalid_package_vnf/Scripts/charms/simple/src/charm.py @@ -27,7 +27,6 @@ sys.path.append("lib") class MyNativeCharm(CharmBase): - def __init__(self, framework, key): super().__init__(framework, key) diff --git a/osm_common/tests/packages/invalid_package_vnf/manifest.mf b/osm_common/tests/packages/invalid_package_vnf/manifest.mf index dacf77f..007dde7 100644 --- a/osm_common/tests/packages/invalid_package_vnf/manifest.mf +++ b/osm_common/tests/packages/invalid_package_vnf/manifest.mf @@ -45,7 +45,7 @@ Hash: 7455ca868843cc5da1f0a2255cdedb64a69df3b618c344b83b82848a94540eda # Invalid hash algorithm Source: Scripts/charms/simple/src/charm.py Algorithm: SHA-733 -Hash: 7895f7b9e1b7ed5b5bcd64398950ca95b456d7fc973334351474eed466c2f480 +Hash: ea72f897a966e6174ed9164fabc3c500df5a2f712eb6b22ab2408afb07d04d14 # Wrong hash Source: Scripts/charms/simple/hooks/start diff --git a/osm_common/tests/packages/native_charm_with_metadata_dir_vnf/Scripts/charms/simple/src/charm.py b/osm_common/tests/packages/native_charm_with_metadata_dir_vnf/Scripts/charms/simple/src/charm.py index 409f286..54cefc4 100755 --- 
a/osm_common/tests/packages/native_charm_with_metadata_dir_vnf/Scripts/charms/simple/src/charm.py +++ b/osm_common/tests/packages/native_charm_with_metadata_dir_vnf/Scripts/charms/simple/src/charm.py @@ -27,7 +27,6 @@ sys.path.append("lib") class MyNativeCharm(CharmBase): - def __init__(self, framework, key): super().__init__(framework, key) diff --git a/osm_common/tests/packages/native_charm_with_metadata_dir_vnf/manifest.mf b/osm_common/tests/packages/native_charm_with_metadata_dir_vnf/manifest.mf index b42c240..ab907a8 100644 --- a/osm_common/tests/packages/native_charm_with_metadata_dir_vnf/manifest.mf +++ b/osm_common/tests/packages/native_charm_with_metadata_dir_vnf/manifest.mf @@ -46,7 +46,7 @@ Hash: 0eef3f1a642339e2053af48a7e370dac1952f9cb81166e439e8f72afd6f03621 Source: Scripts/charms/simple/src/charm.py Algorithm: SHA-256 -Hash: 7895f7b9e1b7ed5b5bcd64398950ca95b456d7fc973334351474eed466c2f480 +Hash: ea72f897a966e6174ed9164fabc3c500df5a2f712eb6b22ab2408afb07d04d14 Source: Scripts/charms/simple/hooks/start Algorithm: SHA-256 diff --git a/osm_common/tests/packages/native_charm_without_metadata_dir_vnf/Scripts/charms/simple/src/charm.py b/osm_common/tests/packages/native_charm_without_metadata_dir_vnf/Scripts/charms/simple/src/charm.py index 409f286..54cefc4 100755 --- a/osm_common/tests/packages/native_charm_without_metadata_dir_vnf/Scripts/charms/simple/src/charm.py +++ b/osm_common/tests/packages/native_charm_without_metadata_dir_vnf/Scripts/charms/simple/src/charm.py @@ -27,7 +27,6 @@ sys.path.append("lib") class MyNativeCharm(CharmBase): - def __init__(self, framework, key): super().__init__(framework, key) diff --git a/osm_common/tests/packages/native_charm_without_metadata_dir_vnf/native_charm_vnfd.mf b/osm_common/tests/packages/native_charm_without_metadata_dir_vnf/native_charm_vnfd.mf index d948858..fbe1a70 100644 --- a/osm_common/tests/packages/native_charm_without_metadata_dir_vnf/native_charm_vnfd.mf +++ b/osm_common/tests/packages/native_charm_without_metadata_dir_vnf/native_charm_vnfd.mf @@ -46,7 +46,7 @@ Hash: 0eef3f1a642339e2053af48a7e370dac1952f9cb81166e439e8f72afd6f03621 Source: Scripts/charms/simple/src/charm.py Algorithm: SHA-256 -Hash: 7895f7b9e1b7ed5b5bcd64398950ca95b456d7fc973334351474eed466c2f480 +Hash: ea72f897a966e6174ed9164fabc3c500df5a2f712eb6b22ab2408afb07d04d14 Source: Scripts/charms/simple/hooks/start Algorithm: SHA-256 diff --git a/osm_common/tests/test_dbbase.py b/osm_common/tests/test_dbbase.py index 1abd1c7..117350e 100644 --- a/osm_common/tests/test_dbbase.py +++ b/osm_common/tests/test_dbbase.py @@ -43,7 +43,9 @@ def test_constructor(): def test_db_connect(db_base): with pytest.raises(DbException) as excinfo: db_base.db_connect(None) - assert str(excinfo.value).startswith(exception_message("Method 'db_connect' not implemented")) + assert str(excinfo.value).startswith( + exception_message("Method 'db_connect' not implemented") + ) def test_db_disconnect(db_base): @@ -53,42 +55,54 @@ def test_db_disconnect(db_base): def test_get_list(db_base): with pytest.raises(DbException) as excinfo: db_base.get_list(None, None) - assert str(excinfo.value).startswith(exception_message("Method 'get_list' not implemented")) + assert str(excinfo.value).startswith( + exception_message("Method 'get_list' not implemented") + ) assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND def test_get_one(db_base): with pytest.raises(DbException) as excinfo: db_base.get_one(None, None, None, None) - assert str(excinfo.value).startswith(exception_message("Method 'get_one' 
not implemented")) + assert str(excinfo.value).startswith( + exception_message("Method 'get_one' not implemented") + ) assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND def test_create(db_base): with pytest.raises(DbException) as excinfo: db_base.create(None, None) - assert str(excinfo.value).startswith(exception_message("Method 'create' not implemented")) + assert str(excinfo.value).startswith( + exception_message("Method 'create' not implemented") + ) assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND def test_create_list(db_base): with pytest.raises(DbException) as excinfo: db_base.create_list(None, None) - assert str(excinfo.value).startswith(exception_message("Method 'create_list' not implemented")) + assert str(excinfo.value).startswith( + exception_message("Method 'create_list' not implemented") + ) assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND def test_del_list(db_base): with pytest.raises(DbException) as excinfo: db_base.del_list(None, None) - assert str(excinfo.value).startswith(exception_message("Method 'del_list' not implemented")) + assert str(excinfo.value).startswith( + exception_message("Method 'del_list' not implemented") + ) assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND def test_del_one(db_base): with pytest.raises(DbException) as excinfo: db_base.del_one(None, None, None) - assert str(excinfo.value).startswith(exception_message("Method 'del_one' not implemented")) + assert str(excinfo.value).startswith( + exception_message("Method 'del_one' not implemented") + ) assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND @@ -118,17 +132,25 @@ class TestEncryption(unittest.TestCase): for db_base in self.db_bases: for value, salt in TEST: # no encryption - encrypted = db_base.encrypt(value, schema_version='1.0', salt=salt) - self.assertEqual(encrypted, value, "value '{}' has been encrypted".format(value)) - decrypted = db_base.decrypt(encrypted, schema_version='1.0', salt=salt) - self.assertEqual(decrypted, value, "value '{}' has been decrypted".format(value)) + encrypted = db_base.encrypt(value, schema_version="1.0", salt=salt) + self.assertEqual( + encrypted, value, "value '{}' has been encrypted".format(value) + ) + decrypted = db_base.decrypt(encrypted, schema_version="1.0", salt=salt) + self.assertEqual( + decrypted, value, "value '{}' has been decrypted".format(value) + ) # encrypt/decrypt - encrypted = db_base.encrypt(value, schema_version='1.1', salt=salt) - self.assertNotEqual(encrypted, value, "value '{}' has not been encrypted".format(value)) + encrypted = db_base.encrypt(value, schema_version="1.1", salt=salt) + self.assertNotEqual( + encrypted, value, "value '{}' has not been encrypted".format(value) + ) self.assertIsInstance(encrypted, str, "Encrypted is not ascii text") - decrypted = db_base.decrypt(encrypted, schema_version='1.1', salt=salt) - self.assertEqual(decrypted, value, "value is not equal after encryption/decryption") + decrypted = db_base.decrypt(encrypted, schema_version="1.1", salt=salt) + self.assertEqual( + decrypted, value, "value is not equal after encryption/decryption" + ) def test_encrypt_decrypt_salt(self): value = "value to be encrypted!" 
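The contract these tests pin down, as a minimal standalone sketch (the master
key string is invented, and the DbBase()/set_secret_key() pattern mirrors the
DbMongo connect code earlier in this series; schema_version "1.0" is a
pass-through while "1.1" actually encrypts):

    from osm_common.dbbase import DbBase

    db = DbBase()
    db.set_secret_key("made-up-master-key")  # invented key; setter as used by DbMongo

    # schema_version "1.0": encrypt/decrypt leave the value untouched.
    assert db.encrypt("secret", schema_version="1.0", salt=None) == "secret"

    # schema_version "1.1": real encryption; the same salt must be passed to
    # decrypt, and different salts produce different ciphertexts.
    token = db.encrypt("secret", schema_version="1.1", salt="some-salt")
    assert token != "secret"
    assert db.decrypt(token, schema_version="1.1", salt="some-salt") == "secret"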
@@ -136,22 +158,44 @@ class TestEncryption(unittest.TestCase): for db_base in self.db_bases: for salt in (None, "salt 1", "1afd5d1a-4a7e-4d9c-8c65-251290183106"): # encrypt/decrypt - encrypted.append(db_base.encrypt(value, schema_version='1.1', salt=salt)) - self.assertNotEqual(encrypted[-1], value, "value '{}' has not been encrypted".format(value)) + encrypted.append( + db_base.encrypt(value, schema_version="1.1", salt=salt) + ) + self.assertNotEqual( + encrypted[-1], + value, + "value '{}' has not been encrypted".format(value), + ) self.assertIsInstance(encrypted[-1], str, "Encrypted is not ascii text") - decrypted = db_base.decrypt(encrypted[-1], schema_version='1.1', salt=salt) - self.assertEqual(decrypted, value, "value is not equal after encryption/decryption") + decrypted = db_base.decrypt( + encrypted[-1], schema_version="1.1", salt=salt + ) + self.assertEqual( + decrypted, value, "value is not equal after encryption/decryption" + ) for i in range(0, len(encrypted)): - for j in range(i+1, len(encrypted)): - self.assertNotEqual(encrypted[i], encrypted[j], - "encryption with different salt must contain different result") + for j in range(i + 1, len(encrypted)): + self.assertNotEqual( + encrypted[i], + encrypted[j], + "encryption with different salt must contain different result", + ) # decrypt with a different master key try: - decrypted = self.db_bases[-1].decrypt(encrypted[0], schema_version='1.1', salt=None) - self.assertNotEqual(encrypted[0], decrypted, "Decryption with different KEY must generate different result") + decrypted = self.db_bases[-1].decrypt( + encrypted[0], schema_version="1.1", salt=None + ) + self.assertNotEqual( + encrypted[0], + decrypted, + "Decryption with different KEY must generate different result", + ) except DbException as e: - self.assertEqual(e.http_code, HTTPStatus.INTERNAL_SERVER_ERROR, - "Decryption with different KEY does not provide expected http_code") + self.assertEqual( + e.http_code, + HTTPStatus.INTERNAL_SERVER_ERROR, + "Decryption with different KEY does not provide expected http_code", + ) class TestDeepUpdate(unittest.TestCase): @@ -193,30 +237,62 @@ class TestDeepUpdate(unittest.TestCase): ({"A": ["a", "b", "a"]}, {"A": {"$a": None}}, {"A": ["b"]}), ({"A": ["a", "b", "a"]}, {"A": {"$d": None}}, {"A": ["a", "b", "a"]}), # delete and insert at 0 - ({"A": ["a", "b", "c"]}, {"A": {"$b": None, "$+[0]": "b"}}, {"A": ["b", "a", "c"]}), + ( + {"A": ["a", "b", "c"]}, + {"A": {"$b": None, "$+[0]": "b"}}, + {"A": ["b", "a", "c"]}, + ), # delete and edit - ({"A": ["a", "b", "a"]}, {"A": {"$a": None, "$[1]": {"c": "d"}}}, {"A": [{"c": "d"}]}), + ( + {"A": ["a", "b", "a"]}, + {"A": {"$a": None, "$[1]": {"c": "d"}}}, + {"A": [{"c": "d"}]}, + ), # insert if not exist ({"A": ["a", "b", "c"]}, {"A": {"$+b": "b"}}, {"A": ["a", "b", "c"]}), ({"A": ["a", "b", "c"]}, {"A": {"$+d": "f"}}, {"A": ["a", "b", "c", "f"]}), # edit by filter - ({"A": ["a", "b", "a"]}, {"A": {"$b": {"c": "d"}}}, {"A": ["a", {"c": "d"}, "a"]}), - ({"A": ["a", "b", "a"]}, {"A": {"$b": None, "$+[0]": "b", "$+": "c"}}, {"A": ["b", "a", "a", "c"]}), + ( + {"A": ["a", "b", "a"]}, + {"A": {"$b": {"c": "d"}}}, + {"A": ["a", {"c": "d"}, "a"]}, + ), + ( + {"A": ["a", "b", "a"]}, + {"A": {"$b": None, "$+[0]": "b", "$+": "c"}}, + {"A": ["b", "a", "a", "c"]}, + ), ({"A": ["a", "b", "a"]}, {"A": {"$c": None}}, {"A": ["a", "b", "a"]}), # index deletion out of range ({"A": ["a", "b", "a"]}, {"A": {"$[5]": None}}, {"A": ["a", "b", "a"]}), # nested array->dict - ({"A": ["a", "b", {"id": "1", "c": {"d": 
2}}]}, {"A": {"$id: '1'": {"h": None, "c": {"d": "e", "f": "g"}}}}, - {"A": ["a", "b", {"id": "1", "c": {"d": "e", "f": "g"}}]}), - ({"A": [{"id": 1, "c": {"d": 2}}, {"id": 1, "c": {"f": []}}]}, - {"A": {"$id: 1": {"h": None, "c": {"d": "e", "f": "g"}}}}, - {"A": [{"id": 1, "c": {"d": "e", "f": "g"}}, {"id": 1, "c": {"d": "e", "f": "g"}}]}), + ( + {"A": ["a", "b", {"id": "1", "c": {"d": 2}}]}, + {"A": {"$id: '1'": {"h": None, "c": {"d": "e", "f": "g"}}}}, + {"A": ["a", "b", {"id": "1", "c": {"d": "e", "f": "g"}}]}, + ), + ( + {"A": [{"id": 1, "c": {"d": 2}}, {"id": 1, "c": {"f": []}}]}, + {"A": {"$id: 1": {"h": None, "c": {"d": "e", "f": "g"}}}}, + { + "A": [ + {"id": 1, "c": {"d": "e", "f": "g"}}, + {"id": 1, "c": {"d": "e", "f": "g"}}, + ] + }, + ), # nested array->array - ({"A": ["a", "b", ["a", "b"]]}, {"A": {"$b": None, "$[2]": {"$b": {}, "$+": "c"}}}, - {"A": ["a", ["a", {}, "c"]]}), + ( + {"A": ["a", "b", ["a", "b"]]}, + {"A": {"$b": None, "$[2]": {"$b": {}, "$+": "c"}}}, + {"A": ["a", ["a", {}, "c"]]}, + ), # types str and int different, so not found - ({"A": ["a", {"id": "1", "c": "d"}]}, {"A": {"$id: 1": {"c": "e"}}}, {"A": ["a", {"id": "1", "c": "d"}]}), - + ( + {"A": ["a", {"id": "1", "c": "d"}]}, + {"A": {"$id: 1": {"c": "e"}}}, + {"A": ["a", {"id": "1", "c": "d"}]}, + ), ) for t in TEST: print(t) @@ -244,7 +320,10 @@ class TestDeepUpdate(unittest.TestCase): # index edition out of range ({"A": ["a", "b", "a"]}, {"A": {"$[5]": 6}}), # conflict, two editions on index 2 - ({"A": ["a", {"id": "1", "c": "d"}]}, {"A": {"$id: '1'": {"c": "e"}, "$c: d": {"c": "f"}}}), + ( + {"A": ["a", {"id": "1", "c": "d"}]}, + {"A": {"$id: '1'": {"c": "e"}, "$c: d": {"c": "f"}}}, + ), ) for t in TEST: print(t) @@ -255,5 +334,5 @@ class TestDeepUpdate(unittest.TestCase): print(e) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/osm_common/tests/test_dbmemory.py b/osm_common/tests/test_dbmemory.py index 344d3dd..3c45527 100644 --- a/osm_common/tests/test_dbmemory.py +++ b/osm_common/tests/test_dbmemory.py @@ -28,7 +28,7 @@ from osm_common.dbbase import DbException from osm_common.dbmemory import DbMemory from copy import deepcopy -__author__ = 'Eduardo Sousa ' +__author__ = "Eduardo Sousa " @pytest.fixture(scope="function", params=[True, False]) @@ -52,21 +52,40 @@ def db_memory_with_data(request): def db_memory_with_many_data(request): db = DbMemory(lock=False) - db.create_list("test", [ - {"_id": 1, "data": {"data2": {"data3": 1}}, "list": [{"a": 1}], "text": "sometext"}, - {"_id": 2, "data": {"data2": {"data3": 2}}, "list": [{"a": 2}], "list2": [1, 2, 3]}, - {"_id": 3, "data": {"data2": {"data3": 3}}, "list": [{"a": 3}]}, - {"_id": 4, "data": {"data2": {"data3": 4}}, "list": [{"a": 4}, {"a": 0}]}, - {"_id": 5, "data": {"data2": {"data3": 5}}, "list": [{"a": 5}]}, - {"_id": 6, "data": {"data2": {"data3": 6}}, "list": [{"0": {"a": 1}}]}, - {"_id": 7, "data": {"data2": {"data3": 7}}, "0": {"a": 0}}, - {"_id": 8, "list": [{"a": 3, "b": 0, "c": [{"a": 3, "b": 1}, {"a": 0, "b": "v"}]}, {"a": 0, "b": 1}]}, - ]) + db.create_list( + "test", + [ + { + "_id": 1, + "data": {"data2": {"data3": 1}}, + "list": [{"a": 1}], + "text": "sometext", + }, + { + "_id": 2, + "data": {"data2": {"data3": 2}}, + "list": [{"a": 2}], + "list2": [1, 2, 3], + }, + {"_id": 3, "data": {"data2": {"data3": 3}}, "list": [{"a": 3}]}, + {"_id": 4, "data": {"data2": {"data3": 4}}, "list": [{"a": 4}, {"a": 0}]}, + {"_id": 5, "data": {"data2": {"data3": 5}}, "list": [{"a": 5}]}, + {"_id": 6, "data": 
{"data2": {"data3": 6}}, "list": [{"0": {"a": 1}}]}, + {"_id": 7, "data": {"data2": {"data3": 7}}, "0": {"a": 0}}, + { + "_id": 8, + "list": [ + {"a": 3, "b": 0, "c": [{"a": 3, "b": 1}, {"a": 0, "b": "v"}]}, + {"a": 0, "b": 1}, + ], + }, + ], + ) return db def empty_exception_message(): - return 'database exception ' + return "database exception " def get_one_exception_message(db_filter): @@ -74,7 +93,9 @@ def get_one_exception_message(db_filter): def get_one_multiple_exception_message(db_filter): - return "database exception Found more than one entry with filter='{}'".format(db_filter) + return "database exception Found more than one entry with filter='{}'".format( + db_filter + ) def del_one_exception_message(db_filter): @@ -87,20 +108,20 @@ def replace_exception_message(value): def test_constructor(): db = DbMemory() - assert db.logger == logging.getLogger('db') + assert db.logger == logging.getLogger("db") assert db.db == {} def test_constructor_with_logger(): - logger_name = 'db_local' + logger_name = "db_local" db = DbMemory(logger_name=logger_name) assert db.logger == logging.getLogger(logger_name) assert db.db == {} def test_db_connect(): - logger_name = 'db_local' - config = {'logger_name': logger_name} + logger_name = "db_local" + config = {"logger_name": logger_name} db = DbMemory() db.db_connect(config) assert db.logger == logging.getLogger(logger_name) @@ -111,32 +132,46 @@ def test_db_disconnect(db_memory): db_memory.db_disconnect() -@pytest.mark.parametrize("table, db_filter", [ - ("test", {}), - ("test", {"_id": 1}), - ("test", {"data": 1}), - ("test", {"_id": 1, "data": 1})]) +@pytest.mark.parametrize( + "table, db_filter", + [ + ("test", {}), + ("test", {"_id": 1}), + ("test", {"data": 1}), + ("test", {"_id": 1, "data": 1}), + ], +) def test_get_list_with_empty_db(db_memory, table, db_filter): result = db_memory.get_list(table, db_filter) assert len(result) == 0 -@pytest.mark.parametrize("table, db_filter, expected_data", [ - ("test", {}, [{"_id": 1, "data": 1}, {"_id": 2, "data": 2}, {"_id": 3, "data": 3}]), - ("test", {"_id": 1}, [{"_id": 1, "data": 1}]), - ("test", {"data": 1}, [{"_id": 1, "data": 1}]), - ("test", {"_id": 1, "data": 1}, [{"_id": 1, "data": 1}]), - ("test", {"_id": 2}, [{"_id": 2, "data": 2}]), - ("test", {"data": 2}, [{"_id": 2, "data": 2}]), - ("test", {"_id": 2, "data": 2}, [{"_id": 2, "data": 2}]), - ("test", {"_id": 4}, []), - ("test", {"data": 4}, []), - ("test", {"_id": 4, "data": 4}, []), - ("test_table", {}, []), - ("test_table", {"_id": 1}, []), - ("test_table", {"data": 1}, []), - ("test_table", {"_id": 1, "data": 1}, [])]) -def test_get_list_with_non_empty_db(db_memory_with_data, table, db_filter, expected_data): +@pytest.mark.parametrize( + "table, db_filter, expected_data", + [ + ( + "test", + {}, + [{"_id": 1, "data": 1}, {"_id": 2, "data": 2}, {"_id": 3, "data": 3}], + ), + ("test", {"_id": 1}, [{"_id": 1, "data": 1}]), + ("test", {"data": 1}, [{"_id": 1, "data": 1}]), + ("test", {"_id": 1, "data": 1}, [{"_id": 1, "data": 1}]), + ("test", {"_id": 2}, [{"_id": 2, "data": 2}]), + ("test", {"data": 2}, [{"_id": 2, "data": 2}]), + ("test", {"_id": 2, "data": 2}, [{"_id": 2, "data": 2}]), + ("test", {"_id": 4}, []), + ("test", {"data": 4}, []), + ("test", {"_id": 4, "data": 4}, []), + ("test_table", {}, []), + ("test_table", {"_id": 1}, []), + ("test_table", {"data": 1}, []), + ("test_table", {"_id": 1, "data": 1}, []), + ], +) +def test_get_list_with_non_empty_db( + db_memory_with_data, table, db_filter, expected_data +): result = 
db_memory_with_data.get_list(table, db_filter) assert len(result) == len(expected_data) for data in expected_data: @@ -144,7 +179,7 @@ def test_get_list_with_non_empty_db(db_memory_with_data, table, db_filter, expec def test_get_list_exception(db_memory_with_data): - table = 'test' + table = "test" db_filter = {} db_memory_with_data._find = MagicMock(side_effect=Exception()) with pytest.raises(DbException) as excinfo: @@ -153,16 +188,20 @@ def test_get_list_exception(db_memory_with_data): assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND -@pytest.mark.parametrize("table, db_filter, expected_data", [ - ("test", {"_id": 1}, {"_id": 1, "data": 1}), - ("test", {"_id": 2}, {"_id": 2, "data": 2}), - ("test", {"_id": 3}, {"_id": 3, "data": 3}), - ("test", {"data": 1}, {"_id": 1, "data": 1}), - ("test", {"data": 2}, {"_id": 2, "data": 2}), - ("test", {"data": 3}, {"_id": 3, "data": 3}), - ("test", {"_id": 1, "data": 1}, {"_id": 1, "data": 1}), - ("test", {"_id": 2, "data": 2}, {"_id": 2, "data": 2}), - ("test", {"_id": 3, "data": 3}, {"_id": 3, "data": 3})]) +@pytest.mark.parametrize( + "table, db_filter, expected_data", + [ + ("test", {"_id": 1}, {"_id": 1, "data": 1}), + ("test", {"_id": 2}, {"_id": 2, "data": 2}), + ("test", {"_id": 3}, {"_id": 3, "data": 3}), + ("test", {"data": 1}, {"_id": 1, "data": 1}), + ("test", {"data": 2}, {"_id": 2, "data": 2}), + ("test", {"data": 3}, {"_id": 3, "data": 3}), + ("test", {"_id": 1, "data": 1}, {"_id": 1, "data": 1}), + ("test", {"_id": 2, "data": 2}, {"_id": 2, "data": 2}), + ("test", {"_id": 3, "data": 3}, {"_id": 3, "data": 3}), + ], +) def test_get_one(db_memory_with_data, table, db_filter, expected_data): result = db_memory_with_data.get_one(table, db_filter) assert result == expected_data @@ -172,63 +211,83 @@ def test_get_one(db_memory_with_data, table, db_filter, expected_data): assert result in db_memory_with_data.db[table] -@pytest.mark.parametrize("db_filter, expected_ids", [ - ({}, [1, 2, 3, 4, 5, 6, 7, 8]), - ({"_id": 1}, [1]), - ({"data.data2.data3": 2}, [2]), - ({"data.data2.data3.eq": 2}, [2]), - ({"data.data2.data3": [2]}, [2]), - ({"data.data2.data3.cont": [2]}, [2]), - ({"data.data2.data3.neq": 2}, [1, 3, 4, 5, 6, 7, 8]), - ({"data.data2.data3.neq": [2]}, [1, 3, 4, 5, 6, 7, 8]), - ({"data.data2.data3.ncont": [2]}, [1, 3, 4, 5, 6, 7, 8]), - ({"data.data2.data3": [2, 3]}, [2, 3]), - ({"data.data2.data3.gt": 4}, [5, 6, 7]), - ({"data.data2.data3.gte": 4}, [4, 5, 6, 7]), - ({"data.data2.data3.lt": 4}, [1, 2, 3]), - ({"data.data2.data3.lte": 4}, [1, 2, 3, 4]), - ({"data.data2.data3.lte": 4.5}, [1, 2, 3, 4]), - ({"data.data2.data3.gt": "text"}, []), - ({"nonexist.nonexist": "4"}, []), - ({"nonexist.nonexist": None}, [1, 2, 3, 4, 5, 6, 7, 8]), - ({"nonexist.nonexist.neq": "4"}, [1, 2, 3, 4, 5, 6, 7, 8]), - ({"nonexist.nonexist.neq": None}, []), - ({"text.eq": "sometext"}, [1]), - ({"text.neq": "sometext"}, [2, 3, 4, 5, 6, 7, 8]), - ({"text.eq": "somet"}, []), - ({"text.gte": "a"}, [1]), - ({"text.gte": "somet"}, [1]), - ({"text.gte": "sometext"}, [1]), - ({"text.lt": "somet"}, []), - ({"data.data2.data3": 2, "data.data2.data4": None}, [2]), - ({"data.data2.data3": 2, "data.data2.data4": 5}, []), - ({"data.data2.data3": 4}, [4]), - ({"data.data2.data3": [3, 4, "e"]}, [3, 4]), - ({"data.data2.data3": None}, [8]), - ({"data.data2": "4"}, []), - ({"list.0.a": 1}, [1, 6]), - ({"list2": 1}, [2]), - ({"list2": [1, 5]}, [2]), - ({"list2": [1, 2]}, [2]), - ({"list2": [5, 7]}, []), - ({"list.ANYINDEX.a": 1}, [1]), - ({"list.a": 3, "list.b": 
1}, [8]), - ({"list.ANYINDEX.a": 3, "list.ANYINDEX.b": 1}, []), - ({"list.ANYINDEX.a": 3, "list.ANYINDEX.c.a": 3}, [8]), - ({"list.ANYINDEX.a": 3, "list.ANYINDEX.b": 0}, [8]), - ({"list.ANYINDEX.a": 3, "list.ANYINDEX.c.ANYINDEX.a": 0, "list.ANYINDEX.c.ANYINDEX.b": "v"}, [8]), - ({"list.ANYINDEX.a": 3, "list.ANYINDEX.c.ANYINDEX.a": 0, "list.ANYINDEX.c.ANYINDEX.b": 1}, []), - ({"list.c.b": 1}, [8]), - ({"list.c.b": None}, [1, 2, 3, 4, 5, 6, 7]), - # ({"data.data2.data3": 4}, []), - # ({"data.data2.data3": 4}, []), -]) +@pytest.mark.parametrize( + "db_filter, expected_ids", + [ + ({}, [1, 2, 3, 4, 5, 6, 7, 8]), + ({"_id": 1}, [1]), + ({"data.data2.data3": 2}, [2]), + ({"data.data2.data3.eq": 2}, [2]), + ({"data.data2.data3": [2]}, [2]), + ({"data.data2.data3.cont": [2]}, [2]), + ({"data.data2.data3.neq": 2}, [1, 3, 4, 5, 6, 7, 8]), + ({"data.data2.data3.neq": [2]}, [1, 3, 4, 5, 6, 7, 8]), + ({"data.data2.data3.ncont": [2]}, [1, 3, 4, 5, 6, 7, 8]), + ({"data.data2.data3": [2, 3]}, [2, 3]), + ({"data.data2.data3.gt": 4}, [5, 6, 7]), + ({"data.data2.data3.gte": 4}, [4, 5, 6, 7]), + ({"data.data2.data3.lt": 4}, [1, 2, 3]), + ({"data.data2.data3.lte": 4}, [1, 2, 3, 4]), + ({"data.data2.data3.lte": 4.5}, [1, 2, 3, 4]), + ({"data.data2.data3.gt": "text"}, []), + ({"nonexist.nonexist": "4"}, []), + ({"nonexist.nonexist": None}, [1, 2, 3, 4, 5, 6, 7, 8]), + ({"nonexist.nonexist.neq": "4"}, [1, 2, 3, 4, 5, 6, 7, 8]), + ({"nonexist.nonexist.neq": None}, []), + ({"text.eq": "sometext"}, [1]), + ({"text.neq": "sometext"}, [2, 3, 4, 5, 6, 7, 8]), + ({"text.eq": "somet"}, []), + ({"text.gte": "a"}, [1]), + ({"text.gte": "somet"}, [1]), + ({"text.gte": "sometext"}, [1]), + ({"text.lt": "somet"}, []), + ({"data.data2.data3": 2, "data.data2.data4": None}, [2]), + ({"data.data2.data3": 2, "data.data2.data4": 5}, []), + ({"data.data2.data3": 4}, [4]), + ({"data.data2.data3": [3, 4, "e"]}, [3, 4]), + ({"data.data2.data3": None}, [8]), + ({"data.data2": "4"}, []), + ({"list.0.a": 1}, [1, 6]), + ({"list2": 1}, [2]), + ({"list2": [1, 5]}, [2]), + ({"list2": [1, 2]}, [2]), + ({"list2": [5, 7]}, []), + ({"list.ANYINDEX.a": 1}, [1]), + ({"list.a": 3, "list.b": 1}, [8]), + ({"list.ANYINDEX.a": 3, "list.ANYINDEX.b": 1}, []), + ({"list.ANYINDEX.a": 3, "list.ANYINDEX.c.a": 3}, [8]), + ({"list.ANYINDEX.a": 3, "list.ANYINDEX.b": 0}, [8]), + ( + { + "list.ANYINDEX.a": 3, + "list.ANYINDEX.c.ANYINDEX.a": 0, + "list.ANYINDEX.c.ANYINDEX.b": "v", + }, + [8], + ), + ( + { + "list.ANYINDEX.a": 3, + "list.ANYINDEX.c.ANYINDEX.a": 0, + "list.ANYINDEX.c.ANYINDEX.b": 1, + }, + [], + ), + ({"list.c.b": 1}, [8]), + ({"list.c.b": None}, [1, 2, 3, 4, 5, 6, 7]), + # ({"data.data2.data3": 4}, []), + # ({"data.data2.data3": 4}, []), + ], +) def test_get_list(db_memory_with_many_data, db_filter, expected_ids): result = db_memory_with_many_data.get_list("test", db_filter) assert isinstance(result, list) result_ids = [item["_id"] for item in result] - assert len(result) == len(expected_ids), "for db_filter={} result={} expected_ids={}".format(db_filter, result, - result_ids) + assert len(result) == len( + expected_ids + ), "for db_filter={} result={} expected_ids={}".format( + db_filter, result, result_ids + ) assert result_ids == expected_ids for i in range(len(result)): assert result[i] in db_memory_with_many_data.db["test"] @@ -240,9 +299,12 @@ def test_get_list(db_memory_with_many_data, db_filter, expected_ids): assert result == len(expected_ids) -@pytest.mark.parametrize("table, db_filter, expected_data", [ - ("test", {}, {"_id": 1, "data": 
1})]) -def test_get_one_with_multiple_results(db_memory_with_data, table, db_filter, expected_data): +@pytest.mark.parametrize( + "table, db_filter, expected_data", [("test", {}, {"_id": 1, "data": 1})] +) +def test_get_one_with_multiple_results( + db_memory_with_data, table, db_filter, expected_data +): result = db_memory_with_data.get_one(table, db_filter, fail_on_more=False) assert result == expected_data assert len(db_memory_with_data.db) == 1 @@ -256,64 +318,86 @@ def test_get_one_with_multiple_results_exception(db_memory_with_data): db_filter = {} with pytest.raises(DbException) as excinfo: db_memory_with_data.get_one(table, db_filter) - assert str(excinfo.value) == (empty_exception_message() + get_one_multiple_exception_message(db_filter)) + assert str(excinfo.value) == ( + empty_exception_message() + get_one_multiple_exception_message(db_filter) + ) # assert excinfo.value.http_code == http.HTTPStatus.CONFLICT -@pytest.mark.parametrize("table, db_filter", [ - ("test", {"_id": 4}), - ("test", {"data": 4}), - ("test", {"_id": 4, "data": 4}), - ("test_table", {"_id": 4}), - ("test_table", {"data": 4}), - ("test_table", {"_id": 4, "data": 4})]) +@pytest.mark.parametrize( + "table, db_filter", + [ + ("test", {"_id": 4}), + ("test", {"data": 4}), + ("test", {"_id": 4, "data": 4}), + ("test_table", {"_id": 4}), + ("test_table", {"data": 4}), + ("test_table", {"_id": 4, "data": 4}), + ], +) def test_get_one_with_non_empty_db_exception(db_memory_with_data, table, db_filter): with pytest.raises(DbException) as excinfo: db_memory_with_data.get_one(table, db_filter) - assert str(excinfo.value) == (empty_exception_message() + get_one_exception_message(db_filter)) + assert str(excinfo.value) == ( + empty_exception_message() + get_one_exception_message(db_filter) + ) assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND -@pytest.mark.parametrize("table, db_filter", [ - ("test", {"_id": 4}), - ("test", {"data": 4}), - ("test", {"_id": 4, "data": 4}), - ("test_table", {"_id": 4}), - ("test_table", {"data": 4}), - ("test_table", {"_id": 4, "data": 4})]) +@pytest.mark.parametrize( + "table, db_filter", + [ + ("test", {"_id": 4}), + ("test", {"data": 4}), + ("test", {"_id": 4, "data": 4}), + ("test_table", {"_id": 4}), + ("test_table", {"data": 4}), + ("test_table", {"_id": 4, "data": 4}), + ], +) def test_get_one_with_non_empty_db_none(db_memory_with_data, table, db_filter): result = db_memory_with_data.get_one(table, db_filter, fail_on_empty=False) assert result is None -@pytest.mark.parametrize("table, db_filter", [ - ("test", {"_id": 4}), - ("test", {"data": 4}), - ("test", {"_id": 4, "data": 4}), - ("test_table", {"_id": 4}), - ("test_table", {"data": 4}), - ("test_table", {"_id": 4, "data": 4})]) +@pytest.mark.parametrize( + "table, db_filter", + [ + ("test", {"_id": 4}), + ("test", {"data": 4}), + ("test", {"_id": 4, "data": 4}), + ("test_table", {"_id": 4}), + ("test_table", {"data": 4}), + ("test_table", {"_id": 4, "data": 4}), + ], +) def test_get_one_with_empty_db_exception(db_memory, table, db_filter): with pytest.raises(DbException) as excinfo: db_memory.get_one(table, db_filter) - assert str(excinfo.value) == (empty_exception_message() + get_one_exception_message(db_filter)) + assert str(excinfo.value) == ( + empty_exception_message() + get_one_exception_message(db_filter) + ) assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND -@pytest.mark.parametrize("table, db_filter", [ - ("test", {"_id": 4}), - ("test", {"data": 4}), - ("test", {"_id": 4, "data": 4}), - ("test_table", 
{"_id": 4}), - ("test_table", {"data": 4}), - ("test_table", {"_id": 4, "data": 4})]) +@pytest.mark.parametrize( + "table, db_filter", + [ + ("test", {"_id": 4}), + ("test", {"data": 4}), + ("test", {"_id": 4, "data": 4}), + ("test_table", {"_id": 4}), + ("test_table", {"data": 4}), + ("test_table", {"_id": 4, "data": 4}), + ], +) def test_get_one_with_empty_db_none(db_memory, table, db_filter): result = db_memory.get_one(table, db_filter, fail_on_empty=False) assert result is None def test_get_one_generic_exception(db_memory_with_data): - table = 'test' + table = "test" db_filter = {} db_memory_with_data._find = MagicMock(side_effect=Exception()) with pytest.raises(DbException) as excinfo: @@ -322,13 +406,19 @@ def test_get_one_generic_exception(db_memory_with_data): assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND -@pytest.mark.parametrize("table, db_filter, expected_data", [ - ("test", {}, []), - ("test", {"_id": 1}, [{"_id": 2, "data": 2}, {"_id": 3, "data": 3}]), - ("test", {"_id": 2}, [{"_id": 1, "data": 1}, {"_id": 3, "data": 3}]), - ("test", {"_id": 1, "data": 1}, [{"_id": 2, "data": 2}, {"_id": 3, "data": 3}]), - ("test", {"_id": 2, "data": 2}, [{"_id": 1, "data": 1}, {"_id": 3, "data": 3}])]) -def test_del_list_with_non_empty_db(db_memory_with_data, table, db_filter, expected_data): +@pytest.mark.parametrize( + "table, db_filter, expected_data", + [ + ("test", {}, []), + ("test", {"_id": 1}, [{"_id": 2, "data": 2}, {"_id": 3, "data": 3}]), + ("test", {"_id": 2}, [{"_id": 1, "data": 1}, {"_id": 3, "data": 3}]), + ("test", {"_id": 1, "data": 1}, [{"_id": 2, "data": 2}, {"_id": 3, "data": 3}]), + ("test", {"_id": 2, "data": 2}, [{"_id": 1, "data": 1}, {"_id": 3, "data": 3}]), + ], +) +def test_del_list_with_non_empty_db( + db_memory_with_data, table, db_filter, expected_data +): result = db_memory_with_data.del_list(table, db_filter) assert result["deleted"] == (3 - len(expected_data)) assert len(db_memory_with_data.db) == 1 @@ -338,21 +428,25 @@ def test_del_list_with_non_empty_db(db_memory_with_data, table, db_filter, expec assert data in db_memory_with_data.db[table] -@pytest.mark.parametrize("table, db_filter", [ - ("test", {}), - ("test", {"_id": 1}), - ("test", {"_id": 2}), - ("test", {"data": 1}), - ("test", {"data": 2}), - ("test", {"_id": 1, "data": 1}), - ("test", {"_id": 2, "data": 2})]) +@pytest.mark.parametrize( + "table, db_filter", + [ + ("test", {}), + ("test", {"_id": 1}), + ("test", {"_id": 2}), + ("test", {"data": 1}), + ("test", {"data": 2}), + ("test", {"_id": 1, "data": 1}), + ("test", {"_id": 2, "data": 2}), + ], +) def test_del_list_with_empty_db(db_memory, table, db_filter): result = db_memory.del_list(table, db_filter) - assert result['deleted'] == 0 + assert result["deleted"] == 0 def test_del_list_generic_exception(db_memory_with_data): - table = 'test' + table = "test" db_filter = {} db_memory_with_data._find = MagicMock(side_effect=Exception()) with pytest.raises(DbException) as excinfo: @@ -361,14 +455,18 @@ def test_del_list_generic_exception(db_memory_with_data): assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND -@pytest.mark.parametrize("table, db_filter, data", [ - ("test", {}, {"_id": 1, "data": 1}), - ("test", {"_id": 1}, {"_id": 1, "data": 1}), - ("test", {"data": 1}, {"_id": 1, "data": 1}), - ("test", {"_id": 1, "data": 1}, {"_id": 1, "data": 1}), - ("test", {"_id": 2}, {"_id": 2, "data": 2}), - ("test", {"data": 2}, {"_id": 2, "data": 2}), - ("test", {"_id": 2, "data": 2}, {"_id": 2, "data": 2})]) 
+@pytest.mark.parametrize( + "table, db_filter, data", + [ + ("test", {}, {"_id": 1, "data": 1}), + ("test", {"_id": 1}, {"_id": 1, "data": 1}), + ("test", {"data": 1}, {"_id": 1, "data": 1}), + ("test", {"_id": 1, "data": 1}, {"_id": 1, "data": 1}), + ("test", {"_id": 2}, {"_id": 2, "data": 2}), + ("test", {"data": 2}, {"_id": 2, "data": 2}), + ("test", {"_id": 2, "data": 2}, {"_id": 2, "data": 2}), + ], +) def test_del_one(db_memory_with_data, table, db_filter, data): result = db_memory_with_data.del_one(table, db_filter) assert result == {"deleted": 1} @@ -378,93 +476,111 @@ def test_del_one(db_memory_with_data, table, db_filter, data): assert data not in db_memory_with_data.db[table] -@pytest.mark.parametrize("table, db_filter", [ - ("test", {}), - ("test", {"_id": 1}), - ("test", {"_id": 2}), - ("test", {"data": 1}), - ("test", {"data": 2}), - ("test", {"_id": 1, "data": 1}), - ("test", {"_id": 2, "data": 2}), - ("test_table", {}), - ("test_table", {"_id": 1}), - ("test_table", {"_id": 2}), - ("test_table", {"data": 1}), - ("test_table", {"data": 2}), - ("test_table", {"_id": 1, "data": 1}), - ("test_table", {"_id": 2, "data": 2})]) +@pytest.mark.parametrize( + "table, db_filter", + [ + ("test", {}), + ("test", {"_id": 1}), + ("test", {"_id": 2}), + ("test", {"data": 1}), + ("test", {"data": 2}), + ("test", {"_id": 1, "data": 1}), + ("test", {"_id": 2, "data": 2}), + ("test_table", {}), + ("test_table", {"_id": 1}), + ("test_table", {"_id": 2}), + ("test_table", {"data": 1}), + ("test_table", {"data": 2}), + ("test_table", {"_id": 1, "data": 1}), + ("test_table", {"_id": 2, "data": 2}), + ], +) def test_del_one_with_empty_db_exception(db_memory, table, db_filter): with pytest.raises(DbException) as excinfo: db_memory.del_one(table, db_filter) - assert str(excinfo.value) == (empty_exception_message() + del_one_exception_message(db_filter)) + assert str(excinfo.value) == ( + empty_exception_message() + del_one_exception_message(db_filter) + ) assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND -@pytest.mark.parametrize("table, db_filter", [ - ("test", {}), - ("test", {"_id": 1}), - ("test", {"_id": 2}), - ("test", {"data": 1}), - ("test", {"data": 2}), - ("test", {"_id": 1, "data": 1}), - ("test", {"_id": 2, "data": 2}), - ("test_table", {}), - ("test_table", {"_id": 1}), - ("test_table", {"_id": 2}), - ("test_table", {"data": 1}), - ("test_table", {"data": 2}), - ("test_table", {"_id": 1, "data": 1}), - ("test_table", {"_id": 2, "data": 2})]) +@pytest.mark.parametrize( + "table, db_filter", + [ + ("test", {}), + ("test", {"_id": 1}), + ("test", {"_id": 2}), + ("test", {"data": 1}), + ("test", {"data": 2}), + ("test", {"_id": 1, "data": 1}), + ("test", {"_id": 2, "data": 2}), + ("test_table", {}), + ("test_table", {"_id": 1}), + ("test_table", {"_id": 2}), + ("test_table", {"data": 1}), + ("test_table", {"data": 2}), + ("test_table", {"_id": 1, "data": 1}), + ("test_table", {"_id": 2, "data": 2}), + ], +) def test_del_one_with_empty_db_none(db_memory, table, db_filter): result = db_memory.del_one(table, db_filter, fail_on_empty=False) assert result is None -@pytest.mark.parametrize("table, db_filter", [ - ("test", {"_id": 4}), - ("test", {"_id": 5}), - ("test", {"data": 4}), - ("test", {"data": 5}), - ("test", {"_id": 1, "data": 2}), - ("test", {"_id": 2, "data": 3}), - ("test_table", {}), - ("test_table", {"_id": 1}), - ("test_table", {"_id": 2}), - ("test_table", {"data": 1}), - ("test_table", {"data": 2}), - ("test_table", {"_id": 1, "data": 1}), - ("test_table", {"_id": 2, 
"data": 2})]) +@pytest.mark.parametrize( + "table, db_filter", + [ + ("test", {"_id": 4}), + ("test", {"_id": 5}), + ("test", {"data": 4}), + ("test", {"data": 5}), + ("test", {"_id": 1, "data": 2}), + ("test", {"_id": 2, "data": 3}), + ("test_table", {}), + ("test_table", {"_id": 1}), + ("test_table", {"_id": 2}), + ("test_table", {"data": 1}), + ("test_table", {"data": 2}), + ("test_table", {"_id": 1, "data": 1}), + ("test_table", {"_id": 2, "data": 2}), + ], +) def test_del_one_with_non_empty_db_exception(db_memory_with_data, table, db_filter): with pytest.raises(DbException) as excinfo: db_memory_with_data.del_one(table, db_filter) - assert str(excinfo.value) == (empty_exception_message() + del_one_exception_message(db_filter)) + assert str(excinfo.value) == ( + empty_exception_message() + del_one_exception_message(db_filter) + ) assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND -@pytest.mark.parametrize("table, db_filter", [ - ("test", {"_id": 4}), - ("test", {"_id": 5}), - ("test", {"data": 4}), - ("test", {"data": 5}), - ("test", {"_id": 1, "data": 2}), - ("test", {"_id": 2, "data": 3}), - ("test_table", {}), - ("test_table", {"_id": 1}), - ("test_table", {"_id": 2}), - ("test_table", {"data": 1}), - ("test_table", {"data": 2}), - ("test_table", {"_id": 1, "data": 1}), - ("test_table", {"_id": 2, "data": 2})]) +@pytest.mark.parametrize( + "table, db_filter", + [ + ("test", {"_id": 4}), + ("test", {"_id": 5}), + ("test", {"data": 4}), + ("test", {"data": 5}), + ("test", {"_id": 1, "data": 2}), + ("test", {"_id": 2, "data": 3}), + ("test_table", {}), + ("test_table", {"_id": 1}), + ("test_table", {"_id": 2}), + ("test_table", {"data": 1}), + ("test_table", {"data": 2}), + ("test_table", {"_id": 1, "data": 1}), + ("test_table", {"_id": 2, "data": 2}), + ], +) def test_del_one_with_non_empty_db_none(db_memory_with_data, table, db_filter): result = db_memory_with_data.del_one(table, db_filter, fail_on_empty=False) assert result is None -@pytest.mark.parametrize("fail_on_empty", [ - (True), - (False)]) +@pytest.mark.parametrize("fail_on_empty", [(True), (False)]) def test_del_one_generic_exception(db_memory_with_data, fail_on_empty): - table = 'test' + table = "test" db_filter = {} db_memory_with_data._find = MagicMock(side_effect=Exception()) with pytest.raises(DbException) as excinfo: @@ -473,16 +589,20 @@ def test_del_one_generic_exception(db_memory_with_data, fail_on_empty): assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND -@pytest.mark.parametrize("table, _id, indata", [ - ("test", 1, {"_id": 1, "data": 42}), - ("test", 1, {"_id": 1, "data": 42, "kk": 34}), - ("test", 1, {"_id": 1}), - ("test", 2, {"_id": 2, "data": 42}), - ("test", 2, {"_id": 2, "data": 42, "kk": 34}), - ("test", 2, {"_id": 2}), - ("test", 3, {"_id": 3, "data": 42}), - ("test", 3, {"_id": 3, "data": 42, "kk": 34}), - ("test", 3, {"_id": 3})]) +@pytest.mark.parametrize( + "table, _id, indata", + [ + ("test", 1, {"_id": 1, "data": 42}), + ("test", 1, {"_id": 1, "data": 42, "kk": 34}), + ("test", 1, {"_id": 1}), + ("test", 2, {"_id": 2, "data": 42}), + ("test", 2, {"_id": 2, "data": 42, "kk": 34}), + ("test", 2, {"_id": 2}), + ("test", 3, {"_id": 3, "data": 42}), + ("test", 3, {"_id": 3, "data": 42, "kk": 34}), + ("test", 3, {"_id": 3}), + ], +) def test_replace(db_memory_with_data, table, _id, indata): result = db_memory_with_data.replace(table, _id, indata) assert result == {"updated": 1} @@ -492,10 +612,14 @@ def test_replace(db_memory_with_data, table, _id, indata): assert indata in 
db_memory_with_data.db[table] -@pytest.mark.parametrize("table, _id, indata", [ - ("test", 1, {"_id": 1, "data": 42}), - ("test", 2, {"_id": 2}), - ("test", 3, {"_id": 3})]) +@pytest.mark.parametrize( + "table, _id, indata", + [ + ("test", 1, {"_id": 1, "data": 42}), + ("test", 2, {"_id": 2}), + ("test", 3, {"_id": 3}), + ], +) def test_replace_without_data_exception(db_memory, table, _id, indata): with pytest.raises(DbException) as excinfo: db_memory.replace(table, _id, indata, fail_on_empty=True) @@ -503,19 +627,27 @@ def test_replace_without_data_exception(db_memory, table, _id, indata): assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND -@pytest.mark.parametrize("table, _id, indata", [ - ("test", 1, {"_id": 1, "data": 42}), - ("test", 2, {"_id": 2}), - ("test", 3, {"_id": 3})]) +@pytest.mark.parametrize( + "table, _id, indata", + [ + ("test", 1, {"_id": 1, "data": 42}), + ("test", 2, {"_id": 2}), + ("test", 3, {"_id": 3}), + ], +) def test_replace_without_data_none(db_memory, table, _id, indata): result = db_memory.replace(table, _id, indata, fail_on_empty=False) assert result is None -@pytest.mark.parametrize("table, _id, indata", [ - ("test", 11, {"_id": 11, "data": 42}), - ("test", 12, {"_id": 12}), - ("test", 33, {"_id": 33})]) +@pytest.mark.parametrize( + "table, _id, indata", + [ + ("test", 11, {"_id": 11, "data": 42}), + ("test", 12, {"_id": 12}), + ("test", 33, {"_id": 33}), + ], +) def test_replace_with_data_exception(db_memory_with_data, table, _id, indata): with pytest.raises(DbException) as excinfo: db_memory_with_data.replace(table, _id, indata, fail_on_empty=True) @@ -523,22 +655,24 @@ def test_replace_with_data_exception(db_memory_with_data, table, _id, indata): assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND -@pytest.mark.parametrize("table, _id, indata", [ - ("test", 11, {"_id": 11, "data": 42}), - ("test", 12, {"_id": 12}), - ("test", 33, {"_id": 33})]) +@pytest.mark.parametrize( + "table, _id, indata", + [ + ("test", 11, {"_id": 11, "data": 42}), + ("test", 12, {"_id": 12}), + ("test", 33, {"_id": 33}), + ], +) def test_replace_with_data_none(db_memory_with_data, table, _id, indata): result = db_memory_with_data.replace(table, _id, indata, fail_on_empty=False) assert result is None -@pytest.mark.parametrize("fail_on_empty", [ - True, - False]) +@pytest.mark.parametrize("fail_on_empty", [True, False]) def test_replace_generic_exception(db_memory_with_data, fail_on_empty): - table = 'test' + table = "test" _id = {} - indata = {'_id': 1, 'data': 1} + indata = {"_id": 1, "data": 1} db_memory_with_data._find = MagicMock(side_effect=Exception()) with pytest.raises(DbException) as excinfo: db_memory_with_data.replace(table, _id, indata, fail_on_empty=fail_on_empty) @@ -546,26 +680,30 @@ def test_replace_generic_exception(db_memory_with_data, fail_on_empty): assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND -@pytest.mark.parametrize("table, id, data", [ - ("test", "1", {"data": 1}), - ("test", "1", {"data": 2}), - ("test", "2", {"data": 1}), - ("test", "2", {"data": 2}), - ("test_table", "1", {"data": 1}), - ("test_table", "1", {"data": 2}), - ("test_table", "2", {"data": 1}), - ("test_table", "2", {"data": 2}), - ("test", "1", {"data_1": 1, "data_2": 2}), - ("test", "1", {"data_1": 2, "data_2": 1}), - ("test", "2", {"data_1": 1, "data_2": 2}), - ("test", "2", {"data_1": 2, "data_2": 1}), - ("test_table", "1", {"data_1": 1, "data_2": 2}), - ("test_table", "1", {"data_1": 2, "data_2": 1}), - ("test_table", "2", {"data_1": 1, "data_2": 2}), - 
("test_table", "2", {"data_1": 2, "data_2": 1})]) +@pytest.mark.parametrize( + "table, id, data", + [ + ("test", "1", {"data": 1}), + ("test", "1", {"data": 2}), + ("test", "2", {"data": 1}), + ("test", "2", {"data": 2}), + ("test_table", "1", {"data": 1}), + ("test_table", "1", {"data": 2}), + ("test_table", "2", {"data": 1}), + ("test_table", "2", {"data": 2}), + ("test", "1", {"data_1": 1, "data_2": 2}), + ("test", "1", {"data_1": 2, "data_2": 1}), + ("test", "2", {"data_1": 1, "data_2": 2}), + ("test", "2", {"data_1": 2, "data_2": 1}), + ("test_table", "1", {"data_1": 1, "data_2": 2}), + ("test_table", "1", {"data_1": 2, "data_2": 1}), + ("test_table", "2", {"data_1": 1, "data_2": 2}), + ("test_table", "2", {"data_1": 2, "data_2": 1}), + ], +) def test_create_with_empty_db_with_id(db_memory, table, id, data): data_to_insert = data - data_to_insert['_id'] = id + data_to_insert["_id"] = id returned_id = db_memory.create(table, data_to_insert) assert returned_id == id assert len(db_memory.db) == 1 @@ -574,85 +712,97 @@ def test_create_with_empty_db_with_id(db_memory, table, id, data): assert data_to_insert in db_memory.db[table] -@pytest.mark.parametrize("table, id, data", [ - ("test", "4", {"data": 1}), - ("test", "5", {"data": 2}), - ("test", "4", {"data": 1}), - ("test", "5", {"data": 2}), - ("test_table", "4", {"data": 1}), - ("test_table", "5", {"data": 2}), - ("test_table", "4", {"data": 1}), - ("test_table", "5", {"data": 2}), - ("test", "4", {"data_1": 1, "data_2": 2}), - ("test", "5", {"data_1": 2, "data_2": 1}), - ("test", "4", {"data_1": 1, "data_2": 2}), - ("test", "5", {"data_1": 2, "data_2": 1}), - ("test_table", "4", {"data_1": 1, "data_2": 2}), - ("test_table", "5", {"data_1": 2, "data_2": 1}), - ("test_table", "4", {"data_1": 1, "data_2": 2}), - ("test_table", "5", {"data_1": 2, "data_2": 1})]) +@pytest.mark.parametrize( + "table, id, data", + [ + ("test", "4", {"data": 1}), + ("test", "5", {"data": 2}), + ("test", "4", {"data": 1}), + ("test", "5", {"data": 2}), + ("test_table", "4", {"data": 1}), + ("test_table", "5", {"data": 2}), + ("test_table", "4", {"data": 1}), + ("test_table", "5", {"data": 2}), + ("test", "4", {"data_1": 1, "data_2": 2}), + ("test", "5", {"data_1": 2, "data_2": 1}), + ("test", "4", {"data_1": 1, "data_2": 2}), + ("test", "5", {"data_1": 2, "data_2": 1}), + ("test_table", "4", {"data_1": 1, "data_2": 2}), + ("test_table", "5", {"data_1": 2, "data_2": 1}), + ("test_table", "4", {"data_1": 1, "data_2": 2}), + ("test_table", "5", {"data_1": 2, "data_2": 1}), + ], +) def test_create_with_non_empty_db_with_id(db_memory_with_data, table, id, data): data_to_insert = data - data_to_insert['_id'] = id + data_to_insert["_id"] = id returned_id = db_memory_with_data.create(table, data_to_insert) assert returned_id == id - assert len(db_memory_with_data.db) == (1 if table == 'test' else 2) + assert len(db_memory_with_data.db) == (1 if table == "test" else 2) assert table in db_memory_with_data.db - assert len(db_memory_with_data.db[table]) == (4 if table == 'test' else 1) + assert len(db_memory_with_data.db[table]) == (4 if table == "test" else 1) assert data_to_insert in db_memory_with_data.db[table] -@pytest.mark.parametrize("table, data", [ - ("test", {"data": 1}), - ("test", {"data": 2}), - ("test", {"data": 1}), - ("test", {"data": 2}), - ("test_table", {"data": 1}), - ("test_table", {"data": 2}), - ("test_table", {"data": 1}), - ("test_table", {"data": 2}), - ("test", {"data_1": 1, "data_2": 2}), - ("test", {"data_1": 2, "data_2": 1}), - ("test", 
{"data_1": 1, "data_2": 2}), - ("test", {"data_1": 2, "data_2": 1}), - ("test_table", {"data_1": 1, "data_2": 2}), - ("test_table", {"data_1": 2, "data_2": 1}), - ("test_table", {"data_1": 1, "data_2": 2}), - ("test_table", {"data_1": 2, "data_2": 1})]) +@pytest.mark.parametrize( + "table, data", + [ + ("test", {"data": 1}), + ("test", {"data": 2}), + ("test", {"data": 1}), + ("test", {"data": 2}), + ("test_table", {"data": 1}), + ("test_table", {"data": 2}), + ("test_table", {"data": 1}), + ("test_table", {"data": 2}), + ("test", {"data_1": 1, "data_2": 2}), + ("test", {"data_1": 2, "data_2": 1}), + ("test", {"data_1": 1, "data_2": 2}), + ("test", {"data_1": 2, "data_2": 1}), + ("test_table", {"data_1": 1, "data_2": 2}), + ("test_table", {"data_1": 2, "data_2": 1}), + ("test_table", {"data_1": 1, "data_2": 2}), + ("test_table", {"data_1": 2, "data_2": 1}), + ], +) def test_create_with_empty_db_without_id(db_memory, table, data): returned_id = db_memory.create(table, data) assert len(db_memory.db) == 1 assert table in db_memory.db assert len(db_memory.db[table]) == 1 data_inserted = data - data_inserted['_id'] = returned_id + data_inserted["_id"] = returned_id assert data_inserted in db_memory.db[table] -@pytest.mark.parametrize("table, data", [ - ("test", {"data": 1}), - ("test", {"data": 2}), - ("test", {"data": 1}), - ("test", {"data": 2}), - ("test_table", {"data": 1}), - ("test_table", {"data": 2}), - ("test_table", {"data": 1}), - ("test_table", {"data": 2}), - ("test", {"data_1": 1, "data_2": 2}), - ("test", {"data_1": 2, "data_2": 1}), - ("test", {"data_1": 1, "data_2": 2}), - ("test", {"data_1": 2, "data_2": 1}), - ("test_table", {"data_1": 1, "data_2": 2}), - ("test_table", {"data_1": 2, "data_2": 1}), - ("test_table", {"data_1": 1, "data_2": 2}), - ("test_table", {"data_1": 2, "data_2": 1})]) +@pytest.mark.parametrize( + "table, data", + [ + ("test", {"data": 1}), + ("test", {"data": 2}), + ("test", {"data": 1}), + ("test", {"data": 2}), + ("test_table", {"data": 1}), + ("test_table", {"data": 2}), + ("test_table", {"data": 1}), + ("test_table", {"data": 2}), + ("test", {"data_1": 1, "data_2": 2}), + ("test", {"data_1": 2, "data_2": 1}), + ("test", {"data_1": 1, "data_2": 2}), + ("test", {"data_1": 2, "data_2": 1}), + ("test_table", {"data_1": 1, "data_2": 2}), + ("test_table", {"data_1": 2, "data_2": 1}), + ("test_table", {"data_1": 1, "data_2": 2}), + ("test_table", {"data_1": 2, "data_2": 1}), + ], +) def test_create_with_non_empty_db_without_id(db_memory_with_data, table, data): returned_id = db_memory_with_data.create(table, data) - assert len(db_memory_with_data.db) == (1 if table == 'test' else 2) + assert len(db_memory_with_data.db) == (1 if table == "test" else 2) assert table in db_memory_with_data.db - assert len(db_memory_with_data.db[table]) == (4 if table == 'test' else 1) + assert len(db_memory_with_data.db[table]) == (4 if table == "test" else 1) data_inserted = data - data_inserted['_id'] = returned_id + data_inserted["_id"] = returned_id assert data_inserted in db_memory_with_data.db[table] @@ -667,29 +817,82 @@ def test_create_with_exception(db_memory): assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND -@pytest.mark.parametrize("db_content, update_dict, expected, message", [ - ({"a": {"none": None}}, {"a.b.num": "v"}, {"a": {"none": None, "b": {"num": "v"}}}, "create dict"), - ({"a": {"none": None}}, {"a.none.num": "v"}, {"a": {"none": {"num": "v"}}}, "create dict over none"), - ({"a": {"b": {"num": 4}}}, {"a.b.num": "v"}, {"a": {"b": {"num": "v"}}}, 
"replace_number"), - ({"a": {"b": {"num": 4}}}, {"a.b.num.c.d": "v"}, None, "create dict over number should fail"), - ({"a": {"b": {"num": 4}}}, {"a.b": "v"}, {"a": {"b": "v"}}, "replace dict with a string"), - ({"a": {"b": {"num": 4}}}, {"a.b": None}, {"a": {"b": None}}, "replace dict with None"), - ({"a": [{"b": {"num": 4}}]}, {"a.b.num": "v"}, None, "create dict over list should fail"), - ({"a": [{"b": {"num": 4}}]}, {"a.0.b.num": "v"}, {"a": [{"b": {"num": "v"}}]}, "set list"), - ({"a": [{"b": {"num": 4}}]}, {"a.3.b.num": "v"}, - {"a": [{"b": {"num": 4}}, None, None, {"b": {"num": "v"}}]}, "expand list"), - ({"a": [[4]]}, {"a.0.0": "v"}, {"a": [["v"]]}, "set nested list"), - ({"a": [[4]]}, {"a.0.2": "v"}, {"a": [[4, None, "v"]]}, "expand nested list"), - ({"a": [[4]]}, {"a.2.2": "v"}, {"a": [[4], None, {"2": "v"}]}, "expand list and add number key")]) +@pytest.mark.parametrize( + "db_content, update_dict, expected, message", + [ + ( + {"a": {"none": None}}, + {"a.b.num": "v"}, + {"a": {"none": None, "b": {"num": "v"}}}, + "create dict", + ), + ( + {"a": {"none": None}}, + {"a.none.num": "v"}, + {"a": {"none": {"num": "v"}}}, + "create dict over none", + ), + ( + {"a": {"b": {"num": 4}}}, + {"a.b.num": "v"}, + {"a": {"b": {"num": "v"}}}, + "replace_number", + ), + ( + {"a": {"b": {"num": 4}}}, + {"a.b.num.c.d": "v"}, + None, + "create dict over number should fail", + ), + ( + {"a": {"b": {"num": 4}}}, + {"a.b": "v"}, + {"a": {"b": "v"}}, + "replace dict with a string", + ), + ( + {"a": {"b": {"num": 4}}}, + {"a.b": None}, + {"a": {"b": None}}, + "replace dict with None", + ), + ( + {"a": [{"b": {"num": 4}}]}, + {"a.b.num": "v"}, + None, + "create dict over list should fail", + ), + ( + {"a": [{"b": {"num": 4}}]}, + {"a.0.b.num": "v"}, + {"a": [{"b": {"num": "v"}}]}, + "set list", + ), + ( + {"a": [{"b": {"num": 4}}]}, + {"a.3.b.num": "v"}, + {"a": [{"b": {"num": 4}}, None, None, {"b": {"num": "v"}}]}, + "expand list", + ), + ({"a": [[4]]}, {"a.0.0": "v"}, {"a": [["v"]]}, "set nested list"), + ({"a": [[4]]}, {"a.0.2": "v"}, {"a": [[4, None, "v"]]}, "expand nested list"), + ( + {"a": [[4]]}, + {"a.2.2": "v"}, + {"a": [[4], None, {"2": "v"}]}, + "expand list and add number key", + ), + ], +) def test_set_one(db_memory, db_content, update_dict, expected, message): - db_memory._find = Mock(return_value=((0, db_content), )) + db_memory._find = Mock(return_value=((0, db_content),)) if expected is None: with pytest.raises(DbException) as excinfo: db_memory.set_one("table", {}, update_dict) - assert (excinfo.value.http_code == http.HTTPStatus.NOT_FOUND), message + assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND, message else: db_memory.set_one("table", {}, update_dict) - assert (db_content == expected), message + assert db_content == expected, message class TestDbMemory(unittest.TestCase): @@ -697,26 +900,79 @@ class TestDbMemory(unittest.TestCase): def test_set_one(self): test_set = ( # (database content, set-content, expected database content (None=fails), message) - ({"a": {"none": None}}, {"a.b.num": "v"}, {"a": {"none": None, "b": {"num": "v"}}}, "create dict"), - ({"a": {"none": None}}, {"a.none.num": "v"}, {"a": {"none": {"num": "v"}}}, "create dict over none"), - ({"a": {"b": {"num": 4}}}, {"a.b.num": "v"}, {"a": {"b": {"num": "v"}}}, "replace_number"), - ({"a": {"b": {"num": 4}}}, {"a.b.num.c.d": "v"}, None, "create dict over number should fail"), - ({"a": {"b": {"num": 4}}}, {"a.b": "v"}, {"a": {"b": "v"}}, "replace dict with a string"), - ({"a": {"b": {"num": 4}}}, {"a.b": 
None}, {"a": {"b": None}}, "replace dict with None"), - - ({"a": [{"b": {"num": 4}}]}, {"a.b.num": "v"}, None, "create dict over list should fail"), - ({"a": [{"b": {"num": 4}}]}, {"a.0.b.num": "v"}, {"a": [{"b": {"num": "v"}}]}, "set list"), - ({"a": [{"b": {"num": 4}}]}, {"a.3.b.num": "v"}, - {"a": [{"b": {"num": 4}}, None, None, {"b": {"num": "v"}}]}, "expand list"), + ( + {"a": {"none": None}}, + {"a.b.num": "v"}, + {"a": {"none": None, "b": {"num": "v"}}}, + "create dict", + ), + ( + {"a": {"none": None}}, + {"a.none.num": "v"}, + {"a": {"none": {"num": "v"}}}, + "create dict over none", + ), + ( + {"a": {"b": {"num": 4}}}, + {"a.b.num": "v"}, + {"a": {"b": {"num": "v"}}}, + "replace_number", + ), + ( + {"a": {"b": {"num": 4}}}, + {"a.b.num.c.d": "v"}, + None, + "create dict over number should fail", + ), + ( + {"a": {"b": {"num": 4}}}, + {"a.b": "v"}, + {"a": {"b": "v"}}, + "replace dict with a string", + ), + ( + {"a": {"b": {"num": 4}}}, + {"a.b": None}, + {"a": {"b": None}}, + "replace dict with None", + ), + ( + {"a": [{"b": {"num": 4}}]}, + {"a.b.num": "v"}, + None, + "create dict over list should fail", + ), + ( + {"a": [{"b": {"num": 4}}]}, + {"a.0.b.num": "v"}, + {"a": [{"b": {"num": "v"}}]}, + "set list", + ), + ( + {"a": [{"b": {"num": 4}}]}, + {"a.3.b.num": "v"}, + {"a": [{"b": {"num": 4}}, None, None, {"b": {"num": "v"}}]}, + "expand list", + ), ({"a": [[4]]}, {"a.0.0": "v"}, {"a": [["v"]]}, "set nested list"), - ({"a": [[4]]}, {"a.0.2": "v"}, {"a": [[4, None, "v"]]}, "expand nested list"), - ({"a": [[4]]}, {"a.2.2": "v"}, {"a": [[4], None, {"2": "v"}]}, "expand list and add number key"), + ( + {"a": [[4]]}, + {"a.0.2": "v"}, + {"a": [[4, None, "v"]]}, + "expand nested list", + ), + ( + {"a": [[4]]}, + {"a.2.2": "v"}, + {"a": [[4], None, {"2": "v"}]}, + "expand list and add number key", + ), ({"a": None}, {"b.c": "v"}, {"a": None, "b": {"c": "v"}}, "expand at root"), ) db_men = DbMemory() db_men._find = Mock() for db_content, update_dict, expected, message in test_set: - db_men._find.return_value = ((0, db_content), ) + db_men._find.return_value = ((0, db_content),) if expected is None: self.assertRaises(DbException, db_men.set_one, "table", {}, update_dict) else: @@ -740,9 +996,17 @@ class TestDbMemory(unittest.TestCase): db_men._find = Mock() for db_content, pull_dict, expected, message in test_set: db_content = deepcopy(db_content) - db_men._find.return_value = ((0, db_content), ) + db_men._find.return_value = ((0, db_content),) if expected is None: - self.assertRaises(DbException, db_men.set_one, "table", {}, None, fail_on_empty=False, pull=pull_dict) + self.assertRaises( + DbException, + db_men.set_one, + "table", + {}, + None, + fail_on_empty=False, + pull=pull_dict, + ) else: db_men.set_one("table", {}, None, pull=pull_dict) self.assertEqual(db_content, expected, message) @@ -751,12 +1015,36 @@ class TestDbMemory(unittest.TestCase): example = {"a": [1, "1", 1], "d": {}, "n": None} test_set = ( # (database content, set-content, expected database content (None=fails), message) - (example, {"d.b.c": 1}, {"a": [1, "1", 1], "d": {"b": {"c": [1]}}, "n": None}, "push non existing arrray2"), - (example, {"b": 1}, {"a": [1, "1", 1], "d": {}, "b": [1], "n": None}, "push non existing arrray3"), - (example, {"a.6": 1}, {"a": [1, "1", 1, None, None, None, [1]], "d": {}, "n": None}, - "push non existing arrray"), - (example, {"a": 2}, {"a": [1, "1", 1, 2], "d": {}, "n": None}, "push one item"), - (example, {"a": {1: 1}}, {"a": [1, "1", 1, {1: 1}], "d": {}, "n": None}, "push a 
dict"), + ( + example, + {"d.b.c": 1}, + {"a": [1, "1", 1], "d": {"b": {"c": [1]}}, "n": None}, + "push non existing arrray2", + ), + ( + example, + {"b": 1}, + {"a": [1, "1", 1], "d": {}, "b": [1], "n": None}, + "push non existing arrray3", + ), + ( + example, + {"a.6": 1}, + {"a": [1, "1", 1, None, None, None, [1]], "d": {}, "n": None}, + "push non existing arrray", + ), + ( + example, + {"a": 2}, + {"a": [1, "1", 1, 2], "d": {}, "n": None}, + "push one item", + ), + ( + example, + {"a": {1: 1}}, + {"a": [1, "1", 1, {1: 1}], "d": {}, "n": None}, + "push a dict", + ), (example, {"d": 1}, None, "push over dict"), (example, {"n": 1}, None, "push over None"), ) @@ -764,9 +1052,17 @@ class TestDbMemory(unittest.TestCase): db_men._find = Mock() for db_content, push_dict, expected, message in test_set: db_content = deepcopy(db_content) - db_men._find.return_value = ((0, db_content), ) + db_men._find.return_value = ((0, db_content),) if expected is None: - self.assertRaises(DbException, db_men.set_one, "table", {}, None, fail_on_empty=False, push=push_dict) + self.assertRaises( + DbException, + db_men.set_one, + "table", + {}, + None, + fail_on_empty=False, + push=push_dict, + ) else: db_men.set_one("table", {}, None, push=push_dict) self.assertEqual(db_content, expected, message) @@ -775,13 +1071,36 @@ class TestDbMemory(unittest.TestCase): example = {"a": [1, "1", 1], "d": {}, "n": None} test_set = ( # (database content, set-content, expected database content (None=fails), message) - (example, {"d.b.c": [1]}, {"a": [1, "1", 1], "d": {"b": {"c": [1]}}, "n": None}, - "push non existing arrray2"), - (example, {"b": [1]}, {"a": [1, "1", 1], "d": {}, "b": [1], "n": None}, "push non existing arrray3"), - (example, {"a.6": [1]}, {"a": [1, "1", 1, None, None, None, [1]], "d": {}, "n": None}, - "push non existing arrray"), - (example, {"a": [2, 3]}, {"a": [1, "1", 1, 2, 3], "d": {}, "n": None}, "push two item"), - (example, {"a": [{1: 1}]}, {"a": [1, "1", 1, {1: 1}], "d": {}, "n": None}, "push a dict"), + ( + example, + {"d.b.c": [1]}, + {"a": [1, "1", 1], "d": {"b": {"c": [1]}}, "n": None}, + "push non existing arrray2", + ), + ( + example, + {"b": [1]}, + {"a": [1, "1", 1], "d": {}, "b": [1], "n": None}, + "push non existing arrray3", + ), + ( + example, + {"a.6": [1]}, + {"a": [1, "1", 1, None, None, None, [1]], "d": {}, "n": None}, + "push non existing arrray", + ), + ( + example, + {"a": [2, 3]}, + {"a": [1, "1", 1, 2, 3], "d": {}, "n": None}, + "push two item", + ), + ( + example, + {"a": [{1: 1}]}, + {"a": [1, "1", 1, {1: 1}], "d": {}, "n": None}, + "push a dict", + ), (example, {"d": [1]}, None, "push over dict"), (example, {"n": [1]}, None, "push over None"), (example, {"a": 1}, None, "invalid push list non an array"), @@ -790,10 +1109,17 @@ class TestDbMemory(unittest.TestCase): db_men._find = Mock() for db_content, push_list, expected, message in test_set: db_content = deepcopy(db_content) - db_men._find.return_value = ((0, db_content), ) + db_men._find.return_value = ((0, db_content),) if expected is None: - self.assertRaises(DbException, db_men.set_one, "table", {}, None, fail_on_empty=False, - push_list=push_list) + self.assertRaises( + DbException, + db_men.set_one, + "table", + {}, + None, + fail_on_empty=False, + push_list=push_list, + ) else: db_men.set_one("table", {}, None, push_list=push_list) self.assertEqual(db_content, expected, message) @@ -813,9 +1139,17 @@ class TestDbMemory(unittest.TestCase): db_men._find = Mock() for db_content, unset_dict, expected, message in test_set: 
db_content = deepcopy(db_content) - db_men._find.return_value = ((0, db_content), ) + db_men._find.return_value = ((0, db_content),) if expected is None: - self.assertRaises(DbException, db_men.set_one, "table", {}, None, fail_on_empty=False, unset=unset_dict) + self.assertRaises( + DbException, + db_men.set_one, + "table", + {}, + None, + fail_on_empty=False, + unset=unset_dict, + ) else: db_men.set_one("table", {}, None, unset=unset_dict) self.assertEqual(db_content, expected, message) diff --git a/osm_common/tests/test_fsbase.py b/osm_common/tests/test_fsbase.py index 4decad0..43349ad 100644 --- a/osm_common/tests/test_fsbase.py +++ b/osm_common/tests/test_fsbase.py @@ -55,40 +55,52 @@ def test_fs_disconnect(fs_base): def test_mkdir(fs_base): with pytest.raises(FsException) as excinfo: fs_base.mkdir(None) - assert str(excinfo.value).startswith(exception_message("Method 'mkdir' not implemented")) + assert str(excinfo.value).startswith( + exception_message("Method 'mkdir' not implemented") + ) assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR def test_file_exists(fs_base): with pytest.raises(FsException) as excinfo: fs_base.file_exists(None) - assert str(excinfo.value).startswith(exception_message("Method 'file_exists' not implemented")) + assert str(excinfo.value).startswith( + exception_message("Method 'file_exists' not implemented") + ) assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR def test_file_size(fs_base): with pytest.raises(FsException) as excinfo: fs_base.file_size(None) - assert str(excinfo.value).startswith(exception_message("Method 'file_size' not implemented")) + assert str(excinfo.value).startswith( + exception_message("Method 'file_size' not implemented") + ) assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR def test_file_extract(fs_base): with pytest.raises(FsException) as excinfo: fs_base.file_extract(None, None) - assert str(excinfo.value).startswith(exception_message("Method 'file_extract' not implemented")) + assert str(excinfo.value).startswith( + exception_message("Method 'file_extract' not implemented") + ) assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR def test_file_open(fs_base): with pytest.raises(FsException) as excinfo: fs_base.file_open(None, None) - assert str(excinfo.value).startswith(exception_message("Method 'file_open' not implemented")) + assert str(excinfo.value).startswith( + exception_message("Method 'file_open' not implemented") + ) assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR def test_file_delete(fs_base): with pytest.raises(FsException) as excinfo: fs_base.file_delete(None, None) - assert str(excinfo.value).startswith(exception_message("Method 'file_delete' not implemented")) + assert str(excinfo.value).startswith( + exception_message("Method 'file_delete' not implemented") + ) assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR diff --git a/osm_common/tests/test_fslocal.py b/osm_common/tests/test_fslocal.py index 7416cfb..6f52984 100644 --- a/osm_common/tests/test_fslocal.py +++ b/osm_common/tests/test_fslocal.py @@ -34,41 +34,43 @@ __author__ = "Eduardo Sousa " def valid_path(): - return tempfile.gettempdir() + '/' + return tempfile.gettempdir() + "/" def invalid_path(): - return '/#tweeter/' + return "/#tweeter/" @pytest.fixture(scope="function", params=[True, False]) def fs_local(request): fs = FsLocal(lock=request.param) - fs.fs_connect({'path': valid_path()}) + fs.fs_connect({"path": valid_path()}) 
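    # Hand back an FsLocal already connected to the system temp dir; tests create their files beside it.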
return fs def fs_connect_exception_message(path): - return "storage exception Invalid configuration param at '[storage]': path '{}' does not exist".format(path) + return "storage exception Invalid configuration param at '[storage]': path '{}' does not exist".format( + path + ) def file_open_file_not_found_exception(storage): - f = storage if isinstance(storage, str) else '/'.join(storage) + f = storage if isinstance(storage, str) else "/".join(storage) return "storage exception File {} does not exist".format(f) def file_open_io_exception(storage): - f = storage if isinstance(storage, str) else '/'.join(storage) + f = storage if isinstance(storage, str) else "/".join(storage) return "storage exception File {} cannot be opened".format(f) def dir_ls_not_a_directory_exception(storage): - f = storage if isinstance(storage, str) else '/'.join(storage) + f = storage if isinstance(storage, str) else "/".join(storage) return "storage exception File {} does not exist".format(f) def dir_ls_io_exception(storage): - f = storage if isinstance(storage, str) else '/'.join(storage) + f = storage if isinstance(storage, str) else "/".join(storage) return "storage exception File {} cannot be opened".format(f) @@ -78,12 +80,12 @@ def file_delete_exception_message(storage): def test_constructor_without_logger(): fs = FsLocal() - assert fs.logger == logging.getLogger('fs') + assert fs.logger == logging.getLogger("fs") assert fs.path is None def test_constructor_with_logger(): - logger_name = 'fs_local' + logger_name = "fs_local" fs = FsLocal(logger_name=logger_name) assert fs.logger == logging.getLogger(logger_name) assert fs.path is None @@ -98,11 +100,19 @@ def test_get_params(fs_local): assert params["path"] == valid_path() -@pytest.mark.parametrize("config, exp_logger, exp_path", [ - ({'logger_name': 'fs_local', 'path': valid_path()}, 'fs_local', valid_path()), - ({'logger_name': 'fs_local', 'path': valid_path()[:-1]}, 'fs_local', valid_path()), - ({'path': valid_path()}, 'fs', valid_path()), - ({'path': valid_path()[:-1]}, 'fs', valid_path())]) +@pytest.mark.parametrize( + "config, exp_logger, exp_path", + [ + ({"logger_name": "fs_local", "path": valid_path()}, "fs_local", valid_path()), + ( + {"logger_name": "fs_local", "path": valid_path()[:-1]}, + "fs_local", + valid_path(), + ), + ({"path": valid_path()}, "fs", valid_path()), + ({"path": valid_path()[:-1]}, "fs", valid_path()), + ], +) def test_fs_connect_with_valid_config(config, exp_logger, exp_path): fs = FsLocal() fs.fs_connect(config) @@ -110,11 +120,24 @@ def test_fs_connect_with_valid_config(config, exp_logger, exp_path): assert fs.path == exp_path -@pytest.mark.parametrize("config, exp_exception_message", [ - ({'logger_name': 'fs_local', 'path': invalid_path()}, fs_connect_exception_message(invalid_path())), - ({'logger_name': 'fs_local', 'path': invalid_path()[:-1]}, fs_connect_exception_message(invalid_path()[:-1])), - ({'path': invalid_path()}, fs_connect_exception_message(invalid_path())), - ({'path': invalid_path()[:-1]}, fs_connect_exception_message(invalid_path()[:-1]))]) +@pytest.mark.parametrize( + "config, exp_exception_message", + [ + ( + {"logger_name": "fs_local", "path": invalid_path()}, + fs_connect_exception_message(invalid_path()), + ), + ( + {"logger_name": "fs_local", "path": invalid_path()[:-1]}, + fs_connect_exception_message(invalid_path()[:-1]), + ), + ({"path": invalid_path()}, fs_connect_exception_message(invalid_path())), + ( + {"path": invalid_path()[:-1]}, + fs_connect_exception_message(invalid_path()[:-1]), + ), + ], 
+) def test_fs_connect_with_invalid_path(config, exp_exception_message): fs = FsLocal() with pytest.raises(FsException) as excinfo: @@ -144,65 +167,89 @@ def test_mkdir_with_exception(fs_local): assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR -@pytest.mark.parametrize("storage, mode, expected", [ - (str(uuid.uuid4()), 'file', False), - ([str(uuid.uuid4())], 'file', False), - (str(uuid.uuid4()), 'dir', False), - ([str(uuid.uuid4())], 'dir', False)]) +@pytest.mark.parametrize( + "storage, mode, expected", + [ + (str(uuid.uuid4()), "file", False), + ([str(uuid.uuid4())], "file", False), + (str(uuid.uuid4()), "dir", False), + ([str(uuid.uuid4())], "dir", False), + ], +) def test_file_exists_returns_false(fs_local, storage, mode, expected): assert fs_local.file_exists(storage, mode) == expected -@pytest.mark.parametrize("storage, mode, expected", [ - (str(uuid.uuid4()), 'file', True), - ([str(uuid.uuid4())], 'file', True), - (str(uuid.uuid4()), 'dir', True), - ([str(uuid.uuid4())], 'dir', True)]) +@pytest.mark.parametrize( + "storage, mode, expected", + [ + (str(uuid.uuid4()), "file", True), + ([str(uuid.uuid4())], "file", True), + (str(uuid.uuid4()), "dir", True), + ([str(uuid.uuid4())], "dir", True), + ], +) def test_file_exists_returns_true(fs_local, storage, mode, expected): - path = valid_path() + storage if isinstance(storage, str) else valid_path() + storage[0] - if mode == 'file': + path = ( + valid_path() + storage + if isinstance(storage, str) + else valid_path() + storage[0] + ) + if mode == "file": os.mknod(path) - elif mode == 'dir': + elif mode == "dir": os.mkdir(path) assert fs_local.file_exists(storage, mode) == expected - if mode == 'file': + if mode == "file": os.remove(path) - elif mode == 'dir': + elif mode == "dir": os.rmdir(path) -@pytest.mark.parametrize("storage, mode", [ - (str(uuid.uuid4()), 'file'), - ([str(uuid.uuid4())], 'file'), - (str(uuid.uuid4()), 'dir'), - ([str(uuid.uuid4())], 'dir')]) +@pytest.mark.parametrize( + "storage, mode", + [ + (str(uuid.uuid4()), "file"), + ([str(uuid.uuid4())], "file"), + (str(uuid.uuid4()), "dir"), + ([str(uuid.uuid4())], "dir"), + ], +) def test_file_size(fs_local, storage, mode): - path = valid_path() + storage if isinstance(storage, str) else valid_path() + storage[0] - if mode == 'file': + path = ( + valid_path() + storage + if isinstance(storage, str) + else valid_path() + storage[0] + ) + if mode == "file": os.mknod(path) - elif mode == 'dir': + elif mode == "dir": os.mkdir(path) size = os.path.getsize(path) assert fs_local.file_size(storage) == size - if mode == 'file': + if mode == "file": os.remove(path) - elif mode == 'dir': + elif mode == "dir": os.rmdir(path) -@pytest.mark.parametrize("files, path", [ - (['foo', 'bar', 'foobar'], str(uuid.uuid4())), - (['foo', 'bar', 'foobar'], [str(uuid.uuid4())])]) +@pytest.mark.parametrize( + "files, path", + [ + (["foo", "bar", "foobar"], str(uuid.uuid4())), + (["foo", "bar", "foobar"], [str(uuid.uuid4())]), + ], +) def test_file_extract(fs_local, files, path): for f in files: os.mknod(valid_path() + f) - tar_path = valid_path() + str(uuid.uuid4()) + '.tar' - with tarfile.open(tar_path, 'w') as tar: + tar_path = valid_path() + str(uuid.uuid4()) + ".tar" + with tarfile.open(tar_path, "w") as tar: for f in files: tar.add(valid_path() + f, arcname=f) - with tarfile.open(tar_path, 'r') as tar: + with tarfile.open(tar_path, "r") as tar: fs_local.file_extract(tar, path) - extracted_path = valid_path() + (path if isinstance(path, str) else '/'.join(path)) + 
extracted_path = valid_path() + (path if isinstance(path, str) else "/".join(path)) ls_dir = os.listdir(extracted_path) assert len(ls_dir) == len(files) for f in files: @@ -213,21 +260,29 @@ def test_file_extract(fs_local, files, path): shutil.rmtree(extracted_path) -@pytest.mark.parametrize("storage, mode", [ - (str(uuid.uuid4()), 'r'), - (str(uuid.uuid4()), 'w'), - (str(uuid.uuid4()), 'a'), - (str(uuid.uuid4()), 'rb'), - (str(uuid.uuid4()), 'wb'), - (str(uuid.uuid4()), 'ab'), - ([str(uuid.uuid4())], 'r'), - ([str(uuid.uuid4())], 'w'), - ([str(uuid.uuid4())], 'a'), - ([str(uuid.uuid4())], 'rb'), - ([str(uuid.uuid4())], 'wb'), - ([str(uuid.uuid4())], 'ab')]) +@pytest.mark.parametrize( + "storage, mode", + [ + (str(uuid.uuid4()), "r"), + (str(uuid.uuid4()), "w"), + (str(uuid.uuid4()), "a"), + (str(uuid.uuid4()), "rb"), + (str(uuid.uuid4()), "wb"), + (str(uuid.uuid4()), "ab"), + ([str(uuid.uuid4())], "r"), + ([str(uuid.uuid4())], "w"), + ([str(uuid.uuid4())], "a"), + ([str(uuid.uuid4())], "rb"), + ([str(uuid.uuid4())], "wb"), + ([str(uuid.uuid4())], "ab"), + ], +) def test_file_open(fs_local, storage, mode): - path = valid_path() + storage if isinstance(storage, str) else valid_path() + storage[0] + path = ( + valid_path() + storage + if isinstance(storage, str) + else valid_path() + storage[0] + ) os.mknod(path) file_obj = fs_local.file_open(storage, mode) assert isinstance(file_obj, io.IOBase) @@ -235,11 +290,15 @@ def test_file_open(fs_local, storage, mode): os.remove(path) -@pytest.mark.parametrize("storage, mode", [ - (str(uuid.uuid4()), 'r'), - (str(uuid.uuid4()), 'rb'), - ([str(uuid.uuid4())], 'r'), - ([str(uuid.uuid4())], 'rb')]) +@pytest.mark.parametrize( + "storage, mode", + [ + (str(uuid.uuid4()), "r"), + (str(uuid.uuid4()), "rb"), + ([str(uuid.uuid4())], "r"), + ([str(uuid.uuid4())], "rb"), + ], +) def test_file_open_file_not_found_exception(fs_local, storage, mode): with pytest.raises(FsException) as excinfo: fs_local.file_open(storage, mode) @@ -247,21 +306,29 @@ def test_file_open_file_not_found_exception(fs_local, storage, mode): assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND -@pytest.mark.parametrize("storage, mode", [ - (str(uuid.uuid4()), 'r'), - (str(uuid.uuid4()), 'w'), - (str(uuid.uuid4()), 'a'), - (str(uuid.uuid4()), 'rb'), - (str(uuid.uuid4()), 'wb'), - (str(uuid.uuid4()), 'ab'), - ([str(uuid.uuid4())], 'r'), - ([str(uuid.uuid4())], 'w'), - ([str(uuid.uuid4())], 'a'), - ([str(uuid.uuid4())], 'rb'), - ([str(uuid.uuid4())], 'wb'), - ([str(uuid.uuid4())], 'ab')]) +@pytest.mark.parametrize( + "storage, mode", + [ + (str(uuid.uuid4()), "r"), + (str(uuid.uuid4()), "w"), + (str(uuid.uuid4()), "a"), + (str(uuid.uuid4()), "rb"), + (str(uuid.uuid4()), "wb"), + (str(uuid.uuid4()), "ab"), + ([str(uuid.uuid4())], "r"), + ([str(uuid.uuid4())], "w"), + ([str(uuid.uuid4())], "a"), + ([str(uuid.uuid4())], "rb"), + ([str(uuid.uuid4())], "wb"), + ([str(uuid.uuid4())], "ab"), + ], +) def test_file_open_io_error(fs_local, storage, mode): - path = valid_path() + storage if isinstance(storage, str) else valid_path() + storage[0] + path = ( + valid_path() + storage + if isinstance(storage, str) + else valid_path() + storage[0] + ) os.mknod(path) os.chmod(path, 0) with pytest.raises(FsException) as excinfo: @@ -271,17 +338,25 @@ def test_file_open_io_error(fs_local, storage, mode): os.remove(path) -@pytest.mark.parametrize("storage, with_files", [ - (str(uuid.uuid4()), True), - (str(uuid.uuid4()), False), - ([str(uuid.uuid4())], True), - ([str(uuid.uuid4())], False)]) 
+@pytest.mark.parametrize( + "storage, with_files", + [ + (str(uuid.uuid4()), True), + (str(uuid.uuid4()), False), + ([str(uuid.uuid4())], True), + ([str(uuid.uuid4())], False), + ], +) def test_dir_ls(fs_local, storage, with_files): - path = valid_path() + storage if isinstance(storage, str) else valid_path() + storage[0] + path = ( + valid_path() + storage + if isinstance(storage, str) + else valid_path() + storage[0] + ) os.mkdir(path) if with_files is True: file_name = str(uuid.uuid4()) - file_path = path + '/' + file_name + file_path = path + "/" + file_name os.mknod(file_path) result = fs_local.dir_ls(storage) @@ -293,11 +368,13 @@ def test_dir_ls(fs_local, storage, with_files): shutil.rmtree(path) -@pytest.mark.parametrize("storage", [ - (str(uuid.uuid4())), - ([str(uuid.uuid4())])]) +@pytest.mark.parametrize("storage", [(str(uuid.uuid4())), ([str(uuid.uuid4())])]) def test_dir_ls_with_not_a_directory_error(fs_local, storage): - path = valid_path() + storage if isinstance(storage, str) else valid_path() + storage[0] + path = ( + valid_path() + storage + if isinstance(storage, str) + else valid_path() + storage[0] + ) os.mknod(path) with pytest.raises(FsException) as excinfo: fs_local.dir_ls(storage) @@ -306,11 +383,13 @@ def test_dir_ls_with_not_a_directory_error(fs_local, storage): os.remove(path) -@pytest.mark.parametrize("storage", [ - (str(uuid.uuid4())), - ([str(uuid.uuid4())])]) +@pytest.mark.parametrize("storage", [(str(uuid.uuid4())), ([str(uuid.uuid4())])]) def test_dir_ls_with_io_error(fs_local, storage): - path = valid_path() + storage if isinstance(storage, str) else valid_path() + storage[0] + path = ( + valid_path() + storage + if isinstance(storage, str) + else valid_path() + storage[0] + ) os.mkdir(path) os.chmod(path, 0) with pytest.raises(FsException) as excinfo: @@ -320,28 +399,34 @@ def test_dir_ls_with_io_error(fs_local, storage): os.rmdir(path) -@pytest.mark.parametrize("storage, with_files, ignore_non_exist", [ - (str(uuid.uuid4()), True, True), - (str(uuid.uuid4()), False, True), - (str(uuid.uuid4()), True, False), - (str(uuid.uuid4()), False, False), - ([str(uuid.uuid4())], True, True), - ([str(uuid.uuid4())], False, True), - ([str(uuid.uuid4())], True, False), - ([str(uuid.uuid4())], False, False)]) +@pytest.mark.parametrize( + "storage, with_files, ignore_non_exist", + [ + (str(uuid.uuid4()), True, True), + (str(uuid.uuid4()), False, True), + (str(uuid.uuid4()), True, False), + (str(uuid.uuid4()), False, False), + ([str(uuid.uuid4())], True, True), + ([str(uuid.uuid4())], False, True), + ([str(uuid.uuid4())], True, False), + ([str(uuid.uuid4())], False, False), + ], +) def test_file_delete_with_dir(fs_local, storage, with_files, ignore_non_exist): - path = valid_path() + storage if isinstance(storage, str) else valid_path() + storage[0] + path = ( + valid_path() + storage + if isinstance(storage, str) + else valid_path() + storage[0] + ) os.mkdir(path) if with_files is True: - file_path = path + '/' + str(uuid.uuid4()) + file_path = path + "/" + str(uuid.uuid4()) os.mknod(file_path) fs_local.file_delete(storage, ignore_non_exist) assert os.path.exists(path) is False -@pytest.mark.parametrize("storage", [ - (str(uuid.uuid4())), - ([str(uuid.uuid4())])]) +@pytest.mark.parametrize("storage", [(str(uuid.uuid4())), ([str(uuid.uuid4())])]) def test_file_delete_expect_exception(fs_local, storage): with pytest.raises(FsException) as excinfo: fs_local.file_delete(storage) @@ -349,10 +434,12 @@ def test_file_delete_expect_exception(fs_local, storage): assert 
excinfo.value.http_code == http.HTTPStatus.NOT_FOUND -@pytest.mark.parametrize("storage", [ - (str(uuid.uuid4())), - ([str(uuid.uuid4())])]) +@pytest.mark.parametrize("storage", [(str(uuid.uuid4())), ([str(uuid.uuid4())])]) def test_file_delete_no_exception(fs_local, storage): - path = valid_path() + storage if isinstance(storage, str) else valid_path() + storage[0] + path = ( + valid_path() + storage + if isinstance(storage, str) + else valid_path() + storage[0] + ) fs_local.file_delete(storage, ignore_non_exist=True) assert os.path.exists(path) is False diff --git a/osm_common/tests/test_fsmongo.py b/osm_common/tests/test_fsmongo.py index 3b62569..6a8ec58 100644 --- a/osm_common/tests/test_fsmongo.py +++ b/osm_common/tests/test_fsmongo.py @@ -38,11 +38,11 @@ __author__ = "Eduardo Sousa " def valid_path(): - return tempfile.gettempdir() + '/' + return tempfile.gettempdir() + "/" def invalid_path(): - return '/#tweeter/' + return "/#tweeter/" @pytest.fixture(scope="function", params=[True, False]) @@ -56,15 +56,13 @@ def fs_mongo(request, monkeypatch): def mock_gridfs_constructor(a, b): pass - monkeypatch.setattr(MongoClient, '__init__', mock_mongoclient_constructor) - monkeypatch.setattr(MongoClient, '__getitem__', mock_mongoclient_getitem) - monkeypatch.setattr(GridFSBucket, '__init__', mock_gridfs_constructor) + monkeypatch.setattr(MongoClient, "__init__", mock_mongoclient_constructor) + monkeypatch.setattr(MongoClient, "__getitem__", mock_mongoclient_getitem) + monkeypatch.setattr(GridFSBucket, "__init__", mock_gridfs_constructor) fs = FsMongo(lock=request.param) - fs.fs_connect({ - 'path': valid_path(), - 'host': 'mongo', - 'port': 27017, - 'collection': 'files'}) + fs.fs_connect( + {"path": valid_path(), "host": "mongo", "port": 27017, "collection": "files"} + ) return fs @@ -73,26 +71,28 @@ def generic_fs_exception_message(message): def fs_connect_exception_message(path): - return "storage exception Invalid configuration param at '[storage]': path '{}' does not exist".format(path) + return "storage exception Invalid configuration param at '[storage]': path '{}' does not exist".format( + path + ) def file_open_file_not_found_exception(storage): - f = storage if isinstance(storage, str) else '/'.join(storage) + f = storage if isinstance(storage, str) else "/".join(storage) return "storage exception File {} does not exist".format(f) def file_open_io_exception(storage): - f = storage if isinstance(storage, str) else '/'.join(storage) + f = storage if isinstance(storage, str) else "/".join(storage) return "storage exception File {} cannot be opened".format(f) def dir_ls_not_a_directory_exception(storage): - f = storage if isinstance(storage, str) else '/'.join(storage) + f = storage if isinstance(storage, str) else "/".join(storage) return "storage exception File {} does not exist".format(f) def dir_ls_io_exception(storage): - f = storage if isinstance(storage, str) else '/'.join(storage) + f = storage if isinstance(storage, str) else "/".join(storage) return "storage exception File {} cannot be opened".format(f) @@ -102,14 +102,14 @@ def file_delete_exception_message(storage): def test_constructor_without_logger(): fs = FsMongo() - assert fs.logger == logging.getLogger('fs') + assert fs.logger == logging.getLogger("fs") assert fs.path is None assert fs.client is None assert fs.fs is None def test_constructor_with_logger(): - logger_name = 'fs_mongo' + logger_name = "fs_mongo" fs = FsMongo(logger_name=logger_name) assert fs.logger == logging.getLogger(logger_name) assert fs.path is None 
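The constructor and connection tests in this file pin down FsMongo's configuration contract: a writable "path", either a "uri" or a "host" plus "port" pair, and a GridFS "collection". A minimal usage sketch follows, assuming the usual osm_common module layout; the host, port and path values are illustrative placeholders, not part of the patch:

    from osm_common.fsbase import FsException
    from osm_common.fsmongo import FsMongo

    fs = FsMongo(logger_name="fs_mongo")
    try:
        # "uri": "mongo:27017" may be given instead of "host" + "port".
        fs.fs_connect(
            {
                "path": "/tmp/",  # must exist and be writable
                "host": "mongo",  # placeholder hostname
                "port": 27017,
                "collection": "files",
            }
        )
    except FsException as e:
        # Missing or invalid keys raise FsException,
        # e.g. 'Missing parameter "collection"'.
        print(e)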
@@ -121,7 +121,7 @@ def test_get_params(fs_mongo, monkeypatch): def mock_gridfs_find(self, search_query, **kwargs): return [] - monkeypatch.setattr(GridFSBucket, 'find', mock_gridfs_find) + monkeypatch.setattr(GridFSBucket, "find", mock_gridfs_find) params = fs_mongo.get_params() assert len(params) == 2 assert "fs" in params @@ -130,79 +130,83 @@ def test_get_params(fs_mongo, monkeypatch): assert params["path"] == valid_path() -@pytest.mark.parametrize("config, exp_logger, exp_path", [ - ( - { - 'logger_name': 'fs_mongo', - 'path': valid_path(), - 'uri': 'mongo:27017', - 'collection': 'files' - }, - 'fs_mongo', valid_path() - ), - ( - { - 'logger_name': 'fs_mongo', - 'path': valid_path(), - 'host': 'mongo', - 'port': 27017, - 'collection': 'files' - }, - 'fs_mongo', valid_path() - ), - ( - { - 'logger_name': 'fs_mongo', - 'path': valid_path()[:-1], - 'uri': 'mongo:27017', - 'collection': 'files' - }, - 'fs_mongo', valid_path() - ), - ( - { - 'logger_name': 'fs_mongo', - 'path': valid_path()[:-1], - 'host': 'mongo', - 'port': 27017, - 'collection': 'files' - }, - 'fs_mongo', valid_path() - ), - ( - { - 'path': valid_path(), - 'uri': 'mongo:27017', - 'collection': 'files' - }, - 'fs', valid_path() - ), - ( - { - 'path': valid_path(), - 'host': 'mongo', - 'port': 27017, - 'collection': 'files' - }, - 'fs', valid_path() - ), - ( - { - 'path': valid_path()[:-1], - 'uri': 'mongo:27017', - 'collection': 'files' - }, - 'fs', valid_path() - ), - ( - { - 'path': valid_path()[:-1], - 'host': 'mongo', - 'port': 27017, - 'collection': 'files' - }, - 'fs', valid_path() - )]) +@pytest.mark.parametrize( + "config, exp_logger, exp_path", + [ + ( + { + "logger_name": "fs_mongo", + "path": valid_path(), + "uri": "mongo:27017", + "collection": "files", + }, + "fs_mongo", + valid_path(), + ), + ( + { + "logger_name": "fs_mongo", + "path": valid_path(), + "host": "mongo", + "port": 27017, + "collection": "files", + }, + "fs_mongo", + valid_path(), + ), + ( + { + "logger_name": "fs_mongo", + "path": valid_path()[:-1], + "uri": "mongo:27017", + "collection": "files", + }, + "fs_mongo", + valid_path(), + ), + ( + { + "logger_name": "fs_mongo", + "path": valid_path()[:-1], + "host": "mongo", + "port": 27017, + "collection": "files", + }, + "fs_mongo", + valid_path(), + ), + ( + {"path": valid_path(), "uri": "mongo:27017", "collection": "files"}, + "fs", + valid_path(), + ), + ( + { + "path": valid_path(), + "host": "mongo", + "port": 27017, + "collection": "files", + }, + "fs", + valid_path(), + ), + ( + {"path": valid_path()[:-1], "uri": "mongo:27017", "collection": "files"}, + "fs", + valid_path(), + ), + ( + { + "path": valid_path()[:-1], + "host": "mongo", + "port": 27017, + "collection": "files", + }, + "fs", + valid_path(), + ), + ], +) def test_fs_connect_with_valid_config(config, exp_logger, exp_path): fs = FsMongo() fs.fs_connect(config) @@ -212,90 +216,81 @@ def test_fs_connect_with_valid_config(config, exp_logger, exp_path): assert type(fs.fs) == GridFSBucket -@pytest.mark.parametrize("config, exp_exception_message", [ - ( - { - 'logger_name': 'fs_mongo', - 'path': invalid_path(), - 'uri': 'mongo:27017', - 'collection': 'files' - }, - fs_connect_exception_message(invalid_path()) - ), - ( - { - 'logger_name': 'fs_mongo', - 'path': invalid_path(), - 'host': 'mongo', - 'port': 27017, - 'collection': 'files' - }, - fs_connect_exception_message(invalid_path()) - ), - ( - { - 'logger_name': 'fs_mongo', - 'path': invalid_path()[:-1], - 'uri': 'mongo:27017', - 'collection': 'files' - }, - 
fs_connect_exception_message(invalid_path()[:-1]) - ), - ( - { - 'logger_name': 'fs_mongo', - 'path': invalid_path()[:-1], - 'host': 'mongo', - 'port': 27017, - 'collection': 'files' - }, - fs_connect_exception_message(invalid_path()[:-1]) - ), - ( - { - 'path': invalid_path(), - 'uri': 'mongo:27017', - 'collection': 'files' - }, - fs_connect_exception_message(invalid_path()) - ), - ( - { - 'path': invalid_path(), - 'host': 'mongo', - 'port': 27017, - 'collection': 'files' - }, - fs_connect_exception_message(invalid_path()) - ), - ( - { - 'path': invalid_path()[:-1], - 'uri': 'mongo:27017', - 'collection': 'files' - }, - fs_connect_exception_message(invalid_path()[:-1]) - ), - ( - { - 'path': invalid_path()[:-1], - 'host': 'mongo', - 'port': 27017, - 'collection': 'files' - }, - fs_connect_exception_message(invalid_path()[:-1]) - ), - ( - { - 'path': '/', - 'host': 'mongo', - 'port': 27017, - 'collection': 'files' - }, - generic_fs_exception_message( - "Invalid configuration param at '[storage]': path '/' is not writable" - ) - )]) +@pytest.mark.parametrize( + "config, exp_exception_message", + [ + ( + { + "logger_name": "fs_mongo", + "path": invalid_path(), + "uri": "mongo:27017", + "collection": "files", + }, + fs_connect_exception_message(invalid_path()), + ), + ( + { + "logger_name": "fs_mongo", + "path": invalid_path(), + "host": "mongo", + "port": 27017, + "collection": "files", + }, + fs_connect_exception_message(invalid_path()), + ), + ( + { + "logger_name": "fs_mongo", + "path": invalid_path()[:-1], + "uri": "mongo:27017", + "collection": "files", + }, + fs_connect_exception_message(invalid_path()[:-1]), + ), + ( + { + "logger_name": "fs_mongo", + "path": invalid_path()[:-1], + "host": "mongo", + "port": 27017, + "collection": "files", + }, + fs_connect_exception_message(invalid_path()[:-1]), + ), + ( + {"path": invalid_path(), "uri": "mongo:27017", "collection": "files"}, + fs_connect_exception_message(invalid_path()), + ), + ( + { + "path": invalid_path(), + "host": "mongo", + "port": 27017, + "collection": "files", + }, + fs_connect_exception_message(invalid_path()), + ), + ( + {"path": invalid_path()[:-1], "uri": "mongo:27017", "collection": "files"}, + fs_connect_exception_message(invalid_path()[:-1]), + ), + ( + { + "path": invalid_path()[:-1], + "host": "mongo", + "port": 27017, + "collection": "files", + }, + fs_connect_exception_message(invalid_path()[:-1]), + ), + ( + {"path": "/", "host": "mongo", "port": 27017, "collection": "files"}, + generic_fs_exception_message( + "Invalid configuration param at '[storage]': path '/' is not writable" + ), + ), + ], +) def test_fs_connect_with_invalid_path(config, exp_exception_message): fs = FsMongo() with pytest.raises(FsException) as excinfo: @@ -303,67 +298,59 @@ def test_fs_connect_with_invalid_path(config, exp_exception_message): assert str(excinfo.value) == exp_exception_message -@pytest.mark.parametrize("config, exp_exception_message", [ - ( - { - 'logger_name': 'fs_mongo', - 'uri': 'mongo:27017', - 'collection': 'files' - }, - "Missing parameter \"path\"" - ), - ( - { - 'logger_name': 'fs_mongo', - 'host': 'mongo', - 'port': 27017, - 'collection': 'files' - }, - "Missing parameter \"path\"" - ), - ( - { - 'logger_name': 'fs_mongo', - 'path': valid_path(), - 'collection': 'files' - }, - "Missing parameters: \"uri\" or \"host\" + \"port\"" - ), - ( - { - 'logger_name': 'fs_mongo', - 'path': valid_path(), - 'port': 27017, - 'collection': 'files' - }, - "Missing parameters: \"uri\" or \"host\" + \"port\"" - ), - ( - { - 
'logger_name': 'fs_mongo', - 'path': valid_path(), - 'host': 'mongo', - 'collection': 'files' - }, - "Missing parameters: \"uri\" or \"host\" + \"port\"" - ), - ( - { - 'logger_name': 'fs_mongo', - 'path': valid_path(), - 'uri': 'mongo:27017' - }, - "Missing parameter \"collection\"" - ), - ( - { - 'logger_name': 'fs_mongo', - 'path': valid_path(), - 'host': 'mongo', - 'port': 27017, - }, - "Missing parameter \"collection\"" - )]) +@pytest.mark.parametrize( + "config, exp_exception_message", + [ + ( + {"logger_name": "fs_mongo", "uri": "mongo:27017", "collection": "files"}, + 'Missing parameter "path"', + ), + ( + { + "logger_name": "fs_mongo", + "host": "mongo", + "port": 27017, + "collection": "files", + }, + 'Missing parameter "path"', + ), + ( + {"logger_name": "fs_mongo", "path": valid_path(), "collection": "files"}, + 'Missing parameters: "uri" or "host" + "port"', + ), + ( + { + "logger_name": "fs_mongo", + "path": valid_path(), + "port": 27017, + "collection": "files", + }, + 'Missing parameters: "uri" or "host" + "port"', + ), + ( + { + "logger_name": "fs_mongo", + "path": valid_path(), + "host": "mongo", + "collection": "files", + }, + 'Missing parameters: "uri" or "host" + "port"', + ), + ( + {"logger_name": "fs_mongo", "path": valid_path(), "uri": "mongo:27017"}, + 'Missing parameter "collection"', + ), + ( + { + "logger_name": "fs_mongo", + "path": valid_path(), + "host": "mongo", + "port": 27017, + }, + 'Missing parameter "collection"', + ), + ], +) def test_fs_connect_with_missing_parameters(config, exp_exception_message): fs = FsMongo() with pytest.raises(FsException) as excinfo: @@ -371,31 +358,37 @@ def test_fs_connect_with_missing_parameters(config, exp_exception_message): assert str(excinfo.value) == generic_fs_exception_message(exp_exception_message) -@pytest.mark.parametrize("config, exp_exception_message", [ - ( - { - 'logger_name': 'fs_mongo', - 'path': valid_path(), - 'uri': 'mongo:27017', - 'collection': 'files' - }, - "MongoClient crashed" - ), - ( - { - 'logger_name': 'fs_mongo', - 'path': valid_path(), - 'host': 'mongo', - 'port': 27017, - 'collection': 'files' - }, - "MongoClient crashed" - )]) -def test_fs_connect_with_invalid_mongoclient(config, exp_exception_message, monkeypatch): +@pytest.mark.parametrize( + "config, exp_exception_message", + [ + ( + { + "logger_name": "fs_mongo", + "path": valid_path(), + "uri": "mongo:27017", + "collection": "files", + }, + "MongoClient crashed", + ), + ( + { + "logger_name": "fs_mongo", + "path": valid_path(), + "host": "mongo", + "port": 27017, + "collection": "files", + }, + "MongoClient crashed", + ), + ], +) +def test_fs_connect_with_invalid_mongoclient( + config, exp_exception_message, monkeypatch +): def generate_exception(a, b, c=None): raise Exception(exp_exception_message) - monkeypatch.setattr(MongoClient, '__init__', generate_exception) + monkeypatch.setattr(MongoClient, "__init__", generate_exception) fs = FsMongo() with pytest.raises(FsException) as excinfo: @@ -403,35 +396,41 @@ def test_fs_connect_with_invalid_mongoclient(config, exp_exception_message, monk assert str(excinfo.value) == generic_fs_exception_message(exp_exception_message) -@pytest.mark.parametrize("config, exp_exception_message", [ - ( - { - 'logger_name': 'fs_mongo', - 'path': valid_path(), - 'uri': 'mongo:27017', - 'collection': 'files' - }, - "Collection unavailable" - ), - ( - { - 'logger_name': 'fs_mongo', - 'path': valid_path(), - 'host': 'mongo', - 'port': 27017, - 'collection': 'files' - }, - "Collection unavailable" - )]) -def 
test_fs_connect_with_invalid_mongo_collection(config, exp_exception_message, monkeypatch): +@pytest.mark.parametrize( + "config, exp_exception_message", + [ + ( + { + "logger_name": "fs_mongo", + "path": valid_path(), + "uri": "mongo:27017", + "collection": "files", + }, + "Collection unavailable", + ), + ( + { + "logger_name": "fs_mongo", + "path": valid_path(), + "host": "mongo", + "port": 27017, + "collection": "files", + }, + "Collection unavailable", + ), + ], +) +def test_fs_connect_with_invalid_mongo_collection( + config, exp_exception_message, monkeypatch +): def mock_mongoclient_constructor(a, b, c=None): pass def generate_exception(a, b): raise Exception(exp_exception_message) - monkeypatch.setattr(MongoClient, '__init__', mock_mongoclient_constructor) - monkeypatch.setattr(MongoClient, '__getitem__', generate_exception) + monkeypatch.setattr(MongoClient, "__init__", mock_mongoclient_constructor) + monkeypatch.setattr(MongoClient, "__getitem__", generate_exception) fs = FsMongo() with pytest.raises(FsException) as excinfo: @@ -439,27 +438,33 @@ def test_fs_connect_with_invalid_mongo_collection(config, exp_exception_message, assert str(excinfo.value) == generic_fs_exception_message(exp_exception_message) -@pytest.mark.parametrize("config, exp_exception_message", [ - ( - { - 'logger_name': 'fs_mongo', - 'path': valid_path(), - 'uri': 'mongo:27017', - 'collection': 'files' - }, - "GridFsBucket crashed" - ), - ( - { - 'logger_name': 'fs_mongo', - 'path': valid_path(), - 'host': 'mongo', - 'port': 27017, - 'collection': 'files' - }, - "GridFsBucket crashed" - )]) -def test_fs_connect_with_invalid_gridfsbucket(config, exp_exception_message, monkeypatch): +@pytest.mark.parametrize( + "config, exp_exception_message", + [ + ( + { + "logger_name": "fs_mongo", + "path": valid_path(), + "uri": "mongo:27017", + "collection": "files", + }, + "GridFsBucket crashed", + ), + ( + { + "logger_name": "fs_mongo", + "path": valid_path(), + "host": "mongo", + "port": 27017, + "collection": "files", + }, + "GridFsBucket crashed", + ), + ], +) +def test_fs_connect_with_invalid_gridfsbucket( + config, exp_exception_message, monkeypatch +): def mock_mongoclient_constructor(a, b, c=None): pass @@ -469,9 +474,9 @@ def test_fs_connect_with_invalid_gridfsbucket(config, exp_exception_message, mon def generate_exception(a, b): raise Exception(exp_exception_message) - monkeypatch.setattr(MongoClient, '__init__', mock_mongoclient_constructor) - monkeypatch.setattr(MongoClient, '__getitem__', mock_mongoclient_getitem) - monkeypatch.setattr(GridFSBucket, '__init__', generate_exception) + monkeypatch.setattr(MongoClient, "__init__", mock_mongoclient_constructor) + monkeypatch.setattr(MongoClient, "__getitem__", mock_mongoclient_getitem) + monkeypatch.setattr(GridFSBucket, "__init__", generate_exception) fs = FsMongo() with pytest.raises(FsException) as excinfo: @@ -498,53 +503,55 @@ class FakeCursor: class FakeFS: - directory_metadata = {'type': 'dir', 'permissions': 509} - file_metadata = {'type': 'file', 'permissions': 436} - symlink_metadata = {'type': 'sym', 'permissions': 511} + directory_metadata = {"type": "dir", "permissions": 509} + file_metadata = {"type": "file", "permissions": 436} + symlink_metadata = {"type": "sym", "permissions": 511} tar_info = { 1: { - "cursor": FakeCursor(1, 'example_tar', directory_metadata), + "cursor": FakeCursor(1, "example_tar", directory_metadata), "metadata": directory_metadata, - "stream_content": b'', + "stream_content": b"", "stream_content_bad": b"Something", - "path": 
'./tmp/example_tar', + "path": "./tmp/example_tar", }, 2: { - "cursor": FakeCursor(2, 'example_tar/directory', directory_metadata), + "cursor": FakeCursor(2, "example_tar/directory", directory_metadata), "metadata": directory_metadata, - "stream_content": b'', + "stream_content": b"", "stream_content_bad": b"Something", - "path": './tmp/example_tar/directory', + "path": "./tmp/example_tar/directory", }, 3: { - "cursor": FakeCursor(3, 'example_tar/symlinks', directory_metadata), + "cursor": FakeCursor(3, "example_tar/symlinks", directory_metadata), "metadata": directory_metadata, - "stream_content": b'', + "stream_content": b"", "stream_content_bad": b"Something", - "path": './tmp/example_tar/symlinks', + "path": "./tmp/example_tar/symlinks", }, 4: { - "cursor": FakeCursor(4, 'example_tar/directory/file', file_metadata), + "cursor": FakeCursor(4, "example_tar/directory/file", file_metadata), "metadata": file_metadata, "stream_content": b"Example test", "stream_content_bad": b"Example test2", - "path": './tmp/example_tar/directory/file', + "path": "./tmp/example_tar/directory/file", }, 5: { - "cursor": FakeCursor(5, 'example_tar/symlinks/file_link', symlink_metadata), + "cursor": FakeCursor(5, "example_tar/symlinks/file_link", symlink_metadata), "metadata": symlink_metadata, "stream_content": b"../directory/file", "stream_content_bad": b"", - "path": './tmp/example_tar/symlinks/file_link', + "path": "./tmp/example_tar/symlinks/file_link", }, 6: { - "cursor": FakeCursor(6, 'example_tar/symlinks/directory_link', symlink_metadata), + "cursor": FakeCursor( + 6, "example_tar/symlinks/directory_link", symlink_metadata + ), "metadata": symlink_metadata, "stream_content": b"../directory/", "stream_content_bad": b"", - "path": './tmp/example_tar/symlinks/directory_link', - } + "path": "./tmp/example_tar/symlinks/directory_link", + }, } def upload_from_stream(self, f, stream, metadata=None): @@ -635,7 +642,9 @@ def test_upload_mongo_fs(): os.mkdir(path) os.mkdir("{}example_local".format(path)) os.mkdir("{}example_local/directory".format(path)) - with open("{}example_local/directory/test_file".format(path), "w+") as test_file: + with open( + "{}example_local/directory/test_file".format(path), "w+" + ) as test_file: test_file.write(file_content) fs.reverse_sync("example_local") diff --git a/osm_common/tests/test_msgbase.py b/osm_common/tests/test_msgbase.py index 9c6c3c5..665968e 100644 --- a/osm_common/tests/test_msgbase.py +++ b/osm_common/tests/test_msgbase.py @@ -49,26 +49,36 @@ def test_disconnect(msg_base): def test_write(msg_base): with pytest.raises(MsgException) as excinfo: msg_base.write("test", "test", "test") - assert str(excinfo.value).startswith(exception_message("Method 'write' not implemented")) + assert str(excinfo.value).startswith( + exception_message("Method 'write' not implemented") + ) assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR def test_read(msg_base): with pytest.raises(MsgException) as excinfo: msg_base.read("test") - assert str(excinfo.value).startswith(exception_message("Method 'read' not implemented")) + assert str(excinfo.value).startswith( + exception_message("Method 'read' not implemented") + ) assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR def test_aiowrite(msg_base, event_loop): with pytest.raises(MsgException) as excinfo: - event_loop.run_until_complete(msg_base.aiowrite("test", "test", "test", event_loop)) - assert str(excinfo.value).startswith(exception_message("Method 'aiowrite' not implemented")) + 
event_loop.run_until_complete( + msg_base.aiowrite("test", "test", "test", event_loop) + ) + assert str(excinfo.value).startswith( + exception_message("Method 'aiowrite' not implemented") + ) assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR def test_aioread(msg_base, event_loop): with pytest.raises(MsgException) as excinfo: event_loop.run_until_complete(msg_base.aioread("test", event_loop)) - assert str(excinfo.value).startswith(exception_message("Method 'aioread' not implemented")) + assert str(excinfo.value).startswith( + exception_message("Method 'aioread' not implemented") + ) assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR diff --git a/osm_common/tests/test_msglocal.py b/osm_common/tests/test_msglocal.py index 41f6eb8..9548885 100644 --- a/osm_common/tests/test_msglocal.py +++ b/osm_common/tests/test_msglocal.py @@ -36,11 +36,11 @@ __author__ = "Eduardo Sousa " def valid_path(): - return tempfile.gettempdir() + '/' + return tempfile.gettempdir() + "/" def invalid_path(): - return '/#tweeter/' + return "/#tweeter/" @pytest.fixture(scope="function", params=[True, False]) @@ -90,7 +90,7 @@ def empty_exception_message(): def test_constructor(): msg = MsgLocal() - assert msg.logger == logging.getLogger('msg') + assert msg.logger == logging.getLogger("msg") assert msg.path is None assert len(msg.files_read) == 0 assert len(msg.files_write) == 0 @@ -98,7 +98,7 @@ def test_constructor(): def test_constructor_with_logger(): - logger_name = 'msg_local' + logger_name = "msg_local" msg = MsgLocal(logger_name=logger_name) assert msg.logger == logging.getLogger(logger_name) assert msg.path is None @@ -107,15 +107,31 @@ def test_constructor_with_logger(): assert len(msg.buffer) == 0 -@pytest.mark.parametrize("config, logger_name, path", [ - ({"logger_name": "msg_local", "path": valid_path()}, "msg_local", valid_path()), - ({"logger_name": "msg_local", "path": valid_path()[:-1]}, "msg_local", valid_path()), - ({"logger_name": "msg_local", "path": valid_path() + "test_it/"}, "msg_local", valid_path() + "test_it/"), - ({"logger_name": "msg_local", "path": valid_path() + "test_it"}, "msg_local", valid_path() + "test_it/"), - ({"path": valid_path()}, "msg", valid_path()), - ({"path": valid_path()[:-1]}, "msg", valid_path()), - ({"path": valid_path() + "test_it/"}, "msg", valid_path() + "test_it/"), - ({"path": valid_path() + "test_it"}, "msg", valid_path() + "test_it/")]) +@pytest.mark.parametrize( + "config, logger_name, path", + [ + ({"logger_name": "msg_local", "path": valid_path()}, "msg_local", valid_path()), + ( + {"logger_name": "msg_local", "path": valid_path()[:-1]}, + "msg_local", + valid_path(), + ), + ( + {"logger_name": "msg_local", "path": valid_path() + "test_it/"}, + "msg_local", + valid_path() + "test_it/", + ), + ( + {"logger_name": "msg_local", "path": valid_path() + "test_it"}, + "msg_local", + valid_path() + "test_it/", + ), + ({"path": valid_path()}, "msg", valid_path()), + ({"path": valid_path()[:-1]}, "msg", valid_path()), + ({"path": valid_path() + "test_it/"}, "msg", valid_path() + "test_it/"), + ({"path": valid_path() + "test_it"}, "msg", valid_path() + "test_it/"), + ], +) def test_connect(msg_local, config, logger_name, path): msg_local.connect(config) assert msg_local.logger == logging.getLogger(logger_name) @@ -125,9 +141,13 @@ def test_connect(msg_local, config, logger_name, path): assert len(msg_local.buffer) == 0 -@pytest.mark.parametrize("config", [ - ({"logger_name": "msg_local", "path": invalid_path()}), - ({"path": 
invalid_path()})]) +@pytest.mark.parametrize( + "config", + [ + ({"logger_name": "msg_local", "path": invalid_path()}), + ({"path": invalid_path()}), + ], +) def test_connect_with_exception(msg_local, config): with pytest.raises(MsgException) as excinfo: msg_local.connect(config) @@ -146,8 +166,8 @@ def test_disconnect(msg_local_config): def test_disconnect_with_read(msg_local_config): - msg_local_config.read('topic1', blocks=False) - msg_local_config.read('topic2', blocks=False) + msg_local_config.read("topic1", blocks=False) + msg_local_config.read("topic2", blocks=False) files_read = msg_local_config.files_read.copy() files_write = msg_local_config.files_write.copy() msg_local_config.disconnect() @@ -164,14 +184,14 @@ def test_disconnect_with_write(msg_local_with_data): for f in files_read.values(): assert f.closed - + for f in files_write.values(): assert f.closed def test_disconnect_with_read_and_write(msg_local_with_data): - msg_local_with_data.read('topic1', blocks=False) - msg_local_with_data.read('topic2', blocks=False) + msg_local_with_data.read("topic1", blocks=False) + msg_local_with_data.read("topic2", blocks=False) files_read = msg_local_with_data.files_read.copy() files_write = msg_local_with_data.files_write.copy() @@ -182,77 +202,93 @@ def test_disconnect_with_read_and_write(msg_local_with_data): assert f.closed -@pytest.mark.parametrize("topic, key, msg", [ - ("test_topic", "test_key", "test_msg"), - ("test", "test_key", "test_msg"), - ("test_topic", "test", "test_msg"), - ("test_topic", "test_key", "test"), - ("test_topic", "test_list", ["a", "b", "c"]), - ("test_topic", "test_tuple", ("c", "b", "a")), - ("test_topic", "test_dict", {"a": 1, "b": 2, "c": 3}), - ("test_topic", "test_number", 123), - ("test_topic", "test_float", 1.23), - ("test_topic", "test_boolean", True), - ("test_topic", "test_none", None)]) +@pytest.mark.parametrize( + "topic, key, msg", + [ + ("test_topic", "test_key", "test_msg"), + ("test", "test_key", "test_msg"), + ("test_topic", "test", "test_msg"), + ("test_topic", "test_key", "test"), + ("test_topic", "test_list", ["a", "b", "c"]), + ("test_topic", "test_tuple", ("c", "b", "a")), + ("test_topic", "test_dict", {"a": 1, "b": 2, "c": 3}), + ("test_topic", "test_number", 123), + ("test_topic", "test_float", 1.23), + ("test_topic", "test_boolean", True), + ("test_topic", "test_none", None), + ], +) def test_write(msg_local_config, topic, key, msg): file_path = msg_local_config.path + topic msg_local_config.write(topic, key, msg) assert os.path.exists(file_path) - with open(file_path, 'r') as stream: - assert yaml.safe_load(stream) == {key: msg if not isinstance(msg, tuple) else list(msg)} - - -@pytest.mark.parametrize("topic, key, msg, times", [ - ("test_topic", "test_key", "test_msg", 2), - ("test", "test_key", "test_msg", 3), - ("test_topic", "test", "test_msg", 4), - ("test_topic", "test_key", "test", 2), - ("test_topic", "test_list", ["a", "b", "c"], 3), - ("test_topic", "test_tuple", ("c", "b", "a"), 4), - ("test_topic", "test_dict", {"a": 1, "b": 2, "c": 3}, 2), - ("test_topic", "test_number", 123, 3), - ("test_topic", "test_float", 1.23, 4), - ("test_topic", "test_boolean", True, 2), - ("test_topic", "test_none", None, 3)]) + with open(file_path, "r") as stream: + assert yaml.safe_load(stream) == { + key: msg if not isinstance(msg, tuple) else list(msg) + } + + +@pytest.mark.parametrize( + "topic, key, msg, times", + [ + ("test_topic", "test_key", "test_msg", 2), + ("test", "test_key", "test_msg", 3), + ("test_topic", "test", "test_msg", 4), 
+ ("test_topic", "test_key", "test", 2), + ("test_topic", "test_list", ["a", "b", "c"], 3), + ("test_topic", "test_tuple", ("c", "b", "a"), 4), + ("test_topic", "test_dict", {"a": 1, "b": 2, "c": 3}, 2), + ("test_topic", "test_number", 123, 3), + ("test_topic", "test_float", 1.23, 4), + ("test_topic", "test_boolean", True, 2), + ("test_topic", "test_none", None, 3), + ], +) def test_write_with_multiple_calls(msg_local_config, topic, key, msg, times): file_path = msg_local_config.path + topic - + for _ in range(times): msg_local_config.write(topic, key, msg) assert os.path.exists(file_path) - with open(file_path, 'r') as stream: + with open(file_path, "r") as stream: for _ in range(times): data = stream.readline() - assert yaml.safe_load(data) == {key: msg if not isinstance(msg, tuple) else list(msg)} + assert yaml.safe_load(data) == { + key: msg if not isinstance(msg, tuple) else list(msg) + } def test_write_exception(msg_local_config): msg_local_config.files_write = MagicMock() msg_local_config.files_write.__contains__.side_effect = Exception() - + with pytest.raises(MsgException) as excinfo: msg_local_config.write("test", "test", "test") assert str(excinfo.value).startswith(empty_exception_message()) assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR -@pytest.mark.parametrize("topics, datas", [ - (["topic"], [{"key": "value"}]), - (["topic1"], [{"key": "value"}]), - (["topic2"], [{"key": "value"}]), - (["topic", "topic1"], [{"key": "value"}]), - (["topic", "topic2"], [{"key": "value"}]), - (["topic1", "topic2"], [{"key": "value"}]), - (["topic", "topic1", "topic2"], [{"key": "value"}]), - (["topic"], [{"key": "value"}, {"key1": "value1"}]), - (["topic1"], [{"key": "value"}, {"key1": "value1"}]), - (["topic2"], [{"key": "value"}, {"key1": "value1"}]), - (["topic", "topic1"], [{"key": "value"}, {"key1": "value1"}]), - (["topic", "topic2"], [{"key": "value"}, {"key1": "value1"}]), - (["topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}]), - (["topic", "topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}])]) +@pytest.mark.parametrize( + "topics, datas", + [ + (["topic"], [{"key": "value"}]), + (["topic1"], [{"key": "value"}]), + (["topic2"], [{"key": "value"}]), + (["topic", "topic1"], [{"key": "value"}]), + (["topic", "topic2"], [{"key": "value"}]), + (["topic1", "topic2"], [{"key": "value"}]), + (["topic", "topic1", "topic2"], [{"key": "value"}]), + (["topic"], [{"key": "value"}, {"key1": "value1"}]), + (["topic1"], [{"key": "value"}, {"key1": "value1"}]), + (["topic2"], [{"key": "value"}, {"key1": "value1"}]), + (["topic", "topic1"], [{"key": "value"}, {"key1": "value1"}]), + (["topic", "topic2"], [{"key": "value"}, {"key1": "value1"}]), + (["topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}]), + (["topic", "topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}]), + ], +) def test_read(msg_local_with_data, topics, datas): def write_to_topic(topics, datas): # Allow msglocal to block while waiting @@ -282,21 +318,25 @@ def test_read(msg_local_with_data, topics, datas): t.join() -@pytest.mark.parametrize("topics, datas", [ - (["topic"], [{"key": "value"}]), - (["topic1"], [{"key": "value"}]), - (["topic2"], [{"key": "value"}]), - (["topic", "topic1"], [{"key": "value"}]), - (["topic", "topic2"], [{"key": "value"}]), - (["topic1", "topic2"], [{"key": "value"}]), - (["topic", "topic1", "topic2"], [{"key": "value"}]), - (["topic"], [{"key": "value"}, {"key1": "value1"}]), - (["topic1"], [{"key": "value"}, {"key1": "value1"}]), - (["topic2"], 
[{"key": "value"}, {"key1": "value1"}]), - (["topic", "topic1"], [{"key": "value"}, {"key1": "value1"}]), - (["topic", "topic2"], [{"key": "value"}, {"key1": "value1"}]), - (["topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}]), - (["topic", "topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}])]) +@pytest.mark.parametrize( + "topics, datas", + [ + (["topic"], [{"key": "value"}]), + (["topic1"], [{"key": "value"}]), + (["topic2"], [{"key": "value"}]), + (["topic", "topic1"], [{"key": "value"}]), + (["topic", "topic2"], [{"key": "value"}]), + (["topic1", "topic2"], [{"key": "value"}]), + (["topic", "topic1", "topic2"], [{"key": "value"}]), + (["topic"], [{"key": "value"}, {"key1": "value1"}]), + (["topic1"], [{"key": "value"}, {"key1": "value1"}]), + (["topic2"], [{"key": "value"}, {"key1": "value1"}]), + (["topic", "topic1"], [{"key": "value"}, {"key1": "value1"}]), + (["topic", "topic2"], [{"key": "value"}, {"key1": "value1"}]), + (["topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}]), + (["topic", "topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}]), + ], +) def test_read_non_block(msg_local_with_data, topics, datas): def write_to_topic(topics, datas): for topic in topics: @@ -316,7 +356,9 @@ def test_read_non_block(msg_local_with_data, topics, datas): for topic in topics: for data in datas: - recv_topic, recv_key, recv_msg = msg_local_with_data.read(topic, blocks=False) + recv_topic, recv_key, recv_msg = msg_local_with_data.read( + topic, blocks=False + ) key = list(data.keys())[0] val = data[key] assert recv_topic == topic @@ -324,21 +366,25 @@ def test_read_non_block(msg_local_with_data, topics, datas): assert recv_msg == val -@pytest.mark.parametrize("topics, datas", [ - (["topic"], [{"key": "value"}]), - (["topic1"], [{"key": "value"}]), - (["topic2"], [{"key": "value"}]), - (["topic", "topic1"], [{"key": "value"}]), - (["topic", "topic2"], [{"key": "value"}]), - (["topic1", "topic2"], [{"key": "value"}]), - (["topic", "topic1", "topic2"], [{"key": "value"}]), - (["topic"], [{"key": "value"}, {"key1": "value1"}]), - (["topic1"], [{"key": "value"}, {"key1": "value1"}]), - (["topic2"], [{"key": "value"}, {"key1": "value1"}]), - (["topic", "topic1"], [{"key": "value"}, {"key1": "value1"}]), - (["topic", "topic2"], [{"key": "value"}, {"key1": "value1"}]), - (["topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}]), - (["topic", "topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}])]) +@pytest.mark.parametrize( + "topics, datas", + [ + (["topic"], [{"key": "value"}]), + (["topic1"], [{"key": "value"}]), + (["topic2"], [{"key": "value"}]), + (["topic", "topic1"], [{"key": "value"}]), + (["topic", "topic2"], [{"key": "value"}]), + (["topic1", "topic2"], [{"key": "value"}]), + (["topic", "topic1", "topic2"], [{"key": "value"}]), + (["topic"], [{"key": "value"}, {"key1": "value1"}]), + (["topic1"], [{"key": "value"}, {"key1": "value1"}]), + (["topic2"], [{"key": "value"}, {"key1": "value1"}]), + (["topic", "topic1"], [{"key": "value"}, {"key1": "value1"}]), + (["topic", "topic2"], [{"key": "value"}, {"key1": "value1"}]), + (["topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}]), + (["topic", "topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}]), + ], +) def test_read_non_block_none(msg_local_with_data, topics, datas): def write_to_topic(topics, datas): time.sleep(2) @@ -347,6 +393,7 @@ def test_read_non_block_none(msg_local_with_data, topics, datas): with open(msg_local_with_data.path + topic, "a+") as fp: yaml.safe_dump(data, fp, 
default_flow_style=True, width=20000) fp.flush() + # If file is not opened first, the messages written won't be seen for topic in topics: if topic not in msg_local_with_data.files_read: @@ -360,9 +407,7 @@ def test_read_non_block_none(msg_local_with_data, topics, datas): t.join() -@pytest.mark.parametrize("blocks", [ - (True), - (False)]) +@pytest.mark.parametrize("blocks", [(True), (False)]) def test_read_exception(msg_local_with_data, blocks): msg_local_with_data.files_read = MagicMock() msg_local_with_data.files_read.__contains__.side_effect = Exception() @@ -373,21 +418,25 @@ def test_read_exception(msg_local_with_data, blocks): assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR -@pytest.mark.parametrize("topics, datas", [ - (["topic"], [{"key": "value"}]), - (["topic1"], [{"key": "value"}]), - (["topic2"], [{"key": "value"}]), - (["topic", "topic1"], [{"key": "value"}]), - (["topic", "topic2"], [{"key": "value"}]), - (["topic1", "topic2"], [{"key": "value"}]), - (["topic", "topic1", "topic2"], [{"key": "value"}]), - (["topic"], [{"key": "value"}, {"key1": "value1"}]), - (["topic1"], [{"key": "value"}, {"key1": "value1"}]), - (["topic2"], [{"key": "value"}, {"key1": "value1"}]), - (["topic", "topic1"], [{"key": "value"}, {"key1": "value1"}]), - (["topic", "topic2"], [{"key": "value"}, {"key1": "value1"}]), - (["topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}]), - (["topic", "topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}])]) +@pytest.mark.parametrize( + "topics, datas", + [ + (["topic"], [{"key": "value"}]), + (["topic1"], [{"key": "value"}]), + (["topic2"], [{"key": "value"}]), + (["topic", "topic1"], [{"key": "value"}]), + (["topic", "topic2"], [{"key": "value"}]), + (["topic1", "topic2"], [{"key": "value"}]), + (["topic", "topic1", "topic2"], [{"key": "value"}]), + (["topic"], [{"key": "value"}, {"key1": "value1"}]), + (["topic1"], [{"key": "value"}, {"key1": "value1"}]), + (["topic2"], [{"key": "value"}, {"key1": "value1"}]), + (["topic", "topic1"], [{"key": "value"}, {"key1": "value1"}]), + (["topic", "topic2"], [{"key": "value"}, {"key1": "value1"}]), + (["topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}]), + (["topic", "topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}]), + ], +) def test_aioread(msg_local_with_data, event_loop, topics, datas): def write_to_topic(topics, datas): time.sleep(2) @@ -396,6 +445,7 @@ def test_aioread(msg_local_with_data, event_loop, topics, datas): with open(msg_local_with_data.path + topic, "a+") as fp: yaml.safe_dump(data, fp, default_flow_style=True, width=20000) fp.flush() + # If file is not opened first, the messages written won't be seen for topic in topics: if topic not in msg_local_with_data.files_read: @@ -405,7 +455,9 @@ def test_aioread(msg_local_with_data, event_loop, topics, datas): t.start() for topic in topics: for data in datas: - recv = event_loop.run_until_complete(msg_local_with_data.aioread(topic, event_loop)) + recv = event_loop.run_until_complete( + msg_local_with_data.aioread(topic, event_loop) + ) recv_topic, recv_key, recv_msg = recv key = list(data.keys())[0] val = data[key] @@ -435,55 +487,69 @@ def test_aioread_general_exception(msg_local_with_data, event_loop): assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR -@pytest.mark.parametrize("topic, key, msg", [ - ("test_topic", "test_key", "test_msg"), - ("test", "test_key", "test_msg"), - ("test_topic", "test", "test_msg"), - ("test_topic", "test_key", "test"), - ("test_topic", "test_list", ["a", 
"b", "c"]), - ("test_topic", "test_tuple", ("c", "b", "a")), - ("test_topic", "test_dict", {"a": 1, "b": 2, "c": 3}), - ("test_topic", "test_number", 123), - ("test_topic", "test_float", 1.23), - ("test_topic", "test_boolean", True), - ("test_topic", "test_none", None)]) +@pytest.mark.parametrize( + "topic, key, msg", + [ + ("test_topic", "test_key", "test_msg"), + ("test", "test_key", "test_msg"), + ("test_topic", "test", "test_msg"), + ("test_topic", "test_key", "test"), + ("test_topic", "test_list", ["a", "b", "c"]), + ("test_topic", "test_tuple", ("c", "b", "a")), + ("test_topic", "test_dict", {"a": 1, "b": 2, "c": 3}), + ("test_topic", "test_number", 123), + ("test_topic", "test_float", 1.23), + ("test_topic", "test_boolean", True), + ("test_topic", "test_none", None), + ], +) def test_aiowrite(msg_local_config, event_loop, topic, key, msg): file_path = msg_local_config.path + topic event_loop.run_until_complete(msg_local_config.aiowrite(topic, key, msg)) assert os.path.exists(file_path) - with open(file_path, 'r') as stream: - assert yaml.safe_load(stream) == {key: msg if not isinstance(msg, tuple) else list(msg)} - - -@pytest.mark.parametrize("topic, key, msg, times", [ - ("test_topic", "test_key", "test_msg", 2), - ("test", "test_key", "test_msg", 3), - ("test_topic", "test", "test_msg", 4), - ("test_topic", "test_key", "test", 2), - ("test_topic", "test_list", ["a", "b", "c"], 3), - ("test_topic", "test_tuple", ("c", "b", "a"), 4), - ("test_topic", "test_dict", {"a": 1, "b": 2, "c": 3}, 2), - ("test_topic", "test_number", 123, 3), - ("test_topic", "test_float", 1.23, 4), - ("test_topic", "test_boolean", True, 2), - ("test_topic", "test_none", None, 3)]) -def test_aiowrite_with_multiple_calls(msg_local_config, event_loop, topic, key, msg, times): + with open(file_path, "r") as stream: + assert yaml.safe_load(stream) == { + key: msg if not isinstance(msg, tuple) else list(msg) + } + + +@pytest.mark.parametrize( + "topic, key, msg, times", + [ + ("test_topic", "test_key", "test_msg", 2), + ("test", "test_key", "test_msg", 3), + ("test_topic", "test", "test_msg", 4), + ("test_topic", "test_key", "test", 2), + ("test_topic", "test_list", ["a", "b", "c"], 3), + ("test_topic", "test_tuple", ("c", "b", "a"), 4), + ("test_topic", "test_dict", {"a": 1, "b": 2, "c": 3}, 2), + ("test_topic", "test_number", 123, 3), + ("test_topic", "test_float", 1.23, 4), + ("test_topic", "test_boolean", True, 2), + ("test_topic", "test_none", None, 3), + ], +) +def test_aiowrite_with_multiple_calls( + msg_local_config, event_loop, topic, key, msg, times +): file_path = msg_local_config.path + topic for _ in range(times): event_loop.run_until_complete(msg_local_config.aiowrite(topic, key, msg)) assert os.path.exists(file_path) - with open(file_path, 'r') as stream: + with open(file_path, "r") as stream: for _ in range(times): data = stream.readline() - assert yaml.safe_load(data) == {key: msg if not isinstance(msg, tuple) else list(msg)} + assert yaml.safe_load(data) == { + key: msg if not isinstance(msg, tuple) else list(msg) + } def test_aiowrite_exception(msg_local_config, event_loop): msg_local_config.files_write = MagicMock() msg_local_config.files_write.__contains__.side_effect = Exception() - + with pytest.raises(MsgException) as excinfo: event_loop.run_until_complete(msg_local_config.aiowrite("test", "test", "test")) assert str(excinfo.value).startswith(empty_exception_message()) diff --git a/osm_common/tests/test_sol004_package.py b/osm_common/tests/test_sol004_package.py index b9f13af..dc71d91 
100644 --- a/osm_common/tests/test_sol004_package.py +++ b/osm_common/tests/test_sol004_package.py @@ -28,82 +28,124 @@ import unittest class SOL004ValidatorTest(unittest.TestCase): def test_get_package_file_hash_algorithm_from_manifest_with_metadata_dir(self): - package = SOL004Package('osm_common/tests/packages/native_charm_with_metadata_dir_vnf') - algorithm = package.get_package_file_hash_algorithm_from_manifest('Scripts/charms/simple/src/charm.py') - self.assertEqual(algorithm, 'SHA-256') + package = SOL004Package( + "osm_common/tests/packages/native_charm_with_metadata_dir_vnf" + ) + algorithm = package.get_package_file_hash_algorithm_from_manifest( + "Scripts/charms/simple/src/charm.py" + ) + self.assertEqual(algorithm, "SHA-256") def test_get_package_file_hash_algorithm_from_manifest_without_metadata_dir(self): - package = SOL004Package('osm_common/tests/packages/native_charm_without_metadata_dir_vnf') - algorithm = package.get_package_file_hash_algorithm_from_manifest('Scripts/charms/simple/src/charm.py') - self.assertEqual(algorithm, 'SHA-256') + package = SOL004Package( + "osm_common/tests/packages/native_charm_without_metadata_dir_vnf" + ) + algorithm = package.get_package_file_hash_algorithm_from_manifest( + "Scripts/charms/simple/src/charm.py" + ) + self.assertEqual(algorithm, "SHA-256") def test_get_package_file_hash_algorithm_from_manifest_on_non_existent_file(self): - package = SOL004Package('osm_common/tests/packages/native_charm_with_metadata_dir_vnf') + package = SOL004Package( + "osm_common/tests/packages/native_charm_with_metadata_dir_vnf" + ) with self.assertRaises(SOL004PackageException): - package.get_package_file_hash_algorithm_from_manifest('Non/Existing/file') + package.get_package_file_hash_algorithm_from_manifest("Non/Existing/file") def test_get_package_file_hash_digest_from_manifest_with_metadata_dir(self): - package = SOL004Package('osm_common/tests/packages/native_charm_with_metadata_dir_vnf') - digest = package.get_package_file_hash_digest_from_manifest('Scripts/charms/simple/src/charm.py') - self.assertEqual(digest, '7895f7b9e1b7ed5b5bcd64398950ca95b456d7fc973334351474eed466c2f480') + package = SOL004Package( + "osm_common/tests/packages/native_charm_with_metadata_dir_vnf" + ) + digest = package.get_package_file_hash_digest_from_manifest( + "Scripts/charms/simple/src/charm.py" + ) + self.assertEqual( + digest, "ea72f897a966e6174ed9164fabc3c500df5a2f712eb6b22ab2408afb07d04d14" + ) def test_get_package_file_hash_digest_from_manifest_without_metadata_dir(self): - package = SOL004Package('osm_common/tests/packages/native_charm_without_metadata_dir_vnf') - digest = package.get_package_file_hash_digest_from_manifest('Scripts/charms/simple/src/charm.py') - self.assertEqual(digest, '7895f7b9e1b7ed5b5bcd64398950ca95b456d7fc973334351474eed466c2f480') + package = SOL004Package( + "osm_common/tests/packages/native_charm_without_metadata_dir_vnf" + ) + digest = package.get_package_file_hash_digest_from_manifest( + "Scripts/charms/simple/src/charm.py" + ) + self.assertEqual( + digest, "ea72f897a966e6174ed9164fabc3c500df5a2f712eb6b22ab2408afb07d04d14" + ) def test_get_package_file_hash_digest_from_manifest_on_non_existent_file(self): - package = SOL004Package('osm_common/tests/packages/native_charm_with_metadata_dir_vnf') + package = SOL004Package( + "osm_common/tests/packages/native_charm_with_metadata_dir_vnf" + ) with self.assertRaises(SOL004PackageException): - package.get_package_file_hash_digest_from_manifest('Non/Existing/file') + 
package.get_package_file_hash_digest_from_manifest("Non/Existing/file") - def test_get_package_file_hash_digest_from_manifest_on_non_existing_hash_entry(self): - package = SOL004Package('osm_common/tests/packages/invalid_package_vnf') + def test_get_package_file_hash_digest_from_manifest_on_non_existing_hash_entry( + self, + ): + package = SOL004Package("osm_common/tests/packages/invalid_package_vnf") with self.assertRaises(SOL004PackageException): - package.get_package_file_hash_digest_from_manifest('Scripts/charms/simple/hooks/upgrade-charm') + package.get_package_file_hash_digest_from_manifest( + "Scripts/charms/simple/hooks/upgrade-charm" + ) def test_validate_package_file_hash_with_metadata_dir(self): - package = SOL004Package('osm_common/tests/packages/native_charm_with_metadata_dir_vnf') - package.validate_package_file_hash('Scripts/charms/simple/src/charm.py') + package = SOL004Package( + "osm_common/tests/packages/native_charm_with_metadata_dir_vnf" + ) + package.validate_package_file_hash("Scripts/charms/simple/src/charm.py") def test_validate_package_file_hash_without_metadata_dir(self): - package = SOL004Package('osm_common/tests/packages/native_charm_without_metadata_dir_vnf') - package.validate_package_file_hash('Scripts/charms/simple/src/charm.py') + package = SOL004Package( + "osm_common/tests/packages/native_charm_without_metadata_dir_vnf" + ) + package.validate_package_file_hash("Scripts/charms/simple/src/charm.py") def test_validate_package_file_hash_on_non_existing_file(self): - package = SOL004Package('osm_common/tests/packages/native_charm_with_metadata_dir_vnf') + package = SOL004Package( + "osm_common/tests/packages/native_charm_with_metadata_dir_vnf" + ) with self.assertRaises(SOL004PackageException): - package.validate_package_file_hash('Non/Existing/file') + package.validate_package_file_hash("Non/Existing/file") def test_validate_package_file_hash_on_wrong_manifest_hash(self): - package = SOL004Package('osm_common/tests/packages/invalid_package_vnf') + package = SOL004Package("osm_common/tests/packages/invalid_package_vnf") with self.assertRaises(SOL004PackageException): - package.validate_package_file_hash('Scripts/charms/simple/hooks/start') + package.validate_package_file_hash("Scripts/charms/simple/hooks/start") def test_validate_package_file_hash_on_unsupported_hash_algorithm(self): - package = SOL004Package('osm_common/tests/packages/invalid_package_vnf') + package = SOL004Package("osm_common/tests/packages/invalid_package_vnf") with self.assertRaises(SOL004PackageException): - package.validate_package_file_hash('Scripts/charms/simple/src/charm.py') + package.validate_package_file_hash("Scripts/charms/simple/src/charm.py") def test_validate_package_hashes_with_metadata_dir(self): - package = SOL004Package('osm_common/tests/packages/native_charm_with_metadata_dir_vnf') + package = SOL004Package( + "osm_common/tests/packages/native_charm_with_metadata_dir_vnf" + ) package.validate_package_hashes() def test_validate_package_hashes_without_metadata_dir(self): - package = SOL004Package('osm_common/tests/packages/native_charm_without_metadata_dir_vnf') + package = SOL004Package( + "osm_common/tests/packages/native_charm_without_metadata_dir_vnf" + ) package.validate_package_hashes() def test_validate_package_hashes_on_invalid_package(self): - package = SOL004Package('osm_common/tests/packages/invalid_package_vnf') + package = SOL004Package("osm_common/tests/packages/invalid_package_vnf") with self.assertRaises(SOL004PackageException): 
package.validate_package_hashes() def test_get_descriptor_location_with_metadata_dir(self): - package = SOL004Package('osm_common/tests/packages/native_charm_with_metadata_dir_vnf') + package = SOL004Package( + "osm_common/tests/packages/native_charm_with_metadata_dir_vnf" + ) descriptor_path = package.get_descriptor_location() - self.assertEqual(descriptor_path, 'Definitions/native_charm_vnfd.yaml') + self.assertEqual(descriptor_path, "Definitions/native_charm_vnfd.yaml") def test_get_descriptor_location_without_metadata_dir(self): - package = SOL004Package('osm_common/tests/packages/native_charm_without_metadata_dir_vnf') + package = SOL004Package( + "osm_common/tests/packages/native_charm_without_metadata_dir_vnf" + ) descriptor_path = package.get_descriptor_location() - self.assertEqual(descriptor_path, 'native_charm_vnfd.yaml') + self.assertEqual(descriptor_path, "native_charm_vnfd.yaml") diff --git a/setup.py b/setup.py index f53c839..cb19584 100644 --- a/setup.py +++ b/setup.py @@ -22,24 +22,23 @@ from setuptools import setup here = os.path.abspath(os.path.dirname(__file__)) _name = "osm_common" -README = open(os.path.join(here, 'README.rst')).read() +README = open(os.path.join(here, "README.rst")).read() setup( name=_name, - description='OSM common utilities', + description="OSM common utilities", long_description=README, - version_command=('git describe --tags --long --dirty --match v*', 'pep440-git-full'), - - author='ETSI OSM', - author_email='osmsupport@etsi.com', - maintainer='ETSI OSM', - maintainer_email='osmsupport@etsi.com', - - url='https://osm.etsi.org/gitweb/?p=osm/common.git;a=summary', - license='Apache 2.0', - setup_requires=['setuptools-version-command'], - + version_command=( + "git describe --tags --long --dirty --match v*", + "pep440-git-full", + ), + author="ETSI OSM", + author_email="osmsupport@etsi.com", + maintainer="ETSI OSM", + maintainer_email="osmsupport@etsi.com", + url="https://osm.etsi.org/gitweb/?p=osm/common.git;a=summary", + license="Apache 2.0", + setup_requires=["setuptools-version-command"], packages=[_name], include_package_data=True, - ) diff --git a/tox.ini b/tox.ini index c3341ea..9ab9b46 100644 --- a/tox.ini +++ b/tox.ini @@ -34,7 +34,8 @@ deps = -r{toxinidir}/requirements.txt deps = black skip_install = true commands = - - black --check --diff osm_common/ + black --check --diff osm_common/ + black --check --diff setup.py ####################################################################################### [testenv:cover] @@ -61,7 +62,7 @@ deps = {[testenv]deps} -r{toxinidir}/requirements-test.txt pylint commands = - pylint -E osm_common + pylint -E osm_common ####################################################################################### [testenv:safety] @@ -102,8 +103,10 @@ whitelist_externals = sh ignore = W291, W293, + W503, E123, E125, + E203, E226, E241 exclude = -- 2.25.1 From 98fc8f01d18d1a05c16fed7ccee355611a6e20ce Mon Sep 17 00:00:00 2001 From: bravof Date: Thu, 4 Nov 2021 21:16:00 -0300 Subject: [PATCH 05/16] feature: sol004 and sol007 Change-Id: Ib4c4656f3660011a0f1ee80e4903a31958068be5 Signed-off-by: bravof --- osm_common/fslocal.py | 14 +- osm_common/fsmongo.py | 68 ++++-- osm_common/sol004_package.py | 221 +++++------------- osm_common/sol007_package.py | 119 ++++++++++ osm_common/sol_package.py | 289 ++++++++++++++++++++++++ osm_common/tests/test_fsmongo.py | 2 +- osm_common/tests/test_sol004_package.py | 151 ------------- 7 files changed, 525 insertions(+), 339 deletions(-) create mode 100644 
osm_common/sol007_package.py
 create mode 100644 osm_common/sol_package.py
 delete mode 100644 osm_common/tests/test_sol004_package.py

diff --git a/osm_common/fslocal.py b/osm_common/fslocal.py
index 4a82c4f..ae22c6a 100644
--- a/osm_common/fslocal.py
+++ b/osm_common/fslocal.py
@@ -16,6 +16,8 @@
 # limitations under the License.
 
 import os
+import tarfile
+import zipfile
 import logging
 
 # import tarfile
@@ -116,10 +118,10 @@ class FsLocal(FsBase):
             f = "/".join(storage)
         return os.path.getsize(self.path + f)
 
-    def file_extract(self, tar_object, path):
+    def file_extract(self, compressed_object, path):
         """
         extract a tar file
-        :param tar_object: object of type tar
+        :param compressed_object: object of type tar or zip
         :param path: can be a str or a str list, or a tar object where to extract the tar_object
         :return: None
         """
@@ -127,7 +129,13 @@
             f = self.path + path
         else:
             f = self.path + "/".join(path)
-        tar_object.extractall(path=f)
+
+        if type(compressed_object) is tarfile.TarFile:
+            compressed_object.extractall(path=f)
+        elif (
+            type(compressed_object) is zipfile.ZipFile
+        ):  # zipfile.ZipFile exposes the same extractall() interface as TarFile
+            compressed_object.extractall(path=f)
 
     def file_open(self, storage, mode):
         """
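With this change FsLocal.file_extract() accepts either archive type through a single entry point. A minimal, hypothetical usage sketch follows; the archive names and storage path are assumptions for illustration only:

    import tarfile
    import zipfile

    from osm_common.fslocal import FsLocal

    fs = FsLocal()
    fs.fs_connect({"path": "/tmp/storage/"})  # the directory must already exist

    with tarfile.open("/tmp/pkg.tar.gz", "r:gz") as tar:
        fs.file_extract(tar, "unpacked_tar")  # extracts under /tmp/storage/unpacked_tar

    with zipfile.ZipFile("/tmp/pkg.zip", "r") as zf:
        fs.file_extract(zf, "unpacked_zip")

The FsMongo counterpart below cannot delegate to extractall(), since each member must be re-uploaded to GridFS with its type and permissions recorded as metadata.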
diff --git a/osm_common/fsmongo.py b/osm_common/fsmongo.py
index ff37c42..7fb071a 100644
--- a/osm_common/fsmongo.py
+++ b/osm_common/fsmongo.py
@@ -22,6 +22,8 @@ from io import BytesIO, StringIO
 import logging
 import os
 import datetime
+import tarfile
+import zipfile
 
 from gridfs import GridFSBucket, errors
 from osm_common.fsbase import FsBase, FsException
@@ -368,35 +370,58 @@
         return requested_file.length
 
-    def file_extract(self, tar_object, path):
+    def file_extract(self, compressed_object, path):
         """
         extract a tar file
-        :param tar_object: object of type tar
+        :param compressed_object: object of type tar or zip
         :param path: can be a str or a str list, or a tar object where to extract the tar_object
         :return: None
         """
         f = path if isinstance(path, str) else "/".join(path)
 
-        for member in tar_object.getmembers():
-            if member.isfile():
-                stream = tar_object.extractfile(member)
-            elif member.issym():
-                stream = BytesIO(member.linkname.encode("utf-8"))
-            else:
-                stream = BytesIO()
+        if type(compressed_object) is tarfile.TarFile:
+            for member in compressed_object.getmembers():
+                if member.isfile():
+                    stream = compressed_object.extractfile(member)
+                elif member.issym():
+                    stream = BytesIO(member.linkname.encode("utf-8"))
+                else:
+                    stream = BytesIO()
 
-            if member.isfile():
-                file_type = "file"
-            elif member.issym():
-                file_type = "sym"
-            else:
-                file_type = "dir"
+                if member.isfile():
+                    file_type = "file"
+                elif member.issym():
+                    file_type = "sym"
+                else:
+                    file_type = "dir"
 
-            metadata = {"type": file_type, "permissions": member.mode}
+                metadata = {"type": file_type, "permissions": member.mode}
 
-            self.fs.upload_from_stream(f + "/" + member.name, stream, metadata=metadata)
+                self.fs.upload_from_stream(
+                    f + "/" + member.name, stream, metadata=metadata
+                )
 
-            stream.close()
+                stream.close()
+        elif type(compressed_object) is zipfile.ZipFile:
+            for member in compressed_object.infolist():
+                if member.is_dir():
+                    stream = BytesIO()
+                else:
+                    stream = compressed_object.read(member)
+
+                if member.is_dir():
+                    file_type = "dir"
+                else:
+                    file_type = "file"
+
+                metadata = {"type": file_type}
+
+                self.fs.upload_from_stream(
+                    f + "/" + member.filename, stream, metadata=metadata
+                )
+
+                if member.is_dir():
+                    stream.close()
 
     def file_open(self, storage, mode):
         """
@@ -447,6 +472,9 @@
                 http_code=HTTPStatus.NOT_FOUND,
             )
 
+        if f.endswith("/"):
+            f = f[:-1]
+
         files_cursor = self.fs.find(
             {"filename": {"$regex": "^{}/([^/])*".format(f)}}
         )

diff --git a/osm_common/sol004_package.py b/osm_common/sol004_package.py
index e6b40b4..813e52d 100644
--- a/osm_common/sol004_package.py
+++ b/osm_common/sol004_package.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 
-# Copyright 2020 Whitestack, LLC
+# Copyright 2021 Whitestack, LLC
 # *************************************************************
 #
 # This file is part of OSM common repository.
@@ -19,7 +19,7 @@
 # under the License.
 #
 # For those usages not covered by the Apache License, Version 2.0 please
-# contact: agarcia@whitestack.com
+# contact: agarcia@whitestack.com or fbravo@whitestack.com
 ##
 
 """Python module for interacting with ETSI GS NFV-SOL004 compliant packages
@@ -56,181 +56,70 @@ native_charm_vnf/ native_charm_vnf/
 """
 
 import yaml
+import datetime
 import os
-import hashlib
-
-
-_METADATA_FILE_PATH = "TOSCA-Metadata/TOSCA.meta"
-_METADATA_DESCRIPTOR_FIELD = "Entry-Definitions"
-_METADATA_MANIFEST_FIELD = "ETSI-Entry-Manifest"
-_METADATA_CHANGELOG_FIELD = "ETSI-Entry-Change-Log"
-_METADATA_LICENSES_FIELD = "ETSI-Entry-Licenses"
-_METADATA_DEFAULT_CHANGELOG_PATH = "ChangeLog.txt"
-_METADATA_DEFAULT_LICENSES_PATH = "Licenses"
-_MANIFEST_FILE_PATH_FIELD = "Source"
-_MANIFEST_FILE_HASH_ALGORITHM_FIELD = "Algorithm"
-_MANIFEST_FILE_HASH_DIGEST_FIELD = "Hash"
+from .sol_package import SOLPackage
 
 
 class SOL004PackageException(Exception):
     pass
 
 
-class SOL004Package:
-    def __init__(self, package_path=""):
-        self._package_path = package_path
-        self._package_metadata = self._parse_package_metadata()
-        self._manifest_data = self._parse_manifest_data()
-
-    def _parse_package_metadata(self):
-        try:
-            return self._parse_package_metadata_with_metadata_dir()
-        except FileNotFoundError:
-            return self._parse_package_metadata_without_metadata_dir()
-
-    def _parse_package_metadata_with_metadata_dir(self):
-        try:
-            return self._parse_file_in_blocks(_METADATA_FILE_PATH)
-        except FileNotFoundError as e:
-            raise e
-        except (Exception, OSError) as e:
-            raise SOL004PackageException(
-                "Error parsing {}: {}".format(_METADATA_FILE_PATH, e)
-            )
-
-    def _parse_package_metadata_without_metadata_dir(self):
-        package_root_files = {f for f in os.listdir(self._package_path)}
-        package_root_yamls = [
-            f for f in package_root_files if f.endswith(".yml") or f.endswith(".yaml")
-        ]
-        if len(package_root_yamls) != 1:
-            error_msg = "Error parsing package metadata: there should be exactly 1 descriptor YAML, found {}"
-            raise SOL004PackageException(error_msg.format(len(package_root_yamls)))
-        # TODO: Parse extra metadata from descriptor YAML?
- return [ - { - _METADATA_DESCRIPTOR_FIELD: package_root_yamls[0], - _METADATA_MANIFEST_FIELD: "{}.mf".format( - os.path.splitext(package_root_yamls[0])[0] - ), - _METADATA_CHANGELOG_FIELD: _METADATA_DEFAULT_CHANGELOG_PATH, - _METADATA_LICENSES_FIELD: _METADATA_DEFAULT_LICENSES_PATH, - } - ] - - def _parse_manifest_data(self): - manifest_path = None - for tosca_meta in self._package_metadata: - if _METADATA_MANIFEST_FIELD in tosca_meta: - manifest_path = tosca_meta[_METADATA_MANIFEST_FIELD] - break - else: - error_msg = "Error parsing {}: no {} field on path".format( - _METADATA_FILE_PATH, _METADATA_MANIFEST_FIELD - ) - raise SOL004PackageException(error_msg) - - try: - return self._parse_file_in_blocks(manifest_path) - except (Exception, OSError) as e: - raise SOL004PackageException( - "Error parsing {}: {}".format(manifest_path, e) - ) +class SOL004Package(SOLPackage): + _MANIFEST_VNFD_ID = "vnfd_id" + _MANIFEST_VNFD_PRODUCT_NAME = "vnfd_product_name" + _MANIFEST_VNFD_PROVIDER_ID = "vnfd_provider_id" + _MANIFEST_VNFD_SOFTWARE_VERSION = "vnfd_software_version" + _MANIFEST_VNFD_PACKAGE_VERSION = "vnfd_package_version" + _MANIFEST_VNFD_RELEASE_DATE_TIME = "vnfd_release_date_time" + _MANIFEST_VNFD_COMPATIBLE_SPECIFICATION_VERSIONS = ( + "compatible_specification_versions" + ) + _MANIFEST_VNFM_INFO = "vnfm_info" + + _MANIFEST_ALL_FIELDS = [ + _MANIFEST_VNFD_ID, + _MANIFEST_VNFD_PRODUCT_NAME, + _MANIFEST_VNFD_PROVIDER_ID, + _MANIFEST_VNFD_SOFTWARE_VERSION, + _MANIFEST_VNFD_PACKAGE_VERSION, + _MANIFEST_VNFD_RELEASE_DATE_TIME, + _MANIFEST_VNFD_COMPATIBLE_SPECIFICATION_VERSIONS, + _MANIFEST_VNFM_INFO, + ] - def _get_package_file_full_path(self, file_relative_path): - return os.path.join(self._package_path, file_relative_path) - - def _parse_file_in_blocks(self, file_relative_path): - file_path = self._get_package_file_full_path(file_relative_path) - with open(file_path) as f: - blocks = f.read().split("\n\n") - parsed_blocks = map(yaml.safe_load, blocks) - return [block for block in parsed_blocks if block is not None] - - def _get_package_file_manifest_data(self, file_relative_path): - for file_data in self._manifest_data: - if file_data.get(_MANIFEST_FILE_PATH_FIELD, "") == file_relative_path: - return file_data + def __init__(self, package_path=""): + super().__init__(package_path) - error_msg = ( - "Error parsing {} manifest data: file not found on manifest file".format( - file_relative_path - ) + def generate_manifest_data_from_descriptor(self): + descriptor_path = os.path.join( + self._package_path, self.get_descriptor_location() ) - raise SOL004PackageException(error_msg) - - def get_package_file_hash_digest_from_manifest(self, file_relative_path): - """Returns the hash digest of a file inside this package as specified on the manifest file.""" - file_manifest_data = self._get_package_file_manifest_data(file_relative_path) - try: - return file_manifest_data[_MANIFEST_FILE_HASH_DIGEST_FIELD] - except Exception as e: - raise SOL004PackageException( - "Error parsing {} hash digest: {}".format(file_relative_path, e) + with open(descriptor_path, "r") as descriptor: + try: + vnfd_data = yaml.safe_load(descriptor)["vnfd"] + except yaml.YAMLError as e: + print("Error reading descriptor {}: {}".format(descriptor_path, e)) + return + + self._manifest_metadata = {} + self._manifest_metadata[self._MANIFEST_VNFD_ID] = vnfd_data.get( + "id", "default-id" ) - - def get_package_file_hash_algorithm_from_manifest(self, file_relative_path): - """Returns the hash algorithm of a file inside this package as 
specified on the manifest file.""" - file_manifest_data = self._get_package_file_manifest_data(file_relative_path) - try: - return file_manifest_data[_MANIFEST_FILE_HASH_ALGORITHM_FIELD] - except Exception as e: - raise SOL004PackageException( - "Error parsing {} hash digest: {}".format(file_relative_path, e) + self._manifest_metadata[self._MANIFEST_VNFD_PRODUCT_NAME] = vnfd_data.get( + "product-name", "default-product-name" ) - - @staticmethod - def _get_hash_function_from_hash_algorithm(hash_algorithm): - function_to_algorithm = {"SHA-256": hashlib.sha256, "SHA-512": hashlib.sha512} - if hash_algorithm not in function_to_algorithm: - error_msg = ( - "Error checking hash function: hash algorithm {} not supported".format( - hash_algorithm - ) + self._manifest_metadata[self._MANIFEST_VNFD_PROVIDER_ID] = vnfd_data.get( + "provider", "OSM" ) - raise SOL004PackageException(error_msg) - return function_to_algorithm[hash_algorithm] - - def _calculate_file_hash(self, file_relative_path, hash_algorithm): - file_path = self._get_package_file_full_path(file_relative_path) - hash_function = self._get_hash_function_from_hash_algorithm(hash_algorithm) - try: - with open(file_path, "rb") as f: - return hash_function(f.read()).hexdigest() - except Exception as e: - raise SOL004PackageException( - "Error hashing {}: {}".format(file_relative_path, e) + self._manifest_metadata[ + self._MANIFEST_VNFD_SOFTWARE_VERSION + ] = vnfd_data.get("version", "1.0") + self._manifest_metadata[self._MANIFEST_VNFD_PACKAGE_VERSION] = "1.0.0" + self._manifest_metadata[self._MANIFEST_VNFD_RELEASE_DATE_TIME] = ( + datetime.datetime.now().astimezone().isoformat() ) - - def validate_package_file_hash(self, file_relative_path): - """Validates the integrity of a file using the hash algorithm and digest on the package manifest.""" - hash_algorithm = self.get_package_file_hash_algorithm_from_manifest( - file_relative_path - ) - file_hash = self._calculate_file_hash(file_relative_path, hash_algorithm) - expected_file_hash = self.get_package_file_hash_digest_from_manifest( - file_relative_path - ) - if file_hash != expected_file_hash: - error_msg = "Error validating {} hash: calculated hash {} is different than manifest hash {}" - raise SOL004PackageException( - error_msg.format(file_relative_path, file_hash, expected_file_hash) - ) - - def validate_package_hashes(self): - """Validates the integrity of all files listed on the package manifest.""" - for file_data in self._manifest_data: - if _MANIFEST_FILE_PATH_FIELD in file_data: - file_relative_path = file_data[_MANIFEST_FILE_PATH_FIELD] - self.validate_package_file_hash(file_relative_path) - - def get_descriptor_location(self): - """Returns this package descriptor location as a relative path from the package root.""" - for tosca_meta in self._package_metadata: - if _METADATA_DESCRIPTOR_FIELD in tosca_meta: - return tosca_meta[_METADATA_DESCRIPTOR_FIELD] - - error_msg = "Error: no {} entry found on {}".format( - _METADATA_DESCRIPTOR_FIELD, _METADATA_FILE_PATH - ) - raise SOL004PackageException(error_msg) + self._manifest_metadata[ + self._MANIFEST_VNFD_COMPATIBLE_SPECIFICATION_VERSIONS + ] = "2.7.1" + self._manifest_metadata[self._MANIFEST_VNFM_INFO] = "OSM" diff --git a/osm_common/sol007_package.py b/osm_common/sol007_package.py new file mode 100644 index 0000000..4add8a9 --- /dev/null +++ b/osm_common/sol007_package.py @@ -0,0 +1,119 @@ +# -*- coding: utf-8 -*- + +# Copyright 2021 Whitestack, LLC +# ************************************************************* +# +# This file is 
part of OSM common repository. +# All Rights Reserved to Whitestack, LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +# For those usages not covered by the Apache License, Version 2.0 please +# contact: fbravo@whitestack.com +## + +"""Python module for interacting with ETSI GS NFV-SOL007 compliant packages + +This module provides a SOL007Package class for validating and interacting with +ETSI SOL007 packages. A valid SOL007 package may have its files arranged according +to one of the following two structures: + +SOL007 with metadata directory SOL007 without metadata directory + +native_charm_vnf/ native_charm_vnf/ +├── TOSCA-Metadata ├── native_charm_nsd.mf +│ └── TOSCA.meta ├── native_charm_nsd.yaml +├── manifest.mf ├── ChangeLog.txt +├── Definitions ├── Licenses +│ └── native_charm_nsd.yaml │ └── license.lic +├── Files ├── Files +│ ├── icons │ └── icons +│ │ └── osm.png │ └── osm.png +│ ├── Licenses └── Scripts +│ │ └── license.lic ├── cloud_init +│ └── changelog.txt │ └── cloud-config.txt +└── Scripts └── charms + ├── cloud_init └── simple + │ └── cloud-config.txt ├── config.yaml + └── charms ├── hooks + └── simple │ ├── install + ├── config.yaml ... + ├── hooks │ + │ ├── install └── src + ... └── charm.py + └── src + └── charm.py +""" + +import yaml +import datetime +import os +from .sol_package import SOLPackage + + +class SOL007PackageException(Exception): + pass + + +class SOL007Package(SOLPackage): + _MANIFEST_NSD_INVARIANT_ID = "nsd_invariant_id" + _MANIFEST_NSD_NAME = "nsd_name" + _MANIFEST_NSD_DESIGNER = "nsd_designer" + _MANIFEST_NSD_FILE_STRUCTURE_VERSION = "nsd_file_structure_version" + _MANIFEST_NSD_RELEASE_DATE_TIME = "nsd_release_date_time" + _MANIFEST_NSD_COMPATIBLE_SPECIFICATION_VERSIONS = ( + "compatible_specification_versions" + ) + + _MANIFEST_ALL_FIELDS = [ + _MANIFEST_NSD_INVARIANT_ID, + _MANIFEST_NSD_NAME, + _MANIFEST_NSD_DESIGNER, + _MANIFEST_NSD_FILE_STRUCTURE_VERSION, + _MANIFEST_NSD_RELEASE_DATE_TIME, + _MANIFEST_NSD_COMPATIBLE_SPECIFICATION_VERSIONS, + ] + + def __init__(self, package_path=""): + super().__init__(package_path) + + def generate_manifest_data_from_descriptor(self): + descriptor_path = os.path.join( + self._package_path, self.get_descriptor_location() + ) + with open(descriptor_path, "r") as descriptor: + try: + nsd_data = yaml.safe_load(descriptor)["nsd"] + except yaml.YAMLError as e: + print("Error reading descriptor {}: {}".format(descriptor_path, e)) + return + + self._manifest_metadata = {} + self._manifest_metadata[self._MANIFEST_NSD_INVARIANT_ID] = nsd_data.get( + "id", "default-id" + ) + self._manifest_metadata[self._MANIFEST_NSD_NAME] = nsd_data.get( + "name", "default-name" + ) + self._manifest_metadata[self._MANIFEST_NSD_DESIGNER] = nsd_data.get( + "designer", "OSM" + ) + self._manifest_metadata[ + self._MANIFEST_NSD_FILE_STRUCTURE_VERSION + ] = nsd_data.get("version", "1.0") + self._manifest_metadata[self._MANIFEST_NSD_RELEASE_DATE_TIME] = ( + datetime.datetime.now().astimezone().isoformat() + ) + self._manifest_metadata[ + 
self._MANIFEST_NSD_COMPATIBLE_SPECIFICATION_VERSIONS + ] = "2.7.1" diff --git a/osm_common/sol_package.py b/osm_common/sol_package.py new file mode 100644 index 0000000..e336cd5 --- /dev/null +++ b/osm_common/sol_package.py @@ -0,0 +1,289 @@ +# -*- coding: utf-8 -*- + +# Copyright 2021 Whitestack, LLC +# ************************************************************* +# +# This file is part of OSM common repository. +# All Rights Reserved to Whitestack, LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +# For those usages not covered by the Apache License, Version 2.0 please +# contact: fbravo@whitestack.com or agarcia@whitestack.com +## + +import os +import yaml +import hashlib + + +class SOLPackageException(Exception): + pass + + +class SOLPackage: + _METADATA_FILE_PATH = "TOSCA-Metadata/TOSCA.meta" + _METADATA_DESCRIPTOR_FIELD = "Entry-Definitions" + _METADATA_MANIFEST_FIELD = "ETSI-Entry-Manifest" + _METADATA_CHANGELOG_FIELD = "ETSI-Entry-Change-Log" + _METADATA_LICENSES_FIELD = "ETSI-Entry-Licenses" + _METADATA_DEFAULT_CHANGELOG_PATH = "ChangeLog.txt" + _METADATA_DEFAULT_LICENSES_PATH = "Licenses" + _MANIFEST_FILE_PATH_FIELD = "Source" + _MANIFEST_FILE_HASH_ALGORITHM_FIELD = "Algorithm" + _MANIFEST_FILE_HASH_DIGEST_FIELD = "Hash" + + _MANIFEST_ALL_FIELDS = [] + + def __init__(self, package_path=""): + self._package_path = package_path + + self._package_metadata = self._parse_package_metadata() + + try: + self._manifest_data = self._parse_manifest_data() + except Exception: + self._manifest_data = None + + try: + self._manifest_metadata = self._parse_manifest_metadata() + except Exception: + self._manifest_metadata = None + + def _parse_package_metadata(self): + try: + return self._parse_package_metadata_with_metadata_dir() + except FileNotFoundError: + return self._parse_package_metadata_without_metadata_dir() + + def _parse_package_metadata_with_metadata_dir(self): + try: + return self._parse_file_in_blocks(self._METADATA_FILE_PATH) + except FileNotFoundError as e: + raise e + except (Exception, OSError) as e: + raise SOLPackageException( + "Error parsing {}: {}".format(self._METADATA_FILE_PATH, e) + ) + + def _parse_package_metadata_without_metadata_dir(self): + package_root_files = {f for f in os.listdir(self._package_path)} + package_root_yamls = [ + f for f in package_root_files if f.endswith(".yml") or f.endswith(".yaml") + ] + if len(package_root_yamls) != 1: + error_msg = "Error parsing package metadata: there should be exactly 1 descriptor YAML, found {}" + raise SOLPackageException(error_msg.format(len(package_root_yamls))) + + base_manifest = [ + { + SOLPackage._METADATA_DESCRIPTOR_FIELD: package_root_yamls[0], + SOLPackage._METADATA_MANIFEST_FIELD: "{}.mf".format( + os.path.splitext(package_root_yamls[0])[0] + ), + SOLPackage._METADATA_CHANGELOG_FIELD: SOLPackage._METADATA_DEFAULT_CHANGELOG_PATH, + SOLPackage._METADATA_LICENSES_FIELD: SOLPackage._METADATA_DEFAULT_LICENSES_PATH, + } + ] + + return base_manifest + + def _parse_manifest_data(self): + manifest_path = None + for 
tosca_meta in self._package_metadata: + if SOLPackage._METADATA_MANIFEST_FIELD in tosca_meta: + manifest_path = tosca_meta[SOLPackage._METADATA_MANIFEST_FIELD] + break + else: + error_msg = "Error parsing {}: no {} field on path".format( + self._METADATA_FILE_PATH, self._METADATA_MANIFEST_FIELD + ) + raise SOLPackageException(error_msg) + + try: + return self._parse_file_in_blocks(manifest_path) + + except (Exception, OSError) as e: + raise SOLPackageException("Error parsing {}: {}".format(manifest_path, e)) + + def _parse_manifest_metadata(self): + try: + base_manifest = {} + manifest_file = os.open( + os.path.join( + self._package_path, base_manifest[self._METADATA_MANIFEST_FIELD] + ), + "rw", + ) + for line in manifest_file: + fields_in_line = line.split(":", maxsplit=1) + fields_in_line[0] = fields_in_line[0].strip() + fields_in_line[1] = fields_in_line[1].strip() + if fields_in_line[0] in self._MANIFEST_ALL_FIELDS: + base_manifest[fields_in_line[0]] = fields_in_line[1] + return base_manifest + except (Exception, OSError) as e: + raise SOLPackageException( + "Error parsing {}: {}".format( + base_manifest[SOLPackage._METADATA_MANIFEST_FIELD], e + ) + ) + + def _get_package_file_full_path(self, file_relative_path): + return os.path.join(self._package_path, file_relative_path) + + def _parse_file_in_blocks(self, file_relative_path): + file_path = self._get_package_file_full_path(file_relative_path) + with open(file_path) as f: + blocks = f.read().split("\n\n") + parsed_blocks = map(yaml.safe_load, blocks) + return [block for block in parsed_blocks if block is not None] + + def _get_package_file_manifest_data(self, file_relative_path): + for file_data in self._manifest_data: + if ( + file_data.get(SOLPackage._MANIFEST_FILE_PATH_FIELD, "") + == file_relative_path + ): + return file_data + + error_msg = ( + "Error parsing {} manifest data: file not found on manifest file".format( + file_relative_path + ) + ) + raise SOLPackageException(error_msg) + + def get_package_file_hash_digest_from_manifest(self, file_relative_path): + """Returns the hash digest of a file inside this package as specified on the manifest file.""" + file_manifest_data = self._get_package_file_manifest_data(file_relative_path) + try: + return file_manifest_data[SOLPackage._MANIFEST_FILE_HASH_DIGEST_FIELD] + except Exception as e: + raise SOLPackageException( + "Error parsing {} hash digest: {}".format(file_relative_path, e) + ) + + def get_package_file_hash_algorithm_from_manifest(self, file_relative_path): + """Returns the hash algorithm of a file inside this package as specified on the manifest file.""" + file_manifest_data = self._get_package_file_manifest_data(file_relative_path) + try: + return file_manifest_data[SOLPackage._MANIFEST_FILE_HASH_ALGORITHM_FIELD] + except Exception as e: + raise SOLPackageException( + "Error parsing {} hash digest: {}".format(file_relative_path, e) + ) + + @staticmethod + def _get_hash_function_from_hash_algorithm(hash_algorithm): + function_to_algorithm = {"SHA-256": hashlib.sha256, "SHA-512": hashlib.sha512} + if hash_algorithm not in function_to_algorithm: + error_msg = ( + "Error checking hash function: hash algorithm {} not supported".format( + hash_algorithm + ) + ) + raise SOLPackageException(error_msg) + return function_to_algorithm[hash_algorithm] + + def _calculate_file_hash(self, file_relative_path, hash_algorithm): + file_path = self._get_package_file_full_path(file_relative_path) + hash_function = self._get_hash_function_from_hash_algorithm(hash_algorithm) + try: + with 
open(file_path, "rb") as f: + return hash_function(f.read()).hexdigest() + except Exception as e: + raise SOLPackageException( + "Error hashing {}: {}".format(file_relative_path, e) + ) + + def validate_package_file_hash(self, file_relative_path): + """Validates the integrity of a file using the hash algorithm and digest on the package manifest.""" + hash_algorithm = self.get_package_file_hash_algorithm_from_manifest( + file_relative_path + ) + file_hash = self._calculate_file_hash(file_relative_path, hash_algorithm) + expected_file_hash = self.get_package_file_hash_digest_from_manifest( + file_relative_path + ) + if file_hash != expected_file_hash: + error_msg = "Error validating {} hash: calculated hash {} is different than manifest hash {}" + raise SOLPackageException( + error_msg.format(file_relative_path, file_hash, expected_file_hash) + ) + + def validate_package_hashes(self): + """Validates the integrity of all files listed on the package manifest.""" + for file_data in self._manifest_data: + if SOLPackage._MANIFEST_FILE_PATH_FIELD in file_data: + file_relative_path = file_data[SOLPackage._MANIFEST_FILE_PATH_FIELD] + self.validate_package_file_hash(file_relative_path) + + def create_or_update_metadata_file(self): + """ + Creates or updates the metadata file with the hashes calculated for each one of the package's files + """ + if not self._manifest_metadata: + self.generate_manifest_data_from_descriptor() + + self.write_manifest_data_into_file() + + def generate_manifest_data_from_descriptor(self): + pass + + def write_manifest_data_into_file(self): + with open(self.get_manifest_location(), "w") as metadata_file: + # Write manifest metadata + for metadata_entry in self._manifest_metadata: + metadata_file.write( + "{}: {}\n".format( + metadata_entry, self._manifest_metadata[metadata_entry] + ) + ) + + # Write package's files hashes + file_hashes = {} + for root, dirs, files in os.walk(self._package_path): + for a_file in files: + file_path = os.path.join(root, a_file) + file_relative_path = file_path[len(self._package_path) :] + if file_relative_path.startswith("/"): + file_relative_path = file_relative_path[1:] + file_hashes[file_relative_path] = self._calculate_file_hash( + file_relative_path, "SHA-512" + ) + + for file, hash in file_hashes.items(): + file_block = "Source: {}\nAlgorithm: SHA-512\nHash: {}\n\n".format( + file, hash + ) + metadata_file.write(file_block) + + def get_descriptor_location(self): + """Returns this package descriptor location as a relative path from the package root.""" + for tosca_meta in self._package_metadata: + if SOLPackage._METADATA_DESCRIPTOR_FIELD in tosca_meta: + return tosca_meta[SOLPackage._METADATA_DESCRIPTOR_FIELD] + + error_msg = "Error: no {} entry found on {}".format( + SOLPackage._METADATA_DESCRIPTOR_FIELD, SOLPackage._METADATA_FILE_PATH + ) + raise SOLPackageException(error_msg) + + def get_manifest_location(self): + """Return the VNF/NS manifest location as a relative path from the package root.""" + for tosca_meta in self._package_metadata: + if SOLPackage._METADATA_MANIFEST_FIELD in tosca_meta: + return tosca_meta[SOLPackage._METADATA_MANIFEST_FIELD] + + raise SOLPackageException("No manifest file defined for this package") diff --git a/osm_common/tests/test_fsmongo.py b/osm_common/tests/test_fsmongo.py index 6a8ec58..da01ff3 100644 --- a/osm_common/tests/test_fsmongo.py +++ b/osm_common/tests/test_fsmongo.py @@ -601,7 +601,7 @@ def test_file_extract(): tar = tarfile.open(tar_path, "r") fs = FsMongo() fs.fs = FakeFS() - 
fs.file_extract(tar_object=tar, path=".") + fs.file_extract(compressed_object=tar, path=".") finally: os.remove(tar_path) subprocess.call(["rm", "-rf", "./tmp"]) diff --git a/osm_common/tests/test_sol004_package.py b/osm_common/tests/test_sol004_package.py deleted file mode 100644 index dc71d91..0000000 --- a/osm_common/tests/test_sol004_package.py +++ /dev/null @@ -1,151 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Whitestack, LLC -# ************************************************************* -# -# This file is part of OSM common repository. -# All Rights Reserved to Whitestack, LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. -# -# For those usages not covered by the Apache License, Version 2.0 please -# contact: agarcia@whitestack.com -## - -from osm_common.sol004_package import SOL004Package, SOL004PackageException -import unittest - - -class SOL004ValidatorTest(unittest.TestCase): - def test_get_package_file_hash_algorithm_from_manifest_with_metadata_dir(self): - package = SOL004Package( - "osm_common/tests/packages/native_charm_with_metadata_dir_vnf" - ) - algorithm = package.get_package_file_hash_algorithm_from_manifest( - "Scripts/charms/simple/src/charm.py" - ) - self.assertEqual(algorithm, "SHA-256") - - def test_get_package_file_hash_algorithm_from_manifest_without_metadata_dir(self): - package = SOL004Package( - "osm_common/tests/packages/native_charm_without_metadata_dir_vnf" - ) - algorithm = package.get_package_file_hash_algorithm_from_manifest( - "Scripts/charms/simple/src/charm.py" - ) - self.assertEqual(algorithm, "SHA-256") - - def test_get_package_file_hash_algorithm_from_manifest_on_non_existent_file(self): - package = SOL004Package( - "osm_common/tests/packages/native_charm_with_metadata_dir_vnf" - ) - with self.assertRaises(SOL004PackageException): - package.get_package_file_hash_algorithm_from_manifest("Non/Existing/file") - - def test_get_package_file_hash_digest_from_manifest_with_metadata_dir(self): - package = SOL004Package( - "osm_common/tests/packages/native_charm_with_metadata_dir_vnf" - ) - digest = package.get_package_file_hash_digest_from_manifest( - "Scripts/charms/simple/src/charm.py" - ) - self.assertEqual( - digest, "ea72f897a966e6174ed9164fabc3c500df5a2f712eb6b22ab2408afb07d04d14" - ) - - def test_get_package_file_hash_digest_from_manifest_without_metadata_dir(self): - package = SOL004Package( - "osm_common/tests/packages/native_charm_without_metadata_dir_vnf" - ) - digest = package.get_package_file_hash_digest_from_manifest( - "Scripts/charms/simple/src/charm.py" - ) - self.assertEqual( - digest, "ea72f897a966e6174ed9164fabc3c500df5a2f712eb6b22ab2408afb07d04d14" - ) - - def test_get_package_file_hash_digest_from_manifest_on_non_existent_file(self): - package = SOL004Package( - "osm_common/tests/packages/native_charm_with_metadata_dir_vnf" - ) - with self.assertRaises(SOL004PackageException): - package.get_package_file_hash_digest_from_manifest("Non/Existing/file") - - def 
test_get_package_file_hash_digest_from_manifest_on_non_existing_hash_entry( - self, - ): - package = SOL004Package("osm_common/tests/packages/invalid_package_vnf") - with self.assertRaises(SOL004PackageException): - package.get_package_file_hash_digest_from_manifest( - "Scripts/charms/simple/hooks/upgrade-charm" - ) - - def test_validate_package_file_hash_with_metadata_dir(self): - package = SOL004Package( - "osm_common/tests/packages/native_charm_with_metadata_dir_vnf" - ) - package.validate_package_file_hash("Scripts/charms/simple/src/charm.py") - - def test_validate_package_file_hash_without_metadata_dir(self): - package = SOL004Package( - "osm_common/tests/packages/native_charm_without_metadata_dir_vnf" - ) - package.validate_package_file_hash("Scripts/charms/simple/src/charm.py") - - def test_validate_package_file_hash_on_non_existing_file(self): - package = SOL004Package( - "osm_common/tests/packages/native_charm_with_metadata_dir_vnf" - ) - with self.assertRaises(SOL004PackageException): - package.validate_package_file_hash("Non/Existing/file") - - def test_validate_package_file_hash_on_wrong_manifest_hash(self): - package = SOL004Package("osm_common/tests/packages/invalid_package_vnf") - with self.assertRaises(SOL004PackageException): - package.validate_package_file_hash("Scripts/charms/simple/hooks/start") - - def test_validate_package_file_hash_on_unsupported_hash_algorithm(self): - package = SOL004Package("osm_common/tests/packages/invalid_package_vnf") - with self.assertRaises(SOL004PackageException): - package.validate_package_file_hash("Scripts/charms/simple/src/charm.py") - - def test_validate_package_hashes_with_metadata_dir(self): - package = SOL004Package( - "osm_common/tests/packages/native_charm_with_metadata_dir_vnf" - ) - package.validate_package_hashes() - - def test_validate_package_hashes_without_metadata_dir(self): - package = SOL004Package( - "osm_common/tests/packages/native_charm_without_metadata_dir_vnf" - ) - package.validate_package_hashes() - - def test_validate_package_hashes_on_invalid_package(self): - package = SOL004Package("osm_common/tests/packages/invalid_package_vnf") - with self.assertRaises(SOL004PackageException): - package.validate_package_hashes() - - def test_get_descriptor_location_with_metadata_dir(self): - package = SOL004Package( - "osm_common/tests/packages/native_charm_with_metadata_dir_vnf" - ) - descriptor_path = package.get_descriptor_location() - self.assertEqual(descriptor_path, "Definitions/native_charm_vnfd.yaml") - - def test_get_descriptor_location_without_metadata_dir(self): - package = SOL004Package( - "osm_common/tests/packages/native_charm_without_metadata_dir_vnf" - ) - descriptor_path = package.get_descriptor_location() - self.assertEqual(descriptor_path, "native_charm_vnfd.yaml") -- 2.25.1 From f73a900d6ca2e6002efe1533d7188fb52a5ee7fe Mon Sep 17 00:00:00 2001 From: bravof Date: Tue, 23 Nov 2021 10:34:43 -0300 Subject: [PATCH 06/16] bugfix: fix for folders missing in mongoFS Change-Id: I9c8a07755457c3c297853bda8e6c5516409a6fde Signed-off-by: bravof --- osm_common/fsmongo.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/osm_common/fsmongo.py b/osm_common/fsmongo.py index 7fb071a..b04057e 100644 --- a/osm_common/fsmongo.py +++ b/osm_common/fsmongo.py @@ -205,10 +205,13 @@ class FsMongo(FsBase): def __update_local_fs(self, from_path=None): dir_cursor = self.fs.find({"metadata.type": "dir"}, no_cursor_timeout=True) + valid_paths = [] + for directory in dir_cursor: if from_path and not 
directory.filename.startswith(from_path): continue os.makedirs(self.path + directory.filename, exist_ok=True) + valid_paths.append(self.path + directory.filename) file_cursor = self.fs.find( {"metadata.type": {"$in": ["file", "sym"]}}, no_cursor_timeout=True @@ -233,6 +236,9 @@ class FsMongo(FsBase): raise os.symlink(link, file_path) else: + folder = os.path.dirname(file_path) + if folder not in valid_paths: + os.makedirs(folder, exist_ok=True) with open(file_path, "wb+") as file_stream: self.fs.download_to_stream(writing_file._id, file_stream) if "permissions" in writing_file.metadata: -- 2.25.1 From b0291ff5010f5776a2f1e9e16154fc87e6fa95dc Mon Sep 17 00:00:00 2001 From: beierlm Date: Thu, 13 Jan 2022 11:37:31 -0500 Subject: [PATCH 07/16] Bug 1840: LTS Support Updates base to Ubuntu 20.04 Updates python to 3.8 Updates pip dependencies to more recent versions Adds apt cache to stage 2 dockerfile Fixes bug 1840 Change-Id: I2cf79d9e697ba1267e2a8d00a180412a2887e1bd Signed-off-by: beierlm --- Dockerfile | 13 +++++++++--- requirements-dist.in | 3 ++- requirements-dist.txt | 17 ++++++++------- requirements-test.txt | 48 +++++++++++++++++++++---------------------- requirements.in | 4 ++-- requirements.txt | 24 +++++++++++----------- tox.ini | 19 ++++++++++------- 7 files changed, 72 insertions(+), 56 deletions(-) diff --git a/Dockerfile b/Dockerfile index 8230f42..f99b758 100644 --- a/Dockerfile +++ b/Dockerfile @@ -21,19 +21,26 @@ # devops-stages/stage-build.sh # -FROM ubuntu:18.04 +FROM ubuntu:20.04 + +ARG APT_PROXY +RUN if [ ! -z $APT_PROXY ] ; then \ + echo "Acquire::http::Proxy \"$APT_PROXY\";" > /etc/apt/apt.conf.d/proxy.conf ;\ + echo "Acquire::https::Proxy \"$APT_PROXY\";" >> /etc/apt/apt.conf.d/proxy.conf ;\ + fi RUN DEBIAN_FRONTEND=noninteractive apt-get update && \ DEBIAN_FRONTEND=noninteractive apt-get -y install \ debhelper \ + dh-python \ git \ python3 \ python3-all \ python3-dev \ python3-setuptools -RUN python3 -m easy_install pip==21.0.1 -RUN pip3 install tox==3.22.0 +RUN python3 -m easy_install pip==21.3.1 +RUN pip install tox==3.24.5 ENV LC_ALL C.UTF-8 ENV LANG C.UTF-8 diff --git a/requirements-dist.in b/requirements-dist.in index 11f0a2a..4f8784f 100644 --- a/requirements-dist.in +++ b/requirements-dist.in @@ -14,4 +14,5 @@ # limitations under the License. stdeb -setuptools-version-command \ No newline at end of file +setuptools-version-command +setuptools<60 \ No newline at end of file diff --git a/requirements-dist.txt b/requirements-dist.txt index 7393626..8192b48 100644 --- a/requirements-dist.txt +++ b/requirements-dist.txt @@ -1,10 +1,3 @@ -setuptools-version-command==2.2 - # via -r requirements-dist.in -stdeb==0.10.0 - # via -r requirements-dist.in - -# The following packages are considered to be unsafe in a requirements file: -# setuptools ####################################################################################### # Copyright ETSI Contributors and Others. # @@ -21,3 +14,13 @@ stdeb==0.10.0 # See the License for the specific language governing permissions and # limitations under the License. 
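The fsmongo folder bugfix above reduces to one rule: before streaming a file out of GridFS, make sure its parent directory exists locally unless it was already materialized from a directory entry. A minimal standalone sketch of that rule (the function name, paths and payload are illustrative, this is not the FsMongo code itself):

import os


def download_to(file_path, payload, valid_paths):
    folder = os.path.dirname(file_path)
    # Mirror of the valid_paths check above: only create the folder when it
    # was not already created from a "dir" entry found in GridFS.
    if folder not in valid_paths:
        os.makedirs(folder, exist_ok=True)
    with open(file_path, "wb+") as file_stream:
        file_stream.write(payload)


download_to("/tmp/osm-fs/pkg/Scripts/charm.py", b"#!/usr/bin/env python3\n", valid_paths=[])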
####################################################################################### +setuptools-version-command==99.9 + # via -r requirements-dist.in +stdeb==0.10.0 + # via -r requirements-dist.in + +# The following packages are considered to be unsafe in a requirements file: +setuptools==59.8.0 + # via + # -r requirements-dist.in + # setuptools-version-command diff --git a/requirements-test.txt b/requirements-test.txt index 58e185a..bfa751a 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1,27 +1,3 @@ -attrs==20.3.0 - # via pytest -coverage==5.5 - # via - # -r requirements-test.in - # nose2 -iniconfig==1.1.1 - # via pytest -nose2==0.10.0 - # via -r requirements-test.in -packaging==20.9 - # via pytest -pluggy==0.13.1 - # via pytest -py==1.10.0 - # via pytest -pyparsing==2.4.7 - # via packaging -pytest==6.2.2 - # via -r requirements-test.in -six==1.15.0 - # via nose2 -toml==0.10.2 - # via pytest ####################################################################################### # Copyright ETSI Contributors and Others. # @@ -38,3 +14,27 @@ toml==0.10.2 # See the License for the specific language governing permissions and # limitations under the License. ####################################################################################### +attrs==21.4.0 + # via pytest +coverage==6.2 + # via + # -r requirements-test.in + # nose2 +iniconfig==1.1.1 + # via pytest +nose2==0.10.0 + # via -r requirements-test.in +packaging==21.3 + # via pytest +pluggy==1.0.0 + # via pytest +py==1.11.0 + # via pytest +pyparsing==3.0.6 + # via packaging +pytest==6.2.5 + # via -r requirements-test.in +six==1.16.0 + # via nose2 +toml==0.10.2 + # via pytest diff --git a/requirements.in b/requirements.in index 1ce6a65..b8e0f2e 100644 --- a/requirements.in +++ b/requirements.in @@ -13,8 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -pymongo +pymongo<4 aiokafka -pyyaml +pyyaml==5.4.1 pycrypto dataclasses \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 2ca19f5..5d297e0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,15 +1,3 @@ -aiokafka==0.7.0 - # via -r requirements.in -dataclasses==0.6 - # via -r requirements.in -kafka-python==2.0.2 - # via aiokafka -pycrypto==2.6.1 - # via -r requirements.in -pymongo==3.11.3 - # via -r requirements.in -pyyaml==5.4.1 - # via -r requirements.in ####################################################################################### # Copyright ETSI Contributors and Others. # @@ -26,3 +14,15 @@ pyyaml==5.4.1 # See the License for the specific language governing permissions and # limitations under the License. 
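Among the pins above, pymongo<4 is the significant one: the 4.x line introduced breaking client API changes, so requirements.txt resolves it to the 3.12.x series. A quick, illustrative sanity check one might run in a freshly built environment (not part of the repo):

import pymongo

major = int(pymongo.version.split(".")[0])
assert major < 4, "expected the pymongo<4 pin, found {}".format(pymongo.version)
print("pymongo", pymongo.version)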
####################################################################################### +aiokafka==0.7.2 + # via -r requirements.in +dataclasses==0.6 + # via -r requirements.in +kafka-python==2.0.2 + # via aiokafka +pycrypto==2.6.1 + # via -r requirements.in +pymongo==3.12.3 + # via -r requirements.in +pyyaml==5.4.1 + # via -r requirements.in diff --git a/tox.ini b/tox.ini index 9ab9b46..ce0ffcb 100644 --- a/tox.ini +++ b/tox.ini @@ -23,7 +23,7 @@ toxworkdir = /tmp/.tox [testenv] usedevelop = True -basepython = python3 +basepython = python3.8 setenv = VIRTUAL_ENV={envdir} PYTHONDONTWRITEBYTECODE = 1 deps = -r{toxinidir}/requirements.txt @@ -76,13 +76,18 @@ commands = ####################################################################################### [testenv:pip-compile] -deps = pip-tools==5.5.0 +deps = pip-tools==6.4.0 +skip_install = true +whitelist_externals = bash + [ commands = - - sh -c 'for file in requirements*.in ; do pip-compile -rU --no-header $file ;\ - out=`echo $file | sed "s/.in/.txt/"` ; \ - head -16 tox.ini >> $out ;\ - done' -whitelist_externals = sh + - bash -c "for file in requirements*.in ; do \ + UNSAFE="" ; \ + if [[ $file =~ 'dist' ]] ; then UNSAFE='--allow-unsafe' ; fi ; \ + pip-compile -rU --no-header $UNSAFE $file ;\ + out=`echo $file | sed 's/.in/.txt/'` ; \ + sed -i -e '1 e head -16 tox.ini' $out ;\ + done" ####################################################################################### [testenv:dist] -- 2.25.1 From 945fa22703037593544f3ea0c93913d562cf1532 Mon Sep 17 00:00:00 2001 From: beierlm Date: Mon, 4 Apr 2022 11:44:03 -0400 Subject: [PATCH 08/16] Bug 1977: FSMongo Reverse Sync Directories represented by a symlink are not stored properly on reverse sync, resulting in empty directories being created instead of preserving the link Fixes Bug 1977 Change-Id: Ia6a3b3dfdfde498751a79e04b2824ac38d0b7feb Signed-off-by: beierlm --- osm_common/fsmongo.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/osm_common/fsmongo.py b/osm_common/fsmongo.py index b04057e..487eaf8 100644 --- a/osm_common/fsmongo.py +++ b/osm_common/fsmongo.py @@ -557,6 +557,8 @@ class FsMongo(FsBase): for root, dirs, files in os.walk(os_path): for folder in dirs: member = {"filename": os.path.join(root, folder), "type": "dir"} + if os.path.islink(member["filename"]): + member["type"] = "sym" members.append(member) for file in files: filename = os.path.join(root, file) -- 2.25.1 From 40f57c87816f6e3e99f495b4e6f8b5abe8cce7a9 Mon Sep 17 00:00:00 2001 From: beierlm Date: Thu, 14 Apr 2022 14:45:07 -0400 Subject: [PATCH 09/16] Bug 1977: Strip trailing / Ensure no files or directories are stored with a trailing / Ensure file/directory lookups happen without trailing / Change-Id: Id34438495170756883a4adeac3b6970e4f91b6b6 Signed-off-by: beierlm --- osm_common/fsmongo.py | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/osm_common/fsmongo.py b/osm_common/fsmongo.py index 487eaf8..a057e37 100644 --- a/osm_common/fsmongo.py +++ b/osm_common/fsmongo.py @@ -294,6 +294,7 @@ class FsMongo(FsBase): :param folder: :return: None or raises an exception """ + folder = folder.rstrip("/") try: self.fs.upload_from_stream(folder, BytesIO(), metadata={"type": "dir"}) except errors.FileExists: # make it idempotent @@ -308,6 +309,9 @@ class FsMongo(FsBase): :param dst: destination directory :return: None or raises and exception """ + dst = dst.rstrip("/") + src = src.rstrip("/") + try: dst_cursor = self.fs.find( {"filename": {"$regex": "^{}(/|$)".format(dst)}}, 
no_cursor_timeout=True @@ -333,6 +337,7 @@ :return: True, False """ f = storage if isinstance(storage, str) else "/".join(storage) + f = f.rstrip("/") cursor = self.fs.find({"filename": f}) @@ -344,7 +349,7 @@ "Multiple files found", http_code=HTTPStatus.INTERNAL_SERVER_ERROR ) - print(requested_file.metadata) + self.logger.debug("Entry {} metadata {}".format(f, requested_file.metadata)) # if no special mode is required just check it does exists if not mode: @@ -365,6 +370,7 @@ :return: file size """ f = storage if isinstance(storage, str) else "/".join(storage) + f = f.rstrip("/") cursor = self.fs.find({"filename": f}) @@ -386,6 +392,7 @@ :return: None """ f = path if isinstance(path, str) else "/".join(path) + f = f.rstrip("/") if type(compressed_object) is tarfile.TarFile: for member in compressed_object.getmembers(): @@ -404,7 +411,9 @@ file_type = "dir" metadata = {"type": file_type, "permissions": member.mode} + member.name = member.name.rstrip("/") + self.logger.debug("Uploading {}".format(member.name)) self.fs.upload_from_stream( f + "/" + member.name, stream, metadata=metadata ) @@ -423,9 +432,9 @@ file_type = "file" metadata = {"type": file_type} + member.filename = member.filename.rstrip("/") - print("Now uploading...") - print(f + "/" + member.filename) + self.logger.debug("Uploading {}".format(member.filename)) self.fs.upload_from_stream( f + "/" + member.filename, stream, metadata=metadata ) @@ -442,6 +451,7 @@ """ try: f = storage if isinstance(storage, str) else "/".join(storage) + f = f.rstrip("/") if "b" in mode: return GridByteStream(f, self.fs, mode) @@ -464,6 +474,7 @@ """ try: f = storage if isinstance(storage, str) else "/".join(storage) + f = f.rstrip("/") files = [] dir_cursor = self.fs.find({"filename": f}) @@ -506,6 +517,7 @@ """ try: f = storage if isinstance(storage, str) else "/".join(storage) + f = f.rstrip("/") file_cursor = self.fs.find({"filename": f}) found = False -- 2.25.1 From 3d82ba2cc4c3ebb340a88400aaa4a8d3683806a1 Mon Sep 17 00:00:00 2001 From: beierlm Date: Tue, 19 Apr 2022 14:12:50 -0400 Subject: [PATCH 10/16] Increase Logging, fix directory delete When deleting a directory, we need to do a regexp search of all files that start with the directory that we want to delete, but not revisions that happen to share the same UUID. Changes the regexp for directory delete to include the trailing '/' so it limits the scope.
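Both trailing-slash patches implement the same two normalization rules, sketched here outside of FsMongo (the helper names are illustrative): join list-style storage arguments, strip the trailing '/', and scope directory deletes with a regular expression that requires the path separator.

import re


def normalize(storage):
    # storage can be a str or a list of path components, as in FsMongo
    f = storage if isinstance(storage, str) else "/".join(storage)
    return f.rstrip("/")


def children_pattern(directory):
    # "^dir/" only matches entries under dir; a bare "^dir" would also match
    # siblings that merely share the prefix, e.g. another revision's UUID.
    return re.compile("^{}/".format(re.escape(directory)))  # re.escape added in this sketch for safety


assert normalize(["pkgs", "native_charm_vnf", ""]) == "pkgs/native_charm_vnf"
assert children_pattern("pkgs/abc").match("pkgs/abc/charm.py")
assert not children_pattern("pkgs/abc").match("pkgs/abcd/charm.py")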
Adds logging where needed Change-Id: Ia60618c17a863417224fadd9c055be658fb4ba4a Signed-off-by: beierlm --- osm_common/fsmongo.py | 25 ++++++++++++++++++++----- 1 file changed, 20 insertions(+), 5 deletions(-) diff --git a/osm_common/fsmongo.py b/osm_common/fsmongo.py index a057e37..d923842 100644 --- a/osm_common/fsmongo.py +++ b/osm_common/fsmongo.py @@ -210,6 +210,7 @@ class FsMongo(FsBase): for directory in dir_cursor: if from_path and not directory.filename.startswith(from_path): continue + self.logger.debug("Making dir {}".format(self.path + directory.filename)) os.makedirs(self.path + directory.filename, exist_ok=True) valid_paths.append(self.path + directory.filename) @@ -229,6 +230,7 @@ class FsMongo(FsBase): link = b.read().decode("utf-8") try: + self.logger.debug("Sync removing {}".format(file_path)) os.remove(file_path) except OSError as e: if e.errno != errno.ENOENT: @@ -238,8 +240,10 @@ class FsMongo(FsBase): else: folder = os.path.dirname(file_path) if folder not in valid_paths: + self.logger.debug("Sync local directory {}".format(file_path)) os.makedirs(folder, exist_ok=True) with open(file_path, "wb+") as file_stream: + self.logger.debug("Sync download {}".format(file_path)) self.fs.download_to_stream(writing_file._id, file_stream) if "permissions" in writing_file.metadata: os.chmod(file_path, writing_file.metadata["permissions"]) @@ -413,7 +417,7 @@ class FsMongo(FsBase): metadata = {"type": file_type, "permissions": member.mode} member.name = member.name.rstrip("/") - self.logger.debug("Uploading {}".format(member.name)) + self.logger.debug("Uploading {}/{}".format(f, member.name)) self.fs.upload_from_stream( f + "/" + member.name, stream, metadata=metadata ) @@ -434,7 +438,7 @@ class FsMongo(FsBase): metadata = {"type": file_type} member.filename = member.filename.rstrip("/") - self.logger.debug("Uploading {}".format(member.filename)) + self.logger.debug("Uploading {}/{}".format(f, member.filename)) self.fs.upload_from_stream( f + "/" + member.filename, stream, metadata=metadata ) @@ -526,18 +530,27 @@ class FsMongo(FsBase): exception_file = next(file_cursor, None) if exception_file: + self.logger.error( + "Cannot delete duplicate file: {} and {}".format( + requested_file.filename, exception_file.filename + ) + ) raise FsException( "Multiple files found", http_code=HTTPStatus.INTERNAL_SERVER_ERROR, ) if requested_file.metadata["type"] == "dir": - dir_cursor = self.fs.find({"filename": {"$regex": "^{}".format(f)}}) + dir_cursor = self.fs.find( + {"filename": {"$regex": "^{}/".format(f)}} + ) for tmp in dir_cursor: + self.logger.debug("Deleting {}".format(tmp.filename)) self.fs.delete(tmp._id) - else: - self.fs.delete(requested_file._id) + + self.logger.debug("Deleting {}".format(requested_file.filename)) + self.fs.delete(requested_file._id) if not found and not ignore_non_exist: raise FsException( "File {} does not exist".format(storage), @@ -623,11 +636,13 @@ class FsMongo(FsBase): metadata = {"type": file_type, "permissions": mask} + self.logger.debug("Sync upload {}".format(rel_filename)) self.fs.upload_from_stream(rel_filename, stream, metadata=metadata) # delete old files if remote_file: for file in remote_file: + self.logger.debug("Sync deleting {}".format(file.filename)) self.fs.delete(file._id) finally: if fh: -- 2.25.1 From 3dd0db6efaab109fa1bd43395fbbddecf1eb73d4 Mon Sep 17 00:00:00 2001 From: aticig Date: Fri, 4 Mar 2022 19:35:45 +0300 Subject: [PATCH 11/16] Adding release notes and enabling import order check Enabling Flake8 library import order check, adding 
release_notes tooling, and making stage-test fail based on the tox results. Change-Id: I490b4cb3fe5904f54d84f0ce9b41fe91ec8b4f5d Signed-off-by: aticig --- devops-stages/stage-test.sh | 18 ++++++++++++--- osm_common/dbbase.py | 11 +++++----- osm_common/dbmemory.py | 10 +++++---- osm_common/dbmongo.py | 13 ++++++----- osm_common/fsbase.py | 6 +++-- osm_common/fslocal.py | 7 +++--- osm_common/fsmongo.py | 5 ++--- osm_common/msgbase.py | 5 +++-- osm_common/msgkafka.py | 5 +++-- osm_common/msglocal.py | 10 ++++----- osm_common/sol004_package.py | 4 +++- osm_common/sol007_package.py | 4 +++- osm_common/sol_package.py | 4 ++-- .../Scripts/charms/simple/src/charm.py | 2 +- .../Scripts/charms/simple/src/charm.py | 2 +- .../Scripts/charms/simple/src/charm.py | 2 +- osm_common/tests/test_dbbase.py | 11 ++++------ osm_common/tests/test_dbmemory.py | 8 +++---- osm_common/tests/test_fsbase.py | 2 +- osm_common/tests/test_fslocal.py | 6 ++--- osm_common/tests/test_fsmongo.py | 17 ++++++-------- osm_common/tests/test_msgbase.py | 2 +- osm_common/tests/test_msglocal.py | 14 ++++++------ ...leanup_release_notes-2eaaface039a3179.yaml | 22 +++++++++++++++++++ setup.py | 1 + tox.ini | 14 +++++++++++- 26 files changed, 127 insertions(+), 78 deletions(-) create mode 100644 releasenotes/notes/cleanup_release_notes-2eaaface039a3179.yaml diff --git a/devops-stages/stage-test.sh b/devops-stages/stage-test.sh index e5a8e85..8c684dd 100755 --- a/devops-stages/stage-test.sh +++ b/devops-stages/stage-test.sh @@ -12,6 +12,18 @@ # implied. # See the License for the specific language governing permissions and # limitations under the License. - -OUTPUT=$(TOX_PARALLEL_NO_SPINNER=1 tox --parallel=auto) -printf "$OUTPUT" +set -e +echo Launching tox +tox --parallel=auto +echo "Checking the presence of release notes ..." +nb_rn=$(git diff --diff-filter=A --name-only HEAD~1 |grep "releasenotes\/notes" |wc -l) +if [ "${nb_rn}" -lt 1 ]; then + echo "The commit needs release notes" + echo "Run the following command to generate release notes: tox -e release_notes ''" + echo "Please read README.md for more details" + exit 1 +elif [ "${nb_rn}" -gt 1 ]; then + echo "Only one release notes file should be added in a commit" + exit 1 +fi +echo "OK. Release notes present in commit" diff --git a/osm_common/dbbase.py b/osm_common/dbbase.py index 74378d0..4021805 100644 --- a/osm_common/dbbase.py +++ b/osm_common/dbbase.py @@ -15,15 +15,16 @@ # See the License for the specific language governing permissions and # limitations under the License. -import yaml +from base64 import b64decode, b64encode +from copy import deepcopy +from http import HTTPStatus import logging import re -from http import HTTPStatus -from copy import deepcopy +from threading import Lock + from Crypto.Cipher import AES -from base64 import b64decode, b64encode from osm_common.common_utils import FakeLock -from threading import Lock +import yaml __author__ = "Alfonso Tierno " diff --git a/osm_common/dbmemory.py b/osm_common/dbmemory.py index d089575..9f8c571 100644 --- a/osm_common/dbmemory.py +++ b/osm_common/dbmemory.py @@ -15,12 +15,14 @@ # See the License for the specific language governing permissions and # limitations under the License.
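The new stage-test gate above requires exactly one file added under releasenotes/notes per commit. A rough Python equivalent of that shell check, for illustration only (the repo runs the shell version, and this assumes it is executed inside a git checkout with at least two commits):

import subprocess


def added_release_notes():
    # Files added (--diff-filter=A) by the last commit, as in the shell gate
    out = subprocess.run(
        ["git", "diff", "--diff-filter=A", "--name-only", "HEAD~1"],
        capture_output=True, text=True, check=True,
    ).stdout
    return [f for f in out.splitlines() if f.startswith("releasenotes/notes")]


notes = added_release_notes()
if len(notes) != 1:
    raise SystemExit("expected exactly one release note in the commit, found {}".format(len(notes)))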
-import logging -from osm_common.dbbase import DbException, DbBase -from osm_common.dbmongo import deep_update +from copy import deepcopy from http import HTTPStatus +import logging from uuid import uuid4 -from copy import deepcopy + +from osm_common.dbbase import DbBase, DbException +from osm_common.dbmongo import deep_update + __author__ = "Alfonso Tierno " diff --git a/osm_common/dbmongo.py b/osm_common/dbmongo.py index d8b373a..8561e96 100644 --- a/osm_common/dbmongo.py +++ b/osm_common/dbmongo.py @@ -16,15 +16,16 @@ # limitations under the License. -import logging -from pymongo import MongoClient, errors -from osm_common.dbbase import DbException, DbBase -from http import HTTPStatus -from time import time, sleep -from copy import deepcopy from base64 import b64decode +from copy import deepcopy +from http import HTTPStatus +import logging +from time import sleep, time from uuid import uuid4 +from osm_common.dbbase import DbBase, DbException +from pymongo import errors, MongoClient + __author__ = "Alfonso Tierno " # TODO consider use this decorator for database access retries diff --git a/osm_common/fsbase.py b/osm_common/fsbase.py index 219dbe1..a03fb32 100644 --- a/osm_common/fsbase.py +++ b/osm_common/fsbase.py @@ -16,11 +16,13 @@ # limitations under the License. -import logging from http import HTTPStatus -from osm_common.common_utils import FakeLock +import logging from threading import Lock +from osm_common.common_utils import FakeLock + + __author__ = "Alfonso Tierno " diff --git a/osm_common/fslocal.py b/osm_common/fslocal.py index ae22c6a..5bae0e9 100644 --- a/osm_common/fslocal.py +++ b/osm_common/fslocal.py @@ -15,14 +15,13 @@ # See the License for the specific language governing permissions and # limitations under the License. +from http import HTTPStatus +import logging import os +from shutil import rmtree import tarfile import zipfile -import logging -# import tarfile -from http import HTTPStatus -from shutil import rmtree from osm_common.fsbase import FsBase, FsException __author__ = "Alfonso Tierno " diff --git a/osm_common/fsmongo.py b/osm_common/fsmongo.py index d923842..1ce5909 100644 --- a/osm_common/fsmongo.py +++ b/osm_common/fsmongo.py @@ -15,17 +15,16 @@ # For those usages not covered by the Apache License, Version 2.0 please # contact: eduardo.sousa@canonical.com ## - +import datetime import errno from http import HTTPStatus from io import BytesIO, StringIO import logging import os -import datetime import tarfile import zipfile -from gridfs import GridFSBucket, errors +from gridfs import errors, GridFSBucket from osm_common.fsbase import FsBase, FsException from pymongo import MongoClient diff --git a/osm_common/msgbase.py b/osm_common/msgbase.py index 49adcb8..80c5be5 100644 --- a/osm_common/msgbase.py +++ b/osm_common/msgbase.py @@ -15,11 +15,12 @@ # See the License for the specific language governing permissions and # limitations under the License. -import logging from http import HTTPStatus -from osm_common.common_utils import FakeLock +import logging from threading import Lock +from osm_common.common_utils import FakeLock + __author__ = "Alfonso Tierno " diff --git a/osm_common/msgkafka.py b/osm_common/msgkafka.py index 5caa5b1..5487093 100644 --- a/osm_common/msgkafka.py +++ b/osm_common/msgkafka.py @@ -13,13 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
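msgkafka.py, whose imports are regrouped in the next hunk, is a thin wrapper over aiokafka's asyncio client. For orientation, the producer pattern it builds on looks roughly like this (the broker address is an assumption, and a reachable Kafka is required to actually run it):

import asyncio

from aiokafka import AIOKafkaProducer


async def publish(topic, payload, server="kafka:9092"):
    producer = AIOKafkaProducer(bootstrap_servers=server)
    await producer.start()
    try:
        # send_and_wait returns once the broker acknowledges the message
        await producer.send_and_wait(topic, payload)
    finally:
        await producer.stop()


# asyncio.run(publish("ns", b"{}"))  # uncomment with a broker available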
-import logging import asyncio -import yaml +import logging + from aiokafka import AIOKafkaConsumer from aiokafka import AIOKafkaProducer from aiokafka.errors import KafkaError from osm_common.msgbase import MsgBase, MsgException +import yaml __author__ = ( "Alfonso Tierno , " diff --git a/osm_common/msglocal.py b/osm_common/msglocal.py index c10ff17..2f90307 100644 --- a/osm_common/msglocal.py +++ b/osm_common/msglocal.py @@ -15,16 +15,16 @@ # See the License for the specific language governing permissions and # limitations under the License. +import asyncio +from http import HTTPStatus import logging import os -import yaml -import asyncio -from osm_common.msgbase import MsgBase, MsgException from time import sleep -from http import HTTPStatus -__author__ = "Alfonso Tierno " +from osm_common.msgbase import MsgBase, MsgException +import yaml +__author__ = "Alfonso Tierno " """ This emulated kafka bus by just using a shared file system. Useful for testing or devops. One file is used per topic. Only one producer and one consumer is allowed per topic. Both consumer and producer diff --git a/osm_common/sol004_package.py b/osm_common/sol004_package.py index 813e52d..8a3cb95 100644 --- a/osm_common/sol004_package.py +++ b/osm_common/sol004_package.py @@ -55,9 +55,11 @@ native_charm_vnf/ native_charm_vnf/ └── charm.py """ -import yaml import datetime import os + +import yaml + from .sol_package import SOLPackage diff --git a/osm_common/sol007_package.py b/osm_common/sol007_package.py index 4add8a9..ca14ce8 100644 --- a/osm_common/sol007_package.py +++ b/osm_common/sol007_package.py @@ -55,9 +55,11 @@ native_charm_vnf/ native_charm_vnf/ └── charm.py """ -import yaml import datetime import os + +import yaml + from .sol_package import SOLPackage diff --git a/osm_common/sol_package.py b/osm_common/sol_package.py index e336cd5..8af52b9 100644 --- a/osm_common/sol_package.py +++ b/osm_common/sol_package.py @@ -21,10 +21,10 @@ # For those usages not covered by the Apache License, Version 2.0 please # contact: fbravo@whitestack.com or agarcia@whitestack.com ## - +import hashlib import os + import yaml -import hashlib class SOLPackageException(Exception): diff --git a/osm_common/tests/packages/invalid_package_vnf/Scripts/charms/simple/src/charm.py b/osm_common/tests/packages/invalid_package_vnf/Scripts/charms/simple/src/charm.py index 54cefc4..8607b1f 100755 --- a/osm_common/tests/packages/invalid_package_vnf/Scripts/charms/simple/src/charm.py +++ b/osm_common/tests/packages/invalid_package_vnf/Scripts/charms/simple/src/charm.py @@ -16,8 +16,8 @@ # under the License. ## -import sys import subprocess +import sys from ops.charm import CharmBase from ops.main import main diff --git a/osm_common/tests/packages/native_charm_with_metadata_dir_vnf/Scripts/charms/simple/src/charm.py b/osm_common/tests/packages/native_charm_with_metadata_dir_vnf/Scripts/charms/simple/src/charm.py index 54cefc4..8607b1f 100755 --- a/osm_common/tests/packages/native_charm_with_metadata_dir_vnf/Scripts/charms/simple/src/charm.py +++ b/osm_common/tests/packages/native_charm_with_metadata_dir_vnf/Scripts/charms/simple/src/charm.py @@ -16,8 +16,8 @@ # under the License. 
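Every import hunk in this patch applies the same convention, the import-order-style = google setting added to tox.ini later in this change: imports sorted alphabetically within each group, standard library before third-party. On a toy module (names chosen only to show the grouping) the accepted layout looks like:

import os
import subprocess  # stdlib group, alphabetical: os < subprocess < sys
import sys

import yaml  # third-party group comes after the standard library

print(yaml.safe_dump({"cwd": os.getcwd(), "python": sys.executable, "have_subprocess": bool(subprocess)}))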
## -import sys import subprocess +import sys from ops.charm import CharmBase from ops.main import main diff --git a/osm_common/tests/packages/native_charm_without_metadata_dir_vnf/Scripts/charms/simple/src/charm.py b/osm_common/tests/packages/native_charm_without_metadata_dir_vnf/Scripts/charms/simple/src/charm.py index 54cefc4..8607b1f 100755 --- a/osm_common/tests/packages/native_charm_without_metadata_dir_vnf/Scripts/charms/simple/src/charm.py +++ b/osm_common/tests/packages/native_charm_without_metadata_dir_vnf/Scripts/charms/simple/src/charm.py @@ -16,8 +16,8 @@ # under the License. ## -import sys import subprocess +import sys from ops.charm import CharmBase from ops.main import main diff --git a/osm_common/tests/test_dbbase.py b/osm_common/tests/test_dbbase.py index 117350e..eabf5e0 100644 --- a/osm_common/tests/test_dbbase.py +++ b/osm_common/tests/test_dbbase.py @@ -18,11 +18,12 @@ ## import http -import pytest +from http import HTTPStatus +from os import urandom import unittest + from osm_common.dbbase import DbBase, DbException, deep_update -from os import urandom -from http import HTTPStatus +import pytest def exception_message(message): @@ -124,10 +125,6 @@ class TestEncryption(unittest.TestCase): TEST = ( ("plain text 1 ! ", None), ("plain text 2 with salt ! ", "1afd5d1a-4a7e-4d9c-8c65-251290183106"), - ("plain text 3 with usalt ! ", u"1afd5d1a-4a7e-4d9c-8c65-251290183106"), - (u"plain unicode 4 ! ", None), - (u"plain unicode 5 with salt ! ", "1a000d1a-4a7e-4d9c-8c65-251290183106"), - (u"plain unicode 6 with usalt ! ", u"1abcdd1a-4a7e-4d9c-8c65-251290183106"), ) for db_base in self.db_bases: for value, salt in TEST: diff --git a/osm_common/tests/test_dbmemory.py b/osm_common/tests/test_dbmemory.py index 3c45527..9a0e749 100644 --- a/osm_common/tests/test_dbmemory.py +++ b/osm_common/tests/test_dbmemory.py @@ -16,17 +16,15 @@ # For those usages not covered by the Apache License, Version 2.0 please # contact: esousa@whitestack.com or alfonso.tiernosepulveda@telefonica.com ## - +from copy import deepcopy import http import logging -import pytest import unittest -from unittest.mock import Mock +from unittest.mock import MagicMock, Mock -from unittest.mock import MagicMock from osm_common.dbbase import DbException from osm_common.dbmemory import DbMemory -from copy import deepcopy +import pytest __author__ = "Eduardo Sousa " diff --git a/osm_common/tests/test_fsbase.py b/osm_common/tests/test_fsbase.py index 43349ad..cb5c560 100644 --- a/osm_common/tests/test_fsbase.py +++ b/osm_common/tests/test_fsbase.py @@ -18,9 +18,9 @@ ## import http -import pytest from osm_common.fsbase import FsBase, FsException +import pytest def exception_message(message): diff --git a/osm_common/tests/test_fslocal.py b/osm_common/tests/test_fslocal.py index 6f52984..6336211 100644 --- a/osm_common/tests/test_fslocal.py +++ b/osm_common/tests/test_fslocal.py @@ -17,18 +17,18 @@ # contact: esousa@whitestack.com or alfonso.tiernosepulveda@telefonica.com ## +import http import io import logging -import http import os -import pytest +import shutil import tarfile import tempfile import uuid -import shutil from osm_common.fsbase import FsException from osm_common.fslocal import FsLocal +import pytest __author__ = "Eduardo Sousa " diff --git a/osm_common/tests/test_fsmongo.py b/osm_common/tests/test_fsmongo.py index da01ff3..01a8ef2 100644 --- a/osm_common/tests/test_fsmongo.py +++ b/osm_common/tests/test_fsmongo.py @@ -16,23 +16,20 @@ # contact: eduardo.sousa@canonical.com ## +from io import BytesIO import 
logging -import pytest -import tempfile -import tarfile import os +from pathlib import Path import subprocess - -from pymongo import MongoClient -from gridfs import GridFSBucket - -from io import BytesIO - +import tarfile +import tempfile from unittest.mock import Mock +from gridfs import GridFSBucket from osm_common.fsbase import FsException from osm_common.fsmongo import FsMongo -from pathlib import Path +from pymongo import MongoClient +import pytest __author__ = "Eduardo Sousa " diff --git a/osm_common/tests/test_msgbase.py b/osm_common/tests/test_msgbase.py index 665968e..d5092b1 100644 --- a/osm_common/tests/test_msgbase.py +++ b/osm_common/tests/test_msgbase.py @@ -18,9 +18,9 @@ ## import http -import pytest from osm_common.msgbase import MsgBase, MsgException +import pytest def exception_message(message): diff --git a/osm_common/tests/test_msglocal.py b/osm_common/tests/test_msglocal.py index 9548885..fb74586 100644 --- a/osm_common/tests/test_msglocal.py +++ b/osm_common/tests/test_msglocal.py @@ -19,18 +19,18 @@ import http import logging -import pytest -import tempfile -import shutil -import uuid import os -import yaml -import time +import shutil +import tempfile import threading - +import time from unittest.mock import MagicMock +import uuid + from osm_common.msgbase import MsgException from osm_common.msglocal import MsgLocal +import pytest +import yaml __author__ = "Eduardo Sousa " diff --git a/releasenotes/notes/cleanup_release_notes-2eaaface039a3179.yaml b/releasenotes/notes/cleanup_release_notes-2eaaface039a3179.yaml new file mode 100644 index 0000000..4163b1f --- /dev/null +++ b/releasenotes/notes/cleanup_release_notes-2eaaface039a3179.yaml @@ -0,0 +1,22 @@ +####################################################################################### +# Copyright ETSI Contributors and Others. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +####################################################################################### +--- +other: + - | + Flake8 import order check is added in tox.ini, library import order is arranged in all files. + Release note creation is added in tox.ini, stage-test failing by tox results is enabled, release note + checking script is added. 
\ No newline at end of file diff --git a/setup.py b/setup.py index cb19584..390ca7d 100644 --- a/setup.py +++ b/setup.py @@ -18,6 +18,7 @@ import os + from setuptools import setup here = os.path.abspath(os.path.dirname(__file__)) diff --git a/tox.ini b/tox.ini index ce0ffcb..e763ca2 100644 --- a/tox.ini +++ b/tox.ini @@ -52,7 +52,9 @@ whitelist_externals = sh ####################################################################################### [testenv:flake8] -deps = flake8 +deps = + flake8 + flake8-import-order commands = flake8 osm_common/ setup.py @@ -103,6 +105,15 @@ commands = sh -c 'rm osm_common/requirements.txt' whitelist_externals = sh +####################################################################################### +[testenv:release_notes] +deps = reno +skip_install = true +whitelist_externals = bash +commands = + reno new {posargs:new_feature} + bash -c "sed -i -e '1 e head -16 tox.ini' releasenotes/notes/{posargs:new_feature}*.yaml" + ####################################################################################### [flake8] ignore = @@ -121,3 +132,4 @@ exclude = max-line-length = 120 show-source = True builtins = _ +import-order-style = google -- 2.25.1 From 7da9795a4b73c72e81ac4880a9e9507e441aa90f Mon Sep 17 00:00:00 2001 From: aticig Date: Thu, 31 Mar 2022 23:07:21 +0300 Subject: [PATCH 12/16] Code Cleanup and adding unit tests Cleaning code to allow only URI type MongoClient connection, adding unit tests Change-Id: Iacca44c00006a1072ff70989b0220e2b27bc5fd8 Signed-off-by: aticig --- osm_common/dbmongo.py | 7 - osm_common/fsmongo.py | 5 +- osm_common/tests/test_dbmongo.py | 501 ++++++++++++++++++ osm_common/tests/test_fsmongo.py | 162 +----- .../notes/code_cleanup-ee340441905782bf.yaml | 21 + 5 files changed, 530 insertions(+), 166 deletions(-) create mode 100644 osm_common/tests/test_dbmongo.py create mode 100644 releasenotes/notes/code_cleanup-ee340441905782bf.yaml diff --git a/osm_common/dbmongo.py b/osm_common/dbmongo.py index 8561e96..f64949d 100644 --- a/osm_common/dbmongo.py +++ b/osm_common/dbmongo.py @@ -105,13 +105,6 @@ class DbMongo(DbBase): self.client = MongoClient( config["uri"], replicaSet=config.get("replicaset", None) ) - else: - self.client = MongoClient( - config["host"], - config["port"], - replicaSet=config.get("replicaset", None), - ) - # TODO add as parameters also username=config.get("user"), password=config.get("password")) # when all modules are ready self.db = self.client[config["name"]] if "loglevel" in config: diff --git a/osm_common/fsmongo.py b/osm_common/fsmongo.py index 1ce5909..727410e 100644 --- a/osm_common/fsmongo.py +++ b/osm_common/fsmongo.py @@ -275,14 +275,11 @@ class FsMongo(FsBase): if all(key in config.keys() for key in ["uri", "collection"]): self.client = MongoClient(config["uri"]) self.fs = GridFSBucket(self.client[config["collection"]]) - elif all(key in config.keys() for key in ["host", "port", "collection"]): - self.client = MongoClient(config["host"], config["port"]) - self.fs = GridFSBucket(self.client[config["collection"]]) else: if "collection" not in config.keys(): raise FsException('Missing parameter "collection"') else: - raise FsException('Missing parameters: "uri" or "host" + "port"') + raise FsException('Missing parameters: "uri"') except FsException: raise except Exception as e: # TODO refine diff --git a/osm_common/tests/test_dbmongo.py b/osm_common/tests/test_dbmongo.py new file mode 100644 index 0000000..52c9c55 --- /dev/null +++ b/osm_common/tests/test_dbmongo.py @@ -0,0 +1,501 @@ 
+####################################################################################### +# Copyright ETSI Contributors and Others. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +####################################################################################### + +import logging +from urllib.parse import quote + +from osm_common.dbbase import DbException, FakeLock +from osm_common.dbmongo import DbMongo +from pymongo import MongoClient +import pytest + + +def db_status_exception_message(): + return "database exception Wrong database status" + + +def db_version_exception_message(): + return "database exception Invalid database version" + + +def mock_get_one_status_not_enabled(a, b, c, fail_on_empty=False, fail_on_more=True): + return {"status": "ERROR", "version": "", "serial": ""} + + +def mock_get_one_wrong_db_version(a, b, c, fail_on_empty=False, fail_on_more=True): + return {"status": "ENABLED", "version": "4.0", "serial": "MDY4OA=="} + + +def db_generic_exception(exception): + return exception + + +def db_generic_exception_message(message): + return f"database exception {message}" + + +def test_constructor(): + db = DbMongo(lock=True) + assert db.logger == logging.getLogger("db") + assert db.db is None + assert db.client is None + assert db.database_key is None + assert db.secret_obtained is False + assert db.lock.acquire() is True + + +def test_constructor_with_logger(): + logger_name = "db_mongo" + db = DbMongo(logger_name=logger_name, lock=False) + assert db.logger == logging.getLogger(logger_name) + assert db.db is None + assert db.client is None + assert db.database_key is None + assert db.secret_obtained is False + assert type(db.lock) == FakeLock + + +@pytest.mark.parametrize( + "config, target_version, serial, lock", + [ + ( + { + "logger_name": "mongo_logger", + "commonkey": "common", + "uri": "mongo:27017", + "replicaset": "rs0", + "name": "osmdb", + "loglevel": "CRITICAL", + }, + "5.0", + "MDY=", + True, + ), + ( + { + "logger_name": "mongo_logger", + "commonkey": "common", + "masterpassword": "master", + "uri": "mongo:27017", + "replicaset": "rs0", + "name": "osmdb", + "loglevel": "CRITICAL", + }, + "5.0", + "MDY=", + False, + ), + ( + { + "logger_name": "logger", + "uri": "mongo:27017", + "name": "newdb", + "commonkey": "common", + }, + "3.6", + "", + True, + ), + ( + { + "uri": "mongo:27017", + "commonkey": "common", + "name": "newdb", + }, + "5.0", + "MDIy", + False, + ), + ( + { + "uri": "mongo:27017", + "masterpassword": "common", + "name": "newdb", + "loglevel": "CRITICAL", + }, + "4.4", + "OTA=", + False, + ), + ( + { + "uri": "mongo", + "masterpassword": "common", + "name": "osmdb", + "loglevel": "CRITICAL", + }, + "4.4", + "OTA=", + True, + ), + ( + { + "logger_name": "mongo_logger", + "commonkey": "common", + "uri": quote("user4:password4@mongo"), + "replicaset": "rs0", + "name": "osmdb", + "loglevel": "CRITICAL", + }, + "5.0", + "NTM=", + True, + ), + ( + { + "logger_name": "logger", + "uri": quote("user3:password3@mongo:27017"), + "name": "newdb", + 
"commonkey": "common", + }, + "4.0", + "NjEx", + False, + ), + ( + { + "uri": quote("user2:password2@mongo:27017"), + "commonkey": "common", + "name": "newdb", + }, + "5.0", + "cmV0MzI=", + False, + ), + ( + { + "uri": quote("user1:password1@mongo:27017"), + "commonkey": "common", + "masterpassword": "master", + "name": "newdb", + "loglevel": "CRITICAL", + }, + "4.0", + "MjMyNQ==", + False, + ), + ( + { + "uri": quote("user1:password1@mongo"), + "masterpassword": "common", + "name": "newdb", + "loglevel": "CRITICAL", + }, + "4.0", + "MjMyNQ==", + True, + ), + ], +) +def test_db_connection_with_valid_config( + config, target_version, serial, lock, monkeypatch +): + def mock_get_one(a, b, c, fail_on_empty=False, fail_on_more=True): + return {"status": "ENABLED", "version": target_version, "serial": serial} + + monkeypatch.setattr(DbMongo, "get_one", mock_get_one) + db = DbMongo(lock=lock) + db.db_connect(config, target_version) + assert ( + db.logger == logging.getLogger(config.get("logger_name")) + if config.get("logger_name") + else logging.getLogger("db") + ) + assert type(db.client) == MongoClient + assert db.database_key == "common" + assert db.logger.getEffectiveLevel() == 50 if config.get("loglevel") else 20 + + +@pytest.mark.parametrize( + "config, target_version, version_data, expected_exception_message", + [ + ( + { + "logger_name": "mongo_logger", + "commonkey": "common", + "uri": "mongo:27017", + "replicaset": "rs2", + "name": "osmdb", + "loglevel": "CRITICAL", + }, + "4.0", + mock_get_one_status_not_enabled, + db_status_exception_message(), + ), + ( + { + "logger_name": "mongo_logger", + "commonkey": "common", + "uri": "mongo:27017", + "replicaset": "rs4", + "name": "osmdb", + "loglevel": "CRITICAL", + }, + "5.0", + mock_get_one_wrong_db_version, + db_version_exception_message(), + ), + ( + { + "logger_name": "mongo_logger", + "commonkey": "common", + "uri": quote("user2:pa@word2@mongo:27017"), + "replicaset": "rs0", + "name": "osmdb", + "loglevel": "DEBUG", + }, + "4.0", + mock_get_one_status_not_enabled, + db_status_exception_message(), + ), + ( + { + "logger_name": "mongo_logger", + "commonkey": "common", + "uri": quote("username:pass1rd@mongo:27017"), + "replicaset": "rs0", + "name": "osmdb", + "loglevel": "DEBUG", + }, + "5.0", + mock_get_one_wrong_db_version, + db_version_exception_message(), + ), + ], +) +def test_db_connection_db_status_error( + config, target_version, version_data, expected_exception_message, monkeypatch +): + monkeypatch.setattr(DbMongo, "get_one", version_data) + db = DbMongo(lock=False) + with pytest.raises(DbException) as exception_info: + db.db_connect(config, target_version) + assert str(exception_info.value).startswith(expected_exception_message) + + +@pytest.mark.parametrize( + "config, target_version, lock, expected_exception", + [ + ( + { + "logger_name": "mongo_logger", + "commonkey": "common", + "uri": "27017@/:", + "replicaset": "rs0", + "name": "osmdb", + "loglevel": "CRITICAL", + }, + "4.0", + True, + db_generic_exception(DbException), + ), + ( + { + "logger_name": "mongo_logger", + "commonkey": "common", + "uri": "user@pass", + "replicaset": "rs0", + "name": "osmdb", + "loglevel": "CRITICAL", + }, + "4.0", + False, + db_generic_exception(DbException), + ), + ( + { + "logger_name": "mongo_logger", + "commonkey": "common", + "uri": "user@pass:27017", + "replicaset": "rs0", + "name": "osmdb", + "loglevel": "CRITICAL", + }, + "4.0", + True, + db_generic_exception(DbException), + ), + ( + { + "logger_name": "mongo_logger", + "commonkey": 
"common", + "uri": "", + "replicaset": "rs0", + "name": "osmdb", + "loglevel": "CRITICAL", + }, + "5.0", + False, + db_generic_exception(TypeError), + ), + ( + { + "logger_name": "mongo_logger", + "commonkey": "common", + "uri": "user2::@mon:27017", + "replicaset": "rs0", + "name": "osmdb", + "loglevel": "DEBUG", + }, + "4.0", + True, + db_generic_exception(ValueError), + ), + ( + { + "logger_name": "mongo_logger", + "commonkey": "common", + "replicaset": 33, + "uri": "user2@@mongo:27017", + "name": "osmdb", + "loglevel": "DEBUG", + }, + "5.0", + False, + db_generic_exception(TypeError), + ), + ], +) +def test_db_connection_with_invalid_uri( + config, target_version, lock, expected_exception, monkeypatch +): + def mock_get_one(a, b, c, fail_on_empty=False, fail_on_more=True): + pass + + monkeypatch.setattr(DbMongo, "get_one", mock_get_one) + db = DbMongo(lock=lock) + with pytest.raises(expected_exception) as exception_info: + db.db_connect(config, target_version) + assert type(exception_info.value) == expected_exception + + +@pytest.mark.parametrize( + "config, target_version, expected_exception", + [ + ( + { + "logger_name": "mongo_logger", + "commonkey": "common", + "replicaset": "rs0", + "name": "osmdb", + "loglevel": "CRITICAL", + }, + "", + db_generic_exception(TypeError), + ), + ( + { + "logger_name": "mongo_logger", + "uri": "mongo:27017", + "replicaset": "rs0", + "loglevel": "CRITICAL", + }, + "4.0", + db_generic_exception(KeyError), + ), + ( + { + "replicaset": "rs0", + "loglevel": "CRITICAL", + }, + None, + db_generic_exception(KeyError), + ), + ( + { + "logger_name": "mongo_logger", + "commonkey": "common", + "uri": "", + "replicaset": "rs0", + "name": "osmdb", + "loglevel": "CRITICAL", + }, + "5.0", + db_generic_exception(TypeError), + ), + ( + { + "logger_name": "mongo_logger", + "name": "osmdb", + }, + "4.0", + db_generic_exception(TypeError), + ), + ( + { + "logger_name": "logger", + "replicaset": "", + "uri": "user2@@mongo:27017", + }, + "5.0", + db_generic_exception(KeyError), + ), + ], +) +def test_db_connection_with_missing_parameters( + config, target_version, expected_exception, monkeypatch +): + def mock_get_one(a, b, c, fail_on_empty=False, fail_on_more=True): + return + + monkeypatch.setattr(DbMongo, "get_one", mock_get_one) + db = DbMongo(lock=False) + with pytest.raises(expected_exception) as exception_info: + db.db_connect(config, target_version) + assert type(exception_info.value) == expected_exception + + +@pytest.mark.parametrize( + "config, expected_exception_message", + [ + ( + { + "logger_name": "mongo_logger", + "commonkey": "common", + "uri": "mongo:27017", + "replicaset": "rs0", + "name": "osmdb1", + "loglevel": "CRITICAL", + }, + "MongoClient crashed", + ), + ( + { + "logger_name": "mongo_logger", + "commonkey": "common", + "uri": "username:pas1ed@mongo:27017", + "replicaset": "rs1", + "name": "osmdb2", + "loglevel": "DEBUG", + }, + "MongoClient crashed", + ), + ], +) +def test_db_connection_with_invalid_mongoclient( + config, expected_exception_message, monkeypatch +): + def generate_exception(a, b, replicaSet=None): + raise DbException(expected_exception_message) + + monkeypatch.setattr(MongoClient, "__init__", generate_exception) + db = DbMongo() + with pytest.raises(DbException) as exception_info: + db.db_connect(config) + assert str(exception_info.value) == db_generic_exception_message( + expected_exception_message + ) diff --git a/osm_common/tests/test_fsmongo.py b/osm_common/tests/test_fsmongo.py index 01a8ef2..7e1e47c 100644 --- 
a/osm_common/tests/test_fsmongo.py +++ b/osm_common/tests/test_fsmongo.py @@ -44,7 +44,7 @@ def invalid_path(): @pytest.fixture(scope="function", params=[True, False]) def fs_mongo(request, monkeypatch): - def mock_mongoclient_constructor(a, b, c): + def mock_mongoclient_constructor(a, b): pass def mock_mongoclient_getitem(a, b): @@ -57,9 +57,7 @@ def fs_mongo(request, monkeypatch): monkeypatch.setattr(MongoClient, "__getitem__", mock_mongoclient_getitem) monkeypatch.setattr(GridFSBucket, "__init__", mock_gridfs_constructor) fs = FsMongo(lock=request.param) - fs.fs_connect( - {"path": valid_path(), "host": "mongo", "port": 27017, "collection": "files"} - ) + fs.fs_connect({"path": valid_path(), "uri": "mongo:27017", "collection": "files"}) return fs @@ -140,17 +138,6 @@ def test_get_params(fs_mongo, monkeypatch): "fs_mongo", valid_path(), ), - ( - { - "logger_name": "fs_mongo", - "path": valid_path(), - "host": "mongo", - "port": 27017, - "collection": "files", - }, - "fs_mongo", - valid_path(), - ), ( { "logger_name": "fs_mongo", @@ -161,47 +148,16 @@ def test_get_params(fs_mongo, monkeypatch): "fs_mongo", valid_path(), ), - ( - { - "logger_name": "fs_mongo", - "path": valid_path()[:-1], - "host": "mongo", - "port": 27017, - "collection": "files", - }, - "fs_mongo", - valid_path(), - ), ( {"path": valid_path(), "uri": "mongo:27017", "collection": "files"}, "fs", valid_path(), ), - ( - { - "path": valid_path(), - "host": "mongo", - "port": 27017, - "collection": "files", - }, - "fs", - valid_path(), - ), ( {"path": valid_path()[:-1], "uri": "mongo:27017", "collection": "files"}, "fs", valid_path(), ), - ( - { - "path": valid_path()[:-1], - "host": "mongo", - "port": 27017, - "collection": "files", - }, - "fs", - valid_path(), - ), ], ) def test_fs_connect_with_valid_config(config, exp_logger, exp_path): @@ -225,16 +181,6 @@ def test_fs_connect_with_valid_config(config, exp_logger, exp_path): }, fs_connect_exception_message(invalid_path()), ), - ( - { - "logger_name": "fs_mongo", - "path": invalid_path(), - "host": "mongo", - "port": 27017, - "collection": "files", - }, - fs_connect_exception_message(invalid_path()), - ), ( { "logger_name": "fs_mongo", @@ -244,44 +190,16 @@ def test_fs_connect_with_valid_config(config, exp_logger, exp_path): }, fs_connect_exception_message(invalid_path()[:-1]), ), - ( - { - "logger_name": "fs_mongo", - "path": invalid_path()[:-1], - "host": "mongo", - "port": 27017, - "collection": "files", - }, - fs_connect_exception_message(invalid_path()[:-1]), - ), ( {"path": invalid_path(), "uri": "mongo:27017", "collection": "files"}, fs_connect_exception_message(invalid_path()), ), - ( - { - "path": invalid_path(), - "host": "mongo", - "port": 27017, - "collection": "files", - }, - fs_connect_exception_message(invalid_path()), - ), ( {"path": invalid_path()[:-1], "uri": "mongo:27017", "collection": "files"}, fs_connect_exception_message(invalid_path()[:-1]), ), ( - { - "path": invalid_path()[:-1], - "host": "mongo", - "port": 27017, - "collection": "files", - }, - fs_connect_exception_message(invalid_path()[:-1]), - ), - ( - {"path": "/", "host": "mongo", "port": 27017, "collection": "files"}, + {"path": "/", "uri": "mongo:27017", "collection": "files"}, generic_fs_exception_message( "Invalid configuration param at '[storage]': path '/' is not writable" ), @@ -302,50 +220,14 @@ def test_fs_connect_with_invalid_path(config, exp_exception_message): {"logger_name": "fs_mongo", "uri": "mongo:27017", "collection": "files"}, 'Missing parameter "path"', ), - ( - { - 
"logger_name": "fs_mongo", - "host": "mongo", - "port": 27017, - "collection": "files", - }, - 'Missing parameter "path"', - ), ( {"logger_name": "fs_mongo", "path": valid_path(), "collection": "files"}, - 'Missing parameters: "uri" or "host" + "port"', - ), - ( - { - "logger_name": "fs_mongo", - "path": valid_path(), - "port": 27017, - "collection": "files", - }, - 'Missing parameters: "uri" or "host" + "port"', - ), - ( - { - "logger_name": "fs_mongo", - "path": valid_path(), - "host": "mongo", - "collection": "files", - }, - 'Missing parameters: "uri" or "host" + "port"', + 'Missing parameters: "uri"', ), ( {"logger_name": "fs_mongo", "path": valid_path(), "uri": "mongo:27017"}, 'Missing parameter "collection"', ), - ( - { - "logger_name": "fs_mongo", - "path": valid_path(), - "host": "mongo", - "port": 27017, - }, - 'Missing parameter "collection"', - ), ], ) def test_fs_connect_with_missing_parameters(config, exp_exception_message): @@ -367,22 +249,12 @@ def test_fs_connect_with_missing_parameters(config, exp_exception_message): }, "MongoClient crashed", ), - ( - { - "logger_name": "fs_mongo", - "path": valid_path(), - "host": "mongo", - "port": 27017, - "collection": "files", - }, - "MongoClient crashed", - ), ], ) def test_fs_connect_with_invalid_mongoclient( config, exp_exception_message, monkeypatch ): - def generate_exception(a, b, c=None): + def generate_exception(a, b=None): raise Exception(exp_exception_message) monkeypatch.setattr(MongoClient, "__init__", generate_exception) @@ -405,22 +277,12 @@ def test_fs_connect_with_invalid_mongoclient( }, "Collection unavailable", ), - ( - { - "logger_name": "fs_mongo", - "path": valid_path(), - "host": "mongo", - "port": 27017, - "collection": "files", - }, - "Collection unavailable", - ), ], ) def test_fs_connect_with_invalid_mongo_collection( config, exp_exception_message, monkeypatch ): - def mock_mongoclient_constructor(a, b, c=None): + def mock_mongoclient_constructor(a, b=None): pass def generate_exception(a, b): @@ -447,22 +309,12 @@ def test_fs_connect_with_invalid_mongo_collection( }, "GridFsBucket crashed", ), - ( - { - "logger_name": "fs_mongo", - "path": valid_path(), - "host": "mongo", - "port": 27017, - "collection": "files", - }, - "GridFsBucket crashed", - ), ], ) def test_fs_connect_with_invalid_gridfsbucket( config, exp_exception_message, monkeypatch ): - def mock_mongoclient_constructor(a, b, c=None): + def mock_mongoclient_constructor(a, b=None): pass def mock_mongoclient_getitem(a, b): diff --git a/releasenotes/notes/code_cleanup-ee340441905782bf.yaml b/releasenotes/notes/code_cleanup-ee340441905782bf.yaml new file mode 100644 index 0000000..c026c54 --- /dev/null +++ b/releasenotes/notes/code_cleanup-ee340441905782bf.yaml @@ -0,0 +1,21 @@ +####################################################################################### +# Copyright ETSI Contributors and Others. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#######################################################################################
+---
+other:
+  - |
+    Only URI-based authentication is enabled in MongoClient; parameter-based (host, port, ...)
+    authentication is removed. Unit tests are added in dbmongo and updated in fsmongo.
--
2.25.1


From 22cd57a4c6e35b6f07f1a2391e5a5bb85f340b21 Mon Sep 17 00:00:00 2001
From: garciadeblas
Date: Fri, 24 Jun 2022 17:35:34 +0200
Subject: [PATCH 13/16] Update pip requirements

Change-Id: I5becd1dd45052c8b1a4024a7be5af18a57e2e3d7
Signed-off-by: garciadeblas
---
 ...ate_pip_requirements-01300482292b21d7.yaml | 21 +++++++++++++++++++
 requirements-test.txt                         | 10 ++++-----
 tox.ini                                       |  2 +-
 3 files changed, 27 insertions(+), 6 deletions(-)
 create mode 100644 releasenotes/notes/update_pip_requirements-01300482292b21d7.yaml

diff --git a/releasenotes/notes/update_pip_requirements-01300482292b21d7.yaml b/releasenotes/notes/update_pip_requirements-01300482292b21d7.yaml
new file mode 100644
index 0000000..6590952
--- /dev/null
+++ b/releasenotes/notes/update_pip_requirements-01300482292b21d7.yaml
@@ -0,0 +1,21 @@
+#######################################################################################
+# Copyright ETSI Contributors and Others.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#######################################################################################
+---
+other:
+  - |
+    Update pip requirements before creating v12 branch
+
diff --git a/requirements-test.txt b/requirements-test.txt
index bfa751a..13c777b 100644
--- a/requirements-test.txt
+++ b/requirements-test.txt
@@ -16,13 +16,13 @@
 #######################################################################################
 attrs==21.4.0
     # via pytest
-coverage==6.2
+coverage==6.4.1
     # via
     #   -r requirements-test.in
     #   nose2
 iniconfig==1.1.1
     # via pytest
-nose2==0.10.0
+nose2==0.11.0
     # via -r requirements-test.in
 packaging==21.3
     # via pytest
@@ -30,11 +30,11 @@ pluggy==1.0.0
     # via pytest
 py==1.11.0
     # via pytest
-pyparsing==3.0.6
+pyparsing==3.0.9
     # via packaging
-pytest==6.2.5
+pytest==7.1.2
     # via -r requirements-test.in
 six==1.16.0
     # via nose2
-toml==0.10.2
+tomli==2.0.1
     # via pytest
diff --git a/tox.ini b/tox.ini
index e763ca2..3a5b32b 100644
--- a/tox.ini
+++ b/tox.ini
@@ -78,7 +78,7 @@ commands =
 #######################################################################################
 [testenv:pip-compile]
-deps = pip-tools==6.4.0
+deps = pip-tools==6.6.2
 skip_install = true
 whitelist_externals = bash
 [
--
2.25.1


From 15ec83a20391bf91618952247023716218292d72 Mon Sep 17 00:00:00 2001
From: Mark Beierl
Date: Wed, 13 Jul 2022 12:48:40 -0400
Subject: [PATCH 14/16] Jenkins refresh

Change the Jenkins node label from 'docker' to 'stage_2' as part of the
Jenkins cleanup

Change-Id: I01a971d9253c751f36d507da4bcc62c428c85b95
Signed-off-by: Mark Beierl
---
 Jenkinsfile                                   | 32 ++++++++++---------
 .../jenkins-cleanup-ef4d18f0300ec7a6.yaml     | 22 +++++++++++++
 2 files changed, 39 insertions(+), 15 deletions(-)
 create mode 100644 releasenotes/notes/jenkins-cleanup-ef4d18f0300ec7a6.yaml

diff --git a/Jenkinsfile b/Jenkinsfile
index cb8bb70..b67e130 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -1,17 +1,19 @@
-/*
-  Licensed under the Apache License, Version 2.0 (the "License");
-  you may not use this file except in compliance with the License.
-  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-  implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
-*/
+/* Copyright ETSI OSM and others
+ *
+ * All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License. You may obtain
+ * a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
+ * under the License.
+ */
 
 properties([
     parameters([
@@ -31,7 +33,7 @@ def devops_checkout() {
     }
 }
 
-node('docker') {
+node('stage_2') {
     checkout scm
     devops_checkout()
 
diff --git a/releasenotes/notes/jenkins-cleanup-ef4d18f0300ec7a6.yaml b/releasenotes/notes/jenkins-cleanup-ef4d18f0300ec7a6.yaml
new file mode 100644
index 0000000..80e8fd3
--- /dev/null
+++ b/releasenotes/notes/jenkins-cleanup-ef4d18f0300ec7a6.yaml
@@ -0,0 +1,22 @@
+#######################################################################################
+# Copyright ETSI Contributors and Others.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#######################################################################################
+---
+prelude: >
+  Jenkins uses labels for each agent to determine where jobs should run. This
+  change switches the agent label to a more meaningful one.
+  No code is impacted by this change.
+
--
2.25.1


From d3b582a7268d90c072bb7fbe10a25c80851f3c1e Mon Sep 17 00:00:00 2001
From: aticig
Date: Wed, 24 Aug 2022 22:41:56 +0300
Subject: [PATCH 15/16] Fixing common security vulnerabilities

Correcting try-except-pass usage and removing improper assert usage.

Change-Id: Ic24c7e8a8f579c6dfd4a9740eff11ab1561af5e1
Signed-off-by: aticig
---
 osm_common/__init__.py                        |  6 ++--
 osm_common/dbmemory.py                        | 14 +++++---
 osm_common/msglocal.py                        | 36 ++++++++++++++++---
 ...rity_vulnerabilities-5e91fae03833135a.yaml | 20 +++++++++++
 4 files changed, 65 insertions(+), 11 deletions(-)
 create mode 100644 releasenotes/notes/Fixing_security_vulnerabilities-5e91fae03833135a.yaml

diff --git a/osm_common/__init__.py b/osm_common/__init__.py
index 8bc5507..c4c32da 100644
--- a/osm_common/__init__.py
+++ b/osm_common/__init__.py
@@ -14,6 +14,7 @@
 # implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import logging
 
 version = "7.0.0.post4"
 date_version = "2019-01-21"
@@ -23,5 +24,6 @@ try:
     from pkg_resources import get_distribution
 
     version = get_distribution("osm_common").version
-except Exception:
-    pass
+
+except Exception as init_error:
+    logging.exception(f"{init_error} occurred while getting the common version")
diff --git a/osm_common/dbmemory.py b/osm_common/dbmemory.py
index 9f8c571..ad52135 100644
--- a/osm_common/dbmemory.py
+++ b/osm_common/dbmemory.py
@@ -398,16 +398,18 @@ class DbMemory(DbBase):
                     )
                     del dict_to_update[key_to_update]
                     updated = True
-            except Exception:
-                pass
+            except Exception as unset_error:
+                self.logger.error(f"{unset_error} occurred while updating DB.")
         if pull:
             for dot_k, v in pull.items():
                 try:
                     dict_to_update, key_to_update, _ = _iterate_keys(
                         dot_k, db_item, populate=False
                     )
-                except Exception:
+                except Exception as pull_error:
+                    self.logger.error(f"{pull_error} occurred while updating DB.")
                     continue
+
                 if key_to_update not in dict_to_update:
                     continue
                 if not isinstance(dict_to_update[key_to_update], list):
@@ -430,8 +432,12 @@ class DbMemory(DbBase):
                     dict_to_update, key_to_update, _ = _iterate_keys(
                         dot_k, db_item, populate=False
                     )
-                except Exception:
+                except Exception as iterate_error:
+                    self.logger.error(
+                        f"{iterate_error} occurred while iterating keys in db update."
+                    )
                     continue
+
                 if key_to_update not in dict_to_update:
                     continue
                 if not isinstance(dict_to_update[key_to_update], list):
diff --git a/osm_common/msglocal.py b/osm_common/msglocal.py
index 2f90307..6d4cb58 100644
--- a/osm_common/msglocal.py
+++ b/osm_common/msglocal.py
@@ -64,14 +64,37 @@ class MsgLocal(MsgBase):
             try:
                 f.close()
                 self.files_read[topic] = None
-            except Exception:  # TODO refine
-                pass
+            except Exception as read_topic_error:
+                if isinstance(read_topic_error, (IOError, FileNotFoundError)):
+                    self.logger.exception(
+                        f"{read_topic_error} occurred while closing read topic files."
+                    )
+                elif isinstance(read_topic_error, KeyError):
+                    self.logger.exception(
+                        f"{read_topic_error} occurred while reading from files_read dictionary."
+                    )
+                else:
+                    self.logger.exception(
+                        f"{read_topic_error} occurred while closing read topics."
+                    )
+
         for topic, f in self.files_write.items():
             try:
                 f.close()
                 self.files_write[topic] = None
-            except Exception:  # TODO refine
-                pass
+            except Exception as write_topic_error:
+                if isinstance(write_topic_error, (IOError, FileNotFoundError)):
+                    self.logger.exception(
+                        f"{write_topic_error} occurred while closing write topic files."
+                    )
+                elif isinstance(write_topic_error, KeyError):
+                    self.logger.exception(
+                        f"{write_topic_error} occurred while reading from files_write dictionary."
+                    )
+                else:
+                    self.logger.exception(
+                        f"{write_topic_error} occurred while closing write topics."
+                    )
 
     def write(self, topic, key, msg):
         """
@@ -122,7 +145,10 @@ class MsgLocal(MsgBase):
                     continue
                 msg_dict = yaml.safe_load(self.buffer[single_topic])
                 self.buffer[single_topic] = ""
-                assert len(msg_dict) == 1
+                if len(msg_dict) != 1:
+                    raise ValueError(
+                        "Length of message dictionary is not equal to 1"
+                    )
                 for k, v in msg_dict.items():
                     return single_topic, k, v
             if not blocks:
diff --git a/releasenotes/notes/Fixing_security_vulnerabilities-5e91fae03833135a.yaml b/releasenotes/notes/Fixing_security_vulnerabilities-5e91fae03833135a.yaml
new file mode 100644
index 0000000..abf37f8
--- /dev/null
+++ b/releasenotes/notes/Fixing_security_vulnerabilities-5e91fae03833135a.yaml
@@ -0,0 +1,20 @@
+#######################################################################################
+# Copyright ETSI Contributors and Others.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#######################################################################################
+---
+security:
+  - |
+    Correcting try-except-pass usage and removing improper assert usage.
--
2.25.1


From ee6c89fd655a76eb9b9695cd326bfc6b297580fd Mon Sep 17 00:00:00 2001
From: Mark Beierl
Date: Wed, 25 Jan 2023 21:20:07 -0500
Subject: [PATCH 16/16] Updating Python Dependencies

Update of Python packages (using pip-compile) in preparation for
Release FOURTEEN

Change-Id: I604fa3686d79d04c9937c2d363889b100e2d1dd2
Signed-off-by: Mark Beierl
---
 ...ate_pip_requirements-6d71643a51c7333f.yaml | 21 ++++++++++++++++
 requirements-test.txt                         | 24 +++++++------------
 requirements.txt                              |  8 +++++--
 3 files changed, 36 insertions(+), 17 deletions(-)
 create mode 100644 releasenotes/notes/update_pip_requirements-6d71643a51c7333f.yaml

diff --git a/releasenotes/notes/update_pip_requirements-6d71643a51c7333f.yaml b/releasenotes/notes/update_pip_requirements-6d71643a51c7333f.yaml
new file mode 100644
index 0000000..2d646aa
--- /dev/null
+++ b/releasenotes/notes/update_pip_requirements-6d71643a51c7333f.yaml
@@ -0,0 +1,21 @@
+#######################################################################################
+# Copyright ETSI Contributors and Others.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+####################################################################################### +--- +other: + - | + Update pip requirements at start of v14 branch + diff --git a/requirements-test.txt b/requirements-test.txt index 13c777b..bed91b0 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -14,27 +14,21 @@ # See the License for the specific language governing permissions and # limitations under the License. ####################################################################################### -attrs==21.4.0 +attrs==22.2.0 # via pytest -coverage==6.4.1 - # via - # -r requirements-test.in - # nose2 -iniconfig==1.1.1 +coverage==7.1.0 + # via -r requirements-test.in +exceptiongroup==1.1.0 + # via pytest +iniconfig==2.0.0 # via pytest -nose2==0.11.0 +nose2==0.12.0 # via -r requirements-test.in -packaging==21.3 +packaging==23.0 # via pytest pluggy==1.0.0 # via pytest -py==1.11.0 - # via pytest -pyparsing==3.0.9 - # via packaging -pytest==7.1.2 +pytest==7.2.1 # via -r requirements-test.in -six==1.16.0 - # via nose2 tomli==2.0.1 # via pytest diff --git a/requirements.txt b/requirements.txt index 5d297e0..5995b6a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -14,15 +14,19 @@ # See the License for the specific language governing permissions and # limitations under the License. ####################################################################################### -aiokafka==0.7.2 +aiokafka==0.8.0 # via -r requirements.in +async-timeout==4.0.2 + # via aiokafka dataclasses==0.6 # via -r requirements.in kafka-python==2.0.2 # via aiokafka +packaging==23.0 + # via aiokafka pycrypto==2.6.1 # via -r requirements.in -pymongo==3.12.3 +pymongo==3.13.0 # via -r requirements.in pyyaml==5.4.1 # via -r requirements.in -- 2.25.1
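
Usage note for this series: after the cleanup in PATCH 12, DbMongo and FsMongo
accept only URI-based configurations, so any host/port pair (and optional
credentials) must be encoded in the "uri" string itself, for example
"user:password@mongo:27017". Below is a minimal, illustrative sketch of
connecting both clients; the URI, database name, path, and key values are
placeholders rather than values required by the patches, and the config keys
mirror those exercised by the unit tests above.

from osm_common.dbmongo import DbMongo
from osm_common.fsmongo import FsMongo

# Placeholder configuration for illustration only.
db = DbMongo()
db.db_connect(
    {
        "uri": "mongo:27017",    # only URI-based connections are accepted now
        "name": "osmdb",         # database name
        "commonkey": "common",   # shared key (or use "masterpassword")
        "replicaset": "rs0",     # optional
    }
)

# FsMongo additionally needs a writable local cache path and the GridFS
# collection name; the path below is a placeholder.
fs = FsMongo()
fs.fs_connect(
    {
        "path": "/app/storage/",
        "uri": "mongo:27017",
        "collection": "files",
    }
)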