--- /dev/null
+##
+# Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U.
+# This file is part of openmano
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact with: nfvlabs@tid.es
+##
+
+# This is a template with common files to be ignored; after cloning, make a copy to .gitignore
+# cp .gitignore-common .gitignore
+
+*.pyc
+*.pyo
+
+#auto-ignore
+.gitignore
+
+#logs of openmano
+logs
+
+#pycharm
+.idea
+
+#eclipse
+.project
+.pydevproject
+.settings
+
+#local stuff files that end in ".local" or folders called "local"
+*.local
+vnfs/*.local
+test/*.local
+scenarios/*.local
+instance-scenarios/*.local
+database_utils/*.local
+scripts/*.local
+local
+vnfs/local
+test/local
+scenarios/local
+instance-scenarios/local
+database_utils/local
+scripts/local
+
--- /dev/null
+FROM ubuntu:16.04
+
+# Set the working directory (created automatically by WORKDIR)
+WORKDIR /app/osm_nbi
+
+# Copy the build context into the container at /app
+# (COPY preferred over ADD for plain local files)
+COPY . /app
+
+# Install runtime dependencies; clean apt lists and skip the pip cache in the
+# same layer so the removed files never reach the image
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    git python3 \
+    python3-cherrypy3 python3-pip python3-pymongo python3-yaml \
+    && pip3 install --no-cache-dir aiokafka \
+    && rm -rf /var/lib/apt/lists/* \
+    && mkdir -p /app/storage/kafka && mkdir -p /app/log
+
+# Documentation only; the port still has to be published at "docker run"
+EXPOSE 9999
+
+LABEL Maintainer="alfonso.tiernosepulveda@telefonica.com" \
+      Description="This implements a north bound interface for OSM" \
+      Version="1.0" \
+      Author="Alfonso Tierno"
+
+# Used for local storage
+VOLUME /app/storage
+# Used for logs
+VOLUME /app/log
+
+# The following ENV can be added with "docker run -e xxx" to configure
+# server
+ENV OSMNBI_SOCKET_HOST=0.0.0.0
+ENV OSMNBI_SOCKET_PORT=9999
+# storage
+ENV OSMNBI_STORAGE_PATH=/app/storage
+# database
+ENV OSMNBI_DATABASE_DRIVER=mongo
+ENV OSMNBI_DATABASE_HOST=mongo
+ENV OSMNBI_DATABASE_PORT=27017
+# web
+ENV OSMNBI_STATIC_DIR=/app/osm_nbi/html_public
+# logs
+ENV OSMNBI_LOG_FILE=/app/log
+ENV OSMNBI_LOG_LEVEL=DEBUG
+# message
+ENV OSMNBI_MESSAGE_DRIVER=kafka
+ENV OSMNBI_MESSAGE_HOST=kafka
+ENV OSMNBI_MESSAGE_PORT=9092
+
+# Run nbi.py (exec form: nbi becomes PID 1 and receives SIGTERM on stop)
+CMD ["python3", "nbi.py"]
+
+
--- /dev/null
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
--- /dev/null
+#include MANIFEST.in
+#include requirements.txt
+include README.rst
+recursive-include osm_nbi *
+
--- /dev/null
+# Build/package Makefile for osm-nbi (deb packaging via stdeb)
+.PHONY: all test clean
+
+SHELL := /bin/bash
+
+BRANCH ?= master
+
+# NOTE(review): "all" invokes a "build" target that is not defined in this
+# Makefile — presumably provided elsewhere or a leftover; confirm.
+all:
+	$(MAKE) clean_build build
+	$(MAKE) clean_build package
+
+clean: clean_build
+	rm -rf .build
+
+clean_build:
+	rm -rf build
+	find osm_nbi -name '*.pyc' -delete
+	find osm_nbi -name '*.pyo' -delete
+
+# Stage every file needed by setup.py into build/
+prepare:
+	mkdir -p build/
+	cp tox.ini build/
+	cp MANIFEST.in build/
+	cp requirements.txt build/
+	cp README.rst build/
+	cp setup.py build/
+	cp stdeb.cfg build/
+	cp -r osm_nbi build/
+	cp LICENSE build/osm_nbi
+
+
+package: prepare
+# apt-get install -y python-stdeb
+	cd build && python3 setup.py --command-packages=stdeb.command sdist_dsc # --with-python2=False
+	cd build/deb_dist/osm-nbi-* && dpkg-buildpackage -rfakeroot -uc -us
+	mkdir -p .build
+	cp build/deb_dist/python3-*.deb .build/
+
+snap:
+	echo "Nothing to be done yet"
+
+install: package
+# BUG FIX: "package" copies python3-*.deb into .build/, so the old
+# "python-osm-nbi*.deb" glob could never match anything
+	dpkg -i .build/python3-osm-nbi*.deb
+	cd .. && \
+	OSMLIBOVIM_PATH=`python -c 'import lib_osm_openvim; print(lib_osm_openvim.__path__[0])'` || FATAL "lib-osm-openvim was not properly installed" && \
+	OSMNBI_PATH=`python3 -c 'import osm_nbi; print(osm_nbi.__path__[0])'` || FATAL "osm-nbi was not properly installed" && \
+	service osm-nbi restart
+
+develop: prepare
+# pip install -r requirements.txt
+	cd build && ./setup.py develop
+
+test:
+	echo "TODO"
+
--- /dev/null
+===========
+osm-nbi
+===========
+
+osm-nbi is the North Bound Interface for OSM, a REST server serving json/yaml
+It also implements a functional html web server
+
+
--- /dev/null
+from http import HTTPStatus
+
+__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
+
+
+class DbException(Exception):
+    """Exception raised by database drivers; carries an HTTP status code
+    so the REST layer can map database errors onto HTTP replies."""
+
+    def __init__(self, message, http_code=HTTPStatus.NOT_FOUND):
+        # TODO change to http.HTTPStatus instead of int that allows .value and .name
+        self.http_code = http_code  # HTTPStatus member (or int) describing the failure
+        Exception.__init__(self, "database exception " + message)
+
+
+class DbBase(object):
+    """Abstract interface that concrete database drivers (mongo, memory)
+    implement. All methods here are no-op stubs to be overridden.
+
+    NOTE(review): the mutable default ``filter={}`` is shared across calls;
+    harmless while implementations only read it, but ``filter=None`` would
+    be safer — TODO confirm before changing the signatures.
+    """
+
+    def __init__(self):
+        pass
+
+    def db_connect(self, config):
+        # Establish the connection described by the config dictionary
+        pass
+
+    def db_disconnect(self):
+        # Release the connection established by db_connect
+        pass
+
+    def get_list(self, table, filter={}):
+        # Return the list of entries of 'table' matching 'filter'
+        pass
+
+    def get_one(self, table, filter={}, fail_on_empty=True, fail_on_more=True):
+        # Return a single matching entry; flags control whether 0 or >1 matches raise
+        pass
+
+    def create(self, table, indata):
+        # Insert 'indata' into 'table' and return its id
+        pass
+
+    def del_list(self, table, filter={}):
+        # Delete every entry of 'table' matching 'filter'
+        pass
+
+    def del_one(self, table, filter={}, fail_on_empty=True):
+        # Delete a single matching entry; 'fail_on_empty' controls raising on no match
+        pass
--- /dev/null
+import logging
+from dbbase import DbException, DbBase
+from http import HTTPStatus
+from uuid import uuid4
+from copy import deepcopy
+
+__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
+
+
+class DbMemory(DbBase):
+
+ def __init__(self, logger_name='db'):
+ self.logger = logging.getLogger(logger_name)
+ self.db = {}
+
+ def db_connect(self, config):
+ if "logger_name" in config:
+ self.logger = logging.getLogger(config["logger_name"])
+
+ @staticmethod
+ def _format_filter(filter):
+ return filter # TODO
+
+ def _find(self, table, filter):
+ for i, row in enumerate(self.db.get(table, ())):
+ match = True
+ if filter:
+ for k, v in filter.items():
+ if k not in row or v != row[k]:
+ match = False
+ if match:
+ yield i, row
+
+ def get_list(self, table, filter={}):
+ try:
+ l = []
+ for _, row in self._find(table, self._format_filter(filter)):
+ l.append(deepcopy(row))
+ return l
+ except DbException:
+ raise
+ except Exception as e: # TODO refine
+ raise DbException(str(e))
+
+ def get_one(self, table, filter={}, fail_on_empty=True, fail_on_more=True):
+ try:
+ l = None
+ for _, row in self._find(table, self._format_filter(filter)):
+ if not fail_on_more:
+ return deepcopy(row)
+ if l:
+ raise DbException("Found more than one entry with filter='{}'".format(filter),
+ HTTPStatus.CONFLICT.value)
+ l = row
+ if not l and fail_on_empty:
+ raise DbException("Not found entry with filter='{}'".format(filter), HTTPStatus.NOT_FOUND)
+ return deepcopy(l)
+ except Exception as e: # TODO refine
+ raise DbException(str(e))
+
+ def del_list(self, table, filter={}):
+ try:
+ id_list = []
+ for i, _ in self._find(table, self._format_filter(filter)):
+ id_list.append(i)
+ deleted = len(id_list)
+ for i in id_list:
+ del self.db[table][i]
+ return {"deleted": deleted}
+ except DbException:
+ raise
+ except Exception as e: # TODO refine
+ raise DbException(str(e))
+
+ def del_one(self, table, filter={}, fail_on_empty=True):
+ try:
+ for i, _ in self._find(table, self._format_filter(filter)):
+ break
+ else:
+ if fail_on_empty:
+ raise DbException("Not found entry with filter='{}'".format(filter), HTTPStatus.NOT_FOUND)
+ return None
+ del self.db[table][i]
+ return {"deleted": 1}
+ except Exception as e: # TODO refine
+ raise DbException(str(e))
+
+ def replace(self, table, filter, indata, fail_on_empty=True):
+ try:
+ for i, _ in self._find(table, self._format_filter(filter)):
+ break
+ else:
+ if fail_on_empty:
+ raise DbException("Not found entry with filter='{}'".format(filter), HTTPStatus.NOT_FOUND)
+ return None
+ self.db[table][i] = deepcopy(indata)
+ return {"upadted": 1}
+ except Exception as e: # TODO refine
+ raise DbException(str(e))
+
+ def create(self, table, indata):
+ try:
+ id = indata.get("_id")
+ if not id:
+ id = str(uuid4())
+ indata["_id"] = id
+ if table not in self.db:
+ self.db[table] = []
+ self.db[table].append(deepcopy(indata))
+ return id
+ except Exception as e: # TODO refine
+ raise DbException(str(e))
+
+
+if __name__ == '__main__':
+ # some test code
+ db = dbmemory()
+ db.create("test", {"_id": 1, "data": 1})
+ db.create("test", {"_id": 2, "data": 2})
+ db.create("test", {"_id": 3, "data": 3})
+ print("must be 3 items:", db.get_list("test"))
+ print("must return item 2:", db.get_list("test", {"_id": 2}))
+ db.del_one("test", {"_id": 2})
+ print("must be emtpy:", db.get_list("test", {"_id": 2}))
--- /dev/null
+#import pymongo
+import logging
+from pymongo import MongoClient, errors
+from dbbase import DbException, DbBase
+from http import HTTPStatus
+from time import time, sleep
+
+__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
+
+# TODO consider use this decorator for database access retries
+# @retry_mongocall
+# def retry_mongocall(call):
+# def _retry_mongocall(*args, **kwargs):
+# retry = 1
+# while True:
+# try:
+# return call(*args, **kwargs)
+# except pymongo.AutoReconnect as e:
+# if retry == 4:
+# raise DbException(str(e))
+# sleep(retry)
+# return _retry_mongocall
+
+class DbMongo(DbBase):
+ conn_initial_timout = 120
+ conn_timout = 10
+
+ def __init__(self, logger_name='db'):
+ self.logger = logging.getLogger(logger_name)
+
+ def db_connect(self, config):
+ try:
+ if "logger_name" in config:
+ self.logger = logging.getLogger(config["logger_name"])
+ self.client = MongoClient(config["host"], config["port"])
+ self.db = self.client[config["name"]]
+ if "loglevel" in config:
+ self.logger.setLevel(getattr(logging, config['loglevel']))
+ # get data to try a connection
+ now = time()
+ while True:
+ try:
+ self.db.users.find_one({"username": "admin"})
+ return
+ except errors.ConnectionFailure as e:
+ if time() - now >= self.conn_initial_timout:
+ raise
+ self.logger.info("Waiting to database up {}".format(e))
+ sleep(2)
+ except errors.PyMongoError as e:
+ raise DbException(str(e))
+
+ def db_disconnect(self):
+ pass # TODO
+
+ @staticmethod
+ def _format_filter(filter):
+ try:
+ db_filter = {}
+ for query_k, query_v in filter.items():
+ dot_index = query_k.rfind(".")
+ if dot_index > 1 and query_k[dot_index+1:] in ("eq", "ne", "gt", "gte", "lt", "lte", "cont",
+ "ncont", "neq"):
+ operator = "$" + query_k[dot_index+1:]
+ if operator == "$neq":
+ operator = "$nq"
+ k = query_k[:dot_index]
+ else:
+ operator = "$eq"
+ k = query_k
+
+ v = query_v
+ if isinstance(v, list):
+ if operator in ("$eq", "$cont"):
+ operator = "$in"
+ v = query_v
+ elif operator in ("$ne", "$ncont"):
+ operator = "$nin"
+ v = query_v
+ else:
+ v = query_v.join(",")
+
+ if operator in ("$eq", "$cont"):
+ # v cannot be a comma separated list, because operator would have been changed to $in
+ db_filter[k] = v
+ elif operator == "$ncount":
+ # v cannot be a comma separated list, because operator would have been changed to $nin
+ db_filter[k] = {"$ne": v}
+ else:
+ # maybe db_filter[k] exist. e.g. in the query string for values between 5 and 8: "a.gt=5&a.lt=8"
+ if k not in db_filter:
+ db_filter[k] = {}
+ db_filter[k][operator] = v
+
+ return db_filter
+ except Exception as e:
+ raise DbException("Invalid query string filter at {}:{}. Error: {}".format(query_k, v, e),
+ http_code=HTTPStatus.BAD_REQUEST)
+
+
+ def get_list(self, table, filter={}):
+ try:
+ l = []
+ collection = self.db[table]
+ rows = collection.find(self._format_filter(filter))
+ for row in rows:
+ l.append(row)
+ return l
+ except DbException:
+ raise
+ except Exception as e: # TODO refine
+ raise DbException(str(e))
+
+ def get_one(self, table, filter={}, fail_on_empty=True, fail_on_more=True):
+ try:
+ if filter:
+ filter = self._format_filter(filter)
+ collection = self.db[table]
+ if not (fail_on_empty and fail_on_more):
+ return collection.find_one(filter)
+ rows = collection.find(filter)
+ if rows.count() == 0:
+ if fail_on_empty:
+ raise DbException("Not found entry with filter='{}'".format(filter), HTTPStatus.NOT_FOUND)
+ return None
+ elif rows.count() > 1:
+ if fail_on_more:
+ raise DbException("Found more than one entry with filter='{}'".format(filter),
+ HTTPStatus.CONFLICT)
+ return rows[0]
+ except Exception as e: # TODO refine
+ raise DbException(str(e))
+
+ def del_list(self, table, filter={}):
+ try:
+ collection = self.db[table]
+ rows = collection.delete_many(self._format_filter(filter))
+ return {"deleted": rows.deleted_count}
+ except DbException:
+ raise
+ except Exception as e: # TODO refine
+ raise DbException(str(e))
+
+ def del_one(self, table, filter={}, fail_on_empty=True):
+ try:
+ collection = self.db[table]
+ rows = collection.delete_one(self._format_filter(filter))
+ if rows.deleted_count == 0:
+ if fail_on_empty:
+ raise DbException("Not found entry with filter='{}'".format(filter), HTTPStatus.NOT_FOUND)
+ return None
+ return {"deleted": rows.deleted_count}
+ except Exception as e: # TODO refine
+ raise DbException(str(e))
+
+ def create(self, table, indata):
+ try:
+ collection = self.db[table]
+ data = collection.insert_one(indata)
+ return data.inserted_id
+ except Exception as e: # TODO refine
+ raise DbException(str(e))
+
+ def set_one(self, table, filter, update_dict, fail_on_empty=True):
+ try:
+ collection = self.db[table]
+ rows = collection.update_one(self._format_filter(filter), {"$set": update_dict})
+ if rows.updated_count == 0:
+ if fail_on_empty:
+ raise DbException("Not found entry with filter='{}'".format(filter), HTTPStatus.NOT_FOUND)
+ return None
+ return {"deleted": rows.deleted_count}
+ except Exception as e: # TODO refine
+ raise DbException(str(e))
+
+ def replace(self, table, id, indata, fail_on_empty=True):
+ try:
+ collection = self.db[table]
+ rows = collection.replace_one({"_id": id}, indata)
+ if rows.modified_count == 0:
+ if fail_on_empty:
+ raise DbException("Not found entry with filter='{}'".format(filter), HTTPStatus.NOT_FOUND)
+ return None
+ return {"replace": rows.modified_count}
+ except Exception as e: # TODO refine
+ raise DbException(str(e))
--- /dev/null
+# -*- coding: utf-8 -*-
+
+import dbmongo
+import dbmemory
+import fslocal
+import msglocal
+import msgkafka
+import tarfile
+import yaml
+import json
+import logging
+from random import choice as random_choice
+from uuid import uuid4
+from hashlib import sha256, md5
+from dbbase import DbException
+from fsbase import FsException
+from msgbase import MsgException
+from http import HTTPStatus
+from time import time
+from copy import deepcopy
+
+__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
+
+
+class EngineException(Exception):
+
+ def __init__(self, message, http_code=HTTPStatus.BAD_REQUEST):
+ self.http_code = http_code
+ Exception.__init__(self, message)
+
+
+class Engine(object):
+
+ def __init__(self):
+ self.tokens = {}
+ self.db = None
+ self.fs = None
+ self.msg = None
+ self.config = None
+ self.logger = logging.getLogger("nbi.engine")
+
+ def start(self, config):
+ """
+ Connect to database, filesystem storage, and messaging
+ :param config: two level dictionary with configuration. Top level should contain 'database', 'storage',
+ :return: None
+ """
+ self.config = config
+ try:
+ if not self.db:
+ if config["database"]["driver"] == "mongo":
+ self.db = dbmongo.DbMongo()
+ self.db.db_connect(config["database"])
+ elif config["database"]["driver"] == "memory":
+ self.db = dbmemory.DbMemory()
+ self.db.db_connect(config["database"])
+ else:
+ raise EngineException("Invalid configuration param '{}' at '[database]':'driver'".format(
+ config["database"]["driver"]))
+ if not self.fs:
+ if config["storage"]["driver"] == "local":
+ self.fs = fslocal.FsLocal()
+ self.fs.fs_connect(config["storage"])
+ else:
+ raise EngineException("Invalid configuration param '{}' at '[storage]':'driver'".format(
+ config["storage"]["driver"]))
+ if not self.msg:
+ if config["message"]["driver"] == "local":
+ self.msg = msglocal.MsgLocal()
+ self.msg.connect(config["message"])
+ elif config["message"]["driver"] == "kafka":
+ self.msg = msgkafka.MsgKafka()
+ self.msg.connect(config["message"])
+ else:
+ raise EngineException("Invalid configuration param '{}' at '[message]':'driver'".format(
+ config["storage"]["driver"]))
+ except (DbException, FsException, MsgException) as e:
+ raise EngineException(str(e), http_code=e.http_code)
+
+ def stop(self):
+ try:
+ if self.db:
+ self.db.db_disconnect()
+ if self.fs:
+ self.fs.fs_disconnect()
+ if self.fs:
+ self.fs.fs_disconnect()
+ except (DbException, FsException, MsgException) as e:
+ raise EngineException(str(e), http_code=e.http_code)
+
+    def authorize(self, token):
+        """Validate a token and return its session dict.
+
+        :param token: token id taken from the request (header or parameter)
+        :return: the stored session dict for this token
+        :raises EngineException: UNAUTHORIZED when the token is missing,
+            unknown, or past its "expires" timestamp (expired tokens are
+            removed as a side effect)
+        """
+        try:
+            if not token:
+                raise EngineException("Needed a token or Authorization http header",
+                                      http_code=HTTPStatus.UNAUTHORIZED)
+            if token not in self.tokens:
+                raise EngineException("Invalid token or Authorization http header",
+                                      http_code=HTTPStatus.UNAUTHORIZED)
+            session = self.tokens[token]
+            now = time()
+            if session["expires"] < now:
+                del self.tokens[token]
+                raise EngineException("Expired Token or Authorization http header",
+                                      http_code=HTTPStatus.UNAUTHORIZED)
+            return session
+        except EngineException:
+            # test hook: when "test.user_not_authorized" is configured, return a
+            # fake session instead of failing, so tests can skip authentication
+            if self.config["global"].get("test.user_not_authorized"):
+                return {"id": "fake-token-id-for-test",
+                        "project_id": self.config["global"].get("test.project_not_authorized", "admin"),
+                        "username": self.config["global"]["test.user_not_authorized"]}
+            else:
+                raise
+
+    def new_token(self, session, indata, remote):
+        """Create a new session token, authenticating either by username/password
+        in 'indata' or by an already-authorized 'session'.
+
+        :param session: session dict of an existing token, or None/empty
+        :param indata: request body; may contain username, password, project_id
+        :param remote: object with name/ip/port of the requesting client
+        :return: deep copy of the new session dict (valid for 3600 seconds)
+        :raises EngineException: UNAUTHORIZED on bad credentials or a project
+            the user does not belong to
+        """
+        now = time()
+        user_content = None
+
+        # Try using username/password
+        if indata.get("username"):
+            user_rows = self.db.get_list("users", {"username": indata.get("username")})
+            user_content = None
+            if user_rows:
+                user_content = user_rows[0]
+                # password is stored as sha256(password + per-user salt)
+                salt = user_content["_admin"]["salt"]
+                shadow_password = sha256(indata.get("password", "").encode('utf-8') + salt.encode('utf-8')).hexdigest()
+                if shadow_password != user_content["password"]:
+                    user_content = None
+            if not user_content:
+                raise EngineException("Invalid username/password", http_code=HTTPStatus.UNAUTHORIZED)
+        elif session:
+            # re-issue a token based on an already-authorized session
+            user_rows = self.db.get_list("users", {"username": session["username"]})
+            if user_rows:
+                user_content = user_rows[0]
+            else:
+                raise EngineException("Invalid token", http_code=HTTPStatus.UNAUTHORIZED)
+        else:
+            raise EngineException("Provide credentials: username/password or Authorization Bearer token",
+                                  http_code=HTTPStatus.UNAUTHORIZED)
+
+        # 32-char random token id
+        token_id = ''.join(random_choice('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789')
+                           for _ in range(0, 32))
+        # project selection: explicit request must be one of the user's projects,
+        # otherwise the user's first project is taken
+        if indata.get("project_id"):
+            project_id = indata.get("project_id")
+            if project_id not in user_content["projects"]:
+                raise EngineException("project {} not allowed for this user".format(project_id),
+                                      http_code=HTTPStatus.UNAUTHORIZED)
+        else:
+            project_id = user_content["projects"][0]
+        if project_id == "admin":
+            session_admin = True
+        else:
+            project = self.db.get_one("projects", {"_id": project_id})
+            session_admin = project.get("admin", False)
+        new_session = {"issued_at": now, "expires": now+3600,
+                       "_id": token_id, "id": token_id, "project_id": project_id, "username": user_content["username"],
+                       "remote_port": remote.port, "admin": session_admin}
+        if remote.name:
+            new_session["remote_host"] = remote.name
+        elif remote.ip:
+            new_session["remote_host"] = remote.ip
+
+        self.tokens[token_id] = new_session
+        return deepcopy(new_session)
+
+ def get_token_list(self, session):
+ token_list = []
+ for token_id, token_value in self.tokens.items():
+ if token_value["username"] == session["username"]:
+ token_list.append(deepcopy(token_value))
+ return token_list
+
+ def get_token(self, session, token_id):
+ token_value = self.tokens.get(token_id)
+ if not token_value:
+ raise EngineException("token not found", http_code=HTTPStatus.NOT_FOUND)
+ if token_value["username"] != session["username"] and not session["admin"]:
+ raise EngineException("needed admin privileges", http_code=HTTPStatus.UNAUTHORIZED)
+ return token_value
+
+ def del_token(self, token_id):
+ try:
+ del self.tokens[token_id]
+ return "token '{}' deleted".format(token_id)
+ except KeyError:
+ raise EngineException("Token '{}' not found".format(token_id), http_code=HTTPStatus.NOT_FOUND)
+
+    @staticmethod
+    def _remove_envelop(item, indata=None):
+        """
+        Obtain the useful data removing the envelop. It goes through the vnfd or nsd catalog and returns the
+        vnfd or nsd content
+        :param item: can be vnfds, nsds, users, projects,
+        :param indata: Content to be inspected
+        :return: the useful part of indata
+        :raises EngineException: when the catalog does not hold exactly one descriptor
+        """
+        clean_indata = indata
+        if not indata:
+            return {}
+        if item == "vnfds":
+            # accept both namespaced and plain catalog keys
+            if clean_indata.get('vnfd:vnfd-catalog'):
+                clean_indata = clean_indata['vnfd:vnfd-catalog']
+            elif clean_indata.get('vnfd-catalog'):
+                clean_indata = clean_indata['vnfd-catalog']
+            if clean_indata.get('vnfd'):
+                # exactly one descriptor is expected per upload
+                if not isinstance(clean_indata['vnfd'], list) or len(clean_indata['vnfd']) != 1:
+                    raise EngineException("'vnfd' must be a list only one element")
+                clean_indata = clean_indata['vnfd'][0]
+        elif item == "nsds":
+            if clean_indata.get('nsd:nsd-catalog'):
+                clean_indata = clean_indata['nsd:nsd-catalog']
+            elif clean_indata.get('nsd-catalog'):
+                clean_indata = clean_indata['nsd-catalog']
+            if clean_indata.get('nsd'):
+                if not isinstance(clean_indata['nsd'], list) or len(clean_indata['nsd']) != 1:
+                    raise EngineException("'nsd' must be a list only one element")
+                clean_indata = clean_indata['nsd'][0]
+        return clean_indata
+
+ def _validate_new_data(self, session, item, indata):
+ if item == "users":
+ if not indata.get("username"):
+ raise EngineException("missing 'username'", HTTPStatus.UNPROCESSABLE_ENTITY)
+ if not indata.get("password"):
+ raise EngineException("missing 'password'", HTTPStatus.UNPROCESSABLE_ENTITY)
+ if not indata.get("projects"):
+ raise EngineException("missing 'projects'", HTTPStatus.UNPROCESSABLE_ENTITY)
+ # check username not exist
+ if self.db.get_one(item, {"username": indata.get("username")}, fail_on_empty=False, fail_on_more=False):
+ raise EngineException("username '{}' exist".format(indata["username"]), HTTPStatus.CONFLICT)
+ elif item == "projects":
+ if not indata.get("name"):
+ raise EngineException("missing 'name'")
+ # check name not exist
+ if self.db.get_one(item, {"name": indata.get("name")}, fail_on_empty=False, fail_on_more=False):
+ raise EngineException("name '{}' exist".format(indata["name"]), HTTPStatus.CONFLICT)
+ elif item == "vnfds" or item == "nsds":
+ filter = {"id": indata["id"]}
+ # TODO add admin to filter, validate rights
+ self._add_read_filter(session, item, filter)
+ if self.db.get_one(item, filter, fail_on_empty=False):
+ raise EngineException("{} with id '{}' already exist for this tenant".format(item[:-1], indata["id"]),
+ HTTPStatus.CONFLICT)
+
+ # TODO validate with pyangbind
+ elif item == "nsrs":
+ pass
+
+ def _format_new_data(self, session, item, indata, admin=None):
+ now = time()
+ if not "_admin" in indata:
+ indata["_admin"] = {}
+ indata["_admin"]["created"] = now
+ indata["_admin"]["modified"] = now
+ if item == "users":
+ _id = indata["username"]
+ salt = uuid4().hex
+ indata["_admin"]["salt"] = salt
+ indata["password"] = sha256(indata["password"].encode('utf-8') + salt.encode('utf-8')).hexdigest()
+ elif item == "projects":
+ _id = indata["name"]
+ else:
+ _id = None
+ storage = None
+ if admin:
+ _id = admin.get("_id")
+ storage = admin.get("storage")
+ if not _id:
+ _id = str(uuid4())
+ if item == "vnfds" or item == "nsds":
+ if not indata["_admin"].get("projects_read"):
+ indata["_admin"]["projects_read"] = [session["project_id"]]
+ if not indata["_admin"].get("projects_write"):
+ indata["_admin"]["projects_write"] = [session["project_id"]]
+ if storage:
+ indata["_admin"]["storage"] = storage
+ indata["_id"] = _id
+
+ def _new_item_partial(self, session, item, indata, headers):
+ """
+ Used for recieve content by chunks (with a transaction_id header and/or gzip file. It will store and extract
+ :param session: session
+ :param item:
+ :param indata: http body request
+ :param headers: http request headers
+ :return: a dict with::
+ _id: <transaction_id>
+ storage: <path>: where it is saving
+ desc: <dict>: descriptor: Only present when all the content is received, extracted and read the descriptor
+ """
+ content_range_text = headers.get("Content-Range")
+ transaction_id = headers.get("Transaction-Id")
+ filename = headers.get("Content-Filename", "pkg")
+ # TODO change to Content-Disposition filename https://tools.ietf.org/html/rfc6266
+ expected_md5 = headers.get("Content-File-MD5")
+ compressed = None
+ if "application/gzip" in headers.get("Content-Type") or "application/x-gzip" in headers.get("Content-Type") or \
+ "application/zip" in headers.get("Content-Type"):
+ compressed = "gzip"
+ file_pkg = None
+ error_text = ""
+ try:
+ if content_range_text:
+ content_range = content_range_text.replace("-", " ").replace("/", " ").split()
+ if content_range[0] != "bytes": # TODO check x<y not negative < total....
+ raise IndexError()
+ start = int(content_range[1])
+ end = int(content_range[2]) + 1
+ total = int(content_range[3])
+ if len(indata) != end-start:
+ raise EngineException("Mismatch between Content-Range header {}-{} and body length of {}".format(
+ start, end-1, len(indata)), HTTPStatus.BAD_REQUEST)
+ else:
+ start = 0
+ total = end = len(indata)
+ if not transaction_id:
+ # generate transaction
+ transaction_id = str(uuid4())
+ self.fs.mkdir(transaction_id)
+ # control_file = open(self.storage["path"] + transaction_id + "/.osm.yaml", 'wb')
+ # control = {"received": 0}
+ elif not self.fs.file_exists(transaction_id):
+ raise EngineException("invalid Transaction-Id header", HTTPStatus.NOT_FOUND)
+ else:
+ pass
+ # control_file = open(self.storage["path"] + transaction_id + "/.osm.yaml", 'rw')
+ # control = yaml.load(control_file)
+ # control_file.seek(0, 0)
+ storage = self.fs.get_params()
+ storage["folder"] = transaction_id
+ storage["file"] = filename
+
+ file_path = (transaction_id, filename)
+ if self.fs.file_exists(file_path):
+ file_size = self.fs.file_size(file_path)
+ else:
+ file_size = 0
+ if file_size != start:
+ raise EngineException("invalid upload transaction sequence, expected '{}' but received '{}'".format(
+ file_size, start), HTTPStatus.BAD_REQUEST)
+ file_pkg = self.fs.file_open(file_path, 'a+b')
+ file_pkg.write(indata)
+ if end != total:
+ return {"_id": transaction_id, "storage": storage}
+ if expected_md5:
+ file_pkg.seek(0, 0)
+ file_md5 = md5()
+ chunk_data = file_pkg.read(1024)
+ while chunk_data:
+ file_md5.update(chunk_data)
+ chunk_data = file_pkg.read(1024)
+ if expected_md5 != file_md5.hexdigest():
+ raise EngineException("Error, MD5 mismatch", HTTPStatus.CONFLICT)
+ file_pkg.seek(0, 0)
+ if compressed == "gzip":
+ # TODO unzip,
+ storage["tarfile"] = filename
+ tar = tarfile.open(mode='r', fileobj=file_pkg)
+ descriptor_file_name = None
+ for tarinfo in tar:
+ tarname = tarinfo.name
+ tarname_path = tarname.split("/")
+ if not tarname_path[0] or ".." in tarname_path: # if start with "/" means absolute path
+ raise EngineException("Absolute path or '..' are not allowed for package descriptor tar.gz")
+ if len(tarname_path) == 1 and not tarinfo.isdir():
+ raise EngineException("All files must be inside a dir for package descriptor tar.gz")
+ if tarname.endswith(".yaml") or tarname.endswith(".json") or tarname.endswith(".yml"):
+ storage["file"] = tarname_path[0]
+ if len(tarname_path) == 2:
+ if descriptor_file_name:
+ raise EngineException("Found more than one descriptor file at package descriptor tar.gz")
+ descriptor_file_name = tarname
+ if not descriptor_file_name:
+ raise EngineException("Not found any descriptor file at package descriptor tar.gz")
+ self.fs.file_extract(tar, transaction_id)
+ with self.fs.file_open((transaction_id, descriptor_file_name), "r") as descriptor_file:
+ content = descriptor_file.read()
+ else:
+ content = file_pkg.read()
+ tarname = ""
+
+ if tarname.endswith(".json"):
+ error_text = "Invalid json format "
+ indata = json.load(content)
+ else:
+ error_text = "Invalid yaml format "
+ indata = yaml.load(content)
+ return {"_id": transaction_id, "storage": storage, "desc": indata}
+ except EngineException:
+ raise
+ except IndexError:
+ raise EngineException("invalid Content-Range header format. Expected 'bytes start-end/total'",
+ HTTPStatus.BAD_REQUEST)
+ except IOError as e:
+ raise EngineException("invalid upload transaction sequence: '{}'".format(e), HTTPStatus.BAD_REQUEST)
+ except (ValueError, yaml.YAMLError) as e:
+ raise EngineException(error_text + str(e))
+ finally:
+ if file_pkg:
+ file_pkg.close()
+
+ def new_nsr(self, session, ns_request):
+ """
+ Creates a new nsr into database
+ :param session: contains the used login username and working project
+ :param ns_request: params to be used for the nsr
+ :return: nsr descriptor to be stored at database and the _id
+ """
+
+ # look for nsr
+ nsd = self.get_item(session, "nsds", ns_request["nsdId"])
+ _id = str(uuid4())
+ nsr_descriptor = {
+ "name": ns_request["nsName"],
+ "name-ref": ns_request["nsName"],
+ "short-name": ns_request["nsName"],
+ "admin-status": "ENABLED",
+ "nsd": nsd,
+ "datacenter": ns_request["vimAccountId"],
+ "resource-orchestrator": "osmopenmano",
+ "description": ns_request.get("nsDescription", ""),
+ "constituent-vnfr-ref": ["TODO datacenter-id, vnfr-id"],
+
+ "operational-status": "init", # typedef ns-operational-
+ "config-status": "init", # typedef config-states
+ "detailed-status": "scheduled",
+
+ "orchestration-progress": {}, # {"networks": {"active": 0, "total": 0}, "vms": {"active": 0, "total": 0}},
+
+ "crete-time": time(),
+ "nsd-name-ref": nsd["name"],
+ "operational-events": [], # "id", "timestamp", "description", "event",
+ "nsd-ref": nsd["id"],
+ "ns-instance-config-ref": _id,
+ "id": _id,
+
+ # "input-parameter": xpath, value,
+ "ssh-authorized-key": ns_request.get("key-pair-ref"),
+ }
+ ns_request["nsr_id"] = _id
+ return nsr_descriptor, _id
+
+ def new_item(self, session, item, indata={}, kwargs=None, headers={}):
+ """
+ Creates a new entry into database
+ :param session: contains the used login username and working project
+ :param item: it can be: users, projects, vnfds, nsds, ...
+ :param indata: data to be inserted
+ :param kwargs: used to override the indata descriptor
+ :param headers: http request headers
+ :return: _id, transaction_id: identity of the inserted data. or transaction_id if Content-Range is used
+ """
+ # TODO validate input. Check not exist at database
+ # TODO add admin and status
+
+ transaction = None
+ if headers.get("Content-Range") or "application/gzip" in headers.get("Content-Type") or \
+ "application/x-gzip" in headers.get("Content-Type") or "application/zip" in headers.get("Content-Type"):
+ if not indata:
+ raise EngineException("Empty payload")
+ transaction = self._new_item_partial(session, item, indata, headers)
+ if "desc" not in transaction:
+ return transaction["_id"], False
+ indata = transaction["desc"]
+
+ content = self._remove_envelop(item, indata)
+
+ # Override descriptor with query string kwargs
+ if kwargs:
+ try:
+ for k, v in kwargs.items():
+ update_content = content
+ kitem_old = None
+ klist = k.split(".")
+ for kitem in klist:
+ if kitem_old is not None:
+ update_content = update_content[kitem_old]
+ if isinstance(update_content, dict):
+ kitem_old = kitem
+ elif isinstance(update_content, list):
+ kitem_old = int(kitem)
+ else:
+ raise EngineException(
+ "Invalid query string '{}'. Descriptor is not a list nor dict at '{}'".format(k, kitem))
+ update_content[kitem_old] = v
+ except KeyError:
+ raise EngineException(
+ "Invalid query string '{}'. Descriptor does not contain '{}'".format(k, kitem_old))
+ except ValueError:
+ raise EngineException("Invalid query string '{}'. Expected integer index list instead of '{}'".format(
+ k, kitem))
+ except IndexError:
+ raise EngineException(
+ "Invalid query string '{}'. Index '{}' out of range".format(k, kitem_old))
+ if not indata:
+ raise EngineException("Empty payload")
+
+ if item == "nsrs":
+ # in this case the imput descriptor is not the data to be stored
+ ns_request = content
+ content, _id = self.new_nsr(session, ns_request)
+ transaction = {"_id": _id}
+
+ self._validate_new_data(session, item, content)
+ self._format_new_data(session, item, content, transaction)
+ _id = self.db.create(item, content)
+ if item == "nsrs":
+ self.msg.write("ns", "create", _id)
+ return _id, True
+
+ def _add_read_filter(self, session, item, filter):
+ if session["project_id"] == "admin": # allows all
+ return filter
+ if item == "users":
+ filter["username"] = session["username"]
+ elif item == "vnfds" or item == "nsds":
+ filter["_admin.projects_read.cont"] = ["ANY", session["project_id"]]
+
+ def _add_delete_filter(self, session, item, filter):
+ if session["project_id"] != "admin" and item in ("users", "projects"):
+ raise EngineException("Only admin users can perform this task", http_code=HTTPStatus.FORBIDDEN)
+ if item == "users":
+ if filter.get("_id") == session["username"] or filter.get("username") == session["username"]:
+ raise EngineException("You cannot delete your own user", http_code=HTTPStatus.CONFLICT)
+ elif item == "project":
+ if filter.get("_id") == session["project_id"]:
+ raise EngineException("You cannot delete your own project", http_code=HTTPStatus.CONFLICT)
+ elif item in ("vnfds", "nsds") and session["project_id"] != "admin":
+ filter["_admin.projects_write.cont"] = ["ANY", session["project_id"]]
+
+ def get_item_list(self, session, item, filter={}):
+ """
+ Get a list of items
+ :param session: contains the used login username and working project
+ :param item: it can be: users, projects, vnfds, nsds, ...
+ :param filter: filter of data to be applied
+ :return: The list, it can be empty if no one match the filter.
+ """
+ # TODO add admin to filter, validate rights
+ self._add_read_filter(session, item, filter)
+ return self.db.get_list(item, filter)
+
+ def get_item(self, session, item, _id):
+ """
+ Get complete information on an items
+ :param session: contains the used login username and working project
+ :param item: it can be: users, projects, vnfds, nsds, ...
+ :param _id: server id of the item
+ :return: dictionary, raise exception if not found.
+ """
+ filter = {"_id": _id}
+ # TODO add admin to filter, validate rights
+ self._add_read_filter(session, item, filter)
+ return self.db.get_one(item, filter)
+
+ def del_item_list(self, session, item, filter={}):
+ """
+ Delete a list of items
+ :param session: contains the used login username and working project
+ :param item: it can be: users, projects, vnfds, nsds, ...
+ :param filter: filter of data to be applied
+ :return: The deleted list, it can be empty if no one match the filter.
+ """
+ # TODO add admin to filter, validate rights
+ self._add_read_filter(session, item, filter)
+ return self.db.del_list(item, filter)
+
+ def del_item(self, session, item, _id):
+ """
+ Get complete information on an items
+ :param session: contains the used login username and working project
+ :param item: it can be: users, projects, vnfds, nsds, ...
+ :param _id: server id of the item
+ :return: dictionary, raise exception if not found.
+ """
+ # TODO add admin to filter, validate rights
+ # data = self.get_item(item, _id)
+ filter = {"_id": _id}
+ self._add_delete_filter(session, item, filter)
+
+ if item == "nsrs":
+ desc = self.db.get_one(item, filter)
+ desc["_admin"]["to_delete"] = True
+ self.db.replace(item, _id, desc) # TODO change to set_one
+ self.msg.write("ns", "delete", _id)
+ return {"deleted": 1}
+
+ v = self.db.del_one(item, filter)
+ self.fs.file_delete(_id, ignore_non_exist=True)
+ if item == "nsrs":
+ self.msg.write("ns", "delete", _id)
+ return v
+
+ def prune(self):
+ """
+ Prune database not needed content
+ :return: None
+ """
+ return self.db.del_list("nsrs", {"_admin.to_delete": True})
+
+ def create_admin(self):
+ """
+ Creates a new user admin/admin into database. Only allowed if database is empty. Useful for initialization
+ :return: _id identity of the inserted data.
+ """
+ users = self.db.get_one("users", fail_on_empty=False, fail_on_more=False)
+ if users:
+ raise EngineException("Unauthorized. Database users is not empty", HTTPStatus.UNAUTHORIZED)
+ indata = {"username": "admin", "password": "admin", "projects": ["admin"]}
+ fake_session = {"project_id": "admin", "username": "admin"}
+ self._format_new_data(fake_session, "users", indata)
+ _id = self.db.create("users", indata)
+ return _id
+
    def edit_item(self, session, item, id, indata={}, kwargs=None):
        """
        Update an existing entry at database
        :param session: contains the used login username and working project
        :param item: it can be: users, projects, vnfds, nsds, ...
        :param id: identity of entry to be updated
        :param indata: data to be inserted
        :param kwargs: used to override the indata descriptor; each key is a dot-separated
            path into nested dicts (string keys) or lists (integer indexes)
        :return: dictionary, raise exception if not found.
        """

        # fetch the stored content; raises if not found or not visible to this session
        content = self.get_item(session, item, id)
        if indata:
            indata = self._remove_envelop(item, indata)
            # TODO update content with a deep-update
            # NOTE(review): 'indata' is unwrapped here but never merged into 'content';
            # only the query-string kwargs below actually modify the stored descriptor

        # Override descriptor with query string kwargs
        if kwargs:
            try:
                for k, v in kwargs.items():
                    update_content = content
                    kitem_old = None
                    klist = k.split(".")
                    # walk the path keeping 'update_content' as the parent container
                    # and 'kitem_old' as the key/index to assign in it
                    for kitem in klist:
                        if kitem_old is not None:
                            update_content = update_content[kitem_old]
                        if isinstance(update_content, dict):
                            kitem_old = kitem
                        elif isinstance(update_content, list):
                            kitem_old = int(kitem)
                        else:
                            raise EngineException(
                                "Invalid query string '{}'. Descriptor is not a list nor dict at '{}'".format(k, kitem))
                    update_content[kitem_old] = v
            except KeyError:
                raise EngineException(
                    "Invalid query string '{}'. Descriptor does not contain '{}'".format(k, kitem_old))
            except ValueError:
                raise EngineException("Invalid query string '{}'. Expected integer index list instead of '{}'".format(
                    k, kitem))
            except IndexError:
                raise EngineException(
                    "Invalid query string '{}'. Index '{}' out of range".format(k, kitem_old))

        # re-validate the resulting content and persist it as a whole document
        self._validate_new_data(session, item, content)
        # self._format_new_data(session, item, content)
        self.db.replace(item, id, content)
        return id
+
+
--- /dev/null
+
+from http import HTTPStatus
+
+__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
+
+
class FsException(Exception):
    """Exception raised by the file-storage (fsXXXX) modules, carrying an HTTP status code."""

    def __init__(self, message, http_code=HTTPStatus.INTERNAL_SERVER_ERROR):
        # keep the http code so the REST layer can map it to a response status
        self.http_code = http_code
        super().__init__("storage exception " + message)
+
+
class FsBase(object):
    """
    Abstract interface for file-storage backends (see e.g. FsLocal).
    Concrete subclasses override these no-op methods.
    """
    def __init__(self):
        pass

    def get_params(self):
        """Return a dict describing the storage backend configuration."""
        return {}

    def fs_connect(self, config):
        """Attach to the storage backend using the 'config' dict."""
        pass

    def fs_disconnect(self):
        """Detach from the storage backend."""
        pass

    def mkdir(self, folder):
        """Create a folder (or equivalent object container)."""
        pass

    def file_exists(self, storage):
        """Return whether 'storage' (str or list of str path parts) exists."""
        pass

    def file_size(self, storage):
        """Return the size of 'storage' in bytes."""
        pass

    def file_extract(self, tar_object, path):
        """Extract an open tarfile object under 'path'."""
        pass

    def file_open(self, storage, mode):
        """Open 'storage' with the given file mode and return a file object."""
        pass

    def file_delete(self, storage, ignore_non_exist=False):
        """Delete 'storage' content recursively."""
        pass
+
--- /dev/null
+import os
+import logging
+import tarfile
+from http import HTTPStatus
+from shutil import rmtree
+from fsbase import FsBase, FsException
+
+__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
+
+
class FsLocal(FsBase):
    """
    File-storage backend over the local filesystem. All operations are relative
    to the configured base path (the "path" configuration parameter).
    """

    def __init__(self, logger_name='fs'):
        self.logger = logging.getLogger(logger_name)
        self.path = None  # base directory, always stored with a trailing "/"

    def _resolve(self, storage):
        """Join a str or list-of-str storage reference under the base path."""
        if isinstance(storage, str):
            return self.path + storage
        return self.path + "/".join(storage)

    def get_params(self):
        """Describe this backend: local filesystem rooted at self.path."""
        return {"fs": "local", "path": self.path}

    def fs_connect(self, config):
        """
        Validate configuration and set the base path (which must already exist).
        :param config: dict with "path" and optional "logger_name"
        :raises FsException: on missing or invalid configuration
        """
        try:
            if "logger_name" in config:
                self.logger = logging.getLogger(config["logger_name"])
            self.path = config["path"]
            if not self.path.endswith("/"):
                self.path += "/"
            if not os.path.exists(self.path):
                raise FsException("Invalid configuration param at '[storage]': path '{}' does not exist".format(
                    config["path"]))
        except FsException:
            raise
        except Exception as e:  # TODO refine
            raise FsException(str(e))

    def fs_disconnect(self):
        """Nothing to release for a local filesystem backend."""
        pass  # TODO

    def mkdir(self, folder):
        """
        Creates a folder or parent object location
        :param folder: folder name relative to the base path
        :return: None or raises an exception
        """
        try:
            os.mkdir(self.path + folder)
        except Exception as e:
            raise FsException(str(e), http_code=HTTPStatus.INTERNAL_SERVER_ERROR)

    def file_exists(self, storage):
        """Return True when 'storage' (str or list of str) exists under the base path."""
        return os.path.exists(self._resolve(storage))

    def file_size(self, storage):
        """Return the size in bytes of 'storage' (str or list of str)."""
        return os.path.getsize(self._resolve(storage))

    def file_extract(self, tar_object, path):
        """
        Extract an already-open tar object under 'path' (str or list of str).
        :return: None
        """
        tar_object.extractall(path=self._resolve(path))

    def file_open(self, storage, mode):
        """
        Open 'storage' (str or list of str) with the given file mode.
        :return: file object
        """
        return open(self._resolve(storage), mode)

    def file_delete(self, storage, ignore_non_exist=False):
        """
        Delete storage content recursively.
        :param storage: str or list of str
        :param ignore_non_exist: when True, do not raise if storage does not exist
        :raises FsException: BAD_REQUEST when missing and not ignored
        """
        target = self._resolve(storage)
        if os.path.exists(target):
            # NOTE(review): rmtree expects a directory; a plain-file target would raise — confirm usage
            rmtree(target)
        elif not ignore_non_exist:
            raise FsException("File {} does not exist".format(storage), http_code=HTTPStatus.BAD_REQUEST)
--- /dev/null
+"""
+Contains html text in variables to make and html response
+"""
+
+import yaml
+from http import HTTPStatus
+
+__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
+
+html_start = """
+ <!DOCTYPE html>
+<html>
+<head>
+ <link href="/osm/static/style.css" rel="stylesheet">
+<title>Welcome to OSM</title>
+</head>
+<body>
+ <div id="osm_topmenu">
+ <div>
+ <a href="https://osm.etsi.org"> <img src="/osm/static/OSM-logo.png" height="42" width="100" style="vertical-align:middle"> </a>
+ <a>( {} )</a>
+ <a href="/osm/vnfpkgm/v1/vnf_packages">VNFDs </a>
+ <a href="/osm/nsd/v1/ns_descriptors">NSDs </a>
+ <a href="/osm/nslcm/v1/ns_instances">NSs </a>
+ <a href="/osm/user/v1">USERs </a>
+ <a href="/osm/project/v1">PROJECTs </a>
+ <a href="/osm/token/v1">TOKENs </a>
+ <a href="/osm/token/v1?METHOD=DELETE">logout </a>
+ </div>
+ </div>
+"""
+
+html_body = """
+<h1>{item}</h1>
+"""
+
+html_end = """
+</body>
+</html>
+"""
+
+html_body_error = "<h2> Error <pre>{}</pre> </h2>"
+
+
+
+html_auth2 = """
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
+<html>
+<head><META http-equiv="Content-Type" content="text/html; charset=UTF-8">
+ <link href="/osm/static/style.css" rel="stylesheet">
+ <title>OSM Login</title>
+</head>
+<body>
+ <div id="osm_header">
+ <div>
+ <a href="https://osm.etsi.org"> <h1><img src="/osm/static/OSM-logo.png" style="vertical-align:middle"></h1> </a>
+ </div>
+ </div>
+ <div id="osm_error_message">
+ <h1>{error}</h1>
+ </div>
+ <div class="gerritBody" id="osm_body">
+ <h1>Sign in to OSM</h1>
+ <form action="/osm/token/v1" id="login_form" method="POST">
+ <table style="border: 0;">
+ <tr><th>Username</th><td><input id="f_user" name="username" size="25" tabindex="1" type="text"></td></tr>
+ <tr><th>Password</th><td><input id="f_pass" name="password" size="25" tabindex="2" type="password"></td></tr>
+ <tr><td><input tabindex="3" type="submit" value="Sign In"></td></tr>
+ </table>
+ </form>
+ <div style="clear: both; margin-top: 15px; padding-top: 2px; margin-bottom: 15px;">
+ <div id="osm_footer">
+ <div></div>
+ </div>
+ </div>
+ </div>
+ <script src="/osm/static/login.js"> </script>
+</body>
+</html>
+"""
+
+html_upload_body = """
+<form action="/osm{}" method="post" enctype="multipart/form-data">
+ <h3> <table style="border: 0;"> <tr>
+ <td> Upload {} descriptor (tar.gz) file: <input type="file" name="descriptor_file"/> </td>
+ <td> <input type="submit" value="Upload"/> </td>
+ </tr> </table> </h3>
+</form>
+"""
+
+
def format(data, request, response, session):
    """
    Format a nice html response, depending on the data
    :param data: payload (list/tuple of entries, dict, or plain text/error data)
    :param request: cherrypy request
    :param response: cherrypy response
    :param session: session dict; optional "username"/"project_id" shown in the top bar
    :return: string with the html response (or None when replying 401 with a Basic challenge)
    """
    response.headers["Content-Type"] = 'text/html'
    if response.status == HTTPStatus.UNAUTHORIZED.value:
        if response.headers.get("WWW-Authenticate") and request.config.get("auth.allow_basic_authentication"):
            # turn the "Bearer ..." challenge into "Basic ..." so the browser shows its login popup
            response.headers["WWW-Authenticate"] = "Basic" + response.headers["WWW-Authenticate"][6:]
            return
        else:
            return html_auth2.format(error=data)
    body = html_body.format(item=request.path_info)
    if response.status and response.status > 202:
        body += html_body_error.format(yaml.safe_dump(data, explicit_start=True, indent=4, default_flow_style=False))
    elif isinstance(data, (list, tuple)):
        # fix: html_upload_body placeholders are (url, descriptor-name); the arguments were swapped,
        # producing a broken form action like "/osmVNFD"
        if request.path_info == "/vnfpkgm/v1/vnf_packages":
            body += html_upload_body.format(request.path_info, "VNFD")
        elif request.path_info == "/nsd/v1/ns_descriptors":
            body += html_upload_body.format(request.path_info, "NSD")
        for k in data:
            data_id = k.pop("_id", None)
            body += '<p> <a href="/osm/{url}/{id}">{id}</a>: {t} </p>'.format(url=request.path_info, id=data_id, t=k)
    elif isinstance(data, dict):
        if "Location" in response.headers:
            body += '<a href="{}"> show </a>'.format(response.headers["Location"])
        else:
            body += '<a href="/osm/{}?METHOD=DELETE"> <img src="/osm/static/delete.png" height="25" width="25"> </a>'.format(request.path_info)
        body += "<pre>" + yaml.safe_dump(data, explicit_start=True, indent=4, default_flow_style=False) + "</pre>"
    else:
        body = str(data)
    user_text = " "
    if session:
        if session.get("username"):
            user_text += "user: {}".format(session.get("username"))
        if session.get("project_id"):
            user_text += ", project: {}".format(session.get("project_id"))
    return html_start.format(user_text) + body + html_end
+
+
--- /dev/null
// Login form keyboard handling: Enter in the username field moves focus to the
// password field; Enter in the password field submits the form.
var loginForm = document.getElementById('login_form');
var userField = document.getElementById('f_user');
var passField = document.getElementById('f_pass');

userField.onkeydown = function (event) {
    if (event.keyCode == 13) { // Enter key
        passField.focus();
        return false; // swallow the keystroke so the form is not submitted yet
    }
};

passField.onkeydown = function (event) {
    if (event.keyCode == 13) { // Enter key
        loginForm.submit();
        return false;
    }
};

// place the cursor in the username field on page load
userField.focus();
+
--- /dev/null
/* OSM web UI styles (served as /osm/static/style.css) */

/* logo/header block, shifted right and overlapping the following content */
#osm_header{
    display: block;
    position: relative;
    top: 0px;
    left: 160px;
    margin-bottom: -60px;
    width: 140px;
    padding-left: 17px;
    }
/* top navigation menu bar */
#osm_topmenu {
    background: none;
    position: relative;
    top: 0px;
    left: 10px;
    margin-right: 10px;
}
/* error banner shown e.g. on the login page */
#osm_error_message {
    padding: 5px;
    margin: 2em;
    width: 200em;
    color: red;
    font-weight: bold;
}
--- /dev/null
+-----BEGIN CERTIFICATE-----
+MIID5zCCAs+gAwIBAgIJAO1kkr7xiRn6MA0GCSqGSIb3DQEBCwUAMIGJMQswCQYD
+VQQGEwJlczEPMA0GA1UECAwGTWFkcmlkMQ8wDQYDVQQHDAZNYWRyaWQxDDAKBgNV
+BAoMA29zbTEMMAoGA1UECwwDb3NtMRcwFQYDVQQDDA5BbGZvbnNvIFRpZXJubzEj
+MCEGCSqGSIb3DQEJARYUYWxmLnRpZXJub0BnbWFpbC5jb20wHhcNMTgwMjA5MTMx
+ODE4WhcNMTkxMDAyMTMxODE4WjCBiTELMAkGA1UEBhMCZXMxDzANBgNVBAgMBk1h
+ZHJpZDEPMA0GA1UEBwwGTWFkcmlkMQwwCgYDVQQKDANvc20xDDAKBgNVBAsMA29z
+bTEXMBUGA1UEAwwOQWxmb25zbyBUaWVybm8xIzAhBgkqhkiG9w0BCQEWFGFsZi50
+aWVybm9AZ21haWwuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA
+unj1O8oWptL4wvKFeKE487alxZTQ4tMOgmJuMd5hdcVSEyTz+60rkGrmQxWfezRP
+enM11v8/0jUPJgxKqQ9xoDKCeDwl0ptFDSS0gglUaEe1t94RutvggaLLQigni9/t
+d7bGSe+Hn/P8Z02qGI8yEa6zjfoecOge+/HKPqY6XM2ixwiDJmXAtajzkJhkb9mA
+T2g+AyrMRNq7ycGjRY4MdX52jKDL2Hmr8MCkK7F6FfTAisF85SXsTH1WAJsKn4UH
+UR2k0lZT9tWKdZIGU9D0ra5yZqjLQ37VUbLbCXVndN4kj1f7zSBh+T3ZRaxS+rLk
+8eJvPeX4qaPCSF3Y8MJRgQIDAQABo1AwTjAdBgNVHQ4EFgQU880sL05lAxGkfQBm
+l3lO8OCZ8BkwHwYDVR0jBBgwFoAU880sL05lAxGkfQBml3lO8OCZ8BkwDAYDVR0T
+BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEATKITQ5aC4CRjIl4p27hyGFvMtY/b
+qcxs/MNQ0+jQIVrWTZgVJ24whs7ofV1G7WdaWSCrZ9VEGcFKJJZxQM1dZ+yUeMzB
+VwV4HKrm17h55F0nVKqjFSt4WMlYyXkoCgAx5Kw7XxbbOHJhdFLOsotK8VrCd3ch
+FufxPm9ucXPqykO8VHLxw5b0R39el0eAIZEoZ1sPnRyHdauSvA8TaPLX8QFY/vrG
+yr2AMgapivSunFaqcxPmBpHLpG81Q/NqK/YthWIBq1UJMfFShtM/Mbiklln6QJeF
+O53zY+wdqGb7d59caDqKUb2tFOrFn1tqScaJ3k+/PjKxxhUlMe+Rbla+fA==
+-----END CERTIFICATE-----
--- /dev/null
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpAIBAAKCAQEAunj1O8oWptL4wvKFeKE487alxZTQ4tMOgmJuMd5hdcVSEyTz
++60rkGrmQxWfezRPenM11v8/0jUPJgxKqQ9xoDKCeDwl0ptFDSS0gglUaEe1t94R
+utvggaLLQigni9/td7bGSe+Hn/P8Z02qGI8yEa6zjfoecOge+/HKPqY6XM2ixwiD
+JmXAtajzkJhkb9mAT2g+AyrMRNq7ycGjRY4MdX52jKDL2Hmr8MCkK7F6FfTAisF8
+5SXsTH1WAJsKn4UHUR2k0lZT9tWKdZIGU9D0ra5yZqjLQ37VUbLbCXVndN4kj1f7
+zSBh+T3ZRaxS+rLk8eJvPeX4qaPCSF3Y8MJRgQIDAQABAoIBAGG4EYuLk+NIm+RN
+g3R7yISvKqAoJG85e1kIjAbGKvy0sxYlP1acoT/FRXXSVwkRmLTqu0Q37xIRjCa8
+OkPuO/Vfgs/PdWKwy7bfjVi32uvq6Ge6kEAECdrlnO7GbDL43lDmD3Ofestv6M2Z
+/ZFxOSPbEW1NrbYtbk9JnlrGcb8/fxXTpS2M/vpF03WfATIYzwhKO/lwrEFMbJzq
+8k39xnvoLroy166xKCb5eapyabsz6JLhDRI4UMGRwf5GdVPiUDVYx/rTZFcf0XBM
+mWF1tNp6Fk8MI19E3+nBS0sFV/SzSfu2d/dyv8yvc+AmBQwZjY58eMy+7UkfcdAs
+4QXu7SECgYEA2+zg2BA7gS6YvbkNAVb6zGuz32k8Yt7UKAakcCTCdqtDhaoubruB
+lr1xZ03qpeC0dbYCDy/K2RKNSIo/C9vUOslhMYWporqYZtj6zKDMjEM7SkRhWDy7
+M4onczW9+j96L5KgExo/Vd0oGFt2aqhnccnff2sQn/Y0gV4/gVnV5wsCgYEA2Q9T
+iPfbFTHA+fIfOTowcx74I6hT3OQVJJki7iB/GoSmUYe8HUunlpnOArInwmVVBB3Y
+LRpFw6nJ3JBbA6MD1fRg0OURmIdnWrXy7UP9Q72k8Y/sjVu85k6K1M1Uxez5Gzf9
+O8gWe69w7Tlyg7TgdS75KZkNU2SvZUr4d4gEESMCgYEAoZbNqdNbdVJgZd4GwTpA
+clycQAXZmaex6Grg/Yj03S5/IUHmdfQNhk1Rep7UTubokSiXIMNz3sTzzB3Prw3p
+RPxcDn/wISHLAp5gwGNRh6mKccoGYEfXIEpdLBiLqshjJXstZHu/l340jJj/S+TK
+hN/X+I4ZG/9RmtA+BvOT5/sCgYBE1JVaw64XZHWbTOoVzZv7hYCOXhxp2XHS7zSi
+am+jyxiqle5TI+B64z7un+v3uwZgjc41TzV8PGUkqioSVykqOZpR47HWumlN3JCy
+KHMxffvGFiufD84xAgsIPCx7rw+3T+Pfhy2J/zJCJ4tmtH7Q6hpc1InjPfRYnJN5
+5AnWHQKBgQCqw9iiDpJoXxphJaOEm5JB7NdFBGFVrSu5MQko/nflf7jaMNyriFgY
+qIHIpXt/Jyd8JJRebxuASbV9befDZ9gxU3skb2iYLJvgv3rsCmr5cgeJDI6KMBtK
+xff6GbHUYD3rrncdLALnD5DnoR5J/oOUO4016J/z33JbB3nMYALnZw==
+-----END RSA PRIVATE KEY-----
--- /dev/null
+
+from http import HTTPStatus
+
+__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
+
+
class MsgException(Exception):
    """
    Base Exception class for all msgXXXX exceptions
    """

    def __init__(self, message, http_code=HTTPStatus.INTERNAL_SERVER_ERROR):
        """
        General exception
        :param message: descriptive text
        :param http_code: <http.HTTPStatus> type. It contains ".value" (http error code) and ".name" (http error name)
        """
        # keep the http code so callers can map the error to an HTTP response
        self.http_code = http_code
        super().__init__("messaging exception " + message)
+
+
class MsgBase(object):
    """
    Base class for all msgXXXX messaging backends (kafka, local file, ...).
    Subclasses override these no-op methods.
    """

    def __init__(self):
        pass

    def connect(self, config):
        """Attach to the message broker using the given configuration dict."""
        pass

    def write(self, topic, key=None, msg=None):
        """
        Publish 'msg' under 'key' to 'topic'.
        Consistency fix: implementations and callers use (topic, key, msg); the old
        base signature was write(self, msg). Defaults keep old one-argument calls working.
        """
        pass

    def read(self, topic=None):
        """Read one message from 'topic' (implementations require the topic name)."""
        pass

    def disconnect(self):
        """Release broker resources."""
        pass
+
--- /dev/null
+import logging
+import asyncio
+import yaml
+from aiokafka import AIOKafkaConsumer
+from aiokafka import AIOKafkaProducer
+from aiokafka.errors import KafkaError
+from msgbase import MsgBase, MsgException
+#import json
+
+
class MsgKafka(MsgBase):
    """Message bus backend over Apache Kafka using aiokafka (asyncio based)."""

    def __init__(self, logger_name='msg'):
        self.logger = logging.getLogger(logger_name)
        self.host = None       # broker host, set by connect()
        self.port = None       # broker port, set by connect()
        self.consumer = None   # AIOKafkaConsumer, created per aioread() call
        self.producer = None   # AIOKafkaProducer, created per aiowrite() call
        # create a different file for each topic
        #self.files = {}

    def connect(self, config):
        """
        Store broker connection parameters and grab the asyncio event loop.
        :param config: dict with "host", "port" and optional "logger_name"
        :raises MsgException: on any configuration error
        """
        try:
            if "logger_name" in config:
                self.logger = logging.getLogger(config["logger_name"])
            self.host = config["host"]
            self.port = config["port"]
            self.topic_lst = []
            self.loop = asyncio.get_event_loop()
            self.broker = str(self.host) + ":" + str(self.port)

        except Exception as e:  # TODO refine
            raise MsgException(str(e))

    def write(self, topic, key, msg):
        """
        Synchronously publish 'msg' (serialized as yaml) under 'key' to 'topic'.
        :raises MsgException: wrapping any underlying error
        """
        try:
            self.loop.run_until_complete(self.aiowrite(topic=topic, key=key, msg=yaml.safe_dump(msg, default_flow_style=True)))

        except Exception as e:
            raise MsgException("Error writing {} topic: {}".format(topic, str(e)))

    def read(self, topic):
        """
        Synchronously read one message from 'topic'.
        :return: (key, value) tuple as produced by aioread
        :raises MsgException: wrapping any underlying error
        """
        #self.topic_lst.append(topic)
        try:
            return self.loop.run_until_complete(self.aioread(topic))
        except Exception as e:
            raise MsgException("Error reading {} topic: {}".format(topic, str(e)))

    async def aiowrite(self, topic, key, msg, loop=None):
        """
        Coroutine publishing one message; a producer is created and stopped per call.
        :param loop: optional event loop; defaults to the one stored by connect()
        :raises MsgException: wrapping any publish error
        """
        try:
            if not loop:
                loop = self.loop
            self.producer = AIOKafkaProducer(loop=loop, key_serializer=str.encode, value_serializer=str.encode,
                                             bootstrap_servers=self.broker)
            await self.producer.start()
            await self.producer.send(topic=topic, key=key, value=msg)
        except Exception as e:
            raise MsgException("Error publishing to {} topic: {}".format(topic, str(e)))
        finally:
            await self.producer.stop()

    async def aioread(self, topic, loop=None):
        """
        Coroutine returning the first message received on 'topic'.
        :return: (key, value) pair, both decoded with yaml.load
        :raises MsgException: on Kafka errors
        """
        if not loop:
            loop = self.loop
        self.consumer = AIOKafkaConsumer(loop=loop, bootstrap_servers=self.broker)
        await self.consumer.start()
        self.consumer.subscribe([topic])
        try:
            async for message in self.consumer:
                # NOTE(review): yaml.load without a safe loader on bus content — consider yaml.safe_load
                return yaml.load(message.key), yaml.load(message.value)
        except KafkaError as e:
            raise MsgException(str(e))
        finally:
            await self.consumer.stop()
+
+
--- /dev/null
+import logging
+import os
+import yaml
+import asyncio
+from msgbase import MsgBase, MsgException
+
+__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
+
+
class MsgLocal(MsgBase):
    """Message-bus implementation backed by plain files on the local filesystem.

    Each topic maps to one file under the configured path; every message is
    appended as a single-line YAML mapping {key: value}.
    """

    def __init__(self, logger_name='msg'):
        self.logger = logging.getLogger(logger_name)
        self.path = None
        # create a different file for each topic
        self.files = {}

    def connect(self, config):
        """Prepare the storage directory.

        :param config: dict with mandatory "path" and optional "logger_name"
        :return: None; raises MsgException on failure
        """
        try:
            if "logger_name" in config:
                self.logger = logging.getLogger(config["logger_name"])
            self.path = config["path"]
            if not self.path.endswith("/"):
                self.path += "/"
            if not os.path.exists(self.path):
                os.mkdir(self.path)
        except MsgException:
            raise
        except Exception as e:  # TODO refine
            raise MsgException(str(e))

    def disconnect(self):
        """Close every open topic file, best effort (errors are ignored)."""
        for f in self.files.values():
            try:
                f.close()
            except Exception:  # TODO refine
                pass

    def write(self, topic, key, msg):
        """
        Insert a message into topic
        :param topic: topic
        :param key: key text to be inserted
        :param msg: value object to be inserted
        :return: None or raises and exception
        """
        try:
            if topic not in self.files:
                self.files[topic] = open(self.path + topic, "a+")
            yaml.safe_dump({key: msg}, self.files[topic], default_flow_style=True)
            self.files[topic].flush()
        except Exception as e:  # TODO refine
            raise MsgException(str(e))

    def read(self, topic):
        """Non-blocking read of one complete message from topic.

        :param topic: topic name (also the file name under self.path)
        :return: (key, value) tuple, or None when no complete line is available
        """
        try:
            msg = ""
            if topic not in self.files:
                self.files[topic] = open(self.path + topic, "a+")
                # ignore previous content; keep a trailing partial line, if any
                for line in self.files[topic]:
                    if not line.endswith("\n"):
                        msg = line
            msg += self.files[topic].readline()
            if not msg.endswith("\n"):
                return None
            # writer uses yaml.safe_dump, so safe_load is the matching reader
            # and avoids arbitrary object construction from file content
            msg_dict = yaml.safe_load(msg)
            assert len(msg_dict) == 1
            for k, v in msg_dict.items():
                return k, v
        except Exception as e:  # TODO refine
            raise MsgException(str(e))

    async def aioread(self, topic, loop=None):
        """Async read of one message, polling the topic file every 2 seconds.

        :param topic: topic name (also the file name under self.path)
        :param loop: optional event loop used for the sleep between polls
        :return: (key, value) tuple; raises MsgException on failure
        """
        try:
            msg = ""
            if not loop:
                loop = asyncio.get_event_loop()
            if topic not in self.files:
                self.files[topic] = open(self.path + topic, "a+")
                # ignore previous content; keep a trailing partial line, if any
                for line in self.files[topic]:
                    if not line.endswith("\n"):
                        msg = line
            while True:
                msg += self.files[topic].readline()
                if msg.endswith("\n"):
                    break
                await asyncio.sleep(2, loop=loop)
            # writer uses yaml.safe_dump, so safe_load is the matching reader
            msg_dict = yaml.safe_load(msg)
            assert len(msg_dict) == 1
            for k, v in msg_dict.items():
                return k, v
        except Exception as e:  # TODO refine
            raise MsgException(str(e))
--- /dev/null
+[/]
+# tools.secureheaders.on = True
+tools.sessions.on = True
+# increase security on sessions
+tools.sessions.secure = True
+tools.sessions.httponly = True
tools.encode.on = True
tools.encode.encoding = 'utf-8'
+tools.response_headers.on = True
+
+# tools.auth_basic.on: True,
+# tools.auth_basic.realm: 'localhost',
+# tools.auth_basic.checkpassword: get_tokens
+
+
+[/static]
+tools.staticdir.on: True
+tools.staticdir.dir: "/app/osm_nbi/html_public"
+
+
+[global]
+server.socket_host: "0.0.0.0"
+server.socket_port: 9999
+
+server.ssl_module: "builtin"
+server.ssl_certificate: "./http/cert.pem"
+server.ssl_private_key: "./http/privkey.pem"
+server.ssl_pass_phrase: "osm4u"
+server.thread_pool: 10
+
+# Only for test. It works without authorization using the provided user and project:
+# test.user_not_authorized: "admin"
+# test.project_not_authorized: "admin"
+
+# Uncomment for allow basic authentication apart from bearer
+# auth.allow_basic_authentication: True
+
+log.screen: False
+log.access_file: ""
+log.error_file: ""
+
+loglevel: "DEBUG"
+#logfile: /var/log/osm/nbi.log
+
+
+[database]
+driver: "mongo" # mongo or memory
+host: "mongo" # hostname or IP
+port: 27017
+name: "osm"
+user: "user"
+password: "password"
+
+loglevel: "DEBUG"
+#logfile: /var/log/osm/nbi-database.log
+
+
+[storage]
+driver: "local" # local filesystem
+# for local provide file path
+path: "/app/storage" #"/home/atierno/OSM/osm/NBI/local/storage"
+
+loglevel: "DEBUG"
+#logfile: /var/log/osm/nbi-storage.log
+
+[message]
+driver: "kafka" # local or kafka
+# for local provide file path
+path: "/app/storage/kafka"
+host: "kafka"
+port: 9092
+
+loglevel: "DEBUG"
+#logfile: /var/log/osm/nbi-message.log
+
--- /dev/null
+#!/usr/bin/python3
+# -*- coding: utf-8 -*-
+
import json
import logging
import logging.handlers
import time

import cherrypy
import yaml

from base64 import standard_b64decode
from codecs import getreader
from http import HTTPStatus
from http.client import responses as http_responses
from os import environ
from os import getenv

import html_out as html
from dbbase import DbException
from engine import Engine, EngineException
+
+__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
+__version__ = "0.1"
+version_date = "Feb 2018"
+
+"""
+North Bound Interface (O: OSM; S: SOL5
+URL: /osm GET POST PUT DELETE PATCH
+ /nsd/v1
+ /ns_descriptors O5 O5
+ /<nsdInfoId> O5 O5 5
+ /nsd_content O5 O5
+ /pnf_descriptors 5 5
+ /<pnfdInfoId> 5 5 5
+ /pnfd_content 5 5
 /subscriptions 5 5
 /<subscriptionId> 5 X
+
+ /vnfpkgm/v1
+ /vnf_packages O5 O5
+ /<vnfPkgId> O5 O5 5
+ /vnfd O5 O
+ /package_content O5 O5
+ /upload_from_uri X
+ /artifacts/<artifactPatch X
 /subscriptions X X
 /<subscriptionId> X X
+
+ /nslcm/v1
+ /ns_instances O5 O5
+ /<nsInstanceId> O5 O5
+ TO BE COMPLETED
+ /ns_lcm_op_occs 5 5
+ /<nsLcmOpOccId> 5 5 5
+ TO BE COMPLETED 5 5
 /subscriptions 5 5
 /<subscriptionId> 5 X
+
+query string.
+ <attrName>[.<attrName>...]*[.<op>]=<value>[,<value>...]&...
+ op: "eq"(or empty to one or the values) | "neq" (to any of the values) | "gt" | "lt" | "gte" | "lte" | "cont" | "ncont"
+ all_fields, fields=x,y,.., exclude_default, exclude_fields=x,y,...
+ (none) … same as “exclude_default”
+ all_fields … all attributes.
+ fields=<list> … all attributes except all complex attributes with minimum cardinality of zero that are not conditionally mandatory, and that are not provided in <list>.
+ exclude_fields=<list> … all attributes except those complex attributes with a minimum cardinality of zero that are not conditionally mandatory, and that are provided in <list>.
+ exclude_default … all attributes except those complex attributes with a minimum cardinality of zero that are not conditionally mandatory, and that are part of the "default exclude set" defined in the present specification for the particular resource
+ exclude_default and include=<list> … all attributes except those complex attributes with a minimum cardinality of zero that are not conditionally mandatory and that are part of the "default exclude set" defined in the present specification for the particular resource, but that are not part of <list>
+Header field name Reference Example Descriptions
+ Accept IETF RFC 7231 [19] application/json Content-Types that are acceptable for the response.
+ This header field shall be present if the response is expected to have a non-empty message body.
+ Content-Type IETF RFC 7231 [19] application/json The MIME type of the body of the request.
+ This header field shall be present if the request has a non-empty message body.
+ Authorization IETF RFC 7235 [22] Bearer mF_9.B5f-4.1JqM The authorization token for the request. Details are specified in clause 4.5.3.
+ Range IETF RFC 7233 [21] 1000-2000 Requested range of bytes from a file
+Header field name Reference Example Descriptions
+ Content-Type IETF RFC 7231 [19] application/json The MIME type of the body of the response.
+ This header field shall be present if the response has a non-empty message body.
+ Location IETF RFC 7231 [19] http://www.example.com/vnflcm/v1/vnf_instances/123 Used in redirection, or when a new resource has been created.
+ This header field shall be present if the response status code is 201 or 3xx.
+ In the present document this header field is also used if the response status code is 202 and a new resource was created.
+ WWW-Authenticate IETF RFC 7235 [22] Bearer realm="example" Challenge if the corresponding HTTP request has not provided authorization, or error details if the corresponding HTTP request has provided an invalid authorization token.
+ Accept-Ranges IETF RFC 7233 [21] bytes Used by the Server to signal whether or not it supports ranges for certain resources.
+ Content-Range IETF RFC 7233 [21] bytes 21010-47021/ 47022 Signals the byte range that is contained in the response, and the total length of the file.
+ Retry-After IETF RFC 7231 [19] Fri, 31 Dec 1999 23:59:59 GMT
+
+ or
+
+ 120 Used to indicate how long the user agent ought to wait before making a follow-up request.
+ It can be used with 503 responses.
+ The value of this field can be an HTTP-date or a number of seconds to delay after the response is received.
+
+ #TODO http header for partial uploads: Content-Range: "bytes 0-1199/15000". Id is returned first time and send in following chunks
+"""
+
+
class NbiException(Exception):
    """Exception carrying an HTTP status code for the NBI error responses.

    :param message: human readable error text
    :param http_code: http.HTTPStatus member to return to the client
    """

    def __init__(self, message, http_code=HTTPStatus.METHOD_NOT_ALLOWED):
        super().__init__(message)
        self.http_code = http_code
+
+
class Server(object):
    """CherryPy request handler implementing the OSM north bound interface.

    Exposed endpoints: index (/), token (/token/...), test (/test/...) and
    default (generic dispatcher for nsd/vnfpkgm/nslcm/user/project items).
    """
    instance = 0
    # to decode bytes to str
    reader = getreader("utf-8")

    def __init__(self):
        # NOTE(review): this reads the class attribute and creates an instance
        # attribute, so Server.instance never grows past 0; if a global
        # instance counter is intended, use Server.instance += 1 — confirm
        self.instance += 1
        self.engine = Engine()

    def _authorization(self):
        """Resolve the requester's session token.

        Sources, in order: "Authorization: Bearer" header, the cherrypy
        session, or (if enabled) HTTP basic authentication, which generates a
        new token.
        :return: session dict from engine.authorize(); raises EngineException
            (adding a WWW-Authenticate header) when not authorized
        """
        token = None
        user_passwd64 = None
        try:
            # 1. Get token Authorization bearer
            auth = cherrypy.request.headers.get("Authorization")
            if auth:
                auth_list = auth.split(" ")
                if auth_list[0].lower() == "bearer":
                    token = auth_list[-1]
                elif auth_list[0].lower() == "basic":
                    user_passwd64 = auth_list[-1]
            if not token:
                if cherrypy.session.get("Authorization"):
                    # 2. Try using session before request a new token. If not, basic authentication will generate
                    token = cherrypy.session.get("Authorization")
                    if token == "logout":
                        token = None  # force Unauthorized response to insert user pasword again
                elif user_passwd64 and cherrypy.request.config.get("auth.allow_basic_authentication"):
                    # 3. Get new token from user password
                    user = None
                    passwd = None
                    try:
                        user_passwd = standard_b64decode(user_passwd64).decode()
                        user, _, passwd = user_passwd.partition(":")
                    except Exception:
                        # malformed base64/credentials: fall through with None
                        # user/passwd and let new_token reject them
                        pass
                    outdata = self.engine.new_token(None, {"username": user, "password": passwd})
                    token = outdata["id"]
                    cherrypy.session['Authorization'] = token
            # 4. Get token from cookie
            # if not token:
            #     auth_cookie = cherrypy.request.cookie.get("Authorization")
            #     if auth_cookie:
            #         token = auth_cookie.value
            return self.engine.authorize(token)
        except EngineException as e:
            if cherrypy.session.get('Authorization'):
                del cherrypy.session['Authorization']
            cherrypy.response.headers["WWW-Authenticate"] = 'Bearer realm="{}"'.format(e)
            raise

    def _format_in(self, kwargs):
        """Parse the request body and query string.

        Body is decoded according to Content-Type (json, yaml, binary/gzip/zip
        passthrough, multipart file upload); query-string values are coerced
        (empty -> None, comparison suffixes -> numbers, commas -> lists, or
        yaml when the Query-String-Format header asks for it).
        :param kwargs: query-string dict; modified in place
        :return: (indata, method) tuple; raises NbiException on bad input
        """
        try:
            indata = None
            if cherrypy.request.body.length:
                error_text = "Invalid input format "

                if "Content-Type" in cherrypy.request.headers:
                    if "application/json" in cherrypy.request.headers["Content-Type"]:
                        error_text = "Invalid json format "
                        indata = json.load(self.reader(cherrypy.request.body))
                    elif "application/yaml" in cherrypy.request.headers["Content-Type"]:
                        error_text = "Invalid yaml format "
                        # TODO(sec): yaml.load on an untrusted request body;
                        # consider yaml.safe_load
                        indata = yaml.load(cherrypy.request.body)
                    elif "application/binary" in cherrypy.request.headers["Content-Type"] or \
                            "application/gzip" in cherrypy.request.headers["Content-Type"] or \
                            "application/zip" in cherrypy.request.headers["Content-Type"]:
                        indata = cherrypy.request.body.read()
                    elif "multipart/form-data" in cherrypy.request.headers["Content-Type"]:
                        if "descriptor_file" in kwargs:
                            filecontent = kwargs.pop("descriptor_file")
                            if not filecontent.file:
                                raise NbiException("empty file or content", HTTPStatus.BAD_REQUEST)
                            indata = filecontent.file.read()
                            if filecontent.content_type.value:
                                cherrypy.request.headers["Content-Type"] = filecontent.content_type.value
                    else:
                        # raise cherrypy.HTTPError(HTTPStatus.Not_Acceptable,
                        #                          "Only 'Content-Type' of type 'application/json' or
                        #                          'application/yaml' for input format are available")
                        error_text = "Invalid yaml format "
                        # TODO(sec): yaml.load on an untrusted request body
                        indata = yaml.load(cherrypy.request.body)
                else:
                    error_text = "Invalid yaml format "
                    # TODO(sec): yaml.load on an untrusted request body
                    indata = yaml.load(cherrypy.request.body)
            if not indata:
                indata = {}

            if "METHOD" in kwargs:
                method = kwargs.pop("METHOD")
            else:
                method = cherrypy.request.method
            format_yaml = False
            if cherrypy.request.headers.get("Query-String-Format") == "yaml":
                format_yaml = True

            for k, v in kwargs.items():
                if isinstance(v, str):
                    if v == "":
                        kwargs[k] = None
                    elif format_yaml:
                        try:
                            kwargs[k] = yaml.load(v)
                        except Exception:
                            pass
                    elif k.endswith(".gt") or k.endswith(".lt") or k.endswith(".gte") or k.endswith(".lte"):
                        # comparison operators compare numbers when possible
                        try:
                            kwargs[k] = int(v)
                        except Exception:
                            try:
                                kwargs[k] = float(v)
                            except Exception:
                                pass
                    elif v.find(",") > 0:
                        kwargs[k] = v.split(",")
                elif isinstance(v, (list, tuple)):
                    for index in range(0, len(v)):
                        if v[index] == "":
                            v[index] = None
                        elif format_yaml:
                            try:
                                v[index] = yaml.load(v[index])
                            except Exception:
                                pass

            return indata, method
        except (ValueError, yaml.YAMLError) as exc:
            raise NbiException(error_text + str(exc), HTTPStatus.BAD_REQUEST)
        except KeyError as exc:
            raise NbiException("Query string error: " + str(exc), HTTPStatus.BAD_REQUEST)

    @staticmethod
    def _format_out(data, session=None):
        """
        return string of dictionary data according to requested json, yaml, xml. By default json
        :param data: response to be sent. Can be a dict or text
        :param session: session data used only by the html renderer
        :return: encoded response body (bytes or str)
        """
        if "Accept" in cherrypy.request.headers:
            accept = cherrypy.request.headers["Accept"]
            if "application/json" in accept:
                cherrypy.response.headers["Content-Type"] = 'application/json; charset=utf-8'
                a = json.dumps(data, indent=4) + "\n"
                return a.encode("utf8")
            elif "text/html" in accept:
                return html.format(data, cherrypy.request, cherrypy.response, session)

            elif "application/yaml" in accept or "*/*" in accept:
                pass
            else:
                raise cherrypy.HTTPError(HTTPStatus.NOT_ACCEPTABLE.value,
                                         "Only 'Accept' of type 'application/json' or 'application/yaml' "
                                         "for output format are available")
        # default (and explicit yaml) output format
        cherrypy.response.headers["Content-Type"] = 'application/yaml'
        return yaml.safe_dump(data, explicit_start=True, indent=4, default_flow_style=False, tags=False,
                              encoding='utf-8', allow_unicode=True)  # , canonical=True, default_style='"'

    @cherrypy.expose
    def index(self, *args, **kwargs):
        """Root page; only GET is allowed and it requires authorization."""
        session = None
        try:
            if cherrypy.request.method == "GET":
                session = self._authorization()
                outdata = "Index page"
            else:
                raise cherrypy.HTTPError(HTTPStatus.METHOD_NOT_ALLOWED.value,
                                         "Method {} not allowed for tokens".format(cherrypy.request.method))

            return self._format_out(outdata, session)

        except EngineException as e:
            cherrypy.log("index Exception {}".format(e))
            cherrypy.response.status = e.http_code.value
            return self._format_out("Welcome to OSM!", session)

    @cherrypy.expose
    def token(self, *args, **kwargs):
        """Token management endpoint: GET lists/gets, POST creates (login),
        DELETE removes (logout)."""
        if not args:
            raise NbiException("URL must contain at least 'item/version'", HTTPStatus.METHOD_NOT_ALLOWED)
        version = args[0]
        if version != 'v1':
            raise NbiException("URL version '{}' not supported".format(version), HTTPStatus.METHOD_NOT_ALLOWED)
        session = None
        # self.engine.load_dbase(cherrypy.request.app.config)
        try:
            indata, method = self._format_in(kwargs)
            if method == "GET":
                session = self._authorization()
                if len(args) >= 2:
                    outdata = self.engine.get_token(session, args[1])
                else:
                    outdata = self.engine.get_token_list(session)
            elif method == "POST":
                try:
                    session = self._authorization()
                except Exception:
                    # not authorized yet; a new token is being requested
                    session = None
                if kwargs:
                    indata.update(kwargs)
                outdata = self.engine.new_token(session, indata, cherrypy.request.remote)
                session = outdata
                cherrypy.session['Authorization'] = outdata["_id"]
                # cherrypy.response.cookie["Authorization"] = outdata["id"]
                # cherrypy.response.cookie["Authorization"]['expires'] = 3600
            elif method == "DELETE":
                if len(args) >= 2 and "logout" not in args:
                    token_id = args[1]
                elif "id" in kwargs:
                    token_id = kwargs["id"]
                else:
                    session = self._authorization()
                    token_id = session["_id"]
                outdata = self.engine.del_token(token_id)
                session = None
                cherrypy.session['Authorization'] = "logout"
                # cherrypy.response.cookie["Authorization"] = token_id
                # cherrypy.response.cookie["Authorization"]['expires'] = 0
            else:
                raise NbiException("Method {} not allowed for token".format(method), HTTPStatus.METHOD_NOT_ALLOWED)
            return self._format_out(outdata, session)
        except (NbiException, EngineException, DbException) as e:
            cherrypy.log("tokens Exception {}".format(e))
            cherrypy.response.status = e.http_code.value
            problem_details = {
                "code": e.http_code.name,
                "status": e.http_code.value,
                "detail": str(e),
            }
            return self._format_out(problem_details, session)

    @cherrypy.expose
    def test(self, *args, **kwargs):
        """Debug/bootstrap endpoints (init, prune, login, sleep, message,
        request echo). Not part of the public API."""
        thread_info = None
        if args and args[0] == "init":
            try:
                # self.engine.load_dbase(cherrypy.request.app.config)
                self.engine.create_admin()
                return "Done. User 'admin', password 'admin' created"
            except Exception:
                cherrypy.response.status = HTTPStatus.FORBIDDEN.value
                return self._format_out("Database already initialized")
        elif args and args[0] == "prune":
            return self.engine.prune()
        elif args and args[0] == "login":
            if not cherrypy.request.headers.get("Authorization"):
                cherrypy.response.headers["WWW-Authenticate"] = 'Basic realm="Access to OSM site", charset="UTF-8"'
                cherrypy.response.status = HTTPStatus.UNAUTHORIZED.value
        elif args and args[0] == "login2":
            if not cherrypy.request.headers.get("Authorization"):
                cherrypy.response.headers["WWW-Authenticate"] = 'Bearer realm="Access to OSM site"'
                cherrypy.response.status = HTTPStatus.UNAUTHORIZED.value
        elif args and args[0] == "sleep":
            sleep_time = 5
            try:
                sleep_time = int(args[1])
            except Exception:
                cherrypy.response.status = HTTPStatus.FORBIDDEN.value
                return self._format_out("Database already initialized")
            thread_info = cherrypy.thread_data
            print(thread_info)
            time.sleep(sleep_time)
            # thread_info
        elif len(args) >= 2 and args[0] == "message":
            topic = args[1]
            try:
                for k, v in kwargs.items():
                    # TODO(sec): yaml.load on untrusted input; consider safe_load
                    self.engine.msg.write(topic, k, yaml.load(v))
                return "ok"
            except Exception as e:
                return "Error: " + format(e)

        # fall-through: echo the request for debugging
        return_text = (
            "<html><pre>\nheaders:\n args: {}\n".format(args) +
            " kwargs: {}\n".format(kwargs) +
            " headers: {}\n".format(cherrypy.request.headers) +
            " path_info: {}\n".format(cherrypy.request.path_info) +
            " query_string: {}\n".format(cherrypy.request.query_string) +
            " session: {}\n".format(cherrypy.session) +
            " cookie: {}\n".format(cherrypy.request.cookie) +
            " method: {}\n".format(cherrypy.request.method) +
            " session: {}\n".format(cherrypy.session.get('fieldname')) +
            " body:\n")
        return_text += " length: {}\n".format(cherrypy.request.body.length)
        if cherrypy.request.body.length:
            return_text += " content: {}\n".format(
                str(cherrypy.request.body.read(int(cherrypy.request.headers.get('Content-Length', 0)))))
        if thread_info:
            return_text += "thread: {}\n".format(thread_info)
        return_text += "</pre></html>"
        return return_text

    @cherrypy.expose
    def default(self, *args, **kwargs):
        """Generic dispatcher for /osm/<item>/v1[/...] resources.

        Maps the SOL005-style URL (nsd, vnfpkgm, nslcm, user, project) to the
        engine collection and CRUD operation selected by the HTTP method.
        """
        session = None
        try:
            if not args or len(args) < 2:
                raise NbiException("URL must contain at least 'item/version'", HTTPStatus.METHOD_NOT_ALLOWED)
            item = args[0]
            version = args[1]
            if item not in ("token", "user", "project", "vnfpkgm", "nsd", "nslcm"):
                raise NbiException("URL item '{}' not supported".format(item), HTTPStatus.METHOD_NOT_ALLOWED)
            if version != 'v1':
                raise NbiException("URL version '{}' not supported".format(version), HTTPStatus.METHOD_NOT_ALLOWED)

            # self.engine.load_dbase(cherrypy.request.app.config)
            session = self._authorization()
            indata, method = self._format_in(kwargs)
            _id = None

            if item == "nsd":
                item = "nsds"
                if len(args) < 3 or args[2] != "ns_descriptors":
                    raise NbiException("only ns_descriptors is allowed", HTTPStatus.METHOD_NOT_ALLOWED)
                if len(args) > 3:
                    _id = args[3]
                if len(args) > 4 and args[4] != "nsd_content":
                    raise NbiException("only nsd_content is allowed", HTTPStatus.METHOD_NOT_ALLOWED)
            elif item == "vnfpkgm":
                item = "vnfds"
                if len(args) < 3 or args[2] != "vnf_packages":
                    raise NbiException("only vnf_packages is allowed", HTTPStatus.METHOD_NOT_ALLOWED)
                if len(args) > 3:
                    _id = args[3]
                if len(args) > 4 and args[4] not in ("vnfd", "package_content"):
                    raise NbiException("only vnfd or package_content are allowed", HTTPStatus.METHOD_NOT_ALLOWED)
            elif item == "nslcm":
                item = "nsrs"
                if len(args) < 3 or args[2] != "ns_instances":
                    raise NbiException("only ns_instances is allowed", HTTPStatus.METHOD_NOT_ALLOWED)
                if len(args) > 3:
                    _id = args[3]
                if len(args) > 4:
                    raise NbiException("This feature is not implemented", HTTPStatus.METHOD_NOT_ALLOWED)
            else:
                # plain collections: user -> users, project -> projects
                if len(args) >= 3:
                    _id = args[2]
                item += "s"

            if method == "GET":
                if not _id:
                    outdata = self.engine.get_item_list(session, item, kwargs)
                else:  # len(args) > 1
                    outdata = self.engine.get_item(session, item, _id)
            elif method == "POST":
                id, completed = self.engine.new_item(session, item, indata, kwargs, cherrypy.request.headers)
                if not completed:
                    cherrypy.response.headers["Transaction-Id"] = id
                    cherrypy.response.status = HTTPStatus.CREATED.value
                else:
                    cherrypy.response.headers["Location"] = cherrypy.request.base + "/osm/" + "/".join(args[0:3]) + "/" + id
                outdata = {"id": id}
            elif method == "DELETE":
                if not _id:
                    outdata = self.engine.del_item_list(session, item, kwargs)
                else:  # len(args) > 1
                    outdata = self.engine.del_item(session, item, _id)
            elif method == "PUT":
                if not _id:
                    raise NbiException("Missing '/<id>' at the URL to identify item to be updated",
                                       HTTPStatus.METHOD_NOT_ALLOWED)
                elif not indata and not kwargs:
                    raise NbiException("Nothing to update. Provide payload and/or query string",
                                       HTTPStatus.BAD_REQUEST)
                # FIX: was args[1], which is the literal version string "v1";
                # the item id computed above must be passed to edit_item
                outdata = {"id": self.engine.edit_item(session, item, _id, indata, kwargs)}
            else:
                raise NbiException("Method {} not allowed".format(method), HTTPStatus.METHOD_NOT_ALLOWED)
            return self._format_out(outdata, session)
        except (NbiException, EngineException, DbException) as e:
            cherrypy.log("Exception {}".format(e))
            cherrypy.response.status = e.http_code.value
            problem_details = {
                "code": e.http_code.name,
                "status": e.http_code.value,
                "detail": str(e),
            }
            return self._format_out(problem_details, session)
            # raise cherrypy.HTTPError(e.http_code.value, str(e))
+
+
+# def validate_password(realm, username, password):
+# cherrypy.log("realm "+ str(realm))
+# if username == "admin" and password == "admin":
+# return True
+# return False
+
+
def _start_service():
    """
    Callback function called when cherrypy.engine starts
    Override configuration with env variables
    Set database, storage, message configuration
    Init database with admin/admin user password
    """
    cherrypy.log.error("Starting osm_nbi")
    # update general cherrypy configuration
    update_dict = {}

    engine_config = cherrypy.tree.apps['/osm'].config
    # environment variables are mapped as OSMNBI_<section>_<key>
    for k, v in environ.items():
        if not k.startswith("OSMNBI_"):
            continue
        k1, _, k2 = k[7:].lower().partition("_")
        if not k2:
            continue
        try:
            # update static configuration
            if k == 'OSMNBI_STATIC_DIR':
                engine_config["/static"]['tools.staticdir.dir'] = v
                engine_config["/static"]['tools.staticdir.on'] = True
            elif k == 'OSMNBI_SOCKET_PORT' or k == 'OSMNBI_SERVER_PORT':
                update_dict['server.socket_port'] = int(v)
            elif k == 'OSMNBI_SOCKET_HOST' or k == 'OSMNBI_SERVER_HOST':
                update_dict['server.socket_host'] = v
            elif k1 == "server":
                # FIX: was 'server' + k2, producing keys like "serverthread_pool"
                # instead of the cherrypy "server.thread_pool" form
                update_dict['server.' + k2] = v
            # TODO add more entries
            elif k1 in ("message", "database", "storage"):
                if k2 == "port":
                    engine_config[k1][k2] = int(v)
                else:
                    engine_config[k1][k2] = v
        except ValueError as e:
            # FIX: the '{}' placeholder was concatenated, never formatted
            cherrypy.log.error("Ignoring environ '{}': {}".format(k, e))
        except Exception as e:
            cherrypy.log.warn("skipping environ '{}' on exception '{}'".format(k, e))

    if update_dict:
        cherrypy.config.update(update_dict)

    # logging cherrypy
    log_format_simple = "%(asctime)s %(levelname)s %(name)s %(filename)s:%(lineno)s %(message)s"
    log_formatter_simple = logging.Formatter(log_format_simple, datefmt='%Y-%m-%dT%H:%M:%S')
    logger_server = logging.getLogger("cherrypy.error")
    logger_access = logging.getLogger("cherrypy.access")
    logger_cherry = logging.getLogger("cherrypy")
    logger_nbi = logging.getLogger("nbi")

    if "logfile" in engine_config["global"]:
        file_handler = logging.handlers.RotatingFileHandler(engine_config["global"]["logfile"],
                                                            maxBytes=100e6, backupCount=9, delay=0)
        file_handler.setFormatter(log_formatter_simple)
        logger_cherry.addHandler(file_handler)
        logger_nbi.addHandler(file_handler)
    else:
        # no logfile configured: log everything to stderr with a per-logger tag
        for format_, logger in {"nbi.server": logger_server,
                                "nbi.access": logger_access,
                                "%(name)s %(filename)s:%(lineno)s": logger_nbi
                                }.items():
            log_format_cherry = "%(asctime)s %(levelname)s {} %(message)s".format(format_)
            log_formatter_cherry = logging.Formatter(log_format_cherry, datefmt='%Y-%m-%dT%H:%M:%S')
            str_handler = logging.StreamHandler()
            str_handler.setFormatter(log_formatter_cherry)
            logger.addHandler(str_handler)

    if engine_config["global"].get("loglevel"):
        logger_cherry.setLevel(engine_config["global"]["loglevel"])
        logger_nbi.setLevel(engine_config["global"]["loglevel"])

    # logging other modules
    for k1, logname in {"message": "nbi.msg", "database": "nbi.db", "storage": "nbi.fs"}.items():
        engine_config[k1]["logger_name"] = logname
        logger_module = logging.getLogger(logname)
        if "logfile" in engine_config[k1]:
            file_handler = logging.handlers.RotatingFileHandler(engine_config[k1]["logfile"],
                                                                maxBytes=100e6, backupCount=9, delay=0)
            file_handler.setFormatter(log_formatter_simple)
            logger_module.addHandler(file_handler)
        if "loglevel" in engine_config[k1]:
            logger_module.setLevel(engine_config[k1]["loglevel"])
    # TODO add more entries, e.g.: storage
    cherrypy.tree.apps['/osm'].root.engine.start(engine_config)
    try:
        cherrypy.tree.apps['/osm'].root.engine.create_admin()
    except EngineException:
        # admin user already exists; nothing to do
        pass
    # getenv('OSMOPENMANO_TENANT', None)
+
+
def _stop_service():
    """Callback invoked when the cherrypy engine stops.

    TODO: close database connections here.
    """
    nbi_root = cherrypy.tree.apps['/osm'].root
    nbi_root.engine.stop()
    cherrypy.log.error("Stopping osm_nbi")
+
def nbi():
    """Configure and run the cherrypy application serving the NBI at /osm.

    Server and TLS settings come from the "nbi.cfg" file; _start_service and
    _stop_service are hooked to the engine start/stop events. Blocks until
    the server is stopped.
    """
    # conf = {
    #     '/': {
    #         #'request.dispatch': cherrypy.dispatch.MethodDispatcher(),
    #         'tools.sessions.on': True,
    #         'tools.response_headers.on': True,
    #         # 'tools.response_headers.headers': [('Content-Type', 'text/plain')],
    #     }
    # }
    # cherrypy.Server.ssl_module = 'builtin'
    # cherrypy.Server.ssl_certificate = "http/cert.pem"
    # cherrypy.Server.ssl_private_key = "http/privkey.pem"
    # cherrypy.Server.thread_pool = 10
    # cherrypy.config.update({'Server.socket_port': config["port"], 'Server.socket_host': config["host"]})

    # cherrypy.config.update({'tools.auth_basic.on': True,
    #                         'tools.auth_basic.realm': 'localhost',
    #                         'tools.auth_basic.checkpassword': validate_password})
    cherrypy.engine.subscribe('start', _start_service)
    cherrypy.engine.subscribe('stop', _stop_service)
    cherrypy.quickstart(Server(), '/osm', "nbi.cfg")


if __name__ == '__main__':
    nbi()
--- /dev/null
+[Unit]
+Description=NBI server (OSM NBI)
+
+[Service]
+ExecStart=/usr/bin/nbi -c /etc/osm/nbi.cfg --log-file=/var/log/osm/nbi.log
+Restart=always
+
+[Install]
+WantedBy=multi-user.target
+
--- /dev/null
#! /bin/bash

# Smoke test for the OSM NBI REST API: obtains an auth token, onboards the
# ping/pong VNF and NS packages and instantiates a ping_pong NS on the VIM
# configured below. Requires curl and the descriptor packages on disk.

export NBI_URL=https://localhost:9999/osm
USERNAME=admin
PASSWORD=admin
PROJECT=admin
VIM=ost2-mrt-tid #OST2_MRT #ost2-mrt-tid

# NOTE(review): the second assignment overrides the first; keep only one
DESCRIPTORS=/home/ubuntu/descriptors #../local/descriptors
DESCRIPTORS=../local/descriptors

VNFD1=${DESCRIPTORS}/ping_vnf.tar.gz
VNFD2=${DESCRIPTORS}/pong_vnf.tar.gz
VNFD3=${DESCRIPTORS}/cirros_vnfd.yaml

NSD1=${DESCRIPTORS}/ping_pong_ns.tar.gz
NSD2=${DESCRIPTORS}/cirros_2vnf_ns.tar.gz
NSD3=${DESCRIPTORS}/cirros_nsd.yaml

# abort if any descriptor is missing; "|| ! echo ... || exit 1" prints the
# message (echo succeeds, '!' inverts it) and then triggers the exit
[ -f "$VNFD1" ] || ! echo "not found ping_vnf.tar.gz. Set DESCRIPTORS variable to a proper location" || exit 1
[ -f "$VNFD2" ] || ! echo "not found pong_vnf.tar.gz. Set DESCRIPTORS variable to a proper location" || exit 1
[ -f "$VNFD3" ] || ! echo "not found cirros_vnfd.yaml. Set DESCRIPTORS variable to a proper location" || exit 1
[ -f "$NSD1" ] || ! echo "not found ping_pong_ns.tar.gz. Set DESCRIPTORS variable to a proper location" || exit 1
[ -f "$NSD2" ] || ! echo "not found cirros_2vnf_ns.tar.gz. Set DESCRIPTORS variable to a proper location" || exit 1
[ -f "$NSD3" ] || ! echo "not found cirros_nsd.yaml. Set DESCRIPTORS variable to a proper location" || exit 1

#get token
TOKEN=`curl --insecure -H "Content-Type: application/yaml" -H "Accept: application/yaml" --data "{username: $USERNAME, password: $PASSWORD, project_id: $PROJECT}" ${NBI_URL}/token/v1 2>/dev/null | awk '($1=="id:"){print $2}'`;
echo token: $TOKEN




# VNFD
#########
#insert PKG
VNFD1_ID=`curl --insecure -w "%{http_code}\n" -H "Content-Type: application/gzip" -H "Accept: application/yaml" -H "Authorization: Bearer $TOKEN" --data-binary "@$VNFD1" ${NBI_URL}/vnfpkgm/v1/vnf_packages 2>/dev/null | awk '($1=="id:"){print $2}'`
echo ping_vnfd: $VNFD1_ID

VNFD2_ID=`curl --insecure -w "%{http_code}\n" -H "Content-Type: application/gzip" -H "Accept: application/yaml" -H "Authorization: Bearer $TOKEN" --data-binary "@$VNFD2" ${NBI_URL}/vnfpkgm/v1/vnf_packages 2>/dev/null | awk '($1=="id:"){print $2}'`
echo pong_vnfd: $VNFD2_ID



# NSD
#########
#insert PKG
NSD1_ID=`curl --insecure -w "%{http_code}\n" -H "Content-Type: application/gzip" -H "Accept: application/yaml" -H "Authorization: Bearer $TOKEN" --data-binary "@$NSD1" ${NBI_URL}/nsd/v1/ns_descriptors 2>/dev/null | awk '($1=="id:"){print $2}'`
echo ping_pong_nsd: $NSD1_ID


# NSRS
##############
#add nsr
NSR1_ID=`curl --insecure -w "%{http_code}\n" -H "Content-Type: application/yaml" -H "Accept: application/yaml" -H "Authorization: Bearer $TOKEN" --data "{ nsDescription: default description, nsName: NSNAME, nsdId: $NSD1_ID, ssh-authorized-key: [ {key-pair-ref: gerardo}, {key-pair-ref: alfonso}], vimAccountId: $VIM }" ${NBI_URL}/nslcm/v1/ns_instances 2>/dev/null | awk '($1=="id:"){print $2}'` ;
echo ping_pong_nsr: $NSR1_ID


# print a ready-to-run command for polling the NS instantiation status
echo '
curl --insecure -w "%{http_code}\n" -H "Content-Type: application/yaml" -H "Accept: application/yaml" -H "Authorization: Bearer '$TOKEN'" '${NBI_URL}'/nslcm/v1/ns_instances/'$NSR1_ID' 2>/dev/null | grep -e detailed-status -e operational-status -e config-status'




--- /dev/null
#! /bin/bash

# Cleanup helper for the OSM NBI: obtains an auth token and deletes every NS
# instance, NS descriptor and VNF package through the REST API.

export NBI_URL=https://localhost:9999/osm
USERNAME=admin
PASSWORD=admin
PROJECT=admin



#get token
TOKEN=`curl --insecure -H "Content-Type: application/yaml" -H "Accept: application/yaml" --data "{username: $USERNAME, password: $PASSWORD, project_id: $PROJECT}" ${NBI_URL}/token/v1 2>/dev/null | awk '($1=="id:"){print $2}' ` ; echo $TOKEN


echo deleting all
#DELETE ALL

# for each collection: list every item "_id" and delete the items one by one
for url_item in nslcm/v1/ns_instances nsd/v1/ns_descriptors vnfpkgm/v1/vnf_packages
do
    for ITEM_ID in `curl --insecure -w "%{http_code}\n" -H "Content-Type: application/yaml" -H "Accept: application/yaml" -H "Authorization: Bearer $TOKEN" ${NBI_URL}/${url_item} 2>/dev/null | awk '($1=="_id:") {print $2}'` ;
    do
        curl --insecure -w "%{http_code}\n" -H "Content-Type: application/yaml" -H "Accept: application/yaml" -H "Authorization: Bearer $TOKEN" ${NBI_URL}/${url_item}/$ITEM_ID -X DELETE
    done
done

# curl --insecure ${NBI_URL}/test/prune

--- /dev/null
+#! /usr/bin/python3
+# -*- coding: utf-8 -*-
+
+import os.path
+import getopt
+import sys
+import base64
+import requests
+import json
+from os.path import getsize, basename
+from hashlib import md5
+
+__author__ = "Alfonso Tierno, alfonso.tiernosepulveda@telefonica.com"
+__date__ = "$2018-01-01$"
+__version__ = "0.1"
+version_date = "Jan 2018"
+
+
+def usage():
+ print("Usage: ", sys.argv[0], "[options]")
+ print(" --version: prints current version")
+ print(" -f|--file FILE: file to be sent")
+ print(" -h|--help: shows this help")
+ print(" -u|--url URL: complete server URL")
+ print(" -s|--chunk-size SIZE: size of chunks, by default 1000")
+ print(" -t|--token TOKEN: Authorizaton token, previously obtained from server")
+ print(" -v|--verbose print debug information, can be used several times")
+ return
+
+
+if __name__=="__main__":
+
+    try:
+        # load parameters and configuration
+        opts, args = getopt.getopt(sys.argv[1:], "hvu:s:f:t:",
+                                   ["url=", "help", "version", "verbose", "file=", "chunk-size=", "token="])
+        url = None           # full upload endpoint URL (-u/--url)
+        chunk_size = 500     # bytes sent per POST (-s/--chunk-size)
+        pkg_file = None      # path of the file to upload (-f/--file)
+        verbose = 0          # 0 quiet-ish, 1 TX headers, >1 also per-chunk RX
+        token = None         # optional Authorization header value (-t/--token)
+
+        for o, a in opts:
+            if o == "--version":
+                print ("upload version " + __version__ + ' ' + version_date)
+                sys.exit()
+            elif o in ("-v", "--verbose"):
+                verbose += 1
+            elif o in ("-h", "--help"):
+                usage()
+                sys.exit()
+            elif o in ("-u", "--url"):
+                url = a
+            elif o in ("-s", "--chunk-size"):
+                chunk_size = int(a)
+            elif o in ("-f", "--file"):
+                pkg_file = a
+            elif o in ("-t", "--token"):
+                token = a
+            else:
+                assert False, "Unhandled option"
+        # Total size drives the Content-range headers and the last-chunk test.
+        total_size = getsize(pkg_file)
+        index = 0
+        transaction_id = None   # returned by the server on the first chunk
+        file_md5 = md5()        # whole-file digest, sent with the final chunk
+        with open(pkg_file, 'rb') as f:
+            headers = {
+                "Content-type": "application/gzip",
+                "Content-Filename": basename(pkg_file),
+                "Accept": "application/json",
+            }
+            if token:
+                headers["Authorization"] = token
+            # Send the file in chunk_size pieces; the server correlates them
+            # via the Transaction-Id it returned for the first chunk.
+            while index < total_size:
+                chunk_data = f.read(chunk_size)
+                file_md5.update(chunk_data)
+                # payload = {"file_name": pkg_file, "chunk_data": base64.b64encode(chunk_data).decode("utf-8"),
+                #            "chunk_size": chunk_size}
+                if transaction_id:
+                    headers["Transaction-Id"] = transaction_id
+                # Last chunk: attach the whole-file MD5 so the server can verify.
+                if index+len(chunk_data) == total_size:
+                    headers["Content-File-MD5"] = file_md5.hexdigest()
+                    # payload["id"] = transaction_id
+                headers["Content-range"] = "bytes {}-{}/{}".format(index, index+len(chunk_data)-1, total_size)
+                # refers to rfc2616: https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html
+                if verbose:
+                    print("TX chunk Headers: {}".format(headers))
+                r = requests.post(url, data=chunk_data, headers=headers, verify=False)
+                if r.status_code not in (200, 201):
+                    print("Got {}: {}".format(r.status_code, r.text))
+                    exit(1)
+                if verbose > 1:
+                    print("RX {}: {}".format(r.status_code, r.text))
+                response = r.json()
+                if not transaction_id:
+                    transaction_id = response["id"]
+                index += len(chunk_data)
+            # At verbose<=1 the per-chunk RX was not printed; show the final one.
+            if verbose <= 1:
+                print("RX {}: {}".format(r.status_code, r.text))
+            # NOTE(review): if the input file is empty the while loop never runs
+            # and 'response'/'r' are unbound here -- confirm empty uploads are
+            # not an expected input.
+            if "id" in response:
+                print("---\nid: {}".format(response["id"]))
+    except Exception as e:
+        raise
--- /dev/null
+#!/usr/bin/env python3
+
+from setuptools import setup  #, find_packages
+
+# Distribution name; also the single package shipped by this setup script.
+_name = "osm_nbi"
+
+setup(
+    name=_name,
+    description='OSM North Bound Interface',
+    # version_command=('git describe --tags --long --dirty', 'pep440-git'),
+    version="0.1.0",
+    author='ETSI OSM',
+    author_email='alfonso.tiernosepulveda@telefonica.com',
+    maintainer='Alfonso Tierno',
+    maintainer_email='alfonso.tiernosepulveda@telefonica.com',
+    url='https://osm.etsi.org/gitweb/?p=osm/NBI.git;a=summary',
+    license='Apache 2.0',
+
+    packages=[_name],  # find_packages(),
+    include_package_data=True,
+    # Install the service config and systemd unit at absolute system paths.
+    data_files=[('/etc/osm/', ['osm_nbi/nbi.cfg']),
+                ('/etc/systemd/system/', ['osm_nbi/osm-nbi.service']),
+                ],
+
+    # NOTE(review): only CherryPy and pymongo are declared; other runtime deps
+    # (e.g. yaml, listed in stdeb.cfg) may be missing -- confirm against the
+    # package's actual imports.
+    install_requires=[
+        'CherryPy', 'pymongo'
+    ],
+# setup_requires=['setuptools-version-command'],
+    # test_suite='nose.collector',
+    # entry_points='''
+    # [console_scripts]
+    # osm=osm_nbi.nbi:nbi
+    # ''',
+)
--- /dev/null
+# stdeb configuration: Debian runtime dependencies declared for the package
+# generated by 'python3 setup.py --command-packages=stdeb.command bdist_deb'.
+[DEFAULT]
+Depends: python3-cherrypy3, python3-pymongo, python3-yaml
--- /dev/null
+# Tox automation: nose unit tests, flake8 lint, and a 'build' env producing a
+# Debian package via stdeb.
+[tox]
+# NOTE(review): envlist includes py27 while the scripts in this repo use
+# python3 shebangs -- confirm python2 support is actually intended.
+envlist = py27,py3,flake8
+toxworkdir={homedir}/.tox
+
+[testenv]
+deps=nose
+     mock
+commands=nosetests
+
+[testenv:flake8]
+basepython = python3
+deps = flake8
+commands =
+    flake8 setup.py
+
+# Not in the default envlist; run explicitly with 'tox -e build'.
+[testenv:build]
+basepython = python3
+deps = stdeb
+       setuptools-version-command
+commands = python3 setup.py --command-packages=stdeb.command bdist_deb