Remove common files and use the osm/common package 13/6113/4
author	tierno <alfonso.tiernosepulveda@telefonica.com>
Thu, 10 May 2018 11:12:32 +0000 (13:12 +0200)
committer	tierno <alfonso.tiernosepulveda@telefonica.com>
Thu, 10 May 2018 15:44:27 +0000 (17:44 +0200)
Change-Id: I60f0cee9338fb5f1595e13bfab62805a4fec9913
Signed-off-by: tierno <alfonso.tiernosepulveda@telefonica.com>
17 files changed:
Dockerfile
Dockerfile.fromdeb [new file with mode: 0644]
Dockerfile.local
Makefile
devops-stages/stage-build.sh
osm_nbi/dbbase.py [deleted file]
osm_nbi/dbmemory.py [deleted file]
osm_nbi/dbmongo.py [deleted file]
osm_nbi/engine.py
osm_nbi/fsbase.py [deleted file]
osm_nbi/fslocal.py [deleted file]
osm_nbi/msgbase.py [deleted file]
osm_nbi/msgkafka.py [deleted file]
osm_nbi/msglocal.py [deleted file]
osm_nbi/nbi.py
python3-osm-nbi.postinst [new file with mode: 0755]
setup.py

index 179dfa8..8f664c5 100644 (file)
@@ -1,10 +1,12 @@
 # This Dockerfile is intended for devops and deb package generation
 #
-# Use Dockerfile.local for running osm/NBI in a docker container
+# Use Dockerfile.local for running osm/NBI in a docker container from source
+# Use Dockerfile.fromdeb for running osm/NBI in a docker container from the last stable package
 
 
 FROM ubuntu:16.04
 
 RUN apt-get update && apt-get -y install git make python python3 \
     libcurl4-gnutls-dev libgnutls-dev tox python-dev python3-dev \
-    debhelper python-setuptools python-all python3-all apt-utils
+    debhelper python-setuptools python-all python3-all apt-utils 
+
diff --git a/Dockerfile.fromdeb b/Dockerfile.fromdeb
new file mode 100644 (file)
index 0000000..c99e974
--- /dev/null
@@ -0,0 +1,63 @@
+# This creates osm/NBI docker from the last stable package
+
+FROM ubuntu:16.04
+
+# Set the working directory to /app
+WORKDIR /app/osm_nbi
+
+# avoid extra information from packages
+RUN echo 'path-exclude /usr/share/doc/*\n\
+path-include /usr/share/doc/*/copyright\n\
+path-exclude /usr/share/man/*\n\
+path-exclude /usr/share/groff/*\n\
+path-exclude /usr/share/info/*\n\
+path-exclude /usr/share/lintian/*\n\
+path-exclude /usr/share/linda/*\n'\
+> /etc/dpkg/dpkg.cfg.d/01_nodoc && \
+    echo  'APT::Install-Recommends "false";\n\
+APT::AutoRemove::RecommendsImportant "false";\n\
+APT::AutoRemove::SuggestsImportant "false";\n'\
+> /etc/apt/apt.conf.d/99_norecommends
+
+
+RUN apt-get update && apt-get install -y curl software-properties-common \
+    && add-apt-repository -y "deb http://osm-download.etsi.org/repository/osm/debian/ReleaseFOUR testing common NBI" \
+    && curl "http://osm-download.etsi.org/repository/osm/debian/ReleaseFOUR/OSM%20ETSI%20Release%20Key.gpg" | apt-key add - \
+    && apt-get update && apt-get install -y python3-osm-nbi python3-osm-common \
+    && rm -rf /var/lib/apt/lists/*
+
+EXPOSE 9999
+
+LABEL Maintainer="alfonso.tiernosepulveda@telefonica.com" \
+      Description="This implements a north bound interface for OSM" \
+      Version="1.0" \
+      Author="Alfonso Tierno"
+
+# Used for local storage
+VOLUME /app/storage
+# Used for logs
+VOLUME /app/log
+
+# The following ENV variables can be set with "docker run -e xxx" to configure
+# server
+ENV OSMNBI_SOCKET_HOST     0.0.0.0
+ENV OSMNBI_SOCKET_PORT     9999
+# storage
+ENV OSMNBI_STORAGE_PATH    /app/storage
+# database
+ENV OSMNBI_DATABASE_DRIVER mongo
+ENV OSMNBI_DATABASE_HOST   mongo
+ENV OSMNBI_DATABASE_PORT   27017
+# web
+ENV OSMNBI_STATIC_DIR      /app/osm_nbi/html_public
+# logs
+ENV OSMNBI_LOG_FILE        /app/log
+ENV OSMNBI_LOG_LEVEL       DEBUG
+# message
+ENV OSMNBI_MESSAGE_DRIVER  kafka
+ENV OSMNBI_MESSAGE_HOST    kafka
+ENV OSMNBI_MESSAGE_PORT    9092
+
+# Run nbi.py when the container launches
+CMD ["python3", "nbi.py"]
+
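
For reference, a minimal sketch of building and running this image. The image tag (osm/nbi:fromdeb), the container name and the mongo/kafka endpoints passed with -e are illustrative assumptions, not part of this commit; only the ENV names, the exposed port and the volume paths come from the Dockerfile above.

    # build the image from the Dockerfile added above
    docker build -f Dockerfile.fromdeb -t osm/nbi:fromdeb .
    # run it, overriding the database and message-bus hosts declared as ENV defaults
    docker run -d --name osm-nbi -p 9999:9999 \
        -e OSMNBI_DATABASE_HOST=mongo-host \
        -e OSMNBI_MESSAGE_HOST=kafka-host \
        -v nbi-storage:/app/storage -v nbi-log:/app/log \
        osm/nbi:fromdeb
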
index 57b0f22..7f1a02b 100644 (file)
@@ -1,3 +1,5 @@
+# This creates osm/NBI docker from local NBI source code
+
 FROM ubuntu:16.04
 
 # Set the working directory to /app
@@ -6,11 +8,16 @@ WORKDIR /app/osm_nbi
 # Copy the current directory contents into the container at /app
 ADD . /app
 
-RUN apt-get update && apt-get -y install git  python3 python3-jsonschema \
+RUN apt-get update && apt-get install -y git python3 python3-jsonschema \
     python3-cherrypy3 python3-pymongo python3-yaml python3-pip \
     && pip3 install pip==9.0.3 \
     && pip3 install aiokafka \
     && mkdir -p /app/storage/kafka && mkdir -p /app/log 
+RUN git clone https://osm.etsi.org/gerrit/osm/common.git \
+    && cd common  && python3 setup.py develop && cd ..
+#    && pip3 install -U -r requirements.txt \
+#    && cd ..
+
 
 
 EXPOSE 9999
index 6b857ed..5a2e3ce 100644 (file)
--- a/Makefile
+++ b/Makefile
@@ -1,35 +1,14 @@
-# All Rights Reserved.
-#
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
 
-OUT_DIR := osm_im
-TREES_DIR := osm_im_trees
-Q?=@
-
-all: package
-       $(MAKE) clean_build package
-
-clean: clean_build
-       $(Q)rm -rf build dist osm_nbi.egg-info deb deb_dist *.gz $(OUT_DIR) $(TREES_DIR)
-
-clean_build:
-       rm -rf build
-       find osm_nbi -name '*.pyc' -delete
-       find osm_nbi -name '*.pyo' -delete
+clean:
+       rm -rf dist deb_dist .build osm_nbi-*.tar.gz osm_nbi.egg-info eggs
 
 package:
-       tox -e build
+       python3 setup.py --command-packages=stdeb.command sdist_dsc
+       cp python3-osm-nbi.postinst deb_dist/osm-nbi*/debian
+       cd deb_dist/osm-nbi*/debian && echo "osm-common python3-osm-common" > py3dist-overrides
+       # cd deb_dist/osm-nbi*/debian && echo "pip3 python3-pip"       >> py3dist-overrides
+       cd deb_dist/osm-nbi*/  && dpkg-buildpackage -rfakeroot -uc -us
+       mkdir -p .build
+       cp deb_dist/python3-osm-nbi*.deb .build/
 
-test:
-       echo "TODO"
 
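A sketch of how the new packaging flow might be exercised; the resulting .deb filename depends on the version read from osm_nbi/html_public/version, and installing it assumes python3-osm-common is obtainable (e.g. from the OSM Debian repository used in Dockerfile.fromdeb):

    # build the Debian package; the target above copies it into .build/
    make clean package
    # install the generated package (the postinst further below enables osm-nbi.service)
    sudo dpkg -i .build/python3-osm-nbi*.deb
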
index bf7602b..58af160 100755 (executable)
@@ -1,3 +1,4 @@
 #!/bin/sh
 rm -rf deb_dist
 tox -e build
+#make clean package
diff --git a/osm_nbi/dbbase.py b/osm_nbi/dbbase.py
deleted file mode 100644 (file)
index b101cb4..0000000
+++ /dev/null
@@ -1,38 +0,0 @@
-from http import HTTPStatus
-
-__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
-
-
-class DbException(Exception):
-
-    def __init__(self, message, http_code=HTTPStatus.NOT_FOUND):
-        # TODO change to http.HTTPStatus instead of int that allows .value and .name
-        self.http_code = http_code
-        Exception.__init__(self, "database exception " + message)
-
-
-class DbBase(object):
-
-    def __init__(self):
-        pass
-
-    def db_connect(self, config):
-        pass
-
-    def db_disconnect(self):
-        pass
-
-    def get_list(self, table, filter={}):
-        pass
-
-    def get_one(self, table, filter={}, fail_on_empty=True, fail_on_more=True):
-        pass
-
-    def create(self, table, indata):
-        pass
-
-    def del_list(self, table, filter={}):
-        pass
-
-    def del_one(self, table, filter={}, fail_on_empty=True):
-        pass
diff --git a/osm_nbi/dbmemory.py b/osm_nbi/dbmemory.py
deleted file mode 100644 (file)
index cdb0482..0000000
+++ /dev/null
@@ -1,124 +0,0 @@
-import logging
-from dbbase import DbException, DbBase
-from http import HTTPStatus
-from uuid import uuid4
-from copy import deepcopy
-
-__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
-
-
-class DbMemory(DbBase):
-
-    def __init__(self, logger_name='db'):
-        self.logger = logging.getLogger(logger_name)
-        self.db = {}
-
-    def db_connect(self, config):
-        if "logger_name" in config:
-            self.logger = logging.getLogger(config["logger_name"])
-
-    @staticmethod
-    def _format_filter(filter):
-        return filter    # TODO
-
-    def _find(self, table, filter):
-        for i, row in enumerate(self.db.get(table, ())):
-            match = True
-            if filter:
-                for k, v in filter.items():
-                    if k not in row or v != row[k]:
-                        match = False
-            if match:
-                yield i, row
-
-    def get_list(self, table, filter={}):
-        try:
-            l = []
-            for _, row in self._find(table, self._format_filter(filter)):
-                l.append(deepcopy(row))
-            return l
-        except DbException:
-            raise
-        except Exception as e:  # TODO refine
-            raise DbException(str(e))
-
-    def get_one(self, table, filter={}, fail_on_empty=True, fail_on_more=True):
-        try:
-            l = None
-            for _, row in self._find(table, self._format_filter(filter)):
-                if not fail_on_more:
-                    return deepcopy(row)
-                if l:
-                    raise DbException("Found more than one entry with filter='{}'".format(filter),
-                                      HTTPStatus.CONFLICT.value)
-                l = row
-            if not l and fail_on_empty:
-                raise DbException("Not found entry with filter='{}'".format(filter), HTTPStatus.NOT_FOUND)
-            return deepcopy(l)
-        except Exception as e:  # TODO refine
-            raise DbException(str(e))
-
-    def del_list(self, table, filter={}):
-        try:
-            id_list = []
-            for i, _ in self._find(table, self._format_filter(filter)):
-                id_list.append(i)
-            deleted = len(id_list)
-            for i in id_list:
-                del self.db[table][i]
-            return {"deleted": deleted}
-        except DbException:
-            raise
-        except Exception as e:  # TODO refine
-            raise DbException(str(e))
-
-    def del_one(self, table, filter={}, fail_on_empty=True):
-        try:
-            for i, _ in self._find(table, self._format_filter(filter)):
-                break
-            else:
-                if fail_on_empty:
-                    raise DbException("Not found entry with filter='{}'".format(filter), HTTPStatus.NOT_FOUND)
-                return None
-            del self.db[table][i]
-            return {"deleted": 1}
-        except Exception as e:  # TODO refine
-            raise DbException(str(e))
-
-    def replace(self, table, filter, indata, fail_on_empty=True):
-        try:
-            for i, _ in self._find(table, self._format_filter(filter)):
-                break
-            else:
-                if fail_on_empty:
-                    raise DbException("Not found entry with filter='{}'".format(filter), HTTPStatus.NOT_FOUND)
-                return None
-            self.db[table][i] = deepcopy(indata)
-            return {"upadted": 1}
-        except Exception as e:  # TODO refine
-            raise DbException(str(e))
-
-    def create(self, table, indata):
-        try:
-            id = indata.get("_id")
-            if not id:
-                id = str(uuid4())
-                indata["_id"] = id
-            if table not in self.db:
-                self.db[table] = []
-            self.db[table].append(deepcopy(indata))
-            return id
-        except Exception as e:  # TODO refine
-            raise DbException(str(e))
-
-
-if __name__ == '__main__':
-    # some test code
-    db = dbmemory()
-    db.create("test", {"_id": 1, "data": 1})
-    db.create("test", {"_id": 2, "data": 2})
-    db.create("test", {"_id": 3, "data": 3})
-    print("must be 3 items:", db.get_list("test"))
-    print("must return item 2:", db.get_list("test", {"_id": 2}))
-    db.del_one("test", {"_id": 2})
-    print("must be emtpy:", db.get_list("test", {"_id": 2}))
diff --git a/osm_nbi/dbmongo.py b/osm_nbi/dbmongo.py
deleted file mode 100644 (file)
index a8ea1ca..0000000
+++ /dev/null
@@ -1,190 +0,0 @@
-#import pymongo
-import logging
-from pymongo import MongoClient, errors
-from dbbase import DbException, DbBase
-from http import HTTPStatus
-from time import time, sleep
-
-__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
-
-# TODO consider use this decorator for database access retries
-# @retry_mongocall
-# def retry_mongocall(call):
-#     def _retry_mongocall(*args, **kwargs):
-#         retry = 1
-#         while True:
-#             try:
-#                 return call(*args, **kwargs)
-#             except pymongo.AutoReconnect as e:
-#                 if retry == 4:
-#                     raise DbException(str(e))
-#                 sleep(retry)
-#     return _retry_mongocall
-
-
-class DbMongo(DbBase):
-    conn_initial_timout = 120
-    conn_timout = 10
-
-    def __init__(self, logger_name='db'):
-        self.logger = logging.getLogger(logger_name)
-
-    def db_connect(self, config):
-        try:
-            if "logger_name" in config:
-                self.logger = logging.getLogger(config["logger_name"])
-            self.client = MongoClient(config["host"], config["port"])
-            self.db = self.client[config["name"]]
-            if "loglevel" in config:
-                self.logger.setLevel(getattr(logging, config['loglevel']))
-            # get data to try a connection
-            now = time()
-            while True:
-                try:
-                    self.db.users.find_one({"username": "admin"})
-                    return
-                except errors.ConnectionFailure as e:
-                    if time() - now >= self.conn_initial_timout:
-                        raise
-                    self.logger.info("Waiting to database up {}".format(e))
-                    sleep(2)
-        except errors.PyMongoError as e:
-            raise DbException(str(e))
-
-    def db_disconnect(self):
-        pass  # TODO
-
-    @staticmethod
-    def _format_filter(filter):
-        try:
-            db_filter = {}
-            for query_k, query_v in filter.items():
-                dot_index = query_k.rfind(".")
-                if dot_index > 1 and query_k[dot_index+1:] in ("eq", "ne", "gt", "gte", "lt", "lte", "cont",
-                                                               "ncont", "neq"):
-                    operator = "$" + query_k[dot_index+1:]
-                    if operator == "$neq":
-                        operator = "$ne"
-                    k = query_k[:dot_index]
-                else:
-                    operator = "$eq"
-                    k = query_k
-
-                v = query_v
-                if isinstance(v, list):
-                    if operator in ("$eq", "$cont"):
-                        operator = "$in"
-                        v = query_v
-                    elif operator in ("$ne", "$ncont"):
-                        operator = "$nin"
-                        v = query_v
-                    else:
-                        v = query_v.join(",")
-
-                if operator in ("$eq", "$cont"):
-                    # v cannot be a comma separated list, because operator would have been changed to $in
-                    db_filter[k] = v
-                elif operator == "$ncount":
-                    # v cannot be a comma separated list, because operator would have been changed to $nin
-                    db_filter[k] = {"$ne": v}
-                else:
-                    # maybe db_filter[k] exist. e.g. in the query string for values between 5 and 8: "a.gt=5&a.lt=8"
-                    if k not in db_filter:
-                        db_filter[k] = {}
-                    db_filter[k][operator] = v
-
-            return db_filter
-        except Exception as e:
-            raise DbException("Invalid query string filter at {}:{}. Error: {}".format(query_k, v, e),
-                              http_code=HTTPStatus.BAD_REQUEST)
-
-    def get_list(self, table, filter={}):
-        try:
-            l = []
-            collection = self.db[table]
-            rows = collection.find(self._format_filter(filter))
-            for row in rows:
-                l.append(row)
-            return l
-        except DbException:
-            raise
-        except Exception as e:  # TODO refine
-            raise DbException(str(e))
-
-    def get_one(self, table, filter={}, fail_on_empty=True, fail_on_more=True):
-        try:
-            if filter:
-                filter = self._format_filter(filter)
-            collection = self.db[table]
-            if not (fail_on_empty and fail_on_more):
-                return collection.find_one(filter)
-            rows = collection.find(filter)
-            if rows.count() == 0:
-                if fail_on_empty:
-                    raise DbException("Not found any {} with filter='{}'".format(table[:-1], filter),
-                                      HTTPStatus.NOT_FOUND)
-                return None
-            elif rows.count() > 1:
-                if fail_on_more:
-                    raise DbException("Found more than one {} with filter='{}'".format(table[:-1], filter),
-                                      HTTPStatus.CONFLICT)
-            return rows[0]
-        except Exception as e:  # TODO refine
-            raise DbException(str(e))
-
-    def del_list(self, table, filter={}):
-        try:
-            collection = self.db[table]
-            rows = collection.delete_many(self._format_filter(filter))
-            return {"deleted": rows.deleted_count}
-        except DbException:
-            raise
-        except Exception as e:  # TODO refine
-            raise DbException(str(e))
-
-    def del_one(self, table, filter={}, fail_on_empty=True):
-        try:
-            collection = self.db[table]
-            rows = collection.delete_one(self._format_filter(filter))
-            if rows.deleted_count == 0:
-                if fail_on_empty:
-                    raise DbException("Not found any {} with filter='{}'".format(table[:-1], filter),
-                                      HTTPStatus.NOT_FOUND)
-                return None
-            return {"deleted": rows.deleted_count}
-        except Exception as e:  # TODO refine
-            raise DbException(str(e))
-
-    def create(self, table, indata):
-        try:
-            collection = self.db[table]
-            data = collection.insert_one(indata)
-            return data.inserted_id
-        except Exception as e:  # TODO refine
-            raise DbException(str(e))
-
-    def set_one(self, table, filter, update_dict, fail_on_empty=True):
-        try:
-            collection = self.db[table]
-            rows = collection.update_one(self._format_filter(filter), {"$set": update_dict})
-            if rows.updated_count == 0:
-                if fail_on_empty:
-                    raise DbException("Not found any {} with filter='{}'".format(table[:-1], filter),
-                                      HTTPStatus.NOT_FOUND)
-                return None
-            return {"deleted": rows.deleted_count}
-        except Exception as e:  # TODO refine
-            raise DbException(str(e))
-
-    def replace(self, table, id, indata, fail_on_empty=True):
-        try:
-            collection = self.db[table]
-            rows = collection.replace_one({"_id": id}, indata)
-            if rows.modified_count == 0:
-                if fail_on_empty:
-                    raise DbException("Not found any {} with filter='{}'".format(table[:-1], filter),
-                                      HTTPStatus.NOT_FOUND)
-                return None
-            return {"replace": rows.modified_count}
-        except Exception as e:  # TODO refine
-            raise DbException(str(e))
index 7b8799d..d771f96 100644 (file)
@@ -1,10 +1,10 @@
 # -*- coding: utf-8 -*-
 
-import dbmongo
-import dbmemory
-import fslocal
-import msglocal
-import msgkafka
+from osm_common import dbmongo
+from osm_common import dbmemory
+from osm_common import fslocal
+from osm_common import msglocal
+from osm_common import msgkafka
 import tarfile
 import yaml
 import json
@@ -12,9 +12,9 @@ import logging
 from random import choice as random_choice
 from uuid import uuid4
 from hashlib import sha256, md5
-from dbbase import DbException
-from fsbase import FsException
-from msgbase import MsgException
+from osm_common.dbbase import DbException
+from osm_common.fsbase import FsException
+from osm_common.msgbase import MsgException
 from http import HTTPStatus
 from time import time
 from copy import deepcopy
diff --git a/osm_nbi/fsbase.py b/osm_nbi/fsbase.py
deleted file mode 100644 (file)
index 7b6cd0c..0000000
+++ /dev/null
@@ -1,43 +0,0 @@
-
-from http import HTTPStatus
-
-__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
-
-
-class FsException(Exception):
-    def __init__(self, message, http_code=HTTPStatus.INTERNAL_SERVER_ERROR):
-        self.http_code = http_code
-        Exception.__init__(self, "storage exception " + message)
-
-
-class FsBase(object):
-    def __init__(self):
-        pass
-
-    def get_params(self):
-        return {}
-
-    def fs_connect(self, config):
-        pass
-
-    def fs_disconnect(self):
-        pass
-
-    def mkdir(self, folder):
-        pass
-
-    def file_exists(self, storage):
-        pass
-
-    def file_size(self, storage):
-        pass
-
-    def file_extract(self, tar_object, path):
-        pass
-
-    def file_open(self, storage, mode):
-        pass
-
-    def file_delete(self, storage, ignore_non_exist=False):
-        pass
-
diff --git a/osm_nbi/fslocal.py b/osm_nbi/fslocal.py
deleted file mode 100644 (file)
index b7dd839..0000000
+++ /dev/null
@@ -1,142 +0,0 @@
-import os
-import logging
-import tarfile
-from http import HTTPStatus
-from shutil import rmtree
-from fsbase import FsBase, FsException
-
-__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
-
-
-class FsLocal(FsBase):
-
-    def __init__(self, logger_name='fs'):
-        self.logger = logging.getLogger(logger_name)
-        self.path = None
-
-    def get_params(self):
-        return {"fs": "local", "path": self.path}
-
-    def fs_connect(self, config):
-        try:
-            if "logger_name" in config:
-                self.logger = logging.getLogger(config["logger_name"])
-            self.path = config["path"]
-            if not self.path.endswith("/"):
-                self.path += "/"
-            if not os.path.exists(self.path):
-                raise FsException("Invalid configuration param at '[storage]': path '{}' does not exist".format(
-                    config["path"]))
-        except FsException:
-            raise
-        except Exception as e:  # TODO refine
-            raise FsException(str(e))
-
-    def fs_disconnect(self):
-        pass  # TODO
-
-    def mkdir(self, folder):
-        """
-        Creates a folder or parent object location
-        :param folder:
-        :return: None or raises and exception
-        """
-        try:
-            os.mkdir(self.path + folder)
-        except Exception as e:
-            raise FsException(str(e), http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
-
-    def file_exists(self, storage, mode=None):
-        """
-        Indicates if "storage" file exist
-        :param storage: can be a str or a str list
-        :param mode: can be 'file' exist as a regular file; 'dir' exists as a directory or; 'None' just exists
-        :return: True, False
-        """
-        if isinstance(storage, str):
-            f = storage
-        else:
-            f = "/".join(storage)
-        if os.path.exists(self.path + f):
-            if mode == "file" and os.path.isfile(self.path + f):
-                return True
-            if mode == "dir" and os.path.isdir(self.path + f):
-                return True
-        return False
-
-    def file_size(self, storage):
-        """
-        return file size
-        :param storage: can be a str or a str list
-        :return: file size
-        """
-        if isinstance(storage, str):
-            f = storage
-        else:
-            f = "/".join(storage)
-        return os.path.getsize(self.path + f)
-
-    def file_extract(self, tar_object, path):
-        """
-        extract a tar file
-        :param tar_object: object of type tar
-        :param path: can be a str or a str list, or a tar object where to extract the tar_object
-        :return: None
-        """
-        if isinstance(path, str):
-            f = self.path + path
-        else:
-            f = self.path + "/".join(path)
-        tar_object.extractall(path=f)
-
-    def file_open(self, storage, mode):
-        """
-        Open a file
-        :param storage: can be a str or list of str
-        :param mode: file mode
-        :return: file object
-        """
-        try:
-            if isinstance(storage, str):
-                f = storage
-            else:
-                f = "/".join(storage)
-            return open(self.path + f, mode)
-        except FileNotFoundError:
-            raise FsException("File {} does not exist".format(f), http_code=HTTPStatus.NOT_FOUND)
-        except IOError:
-            raise FsException("File {} cannot be opened".format(f), http_code=HTTPStatus.BAD_REQUEST)
-
-    def dir_ls(self, storage):
-        """
-        return folder content
-        :param storage: can be a str or list of str
-        :return: folder content
-        """
-        try:
-            if isinstance(storage, str):
-                f = storage
-            else:
-                f = "/".join(storage)
-            return os.listdir(self.path + f)
-        except NotADirectoryError:
-            raise FsException("File {} does not exist".format(f), http_code=HTTPStatus.NOT_FOUND)
-        except IOError:
-            raise FsException("File {} cannot be opened".format(f), http_code=HTTPStatus.BAD_REQUEST)
-
-    def file_delete(self, storage, ignore_non_exist=False):
-        """
-        Delete storage content recursivelly
-        :param storage: can be a str or list of str
-        :param ignore_non_exist: not raise exception if storage does not exist
-        :return: None
-        """
-
-        if isinstance(storage, str):
-            f = self.path + storage
-        else:
-            f = self.path + "/".join(storage)
-        if os.path.exists(f):
-            rmtree(f)
-        elif not ignore_non_exist:
-            raise FsException("File {} does not exist".format(storage), http_code=HTTPStatus.NOT_FOUND)
diff --git a/osm_nbi/msgbase.py b/osm_nbi/msgbase.py
deleted file mode 100644 (file)
index 25e8c80..0000000
+++ /dev/null
@@ -1,47 +0,0 @@
-
-import asyncio
-from http import HTTPStatus
-
-__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
-
-
-class MsgException(Exception):
-    """
-    Base Exception class for all msgXXXX exceptions
-    """
-
-    def __init__(self, message, http_code=HTTPStatus.INTERNAL_SERVER_ERROR):
-        """
-        General exception
-        :param message:  descriptive text
-        :param http_code: <http.HTTPStatus> type. It contains ".value" (http error code) and ".name" (http error name
-        """
-        self.http_code = http_code
-        Exception.__init__(self, "messaging exception " + message)
-
-
-class MsgBase(object):
-    """
-    Base class for all msgXXXX classes
-    """
-
-    def __init__(self):
-        pass
-
-    def connect(self, config):
-        pass
-
-    def disconnect(self):
-        pass
-
-    def write(self, topic, key, msg):
-        pass
-
-    def read(self, topic):
-        pass
-
-    async def aiowrite(self, topic, key, msg, loop):
-        pass
-
-    async def aioread(self, topic, loop):
-        pass
diff --git a/osm_nbi/msgkafka.py b/osm_nbi/msgkafka.py
deleted file mode 100644 (file)
index 96456af..0000000
+++ /dev/null
@@ -1,96 +0,0 @@
-import logging
-import asyncio
-import yaml
-from aiokafka import AIOKafkaConsumer
-from aiokafka import AIOKafkaProducer
-from aiokafka.errors import KafkaError
-from msgbase import MsgBase, MsgException
-#import json
-
-
-class MsgKafka(MsgBase):
-    def __init__(self, logger_name='msg'):
-        self.logger = logging.getLogger(logger_name)
-        self.host = None
-        self.port = None
-        self.consumer = None
-        self.producer = None
-        # create a different file for each topic
-        #self.files = {}
-
-    def connect(self, config):
-        try:
-            if "logger_name" in config:
-                self.logger = logging.getLogger(config["logger_name"])
-            self.host = config["host"]
-            self.port = config["port"]
-            self.topic_lst = []
-            self.loop = asyncio.get_event_loop()
-            self.broker = str(self.host) + ":" + str(self.port)
-
-        except Exception as e:  # TODO refine
-            raise MsgException(str(e))
-
-    def disconnect(self):
-        try:
-            self.loop.close()
-        except Exception as e:  # TODO refine
-            raise MsgException(str(e))
-
-    def write(self, topic, key, msg):
-        try:
-            self.loop.run_until_complete(self.aiowrite(topic=topic, key=key,
-                                                       msg=yaml.safe_dump(msg, default_flow_style=True),
-                                                       loop=self.loop))
-
-        except Exception as e:
-            raise MsgException("Error writing {} topic: {}".format(topic, str(e)))
-
-    def read(self, topic):
-        """
-        Read from one or several topics. it is non blocking returning None if nothing is available
-        :param topic: can be str: single topic; or str list: several topics
-        :return: topic, key, message; or None
-        """
-        try:
-            return self.loop.run_until_complete(self.aioread(topic, self.loop))
-        except MsgException:
-            raise
-        except Exception as e:
-            raise MsgException("Error reading {} topic: {}".format(topic, str(e)))
-
-    async def aiowrite(self, topic, key, msg, loop):
-        try:
-            self.producer = AIOKafkaProducer(loop=loop, key_serializer=str.encode, value_serializer=str.encode,
-                                             bootstrap_servers=self.broker)
-            await self.producer.start()
-            await self.producer.send(topic=topic, key=key, value=msg)
-        except Exception as e:
-            raise MsgException("Error publishing to {} topic: {}".format(topic, str(e)))
-        finally:
-            await self.producer.stop()
-
-    async def aioread(self, topic, loop):
-        """
-        Asyncio read from one or several topics. It blocks
-        :param topic: can be str: single topic; or str list: several topics
-        :param loop: asyncio loop
-        :return: topic, key, message
-        """
-        try:
-            if isinstance(topic, (list, tuple)):
-                topic_list = topic
-            else:
-                topic_list = (topic,)
-
-            self.consumer = AIOKafkaConsumer(loop=loop, bootstrap_servers=self.broker)
-            await self.consumer.start()
-            self.consumer.subscribe(topic_list)
-            async for message in self.consumer:
-                return message.topic, yaml.load(message.key), yaml.load(message.value)
-        except KafkaError as e:
-            raise MsgException(str(e))
-        finally:
-            await self.consumer.stop()
-
-
diff --git a/osm_nbi/msglocal.py b/osm_nbi/msglocal.py
deleted file mode 100644 (file)
index 337321f..0000000
+++ /dev/null
@@ -1,111 +0,0 @@
-import logging
-import os
-import yaml
-import asyncio
-from msgbase import MsgBase, MsgException
-from time import sleep
-
-__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>"
-
-"""
-This emulated kafka bus by just using a shared file system. Usefull for testing or devops.
-One file is used per topic. Only one producer and one consumer is allowed per topic. Both consumer and producer 
-access to the same file. e.g. same volume if running with docker.
-One text line per message is used in yaml format
-"""
-
-class MsgLocal(MsgBase):
-
-    def __init__(self, logger_name='msg'):
-        self.logger = logging.getLogger(logger_name)
-        self.path = None
-        # create a different file for each topic
-        self.files = {}
-        self.buffer = {}
-
-    def connect(self, config):
-        try:
-            if "logger_name" in config:
-                self.logger = logging.getLogger(config["logger_name"])
-            self.path = config["path"]
-            if not self.path.endswith("/"):
-                self.path += "/"
-            if not os.path.exists(self.path):
-                os.mkdir(self.path)
-        except MsgException:
-            raise
-        except Exception as e:  # TODO refine
-            raise MsgException(str(e))
-
-    def disconnect(self):
-        for f in self.files.values():
-            try:
-                f.close()
-            except Exception as e:  # TODO refine
-                pass
-
-    def write(self, topic, key, msg):
-        """
-        Insert a message into topic
-        :param topic: topic
-        :param key: key text to be inserted
-        :param msg: value object to be inserted, can be str, object ...
-        :return: None or raises and exception
-        """
-        try:
-            if topic not in self.files:
-                self.files[topic] = open(self.path + topic, "a+")
-            yaml.safe_dump({key: msg}, self.files[topic], default_flow_style=True, width=20000)
-            self.files[topic].flush()
-        except Exception as e:  # TODO refine
-            raise MsgException(str(e))
-
-    def read(self, topic, blocks=True):
-        """
-        Read from one or several topics. it is non blocking returning None if nothing is available
-        :param topic: can be str: single topic; or str list: several topics
-        :param blocks: indicates if it should wait and block until a message is present or returns None
-        :return: topic, key, message; or None if blocks==True
-        """
-        try:
-            if isinstance(topic, (list, tuple)):
-                topic_list = topic
-            else:
-                topic_list = (topic, )
-            while True:
-                for single_topic in topic_list:
-                    if single_topic not in self.files:
-                        self.files[single_topic] = open(self.path + single_topic, "a+")
-                        self.buffer[single_topic] = ""
-                    self.buffer[single_topic] += self.files[single_topic].readline()
-                    if not self.buffer[single_topic].endswith("\n"):
-                        continue
-                    msg_dict = yaml.load(self.buffer[single_topic])
-                    self.buffer[single_topic] = ""
-                    assert len(msg_dict) == 1
-                    for k, v in msg_dict.items():
-                        return single_topic, k, v
-                if not blocks:
-                    return None
-                sleep(2)
-        except Exception as e:  # TODO refine
-            raise MsgException(str(e))
-
-    async def aioread(self, topic, loop):
-        """
-        Asyncio read from one or several topics. It blocks
-        :param topic: can be str: single topic; or str list: several topics
-        :param loop: asyncio loop
-        :return: topic, key, message
-        """
-        try:
-            while True:
-                msg = self.read(topic, blocks=False)
-                if msg:
-                    return msg
-                await asyncio.sleep(2, loop=loop)
-        except MsgException:
-            raise
-        except Exception as e:  # TODO refine
-            raise MsgException(str(e))
-
index a8f891d..ba2e14e 100644 (file)
@@ -8,9 +8,9 @@ import yaml
 import html_out as html
 import logging
 from engine import Engine, EngineException
-from dbbase import DbException
-from fsbase import FsException
-from msgbase import MsgException
+from osm_common.dbbase import DbException
+from osm_common.fsbase import FsException
+from osm_common.msgbase import MsgException
 from base64 import standard_b64decode
 #from os import getenv
 from http import HTTPStatus
diff --git a/python3-osm-nbi.postinst b/python3-osm-nbi.postinst
new file mode 100755 (executable)
index 0000000..452f503
--- /dev/null
@@ -0,0 +1,26 @@
+#!/bin/bash
+
+##
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact with: OSM_TECH@list.etsi.org
+##
+
+echo "POST INSTALL OSM-NBI"
+
+#Creation of log folder
+mkdir -p /var/log/osm
+
+systemctl enable osm-nbi.service
+
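
A quick hedged check after installing the package; the unit name and log directory are the ones declared in this repo (osm-nbi.service installed via setup.py data_files, /var/log/osm created by the postinst above). Starting the service is left to the operator:

    systemctl status osm-nbi.service   # should report the unit enabled by the postinst
    ls -ld /var/log/osm                # log folder created above
    # systemctl start osm-nbi.service  # start it manually if desired
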
index 7cd857c..3e17a5a 100644 (file)
--- a/setup.py
+++ b/setup.py
@@ -3,10 +3,13 @@
 import os
 from setuptools import setup
 
-here = os.path.abspath(os.path.dirname(__file__))
 _name = "osm_nbi"
-VERSION = "0.1.3" 
-README = open(os.path.join(here, 'README.rst')).read()
+# version is at first line of osm_nbi/html_public/version
+here = os.path.abspath(os.path.dirname(__file__))
+with open(os.path.join(here, 'osm_nbi/html_public/version')) as version_file:
+    VERSION = version_file.readline().strip()
+with open(os.path.join(here, 'README.rst')) as readme_file:
+    README = readme_file.read()
 
 setup(
     name=_name,
@@ -14,7 +17,7 @@ setup(
     long_description=README,
     # version_command=('git describe --tags --long --dirty', 'pep440-git'),
     version=VERSION,
-    python_requires='>3.5.0',
+    python_requires='>3.5.0',
     author='ETSI OSM',
     author_email='alfonso.tiernosepulveda@telefonica.com',
     maintainer='Alfonso Tierno',
@@ -27,10 +30,12 @@ setup(
     data_files=[('/etc/osm/', ['osm_nbi/nbi.cfg']),
                 ('/etc/systemd/system/', ['osm_nbi/osm-nbi.service']),
                 ],
-
+    dependency_links=[
+        "git+https://osm.etsi.org/gerrit/osm/common.git@master#egg=osm-common-0.1.4"
+    ],
     install_requires=[
-        'CherryPy', 'pymongo', 'jsonschema'
-        # 'PyYAML',
+        'CherryPy', 'pymongo', 'jsonschema', 'PyYAML',
+        # 'osm-common',
     ],
     # setup_requires=['setuptools-version-command'],
     # test_suite='nose.collector',