Merge remote-tracking branch 'origin/master' into paas
Change-Id: Ia9fcc2d74d857cb091634345f0be31a7bbccb950
Signed-off-by: Mark Beierl <mark.beierl@canonical.com>
diff --git a/installers/charm/kafka-exporter/src/charm.py b/installers/charm/kafka-exporter/src/charm.py
index ec6eaab..07a854f 100755
--- a/installers/charm/kafka-exporter/src/charm.py
+++ b/installers/charm/kafka-exporter/src/charm.py
@@ -96,7 +96,6 @@
class KafkaExporterCharm(CharmedOsmBase):
-
on = KafkaEvents()
def __init__(self, *args) -> NoReturn:
diff --git a/installers/charm/keystone/src/charm.py b/installers/charm/keystone/src/charm.py
index 39a7a00..446d2e0 100755
--- a/installers/charm/keystone/src/charm.py
+++ b/installers/charm/keystone/src/charm.py
@@ -248,9 +248,9 @@
):
credentials_files_builder = FilesV3Builder()
fernet_files_builder = FilesV3Builder()
- for (key_id, _) in enumerate(credential_keys):
+ for key_id, _ in enumerate(credential_keys):
credentials_files_builder.add_file(str(key_id), str(key_id), secret=True)
- for (key_id, _) in enumerate(fernet_keys):
+ for key_id, _ in enumerate(fernet_keys):
fernet_files_builder.add_file(str(key_id), str(key_id), secret=True)
return credentials_files_builder.build(), fernet_files_builder.build()
diff --git a/installers/charm/lcm/src/charm.py b/installers/charm/lcm/src/charm.py
index 7c64418..5319763 100755
--- a/installers/charm/lcm/src/charm.py
+++ b/installers/charm/lcm/src/charm.py
@@ -140,7 +140,6 @@
class LcmCharm(CharmedOsmBase):
-
on = KafkaEvents()
def __init__(self, *args) -> NoReturn:
diff --git a/installers/charm/mon/src/charm.py b/installers/charm/mon/src/charm.py
index db047c0..9ad49ad 100755
--- a/installers/charm/mon/src/charm.py
+++ b/installers/charm/mon/src/charm.py
@@ -126,7 +126,6 @@
class MonCharm(CharmedOsmBase):
-
on = KafkaEvents()
def __init__(self, *args) -> NoReturn:
diff --git a/installers/charm/nbi/src/charm.py b/installers/charm/nbi/src/charm.py
index 4aaecb9..cb47d1c 100755
--- a/installers/charm/nbi/src/charm.py
+++ b/installers/charm/nbi/src/charm.py
@@ -118,7 +118,6 @@
class NbiCharm(CharmedOsmBase):
-
on = KafkaEvents()
def __init__(self, *args) -> NoReturn:
diff --git a/installers/charm/osm-nbi/tests/integration/test_charm.py b/installers/charm/osm-nbi/tests/integration/test_charm.py
new file mode 100644
index 0000000..ac35ea6
--- /dev/null
+++ b/installers/charm/osm-nbi/tests/integration/test_charm.py
@@ -0,0 +1,197 @@
+#!/usr/bin/env python3
+# Copyright 2022 Canonical Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# For those usages not covered by the Apache License, Version 2.0 please
+# contact: legal@canonical.com
+#
+# To get in touch with the maintainers, please contact:
+# osm-charmers@lists.launchpad.net
+#
+# Learn more about testing at: https://juju.is/docs/sdk/testing
+
+import asyncio
+import logging
+import shlex
+from pathlib import Path
+
+import pytest
+import yaml
+from pytest_operator.plugin import OpsTest
+
+logger = logging.getLogger(__name__)
+
+METADATA = yaml.safe_load(Path("./metadata.yaml").read_text())
+NBI_APP = METADATA["name"]
+KAFKA_CHARM = "kafka-k8s"
+KAFKA_APP = "kafka"
+MARIADB_CHARM = "charmed-osm-mariadb-k8s"
+MARIADB_APP = "mariadb"
+MONGO_DB_CHARM = "mongodb-k8s"
+MONGO_DB_APP = "mongodb"
+KEYSTONE_CHARM = "osm-keystone"
+KEYSTONE_APP = "keystone"
+PROMETHEUS_CHARM = "osm-prometheus"
+PROMETHEUS_APP = "prometheus"
+ZOOKEEPER_CHARM = "zookeeper-k8s"
+ZOOKEEPER_APP = "zookeeper"
+INGRESS_CHARM = "nginx-ingress-integrator"
+INGRESS_APP = "ingress"
+APPS = [KAFKA_APP, MONGO_DB_APP, MARIADB_APP, ZOOKEEPER_APP, KEYSTONE_APP, PROMETHEUS_APP, NBI_APP]
+
+
+@pytest.mark.abort_on_fail
+async def test_nbi_is_deployed(ops_test: OpsTest):
+ charm = await ops_test.build_charm(".")
+ resources = {"nbi-image": METADATA["resources"]["nbi-image"]["upstream-source"]}
+
+ await asyncio.gather(
+ ops_test.model.deploy(
+ charm, resources=resources, application_name=NBI_APP, series="focal"
+ ),
+ ops_test.model.deploy(KAFKA_CHARM, application_name=KAFKA_APP, channel="stable"),
+ ops_test.model.deploy(MONGO_DB_CHARM, application_name=MONGO_DB_APP, channel="stable"),
+ ops_test.model.deploy(MARIADB_CHARM, application_name=MARIADB_APP, channel="stable"),
+ ops_test.model.deploy(ZOOKEEPER_CHARM, application_name=ZOOKEEPER_APP, channel="stable"),
+ ops_test.model.deploy(PROMETHEUS_CHARM, application_name=PROMETHEUS_APP, channel="stable"),
+ )
+ # Keystone charm has to be deployed differently since
+ # bug https://github.com/juju/python-libjuju/issues/766
+# prevents correctly setting the resources
+ cmd = f"juju deploy {KEYSTONE_CHARM} {KEYSTONE_APP} --resource keystone-image=opensourcemano/keystone:12"
+ await ops_test.run(*shlex.split(cmd), check=True)
+
+ async with ops_test.fast_forward():
+ await ops_test.model.wait_for_idle(
+ apps=APPS,
+ )
+ assert ops_test.model.applications[NBI_APP].status == "blocked"
+ unit = ops_test.model.applications[NBI_APP].units[0]
+ assert unit.workload_status_message == "need kafka, mongodb, prometheus, keystone relations"
+
+ logger.info("Adding relations for other components")
+ await ops_test.model.add_relation(KAFKA_APP, ZOOKEEPER_APP)
+ await ops_test.model.add_relation(MARIADB_APP, KEYSTONE_APP)
+
+ logger.info("Adding relations")
+ await ops_test.model.add_relation(NBI_APP, MONGO_DB_APP)
+ await ops_test.model.add_relation(NBI_APP, KAFKA_APP)
+ await ops_test.model.add_relation(NBI_APP, PROMETHEUS_APP)
+ await ops_test.model.add_relation(NBI_APP, KEYSTONE_APP)
+
+ async with ops_test.fast_forward():
+ await ops_test.model.wait_for_idle(
+ apps=APPS,
+ status="active",
+ )
+
+
+@pytest.mark.abort_on_fail
+async def test_nbi_scales_up(ops_test: OpsTest):
+ logger.info("Scaling up osm-nbi")
+ expected_units = 3
+ assert len(ops_test.model.applications[NBI_APP].units) == 1
+ await ops_test.model.applications[NBI_APP].scale(expected_units)
+ async with ops_test.fast_forward():
+ await ops_test.model.wait_for_idle(
+ apps=[NBI_APP], status="active", wait_for_exact_units=expected_units
+ )
+
+
+@pytest.mark.abort_on_fail
+@pytest.mark.parametrize(
+ "relation_to_remove", [KAFKA_APP, MONGO_DB_APP, PROMETHEUS_APP, KEYSTONE_APP]
+)
+async def test_nbi_blocks_without_relation(ops_test: OpsTest, relation_to_remove):
+ logger.info("Removing relation: %s", relation_to_remove)
+ # mongoDB relation is named "database"
+ local_relation = relation_to_remove
+ if local_relation == MONGO_DB_APP:
+ local_relation = "database"
+ await asyncio.gather(
+ ops_test.model.applications[relation_to_remove].remove_relation(local_relation, NBI_APP)
+ )
+ async with ops_test.fast_forward():
+ await ops_test.model.wait_for_idle(apps=[NBI_APP])
+ assert ops_test.model.applications[NBI_APP].status == "blocked"
+ for unit in ops_test.model.applications[NBI_APP].units:
+ assert unit.workload_status_message == f"need {relation_to_remove} relation"
+ await ops_test.model.add_relation(NBI_APP, relation_to_remove)
+ async with ops_test.fast_forward():
+ await ops_test.model.wait_for_idle(
+ apps=APPS,
+ status="active",
+ )
+
+
+@pytest.mark.abort_on_fail
+async def test_nbi_action_debug_mode_disabled(ops_test: OpsTest):
+ async with ops_test.fast_forward():
+ await ops_test.model.wait_for_idle(
+ apps=APPS,
+ status="active",
+ )
+ logger.info("Running action 'get-debug-mode-information'")
+ action = (
+ await ops_test.model.applications[NBI_APP]
+ .units[0]
+ .run_action("get-debug-mode-information")
+ )
+ async with ops_test.fast_forward():
+ await ops_test.model.wait_for_idle(apps=[NBI_APP])
+ status = await ops_test.model.get_action_status(uuid_or_prefix=action.entity_id)
+ assert status[action.entity_id] == "failed"
+
+
+@pytest.mark.abort_on_fail
+async def test_nbi_action_debug_mode_enabled(ops_test: OpsTest):
+ await ops_test.model.applications[NBI_APP].set_config({"debug-mode": "true"})
+ async with ops_test.fast_forward():
+ await ops_test.model.wait_for_idle(
+ apps=APPS,
+ status="active",
+ )
+ logger.info("Running action 'get-debug-mode-information'")
+ # list of units is not ordered
+ unit_id = list(
+ filter(
+ lambda x: (x.entity_id == f"{NBI_APP}/0"), ops_test.model.applications[NBI_APP].units
+ )
+ )[0]
+ action = await unit_id.run_action("get-debug-mode-information")
+ async with ops_test.fast_forward():
+ await ops_test.model.wait_for_idle(apps=[NBI_APP])
+ status = await ops_test.model.get_action_status(uuid_or_prefix=action.entity_id)
+ message = await ops_test.model.get_action_output(action_uuid=action.entity_id)
+ assert status[action.entity_id] == "completed"
+ assert "command" in message
+ assert "password" in message
+
+
+@pytest.mark.abort_on_fail
+async def test_nbi_integration_ingress(ops_test: OpsTest):
+ await asyncio.gather(
+ ops_test.model.deploy(INGRESS_CHARM, application_name=INGRESS_APP, channel="beta"),
+ )
+ async with ops_test.fast_forward():
+ await ops_test.model.wait_for_idle(
+ apps=APPS + [INGRESS_APP],
+ )
+
+ await ops_test.model.add_relation(NBI_APP, INGRESS_APP)
+ async with ops_test.fast_forward():
+ await ops_test.model.wait_for_idle(
+ apps=APPS + [INGRESS_APP],
+ status="active",
+ )
diff --git a/installers/charm/osm-nbi/tests/unit/test_charm.py b/installers/charm/osm-nbi/tests/unit/test_charm.py
index d9440a0..87afafa 100644
--- a/installers/charm/osm-nbi/tests/unit/test_charm.py
+++ b/installers/charm/osm-nbi/tests/unit/test_charm.py
@@ -87,13 +87,13 @@
# Add kafka relation
relation_id = harness.add_relation("kafka", "kafka")
harness.add_relation_unit(relation_id, "kafka/0")
- harness.update_relation_data(relation_id, "kafka", {"host": "kafka", "port": 9092})
+ harness.update_relation_data(relation_id, "kafka", {"host": "kafka", "port": "9092"})
relation_ids.append(relation_id)
# Add prometheus relation
relation_id = harness.add_relation("prometheus", "prometheus")
harness.add_relation_unit(relation_id, "prometheus/0")
harness.update_relation_data(
- relation_id, "prometheus", {"hostname": "prometheus", "port": 9090}
+ relation_id, "prometheus", {"hostname": "prometheus", "port": "9090"}
)
relation_ids.append(relation_id)
# Add keystone relation
diff --git a/installers/charm/osm-nbi/tox.ini b/installers/charm/osm-nbi/tox.ini
index b791e14..c1bada0 100644
--- a/installers/charm/osm-nbi/tox.ini
+++ b/installers/charm/osm-nbi/tox.ini
@@ -21,7 +21,7 @@
[tox]
skipsdist=True
skip_missing_interpreters = True
-envlist = lint, unit
+envlist = lint, unit, integration
[vars]
src_path = {toxinidir}/src/
@@ -92,4 +92,4 @@
pytest-operator
-r{toxinidir}/requirements.txt
commands =
- pytest -v --tb native --ignore={[vars]tst_path}unit --log-cli-level=INFO -s {posargs}
+ pytest -v --tb native --ignore={[vars]tst_path}unit --log-cli-level=INFO -s {posargs} --cloud microk8s
diff --git a/installers/charm/pla/src/charm.py b/installers/charm/pla/src/charm.py
index 36a11dc..d907f0b 100755
--- a/installers/charm/pla/src/charm.py
+++ b/installers/charm/pla/src/charm.py
@@ -76,7 +76,6 @@
class PlaCharm(CharmedOsmBase):
-
on = KafkaEvents()
def __init__(self, *args) -> NoReturn:
diff --git a/installers/charm/pol/src/charm.py b/installers/charm/pol/src/charm.py
index 7b92b45..94f6ecb 100755
--- a/installers/charm/pol/src/charm.py
+++ b/installers/charm/pol/src/charm.py
@@ -87,7 +87,6 @@
class PolCharm(CharmedOsmBase):
-
on = KafkaEvents()
def __init__(self, *args) -> NoReturn:
diff --git a/installers/full_install_osm.sh b/installers/full_install_osm.sh
index 79fb077..8c651e2 100755
--- a/installers/full_install_osm.sh
+++ b/installers/full_install_osm.sh
@@ -823,6 +823,8 @@
add_local_k8scluster
track final_ops add_local_k8scluster_ok
+ arrange_docker_default_network_policy
+
wget -q -O- https://osm-download.etsi.org/ftp/osm-13.0-thirteen/README2.txt &> /dev/null
track end
sudo find /etc/osm
@@ -884,6 +886,13 @@
return 0
}
+function arrange_docker_default_network_policy() {
+ echo -e "Fixing firewall so docker and LXD can share the same host without affecting each other."
+ sudo iptables -I DOCKER-USER -j ACCEPT
+ sudo iptables-save | sudo tee /etc/iptables/rules.v4
+ sudo ip6tables-save | sudo tee /etc/iptables/rules.v6
+}
+
function install_k8s_monitoring() {
[ -z "${DEBUG_INSTALL}" ] || DEBUG beginning of function
# install OSM monitoring
diff --git a/jenkins/ci-pipelines/ci_stage_2.groovy b/jenkins/ci-pipelines/ci_stage_2.groovy
index 6ca88f0..ddee6f4 100644
--- a/jenkins/ci-pipelines/ci_stage_2.groovy
+++ b/jenkins/ci-pipelines/ci_stage_2.groovy
@@ -149,9 +149,14 @@
for (charmPath in charms) {
def directory = charmPath
if (fileExists("$charmPath/charmcraft.yaml")) {
- CHECK_CHANGES = "git diff --name-only origin/$GERRIT_BRANCH -- $directory |wc -l"
+ if (isMergeJob) {
+ CHECK_CHANGES = "git diff --name-only HEAD^1 -- $directory |wc -l"
+ } else {
+ CHECK_CHANGES = "git diff --name-only origin/$GERRIT_BRANCH -- $directory |wc -l"
+ }
charmsChanged = sh(returnStdout:true, script: CHECK_CHANGES).trim()
- if (charmsChanged != "0" || isMergeJob) {
+
+ if (charmsChanged != "0") {
println("$directory has changes, rebuilding")
parallelSteps[directory] = {