Skip to content
Snippets Groups Projects
Commit 47df6c46 authored by calvinosanc1's avatar calvinosanc1
Browse files

K8s_01-Robot tests packages


Signed-off-by: default avatarcalvinosanc1 <guillermo.calvino@canonical.com>
parent de8be78e
No related branches found
No related tags found
1 merge request!71K8s_01-Robot tests packages
Showing
with 1257 additions and 0 deletions
#!/bin/bash
# Install the OpenEBS operator, create the hostpath StorageClass and mark it
# as the cluster's default StorageClass.
# Fail fast: stop on first error / unset variable / failed pipeline stage.
set -euo pipefail
/snap/bin/kubectl apply -f https://openebs.github.io/charts/openebs-operator.yaml
/snap/bin/kubectl apply -f /home/ubuntu/openebs-storage-class.yaml
/snap/bin/kubectl patch storageclass openebs-hostpath -p '{"metadata": {"annotations":{"storageclass.kubernetes.io/is-default-class":"true"}}}'
# Juju manual-cloud definition: the K8s host is reached over SSH at the
# VNF's management IP (templated in before use).
clouds:
  k8s:
    type: manual
    endpoint: "ubuntu@{{ controller_mgmt_ip_address }}"
# OpenEBS local-hostpath StorageClass; volumes are provisioned lazily
# (WaitForFirstConsumer) under /var/openebs/local on the scheduled node.
apiVersion: storage.k8s.io/v1
kind: StorageClass
metadata:
  name: openebs-hostpath
  annotations:
    openebs.io/cas-type: local
    cas.openebs.io/config: |
      - name: BasePath
        value: "/var/openebs/local"
      - name: StorageType
        value: "hostpath"
provisioner: openebs.io/local
reclaimPolicy: Delete
volumeBindingMode: WaitForFirstConsumer
#!/bin/bash
# Fetch the cluster kubeconfig from the kubernetes-master unit, install the
# kubectl snap, and enable privileged containers on the master.
set -e
# Setup kubectl
mkdir -p ~/.kube
/snap/bin/juju scp kubernetes-master/0:config ~/.kube/config
sudo snap install kubectl --classic
# Allow privileges
/snap/bin/juju config kubernetes-master allow-privileged=true
#!/bin/bash
#!/bin/bash
# Remove a helm repository by name, then refresh the local repo index.
# Usage: remove-repo.sh <repo-name>
REPO_NAME=$1
# Quote the expansion so names with unusual characters are passed intact.
helm repo remove "${REPO_NAME}"
helm repo update
#!/bin/bash
# Roll back a helm release.
# Usage: rollback.sh <release> [revision]
# Revision 0 (the default) tells helm to roll back to the previous revision.
RELEASE=$1
REVISION="0"
if [ $# -gt 1 ]; then
  REVISION=$2
fi
# Quote expansions to avoid word splitting on unusual release names.
helm rollback "${RELEASE}" "${REVISION}"
#!/bin/bash
# Wait (up to 900 s) for cloud-init to finish on this machine, then set up SSH
# keys and bootstrap a Juju controller using the local "k8s" manual cloud.

# Poll for the marker file created by cloud-init's runcmd when it completes.
timeout=0
while [ "$timeout" -lt 900 ]; do
  if [ -f /home/ubuntu/tmp-cloudinit-installed.txt ]; then
    break
  fi
  sleep 10
  timeout=$((timeout + 10))   # POSIX arithmetic; $[...] is deprecated
done
# BUG FIX: the original test was '[ timeout -eq 900 ]' (missing $), which
# compared the literal string "timeout" and therefore never reported the
# timeout.  Also check the marker file so an exact-900s success isn't flagged.
if [ "$timeout" -ge 900 ] && [ ! -f /home/ubuntu/tmp-cloudinit-installed.txt ]; then
  >&2 echo "Cloudinit took longer than expected (> 900s). Not possible to start setup"
  exit 1
fi
# Generate an SSH keypair non-interactively (empty passphrase, default path;
# /dev/zero supplies the "overwrite?" answers).
cat /dev/zero | ssh-keygen -q -N ""
# Management IP of ens3 (address without the /prefix suffix).
MGMT_IP=$(ip -4 -o a s ens3 | awk '{split($4,a,"/"); print a[1]}')
# Authorise our key and Juju's client key on ubuntu@MGMT_IP, register the
# machine as a manual cloud, and bootstrap the controller onto it.
sshpass -posm4u ssh-copy-id -o "StrictHostKeyChecking no" "ubuntu@${MGMT_IP}"
/snap/bin/juju add-cloud --local k8s -f k8s-cloud.yaml
ssh-copy-id -i /home/ubuntu/.local/share/juju/ssh/juju_id_rsa "ubuntu@${MGMT_IP}"
/snap/bin/juju bootstrap k8s jujuk8s
#!/bin/bash
# Smoke-test an Elasticsearch NodePort service: index one user document and
# three blog posts, read each back, then run a search query.
CLUSTER_IP=192.168.0.151
ELASTIC_PORT=30280
# Base URL built once; all expansions below are double-quoted so the URL
# cannot be word-split or glob-expanded.
ES_URL="http://${CLUSTER_IP}:${ELASTIC_PORT}"
curl -XPUT "${ES_URL}/blog/user/jose" -H 'Content-Type: application/json' -d '{ "name" : "Jose" }'
curl -XPUT "${ES_URL}/blogpost/post/1" -H 'Content-Type: application/json' -d '
{
"user": "jose",
"postDate": "2011-12-15",
"body": "Search is hard. Search should be easy." ,
"title": "On search"
}'
curl -XPUT "${ES_URL}/blogpost/post/2" -H 'Content-Type: application/json' -d '
{
"user": "jose",
"postDate": "2011-12-12",
"body": "Distribution is hard. Distribution should be easy." ,
"title": "On distributed search"
}'
curl -XPUT "${ES_URL}/blogpost/post/3" -H 'Content-Type: application/json' -d '
{
"user": "jose",
"postDate": "2011-12-10",
"body": "Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat" ,
"title": "Lorem ipsum"
}'
# Read the documents back, pretty-printed.
curl -XGET "${ES_URL}/blog/user/jose?pretty=true"
curl -XGET "${ES_URL}/blogpost/post/1?pretty=true"
curl -XGET "${ES_URL}/blogpost/post/2?pretty=true"
curl -XGET "${ES_URL}/blogpost/post/3?pretty=true"
# Search all posts by user "jose".
curl "${ES_URL}/blogpost/post/_search?q=user:jose&pretty=true"
#!/usr/bin/python3
"""Monte-Carlo estimation of pi on a standalone Spark cluster."""
import pyspark
from pyspark.streaming import StreamingContext
# Kafka
from pyspark.streaming.kafka import KafkaUtils
import random
from operator import add

#sc = pyspark.SparkContext('spark://192.168.0.151:32077')
# Resource limits for the job: one 2 GB / 1-core executor, 2 GB driver.
spark_conf = pyspark.SparkConf().setAll([
    ('spark.executor.memory', '2g'),
    ('spark.executor.cores', '1'),
    ('spark.cores.max', '1'),
    ('spark.driver.memory', '2g'),
])
spark_conf.setMaster("spark://192.168.0.151:32077")
sc = pyspark.SparkContext(conf=spark_conf)
# sc.setLogLevel("DEBUG")

partitions = 100
n = 100000 * partitions


def f(_):
    # Draw a uniform point in [-1, 1]^2; score 1 if it falls in the unit circle.
    x = random.random() * 2 - 1
    y = random.random() * 2 - 1
    return 1 if x ** 2 + y ** 2 <= 1 else 0


# Count hits across all samples and scale: area ratio circle/square = pi/4.
count = sc.parallelize(range(1, n + 1), partitions).map(f).reduce(add)
print("Pi is roughly %f" % (4.0 * count / n))
sc.stop()
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<template encoding-version="1.2">
<description></description>
<groupId>22cd837d-016e-1000-1b65-15980a2479dd</groupId>
<name>testNifiKafka</name>
<snippet>
<connections>
<id>530cf8dc-85d3-36ef-0000-000000000000</id>
<parentGroupId>f23949b8-2d71-3a0b-0000-000000000000</parentGroupId>
<backPressureDataSizeThreshold>1 GB</backPressureDataSizeThreshold>
<backPressureObjectThreshold>10000</backPressureObjectThreshold>
<destination>
<groupId>f23949b8-2d71-3a0b-0000-000000000000</groupId>
<id>6d16d9e1-a5a0-320a-0000-000000000000</id>
<type>PROCESSOR</type>
</destination>
<flowFileExpiration>0 sec</flowFileExpiration>
<labelIndex>1</labelIndex>
<loadBalanceCompression>DO_NOT_COMPRESS</loadBalanceCompression>
<loadBalancePartitionAttribute></loadBalancePartitionAttribute>
<loadBalanceStatus>LOAD_BALANCE_NOT_CONFIGURED</loadBalanceStatus>
<loadBalanceStrategy>DO_NOT_LOAD_BALANCE</loadBalanceStrategy>
<name></name>
<selectedRelationships>success</selectedRelationships>
<source>
<groupId>f23949b8-2d71-3a0b-0000-000000000000</groupId>
<id>19e49328-8f2e-3bdd-0000-000000000000</id>
<type>PROCESSOR</type>
</source>
<zIndex>0</zIndex>
</connections>
<connections>
<id>7b37a8d5-c006-31e2-0000-000000000000</id>
<parentGroupId>f23949b8-2d71-3a0b-0000-000000000000</parentGroupId>
<backPressureDataSizeThreshold>1 GB</backPressureDataSizeThreshold>
<backPressureObjectThreshold>10000</backPressureObjectThreshold>
<destination>
<groupId>f23949b8-2d71-3a0b-0000-000000000000</groupId>
<id>19e49328-8f2e-3bdd-0000-000000000000</id>
<type>PROCESSOR</type>
</destination>
<flowFileExpiration>0 sec</flowFileExpiration>
<labelIndex>1</labelIndex>
<loadBalanceCompression>DO_NOT_COMPRESS</loadBalanceCompression>
<loadBalancePartitionAttribute></loadBalancePartitionAttribute>
<loadBalanceStatus>LOAD_BALANCE_NOT_CONFIGURED</loadBalanceStatus>
<loadBalanceStrategy>DO_NOT_LOAD_BALANCE</loadBalanceStrategy>
<name></name>
<selectedRelationships>success</selectedRelationships>
<source>
<groupId>f23949b8-2d71-3a0b-0000-000000000000</groupId>
<id>5d17b5e2-686d-357a-0000-000000000000</id>
<type>PROCESSOR</type>
</source>
<zIndex>0</zIndex>
</connections>
<controllerServices>
<id>1ee454f5-06af-37a5-0000-000000000000</id>
<parentGroupId>f23949b8-2d71-3a0b-0000-000000000000</parentGroupId>
<bundle>
<artifact>nifi-record-serialization-services-nar</artifact>
<group>org.apache.nifi</group>
<version>1.9.2</version>
</bundle>
<comments></comments>
<descriptors>
<entry>
<key>Schema Write Strategy</key>
<value>
<name>Schema Write Strategy</name>
</value>
</entry>
<entry>
<key>schema-cache</key>
<value>
<identifiesControllerService>org.apache.nifi.serialization.RecordSchemaCacheService</identifiesControllerService>
<name>schema-cache</name>
</value>
</entry>
<entry>
<key>schema-access-strategy</key>
<value>
<name>schema-access-strategy</name>
</value>
</entry>
<entry>
<key>schema-registry</key>
<value>
<identifiesControllerService>org.apache.nifi.schemaregistry.services.SchemaRegistry</identifiesControllerService>
<name>schema-registry</name>
</value>
</entry>
<entry>
<key>schema-name</key>
<value>
<name>schema-name</name>
</value>
</entry>
<entry>
<key>schema-version</key>
<value>
<name>schema-version</name>
</value>
</entry>
<entry>
<key>schema-branch</key>
<value>
<name>schema-branch</name>
</value>
</entry>
<entry>
<key>schema-text</key>
<value>
<name>schema-text</name>
</value>
</entry>
<entry>
<key>compression-format</key>
<value>
<name>compression-format</name>
</value>
</entry>
<entry>
<key>cache-size</key>
<value>
<name>cache-size</name>
</value>
</entry>
<entry>
<key>encoder-pool-size</key>
<value>
<name>encoder-pool-size</name>
</value>
</entry>
</descriptors>
<name>AvroRecordSetWriter</name>
<persistsState>false</persistsState>
<properties>
<entry>
<key>Schema Write Strategy</key>
</entry>
<entry>
<key>schema-cache</key>
</entry>
<entry>
<key>schema-access-strategy</key>
<value>schema-name</value>
</entry>
<entry>
<key>schema-registry</key>
<value>8327924e-8c30-3954-0000-000000000000</value>
</entry>
<entry>
<key>schema-name</key>
</entry>
<entry>
<key>schema-version</key>
</entry>
<entry>
<key>schema-branch</key>
</entry>
<entry>
<key>schema-text</key>
</entry>
<entry>
<key>compression-format</key>
</entry>
<entry>
<key>cache-size</key>
</entry>
<entry>
<key>encoder-pool-size</key>
</entry>
</properties>
<state>ENABLED</state>
<type>org.apache.nifi.avro.AvroRecordSetWriter</type>
</controllerServices>
<controllerServices>
<id>8327924e-8c30-3954-0000-000000000000</id>
<parentGroupId>f23949b8-2d71-3a0b-0000-000000000000</parentGroupId>
<bundle>
<artifact>nifi-registry-nar</artifact>
<group>org.apache.nifi</group>
<version>1.9.2</version>
</bundle>
<comments></comments>
<descriptors>
<entry>
<key>avro-reg-validated-field-names</key>
<value>
<name>avro-reg-validated-field-names</name>
</value>
</entry>
<entry>
<key>test-schema</key>
<value>
<name>test-schema</name>
</value>
</entry>
</descriptors>
<name>AvroSchemaRegistry</name>
<persistsState>false</persistsState>
<properties>
<entry>
<key>avro-reg-validated-field-names</key>
</entry>
<entry>
<key>test-schema</key>
<value>{
"type" : "record",
"namespace" : "Test",
"name" : "Employee",
"fields" : [
{ "name" : "YearsExperience" , "type" : "float" },
{ "name" : "Salary" , "type" : "float" }
]
}</value>
</entry>
</properties>
<state>ENABLED</state>
<type>org.apache.nifi.schemaregistry.services.AvroSchemaRegistry</type>
</controllerServices>
<controllerServices>
<id>8aa3514a-9e61-3f4d-0000-000000000000</id>
<parentGroupId>f23949b8-2d71-3a0b-0000-000000000000</parentGroupId>
<bundle>
<artifact>nifi-record-serialization-services-nar</artifact>
<group>org.apache.nifi</group>
<version>1.9.2</version>
</bundle>
<comments></comments>
<descriptors>
<entry>
<key>schema-access-strategy</key>
<value>
<name>schema-access-strategy</name>
</value>
</entry>
<entry>
<key>schema-registry</key>
<value>
<identifiesControllerService>org.apache.nifi.schemaregistry.services.SchemaRegistry</identifiesControllerService>
<name>schema-registry</name>
</value>
</entry>
<entry>
<key>schema-name</key>
<value>
<name>schema-name</name>
</value>
</entry>
<entry>
<key>schema-version</key>
<value>
<name>schema-version</name>
</value>
</entry>
<entry>
<key>schema-branch</key>
<value>
<name>schema-branch</name>
</value>
</entry>
<entry>
<key>schema-text</key>
<value>
<name>schema-text</name>
</value>
</entry>
<entry>
<key>csv-reader-csv-parser</key>
<value>
<name>csv-reader-csv-parser</name>
</value>
</entry>
<entry>
<key>Date Format</key>
<value>
<name>Date Format</name>
</value>
</entry>
<entry>
<key>Time Format</key>
<value>
<name>Time Format</name>
</value>
</entry>
<entry>
<key>Timestamp Format</key>
<value>
<name>Timestamp Format</name>
</value>
</entry>
<entry>
<key>CSV Format</key>
<value>
<name>CSV Format</name>
</value>
</entry>
<entry>
<key>Value Separator</key>
<value>
<name>Value Separator</name>
</value>
</entry>
<entry>
<key>Skip Header Line</key>
<value>
<name>Skip Header Line</name>
</value>
</entry>
<entry>
<key>ignore-csv-header</key>
<value>
<name>ignore-csv-header</name>
</value>
</entry>
<entry>
<key>Quote Character</key>
<value>
<name>Quote Character</name>
</value>
</entry>
<entry>
<key>Escape Character</key>
<value>
<name>Escape Character</name>
</value>
</entry>
<entry>
<key>Comment Marker</key>
<value>
<name>Comment Marker</name>
</value>
</entry>
<entry>
<key>Null String</key>
<value>
<name>Null String</name>
</value>
</entry>
<entry>
<key>Trim Fields</key>
<value>
<name>Trim Fields</name>
</value>
</entry>
<entry>
<key>csvutils-character-set</key>
<value>
<name>csvutils-character-set</name>
</value>
</entry>
</descriptors>
<name>CSVReader</name>
<persistsState>false</persistsState>
<properties>
<entry>
<key>schema-access-strategy</key>
<value>schema-name</value>
</entry>
<entry>
<key>schema-registry</key>
<value>8327924e-8c30-3954-0000-000000000000</value>
</entry>
<entry>
<key>schema-name</key>
</entry>
<entry>
<key>schema-version</key>
</entry>
<entry>
<key>schema-branch</key>
</entry>
<entry>
<key>schema-text</key>
</entry>
<entry>
<key>csv-reader-csv-parser</key>
</entry>
<entry>
<key>Date Format</key>
</entry>
<entry>
<key>Time Format</key>
</entry>
<entry>
<key>Timestamp Format</key>
</entry>
<entry>
<key>CSV Format</key>
</entry>
<entry>
<key>Value Separator</key>
</entry>
<entry>
<key>Skip Header Line</key>
<value>true</value>
</entry>
<entry>
<key>ignore-csv-header</key>
</entry>
<entry>
<key>Quote Character</key>
</entry>
<entry>
<key>Escape Character</key>
</entry>
<entry>
<key>Comment Marker</key>
</entry>
<entry>
<key>Null String</key>
</entry>
<entry>
<key>Trim Fields</key>
</entry>
<entry>
<key>csvutils-character-set</key>
</entry>
</properties>
<state>ENABLED</state>
<type>org.apache.nifi.csv.CSVReader</type>
</controllerServices>
<processors>
<id>19e49328-8f2e-3bdd-0000-000000000000</id>
<parentGroupId>f23949b8-2d71-3a0b-0000-000000000000</parentGroupId>
<position>
<x>0.0</x>
<y>224.0</y>
</position>
<bundle>
<artifact>nifi-update-attribute-nar</artifact>
<group>org.apache.nifi</group>
<version>1.9.2</version>
</bundle>
<config>
<bulletinLevel>WARN</bulletinLevel>
<comments></comments>
<concurrentlySchedulableTaskCount>1</concurrentlySchedulableTaskCount>
<descriptors>
<entry>
<key>Delete Attributes Expression</key>
<value>
<name>Delete Attributes Expression</name>
</value>
</entry>
<entry>
<key>Store State</key>
<value>
<name>Store State</name>
</value>
</entry>
<entry>
<key>Stateful Variables Initial Value</key>
<value>
<name>Stateful Variables Initial Value</name>
</value>
</entry>
<entry>
<key>canonical-value-lookup-cache-size</key>
<value>
<name>canonical-value-lookup-cache-size</name>
</value>
</entry>
<entry>
<key>schema.name</key>
<value>
<name>schema.name</name>
</value>
</entry>
</descriptors>
<executionNode>ALL</executionNode>
<lossTolerant>false</lossTolerant>
<penaltyDuration>30 sec</penaltyDuration>
<properties>
<entry>
<key>Delete Attributes Expression</key>
</entry>
<entry>
<key>Store State</key>
<value>Do not store state</value>
</entry>
<entry>
<key>Stateful Variables Initial Value</key>
</entry>
<entry>
<key>canonical-value-lookup-cache-size</key>
<value>100</value>
</entry>
<entry>
<key>schema.name</key>
<value>test-schema</value>
</entry>
</properties>
<runDurationMillis>0</runDurationMillis>
<schedulingPeriod>1 sec</schedulingPeriod>
<schedulingStrategy>TIMER_DRIVEN</schedulingStrategy>
<yieldDuration>1 sec</yieldDuration>
</config>
<executionNodeRestricted>false</executionNodeRestricted>
<name>UpdateAttribute</name>
<relationships>
<autoTerminate>false</autoTerminate>
<name>success</name>
</relationships>
<state>STOPPED</state>
<style/>
<type>org.apache.nifi.processors.attributes.UpdateAttribute</type>
</processors>
<processors>
<id>5d17b5e2-686d-357a-0000-000000000000</id>
<parentGroupId>f23949b8-2d71-3a0b-0000-000000000000</parentGroupId>
<position>
<x>11.0</x>
<y>0.0</y>
</position>
<bundle>
<artifact>nifi-standard-nar</artifact>
<group>org.apache.nifi</group>
<version>1.9.2</version>
</bundle>
<config>
<bulletinLevel>WARN</bulletinLevel>
<comments></comments>
<concurrentlySchedulableTaskCount>1</concurrentlySchedulableTaskCount>
<descriptors>
<entry>
<key>File Size</key>
<value>
<name>File Size</name>
</value>
</entry>
<entry>
<key>Batch Size</key>
<value>
<name>Batch Size</name>
</value>
</entry>
<entry>
<key>Data Format</key>
<value>
<name>Data Format</name>
</value>
</entry>
<entry>
<key>Unique FlowFiles</key>
<value>
<name>Unique FlowFiles</name>
</value>
</entry>
<entry>
<key>generate-ff-custom-text</key>
<value>
<name>generate-ff-custom-text</name>
</value>
</entry>
<entry>
<key>character-set</key>
<value>
<name>character-set</name>
</value>
</entry>
</descriptors>
<executionNode>ALL</executionNode>
<lossTolerant>false</lossTolerant>
<penaltyDuration>30 sec</penaltyDuration>
<properties>
<entry>
<key>File Size</key>
<value>0B</value>
</entry>
<entry>
<key>Batch Size</key>
<value>1</value>
</entry>
<entry>
<key>Data Format</key>
<value>Text</value>
</entry>
<entry>
<key>Unique FlowFiles</key>
<value>false</value>
</entry>
<entry>
<key>generate-ff-custom-text</key>
<value>YearsExperience,Salary
1.1,39343.00
1.3,46205.00
1.5,37731.00
2.0,43525.00
2.2,39891.00
2.9,56642.00
3.0,60150.00
3.2,54445.00
3.2,64445.00
3.7,57189.00
3.9,63218.00
4.0,55794.00
4.0,56957.00
4.1,57081.00
4.5,61111.00
4.9,67938.00
5.1,66029.00
5.3,83088.00
5.9,81363.00
6.0,93940.00
6.8,91738.00
7.1,98273.00
7.9,101302.00
8.2,113812.00
8.7,109431.00
9.0,105582.00
9.5,116969.00
9.6,112635.00
10.3,122391.00
10.5,121872.00</value>
</entry>
<entry>
<key>character-set</key>
<value>UTF-8</value>
</entry>
</properties>
<runDurationMillis>0</runDurationMillis>
<schedulingPeriod>60 sec</schedulingPeriod>
<schedulingStrategy>TIMER_DRIVEN</schedulingStrategy>
<yieldDuration>1 sec</yieldDuration>
</config>
<executionNodeRestricted>false</executionNodeRestricted>
<name>GenerateFlowFile</name>
<relationships>
<autoTerminate>false</autoTerminate>
<name>success</name>
</relationships>
<state>STOPPED</state>
<style/>
<type>org.apache.nifi.processors.standard.GenerateFlowFile</type>
</processors>
<processors>
<id>6d16d9e1-a5a0-320a-0000-000000000000</id>
<parentGroupId>f23949b8-2d71-3a0b-0000-000000000000</parentGroupId>
<position>
<x>15.0</x>
<y>433.0</y>
</position>
<bundle>
<artifact>nifi-kafka-2-0-nar</artifact>
<group>org.apache.nifi</group>
<version>1.9.2</version>
</bundle>
<config>
<bulletinLevel>WARN</bulletinLevel>
<comments></comments>
<concurrentlySchedulableTaskCount>1</concurrentlySchedulableTaskCount>
<descriptors>
<entry>
<key>bootstrap.servers</key>
<value>
<name>bootstrap.servers</name>
</value>
</entry>
<entry>
<key>topic</key>
<value>
<name>topic</name>
</value>
</entry>
<entry>
<key>record-reader</key>
<value>
<identifiesControllerService>org.apache.nifi.serialization.RecordReaderFactory</identifiesControllerService>
<name>record-reader</name>
</value>
</entry>
<entry>
<key>record-writer</key>
<value>
<identifiesControllerService>org.apache.nifi.serialization.RecordSetWriterFactory</identifiesControllerService>
<name>record-writer</name>
</value>
</entry>
<entry>
<key>use-transactions</key>
<value>
<name>use-transactions</name>
</value>
</entry>
<entry>
<key>acks</key>
<value>
<name>acks</name>
</value>
</entry>
<entry>
<key>attribute-name-regex</key>
<value>
<name>attribute-name-regex</name>
</value>
</entry>
<entry>
<key>message-header-encoding</key>
<value>
<name>message-header-encoding</name>
</value>
</entry>
<entry>
<key>security.protocol</key>
<value>
<name>security.protocol</name>
</value>
</entry>
<entry>
<key>kerberos-credentials-service</key>
<value>
<identifiesControllerService>org.apache.nifi.kerberos.KerberosCredentialsService</identifiesControllerService>
<name>kerberos-credentials-service</name>
</value>
</entry>
<entry>
<key>sasl.kerberos.service.name</key>
<value>
<name>sasl.kerberos.service.name</name>
</value>
</entry>
<entry>
<key>sasl.kerberos.principal</key>
<value>
<name>sasl.kerberos.principal</name>
</value>
</entry>
<entry>
<key>sasl.kerberos.keytab</key>
<value>
<name>sasl.kerberos.keytab</name>
</value>
</entry>
<entry>
<key>ssl.context.service</key>
<value>
<identifiesControllerService>org.apache.nifi.ssl.SSLContextService</identifiesControllerService>
<name>ssl.context.service</name>
</value>
</entry>
<entry>
<key>message-key-field</key>
<value>
<name>message-key-field</name>
</value>
</entry>
<entry>
<key>max.request.size</key>
<value>
<name>max.request.size</name>
</value>
</entry>
<entry>
<key>ack.wait.time</key>
<value>
<name>ack.wait.time</name>
</value>
</entry>
<entry>
<key>max.block.ms</key>
<value>
<name>max.block.ms</name>
</value>
</entry>
<entry>
<key>partitioner.class</key>
<value>
<name>partitioner.class</name>
</value>
</entry>
<entry>
<key>compression.type</key>
<value>
<name>compression.type</name>
</value>
</entry>
</descriptors>
<executionNode>ALL</executionNode>
<lossTolerant>false</lossTolerant>
<penaltyDuration>30 sec</penaltyDuration>
<properties>
<entry>
<key>bootstrap.servers</key>
<value>kafka-bitnami-0.kafka-bitnami-headless.default.svc.cluster.local:9092</value>
</entry>
<entry>
<key>topic</key>
<value>test</value>
</entry>
<entry>
<key>record-reader</key>
<value>8aa3514a-9e61-3f4d-0000-000000000000</value>
</entry>
<entry>
<key>record-writer</key>
<value>1ee454f5-06af-37a5-0000-000000000000</value>
</entry>
<entry>
<key>use-transactions</key>
<value>false</value>
</entry>
<entry>
<key>acks</key>
<value>0</value>
</entry>
<entry>
<key>attribute-name-regex</key>
</entry>
<entry>
<key>message-header-encoding</key>
<value>UTF-8</value>
</entry>
<entry>
<key>security.protocol</key>
<value>PLAINTEXT</value>
</entry>
<entry>
<key>kerberos-credentials-service</key>
</entry>
<entry>
<key>sasl.kerberos.service.name</key>
</entry>
<entry>
<key>sasl.kerberos.principal</key>
</entry>
<entry>
<key>sasl.kerberos.keytab</key>
</entry>
<entry>
<key>ssl.context.service</key>
</entry>
<entry>
<key>message-key-field</key>
</entry>
<entry>
<key>max.request.size</key>
<value>1 MB</value>
</entry>
<entry>
<key>ack.wait.time</key>
<value>5 secs</value>
</entry>
<entry>
<key>max.block.ms</key>
<value>5 sec</value>
</entry>
<entry>
<key>partitioner.class</key>
<value>org.apache.kafka.clients.producer.internals.DefaultPartitioner</value>
</entry>
<entry>
<key>compression.type</key>
<value>none</value>
</entry>
</properties>
<runDurationMillis>0</runDurationMillis>
<schedulingPeriod>60 sec</schedulingPeriod>
<schedulingStrategy>TIMER_DRIVEN</schedulingStrategy>
<yieldDuration>1 sec</yieldDuration>
</config>
<executionNodeRestricted>false</executionNodeRestricted>
<name>PublishKafkaRecord_2_0</name>
<relationships>
<autoTerminate>true</autoTerminate>
<name>failure</name>
</relationships>
<relationships>
<autoTerminate>true</autoTerminate>
<name>success</name>
</relationships>
<state>STOPPED</state>
<style/>
<type>org.apache.nifi.processors.kafka.pubsub.PublishKafkaRecord_2_0</type>
</processors>
</snippet>
<timestamp>11/07/2019 07:40:06 UTC</timestamp>
</template>
#!/bin/bash
#!/bin/bash
# Upgrade a helm release (atomically: roll back on failure).
# Usage: upgrade.sh <release> <chart> [namespace] [values-file]
RELEASE=$1
CHART=$2
# Build the option list as an array so later values containing spaces
# survive quoting (a single string would need unquoted word-splitting).
OPTS=(--atomic)
if [ $# -gt 2 ]; then
  OPTS+=(-n "$3")
fi
if [ $# -gt 3 ]; then
  OPTS+=(-f "$4")
fi
helm upgrade "${RELEASE}" "${CHART}" "${OPTS[@]}"
#!/bin/bash
# Install the Amulet charm-testing harness and its Python HTTP dependency
# from the Juju stable PPA.
sudo add-apt-repository -y ppa:juju/stable
sudo apt-get update
sudo apt-get install -y amulet python-requests
#!/usr/bin/python3
"""Amulet functional test for the tid-k8s-master charm.

The scraped original lost all indentation; structure restored here.
"""
import amulet
import requests
import unittest


class TestCharm(unittest.TestCase):
    def setUp(self):
        # Deploy and expose the charm, then block until the unit settles.
        self.d = amulet.Deployment()
        self.d.add('tid-k8s-master')
        self.d.expose('tid-k8s-master')
        self.d.setup(timeout=900)
        self.d.sentry.wait()
        self.unit = self.d.sentry['tid-k8s-master'][0]

    def test_service(self):
        # test we can access over http
        page = requests.get('http://{}'.format(self.unit.info['public-address']))
        self.assertEqual(page.status_code, 200)
        # Now you can use self.d.sentry[SERVICE][UNIT] to address each of the units and perform
        # more in-depth steps. Each self.d.sentry[SERVICE][UNIT] has the following methods:
        # - .info - An array of the information of that unit from Juju
        # - .file(PATH) - Get the details of a file on that unit
        # - .file_contents(PATH) - Get plain text output of PATH file from that unit
        # - .directory(PATH) - Get details of directory
        # - .directory_contents(PATH) - List files and folders in PATH on that unit
        # - .relation(relation, service:rel) - Get relation data from return service


if __name__ == '__main__':
    unittest.main()
5891094
\ No newline at end of file
File added
#cloud-config
# First-boot configuration for the Juju controller VM: set the ubuntu user's
# password, allow password SSH, install sshpass and the juju snap, and drop a
# marker file that the setup script polls for to detect completion.
password: osm4u
chpasswd: { expire: False }
ssh_pwauth: True
package_update: true
packages:
  - sshpass
snap:
  commands:
    00: snap install juju --classic --channel=2.6/stable
runcmd:
  - [ su, -l, ubuntu, -c, "touch /home/ubuntu/tmp-cloudinit-installed.txt" ]
k8s_jujucontroller_vnf/icons/osm.png

54.6 KiB

# OSM VNF descriptor: a single-VDU VNF running a Juju controller that deploys
# and manages a Kubernetes cluster via charm actions.  The web scrape stripped
# all indentation; restored here per the OSM (SOL-style) information model.
vnfd:vnfd-catalog:
  vnfd:
    - id: k8s_jujucontroller_vnf
      name: k8s_jujucontroller_vnf
      short-name: k8s_jujucontroller_vnf
      version: 1.0
      description: A VNF consisting of 1 VDU for juju to deploy K8s cluster
      logo: osm.png
      connection-point:
        - id: mgmt
          name: mgmt
          short-name: mgmt
      mgmt-interface:
        cp: mgmt
      vdu:
        - id: k8svm
          name: k8svm
          image: ubuntu18.04
          count: 1
          vm-flavor:
            vcpu-count: 2
            memory-mb: 4096
            storage-gb: 80
          interface:
            - name: eth0
              position: 1
              type: EXTERNAL
              virtual-interface:
                type: PARAVIRT
              external-connection-point-ref: mgmt
          cloud-init-file: cloud-config.txt
      vnf-configuration:
        config-access:
          ssh-access:
            required: True
            default-user: ubuntu
        juju:
          charm: tid-jujuk8s
        # Day-1 primitives, executed in 'seq' order after instantiation.
        initial-config-primitive:
          - seq: '1'
            name: config
            parameter:
              - name: ssh-hostname
                value: <rw_mgmt_ip>
              - name: ssh-username
                value: ubuntu
          - seq: '2'
            name: setup-juju
          - seq: '3'
            name: add-machines
            parameter:
              - name: machine1
                value: <MACHINE1>
              - name: machine2
                value: <MACHINE2>
              - name: machine3
                value: <MACHINE3>
              - name: machine4
                value: <MACHINE4>
              - name: machine5
                value: <MACHINE5>
              - name: machine6
                value: <MACHINE6>
              - name: machine7
                value: <MACHINE7>
              - name: machine8
                value: <MACHINE8>
              - name: machine9
                value: <MACHINE9>
              - name: machine10
                value: <MACHINE10>
          - seq: '4'
            name: deploy-bundle
            parameter:
              - name: bundle
                value: <BUNDLE>
          - seq: '5'
            name: post-deploy
          - seq: '6'
            name: install-openebs-kubectl
          # - seq: '7'
          #   name: init-helm
        # Day-2 primitives, callable on demand via OSM actions.
        config-primitive:
          - name: setup-juju
          - name: deploy-bundle
            parameter:
              - name: bundle
                data-type: STRING
          - name: undeploy-bundle
          - name: remove-machines
          - name: init-helm
          - name: install-openebs-chart
          - name: install-openebs-kubectl
          - name: post-deploy
          - name: add-machine
            parameter:
              - name: machine
                data-type: STRING
          - name: add-machines
            parameter:
              - name: machine1
                data-type: STRING
              - name: machine2
                data-type: STRING
              - name: machine3
                data-type: STRING
              - name: machine4
                data-type: STRING
              - name: machine5
                data-type: STRING
              - name: machine6
                data-type: STRING
              - name: machine7
                data-type: STRING
              - name: machine8
                data-type: STRING
              - name: machine9
                data-type: STRING
              - name: machine10
                data-type: STRING
          - name: remove-machines
            parameter:
              - name: force
                data-type: STRING
          - name: add-repo
            parameter:
              - name: name
                data-type: STRING
              - name: url
                data-type: STRING
          - name: remove-repo
            parameter:
              - name: name
                data-type: STRING
          - name: install-chart
            parameter:
              - name: chart
                data-type: STRING
              - name: name
                data-type: STRING
              - name: namespace
                data-type: STRING
              - name: atomic
                data-type: STRING
              - name: values
                data-type: STRING
              - name: valuesFile
                data-type: STRING
          - name: upgrade-release
            parameter:
              - name: chart
                data-type: STRING
              - name: name
                data-type: STRING
              - name: namespace
                data-type: STRING
              - name: atomic
                data-type: STRING
              - name: values
                data-type: STRING
              - name: valuesFile
                data-type: STRING
          - name: rollback-release
            parameter:
              - name: name
                data-type: STRING
              - name: revision
                data-type: STRING
          - name: delete-release
            parameter:
              - name: name
                data-type: STRING
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment