osmclient/common/package_tool.py
#!/usr/bin/env python3
# Copyright 2019 ATOS
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from osmclient.common.exceptions import ClientException
import os
import glob
import time
import tarfile
import hashlib
from osm_im.validation import Validation as validation_im
from jinja2 import Environment, PackageLoader


class PackageTool(object):
    def __init__(self, client=None):
        self._client = client

    def create(self, package_type, base_directory, package_name, override, image, vdus, vcpu, memory, storage,
               interfaces, vendor, detailed, netslice_subnets, netslice_vlds):
        """
        **Create a package descriptor**

        :params:
            - package_type: [vnf, ns, nst]
            - base_directory: path of the destination folder
            - package_name: name of the package to be created
            - override: overwrite existing files in the package folder
            - image: image of the vdu
            - vdus: number of virtual deployment units
            - vcpu: number of virtual cpus of the vdu
            - memory: amount of memory in MB of the vdu
            - storage: amount of storage in GB of the vdu
            - interfaces: number of interfaces besides the management interface
            - vendor: vendor name of the vnf/ns
            - detailed: include all possible values for NSD, VNFD, NST
            - netslice_subnets: number of netslice subnets for the NST
            - netslice_vlds: number of virtual link descriptors for the NST

        :return: status
        """

        # print("location: {}".format(osmclient.__path__))
        file_loader = PackageLoader("osmclient")
        env = Environment(loader=file_loader)
        if package_type == 'ns':
            template = env.get_template('nsd.yaml.j2')
            content = {"name": package_name, "vendor": vendor, "vdus": vdus, "clean": False, "interfaces": interfaces,
                       "detailed": detailed}
        elif package_type == 'vnf':
            template = env.get_template('vnfd.yaml.j2')
            content = {"name": package_name, "vendor": vendor, "vdus": vdus, "clean": False, "interfaces": interfaces,
                       "image": image, "vcpu": vcpu, "memory": memory, "storage": storage, "detailed": detailed}
        elif package_type == 'nst':
            template = env.get_template('nst.yaml.j2')
            content = {"name": package_name, "vendor": vendor, "interfaces": interfaces,
                       "netslice_subnets": netslice_subnets, "netslice_vlds": netslice_vlds, "detailed": detailed}
        else:
            raise ClientException("Wrong descriptor type {}. Options: ns, vnf, nst".format(package_type))

        # print("To be rendered: {}".format(content))
        output = template.render(content)
        # print(output)

        structure = self.discover_folder_structure(base_directory, package_name, override)
        if structure.get("folders"):
            self.create_folders(structure["folders"], package_type)
        if structure.get("files"):
            self.create_files(structure["files"], output, package_type)
        return "Created"

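    # Usage sketch (illustrative only, not part of the class API): the argument
    # values below are hypothetical examples of what the OSM client would pass in.
    #
    #   tool = PackageTool()
    #   tool.create("vnf", ".", "mypackage", override=False, image="ubuntu18.04",
    #               vdus=1, vcpu=1, memory=1024, storage=10, interfaces=0,
    #               vendor="OSM", detailed=False, netslice_subnets=1, netslice_vlds=1)
    #
    # Expected result: a "./mypackage_vnf" folder tree containing the rendered
    # mypackage_vnfd.yaml, a README.md and cloud_init/cloud-config.txt.
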
    def validate(self, base_directory):
        """
        **Validate OSM Descriptors given a path**

        :params:
            - base_directory: root path for all descriptors

        :return: list of dicts of validated descriptors. keys: type, path, valid, error
        """
        table = []
        descriptors_paths = [f for f in glob.glob(base_directory + "/**/*.yaml", recursive=True)]
        print("Base directory: {}".format(base_directory))
        print("{} Descriptors found to validate".format(len(descriptors_paths)))
        for desc_path in descriptors_paths:
            with open(desc_path) as descriptor_file:
                descriptor_data = descriptor_file.read()
            desc_type = "-"
            try:
                desc_type, descriptor_data = validation_im.yaml_validation(self, descriptor_data)
                validation_im.pyangbind_validation(self, desc_type, descriptor_data)
                table.append({"type": desc_type, "path": desc_path, "valid": "OK", "error": "-"})
            except Exception as e:
                table.append({"type": desc_type, "path": desc_path, "valid": "ERROR", "error": str(e)})
        return table

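    # Illustrative return value (hypothetical paths; the type string is whatever
    # osm_im reports), with one entry per YAML file found under base_directory:
    #
    #   [{"type": "vnfd", "path": "./mypackage_vnf/mypackage_vnfd.yaml", "valid": "OK", "error": "-"},
    #    {"type": "-", "path": "./broken/other.yaml", "valid": "ERROR", "error": "<exception text>"}]
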
    def build(self, package_folder, skip_validation=True):
        """
        **Creates a .tar.gz file given a package_folder**

        :params:
            - package_folder: name of the folder to be packaged
            - skip_validation: if False, validate the descriptors in the folder before building

        :returns: message result for the build process
        """

        if not os.path.exists("{}".format(package_folder)):
            return "Fail, package is not in the specified path"
        if not skip_validation:
            results = self.validate(package_folder)
            for result in results:
                if result["valid"] != "OK":
                    return "There was an error validating the file: {} with error: {}".format(result["path"],
                                                                                               result["error"])
        self.calculate_checksum(package_folder)
        with tarfile.open("{}.tar.gz".format(package_folder), mode='w:gz') as archive:
            print("Adding File: {}".format(package_folder))
            archive.add('{}'.format(package_folder), recursive=True)
        return "Created {}.tar.gz".format(package_folder)

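    # Illustrative call (hypothetical folder name): writes checksum.txt into the
    # folder, then produces "mypackage_vnf.tar.gz" next to it.
    #
    #   msg = PackageTool().build("mypackage_vnf", skip_validation=False)
    #   print(msg)  # "Created mypackage_vnf.tar.gz" on success
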
    def calculate_checksum(self, package_folder):
        """
        **Function to calculate the checksum given a folder**

        :params:
            - package_folder: folder containing the files whose checksums are calculated

        :returns: None
        """
        files = [f for f in glob.glob(package_folder + "/**/*.*", recursive=True)]
        with open("{}/checksum.txt".format(package_folder), "w+") as checksum:
            for file_item in files:
                if "checksum.txt" in file_item:
                    continue
                # from https://www.quickprogrammingtips.com/python/how-to-calculate-md5-hash-of-a-file-in-python.html
                md5_hash = hashlib.md5()
                with open(file_item, "rb") as f:
                    # Read and update hash in chunks of 4K
                    for byte_block in iter(lambda: f.read(4096), b""):
                        md5_hash.update(byte_block)
                checksum.write("{}\t{}\n".format(md5_hash.hexdigest(), file_item))

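    # Resulting checksum.txt format: one "<md5 hexdigest>\t<file path>" line per
    # file, with checksum.txt itself skipped. Hypothetical example content:
    #
    #   d41d8cd98f00b204e9800998ecf8427e	mypackage_vnf/README.md
    #   5d41402abc4b2a76b9719d911017c592	mypackage_vnf/mypackage_vnfd.yaml
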
    def create_folders(self, folders, package_type):
        """
        **Create folders given a list of folders**

        :params:
            - folders: [List] list of (folder_path, package_type) tuples to be created
            - package_type: type of package to be created

        :return: None
        """

        for folder in folders:
            try:
                # print("Folder {} == package_type {}".format(folder[1], package_type))
                if folder[1] == package_type:
                    print("Creating folder:\t{}".format(folder[0]))
                    os.makedirs(folder[0])
            except FileExistsError:
                pass

    def save_file(self, file_name, file_body):
        """
        **Create a file given a name and the content**

        :params:
            - file_name: name of the file, including its relative path
            - file_body: content of the file

        :return: None
        """
        print("Creating file: \t{}".format(file_name))
        try:
            with open(file_name, "w+") as f:
                f.write(file_body)
        except Exception as e:
            raise ClientException(e)

    def generate_readme(self):
        """
        **Creates the README content**

        :returns: readme content
        """
        return """# Descriptor created by OSM descriptor package generator\n\n**Created on {}**""".format(
            time.strftime("%m/%d/%Y, %H:%M:%S", time.localtime()))

    def generate_cloud_init(self):
        """
        **Creates the cloud-init content**

        :returns: cloud-init content
        """
        return "---\n#cloud-config"

    def create_files(self, files, file_content, package_type):
        """
        **Creates the files given the file list and type**

        :params:
            - files: list of (file_path, package_type, file_type) tuples
            - file_content: content of the descriptor rendered by the template
            - package_type: type of package used to filter which files are created

        :return: None
        """
        for file_item, file_package, file_type in files:
            if package_type == file_package:
                if file_type == "descriptor":
                    self.save_file(file_item, file_content)
                elif file_type == "readme":
                    self.save_file(file_item, self.generate_readme())
                elif file_type == "cloud_init":
                    self.save_file(file_item, self.generate_cloud_init())

    def check_files_folders(self, path_list, override):
        """
        **Find files and folders missing given a directory structure {"folders": [], "files": []}**

        :params:
            - path_list: dict with the lists of files and folders to be created
            - override: if True, include files in the result even if they already exist, so they get overwritten

        :return: dict of missing paths
        """
        missing_paths = {}
        folders = []
        files = []
        for folder in path_list.get("folders"):
            if not os.path.exists(folder[0]):
                folders.append(folder)
        missing_paths["folders"] = folders

        for file_item in path_list.get("files"):
            if not os.path.exists(file_item[0]) or override is True:
                files.append(file_item)
        missing_paths["files"] = files

        return missing_paths

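    # Illustrative return value (hypothetical paths): only the entries that do not
    # exist yet are kept, plus every file entry when override is True.
    #
    #   {"folders": [("./mypackage_vnf", "vnf"), ("./mypackage_vnf/charms", "vnf")],
    #    "files": [("./mypackage_vnf/mypackage_vnfd.yaml", "vnf", "descriptor")]}
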
    def discover_folder_structure(self, base_directory, name, override):
        """
        **Discover files and folders structure for OSM descriptors given a base_directory and name**

        :params:
            - base_directory: location of the package to be created
            - name: name of the package
            - override: if True, include files in the result even if they already exist, so they get overwritten

        :return: files and folders not found. In case of override, the full file list is returned
        """
        prefix = "{}/{}".format(base_directory, name)
        files_folders = {"folders": [("{}_ns".format(prefix), "ns"),
                                     ("{}_ns/icons".format(prefix), "ns"),
                                     ("{}_ns/charms".format(prefix), "ns"),
                                     ("{}_vnf".format(prefix), "vnf"),
                                     ("{}_vnf/charms".format(prefix), "vnf"),
                                     ("{}_vnf/cloud_init".format(prefix), "vnf"),
                                     ("{}_vnf/images".format(prefix), "vnf"),
                                     ("{}_vnf/icons".format(prefix), "vnf"),
                                     ("{}_vnf/scripts".format(prefix), "vnf"),
                                     ("{}_nst".format(prefix), "nst"),
                                     ("{}_nst/icons".format(prefix), "nst")
                                     ],
                         "files": [("{}_ns/{}_nsd.yaml".format(prefix, name), "ns", "descriptor"),
                                   ("{}_ns/README.md".format(prefix), "ns", "readme"),
                                   ("{}_vnf/{}_vnfd.yaml".format(prefix, name), "vnf", "descriptor"),
                                   ("{}_vnf/cloud_init/cloud-config.txt".format(prefix), "vnf", "cloud_init"),
                                   ("{}_vnf/README.md".format(prefix), "vnf", "readme"),
                                   ("{}_nst/{}_nst.yaml".format(prefix, name), "nst", "descriptor"),
                                   ("{}_nst/README.md".format(prefix), "nst", "readme")
                                   ]
                         }
        missing_files_folders = self.check_files_folders(files_folders, override)
        # print("Missing files and folders: {}".format(missing_files_folders))
        return missing_files_folders
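

# Minimal end-to-end sketch. The package name and argument values below are
# hypothetical; in normal use this class is driven by the osmclient CLI package
# commands rather than by running this module directly, and rendering requires
# the jinja2 templates shipped with the installed osmclient package.
if __name__ == "__main__":
    tool = PackageTool()
    print(tool.create("ns", ".", "demo", override=False, image=None, vdus=1, vcpu=1,
                      memory=1024, storage=10, interfaces=0, vendor="OSM",
                      detailed=False, netslice_subnets=1, netslice_vlds=1))
    print(tool.build("demo_ns", skip_validation=True))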