New feature: Code changes for project support
[osm/SO.git] rwlaunchpad/plugins/rwlaunchpadtasklet/rift/tasklets/rwlaunchpad/export.py
#
# Copyright 2016 RIFT.IO Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import asyncio
import io
import os
import stat
import time
import uuid

import tornado.web

import rift.package.archive
import rift.package.checksums
import rift.package.convert
import rift.package.image
import rift.package.package
import rift.package.store

from . import state
from . import message
from . import tosca

import gi
gi.require_version('NsdYang', '1.0')
gi.require_version('VnfdYang', '1.0')
gi.require_version('RwPkgMgmtYang', '1.0')

from gi.repository import (
    NsdYang,
    VnfdYang,
    RwPkgMgmtYang,
)
import rift.mano.dts as mano_dts


RPC_PACKAGE_EXPORT_ENDPOINT = RwPkgMgmtYang.YangOutput_RwPkgMgmt_PackageExport
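# NOTE: RPC_PACKAGE_EXPORT_ENDPOINT above is the yang-generated output message
# for the /rw-pkg-mgmt:package-export RPC; ExportRpcHandler.callback() below
# builds its reply from it.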


class ExportStart(message.StatusMessage):
    def __init__(self):
        super().__init__("export-started", "export process started")


class ExportSuccess(message.StatusMessage):
    def __init__(self):
        super().__init__("export-success", "export process successfully completed")


class ExportFailure(message.StatusMessage):
    def __init__(self):
        super().__init__("export-failure", "export process failed")

class ExportError(message.ErrorMessage):
    def __init__(self, msg):
        super().__init__("export-error", msg)

class ExportSingleDescriptorOnlyError(ExportError):
    def __init__(self):
        super().__init__("Only a single descriptor can be exported")


class ArchiveExportError(Exception):
    pass


class DescriptorPackageArchiveExporter(object):
    def __init__(self, log):
        self._log = log

    def _create_archive_from_package(self, archive_hdl, package, open_fn):
        """ Temporarily swap in open_fn as the package's open method and build
        a TarPackageArchive from the package contents. """
        orig_open = package.open
        try:
            package.open = open_fn
            archive = rift.package.archive.TarPackageArchive.from_package(
                self._log, package, archive_hdl
            )
            return archive
        finally:
            package.open = orig_open

    def create_archive(self, archive_hdl, package, desc_json_str, serializer):
        """ Create a package archive from an existing package, a descriptor message,
        and a destination serializer.

        In order to stay flexible with the package directory structure and
        descriptor format, attempt to "augment" the onboarded package with the
        updated descriptor in the original format.  If the original package
        contained a checksum file, then recalculate the descriptor checksum.

        Arguments:
            archive_hdl - An open file handle with 'wb' permissions
            package - A DescriptorPackage instance
            desc_json_str - A descriptor (e.g. nsd, vnfd) JSON string
            serializer - A destination serializer (e.g. VnfdSerializer)

        Returns:
            A TarPackageArchive

        Raises:
            ArchiveExportError - Failed to create the exported archive
        """
        new_desc_msg = serializer.from_file_hdl(io.BytesIO(desc_json_str.encode()), ".json")
        _, dest_ext = os.path.splitext(package.descriptor_file)
        new_desc_hdl = io.BytesIO(serializer.to_string(new_desc_msg, dest_ext).encode())
        descriptor_checksum = rift.package.checksums.checksum(new_desc_hdl)

        checksum_file = None
        try:
            checksum_file = rift.package.package.PackageChecksumValidator.get_package_checksum_file(
                package
            )
        except FileNotFoundError:
            pass

        # Since we're going to intercept the open function to rewrite the descriptor
        # and checksum, save a handle to use below
        open_fn = package.open

        def create_checksum_file_hdl():
            with open_fn(checksum_file) as checksum_hdl:
                archive_checksums = rift.package.checksums.ArchiveChecksums.from_file_desc(
                    checksum_hdl
                )

            archive_checksums[package.descriptor_file] = descriptor_checksum

            checksum_hdl = io.BytesIO(archive_checksums.to_string().encode())
            return checksum_hdl
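        # For reference: ArchiveChecksums round-trips the package checksum file,
        # assumed here to hold one "<checksum>  <relative path>" entry per line,
        # so only the descriptor's entry is rewritten while every other file
        # checksum is preserved as-is.
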
        def open_wrapper(rel_path):
            """ Wraps the package open in order to rewrite the descriptor file and checksum """
            if rel_path == package.descriptor_file:
                return new_desc_hdl

            elif rel_path == checksum_file:
                return create_checksum_file_hdl()

            return open_fn(rel_path)

        archive = self._create_archive_from_package(archive_hdl, package, open_wrapper)

        return archive

    def export_package(self, package, export_dir, file_id, json_desc_str, dest_serializer):
        """ Export package as an archive to the export directory

        Arguments:
            package - A DescriptorPackage instance
            export_dir - The directory to export the package archive to
            file_id - A unique file id to name the archive as (i.e. <file_id>.tar.gz)
            json_desc_str - A descriptor (e.g. nsd, vnfd) JSON message string
            dest_serializer - A destination serializer (e.g. VnfdSerializer)

        Returns:
            The created archive path

        Raises:
            ArchiveExportError - Failed to create the exported archive
        """
        os.makedirs(export_dir, exist_ok=True)

        archive_path = os.path.join(export_dir, file_id + ".tar.gz")
        with open(archive_path, 'wb') as archive_hdl:
            try:
                self.create_archive(
                    archive_hdl, package, json_desc_str, dest_serializer
                )
            except Exception as e:
                os.remove(archive_path)
                msg = "Failed to create exported archive"
                self._log.error(msg)
                raise ArchiveExportError(msg) from e

        return archive_path
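
# Illustrative usage sketch (not part of the module): exporting an onboarded
# package with the exporter above.  The logger, package, and JSON string are
# assumed to come from the surrounding application; the export directory is a
# hypothetical example value.
#
#   exporter = DescriptorPackageArchiveExporter(logger)
#   archive_path = exporter.export_package(
#       package=package,                          # a DescriptorPackage instance
#       export_dir="/tmp/launchpad/exports",      # hypothetical directory
#       file_id=str(uuid.uuid4()),                # archive named <file_id>.tar.gz
#       json_desc_str=json_desc_str,              # descriptor serialized as JSON
#       dest_serializer=rift.package.convert.VnfdSerializer(),
#   )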


class ExportRpcHandler(mano_dts.AbstractRpcHandler):
    def __init__(self, application, catalog_map):
        """
        Args:
            application: UploaderApplication
            catalog_map: Dict containing the onboarded Vnfd and Nsd catalogs.
        """
        super().__init__(application.log, application.dts, application.loop)

        self.application = application
        self.store_map = application.package_store_map
        self.exporter = application.exporter
        self.catalog_map = catalog_map

    @property
    def xpath(self):
        return "/rw-pkg-mgmt:package-export"

    @asyncio.coroutine
    def callback(self, ks_path, msg):
        transaction_id = str(uuid.uuid4())
        log = message.Logger(
            self.log,
            self.application.messages[transaction_id],
        )

        file_name = self.export(transaction_id, log, msg)

        rpc_out = RPC_PACKAGE_EXPORT_ENDPOINT.from_dict({
            'transaction_id': transaction_id,
            'filename': file_name})

        return rpc_out

    def export(self, transaction_id, log, msg):
        log.message(ExportStart())
        desc_type = msg.package_type.lower()

        if desc_type not in self.catalog_map:
            raise ValueError("Invalid package type: {}".format(desc_type))

        # Parse the IDs
        desc_id = msg.package_id
        catalog = self.catalog_map[desc_type](project=msg.project_name)

        if desc_id not in catalog:
            raise ValueError("Unable to find package ID: {}".format(desc_id))

        desc_msg = catalog[desc_id]

        # Get the schema for exporting
        schema = msg.export_schema.lower()

        # Get the grammar for exporting
        grammar = msg.export_grammar.lower()

        # Get the format for exporting
        format_ = msg.export_format.lower()

        if grammar == 'tosca':
            filename = "{}.zip".format(transaction_id)
            self.export_tosca(schema, format_, desc_type, desc_id, desc_msg, log, transaction_id)
        else:
            filename = "{}.tar.gz".format(transaction_id)
            self.export_rift(schema, format_, desc_type, desc_id, desc_msg, log, transaction_id)

        log.message(message.FilenameMessage(filename))
        log.message(ExportSuccess())

        return filename

    def export_rift(self, schema, format_, desc_type, desc_id, desc_msg, log, transaction_id):
        convert = rift.package.convert
        schema_serializer_map = {
            "rift": {
                "vnfd": convert.RwVnfdSerializer,
                "nsd": convert.RwNsdSerializer,
            },
            "mano": {
                "vnfd": convert.VnfdSerializer,
                "nsd": convert.NsdSerializer,
            },
        }

        if schema not in schema_serializer_map:
            raise tornado.web.HTTPError(400, "unknown schema: {}".format(schema))

        if format_ != "yaml":
            log.warn("Only yaml format supported for export")

        if desc_type not in schema_serializer_map[schema]:
            raise tornado.web.HTTPError(400, "unknown descriptor type: {}".format(desc_type))

        # Use the rift superset schema as the source
        src_serializer = schema_serializer_map["rift"][desc_type]()

        dest_serializer = schema_serializer_map[schema][desc_type]()

        package_store = self.store_map[desc_type]

        # Attempt to get the package from the package store.
        # If that fails, create a temporary package using the descriptor only.
        try:
            package = package_store.get_package(desc_id)
        except rift.package.store.PackageNotFoundError:
            log.debug("stored package not found. creating package from descriptor config")

            desc_yaml_str = src_serializer.to_yaml_string(desc_msg)
            with io.BytesIO(desc_yaml_str.encode()) as hdl:
                hdl.name = "{}__{}.yaml".format(desc_msg.id, desc_type)
                package = rift.package.package.DescriptorPackage.from_descriptor_file_hdl(
                    log, hdl
                )

        self.exporter.export_package(
            package=package,
            export_dir=self.application.export_dir,
            file_id=transaction_id,
            json_desc_str=src_serializer.to_json_string(desc_msg),
            dest_serializer=dest_serializer,
        )

    def export_tosca(self, schema, format_, desc_type, desc_id, desc_msg, log, transaction_id):
        if format_ != "yaml":
            log.warn("Only yaml format supported for TOSCA export")

        if desc_type != "nsd":
            raise tornado.web.HTTPError(
                400,
                "An NSD must be passed to generate TOSCA: {}".format(desc_type))

        def get_pkg_from_store(id_, type_):
            package = None
            # Attempt to get the package from the package store
            try:
                package_store = self.store_map[type_]
                package = package_store.get_package(id_)

            except rift.package.store.PackageNotFoundError:
                log.debug("stored package not found for {}.".format(id_))
            except rift.package.store.PackageStoreError:
                log.debug("stored package error for {}.".format(id_))

            return package

        pkg = tosca.ExportTosca()

        # Add the NSD and related descriptors for exporting
        nsd_id = pkg.add_nsd(desc_msg, get_pkg_from_store(desc_id, "nsd"))

        catalog = self.catalog_map["vnfd"]
        for const_vnfd in desc_msg.constituent_vnfd:
            vnfd_id = const_vnfd.vnfd_id_ref
            if vnfd_id in catalog:
                pkg.add_vnfd(nsd_id,
                             catalog[vnfd_id],
                             get_pkg_from_store(vnfd_id, "vnfd"))
            else:
                raise tornado.web.HTTPError(
                    400,
                    "Unknown VNFD descriptor {} for NSD {}".format(vnfd_id, nsd_id))

        # Create the archive.
        pkg.create_archive(transaction_id,
                           dest=self.application.export_dir)

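# Sketch of the RPC input this handler consumes.  Field names are taken from
# the msg accesses in export() above; the input type name is an assumption,
# mirroring the generated output type RPC_PACKAGE_EXPORT_ENDPOINT:
#
#   rpc_in = RwPkgMgmtYang.YangInput_RwPkgMgmt_PackageExport.from_dict({
#       "package_type": "nsd",          # keys into catalog_map ("vnfd" or "nsd")
#       "package_id": "<descriptor id>",
#       "export_schema": "rift",        # "rift" or "mano" serializer family
#       "export_grammar": "tosca",      # "tosca" builds a zip; anything else a tar.gz
#       "export_format": "yaml",        # only yaml is fully supported
#       "project_name": "default",      # project support: selects the catalog
#   })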


class ExportStateHandler(state.StateHandler):
    STARTED = ExportStart
    SUCCESS = ExportSuccess
    FAILURE = ExportFailure


@asyncio.coroutine
def periodic_export_cleanup(log, loop, export_dir, period_secs=10 * 60, min_age_secs=30 * 60):
    """ Periodically clean up old exported archives (.tar.gz files) in export_dir

    Arguments:
        log - A Logger instance
        loop - An asyncio event loop
        export_dir - The directory in which to clean up old archives
        period_secs - The number of seconds between clean-ups
        min_age_secs - The minimum age of an archive to be eligible for cleanup

    """
    log.debug("Starting periodic export cleaning for export directory: %s", export_dir)

    # Create the export dir if it has not been created yet
    if not os.path.exists(export_dir):
        os.makedirs(export_dir)

    while True:
        yield from asyncio.sleep(period_secs, loop=loop)

        if not os.path.exists(export_dir):
            continue

        for file_name in os.listdir(export_dir):
            if not file_name.endswith(".tar.gz"):
                continue

            file_path = os.path.join(export_dir, file_name)

            try:
                file_stat = os.stat(file_path)
            except OSError as e:
                log.warning("Could not stat old exported archive: %s", str(e))
                continue

            file_age = time.time() - file_stat[stat.ST_MTIME]

            if file_age < min_age_secs:
                continue

            log.debug("Cleaning up old exported archive: %s", file_path)

            try:
                os.remove(file_path)
            except OSError as e:
                log.warning("Failed to remove old exported archive: %s", str(e))
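

# Illustrative scheduling sketch (assumed caller, not part of this module):
# periodic_export_cleanup() is a long-running coroutine, so the owning tasklet
# would schedule it on its event loop and cancel the task on shutdown, e.g.:
#
#   cleanup_task = loop.create_task(
#       periodic_export_cleanup(log, loop, export_dir="/tmp/launchpad/exports")
#   )
#   cleanup_task.cancel()   # stops the periodic sweep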