# Copyright 2016 RIFT.IO Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import asyncio
import io
import os
import stat
import time
import uuid

import tornado.web

import rift.package.archive
import rift.package.checksums
import rift.package.convert
import rift.package.image
import rift.package.package
import rift.package.store

import gi
gi.require_version('NsdYang', '1.0')
gi.require_version('VnfdYang', '1.0')
gi.require_version('RwPkgMgmtYang', '1.0')

from gi.repository import (
    NsdYang,
    VnfdYang,
    RwPkgMgmtYang,
)

import rift.mano.dts as mano_dts

# Launchpad-local helpers used below (status/logging messages, RPC state
# handling and TOSCA export), imported relative to this tasklet package.
from . import message
from . import state
from . import tosca

RPC_PACKAGE_EXPORT_ENDPOINT = RwPkgMgmtYang.YangOutput_RwPkgMgmt_PackageExport


class ExportStart(message.StatusMessage):
    def __init__(self):
        super().__init__("export-started", "export process started")


class ExportSuccess(message.StatusMessage):
    def __init__(self):
        super().__init__("export-success", "export process successfully completed")


class ExportFailure(message.StatusMessage):
    def __init__(self):
        super().__init__("export-failure", "export process failed")


class ExportError(message.ErrorMessage):
    def __init__(self, msg):
        super().__init__("update-error", msg)


class ExportSingleDescriptorOnlyError(ExportError):
    def __init__(self):
        super().__init__("Only a single descriptor can be exported")


class ArchiveExportError(Exception):
    pass


class DescriptorPackageArchiveExporter(object):
    def __init__(self, log):
        self._log = log

    def _create_archive_from_package(self, archive_hdl, package, open_fn):
        # Temporarily replace the package's open function so the archive is
        # built from the (possibly rewritten) file handles returned by open_fn.
        orig_open = package.open
        try:
            package.open = open_fn
            archive = rift.package.archive.TarPackageArchive.from_package(
                    self._log, package, archive_hdl
                    )
            return archive
        finally:
            package.open = orig_open

    def create_archive(self, archive_hdl, package, desc_json_str, serializer):
        """ Create a package archive from an existing package, descriptor messages,
        and a destination serializer.

        In order to stay flexible with the package directory structure and
        descriptor format, attempt to "augment" the onboarded package with the
        updated descriptor in the original format.  If the original package
        contained a checksum file, then recalculate the descriptor checksum.

        Arguments:
            archive_hdl - An open file handle with 'wb' permissions
            package - A DescriptorPackage instance
            desc_json_str - A descriptor (e.g. nsd, vnfd) json message string
            serializer - A destination serializer (e.g. VnfdSerializer)

        Returns:
            The created package archive

        Raises:
            ArchiveExportError - The exported archive failed to create

        """
        new_desc_msg = serializer.from_file_hdl(io.BytesIO(desc_json_str.encode()), ".json")
        _, dest_ext = os.path.splitext(package.descriptor_file)
        new_desc_hdl = io.BytesIO(serializer.to_string(new_desc_msg, dest_ext).encode())
        descriptor_checksum = rift.package.checksums.checksum(new_desc_hdl)

        checksum_file = None
        try:
            checksum_file = rift.package.package.PackageChecksumValidator.get_package_checksum_file(
                    package
                    )
        except FileNotFoundError:
            pass

        # Since we're going to intercept the open function to rewrite the descriptor
        # and checksum, save a handle to use below
        open_fn = package.open

        def create_checksum_file_hdl():
            # Rewrite the original checksum file with the recalculated descriptor checksum
            with open_fn(checksum_file) as checksum_hdl:
                archive_checksums = rift.package.checksums.ArchiveChecksums.from_file_desc(
                        checksum_hdl
                        )

            archive_checksums[package.descriptor_file] = descriptor_checksum

            checksum_hdl = io.BytesIO(archive_checksums.to_string().encode())
            return checksum_hdl

        def open_wrapper(rel_path):
            """ Wraps the package open in order to rewrite the descriptor file and checksum """
            if rel_path == package.descriptor_file:
                return new_desc_hdl

            elif rel_path == checksum_file:
                return create_checksum_file_hdl()

            return open_fn(rel_path)

        archive = self._create_archive_from_package(archive_hdl, package, open_wrapper)

        return archive
    def export_package(self, package, export_dir, file_id, json_desc_str, dest_serializer):
        """ Export package as an archive to the export directory

        Arguments:
            package - A DescriptorPackage instance
            export_dir - The directory to export the package archive to
            file_id - A unique file id to name the archive as (i.e. <file_id>.tar.gz)
            json_desc_str - A descriptor (e.g. nsd, vnfd) json message string
            dest_serializer - A destination serializer (e.g. VnfdSerializer)

        Returns:
            The created archive path

        Raises:
            ArchiveExportError - The exported archive failed to create

        """
        try:
            os.makedirs(export_dir, exist_ok=True)
        except FileExistsError:
            pass

        archive_path = os.path.join(export_dir, file_id + ".tar.gz")
        with open(archive_path, 'wb') as archive_hdl:
            try:
                self.create_archive(
                        archive_hdl, package, json_desc_str, dest_serializer
                        )
            except Exception as e:
                os.remove(archive_path)
                msg = "Failed to create exported archive"
                self._log.error(msg)
                raise ArchiveExportError(msg) from e

        return archive_path

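
# Rough usage sketch for DescriptorPackageArchiveExporter.export_package.  The
# package, serializer and export directory below are illustrative assumptions,
# not values defined in this module:
#
#   exporter = DescriptorPackageArchiveExporter(logger)
#   archive_path = exporter.export_package(
#           package=onboarded_package,              # a DescriptorPackage
#           export_dir="/tmp/launchpad/exports",    # hypothetical directory
#           file_id=str(uuid.uuid4()),
#           json_desc_str=serializer.to_json_string(desc_msg),
#           dest_serializer=serializer,             # e.g. a VnfdSerializer
#           )
#   # archive_path == os.path.join(export_dir, file_id + ".tar.gz")
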
class ExportRpcHandler(mano_dts.AbstractRpcHandler):
    def __init__(self, application, catalog_map):
        """
        Args:
            application: UploaderApplication
            catalog_map: Dict containing the Vnfd and Nsd onboarding catalogs.
        """
        super().__init__(application.log, application.dts, application.loop)

        self.application = application
        self.store_map = application.package_store_map
        self.exporter = application.exporter
        self.catalog_map = catalog_map

    @property
    def xpath(self):
        return "/rw-pkg-mgmt:package-export"

    @asyncio.coroutine
    def callback(self, ks_path, msg):
        transaction_id = str(uuid.uuid4())
        log = message.Logger(
                self.log,
                self.application.messages[transaction_id],
                )

        file_name = self.export(transaction_id, log, msg)

        rpc_out = RPC_PACKAGE_EXPORT_ENDPOINT.from_dict({
            'transaction_id': transaction_id,
            'filename': file_name,
            })

        return rpc_out
    def export(self, transaction_id, log, msg):
        log.message(ExportStart())

        desc_type = msg.package_type.lower()
        if desc_type not in self.catalog_map:
            raise ValueError("Invalid package type: {}".format(desc_type))

        desc_id = msg.package_id
        catalog = self.catalog_map[desc_type](project=msg.project_name)

        if desc_id not in catalog:
            raise ValueError("Unable to find package ID: {}".format(desc_id))

        desc_msg = catalog[desc_id]

        # Get the schema for exporting
        schema = msg.export_schema.lower()

        # Get the grammar for exporting
        grammar = msg.export_grammar.lower()

        # Get the format for exporting
        format_ = msg.export_format.lower()

        if grammar == 'tosca':
            filename = "{}.zip".format(transaction_id)
            self.export_tosca(schema, format_, desc_type, desc_id, desc_msg, log, transaction_id)
            log.message(message.FilenameMessage(filename))
        else:
            filename = "{}.tar.gz".format(transaction_id)
            self.export_rift(schema, format_, desc_type, desc_id, desc_msg, log, transaction_id)
            log.message(message.FilenameMessage(filename))

        log.message(ExportSuccess())

        return filename

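    # Shape of the package-export RPC input the export() method dispatches on,
    # as accessed above (the values are illustrative only):
    #
    #   msg.package_type    "vnfd" or "nsd"   selects the catalog and package store
    #   msg.package_id      descriptor id     must exist in the selected catalog
    #   msg.export_schema   e.g. "rift"       selects the destination serializer
    #   msg.export_grammar  "tosca" produces a .zip via export_tosca;
    #                       any other value produces a .tar.gz via export_rift
    #   msg.export_format   only "yaml" is currently supported
    #   msg.project_name    project used to look up the onboarding catalog
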
    def export_rift(self, schema, format_, desc_type, desc_id, desc_msg, log, transaction_id):
        convert = rift.package.convert
        schema_serializer_map = {
                "rift": {
                    "vnfd": convert.RwVnfdSerializer,
                    "nsd": convert.RwNsdSerializer,
                    },
                "mano": {
                    "vnfd": convert.VnfdSerializer,
                    "nsd": convert.NsdSerializer,
                    },
                }

        if schema not in schema_serializer_map:
            raise tornado.web.HTTPError(400, "unknown schema: {}".format(schema))

        if format_ != "yaml":
            log.warn("Only yaml format supported for export")

        if desc_type not in schema_serializer_map[schema]:
            raise tornado.web.HTTPError(400, "unknown descriptor type: {}".format(desc_type))

        # Use the rift superset schema as the source
        src_serializer = schema_serializer_map["rift"][desc_type]()

        dest_serializer = schema_serializer_map[schema][desc_type]()

        package_store = self.store_map[desc_type]

        # Attempt to get the package from the package store.
        # If that fails, create a temporary package using the descriptor only.
        try:
            package = package_store.get_package(desc_id)
        except rift.package.store.PackageNotFoundError:
            log.debug("stored package not found. creating package from descriptor config")

            desc_yaml_str = src_serializer.to_yaml_string(desc_msg)
            with io.BytesIO(desc_yaml_str.encode()) as hdl:
                hdl.name = "{}__{}.yaml".format(desc_msg.id, desc_type)
                package = rift.package.package.DescriptorPackage.from_descriptor_file_hdl(
                        self.log, hdl
                        )

        self.exporter.export_package(
                package=package,
                export_dir=self.application.export_dir,
                file_id=transaction_id,
                json_desc_str=src_serializer.to_json_string(desc_msg),
                dest_serializer=dest_serializer,
                )
    def export_tosca(self, schema, format_, desc_type, desc_id, desc_msg, log, transaction_id):
        if format_ != "yaml":
            log.warn("Only yaml format supported for TOSCA export")

        if desc_type != "nsd":
            raise tornado.web.HTTPError(
                400,
                "An NSD must be passed to generate TOSCA: {}".format(desc_type))

        def get_pkg_from_store(id_, type_):
            package = None
            # Attempt to get the package from the package store
            try:
                package_store = self.store_map[type_]
                package = package_store.get_package(id_)
            except rift.package.store.PackageNotFoundError:
                log.debug("stored package not found for {}.".format(id_))
            except rift.package.store.PackageStoreError:
                log.debug("stored package error for {}.".format(id_))

            return package

        pkg = tosca.ExportTosca()

        # Add the NSD and related descriptors for exporting
        nsd_id = pkg.add_nsd(desc_msg, get_pkg_from_store(desc_id, "nsd"))

        catalog = self.catalog_map["vnfd"]
        for const_vnfd in desc_msg.constituent_vnfd:
            vnfd_id = const_vnfd.vnfd_id_ref
            if vnfd_id in catalog:
                pkg.add_vnfd(nsd_id, catalog[vnfd_id],
                             get_pkg_from_store(vnfd_id, "vnfd"))
            else:
                raise tornado.web.HTTPError(
                    400,
                    "Unknown VNFD descriptor {} for NSD {}".
                    format(vnfd_id, nsd_id))

        # Create the archive.
        pkg.create_archive(transaction_id,
                           dest=self.application.export_dir)

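
# Rough wiring sketch (assumed names, not defined in this module): the uploader
# application would typically create the handler during tasklet start-up and
# register it with DTS, assuming the usual rift.mano.dts register() coroutine:
#
#   export_handler = ExportRpcHandler(application, catalog_map={
#       "vnfd": get_vnfd_catalog,   # hypothetical per-project catalog accessors
#       "nsd": get_nsd_catalog,
#       })
#   yield from export_handler.register()
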
class ExportStateHandler(state.StateHandler):
    STARTED = ExportStart
    SUCCESS = ExportSuccess
    FAILURE = ExportFailure


@asyncio.coroutine
def periodic_export_cleanup(log, loop, export_dir, period_secs=10 * 60, min_age_secs=30 * 60):
    """ Periodically clean up old exported archives (.tar.gz files) in export_dir

    Arguments:
        log - A Logger instance
        loop - An asyncio event loop
        export_dir - The directory to clean up old archives in
        period_secs - The number of seconds between clean-ups
        min_age_secs - The minimum age of an archive to be eligible for cleanup

    """
    log.debug("Starting periodic export cleaning for export directory: %s", export_dir)

    # Create the export dir if it does not exist yet
    if not os.path.exists(export_dir):
        os.makedirs(export_dir)

    while True:
        yield from asyncio.sleep(period_secs, loop=loop)

        if not os.path.exists(export_dir):
            continue

        for file_name in os.listdir(export_dir):
            if not file_name.endswith(".tar.gz"):
                continue

            file_path = os.path.join(export_dir, file_name)

            try:
                file_stat = os.stat(file_path)
            except OSError as e:
                log.warning("Could not stat old exported archive: %s", str(e))
                continue

            file_age = time.time() - file_stat[stat.ST_MTIME]
            if file_age < min_age_secs:
                continue

            log.debug("Cleaning up old exported archive: %s", file_path)

            try:
                os.remove(file_path)
            except OSError as e:
                log.warning("Failed to remove old exported archive: %s", str(e))