# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# NOTE(review): the original import block was mangled and several lines were
# dropped during extraction; the stdlib and third-party imports below are
# reconstructed from the names actually used in this module — confirm against
# upstream before merging.
import glob
import hashlib
import logging
import tarfile
import tempfile
import time
from datetime import datetime
from os import getcwd, listdir, mkdir, remove
from os.path import abspath, isdir, isfile, join
from shutil import copyfile, rmtree

import requests
import yaml
from osm_im.validation import Validation as validation_im
from packaging import version as versioning

from osmclient.common.exceptions import ClientException
from osmclient.common.package_tool import PackageTool
from osmclient.sol005.repo import Repo
38 def __init__(self
, http
=None, client
=None):
41 self
._apiName
= "/admin"
42 self
._apiVersion
= "/v1"
43 self
._apiResource
= "/osmrepos"
44 self
._logger
= logging
.getLogger("osmclient")
45 self
._apiBase
= "{}{}{}".format(
46 self
._apiName
, self
._apiVersion
, self
._apiResource
49 def pkg_list(self
, pkgtype
, filter=None, repo
=None):
51 Returns a repo based on name or id
53 self
._logger
.debug("")
54 self
._client
.get_token()
55 # Get OSM registered repository list
56 repositories
= self
.list()
58 repositories
= [r
for r
in repositories
if r
["name"] == repo
]
60 raise ClientException("Not repository found")
63 for repository
in repositories
:
65 r
= requests
.get("{}/index.yaml".format(repository
.get("url")))
67 if r
.status_code
== 200:
68 repo_list
= yaml
.safe_load(r
.text
)
69 vnf_packages
= repo_list
.get("{}_packages".format(pkgtype
))
70 for repo
in vnf_packages
:
71 versions
= vnf_packages
.get(repo
)
72 latest
= versions
.get("latest")
73 del versions
["latest"]
74 for version
in versions
:
75 latest_version
= False
80 "vendor": versions
[version
].get("vendor"),
81 "name": versions
[version
].get("name"),
83 "description": versions
[version
].get("description"),
84 "location": versions
[version
].get("path"),
85 "repository": repository
.get("name"),
86 "repourl": repository
.get("url"),
87 "latest": latest_version
,
92 "repository in url {} unreachable".format(repository
.get("url"))
94 except Exception as e
:
96 "Error cannot read from repository {} '{}': {}".format(
97 repository
["name"], repository
["url"], e
102 vnf_repos_filtered
= []
104 for vnf_repo
in vnf_repos
:
105 for k
, v
in vnf_repo
.items():
107 kf
, vf
= filter.split("=")
108 if k
== kf
and vf
in v
:
109 vnf_repos_filtered
.append(vnf_repo
)
111 vnf_repos
= vnf_repos_filtered
114 def get_pkg(self
, pkgtype
, name
, repo
, filter, version
):
116 Returns the filename of the PKG downloaded to disk
118 self
._logger
.debug("")
119 self
._client
.get_token()
122 # Get OSM registered repository list
123 pkgs
= self
.pkg_list(pkgtype
, filter, repo
)
125 if pkg
.get("repository") == repo
and pkg
.get("name") == name
:
126 if "latest" in version
:
127 if not pkg
.get("latest"):
130 version
= pkg
.get("version")
131 if pkg
.get("version") == version
:
133 "{}{}".format(pkg
.get("repourl"), pkg
.get("location")),
136 if r
.status_code
!= 200:
137 raise ClientException("Package not found")
139 with tempfile
.NamedTemporaryFile(delete
=False) as f
:
140 f
.write(r
.raw
.read())
143 raise ClientException(
144 "{} {} not found at repo {}".format(pkgtype
, name
, repo
)
148 def pkg_get(self
, pkgtype
, name
, repo
, version
, filter):
150 pkg_name
= self
.get_pkg(pkgtype
, name
, repo
, filter, version
)
152 raise ClientException("Package not found")
153 folder
, descriptor
= self
.zip_extraction(pkg_name
)
154 with
open(descriptor
) as pkg
:
155 pkg_descriptor
= yaml
.safe_load(pkg
)
156 rmtree(folder
, ignore_errors
=False)
159 and (pkg_descriptor
.get("vnfd") or pkg_descriptor
.get("vnfd:vnfd_catalog"))
162 and (pkg_descriptor
.get("nsd") or pkg_descriptor
.get("nsd:nsd_catalog"))
164 raise ClientException("Wrong Package type")
165 return pkg_descriptor
167 def repo_index(self
, origin
=".", destination
="."):
169 Repo Index main function
170 :param origin: origin directory for getting all the artifacts
171 :param destination: destination folder for create and index the valid artifacts
173 self
._logger
.debug("")
174 if destination
== ".":
175 if origin
== destination
:
176 destination
= "repository"
178 destination
= abspath(destination
)
179 origin
= abspath(origin
)
182 origin
= join(getcwd(), origin
)
183 if destination
[0] != "/":
184 destination
= join(getcwd(), destination
)
186 self
.init_directory(destination
)
187 artifacts
= [f
for f
in listdir(origin
) if isfile(join(origin
, f
))]
188 directories
= [f
for f
in listdir(origin
) if isdir(join(origin
, f
))]
189 for artifact
in artifacts
:
190 self
.register_artifact_in_repository(
191 join(origin
, artifact
), destination
, source
="file"
193 for artifact
in directories
:
194 self
.register_artifact_in_repository(
195 join(origin
, artifact
), destination
, source
="directory"
197 print("\nFinal Results: ")
199 "VNF Packages Indexed: "
200 + str(len(glob
.glob(destination
+ "/vnf/*/*/metadata.yaml")))
203 "NS Packages Indexed: "
204 + str(len(glob
.glob(destination
+ "/ns/*/*/metadata.yaml")))
207 def md5(self
, fname
):
210 :param fname: file path
211 :return: checksum string
213 self
._logger
.debug("")
214 hash_md5
= hashlib
.md5()
215 with
open(fname
, "rb") as f
:
216 for chunk
in iter(lambda: f
.read(4096), b
""):
217 hash_md5
.update(chunk
)
218 return hash_md5
.hexdigest()
220 def fields_building(self
, descriptor_dict
, file, package_type
):
222 From an artifact descriptor, obtain the fields required for indexing
223 :param descriptor_dict: artifact description
224 :param file: artifact package
225 :param package_type: type of artifact (vnf or ns)
228 self
._logger
.debug("")
230 base_path
= "/{}/".format(package_type
)
232 if package_type
== "vnf":
233 if descriptor_dict
.get("vnfd-catalog", False):
234 aux_dict
= descriptor_dict
.get("vnfd-catalog", {}).get("vnfd", [{}])[0]
236 aux_dict
= descriptor_dict
.get("vnfd:vnfd-catalog", {}).get(
241 for vdu
in aux_dict
.get("vdu", ()):
242 images
.append(vdu
.get("image"))
243 fields
["images"] = images
244 if package_type
== "ns":
245 if descriptor_dict
.get("nsd-catalog", False):
246 aux_dict
= descriptor_dict
.get("nsd-catalog", {}).get("nsd", [{}])[0]
248 aux_dict
= descriptor_dict
.get("nsd:nsd-catalog", {}).get("nsd", [{}])[
254 for vnf
in aux_dict
.get("constituent-vnfd", ()):
255 vnfs
.append(vnf
.get("vnfd-id-ref"))
256 self
._logger
.debug("Used VNFS in the NSD: " + str(vnfs
))
257 fields
["vnfd-id-ref"] = vnfs
259 fields
["name"] = aux_dict
.get("name")
260 fields
["id"] = aux_dict
.get("id")
261 fields
["description"] = aux_dict
.get("description")
262 fields
["vendor"] = aux_dict
.get("vendor")
263 fields
["version"] = aux_dict
.get("version", "1.0")
264 fields
["path"] = "{}{}/{}/{}-{}.tar.gz".format(
269 fields
.get("version"),
273 def zip_extraction(self
, file_name
):
275 Validation of artifact.
276 :param file: file path
277 :return: status details, status, fields, package_type
279 self
._logger
.debug("Decompressing package file")
280 temp_file
= "/tmp/{}".format(file_name
.split("/")[-1])
281 if file_name
!= temp_file
:
282 copyfile(file_name
, temp_file
)
283 with tarfile
.open(temp_file
, "r:gz") as tar
:
284 folder
= tar
.getnames()[0].split("/")[0]
288 descriptor_file
= glob
.glob("{}/*.y*ml".format(folder
))[0]
289 return folder
, descriptor_file
291 def validate_artifact(self
, path
, source
):
293 Validation of artifact.
294 :param path: file path
295 :return: status details, status, fields, package_type
297 self
._logger
.debug("")
301 if source
== "directory":
302 descriptor_file
= glob
.glob("{}/*.y*ml".format(path
))[0]
304 folder
, descriptor_file
= self
.zip_extraction(path
)
306 self
._logger
.debug("Opening descriptor file: {}".format(descriptor_file
))
308 with
open(descriptor_file
, "r") as f
:
309 descriptor_data
= f
.read()
310 validation
= validation_im()
311 desc_type
, descriptor_dict
= validation
.yaml_validation(descriptor_data
)
312 validation_im
.pyangbind_validation(self
, desc_type
, descriptor_dict
)
313 if "vnf" in list(descriptor_dict
.keys())[0]:
316 # raise ClientException("Not VNF package")
319 self
._logger
.debug("Descriptor: {}".format(descriptor_dict
))
320 fields
= self
.fields_building(descriptor_dict
, path
, package_type
)
321 self
._logger
.debug("Descriptor sucessfully validated")
324 "detail": "{}D successfully validated".format(package_type
.upper()),
331 except Exception as e
:
332 # Delete the folder we just created
333 return {"detail": str(e
)}, False, {}, package_type
336 rmtree(folder
, ignore_errors
=True)
338 def register_artifact_in_repository(self
, path
, destination
, source
):
340 Registration of one artifact in a repository
342 destination: path for index creation
344 self
._logger
.debug("")
349 _
, valid
, fields
, package_type
= self
.validate_artifact(path
, source
)
352 "{} {} Not well configured.".format(package_type
.upper(), str(path
))
355 if source
== "directory":
356 path
= pt
.build(path
)
358 fields
["checksum"] = self
.md5(path
)
359 self
.indexation(destination
, path
, package_type
, fields
)
361 except Exception as e
:
362 self
._logger
.exception(
363 "Error registering artifact in Repository: {}".format(e
)
365 raise ClientException(e
)
368 if source
== "directory" and compresed
:
371 def indexation(self
, destination
, path
, package_type
, fields
):
373 Process for index packages
374 :param destination: index repository path
375 :param path: path of the package
376 :param package_type: package type (vnf, ns)
377 :param fields: dict with the required values
379 self
._logger
.debug("")
381 "name": fields
.get("name"),
382 "description": fields
.get("description"),
383 "vendor": fields
.get("vendor"),
384 "path": fields
.get("path"),
388 destination
, package_type
, fields
.get("id"), fields
.get("version")
390 if isdir(join(destination
, package_type
, fields
.get("id"))):
391 if isdir(final_path
):
392 self
._logger
.warning(
393 "{} {} already exists".format(package_type
.upper(), str(path
))
403 + fields
.get("version")
408 open(final_path
+ "/" + "metadata.yaml", "w"),
409 default_flow_style
=False,
413 index
= yaml
.safe_load(open(destination
+ "/index.yaml"))
415 index
["{}_packages".format(package_type
)][fields
.get("id")][
416 fields
.get("version")
419 index
["{}_packages".format(package_type
)][fields
.get("id")][
422 ) < versioning
.parse(fields
.get("version")):
423 index
["{}_packages".format(package_type
)][fields
.get("id")][
425 ] = fields
.get("version")
428 open(destination
+ "/index.yaml", "w"),
429 default_flow_style
=False,
434 "{} {} added in the repository".format(
435 package_type
.upper(), str(path
)
439 mkdir(destination
+ "/{}/".format(package_type
) + fields
.get("id"))
447 + fields
.get("version")
452 open(join(final_path
, "metadata.yaml"), "w"),
453 default_flow_style
=False,
457 index
= yaml
.safe_load(open(destination
+ "/index.yaml"))
459 index
["{}_packages".format(package_type
)][fields
.get("id")] = {
460 fields
.get("version"): data_ind
462 index
["{}_packages".format(package_type
)][fields
.get("id")][
464 ] = fields
.get("version")
467 open(join(destination
, "index.yaml"), "w"),
468 default_flow_style
=False,
473 "{} {} added in the repository".format(package_type
.upper(), str(path
))
476 def current_datatime(self
):
479 :return: Datetime as string with the following structure "2020-04-29T08:41:07.681653Z"
481 self
._logger
.debug("")
482 return time
.strftime("%Y-%m-%dT%H:%M:%S.%sZ")
484 def init_directory(self
, destination
):
486 Initialize the index directory. Creation of index.yaml, and the directories for vnf and ns
490 self
._logger
.debug("")
491 if not isdir(destination
):
493 if not isfile(join(destination
, "index.yaml")):
494 mkdir(join(destination
, "vnf"))
495 mkdir(join(destination
, "ns"))
498 "generated": self
.current_datatime(),
502 with
open(join(destination
, "index.yaml"), "w") as outfile
:
504 index_data
, outfile
, default_flow_style
=False, width
=80, indent
=4