Coverage for osmclient/sol005/osmrepo.py: 16%

309 statements  

« prev     ^ index     » next       coverage.py v7.3.1, created at 2024-06-22 09:01 +0000

1# 

2# Licensed under the Apache License, Version 2.0 (the "License"); you may 

3# not use this file except in compliance with the License. You may obtain 

4# a copy of the License at 

5# 

6# http://www.apache.org/licenses/LICENSE-2.0 

7# 

8# Unless required by applicable law or agreed to in writing, software 

9# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 

10# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 

11# License for the specific language governing permissions and limitations 

12# under the License. 

13# 

14 

15""" 

16OSM Repo API handling 

17""" 

import glob
import logging
import tarfile
import tempfile
import time
from datetime import datetime, timezone
from os import getcwd, listdir, mkdir, remove
from os.path import abspath, isdir, isfile, join
from shutil import copyfile, rmtree

import requests
import yaml
from osm_im.validation import Validation as validation_im
from packaging import version as versioning

from osmclient.common import utils
from osmclient.common.exceptions import ClientException
from osmclient.common.package_tool import PackageTool
from osmclient.sol005.repo import Repo

35 

36 

37class OSMRepo(Repo): 

38 def __init__(self, http=None, client=None): 

39 self._http = http 

40 self._client = client 

41 self._apiName = "/admin" 

42 self._apiVersion = "/v1" 

43 self._apiResource = "/osmrepos" 

44 self._logger = logging.getLogger("osmclient") 

45 self._apiBase = "{}{}{}".format( 

46 self._apiName, self._apiVersion, self._apiResource 

47 ) 

48 

49 def pkg_list(self, pkgtype, filter=None, repo=None): 

50 """ 

51 Returns a repo based on name or id 

52 """ 

53 self._logger.debug("") 

54 self._client.get_token() 

55 # Get OSM registered repository list 

56 repositories = self.list() 

57 if repo: 

58 repositories = [r for r in repositories if r["name"] == repo] 

59 if not repositories: 

60 raise ClientException("Not repository found") 

61 

62 vnf_repos = [] 

63 for repository in repositories: 

64 try: 

65 r = requests.get("{}/index.yaml".format(repository.get("url"))) 

66 

67 if r.status_code == 200: 

68 repo_list = yaml.safe_load(r.text) 

69 vnf_packages = repo_list.get("{}_packages".format(pkgtype)) 

70 for repo in vnf_packages: 

71 versions = vnf_packages.get(repo) 

72 latest = versions.get("latest") 

73 del versions["latest"] 

74 for version in versions: 

75 latest_version = False 

76 if version == latest: 

77 latest_version = True 

78 vnf_repos.append( 

79 { 

80 "vendor": versions[version].get("vendor"), 

81 "name": versions[version].get("name"), 

82 "version": version, 

83 "description": versions[version].get("description"), 

84 "location": versions[version].get("path"), 

85 "repository": repository.get("name"), 

86 "repourl": repository.get("url"), 

87 "latest": latest_version, 

88 } 

89 ) 

90 else: 

91 raise Exception( 

92 "repository in url {} unreachable".format(repository.get("url")) 

93 ) 

94 except Exception as e: 

95 self._logger.error( 

96 "Error cannot read from repository {} '{}': {}".format( 

97 repository["name"], repository["url"], e 

98 ), 

99 exc_info=True, 

100 ) 

101 continue 

102 

103 vnf_repos_filtered = [] 

104 if filter: 

105 for vnf_repo in vnf_repos: 

106 for k, v in vnf_repo.items(): 

107 if v: 

108 kf, vf = filter.split("=") 

109 if k == kf and vf in v: 

110 vnf_repos_filtered.append(vnf_repo) 

111 break 

112 vnf_repos = vnf_repos_filtered 

113 return vnf_repos 

114 

115 def get_pkg(self, pkgtype, name, repo, filter, version): 

116 """ 

117 Returns the filename of the PKG downloaded to disk 

118 """ 

119 self._logger.debug("") 

120 self._client.get_token() 

121 f = None 

122 f_name = None 

123 # Get OSM registered repository list 

124 pkgs = self.pkg_list(pkgtype, filter, repo) 

125 for pkg in pkgs: 

126 if pkg.get("repository") == repo and pkg.get("name") == name: 

127 if "latest" in version: 

128 if not pkg.get("latest"): 

129 continue 

130 else: 

131 version = pkg.get("version") 

132 if pkg.get("version") == version: 

133 r = requests.get( 

134 "{}{}".format(pkg.get("repourl"), pkg.get("location")), 

135 stream=True, 

136 ) 

137 if r.status_code != 200: 

138 raise ClientException("Package not found") 

139 

140 with tempfile.NamedTemporaryFile(delete=False) as f: 

141 f.write(r.raw.read()) 

142 f_name = f.name 

143 if not f_name: 

144 raise ClientException( 

145 "{} {} not found at repo {}".format(pkgtype, name, repo) 

146 ) 

147 return f_name 

148 

149 def pkg_get(self, pkgtype, name, repo, version, filter): 

150 pkg_name = self.get_pkg(pkgtype, name, repo, filter, version) 

151 if not pkg_name: 

152 raise ClientException("Package not found") 

153 folder, descriptor = self.zip_extraction(pkg_name) 

154 with open(descriptor) as pkg: 

155 pkg_descriptor = yaml.safe_load(pkg) 

156 rmtree(folder, ignore_errors=False) 

157 if ( 

158 pkgtype == "vnf" 

159 and (pkg_descriptor.get("vnfd") or pkg_descriptor.get("vnfd:vnfd_catalog")) 

160 ) or ( 

161 pkgtype == "ns" 

162 and (pkg_descriptor.get("nsd") or pkg_descriptor.get("nsd:nsd_catalog")) 

163 ): 

164 raise ClientException("Wrong Package type") 

165 return pkg_descriptor 

166 

167 def repo_index(self, origin=".", destination="."): 

168 """ 

169 Repo Index main function 

170 :param origin: origin directory for getting all the artifacts 

171 :param destination: destination folder for create and index the valid artifacts 

172 """ 

173 self._logger.debug("Starting index composition") 

174 if destination == ".": 

175 if origin == destination: 

176 destination = "repository" 

177 

178 destination = abspath(destination) 

179 origin = abspath(origin) 

180 self._logger.debug(f"Paths {destination}, {origin}") 

181 if origin[0] != "/": 

182 origin = join(getcwd(), origin) 

183 if destination[0] != "/": 

184 destination = join(getcwd(), destination) 

185 

186 self.init_directory(destination) 

187 artifacts = [] 

188 directories = [] 

189 for f in listdir(origin): 

190 self._logger.debug(f"Element: {join(origin,f)}") 

191 if isfile(join(origin, f)) and f.endswith(".tar.gz"): 

192 artifacts.append(f) 

193 elif ( 

194 isdir(join(origin, f)) 

195 and f != destination.split("/")[-1] 

196 and not f.startswith(".") 

197 ): 

198 directories.append( 

199 f 

200 ) # TODO: Document that nested directories are not supported 

201 else: 

202 self._logger.debug(f"Ignoring {f}") 

203 self._logger.debug(f"Artifacts: {artifacts}") 

204 for package in artifacts: 

205 self.register_package_in_repository( 

206 join(origin, package), origin, destination, kind="artifact" 

207 ) 

208 self._logger.debug(f"Directories: {directories}") 

209 for package in directories: 

210 self.register_package_in_repository( 

211 join(origin, package), origin, destination, kind="directory" 

212 ) 

213 self._logger.info("\nFinal Results: ") 

214 self._logger.info( 

215 "VNF Packages Indexed: " 

216 + str(len(glob.glob(destination + "/vnf/*/*/metadata.yaml"))) 

217 ) 

218 self._logger.info( 

219 "NS Packages Indexed: " 

220 + str(len(glob.glob(destination + "/ns/*/*/metadata.yaml"))) 

221 ) 

222 

223 self._logger.info( 

224 "NST Packages Indexed: " 

225 + str(len(glob.glob(destination + "/nst/*/*/metadata.yaml"))) 

226 ) 

227 

    def fields_building(self, descriptor_dict, file, package_type):
        """
        From an artifact descriptor, obtain the fields required for indexing
        :param descriptor_dict: artifact description
        :param file: artifact package (currently unused by this method)
        :param package_type: type of artifact (vnf, ns, nst)
        :return: fields
        :raises ValueError: when the descriptor layout is not recognized
        """
        self._logger.debug("")

        fields = {}
        base_path = "/{}/".format(package_type)
        aux_dict = {}
        if package_type == "vnf":
            # Three descriptor layouts are accepted: a "vnfd-catalog" wrapper,
            # its namespaced "vnfd:vnfd-catalog" variant, and a flat "vnfd"
            # root that may itself wrap a one-element "vnfd" list — presumably
            # old OSM IM vs newer descriptor formats; TODO confirm.
            if descriptor_dict.get("vnfd-catalog", False):
                aux_dict = descriptor_dict.get("vnfd-catalog", {}).get("vnfd", [{}])[0]
            elif descriptor_dict.get("vnfd:vnfd-catalog"):
                aux_dict = descriptor_dict.get("vnfd:vnfd-catalog", {}).get(
                    "vnfd", [{}]
                )[0]
            elif descriptor_dict.get("vnfd"):
                aux_dict = descriptor_dict["vnfd"]
                if aux_dict.get("vnfd"):
                    aux_dict = aux_dict["vnfd"][0]
            else:
                msg = f"Unexpected descriptor format {descriptor_dict}"
                self._logger.error(msg)
                raise ValueError(msg)
            self._logger.debug(
                f"Extracted descriptor info for {package_type}: {aux_dict}"
            )
            images = []
            # Record each VDU's image (falling back to KDU entries, and to the
            # unit's name when no image field is present).
            for vdu in aux_dict.get("vdu", aux_dict.get("kdu", ())):
                images.append(vdu.get("image", vdu.get("name")))
            fields["images"] = images
        elif package_type == "ns":
            if descriptor_dict.get("nsd-catalog", False):
                aux_dict = descriptor_dict.get("nsd-catalog", {}).get("nsd", [{}])[0]
            elif descriptor_dict.get("nsd:nsd-catalog"):
                aux_dict = descriptor_dict.get("nsd:nsd-catalog", {}).get("nsd", [{}])[
                    0
                ]
            elif descriptor_dict.get("nsd"):
                aux_dict = descriptor_dict["nsd"]
                if aux_dict.get("nsd"):
                    aux_dict = descriptor_dict["nsd"]["nsd"][0]
            else:
                msg = f"Unexpected descriptor format {descriptor_dict}"
                self._logger.error(msg)
                raise ValueError(msg)
            vnfs = []
            # Older descriptors list "constituent-vnfd" objects; otherwise the
            # plain "vnfd-id" list is used as-is.
            if aux_dict.get("constituent-vnfd"):
                for vnf in aux_dict.get("constituent-vnfd", ()):
                    vnfs.append(vnf.get("vnfd-id-ref"))
            else:
                vnfs = aux_dict.get("vnfd-id")
            self._logger.debug("Used VNFS in the NSD: " + str(vnfs))
            fields["vnfd-id-ref"] = vnfs
        elif package_type == "nst":
            if descriptor_dict.get("nst-catalog", False):
                aux_dict = descriptor_dict.get("nst-catalog", {}).get("nst", [{}])[0]
            elif descriptor_dict.get("nst:nst-catalog"):
                aux_dict = descriptor_dict.get("nst:nst-catalog", {}).get("nst", [{}])[
                    0
                ]
            elif descriptor_dict.get("nst"):
                aux_dict = descriptor_dict["nst"]
                if aux_dict.get("nst"):
                    aux_dict = descriptor_dict["nst"]["nst"][0]
            nsds = []
            for nsd in aux_dict.get("netslice-subnet", ()):
                nsds.append(nsd.get("nsd-ref"))
            self._logger.debug("Used NSDs in the NST: " + str(nsds))
            # NOTE(review): unlike the vnf/ns branches, an unrecognized NST
            # layout is only detected here, indirectly, via an empty nsd list.
            if not nsds:
                msg = f"Unexpected descriptor format {descriptor_dict}"
                self._logger.error(msg)
                raise ValueError(msg)
            fields["nsd-id-ref"] = nsds
        else:
            msg = f"Unexpected descriptor format {descriptor_dict}"
            self._logger.error(msg)
            raise ValueError(msg)
        # Repo search is based on 'name' entry in index.yaml. It is mandatory then
        # (KeyError on "product-name" if neither name field exists).
        fields["name"] = aux_dict.get("name", aux_dict["product-name"])
        fields["id"] = aux_dict.get("id")
        fields["description"] = aux_dict.get("description")
        fields["vendor"] = aux_dict.get("vendor")
        fields["version"] = str(aux_dict.get("version", "1.0"))
        # Relative location of the tarball inside the repository tree:
        # /<type>/<id>/<version>/<id>-<version>.tar.gz
        fields["path"] = "{}{}/{}/{}-{}.tar.gz".format(
            base_path,
            fields["id"],
            fields["version"],
            fields.get("id"),
            fields.get("version"),
        )
        return fields

324 

325 def zip_extraction(self, file_name): 

326 """ 

327 Validation of artifact. 

328 :param file: file path 

329 :return: status details, status, fields, package_type 

330 """ 

331 self._logger.debug("Decompressing package file") 

332 temp_file = "/tmp/{}".format(file_name.split("/")[-1]) 

333 if file_name != temp_file: 

334 copyfile(file_name, temp_file) 

335 with tarfile.open(temp_file, "r:gz") as tar: 

336 folder = tar.getnames()[0].split("/")[0] 

337 tar.extractall() 

338 

339 remove(temp_file) 

340 descriptor_file = glob.glob("{}/*.y*ml".format(folder))[0] 

341 return folder, descriptor_file 

342 

    def validate_artifact(self, path, origin, kind):
        """
        Validation of artifact.
        :param path: file path
        :param origin: folder where the package is located
        :param kind: flag to select the correct file type (directory or artifact)
        :return: status details, status, fields, package_type
        """
        self._logger.debug(f"Validating {path} {kind}")
        package_type = ""
        folder = ""
        try:
            if kind == "directory":
                # Package directories keep their descriptor at the top level.
                descriptor_file = glob.glob("{}/*.y*ml".format(path))[0]
            else:
                # tar.gz artifacts are extracted first; track the extraction
                # folder so the finally block can clean it up.
                folder, descriptor_file = self.zip_extraction(path)
                folder = join(origin, folder)
                self._logger.debug(
                    f"Kind is an artifact (tar.gz). Folder: {folder}. Descriptor_file: {descriptor_file}"
                )

            self._logger.debug("Opening descriptor file: {}".format(descriptor_file))

            with open(descriptor_file, "r") as f:
                descriptor_data = f.read()
            self._logger.debug(f"Descriptor data: {descriptor_data}")
            validation = validation_im()
            desc_type, descriptor_dict = validation.yaml_validation(descriptor_data)
            try:
                # NOTE(review): pyangbind_validation is called unbound with this
                # OSMRepo instance as "self"; it appears not to rely on
                # Validation instance state — confirm before refactoring.
                validation_im.pyangbind_validation(self, desc_type, descriptor_dict)
            except Exception as e:
                self._logger.error(e, exc_info=True)
                raise e
            # The descriptor's first root key decides the package type; "nst"
            # must be tested before "ns" since "nst..." also contains "ns".
            descriptor_type_ref = list(descriptor_dict.keys())[0].lower()
            if "vnf" in descriptor_type_ref:
                package_type = "vnf"
            elif "nst" in descriptor_type_ref:
                package_type = "nst"
            elif "ns" in descriptor_type_ref:
                package_type = "ns"
            else:
                msg = f"Unknown package type {descriptor_type_ref}"
                self._logger.error(msg)
                raise ValueError(msg)
            self._logger.debug("Descriptor: {}".format(descriptor_dict))
            fields = self.fields_building(descriptor_dict, path, package_type)
            self._logger.debug(f"Descriptor successfully validated {fields}")
            return (
                {
                    "detail": "{}D successfully validated".format(package_type.upper()),
                    "code": "OK",
                },
                True,
                fields,
                package_type,
            )
        except Exception as e:
            # Delete the folder we just created
            # Any failure is reported back as (details, False, {}, type)
            # instead of propagating to the caller.
            return {"detail": str(e)}, False, {}, package_type
        finally:
            if folder:
                rmtree(folder, ignore_errors=True)

405 

406 def register_package_in_repository(self, path, origin, destination, kind): 

407 """ 

408 Registration of one artifact in a repository 

409 :param path: absolute path of the VNF/NS package 

410 :param origin: folder where the package is located 

411 :param destination: path for index creation 

412 :param kind: artifact (tar.gz) or directory 

413 """ 

414 self._logger.debug("") 

415 pt = PackageTool() 

416 compressed = False 

417 try: 

418 fields = {} 

419 _, valid, fields, package_type = self.validate_artifact(path, origin, kind) 

420 if not valid: 

421 raise Exception( 

422 "{} {} Not well configured.".format(package_type.upper(), str(path)) 

423 ) 

424 else: 

425 if kind == "directory": 

426 path = pt.build(path) 

427 self._logger.debug(f"Directory path {path}") 

428 compressed = True 

429 fields["checksum"] = utils.md5(path) 

430 self.indexation(destination, path, package_type, fields) 

431 

432 except Exception as e: 

433 self._logger.exception( 

434 "Error registering package in Repository: {}".format(e) 

435 ) 

436 raise ClientException(e) 

437 

438 finally: 

439 if kind == "directory" and compressed: 

440 remove(path) 

441 

442 def indexation(self, destination, path, package_type, fields): 

443 """ 

444 Process for index packages 

445 :param destination: index repository path 

446 :param path: path of the package 

447 :param package_type: package type (vnf, ns, nst) 

448 :param fields: dict with the required values 

449 """ 

450 self._logger.debug(f"Processing {destination} {path} {package_type} {fields}") 

451 

452 data_ind = { 

453 "name": fields.get("name"), 

454 "description": fields.get("description"), 

455 "vendor": fields.get("vendor"), 

456 "path": fields.get("path"), 

457 } 

458 self._logger.debug(data_ind) 

459 final_path = join( 

460 destination, package_type, fields.get("id"), fields.get("version") 

461 ) 

462 if isdir(join(destination, package_type, fields.get("id"))): 

463 if isdir(final_path): 

464 self._logger.warning( 

465 "{} {} already exists".format(package_type.upper(), str(path)) 

466 ) 

467 else: 

468 mkdir(final_path) 

469 copyfile( 

470 path, 

471 final_path 

472 + "/" 

473 + fields.get("id") 

474 + "-" 

475 + fields.get("version") 

476 + ".tar.gz", 

477 ) 

478 yaml.safe_dump( 

479 fields, 

480 open(final_path + "/" + "metadata.yaml", "w"), 

481 default_flow_style=False, 

482 width=80, 

483 indent=4, 

484 ) 

485 index = yaml.safe_load(open(destination + "/index.yaml")) 

486 

487 index["{}_packages".format(package_type)][fields.get("id")][ 

488 fields.get("version") 

489 ] = data_ind 

490 if versioning.parse( 

491 index["{}_packages".format(package_type)][fields.get("id")][ 

492 "latest" 

493 ] 

494 ) < versioning.parse(fields.get("version")): 

495 index["{}_packages".format(package_type)][fields.get("id")][ 

496 "latest" 

497 ] = fields.get("version") 

498 yaml.safe_dump( 

499 index, 

500 open(destination + "/index.yaml", "w"), 

501 default_flow_style=False, 

502 width=80, 

503 indent=4, 

504 ) 

505 self._logger.info( 

506 "{} {} added in the repository".format( 

507 package_type.upper(), str(path) 

508 ) 

509 ) 

510 else: 

511 mkdir(destination + "/{}/".format(package_type) + fields.get("id")) 

512 mkdir(final_path) 

513 copyfile( 

514 path, 

515 final_path 

516 + "/" 

517 + fields.get("id") 

518 + "-" 

519 + fields.get("version") 

520 + ".tar.gz", 

521 ) 

522 yaml.safe_dump( 

523 fields, 

524 open(join(final_path, "metadata.yaml"), "w"), 

525 default_flow_style=False, 

526 width=80, 

527 indent=4, 

528 ) 

529 index = yaml.safe_load(open(destination + "/index.yaml")) 

530 

531 index["{}_packages".format(package_type)][fields.get("id")] = { 

532 fields.get("version"): data_ind 

533 } 

534 index["{}_packages".format(package_type)][fields.get("id")]["latest"] = ( 

535 fields.get("version") 

536 ) 

537 yaml.safe_dump( 

538 index, 

539 open(join(destination, "index.yaml"), "w"), 

540 default_flow_style=False, 

541 width=80, 

542 indent=4, 

543 ) 

544 self._logger.info( 

545 "{} {} added in the repository".format(package_type.upper(), str(path)) 

546 ) 

547 

548 def current_datetime(self): 

549 """ 

550 Datetime Generator 

551 :return: Datetime as string with the following structure "2020-04-29T08:41:07.681653Z" 

552 """ 

553 self._logger.debug("") 

554 return time.strftime("%Y-%m-%dT%H:%M:%S.%sZ") 

555 

556 def init_directory(self, destination): 

557 """ 

558 Initialize the index directory. Creation of index.yaml, and the directories for vnf and ns 

559 :param destination: 

560 :return: 

561 """ 

562 self._logger.debug("") 

563 if not isdir(destination): 

564 mkdir(destination) 

565 if not isfile(join(destination, "index.yaml")): 

566 mkdir(join(destination, "vnf")) 

567 mkdir(join(destination, "ns")) 

568 mkdir(join(destination, "nst")) 

569 index_data = { 

570 "apiVersion": "v1", 

571 "generated": self.current_datetime(), 

572 "vnf_packages": {}, 

573 "ns_packages": {}, 

574 "nst_packages": {}, 

575 } 

576 with open(join(destination, "index.yaml"), "w") as outfile: 

577 yaml.safe_dump( 

578 index_data, outfile, default_flow_style=False, width=80, indent=4 

579 )