1 |
|
# Copyright 2019 Canonical |
2 |
|
# |
3 |
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may |
4 |
|
# not use this file except in compliance with the License. You may obtain |
5 |
|
# a copy of the License at |
6 |
|
# |
7 |
|
# http://www.apache.org/licenses/LICENSE-2.0 |
8 |
|
# |
9 |
|
# Unless required by applicable law or agreed to in writing, software |
10 |
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT |
11 |
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the |
12 |
|
# License for the specific language governing permissions and limitations |
13 |
|
# under the License. |
14 |
|
# |
15 |
|
# For those usages not covered by the Apache License, Version 2.0 please |
16 |
|
# contact: eduardo.sousa@canonical.com |
17 |
|
## |
18 |
|
|
19 |
1 |
from io import BytesIO |
20 |
1 |
import logging |
21 |
1 |
import os |
22 |
1 |
from pathlib import Path |
23 |
1 |
import subprocess |
24 |
1 |
import tarfile |
25 |
1 |
import tempfile |
26 |
1 |
from unittest.mock import Mock |
27 |
|
|
28 |
1 |
from gridfs import GridFSBucket |
29 |
1 |
from osm_common.fsbase import FsException |
30 |
1 |
from osm_common.fsmongo import FsMongo |
31 |
1 |
from pymongo import MongoClient |
32 |
1 |
import pytest |
33 |
|
|
34 |
1 |
# Maintainer contact for this test module (see license header for terms).
__author__ = "Eduardo Sousa <eduardo.sousa@canonical.com>"
35 |
|
|
36 |
|
|
37 |
1 |
def valid_path():
    """Return an existing, writable directory path ending with a slash."""
    return "{}/".format(tempfile.gettempdir())
39 |
|
|
40 |
|
|
41 |
1 |
def invalid_path():
    """Return a path that is guaranteed not to exist on the filesystem."""
    bogus_segment = "#tweeter"
    return "/" + bogus_segment + "/"
43 |
|
|
44 |
|
|
45 |
1 |
@pytest.fixture(scope="function", params=[True, False])
def fs_mongo(request, monkeypatch):
    """Yield an FsMongo (with and without locking) connected to mocked backends.

    MongoClient and GridFSBucket construction/lookup are replaced with no-ops
    so fs_connect() never touches a real database.
    """

    def _noop(a, b):
        # Shared stand-in: accepts the (self, arg) signature and does nothing.
        pass

    monkeypatch.setattr(MongoClient, "__init__", _noop)
    monkeypatch.setattr(MongoClient, "__getitem__", _noop)
    monkeypatch.setattr(GridFSBucket, "__init__", _noop)
    fs = FsMongo(lock=request.param)
    fs.fs_connect({"path": valid_path(), "uri": "mongo:27017", "collection": "files"})
    return fs
62 |
|
|
63 |
|
|
64 |
1 |
def generic_fs_exception_message(message):
    """Return the text FsException produces for a plain storage error."""
    prefix = "storage exception"
    return "{} {}".format(prefix, message)
66 |
|
|
67 |
|
|
68 |
1 |
def fs_connect_exception_message(path):
    """Return the FsException text raised when the storage *path* is missing."""
    template = (
        "storage exception Invalid configuration param at '[storage]': "
        "path '{}' does not exist"
    )
    return template.format(path)
72 |
|
|
73 |
|
|
74 |
1 |
def file_open_file_not_found_exception(storage):
    """Expected FsException text when file_open() targets a missing file.

    *storage* may be a single path string or a sequence of path components.
    """
    target = "/".join(storage) if not isinstance(storage, str) else storage
    return "storage exception File {} does not exist".format(target)
77 |
|
|
78 |
|
|
79 |
1 |
def file_open_io_exception(storage):
    """Expected FsException text when file_open() hits an I/O error.

    *storage* may be a single path string or a sequence of path components.
    """
    target = "/".join(storage) if not isinstance(storage, str) else storage
    return "storage exception File {} cannot be opened".format(target)
82 |
|
|
83 |
|
|
84 |
1 |
def dir_ls_not_a_directory_exception(storage):
    """Expected FsException text when dir_ls() targets a missing entry.

    *storage* may be a single path string or a sequence of path components.
    """
    target = "/".join(storage) if not isinstance(storage, str) else storage
    return "storage exception File {} does not exist".format(target)
87 |
|
|
88 |
|
|
89 |
1 |
def dir_ls_io_exception(storage):
    """Expected FsException text when dir_ls() hits an I/O error.

    *storage* may be a single path string or a sequence of path components.
    """
    target = "/".join(storage) if not isinstance(storage, str) else storage
    return "storage exception File {} cannot be opened".format(target)
92 |
|
|
93 |
|
|
94 |
1 |
def file_delete_exception_message(storage):
    """Expected FsException text when file_delete() targets a missing file."""
    detail = "File {} does not exist".format(storage)
    return "storage exception {}".format(detail)
96 |
|
|
97 |
|
|
98 |
1 |
def test_constructor_without_logger():
    """A default-constructed FsMongo uses the 'fs' logger and no connection state."""
    fs = FsMongo()
    assert fs.logger == logging.getLogger("fs")
    for attr in ("path", "client", "fs"):
        assert getattr(fs, attr) is None
104 |
|
|
105 |
|
|
106 |
1 |
def test_constructor_with_logger():
    """FsMongo must adopt the caller-supplied logger name and stay unconnected."""
    logger_name = "fs_mongo"
    fs = FsMongo(logger_name=logger_name)
    assert fs.logger == logging.getLogger(logger_name)
    for attr in ("path", "client", "fs"):
        assert getattr(fs, attr) is None
113 |
|
|
114 |
|
|
115 |
1 |
def test_get_params(fs_mongo, monkeypatch):
    """get_params() must report the backend type and the configured path."""
    # An empty find() result keeps get_params() from touching real GridFS data.
    monkeypatch.setattr(GridFSBucket, "find", lambda self, search_query, **kwargs: [])
    params = fs_mongo.get_params()
    assert len(params) == 2
    assert params.get("fs") == "mongo"
    assert params.get("path") == valid_path()
126 |
|
|
127 |
|
|
128 |
1 |
@pytest.mark.parametrize(
    "config, exp_logger, exp_path",
    [
        # Paths are accepted with or without the trailing slash; fs_connect()
        # normalizes them to the slash-terminated form.
        ({"logger_name": "fs_mongo", "path": valid_path(), "uri": "mongo:27017", "collection": "files"}, "fs_mongo", valid_path()),
        ({"logger_name": "fs_mongo", "path": valid_path()[:-1], "uri": "mongo:27017", "collection": "files"}, "fs_mongo", valid_path()),
        ({"path": valid_path(), "uri": "mongo:27017", "collection": "files"}, "fs", valid_path()),
        ({"path": valid_path()[:-1], "uri": "mongo:27017", "collection": "files"}, "fs", valid_path()),
    ],
)
def test_fs_connect_with_valid_config(config, exp_logger, exp_path):
    """fs_connect() accepts valid configs and wires up client and bucket."""
    fs = FsMongo()
    fs.fs_connect(config)
    assert fs.logger == logging.getLogger(exp_logger)
    assert fs.path == exp_path
    assert type(fs.client) is MongoClient
    assert type(fs.fs) is GridFSBucket
170 |
|
|
171 |
|
|
172 |
1 |
@pytest.mark.parametrize(
    "config, exp_exception_message",
    [
        ({"logger_name": "fs_mongo", "path": invalid_path(), "uri": "mongo:27017", "collection": "files"}, fs_connect_exception_message(invalid_path())),
        ({"logger_name": "fs_mongo", "path": invalid_path()[:-1], "uri": "mongo:27017", "collection": "files"}, fs_connect_exception_message(invalid_path()[:-1])),
        ({"path": invalid_path(), "uri": "mongo:27017", "collection": "files"}, fs_connect_exception_message(invalid_path())),
        ({"path": invalid_path()[:-1], "uri": "mongo:27017", "collection": "files"}, fs_connect_exception_message(invalid_path()[:-1])),
        # "/" exists but is not writable for a regular user.
        ({"path": "/", "uri": "mongo:27017", "collection": "files"}, generic_fs_exception_message("Invalid configuration param at '[storage]': path '/' is not writable")),
    ],
)
def test_fs_connect_with_invalid_path(config, exp_exception_message):
    """fs_connect() must reject missing or unwritable storage paths."""
    fs = FsMongo()
    with pytest.raises(FsException) as raised:
        fs.fs_connect(config)
    assert str(raised.value) == exp_exception_message
214 |
|
|
215 |
|
|
216 |
1 |
@pytest.mark.parametrize(
    "config, exp_exception_message",
    [
        ({"logger_name": "fs_mongo", "uri": "mongo:27017", "collection": "files"}, 'Missing parameter "path"'),
        ({"logger_name": "fs_mongo", "path": valid_path(), "collection": "files"}, 'Missing parameters: "uri"'),
        ({"logger_name": "fs_mongo", "path": valid_path(), "uri": "mongo:27017"}, 'Missing parameter "collection"'),
    ],
)
def test_fs_connect_with_missing_parameters(config, exp_exception_message):
    """fs_connect() must name the missing config key in its FsException."""
    fs = FsMongo()
    with pytest.raises(FsException) as raised:
        fs.fs_connect(config)
    assert str(raised.value) == generic_fs_exception_message(exp_exception_message)
238 |
|
|
239 |
|
|
240 |
1 |
@pytest.mark.parametrize(
    "config, exp_exception_message",
    [
        ({"logger_name": "fs_mongo", "path": valid_path(), "uri": "mongo:27017", "collection": "files"}, "MongoClient crashed"),
    ],
)
def test_fs_connect_with_invalid_mongoclient(
    config, exp_exception_message, monkeypatch
):
    """An exception from MongoClient() must surface as FsException."""

    def _explode(a, b=None):
        raise Exception(exp_exception_message)

    monkeypatch.setattr(MongoClient, "__init__", _explode)

    fs = FsMongo()
    with pytest.raises(FsException) as raised:
        fs.fs_connect(config)
    assert str(raised.value) == generic_fs_exception_message(exp_exception_message)
266 |
|
|
267 |
|
|
268 |
1 |
@pytest.mark.parametrize(
    "config, exp_exception_message",
    [
        ({"logger_name": "fs_mongo", "path": valid_path(), "uri": "mongo:27017", "collection": "files"}, "Collection unavailable"),
    ],
)
def test_fs_connect_with_invalid_mongo_collection(
    config, exp_exception_message, monkeypatch
):
    """An exception from the collection lookup must surface as FsException."""

    def _noop_init(a, b=None):
        pass

    def _explode(a, b):
        raise Exception(exp_exception_message)

    monkeypatch.setattr(MongoClient, "__init__", _noop_init)
    monkeypatch.setattr(MongoClient, "__getitem__", _explode)

    fs = FsMongo()
    with pytest.raises(FsException) as raised:
        fs.fs_connect(config)
    assert str(raised.value) == generic_fs_exception_message(exp_exception_message)
298 |
|
|
299 |
|
|
300 |
1 |
@pytest.mark.parametrize(
    "config, exp_exception_message",
    [
        ({"logger_name": "fs_mongo", "path": valid_path(), "uri": "mongo:27017", "collection": "files"}, "GridFsBucket crashed"),
    ],
)
def test_fs_connect_with_invalid_gridfsbucket(
    config, exp_exception_message, monkeypatch
):
    """An exception from GridFSBucket() must surface as FsException."""

    def _noop_init(a, b=None):
        pass

    def _noop_getitem(a, b):
        pass

    def _explode(a, b):
        raise Exception(exp_exception_message)

    monkeypatch.setattr(MongoClient, "__init__", _noop_init)
    monkeypatch.setattr(MongoClient, "__getitem__", _noop_getitem)
    monkeypatch.setattr(GridFSBucket, "__init__", _explode)

    fs = FsMongo()
    with pytest.raises(FsException) as raised:
        fs.fs_connect(config)
    assert str(raised.value) == generic_fs_exception_message(exp_exception_message)
334 |
|
|
335 |
|
|
336 |
1 |
def test_fs_disconnect(fs_mongo):
    """fs_disconnect() must succeed on a connected (mocked) instance."""
    fs_mongo.fs_disconnect()
338 |
|
|
339 |
|
|
340 |
|
# Example.tar.gz |
341 |
|
# example_tar/ |
342 |
|
# ├── directory |
343 |
|
# │ └── file |
344 |
|
# └── symlinks |
345 |
|
# ├── directory_link -> ../directory/ |
346 |
|
# └── file_link -> ../directory/file |
347 |
1 |
class FakeCursor:
    """Minimal stand-in for a GridFS cursor entry.

    Exposes only the attributes FsMongo reads: the object id, the stored
    filename and its metadata dict.
    """

    def __init__(self, id, filename, metadata):
        self.filename = filename
        self.metadata = metadata
        self._id = id
352 |
|
|
353 |
|
|
354 |
1 |
class FakeFS:
    """In-memory GridFSBucket double seeded with the Example.tar.gz layout.

    ``tar_info`` describes every member of the archive pictured above:
    its cursor, metadata, raw content and the local path it maps to. The
    *_bad entries are deliberately wrong payloads used to prove the
    content comparison in upload_from_stream() is meaningful.
    """

    directory_metadata = {"type": "dir", "permissions": 509}
    file_metadata = {"type": "file", "permissions": 436}
    symlink_metadata = {"type": "sym", "permissions": 511}

    tar_info = {
        1: {
            "cursor": FakeCursor(1, "example_tar", directory_metadata),
            "metadata": directory_metadata,
            "stream_content": b"",
            "stream_content_bad": b"Something",
            "path": "./tmp/example_tar",
        },
        2: {
            "cursor": FakeCursor(2, "example_tar/directory", directory_metadata),
            "metadata": directory_metadata,
            "stream_content": b"",
            "stream_content_bad": b"Something",
            "path": "./tmp/example_tar/directory",
        },
        3: {
            "cursor": FakeCursor(3, "example_tar/symlinks", directory_metadata),
            "metadata": directory_metadata,
            "stream_content": b"",
            "stream_content_bad": b"Something",
            "path": "./tmp/example_tar/symlinks",
        },
        4: {
            "cursor": FakeCursor(4, "example_tar/directory/file", file_metadata),
            "metadata": file_metadata,
            "stream_content": b"Example test",
            "stream_content_bad": b"Example test2",
            "path": "./tmp/example_tar/directory/file",
        },
        5: {
            "cursor": FakeCursor(5, "example_tar/symlinks/file_link", symlink_metadata),
            "metadata": symlink_metadata,
            "stream_content": b"../directory/file",
            "stream_content_bad": b"",
            "path": "./tmp/example_tar/symlinks/file_link",
        },
        6: {
            "cursor": FakeCursor(
                6, "example_tar/symlinks/directory_link", symlink_metadata
            ),
            "metadata": symlink_metadata,
            "stream_content": b"../directory/",
            "stream_content_bad": b"",
            "path": "./tmp/example_tar/symlinks/directory_link",
        },
    }

    def upload_from_stream(self, f, stream, metadata=None):
        """Assert *stream* and *metadata* match the entry registered for path *f*."""
        matched = False
        for entry in self.tar_info.values():
            if f != entry["path"]:
                continue
            assert metadata["type"] == entry["metadata"]["type"]
            assert stream.read() == BytesIO(entry["stream_content"]).read()
            stream.seek(0)
            # Same stream must NOT equal the deliberately wrong payload.
            assert stream.read() != BytesIO(entry["stream_content_bad"]).read()
            matched = True
        assert matched

    def find(self, type, no_cursor_timeout=True, sort=None):
        """Return the cursors matching the ``metadata.type`` filter.

        A "dir" filter selects directory entries; any other value selects
        everything that is not a directory (files and symlinks alike).
        """
        want_dir = type["metadata.type"] == "dir"
        return [
            entry["cursor"]
            for entry in self.tar_info.values()
            if (entry["metadata"] == self.directory_metadata) == want_dir
        ]

    def download_to_stream(self, id, file_stream):
        """Write the registered raw content for object *id* into *file_stream*."""
        file_stream.write(self.tar_info[id]["stream_content"])
431 |
|
|
432 |
|
|
433 |
1 |
def test_file_extract():
    """file_extract() must upload every member of a tar archive via GridFS.

    Builds a real tar.gz on disk (two directories, a file and two symlinks,
    matching FakeFS.tar_info), then extracts it through FsMongo backed by
    FakeFS, whose upload_from_stream() asserts content and metadata for
    every member.
    """
    tar_path = "tmp/Example.tar.gz"
    folder_path = "tmp/example_tar"

    # Generate package
    subprocess.call(["rm", "-rf", "./tmp"])
    # Use the stdlib instead of shelling out to `mkdir -p`.
    os.makedirs("{}/directory".format(folder_path))
    os.makedirs("{}/symlinks".format(folder_path))
    p = Path("{}/directory/file".format(folder_path))
    p.write_text("Example test")
    os.symlink("../directory/file", "{}/symlinks/file_link".format(folder_path))
    os.symlink("../directory/", "{}/symlinks/directory_link".format(folder_path))
    if os.path.exists(tar_path):
        os.remove(tar_path)
    subprocess.call(["tar", "-czvf", tar_path, folder_path])

    try:
        # Context manager closes the archive even on failure; the original
        # leaked the tarfile handle.
        with tarfile.open(tar_path, "r") as tar:
            fs = FsMongo()
            fs.fs = FakeFS()
            fs.file_extract(compressed_object=tar, path=".")
    finally:
        os.remove(tar_path)
        subprocess.call(["rm", "-rf", "./tmp"])
457 |
|
|
458 |
|
|
459 |
1 |
def test_upload_local_fs():
    """sync() must materialize the FakeFS tree on the local filesystem."""
    path = "./tmp/"

    subprocess.call(["rm", "-rf", path])
    try:
        fs = FsMongo()
        fs.path = path
        fs.fs = FakeFS()
        fs.sync()
        base = "{}example_tar".format(path)
        assert os.path.isdir(base)
        for sub in ("directory", "symlinks"):
            assert os.path.isdir("{}/{}".format(base, sub))
        assert os.path.isfile("{}/directory/file".format(base))
        for link in ("file_link", "directory_link"):
            assert os.path.islink("{}/symlinks/{}".format(base, link))
    finally:
        subprocess.call(["rm", "-rf", path])
476 |
|
|
477 |
|
|
478 |
1 |
def test_upload_mongo_fs():
    """reverse_sync() must upload one dir entry and one file entry to GridFS."""
    path = "./tmp/"

    subprocess.call(["rm", "-rf", path])
    try:
        fs = FsMongo()
        fs.path = path
        fs.fs = Mock()
        # Empty find() result: nothing pre-exists in the (mocked) bucket.
        fs.fs.find.return_value = {}

        file_content = "Test file content"

        # Create local dir and upload content to the mocked GridFS.
        # makedirs replaces the original chain of three os.mkdir calls.
        os.makedirs("{}example_local/directory".format(path))
        with open(
            "{}example_local/directory/test_file".format(path), "w+"
        ) as test_file:
            test_file.write(file_content)
        fs.reverse_sync("example_local")

        assert fs.fs.upload_from_stream.call_count == 2

        # first call to upload_from_stream, dir_name
        dir_name = "example_local/directory"
        call_args_0 = fs.fs.upload_from_stream.call_args_list[0]
        assert call_args_0[0][0] == dir_name
        assert call_args_0[1].get("metadata").get("type") == "dir"

        # second call to upload_from_stream, file_name
        file_name = "example_local/directory/test_file"
        call_args_1 = fs.fs.upload_from_stream.call_args_list[1]
        assert call_args_1[0][0] == file_name
        assert call_args_1[1].get("metadata").get("type") == "file"

    finally:
        # Original ended with a dead `pass` statement here; removed.
        subprocess.call(["rm", "-rf", path])