RUN python3 -m easy_install pip==21.0.1
RUN pip3 install tox==3.22.0
+
+ENV LC_ALL C.UTF-8
+ENV LANG C.UTF-8
# See the License for the specific language governing permissions and
# limitations under the License.
-version = '7.0.0.post4'
-date_version = '2019-01-21'
+version = "7.0.0.post4"
+date_version = "2019-01-21"
# try to get version from installed package. Skip if fails
try:
from pkg_resources import get_distribution
+
version = get_distribution("osm_common").version
except Exception:
pass
class FakeLock:
"""Implements a fake lock that can be called with the "with" statement or acquire, release methods"""
+
def __enter__(self):
pass
class DbException(Exception):
-
def __init__(self, message, http_code=HTTPStatus.NOT_FOUND):
self.http_code = http_code
Exception.__init__(self, "database exception " + str(message))
class DbBase(object):
-
- def __init__(self, logger_name='db', lock=False):
+ def __init__(self, logger_name="db", lock=False):
"""
Constructor of dbBase
:param logger_name: logging name
"""
raise DbException("Method 'create_list' not implemented")
- def set_one(self, table, q_filter, update_dict, fail_on_empty=True, unset=None, pull=None, push=None,
- push_list=None, pull_list=None):
+ def set_one(
+ self,
+ table,
+ q_filter,
+ update_dict,
+ fail_on_empty=True,
+ unset=None,
+ pull=None,
+ push=None,
+ push_list=None,
+ pull_list=None,
+ ):
"""
Modifies an entry at database
:param table: collection or table
"""
raise DbException("Method 'set_one' not implemented")
- def set_list(self, table, q_filter, update_dict, unset=None, pull=None, push=None, push_list=None, pull_list=None):
+ def set_list(
+ self,
+ table,
+ q_filter,
+ update_dict,
+ unset=None,
+ pull=None,
+ push=None,
+ push_list=None,
+ pull_list=None,
+ ):
"""
Modifies al matching entries at database
:param table: collection or table
else:
update_key_bytes = update_key
- new_secret_key = bytearray(self.secret_key) if self.secret_key else bytearray(32)
+ new_secret_key = (
+ bytearray(self.secret_key) if self.secret_key else bytearray(32)
+ )
for i, b in enumerate(update_key_bytes):
new_secret_key[i % 32] ^= b
return bytes(new_secret_key)
:return: Encrypted content of value
"""
self.get_secret_key()
- if not self.secret_key or not schema_version or schema_version == '1.0':
+ if not self.secret_key or not schema_version or schema_version == "1.0":
return value
else:
secret_key = self._join_secret_key(salt)
cipher = AES.new(secret_key)
- padded_private_msg = value + ('\0' * ((16-len(value)) % 16))
+ padded_private_msg = value + ("\0" * ((16 - len(value)) % 16))
encrypted_msg = cipher.encrypt(padded_private_msg)
encoded_encrypted_msg = b64encode(encrypted_msg)
return encoded_encrypted_msg.decode("ascii")
:return: Plain content of value
"""
self.get_secret_key()
- if not self.secret_key or not schema_version or schema_version == '1.0':
+ if not self.secret_key or not schema_version or schema_version == "1.0":
return value
else:
secret_key = self._join_secret_key(salt)
cipher = AES.new(secret_key)
decrypted_msg = cipher.decrypt(encrypted_msg)
try:
- unpadded_private_msg = decrypted_msg.decode().rstrip('\0')
+ unpadded_private_msg = decrypted_msg.decode().rstrip("\0")
except UnicodeDecodeError:
- raise DbException("Cannot decrypt information. Are you using same COMMONKEY in all OSM components?",
- http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise DbException(
+ "Cannot decrypt information. Are you using same COMMONKEY in all OSM components?",
+ http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
+ )
return unpadded_private_msg
- def encrypt_decrypt_fields(self, item, action, fields=None, flags=None, schema_version=None, salt=None):
+ def encrypt_decrypt_fields(
+ self, item, action, fields=None, flags=None, schema_version=None, salt=None
+ ):
if not fields:
return
self.get_secret_key()
- actions = ['encrypt', 'decrypt']
+ actions = ["encrypt", "decrypt"]
if action.lower() not in actions:
- raise DbException("Unknown action ({}): Must be one of {}".format(action, actions),
- http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
- method = self.encrypt if action.lower() == 'encrypt' else self.decrypt
+ raise DbException(
+ "Unknown action ({}): Must be one of {}".format(action, actions),
+ http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
+ )
+ method = self.encrypt if action.lower() == "encrypt" else self.decrypt
if flags is None:
flags = re.I
_item[key] = method(val, schema_version, salt)
else:
process(val)
+
process(item)
:param key_list: This is used internally for recursive calls. Do not fill this parameter.
:return: none or raises and exception only at array modification when there is a bad format or conflict.
"""
+
def _deep_update_array(array_to_change, _dict_reference, _key_list):
to_append = {}
to_insert_at_index = {}
_key_list[-1] = str(k)
if not isinstance(k, str) or not k.startswith("$"):
if array_edition is True:
- raise DbException("Found array edition (keys starting with '$') and pure dictionary edition in the"
- " same dict at '{}'".format(":".join(_key_list[:-1])))
+ raise DbException(
+ "Found array edition (keys starting with '$') and pure dictionary edition in the"
+ " same dict at '{}'".format(":".join(_key_list[:-1]))
+ )
array_edition = False
continue
else:
if array_edition is False:
- raise DbException("Found array edition (keys starting with '$') and pure dictionary edition in the"
- " same dict at '{}'".format(":".join(_key_list[:-1])))
+ raise DbException(
+ "Found array edition (keys starting with '$') and pure dictionary edition in the"
+ " same dict at '{}'".format(":".join(_key_list[:-1]))
+ )
array_edition = True
insert = False
indexes = [] # indexes to edit or insert
kitem = k[1:]
- if kitem.startswith('+'):
+ if kitem.startswith("+"):
insert = True
kitem = kitem[1:]
if _dict_reference[k] is None:
- raise DbException("A value of None has not sense for insertions at '{}'".format(
- ":".join(_key_list)))
+ raise DbException(
+ "A value of None has not sense for insertions at '{}'".format(
+ ":".join(_key_list)
+ )
+ )
- if kitem.startswith('[') and kitem.endswith(']'):
+ if kitem.startswith("[") and kitem.endswith("]"):
try:
index = int(kitem[1:-1])
if index < 0:
index = 0 # skip outside index edition
indexes.append(index)
except Exception:
- raise DbException("Wrong format at '{}'. Expecting integer index inside quotes".format(
- ":".join(_key_list)))
+ raise DbException(
+ "Wrong format at '{}'. Expecting integer index inside quotes".format(
+ ":".join(_key_list)
+ )
+ )
elif kitem:
# match_found_skip = False
try:
filter_in = yaml.safe_load(kitem)
except Exception:
- raise DbException("Wrong format at '{}'. Expecting '$<yaml-format>'".format(":".join(_key_list)))
+ raise DbException(
+ "Wrong format at '{}'. Expecting '$<yaml-format>'".format(
+ ":".join(_key_list)
+ )
+ )
if isinstance(filter_in, dict):
for index, item in enumerate(array_to_change):
for filter_k, filter_v in filter_in.items():
- if not isinstance(item, dict) or filter_k not in item or item[filter_k] != filter_v:
+ if (
+ not isinstance(item, dict)
+ or filter_k not in item
+ or item[filter_k] != filter_v
+ ):
break
else: # match found
if insert:
# if match_found_skip:
# continue
elif not insert:
- raise DbException("Wrong format at '{}'. Expecting '$+', '$[<index]' or '$[<filter>]'".format(
- ":".join(_key_list)))
+ raise DbException(
+ "Wrong format at '{}'. Expecting '$+', '$[<index]' or '$[<filter>]'".format(
+ ":".join(_key_list)
+ )
+ )
for index in indexes:
if insert:
- if index in to_insert_at_index and to_insert_at_index[index] != _dict_reference[k]:
+ if (
+ index in to_insert_at_index
+ and to_insert_at_index[index] != _dict_reference[k]
+ ):
# Several different insertions on the same item of the array
- raise DbException("Conflict at '{}'. Several insertions on same array index {}".format(
- ":".join(_key_list), index))
+ raise DbException(
+ "Conflict at '{}'. Several insertions on same array index {}".format(
+ ":".join(_key_list), index
+ )
+ )
to_insert_at_index[index] = _dict_reference[k]
else:
- if index in indexes_to_edit_delete and values_to_edit_delete[index] != _dict_reference[k]:
+ if (
+ index in indexes_to_edit_delete
+ and values_to_edit_delete[index] != _dict_reference[k]
+ ):
# Several different editions on the same item of the array
- raise DbException("Conflict at '{}'. Several editions on array index {}".format(
- ":".join(_key_list), index))
+ raise DbException(
+ "Conflict at '{}'. Several editions on array index {}".format(
+ ":".join(_key_list), index
+ )
+ )
indexes_to_edit_delete.append(index)
values_to_edit_delete[index] = _dict_reference[k]
if not indexes:
try:
if values_to_edit_delete[index] is None: # None->Anything
try:
- del (array_to_change[index])
+ del array_to_change[index]
except IndexError:
pass # it is not consider an error if this index does not exist
- elif not isinstance(values_to_edit_delete[index], dict): # NotDict->Anything
+ elif not isinstance(
+ values_to_edit_delete[index], dict
+ ): # NotDict->Anything
array_to_change[index] = deepcopy(values_to_edit_delete[index])
elif isinstance(array_to_change[index], dict): # Dict->Dict
- deep_update_rfc7396(array_to_change[index], values_to_edit_delete[index], _key_list)
+ deep_update_rfc7396(
+ array_to_change[index], values_to_edit_delete[index], _key_list
+ )
else: # Dict->NotDict
- if isinstance(array_to_change[index], list): # Dict->List. Check extra array edition
- if _deep_update_array(array_to_change[index], values_to_edit_delete[index], _key_list):
+ if isinstance(
+ array_to_change[index], list
+ ): # Dict->List. Check extra array edition
+ if _deep_update_array(
+ array_to_change[index],
+ values_to_edit_delete[index],
+ _key_list,
+ ):
continue
array_to_change[index] = deepcopy(values_to_edit_delete[index])
# calling deep_update_rfc7396 to delete the None values
- deep_update_rfc7396(array_to_change[index], values_to_edit_delete[index], _key_list)
+ deep_update_rfc7396(
+ array_to_change[index], values_to_edit_delete[index], _key_list
+ )
except IndexError:
- raise DbException("Array edition index out of range at '{}'".format(":".join(_key_list)))
+ raise DbException(
+ "Array edition index out of range at '{}'".format(
+ ":".join(_key_list)
+ )
+ )
# insertion with indexes
to_insert_indexes = list(to_insert_at_index.keys())
key_list.append("")
for k in dict_reference:
key_list[-1] = str(k)
- if dict_reference[k] is None: # None->Anything
+ if dict_reference[k] is None: # None->Anything
if k in dict_to_change:
del dict_to_change[k]
elif not isinstance(dict_reference[k], dict): # NotDict->Anything
deep_update_rfc7396(dict_to_change[k], dict_reference[k], key_list)
elif isinstance(dict_to_change[k], dict): # Dict->Dict
deep_update_rfc7396(dict_to_change[k], dict_reference[k], key_list)
- else: # Dict->NotDict
- if isinstance(dict_to_change[k], list): # Dict->List. Check extra array edition
+ else: # Dict->NotDict
+ if isinstance(
+ dict_to_change[k], list
+ ): # Dict->List. Check extra array edition
if _deep_update_array(dict_to_change[k], dict_reference[k], key_list):
continue
dict_to_change[k] = deepcopy(dict_reference[k])
def deep_update(dict_to_change, dict_reference):
- """ Maintained for backward compatibility. Use deep_update_rfc7396 instead"""
+ """Maintained for backward compatibility. Use deep_update_rfc7396 instead"""
return deep_update_rfc7396(dict_to_change, dict_reference)
class DbMemory(DbBase):
-
- def __init__(self, logger_name='db', lock=False):
+ def __init__(self, logger_name="db", lock=False):
super().__init__(logger_name, lock)
self.db = {}
return db_filter
def _find(self, table, q_filter):
-
def recursive_find(key_list, key_next_index, content, oper, target):
if key_next_index == len(key_list) or content is None:
try:
if oper in ("eq", "cont"):
if isinstance(target, list):
if isinstance(content, list):
- return any(content_item in target for content_item in content)
+ return any(
+ content_item in target for content_item in content
+ )
return content in target
elif isinstance(content, list):
return target in content
elif oper in ("neq", "ne", "ncont"):
if isinstance(target, list):
if isinstance(content, list):
- return all(content_item not in target for content_item in content)
+ return all(
+ content_item not in target
+ for content_item in content
+ )
return content not in target
elif isinstance(content, list):
return target not in content
elif oper == "lte":
return content <= target
else:
- raise DbException("Unknown filter operator '{}' in key '{}'".
- format(oper, ".".join(key_list)), http_code=HTTPStatus.BAD_REQUEST)
+ raise DbException(
+ "Unknown filter operator '{}' in key '{}'".format(
+ oper, ".".join(key_list)
+ ),
+ http_code=HTTPStatus.BAD_REQUEST,
+ )
except TypeError:
return False
elif isinstance(content, dict):
- return recursive_find(key_list, key_next_index + 1, content.get(key_list[key_next_index]), oper,
- target)
+ return recursive_find(
+ key_list,
+ key_next_index + 1,
+ content.get(key_list[key_next_index]),
+ oper,
+ target,
+ )
elif isinstance(content, list):
look_for_match = True # when there is a match return immediately
- if (target is None) != (oper in ("neq", "ne", "ncont")): # one True and other False (Xor)
- look_for_match = False # when there is not a match return immediately
+ if (target is None) != (
+ oper in ("neq", "ne", "ncont")
+ ): # one True and other False (Xor)
+ look_for_match = (
+ False # when there is not a match return immediately
+ )
for content_item in content:
if key_list[key_next_index] == "ANYINDEX" and isinstance(v, dict):
for k2, v2 in target.items():
k_new_list = k2.split(".")
new_operator = "eq"
- if k_new_list[-1] in ("eq", "ne", "gt", "gte", "lt", "lte", "cont", "ncont", "neq"):
+ if k_new_list[-1] in (
+ "eq",
+ "ne",
+ "gt",
+ "gte",
+ "lt",
+ "lte",
+ "cont",
+ "ncont",
+ "neq",
+ ):
new_operator = k_new_list.pop()
- if not recursive_find(k_new_list, 0, content_item, new_operator, v2):
+ if not recursive_find(
+ k_new_list, 0, content_item, new_operator, v2
+ ):
matches = False
break
else:
- matches = recursive_find(key_list, key_next_index, content_item, oper, target)
+ matches = recursive_find(
+ key_list, key_next_index, content_item, oper, target
+ )
if matches == look_for_match:
return matches
- if key_list[key_next_index].isdecimal() and int(key_list[key_next_index]) < len(content):
- matches = recursive_find(key_list, key_next_index + 1, content[int(key_list[key_next_index])],
- oper, target)
+ if key_list[key_next_index].isdecimal() and int(
+ key_list[key_next_index]
+ ) < len(content):
+ matches = recursive_find(
+ key_list,
+ key_next_index + 1,
+ content[int(key_list[key_next_index])],
+ oper,
+ target,
+ )
if matches == look_for_match:
return matches
return not look_for_match
for k, v in q_filter.items():
k_list = k.split(".")
operator = "eq"
- if k_list[-1] in ("eq", "ne", "gt", "gte", "lt", "lte", "cont", "ncont", "neq"):
+ if k_list[-1] in (
+ "eq",
+ "ne",
+ "gt",
+ "gte",
+ "lt",
+ "lte",
+ "cont",
+ "ncont",
+ "neq",
+ ):
operator = k_list.pop()
matches = recursive_find(k_list, 0, row, operator, v)
if not matches:
if not fail_on_more:
return deepcopy(row)
if result:
- raise DbException("Found more than one entry with filter='{}'".format(q_filter),
- HTTPStatus.CONFLICT.value)
+ raise DbException(
+ "Found more than one entry with filter='{}'".format(
+ q_filter
+ ),
+ HTTPStatus.CONFLICT.value,
+ )
result = row
if not result and fail_on_empty:
- raise DbException("Not found entry with filter='{}'".format(q_filter), HTTPStatus.NOT_FOUND)
+ raise DbException(
+ "Not found entry with filter='{}'".format(q_filter),
+ HTTPStatus.NOT_FOUND,
+ )
return deepcopy(result)
except Exception as e: # TODO refine
raise DbException(str(e))
break
else:
if fail_on_empty:
- raise DbException("Not found entry with filter='{}'".format(q_filter), HTTPStatus.NOT_FOUND)
+ raise DbException(
+ "Not found entry with filter='{}'".format(q_filter),
+ HTTPStatus.NOT_FOUND,
+ )
return None
del self.db[table][i]
return {"deleted": 1}
except Exception as e: # TODO refine
raise DbException(str(e))
- def _update(self, db_item, update_dict, unset=None, pull=None, push=None, push_list=None, pull_list=None):
+ def _update(
+ self,
+ db_item,
+ update_dict,
+ unset=None,
+ pull=None,
+ push=None,
+ push_list=None,
+ pull_list=None,
+ ):
"""
Modifies an entry at database
:param db_item: entry of the table to update
whole array
:return: True if database has been changed, False if not; Exception on error
"""
+
def _iterate_keys(k, db_nested, populate=True):
k_list = k.split(".")
k_item_prev = k_list[0]
if isinstance(db_nested[k_item_prev], dict):
if k_item not in db_nested[k_item_prev]:
if not populate:
- raise DbException("Cannot set '{}', not existing '{}'".format(k, k_item))
+ raise DbException(
+ "Cannot set '{}', not existing '{}'".format(k, k_item)
+ )
populated = True
db_nested[k_item_prev][k_item] = None
elif isinstance(db_nested[k_item_prev], list) and k_item.isdigit():
k_item = int(k_item)
if k_item >= len(db_nested[k_item_prev]):
if not populate:
- raise DbException("Cannot set '{}', index too large '{}'".format(k, k_item))
+ raise DbException(
+ "Cannot set '{}', index too large '{}'".format(
+ k, k_item
+ )
+ )
populated = True
- db_nested[k_item_prev] += [None] * (k_item - len(db_nested[k_item_prev]) + 1)
+ db_nested[k_item_prev] += [None] * (
+ k_item - len(db_nested[k_item_prev]) + 1
+ )
elif db_nested[k_item_prev] is None:
if not populate:
- raise DbException("Cannot set '{}', not existing '{}'".format(k, k_item))
+ raise DbException(
+ "Cannot set '{}', not existing '{}'".format(k, k_item)
+ )
populated = True
db_nested[k_item_prev] = {k_item: None}
else: # number, string, boolean, ... or list but with not integer key
- raise DbException("Cannot set '{}' on existing '{}={}'".format(k, k_item_prev,
- db_nested[k_item_prev]))
+ raise DbException(
+ "Cannot set '{}' on existing '{}={}'".format(
+ k, k_item_prev, db_nested[k_item_prev]
+ )
+ )
db_nested = db_nested[k_item_prev]
k_item_prev = k_item
return db_nested, k_item_prev, populated
if unset:
for dot_k in unset:
try:
- dict_to_update, key_to_update, _ = _iterate_keys(dot_k, db_item, populate=False)
+ dict_to_update, key_to_update, _ = _iterate_keys(
+ dot_k, db_item, populate=False
+ )
del dict_to_update[key_to_update]
updated = True
except Exception:
if pull:
for dot_k, v in pull.items():
try:
- dict_to_update, key_to_update, _ = _iterate_keys(dot_k, db_item, populate=False)
+ dict_to_update, key_to_update, _ = _iterate_keys(
+ dot_k, db_item, populate=False
+ )
except Exception:
continue
if key_to_update not in dict_to_update:
continue
if not isinstance(dict_to_update[key_to_update], list):
- raise DbException("Cannot pull '{}'. Target is not a list".format(dot_k))
+ raise DbException(
+ "Cannot pull '{}'. Target is not a list".format(dot_k)
+ )
while v in dict_to_update[key_to_update]:
dict_to_update[key_to_update].remove(v)
updated = True
if pull_list:
for dot_k, v in pull_list.items():
if not isinstance(v, list):
- raise DbException("Invalid content at pull_list, '{}' must be an array".format(dot_k),
- http_code=HTTPStatus.BAD_REQUEST)
+ raise DbException(
+ "Invalid content at pull_list, '{}' must be an array".format(
+ dot_k
+ ),
+ http_code=HTTPStatus.BAD_REQUEST,
+ )
try:
- dict_to_update, key_to_update, _ = _iterate_keys(dot_k, db_item, populate=False)
+ dict_to_update, key_to_update, _ = _iterate_keys(
+ dot_k, db_item, populate=False
+ )
except Exception:
continue
if key_to_update not in dict_to_update:
continue
if not isinstance(dict_to_update[key_to_update], list):
- raise DbException("Cannot pull_list '{}'. Target is not a list".format(dot_k))
+ raise DbException(
+ "Cannot pull_list '{}'. Target is not a list".format(dot_k)
+ )
for single_v in v:
while single_v in dict_to_update[key_to_update]:
dict_to_update[key_to_update].remove(single_v)
updated = True
if push:
for dot_k, v in push.items():
- dict_to_update, key_to_update, populated = _iterate_keys(dot_k, db_item)
- if isinstance(dict_to_update, dict) and key_to_update not in dict_to_update:
+ dict_to_update, key_to_update, populated = _iterate_keys(
+ dot_k, db_item
+ )
+ if (
+ isinstance(dict_to_update, dict)
+ and key_to_update not in dict_to_update
+ ):
dict_to_update[key_to_update] = [v]
updated = True
elif populated and dict_to_update[key_to_update] is None:
dict_to_update[key_to_update] = [v]
updated = True
elif not isinstance(dict_to_update[key_to_update], list):
- raise DbException("Cannot push '{}'. Target is not a list".format(dot_k))
+ raise DbException(
+ "Cannot push '{}'. Target is not a list".format(dot_k)
+ )
else:
dict_to_update[key_to_update].append(v)
updated = True
if push_list:
for dot_k, v in push_list.items():
if not isinstance(v, list):
- raise DbException("Invalid content at push_list, '{}' must be an array".format(dot_k),
- http_code=HTTPStatus.BAD_REQUEST)
- dict_to_update, key_to_update, populated = _iterate_keys(dot_k, db_item)
- if isinstance(dict_to_update, dict) and key_to_update not in dict_to_update:
+ raise DbException(
+ "Invalid content at push_list, '{}' must be an array".format(
+ dot_k
+ ),
+ http_code=HTTPStatus.BAD_REQUEST,
+ )
+ dict_to_update, key_to_update, populated = _iterate_keys(
+ dot_k, db_item
+ )
+ if (
+ isinstance(dict_to_update, dict)
+ and key_to_update not in dict_to_update
+ ):
dict_to_update[key_to_update] = v.copy()
updated = True
elif populated and dict_to_update[key_to_update] is None:
dict_to_update[key_to_update] = v.copy()
updated = True
elif not isinstance(dict_to_update[key_to_update], list):
- raise DbException("Cannot push '{}'. Target is not a list".format(dot_k),
- http_code=HTTPStatus.CONFLICT)
+ raise DbException(
+ "Cannot push '{}'. Target is not a list".format(dot_k),
+ http_code=HTTPStatus.CONFLICT,
+ )
else:
dict_to_update[key_to_update] += v
updated = True
except Exception as e: # TODO refine
raise DbException(str(e))
- def set_one(self, table, q_filter, update_dict, fail_on_empty=True, unset=None, pull=None, push=None,
- push_list=None, pull_list=None):
+ def set_one(
+ self,
+ table,
+ q_filter,
+ update_dict,
+ fail_on_empty=True,
+ unset=None,
+ pull=None,
+ push=None,
+ push_list=None,
+ pull_list=None,
+ ):
"""
Modifies an entry at database
:param table: collection or table
"""
with self.lock:
for i, db_item in self._find(table, self._format_filter(q_filter)):
- updated = self._update(db_item, update_dict, unset=unset, pull=pull, push=push, push_list=push_list,
- pull_list=pull_list)
+ updated = self._update(
+ db_item,
+ update_dict,
+ unset=unset,
+ pull=pull,
+ push=push,
+ push_list=push_list,
+ pull_list=pull_list,
+ )
return {"updated": 1 if updated else 0}
else:
if fail_on_empty:
- raise DbException("Not found entry with _id='{}'".format(q_filter), HTTPStatus.NOT_FOUND)
+ raise DbException(
+ "Not found entry with _id='{}'".format(q_filter),
+ HTTPStatus.NOT_FOUND,
+ )
return None
- def set_list(self, table, q_filter, update_dict, unset=None, pull=None, push=None, push_list=None, pull_list=None):
+ def set_list(
+ self,
+ table,
+ q_filter,
+ update_dict,
+ unset=None,
+ pull=None,
+ push=None,
+ push_list=None,
+ pull_list=None,
+ ):
"""Modifies al matching entries at database. Same as push. Do not fail if nothing matches"""
with self.lock:
updated = 0
found = 0
for _, db_item in self._find(table, self._format_filter(q_filter)):
found += 1
- if self._update(db_item, update_dict, unset=unset, pull=pull, push=push, push_list=push_list,
- pull_list=pull_list):
+ if self._update(
+ db_item,
+ update_dict,
+ unset=unset,
+ pull=pull,
+ push=push,
+ push_list=push_list,
+ pull_list=pull_list,
+ ):
updated += 1
# if not found and fail_on_empty:
# raise DbException("Not found entry with '{}'".format(q_filter), HTTPStatus.NOT_FOUND)
break
else:
if fail_on_empty:
- raise DbException("Not found entry with _id='{}'".format(_id), HTTPStatus.NOT_FOUND)
+ raise DbException(
+ "Not found entry with _id='{}'".format(_id),
+ HTTPStatus.NOT_FOUND,
+ )
return None
self.db[table][i] = deepcopy(indata)
return {"updated": 1}
raise DbException(str(e))
-if __name__ == '__main__':
+if __name__ == "__main__":
# some test code
db = DbMemory()
db.create("test", {"_id": 1, "data": 1})
conn_initial_timout = 120
conn_timout = 10
- def __init__(self, logger_name='db', lock=False):
+ def __init__(self, logger_name="db", lock=False):
super().__init__(logger_name, lock)
self.client = None
self.db = None
self.secret_key = None
if self.database_key:
self.set_secret_key(self.database_key)
- version_data = self.get_one("admin", {"_id": "version"}, fail_on_empty=False, fail_on_more=True)
+ version_data = self.get_one(
+ "admin", {"_id": "version"}, fail_on_empty=False, fail_on_more=True
+ )
if version_data and version_data.get("serial"):
self.set_secret_key(b64decode(version_data["serial"]))
self.secret_obtained = True
self.database_key = master_key
self.set_secret_key(master_key)
if config.get("uri"):
- self.client = MongoClient(config["uri"], replicaSet=config.get("replicaset", None))
+ self.client = MongoClient(
+ config["uri"], replicaSet=config.get("replicaset", None)
+ )
else:
- self.client = MongoClient(config["host"], config["port"], replicaSet=config.get("replicaset", None))
+ self.client = MongoClient(
+ config["host"],
+ config["port"],
+ replicaSet=config.get("replicaset", None),
+ )
# TODO add as parameters also username=config.get("user"), password=config.get("password"))
# when all modules are ready
self.db = self.client[config["name"]]
if "loglevel" in config:
- self.logger.setLevel(getattr(logging, config['loglevel']))
+ self.logger.setLevel(getattr(logging, config["loglevel"]))
# get data to try a connection
now = time()
while True:
try:
- version_data = self.get_one("admin", {"_id": "version"}, fail_on_empty=False, fail_on_more=True)
+ version_data = self.get_one(
+ "admin",
+ {"_id": "version"},
+ fail_on_empty=False,
+ fail_on_more=True,
+ )
# check database status is ok
- if version_data and version_data.get("status") != 'ENABLED':
- raise DbException("Wrong database status '{}'".format(version_data.get("status")),
- http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
+ if version_data and version_data.get("status") != "ENABLED":
+ raise DbException(
+ "Wrong database status '{}'".format(
+ version_data.get("status")
+ ),
+ http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
+ )
# check version
- db_version = None if not version_data else version_data.get("version")
+ db_version = (
+ None if not version_data else version_data.get("version")
+ )
if target_version and target_version != db_version:
- raise DbException("Invalid database version {}. Expected {}".format(db_version, target_version))
+ raise DbException(
+ "Invalid database version {}. Expected {}".format(
+ db_version, target_version
+ )
+ )
# get serial
if version_data and version_data.get("serial"):
self.secret_obtained = True
self.set_secret_key(b64decode(version_data["serial"]))
- self.logger.info("Connected to database {} version {}".format(config["name"], db_version))
+ self.logger.info(
+ "Connected to database {} version {}".format(
+ config["name"], db_version
+ )
+ )
return
except errors.ConnectionFailure as e:
if time() - now >= self.conn_initial_timout:
return db_filter
for query_k, query_v in q_filter.items():
dot_index = query_k.rfind(".")
- if dot_index > 1 and query_k[dot_index+1:] in ("eq", "ne", "gt", "gte", "lt", "lte", "cont",
- "ncont", "neq"):
- operator = "$" + query_k[dot_index + 1:]
+ if dot_index > 1 and query_k[dot_index + 1 :] in (
+ "eq",
+ "ne",
+ "gt",
+ "gte",
+ "lt",
+ "lte",
+ "cont",
+ "ncont",
+ "neq",
+ ):
+ operator = "$" + query_k[dot_index + 1 :]
if operator == "$neq":
operator = "$ne"
k = query_k[:dot_index]
return db_filter
except Exception as e:
- raise DbException("Invalid query string filter at {}:{}. Error: {}".format(query_k, v, e),
- http_code=HTTPStatus.BAD_REQUEST)
+ raise DbException(
+ "Invalid query string filter at {}:{}. Error: {}".format(query_k, v, e),
+ http_code=HTTPStatus.BAD_REQUEST,
+ )
def get_list(self, table, q_filter=None):
"""
rows = collection.find(db_filter)
if rows.count() == 0:
if fail_on_empty:
- raise DbException("Not found any {} with filter='{}'".format(table[:-1], q_filter),
- HTTPStatus.NOT_FOUND)
+ raise DbException(
+ "Not found any {} with filter='{}'".format(
+ table[:-1], q_filter
+ ),
+ HTTPStatus.NOT_FOUND,
+ )
return None
elif rows.count() > 1:
if fail_on_more:
- raise DbException("Found more than one {} with filter='{}'".format(table[:-1], q_filter),
- HTTPStatus.CONFLICT)
+ raise DbException(
+ "Found more than one {} with filter='{}'".format(
+ table[:-1], q_filter
+ ),
+ HTTPStatus.CONFLICT,
+ )
return rows[0]
except Exception as e: # TODO refine
raise DbException(e)
rows = collection.delete_one(self._format_filter(q_filter))
if rows.deleted_count == 0:
if fail_on_empty:
- raise DbException("Not found any {} with filter='{}'".format(table[:-1], q_filter),
- HTTPStatus.NOT_FOUND)
+ raise DbException(
+ "Not found any {} with filter='{}'".format(
+ table[:-1], q_filter
+ ),
+ HTTPStatus.NOT_FOUND,
+ )
return None
return {"deleted": rows.deleted_count}
except Exception as e: # TODO refine
except Exception as e: # TODO refine
raise DbException(e)
- def set_one(self, table, q_filter, update_dict, fail_on_empty=True, unset=None, pull=None, push=None,
- push_list=None, pull_list=None, upsert=False):
+ def set_one(
+ self,
+ table,
+ q_filter,
+ update_dict,
+ fail_on_empty=True,
+ unset=None,
+ pull=None,
+ push=None,
+ push_list=None,
+ pull_list=None,
+ upsert=False,
+ ):
"""
Modifies an entry at database
:param table: collection or table
if pull or pull_list:
db_oper["$pull"] = pull or {}
if pull_list:
- db_oper["$pull"].update({k: {"$in": v} for k, v in pull_list.items()})
+ db_oper["$pull"].update(
+ {k: {"$in": v} for k, v in pull_list.items()}
+ )
if push or push_list:
db_oper["$push"] = push or {}
if push_list:
- db_oper["$push"].update({k: {"$each": v} for k, v in push_list.items()})
+ db_oper["$push"].update(
+ {k: {"$each": v} for k, v in push_list.items()}
+ )
with self.lock:
collection = self.db[table]
- rows = collection.update_one(self._format_filter(q_filter), db_oper, upsert=upsert)
+ rows = collection.update_one(
+ self._format_filter(q_filter), db_oper, upsert=upsert
+ )
if rows.matched_count == 0:
if fail_on_empty:
- raise DbException("Not found any {} with filter='{}'".format(table[:-1], q_filter),
- HTTPStatus.NOT_FOUND)
+ raise DbException(
+ "Not found any {} with filter='{}'".format(
+ table[:-1], q_filter
+ ),
+ HTTPStatus.NOT_FOUND,
+ )
return None
return {"modified": rows.modified_count}
except Exception as e: # TODO refine
raise DbException(e)
- def set_list(self, table, q_filter, update_dict, unset=None, pull=None, push=None, push_list=None, pull_list=None):
+ def set_list(
+ self,
+ table,
+ q_filter,
+ update_dict,
+ unset=None,
+ pull=None,
+ push=None,
+ push_list=None,
+ pull_list=None,
+ ):
"""
Modifies al matching entries at database
:param table: collection or table
if pull or pull_list:
db_oper["$pull"] = pull or {}
if pull_list:
- db_oper["$pull"].update({k: {"$in": v} for k, v in pull_list.items()})
+ db_oper["$pull"].update(
+ {k: {"$in": v} for k, v in pull_list.items()}
+ )
if push or push_list:
db_oper["$push"] = push or {}
if push_list:
- db_oper["$push"].update({k: {"$each": v} for k, v in push_list.items()})
+ db_oper["$push"].update(
+ {k: {"$each": v} for k, v in push_list.items()}
+ )
with self.lock:
collection = self.db[table]
rows = collection.update_many(self._format_filter(q_filter), db_oper)
rows = collection.replace_one(db_filter, indata)
if rows.matched_count == 0:
if fail_on_empty:
- raise DbException("Not found any {} with _id='{}'".format(table[:-1], _id), HTTPStatus.NOT_FOUND)
+ raise DbException(
+ "Not found any {} with _id='{}'".format(table[:-1], _id),
+ HTTPStatus.NOT_FOUND,
+ )
return None
return {"replaced": rows.modified_count}
except Exception as e: # TODO refine
class FsBase(object):
- def __init__(self, logger_name='fs', lock=False):
+ def __init__(self, logger_name="fs", lock=False):
"""
Constructor of FsBase
:param logger_name: logging name
import os
import logging
+
# import tarfile
from http import HTTPStatus
from shutil import rmtree
class FsLocal(FsBase):
-
- def __init__(self, logger_name='fs', lock=False):
+ def __init__(self, logger_name="fs", lock=False):
super().__init__(logger_name, lock)
self.path = None
if not self.path.endswith("/"):
self.path += "/"
if not os.path.exists(self.path):
- raise FsException("Invalid configuration param at '[storage]': path '{}' does not exist".format(
- config["path"]))
+ raise FsException(
+ "Invalid configuration param at '[storage]': path '{}' does not exist".format(
+ config["path"]
+ )
+ )
except FsException:
raise
except Exception as e: # TODO refine
f = "/".join(storage)
return open(self.path + f, mode)
except FileNotFoundError:
- raise FsException("File {} does not exist".format(f), http_code=HTTPStatus.NOT_FOUND)
+ raise FsException(
+ "File {} does not exist".format(f), http_code=HTTPStatus.NOT_FOUND
+ )
except IOError:
- raise FsException("File {} cannot be opened".format(f), http_code=HTTPStatus.BAD_REQUEST)
+ raise FsException(
+ "File {} cannot be opened".format(f), http_code=HTTPStatus.BAD_REQUEST
+ )
def dir_ls(self, storage):
"""
f = "/".join(storage)
return os.listdir(self.path + f)
except NotADirectoryError:
- raise FsException("File {} does not exist".format(f), http_code=HTTPStatus.NOT_FOUND)
+ raise FsException(
+ "File {} does not exist".format(f), http_code=HTTPStatus.NOT_FOUND
+ )
except IOError:
- raise FsException("File {} cannot be opened".format(f), http_code=HTTPStatus.BAD_REQUEST)
+ raise FsException(
+ "File {} cannot be opened".format(f), http_code=HTTPStatus.BAD_REQUEST
+ )
def file_delete(self, storage, ignore_non_exist=False):
"""
if os.path.exists(f):
rmtree(f)
elif not ignore_non_exist:
- raise FsException("File {} does not exist".format(storage), http_code=HTTPStatus.NOT_FOUND)
+ raise FsException(
+ "File {} does not exist".format(storage),
+ http_code=HTTPStatus.NOT_FOUND,
+ )
except (IOError, PermissionError) as e:
- raise FsException("File {} cannot be deleted: {}".format(f, e), http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise FsException(
+ "File {} cannot be deleted: {}".format(f, e),
+ http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
+ )
def sync(self, from_path=None):
pass # Not needed in fslocal
exception_file = next(cursor, None)
if exception_file:
- raise FsException("Multiple files found", http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise FsException(
+ "Multiple files found", http_code=HTTPStatus.INTERNAL_SERVER_ERROR
+ )
if requested_file.metadata["type"] in ("file", "sym"):
grid_file = requested_file
self.file_type = requested_file.metadata["type"]
else:
- raise FsException("Type isn't file", http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise FsException(
+ "Type isn't file", http_code=HTTPStatus.INTERNAL_SERVER_ERROR
+ )
if grid_file:
self._id = grid_file._id
if self._id:
self.fs.delete(self._id)
- cursor = self.fs.find({
- "filename": self.filename.split("/")[0],
- "metadata": {"type": "dir"}})
+ cursor = self.fs.find(
+ {"filename": self.filename.split("/")[0], "metadata": {"type": "dir"}}
+ )
parent_dir = next(cursor, None)
if not parent_dir:
parent_dir_name = self.filename.split("/")[0]
- self.filename = self.filename.replace(parent_dir_name, parent_dir_name[:-1], 1)
+ self.filename = self.filename.replace(
+ parent_dir_name, parent_dir_name[:-1], 1
+ )
self.seek(0, 0)
if self._id:
self.fs.upload_from_stream_with_id(
- self._id,
- self.filename,
- self,
- metadata={"type": self.file_type}
+ self._id, self.filename, self, metadata={"type": self.file_type}
)
else:
self.fs.upload_from_stream(
- self.filename,
- self,
- metadata={"type": self.file_type}
+ self.filename, self, metadata={"type": self.file_type}
)
super(GridByteStream, self).close()
exception_file = next(cursor, None)
if exception_file:
- raise FsException("Multiple files found", http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise FsException(
+ "Multiple files found", http_code=HTTPStatus.INTERNAL_SERVER_ERROR
+ )
if requested_file.metadata["type"] in ("file", "dir"):
grid_file = requested_file
self.file_type = requested_file.metadata["type"]
else:
- raise FsException("File type isn't file", http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise FsException(
+ "File type isn't file", http_code=HTTPStatus.INTERNAL_SERVER_ERROR
+ )
if grid_file:
stream = BytesIO()
if self._id:
self.fs.delete(self._id)
- cursor = self.fs.find({
- "filename": self.filename.split("/")[0],
- "metadata": {"type": "dir"}})
+ cursor = self.fs.find(
+ {"filename": self.filename.split("/")[0], "metadata": {"type": "dir"}}
+ )
parent_dir = next(cursor, None)
if not parent_dir:
parent_dir_name = self.filename.split("/")[0]
- self.filename = self.filename.replace(parent_dir_name, parent_dir_name[:-1], 1)
+ self.filename = self.filename.replace(
+ parent_dir_name, parent_dir_name[:-1], 1
+ )
self.seek(0, 0)
stream = BytesIO()
stream.seek(0, 0)
if self._id:
self.fs.upload_from_stream_with_id(
- self._id,
- self.filename,
- stream,
- metadata={"type": self.file_type}
+ self._id, self.filename, stream, metadata={"type": self.file_type}
)
else:
self.fs.upload_from_stream(
- self.filename,
- stream,
- metadata={"type": self.file_type}
+ self.filename, stream, metadata={"type": self.file_type}
)
stream.close()
super(GridStringStream, self).close()
class FsMongo(FsBase):
-
- def __init__(self, logger_name='fs', lock=False):
+ def __init__(self, logger_name="fs", lock=False):
super().__init__(logger_name, lock)
self.path = None
self.client = None
continue
os.makedirs(self.path + directory.filename, exist_ok=True)
- file_cursor = self.fs.find({"metadata.type": {"$in": ["file", "sym"]}}, no_cursor_timeout=True)
+ file_cursor = self.fs.find(
+ {"metadata.type": {"$in": ["file", "sym"]}}, no_cursor_timeout=True
+ )
for writing_file in file_cursor:
if from_path and not writing_file.filename.startswith(from_path):
raise
os.symlink(link, file_path)
else:
- with open(file_path, 'wb+') as file_stream:
+ with open(file_path, "wb+") as file_stream:
self.fs.download_to_stream(writing_file._id, file_stream)
if "permissions" in writing_file.metadata:
os.chmod(file_path, writing_file.metadata["permissions"])
if "path" in config:
self.path = config["path"]
else:
- raise FsException("Missing parameter \"path\"")
+ raise FsException('Missing parameter "path"')
if not self.path.endswith("/"):
self.path += "/"
if not os.path.exists(self.path):
- raise FsException("Invalid configuration param at '[storage]': path '{}' does not exist".format(
- config["path"]))
+ raise FsException(
+ "Invalid configuration param at '[storage]': path '{}' does not exist".format(
+ config["path"]
+ )
+ )
elif not os.access(self.path, os.W_OK):
- raise FsException("Invalid configuration param at '[storage]': path '{}' is not writable".format(
- config["path"]))
+ raise FsException(
+ "Invalid configuration param at '[storage]': path '{}' is not writable".format(
+ config["path"]
+ )
+ )
if all(key in config.keys() for key in ["uri", "collection"]):
self.client = MongoClient(config["uri"])
self.fs = GridFSBucket(self.client[config["collection"]])
self.fs = GridFSBucket(self.client[config["collection"]])
else:
if "collection" not in config.keys():
- raise FsException("Missing parameter \"collection\"")
+ raise FsException('Missing parameter "collection"')
else:
- raise FsException("Missing parameters: \"uri\" or \"host\" + \"port\"")
+ raise FsException('Missing parameters: "uri" or "host" + "port"')
except FsException:
raise
except Exception as e: # TODO refine
:return: None or raises an exception
"""
try:
- self.fs.upload_from_stream(
- folder, BytesIO(), metadata={"type": "dir"})
+ self.fs.upload_from_stream(folder, BytesIO(), metadata={"type": "dir"})
except errors.FileExists: # make it idempotent
pass
except Exception as e:
"""
try:
dst_cursor = self.fs.find(
- {"filename": {"$regex": "^{}(/|$)".format(dst)}},
- no_cursor_timeout=True)
+ {"filename": {"$regex": "^{}(/|$)".format(dst)}}, no_cursor_timeout=True
+ )
for dst_file in dst_cursor:
self.fs.delete(dst_file._id)
src_cursor = self.fs.find(
- {"filename": {"$regex": "^{}(/|$)".format(src)}},
- no_cursor_timeout=True)
+ {"filename": {"$regex": "^{}(/|$)".format(src)}}, no_cursor_timeout=True
+ )
for src_file in src_cursor:
self.fs.rename(src_file._id, src_file.filename.replace(src, dst, 1))
exception_file = next(cursor, None)
if exception_file:
- raise FsException("Multiple files found", http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise FsException(
+ "Multiple files found", http_code=HTTPStatus.INTERNAL_SERVER_ERROR
+ )
# if no special mode is required just check it does exists
if not mode:
exception_file = next(cursor, None)
if exception_file:
- raise FsException("Multiple files found", http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise FsException(
+ "Multiple files found", http_code=HTTPStatus.INTERNAL_SERVER_ERROR
+ )
return requested_file.length
else:
file_type = "dir"
- metadata = {
- "type": file_type,
- "permissions": member.mode
- }
+ metadata = {"type": file_type, "permissions": member.mode}
- self.fs.upload_from_stream(
- f + "/" + member.name,
- stream,
- metadata=metadata
- )
+ self.fs.upload_from_stream(f + "/" + member.name, stream, metadata=metadata)
stream.close()
else:
return GridStringStream(f, self.fs, mode)
except errors.NoFile:
- raise FsException("File {} does not exist".format(f), http_code=HTTPStatus.NOT_FOUND)
+ raise FsException(
+ "File {} does not exist".format(f), http_code=HTTPStatus.NOT_FOUND
+ )
except IOError:
- raise FsException("File {} cannot be opened".format(f), http_code=HTTPStatus.BAD_REQUEST)
+ raise FsException(
+ "File {} cannot be opened".format(f), http_code=HTTPStatus.BAD_REQUEST
+ )
def dir_ls(self, storage):
"""
exception_dir = next(dir_cursor, None)
if exception_dir:
- raise FsException("Multiple directories found", http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise FsException(
+ "Multiple directories found",
+ http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
+ )
if requested_dir.metadata["type"] != "dir":
- raise FsException("File {} does not exist".format(f), http_code=HTTPStatus.NOT_FOUND)
+ raise FsException(
+ "File {} does not exist".format(f),
+ http_code=HTTPStatus.NOT_FOUND,
+ )
- files_cursor = self.fs.find({"filename": {"$regex": "^{}/([^/])*".format(f)}})
+ files_cursor = self.fs.find(
+ {"filename": {"$regex": "^{}/([^/])*".format(f)}}
+ )
for children_file in files_cursor:
- files += [children_file.filename.replace(f + '/', '', 1)]
+ files += [children_file.filename.replace(f + "/", "", 1)]
return files
except IOError:
- raise FsException("File {} cannot be opened".format(f), http_code=HTTPStatus.BAD_REQUEST)
+ raise FsException(
+ "File {} cannot be opened".format(f), http_code=HTTPStatus.BAD_REQUEST
+ )
def file_delete(self, storage, ignore_non_exist=False):
"""
exception_file = next(file_cursor, None)
if exception_file:
- raise FsException("Multiple files found", http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise FsException(
+ "Multiple files found",
+ http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
+ )
if requested_file.metadata["type"] == "dir":
dir_cursor = self.fs.find({"filename": {"$regex": "^{}".format(f)}})
else:
self.fs.delete(requested_file._id)
if not found and not ignore_non_exist:
- raise FsException("File {} does not exist".format(storage), http_code=HTTPStatus.NOT_FOUND)
+ raise FsException(
+ "File {} does not exist".format(storage),
+ http_code=HTTPStatus.NOT_FOUND,
+ )
except IOError as e:
- raise FsException("File {} cannot be deleted: {}".format(f, e), http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise FsException(
+ "File {} cannot be deleted: {}".format(f, e),
+ http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
+ )
def sync(self, from_path=None):
"""
members = []
for root, dirs, files in os.walk(os_path):
for folder in dirs:
- member = {
- "filename": os.path.join(root, folder),
- "type": "dir"
- }
+ member = {"filename": os.path.join(root, folder), "type": "dir"}
members.append(member)
for file in files:
filename = os.path.join(root, file)
file_type = "sym"
else:
file_type = "file"
- member = {
- "filename": os.path.join(root, file),
- "type": file_type
- }
+ member = {"filename": os.path.join(root, file), "type": file_type}
members.append(member)
# Obtain files in mongo dict
# convert to relative path
rel_filename = os.path.relpath(member["filename"], self.path)
- last_modified_date = datetime.datetime.fromtimestamp(os.path.getmtime(member["filename"]))
+ last_modified_date = datetime.datetime.fromtimestamp(
+ os.path.getmtime(member["filename"])
+ )
remote_file = remote_files.get(rel_filename)
- upload_date = remote_file[0].uploadDate if remote_file else datetime.datetime.min
+ upload_date = (
+ remote_file[0].uploadDate if remote_file else datetime.datetime.min
+ )
# remove processed files from dict
remote_files.pop(rel_filename, None)
if file_type == "dir":
stream = BytesIO()
elif file_type == "sym":
- stream = BytesIO(os.readlink(member["filename"]).encode("utf-8"))
+ stream = BytesIO(
+ os.readlink(member["filename"]).encode("utf-8")
+ )
else:
fh = open(member["filename"], "rb")
stream = BytesIO(fh.read())
- metadata = {
- "type": file_type,
- "permissions": mask
- }
+ metadata = {"type": file_type, "permissions": mask}
- self.fs.upload_from_stream(
- rel_filename,
- stream,
- metadata=metadata
- )
+ self.fs.upload_from_stream(rel_filename, stream, metadata=metadata)
# delete old files
if remote_file:
def _get_mongo_files(self, from_path=None):
file_dict = {}
- file_cursor = self.fs.find(no_cursor_timeout=True, sort=[('uploadDate', -1)])
+ file_cursor = self.fs.find(no_cursor_timeout=True, sort=[("uploadDate", -1)])
for file in file_cursor:
if from_path and not file.filename.startswith(from_path):
continue
Base class for all msgXXXX classes
"""
- def __init__(self, logger_name='msg', lock=False):
+ def __init__(self, logger_name="msg", lock=False):
"""
        Constructor of MsgBase
:param logger_name: logging name
pass
def write(self, topic, key, msg):
- raise MsgException("Method 'write' not implemented", http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise MsgException(
+ "Method 'write' not implemented", http_code=HTTPStatus.INTERNAL_SERVER_ERROR
+ )
def read(self, topic):
- raise MsgException("Method 'read' not implemented", http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise MsgException(
+ "Method 'read' not implemented", http_code=HTTPStatus.INTERNAL_SERVER_ERROR
+ )
async def aiowrite(self, topic, key, msg, loop=None):
- raise MsgException("Method 'aiowrite' not implemented", http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
+ raise MsgException(
+ "Method 'aiowrite' not implemented",
+ http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
+ )
- async def aioread(self, topic, loop=None, callback=None, aiocallback=None, group_id=None, **kwargs):
- raise MsgException("Method 'aioread' not implemented", http_code=HTTPStatus.INTERNAL_SERVER_ERROR)
+ async def aioread(
+ self, topic, loop=None, callback=None, aiocallback=None, group_id=None, **kwargs
+ ):
+ raise MsgException(
+ "Method 'aioread' not implemented",
+ http_code=HTTPStatus.INTERNAL_SERVER_ERROR,
+ )
from aiokafka.errors import KafkaError
from osm_common.msgbase import MsgBase, MsgException
-__author__ = "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>, " \
- "Guillermo Calvino <guillermo.calvinosanchez@altran.com>"
+__author__ = (
+ "Alfonso Tierno <alfonso.tiernosepulveda@telefonica.com>, "
+ "Guillermo Calvino <guillermo.calvinosanchez@altran.com>"
+)
class MsgKafka(MsgBase):
- def __init__(self, logger_name='msg', lock=False):
+ def __init__(self, logger_name="msg", lock=False):
super().__init__(logger_name, lock)
self.host = None
self.port = None
:param msg: message content, can be string or dictionary
:return: None or raises MsgException on failing
"""
- retry = 2 # Try two times
+ retry = 2 # Try two times
while retry:
try:
- self.loop.run_until_complete(self.aiowrite(topic=topic, key=key, msg=msg))
+ self.loop.run_until_complete(
+ self.aiowrite(topic=topic, key=key, msg=msg)
+ )
break
except Exception as e:
retry -= 1
if retry == 0:
- raise MsgException("Error writing {} topic: {}".format(topic, str(e)))
+ raise MsgException(
+ "Error writing {} topic: {}".format(topic, str(e))
+ )
def read(self, topic):
"""
if not loop:
loop = self.loop
try:
- self.producer = AIOKafkaProducer(loop=loop, key_serializer=str.encode, value_serializer=str.encode,
- bootstrap_servers=self.broker)
+ self.producer = AIOKafkaProducer(
+ loop=loop,
+ key_serializer=str.encode,
+ value_serializer=str.encode,
+ bootstrap_servers=self.broker,
+ )
await self.producer.start()
- await self.producer.send(topic=topic, key=key, value=yaml.safe_dump(msg, default_flow_style=True))
+ await self.producer.send(
+ topic=topic, key=key, value=yaml.safe_dump(msg, default_flow_style=True)
+ )
except Exception as e:
- raise MsgException("Error publishing topic '{}', key '{}': {}".format(topic, key, e))
+ raise MsgException(
+ "Error publishing topic '{}', key '{}': {}".format(topic, key, e)
+ )
finally:
await self.producer.stop()
- async def aioread(self, topic, loop=None, callback=None, aiocallback=None, group_id=None, from_beginning=None,
- **kwargs):
+ async def aioread(
+ self,
+ topic,
+ loop=None,
+ callback=None,
+ aiocallback=None,
+ group_id=None,
+ from_beginning=None,
+ **kwargs
+ ):
"""
Asyncio read from one or several topics.
:param topic: can be str: single topic; or str list: several topics
topic_list = topic
else:
topic_list = (topic,)
- self.consumer = AIOKafkaConsumer(loop=loop, bootstrap_servers=self.broker, group_id=group_id,
- auto_offset_reset="earliest" if from_beginning else "latest")
+ self.consumer = AIOKafkaConsumer(
+ loop=loop,
+ bootstrap_servers=self.broker,
+ group_id=group_id,
+ auto_offset_reset="earliest" if from_beginning else "latest",
+ )
await self.consumer.start()
self.consumer.subscribe(topic_list)
async for message in self.consumer:
if callback:
- callback(message.topic, yaml.safe_load(message.key), yaml.safe_load(message.value), **kwargs)
+ callback(
+ message.topic,
+ yaml.safe_load(message.key),
+ yaml.safe_load(message.value),
+ **kwargs
+ )
elif aiocallback:
- await aiocallback(message.topic, yaml.safe_load(message.key), yaml.safe_load(message.value),
- **kwargs)
+ await aiocallback(
+ message.topic,
+ yaml.safe_load(message.key),
+ yaml.safe_load(message.value),
+ **kwargs
+ )
else:
- return message.topic, yaml.safe_load(message.key), yaml.safe_load(message.value)
+ return (
+ message.topic,
+ yaml.safe_load(message.key),
+ yaml.safe_load(message.value),
+ )
except KafkaError as e:
raise MsgException(str(e))
finally:
class MsgLocal(MsgBase):
-
- def __init__(self, logger_name='msg', lock=False):
+ def __init__(self, logger_name="msg", lock=False):
super().__init__(logger_name, lock)
self.path = None
# create a different file for each topic
with self.lock:
if topic not in self.files_write:
self.files_write[topic] = open(self.path + topic, "a+")
- yaml.safe_dump({key: msg}, self.files_write[topic], default_flow_style=True, width=20000)
+ yaml.safe_dump(
+ {key: msg},
+ self.files_write[topic],
+ default_flow_style=True,
+ width=20000,
+ )
self.files_write[topic].flush()
except Exception as e: # TODO refine
raise MsgException(str(e), HTTPStatus.INTERNAL_SERVER_ERROR)
if isinstance(topic, (list, tuple)):
topic_list = topic
else:
- topic_list = (topic, )
+ topic_list = (topic,)
while True:
for single_topic in topic_list:
with self.lock:
if single_topic not in self.files_read:
- self.files_read[single_topic] = open(self.path + single_topic, "a+")
+ self.files_read[single_topic] = open(
+ self.path + single_topic, "a+"
+ )
self.buffer[single_topic] = ""
- self.buffer[single_topic] += self.files_read[single_topic].readline()
+ self.buffer[single_topic] += self.files_read[
+ single_topic
+ ].readline()
if not self.buffer[single_topic].endswith("\n"):
continue
msg_dict = yaml.safe_load(self.buffer[single_topic])
except Exception as e: # TODO refine
raise MsgException(str(e), HTTPStatus.INTERNAL_SERVER_ERROR)
- async def aioread(self, topic, loop=None, callback=None, aiocallback=None, group_id=None, **kwargs):
+ async def aioread(
+ self, topic, loop=None, callback=None, aiocallback=None, group_id=None, **kwargs
+ ):
"""
Asyncio read from one or several topics. It blocks
:param topic: can be str: single topic; or str list: several topics
import hashlib
-_METADATA_FILE_PATH = 'TOSCA-Metadata/TOSCA.meta'
-_METADATA_DESCRIPTOR_FIELD = 'Entry-Definitions'
-_METADATA_MANIFEST_FIELD = 'ETSI-Entry-Manifest'
-_METADATA_CHANGELOG_FIELD = 'ETSI-Entry-Change-Log'
-_METADATA_LICENSES_FIELD = 'ETSI-Entry-Licenses'
-_METADATA_DEFAULT_CHANGELOG_PATH = 'ChangeLog.txt'
-_METADATA_DEFAULT_LICENSES_PATH = 'Licenses'
-_MANIFEST_FILE_PATH_FIELD = 'Source'
-_MANIFEST_FILE_HASH_ALGORITHM_FIELD = 'Algorithm'
-_MANIFEST_FILE_HASH_DIGEST_FIELD = 'Hash'
+_METADATA_FILE_PATH = "TOSCA-Metadata/TOSCA.meta"
+_METADATA_DESCRIPTOR_FIELD = "Entry-Definitions"
+_METADATA_MANIFEST_FIELD = "ETSI-Entry-Manifest"
+_METADATA_CHANGELOG_FIELD = "ETSI-Entry-Change-Log"
+_METADATA_LICENSES_FIELD = "ETSI-Entry-Licenses"
+_METADATA_DEFAULT_CHANGELOG_PATH = "ChangeLog.txt"
+_METADATA_DEFAULT_LICENSES_PATH = "Licenses"
+_MANIFEST_FILE_PATH_FIELD = "Source"
+_MANIFEST_FILE_HASH_ALGORITHM_FIELD = "Algorithm"
+_MANIFEST_FILE_HASH_DIGEST_FIELD = "Hash"
class SOL004PackageException(Exception):
class SOL004Package:
- def __init__(self, package_path=''):
+ def __init__(self, package_path=""):
self._package_path = package_path
self._package_metadata = self._parse_package_metadata()
self._manifest_data = self._parse_manifest_data()
except FileNotFoundError as e:
raise e
except (Exception, OSError) as e:
- raise SOL004PackageException('Error parsing {}: {}'.format(_METADATA_FILE_PATH, e))
+ raise SOL004PackageException(
+ "Error parsing {}: {}".format(_METADATA_FILE_PATH, e)
+ )
def _parse_package_metadata_without_metadata_dir(self):
package_root_files = {f for f in os.listdir(self._package_path)}
- package_root_yamls = [f for f in package_root_files if f.endswith('.yml') or f.endswith('.yaml')]
+ package_root_yamls = [
+ f for f in package_root_files if f.endswith(".yml") or f.endswith(".yaml")
+ ]
if len(package_root_yamls) != 1:
- error_msg = 'Error parsing package metadata: there should be exactly 1 descriptor YAML, found {}'
+ error_msg = "Error parsing package metadata: there should be exactly 1 descriptor YAML, found {}"
raise SOL004PackageException(error_msg.format(len(package_root_yamls)))
# TODO: Parse extra metadata from descriptor YAML?
- return [{
- _METADATA_DESCRIPTOR_FIELD: package_root_yamls[0],
- _METADATA_MANIFEST_FIELD: '{}.mf'.format(os.path.splitext(package_root_yamls[0])[0]),
- _METADATA_CHANGELOG_FIELD: _METADATA_DEFAULT_CHANGELOG_PATH,
- _METADATA_LICENSES_FIELD: _METADATA_DEFAULT_LICENSES_PATH
- }]
+ return [
+ {
+ _METADATA_DESCRIPTOR_FIELD: package_root_yamls[0],
+ _METADATA_MANIFEST_FIELD: "{}.mf".format(
+ os.path.splitext(package_root_yamls[0])[0]
+ ),
+ _METADATA_CHANGELOG_FIELD: _METADATA_DEFAULT_CHANGELOG_PATH,
+ _METADATA_LICENSES_FIELD: _METADATA_DEFAULT_LICENSES_PATH,
+ }
+ ]
def _parse_manifest_data(self):
manifest_path = None
manifest_path = tosca_meta[_METADATA_MANIFEST_FIELD]
break
else:
- error_msg = 'Error parsing {}: no {} field on path'.format(_METADATA_FILE_PATH, _METADATA_MANIFEST_FIELD)
+ error_msg = "Error parsing {}: no {} field on path".format(
+ _METADATA_FILE_PATH, _METADATA_MANIFEST_FIELD
+ )
raise SOL004PackageException(error_msg)
try:
return self._parse_file_in_blocks(manifest_path)
except (Exception, OSError) as e:
- raise SOL004PackageException('Error parsing {}: {}'.format(manifest_path, e))
+ raise SOL004PackageException(
+ "Error parsing {}: {}".format(manifest_path, e)
+ )
def _get_package_file_full_path(self, file_relative_path):
return os.path.join(self._package_path, file_relative_path)
def _parse_file_in_blocks(self, file_relative_path):
file_path = self._get_package_file_full_path(file_relative_path)
with open(file_path) as f:
- blocks = f.read().split('\n\n')
+ blocks = f.read().split("\n\n")
parsed_blocks = map(yaml.safe_load, blocks)
return [block for block in parsed_blocks if block is not None]
def _get_package_file_manifest_data(self, file_relative_path):
for file_data in self._manifest_data:
- if file_data.get(_MANIFEST_FILE_PATH_FIELD, '') == file_relative_path:
+ if file_data.get(_MANIFEST_FILE_PATH_FIELD, "") == file_relative_path:
return file_data
- error_msg = 'Error parsing {} manifest data: file not found on manifest file'.format(file_relative_path)
+ error_msg = (
+ "Error parsing {} manifest data: file not found on manifest file".format(
+ file_relative_path
+ )
+ )
raise SOL004PackageException(error_msg)
def get_package_file_hash_digest_from_manifest(self, file_relative_path):
try:
return file_manifest_data[_MANIFEST_FILE_HASH_DIGEST_FIELD]
except Exception as e:
- raise SOL004PackageException('Error parsing {} hash digest: {}'.format(file_relative_path, e))
+ raise SOL004PackageException(
+ "Error parsing {} hash digest: {}".format(file_relative_path, e)
+ )
def get_package_file_hash_algorithm_from_manifest(self, file_relative_path):
"""Returns the hash algorithm of a file inside this package as specified on the manifest file."""
try:
return file_manifest_data[_MANIFEST_FILE_HASH_ALGORITHM_FIELD]
except Exception as e:
- raise SOL004PackageException('Error parsing {} hash digest: {}'.format(file_relative_path, e))
+ raise SOL004PackageException(
+ "Error parsing {} hash digest: {}".format(file_relative_path, e)
+ )
@staticmethod
def _get_hash_function_from_hash_algorithm(hash_algorithm):
- function_to_algorithm = {
- 'SHA-256': hashlib.sha256,
- 'SHA-512': hashlib.sha512
- }
+ function_to_algorithm = {"SHA-256": hashlib.sha256, "SHA-512": hashlib.sha512}
if hash_algorithm not in function_to_algorithm:
- error_msg = 'Error checking hash function: hash algorithm {} not supported'.format(hash_algorithm)
+ error_msg = (
+ "Error checking hash function: hash algorithm {} not supported".format(
+ hash_algorithm
+ )
+ )
raise SOL004PackageException(error_msg)
return function_to_algorithm[hash_algorithm]
with open(file_path, "rb") as f:
return hash_function(f.read()).hexdigest()
except Exception as e:
- raise SOL004PackageException('Error hashing {}: {}'.format(file_relative_path, e))
+ raise SOL004PackageException(
+ "Error hashing {}: {}".format(file_relative_path, e)
+ )
def validate_package_file_hash(self, file_relative_path):
"""Validates the integrity of a file using the hash algorithm and digest on the package manifest."""
- hash_algorithm = self.get_package_file_hash_algorithm_from_manifest(file_relative_path)
+ hash_algorithm = self.get_package_file_hash_algorithm_from_manifest(
+ file_relative_path
+ )
file_hash = self._calculate_file_hash(file_relative_path, hash_algorithm)
- expected_file_hash = self.get_package_file_hash_digest_from_manifest(file_relative_path)
+ expected_file_hash = self.get_package_file_hash_digest_from_manifest(
+ file_relative_path
+ )
if file_hash != expected_file_hash:
- error_msg = 'Error validating {} hash: calculated hash {} is different than manifest hash {}'
- raise SOL004PackageException(error_msg.format(file_relative_path, file_hash, expected_file_hash))
+ error_msg = "Error validating {} hash: calculated hash {} is different than manifest hash {}"
+ raise SOL004PackageException(
+ error_msg.format(file_relative_path, file_hash, expected_file_hash)
+ )
def validate_package_hashes(self):
"""Validates the integrity of all files listed on the package manifest."""
if _METADATA_DESCRIPTOR_FIELD in tosca_meta:
return tosca_meta[_METADATA_DESCRIPTOR_FIELD]
- error_msg = 'Error: no {} entry found on {}'.format(_METADATA_DESCRIPTOR_FIELD, _METADATA_FILE_PATH)
+ error_msg = "Error: no {} entry found on {}".format(
+ _METADATA_DESCRIPTOR_FIELD, _METADATA_FILE_PATH
+ )
raise SOL004PackageException(error_msg)
class MyNativeCharm(CharmBase):
-
def __init__(self, framework, key):
super().__init__(framework, key)
# Invalid hash algorithm
Source: Scripts/charms/simple/src/charm.py
Algorithm: SHA-733
-Hash: 7895f7b9e1b7ed5b5bcd64398950ca95b456d7fc973334351474eed466c2f480
+Hash: ea72f897a966e6174ed9164fabc3c500df5a2f712eb6b22ab2408afb07d04d14
# Wrong hash
Source: Scripts/charms/simple/hooks/start
class MyNativeCharm(CharmBase):
-
def __init__(self, framework, key):
super().__init__(framework, key)
Source: Scripts/charms/simple/src/charm.py
Algorithm: SHA-256
-Hash: 7895f7b9e1b7ed5b5bcd64398950ca95b456d7fc973334351474eed466c2f480
+Hash: ea72f897a966e6174ed9164fabc3c500df5a2f712eb6b22ab2408afb07d04d14
Source: Scripts/charms/simple/hooks/start
Algorithm: SHA-256
class MyNativeCharm(CharmBase):
-
def __init__(self, framework, key):
super().__init__(framework, key)
Source: Scripts/charms/simple/src/charm.py
Algorithm: SHA-256
-Hash: 7895f7b9e1b7ed5b5bcd64398950ca95b456d7fc973334351474eed466c2f480
+Hash: ea72f897a966e6174ed9164fabc3c500df5a2f712eb6b22ab2408afb07d04d14
Source: Scripts/charms/simple/hooks/start
Algorithm: SHA-256
def test_db_connect(db_base):
with pytest.raises(DbException) as excinfo:
db_base.db_connect(None)
- assert str(excinfo.value).startswith(exception_message("Method 'db_connect' not implemented"))
+ assert str(excinfo.value).startswith(
+ exception_message("Method 'db_connect' not implemented")
+ )
def test_db_disconnect(db_base):
def test_get_list(db_base):
with pytest.raises(DbException) as excinfo:
db_base.get_list(None, None)
- assert str(excinfo.value).startswith(exception_message("Method 'get_list' not implemented"))
+ assert str(excinfo.value).startswith(
+ exception_message("Method 'get_list' not implemented")
+ )
assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND
def test_get_one(db_base):
with pytest.raises(DbException) as excinfo:
db_base.get_one(None, None, None, None)
- assert str(excinfo.value).startswith(exception_message("Method 'get_one' not implemented"))
+ assert str(excinfo.value).startswith(
+ exception_message("Method 'get_one' not implemented")
+ )
assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND
def test_create(db_base):
with pytest.raises(DbException) as excinfo:
db_base.create(None, None)
- assert str(excinfo.value).startswith(exception_message("Method 'create' not implemented"))
+ assert str(excinfo.value).startswith(
+ exception_message("Method 'create' not implemented")
+ )
assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND
def test_create_list(db_base):
with pytest.raises(DbException) as excinfo:
db_base.create_list(None, None)
- assert str(excinfo.value).startswith(exception_message("Method 'create_list' not implemented"))
+ assert str(excinfo.value).startswith(
+ exception_message("Method 'create_list' not implemented")
+ )
assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND
def test_del_list(db_base):
with pytest.raises(DbException) as excinfo:
db_base.del_list(None, None)
- assert str(excinfo.value).startswith(exception_message("Method 'del_list' not implemented"))
+ assert str(excinfo.value).startswith(
+ exception_message("Method 'del_list' not implemented")
+ )
assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND
def test_del_one(db_base):
with pytest.raises(DbException) as excinfo:
db_base.del_one(None, None, None)
- assert str(excinfo.value).startswith(exception_message("Method 'del_one' not implemented"))
+ assert str(excinfo.value).startswith(
+ exception_message("Method 'del_one' not implemented")
+ )
assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND
for db_base in self.db_bases:
for value, salt in TEST:
# no encryption
- encrypted = db_base.encrypt(value, schema_version='1.0', salt=salt)
- self.assertEqual(encrypted, value, "value '{}' has been encrypted".format(value))
- decrypted = db_base.decrypt(encrypted, schema_version='1.0', salt=salt)
- self.assertEqual(decrypted, value, "value '{}' has been decrypted".format(value))
+ encrypted = db_base.encrypt(value, schema_version="1.0", salt=salt)
+ self.assertEqual(
+ encrypted, value, "value '{}' has been encrypted".format(value)
+ )
+ decrypted = db_base.decrypt(encrypted, schema_version="1.0", salt=salt)
+ self.assertEqual(
+ decrypted, value, "value '{}' has been decrypted".format(value)
+ )
# encrypt/decrypt
- encrypted = db_base.encrypt(value, schema_version='1.1', salt=salt)
- self.assertNotEqual(encrypted, value, "value '{}' has not been encrypted".format(value))
+ encrypted = db_base.encrypt(value, schema_version="1.1", salt=salt)
+ self.assertNotEqual(
+ encrypted, value, "value '{}' has not been encrypted".format(value)
+ )
self.assertIsInstance(encrypted, str, "Encrypted is not ascii text")
- decrypted = db_base.decrypt(encrypted, schema_version='1.1', salt=salt)
- self.assertEqual(decrypted, value, "value is not equal after encryption/decryption")
+ decrypted = db_base.decrypt(encrypted, schema_version="1.1", salt=salt)
+ self.assertEqual(
+ decrypted, value, "value is not equal after encryption/decryption"
+ )
def test_encrypt_decrypt_salt(self):
value = "value to be encrypted!"
for db_base in self.db_bases:
for salt in (None, "salt 1", "1afd5d1a-4a7e-4d9c-8c65-251290183106"):
# encrypt/decrypt
- encrypted.append(db_base.encrypt(value, schema_version='1.1', salt=salt))
- self.assertNotEqual(encrypted[-1], value, "value '{}' has not been encrypted".format(value))
+ encrypted.append(
+ db_base.encrypt(value, schema_version="1.1", salt=salt)
+ )
+ self.assertNotEqual(
+ encrypted[-1],
+ value,
+ "value '{}' has not been encrypted".format(value),
+ )
self.assertIsInstance(encrypted[-1], str, "Encrypted is not ascii text")
- decrypted = db_base.decrypt(encrypted[-1], schema_version='1.1', salt=salt)
- self.assertEqual(decrypted, value, "value is not equal after encryption/decryption")
+ decrypted = db_base.decrypt(
+ encrypted[-1], schema_version="1.1", salt=salt
+ )
+ self.assertEqual(
+ decrypted, value, "value is not equal after encryption/decryption"
+ )
for i in range(0, len(encrypted)):
- for j in range(i+1, len(encrypted)):
- self.assertNotEqual(encrypted[i], encrypted[j],
- "encryption with different salt must contain different result")
+ for j in range(i + 1, len(encrypted)):
+ self.assertNotEqual(
+ encrypted[i],
+ encrypted[j],
+ "encryption with different salt must contain different result",
+ )
# decrypt with a different master key
try:
- decrypted = self.db_bases[-1].decrypt(encrypted[0], schema_version='1.1', salt=None)
- self.assertNotEqual(encrypted[0], decrypted, "Decryption with different KEY must generate different result")
+ decrypted = self.db_bases[-1].decrypt(
+ encrypted[0], schema_version="1.1", salt=None
+ )
+ self.assertNotEqual(
+ encrypted[0],
+ decrypted,
+ "Decryption with different KEY must generate different result",
+ )
except DbException as e:
- self.assertEqual(e.http_code, HTTPStatus.INTERNAL_SERVER_ERROR,
- "Decryption with different KEY does not provide expected http_code")
+ self.assertEqual(
+ e.http_code,
+ HTTPStatus.INTERNAL_SERVER_ERROR,
+ "Decryption with different KEY does not provide expected http_code",
+ )
class TestDeepUpdate(unittest.TestCase):
({"A": ["a", "b", "a"]}, {"A": {"$a": None}}, {"A": ["b"]}),
({"A": ["a", "b", "a"]}, {"A": {"$d": None}}, {"A": ["a", "b", "a"]}),
# delete and insert at 0
- ({"A": ["a", "b", "c"]}, {"A": {"$b": None, "$+[0]": "b"}}, {"A": ["b", "a", "c"]}),
+ (
+ {"A": ["a", "b", "c"]},
+ {"A": {"$b": None, "$+[0]": "b"}},
+ {"A": ["b", "a", "c"]},
+ ),
# delete and edit
- ({"A": ["a", "b", "a"]}, {"A": {"$a": None, "$[1]": {"c": "d"}}}, {"A": [{"c": "d"}]}),
+ (
+ {"A": ["a", "b", "a"]},
+ {"A": {"$a": None, "$[1]": {"c": "d"}}},
+ {"A": [{"c": "d"}]},
+ ),
# insert if not exist
({"A": ["a", "b", "c"]}, {"A": {"$+b": "b"}}, {"A": ["a", "b", "c"]}),
({"A": ["a", "b", "c"]}, {"A": {"$+d": "f"}}, {"A": ["a", "b", "c", "f"]}),
# edit by filter
- ({"A": ["a", "b", "a"]}, {"A": {"$b": {"c": "d"}}}, {"A": ["a", {"c": "d"}, "a"]}),
- ({"A": ["a", "b", "a"]}, {"A": {"$b": None, "$+[0]": "b", "$+": "c"}}, {"A": ["b", "a", "a", "c"]}),
+ (
+ {"A": ["a", "b", "a"]},
+ {"A": {"$b": {"c": "d"}}},
+ {"A": ["a", {"c": "d"}, "a"]},
+ ),
+ (
+ {"A": ["a", "b", "a"]},
+ {"A": {"$b": None, "$+[0]": "b", "$+": "c"}},
+ {"A": ["b", "a", "a", "c"]},
+ ),
({"A": ["a", "b", "a"]}, {"A": {"$c": None}}, {"A": ["a", "b", "a"]}),
# index deletion out of range
({"A": ["a", "b", "a"]}, {"A": {"$[5]": None}}, {"A": ["a", "b", "a"]}),
# nested array->dict
- ({"A": ["a", "b", {"id": "1", "c": {"d": 2}}]}, {"A": {"$id: '1'": {"h": None, "c": {"d": "e", "f": "g"}}}},
- {"A": ["a", "b", {"id": "1", "c": {"d": "e", "f": "g"}}]}),
- ({"A": [{"id": 1, "c": {"d": 2}}, {"id": 1, "c": {"f": []}}]},
- {"A": {"$id: 1": {"h": None, "c": {"d": "e", "f": "g"}}}},
- {"A": [{"id": 1, "c": {"d": "e", "f": "g"}}, {"id": 1, "c": {"d": "e", "f": "g"}}]}),
+ (
+ {"A": ["a", "b", {"id": "1", "c": {"d": 2}}]},
+ {"A": {"$id: '1'": {"h": None, "c": {"d": "e", "f": "g"}}}},
+ {"A": ["a", "b", {"id": "1", "c": {"d": "e", "f": "g"}}]},
+ ),
+ (
+ {"A": [{"id": 1, "c": {"d": 2}}, {"id": 1, "c": {"f": []}}]},
+ {"A": {"$id: 1": {"h": None, "c": {"d": "e", "f": "g"}}}},
+ {
+ "A": [
+ {"id": 1, "c": {"d": "e", "f": "g"}},
+ {"id": 1, "c": {"d": "e", "f": "g"}},
+ ]
+ },
+ ),
# nested array->array
- ({"A": ["a", "b", ["a", "b"]]}, {"A": {"$b": None, "$[2]": {"$b": {}, "$+": "c"}}},
- {"A": ["a", ["a", {}, "c"]]}),
+ (
+ {"A": ["a", "b", ["a", "b"]]},
+ {"A": {"$b": None, "$[2]": {"$b": {}, "$+": "c"}}},
+ {"A": ["a", ["a", {}, "c"]]},
+ ),
# types str and int different, so not found
- ({"A": ["a", {"id": "1", "c": "d"}]}, {"A": {"$id: 1": {"c": "e"}}}, {"A": ["a", {"id": "1", "c": "d"}]}),
-
+ (
+ {"A": ["a", {"id": "1", "c": "d"}]},
+ {"A": {"$id: 1": {"c": "e"}}},
+ {"A": ["a", {"id": "1", "c": "d"}]},
+ ),
)
for t in TEST:
print(t)
# index edition out of range
({"A": ["a", "b", "a"]}, {"A": {"$[5]": 6}}),
# conflict, two editions on index 2
- ({"A": ["a", {"id": "1", "c": "d"}]}, {"A": {"$id: '1'": {"c": "e"}, "$c: d": {"c": "f"}}}),
+ (
+ {"A": ["a", {"id": "1", "c": "d"}]},
+ {"A": {"$id: '1'": {"c": "e"}, "$c: d": {"c": "f"}}},
+ ),
)
for t in TEST:
print(t)
print(e)
-if __name__ == '__main__':
+if __name__ == "__main__":
unittest.main()
from osm_common.dbmemory import DbMemory
from copy import deepcopy
-__author__ = 'Eduardo Sousa <eduardosousa@av.it.pt>'
+__author__ = "Eduardo Sousa <eduardosousa@av.it.pt>"
@pytest.fixture(scope="function", params=[True, False])
def db_memory_with_many_data(request):
db = DbMemory(lock=False)
- db.create_list("test", [
- {"_id": 1, "data": {"data2": {"data3": 1}}, "list": [{"a": 1}], "text": "sometext"},
- {"_id": 2, "data": {"data2": {"data3": 2}}, "list": [{"a": 2}], "list2": [1, 2, 3]},
- {"_id": 3, "data": {"data2": {"data3": 3}}, "list": [{"a": 3}]},
- {"_id": 4, "data": {"data2": {"data3": 4}}, "list": [{"a": 4}, {"a": 0}]},
- {"_id": 5, "data": {"data2": {"data3": 5}}, "list": [{"a": 5}]},
- {"_id": 6, "data": {"data2": {"data3": 6}}, "list": [{"0": {"a": 1}}]},
- {"_id": 7, "data": {"data2": {"data3": 7}}, "0": {"a": 0}},
- {"_id": 8, "list": [{"a": 3, "b": 0, "c": [{"a": 3, "b": 1}, {"a": 0, "b": "v"}]}, {"a": 0, "b": 1}]},
- ])
+ db.create_list(
+ "test",
+ [
+ {
+ "_id": 1,
+ "data": {"data2": {"data3": 1}},
+ "list": [{"a": 1}],
+ "text": "sometext",
+ },
+ {
+ "_id": 2,
+ "data": {"data2": {"data3": 2}},
+ "list": [{"a": 2}],
+ "list2": [1, 2, 3],
+ },
+ {"_id": 3, "data": {"data2": {"data3": 3}}, "list": [{"a": 3}]},
+ {"_id": 4, "data": {"data2": {"data3": 4}}, "list": [{"a": 4}, {"a": 0}]},
+ {"_id": 5, "data": {"data2": {"data3": 5}}, "list": [{"a": 5}]},
+ {"_id": 6, "data": {"data2": {"data3": 6}}, "list": [{"0": {"a": 1}}]},
+ {"_id": 7, "data": {"data2": {"data3": 7}}, "0": {"a": 0}},
+ {
+ "_id": 8,
+ "list": [
+ {"a": 3, "b": 0, "c": [{"a": 3, "b": 1}, {"a": 0, "b": "v"}]},
+ {"a": 0, "b": 1},
+ ],
+ },
+ ],
+ )
return db
def empty_exception_message():
- return 'database exception '
+ return "database exception "
def get_one_exception_message(db_filter):
def get_one_multiple_exception_message(db_filter):
- return "database exception Found more than one entry with filter='{}'".format(db_filter)
+ return "database exception Found more than one entry with filter='{}'".format(
+ db_filter
+ )
def del_one_exception_message(db_filter):
def test_constructor():
db = DbMemory()
- assert db.logger == logging.getLogger('db')
+ assert db.logger == logging.getLogger("db")
assert db.db == {}
def test_constructor_with_logger():
- logger_name = 'db_local'
+ logger_name = "db_local"
db = DbMemory(logger_name=logger_name)
assert db.logger == logging.getLogger(logger_name)
assert db.db == {}
def test_db_connect():
- logger_name = 'db_local'
- config = {'logger_name': logger_name}
+ logger_name = "db_local"
+ config = {"logger_name": logger_name}
db = DbMemory()
db.db_connect(config)
assert db.logger == logging.getLogger(logger_name)
db_memory.db_disconnect()
-@pytest.mark.parametrize("table, db_filter", [
- ("test", {}),
- ("test", {"_id": 1}),
- ("test", {"data": 1}),
- ("test", {"_id": 1, "data": 1})])
+@pytest.mark.parametrize(
+ "table, db_filter",
+ [
+ ("test", {}),
+ ("test", {"_id": 1}),
+ ("test", {"data": 1}),
+ ("test", {"_id": 1, "data": 1}),
+ ],
+)
def test_get_list_with_empty_db(db_memory, table, db_filter):
result = db_memory.get_list(table, db_filter)
assert len(result) == 0
-@pytest.mark.parametrize("table, db_filter, expected_data", [
- ("test", {}, [{"_id": 1, "data": 1}, {"_id": 2, "data": 2}, {"_id": 3, "data": 3}]),
- ("test", {"_id": 1}, [{"_id": 1, "data": 1}]),
- ("test", {"data": 1}, [{"_id": 1, "data": 1}]),
- ("test", {"_id": 1, "data": 1}, [{"_id": 1, "data": 1}]),
- ("test", {"_id": 2}, [{"_id": 2, "data": 2}]),
- ("test", {"data": 2}, [{"_id": 2, "data": 2}]),
- ("test", {"_id": 2, "data": 2}, [{"_id": 2, "data": 2}]),
- ("test", {"_id": 4}, []),
- ("test", {"data": 4}, []),
- ("test", {"_id": 4, "data": 4}, []),
- ("test_table", {}, []),
- ("test_table", {"_id": 1}, []),
- ("test_table", {"data": 1}, []),
- ("test_table", {"_id": 1, "data": 1}, [])])
-def test_get_list_with_non_empty_db(db_memory_with_data, table, db_filter, expected_data):
+@pytest.mark.parametrize(
+ "table, db_filter, expected_data",
+ [
+ (
+ "test",
+ {},
+ [{"_id": 1, "data": 1}, {"_id": 2, "data": 2}, {"_id": 3, "data": 3}],
+ ),
+ ("test", {"_id": 1}, [{"_id": 1, "data": 1}]),
+ ("test", {"data": 1}, [{"_id": 1, "data": 1}]),
+ ("test", {"_id": 1, "data": 1}, [{"_id": 1, "data": 1}]),
+ ("test", {"_id": 2}, [{"_id": 2, "data": 2}]),
+ ("test", {"data": 2}, [{"_id": 2, "data": 2}]),
+ ("test", {"_id": 2, "data": 2}, [{"_id": 2, "data": 2}]),
+ ("test", {"_id": 4}, []),
+ ("test", {"data": 4}, []),
+ ("test", {"_id": 4, "data": 4}, []),
+ ("test_table", {}, []),
+ ("test_table", {"_id": 1}, []),
+ ("test_table", {"data": 1}, []),
+ ("test_table", {"_id": 1, "data": 1}, []),
+ ],
+)
+def test_get_list_with_non_empty_db(
+ db_memory_with_data, table, db_filter, expected_data
+):
result = db_memory_with_data.get_list(table, db_filter)
assert len(result) == len(expected_data)
for data in expected_data:
def test_get_list_exception(db_memory_with_data):
- table = 'test'
+ table = "test"
db_filter = {}
db_memory_with_data._find = MagicMock(side_effect=Exception())
with pytest.raises(DbException) as excinfo:
assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND
-@pytest.mark.parametrize("table, db_filter, expected_data", [
- ("test", {"_id": 1}, {"_id": 1, "data": 1}),
- ("test", {"_id": 2}, {"_id": 2, "data": 2}),
- ("test", {"_id": 3}, {"_id": 3, "data": 3}),
- ("test", {"data": 1}, {"_id": 1, "data": 1}),
- ("test", {"data": 2}, {"_id": 2, "data": 2}),
- ("test", {"data": 3}, {"_id": 3, "data": 3}),
- ("test", {"_id": 1, "data": 1}, {"_id": 1, "data": 1}),
- ("test", {"_id": 2, "data": 2}, {"_id": 2, "data": 2}),
- ("test", {"_id": 3, "data": 3}, {"_id": 3, "data": 3})])
+@pytest.mark.parametrize(
+ "table, db_filter, expected_data",
+ [
+ ("test", {"_id": 1}, {"_id": 1, "data": 1}),
+ ("test", {"_id": 2}, {"_id": 2, "data": 2}),
+ ("test", {"_id": 3}, {"_id": 3, "data": 3}),
+ ("test", {"data": 1}, {"_id": 1, "data": 1}),
+ ("test", {"data": 2}, {"_id": 2, "data": 2}),
+ ("test", {"data": 3}, {"_id": 3, "data": 3}),
+ ("test", {"_id": 1, "data": 1}, {"_id": 1, "data": 1}),
+ ("test", {"_id": 2, "data": 2}, {"_id": 2, "data": 2}),
+ ("test", {"_id": 3, "data": 3}, {"_id": 3, "data": 3}),
+ ],
+)
def test_get_one(db_memory_with_data, table, db_filter, expected_data):
result = db_memory_with_data.get_one(table, db_filter)
assert result == expected_data
assert result in db_memory_with_data.db[table]
-@pytest.mark.parametrize("db_filter, expected_ids", [
- ({}, [1, 2, 3, 4, 5, 6, 7, 8]),
- ({"_id": 1}, [1]),
- ({"data.data2.data3": 2}, [2]),
- ({"data.data2.data3.eq": 2}, [2]),
- ({"data.data2.data3": [2]}, [2]),
- ({"data.data2.data3.cont": [2]}, [2]),
- ({"data.data2.data3.neq": 2}, [1, 3, 4, 5, 6, 7, 8]),
- ({"data.data2.data3.neq": [2]}, [1, 3, 4, 5, 6, 7, 8]),
- ({"data.data2.data3.ncont": [2]}, [1, 3, 4, 5, 6, 7, 8]),
- ({"data.data2.data3": [2, 3]}, [2, 3]),
- ({"data.data2.data3.gt": 4}, [5, 6, 7]),
- ({"data.data2.data3.gte": 4}, [4, 5, 6, 7]),
- ({"data.data2.data3.lt": 4}, [1, 2, 3]),
- ({"data.data2.data3.lte": 4}, [1, 2, 3, 4]),
- ({"data.data2.data3.lte": 4.5}, [1, 2, 3, 4]),
- ({"data.data2.data3.gt": "text"}, []),
- ({"nonexist.nonexist": "4"}, []),
- ({"nonexist.nonexist": None}, [1, 2, 3, 4, 5, 6, 7, 8]),
- ({"nonexist.nonexist.neq": "4"}, [1, 2, 3, 4, 5, 6, 7, 8]),
- ({"nonexist.nonexist.neq": None}, []),
- ({"text.eq": "sometext"}, [1]),
- ({"text.neq": "sometext"}, [2, 3, 4, 5, 6, 7, 8]),
- ({"text.eq": "somet"}, []),
- ({"text.gte": "a"}, [1]),
- ({"text.gte": "somet"}, [1]),
- ({"text.gte": "sometext"}, [1]),
- ({"text.lt": "somet"}, []),
- ({"data.data2.data3": 2, "data.data2.data4": None}, [2]),
- ({"data.data2.data3": 2, "data.data2.data4": 5}, []),
- ({"data.data2.data3": 4}, [4]),
- ({"data.data2.data3": [3, 4, "e"]}, [3, 4]),
- ({"data.data2.data3": None}, [8]),
- ({"data.data2": "4"}, []),
- ({"list.0.a": 1}, [1, 6]),
- ({"list2": 1}, [2]),
- ({"list2": [1, 5]}, [2]),
- ({"list2": [1, 2]}, [2]),
- ({"list2": [5, 7]}, []),
- ({"list.ANYINDEX.a": 1}, [1]),
- ({"list.a": 3, "list.b": 1}, [8]),
- ({"list.ANYINDEX.a": 3, "list.ANYINDEX.b": 1}, []),
- ({"list.ANYINDEX.a": 3, "list.ANYINDEX.c.a": 3}, [8]),
- ({"list.ANYINDEX.a": 3, "list.ANYINDEX.b": 0}, [8]),
- ({"list.ANYINDEX.a": 3, "list.ANYINDEX.c.ANYINDEX.a": 0, "list.ANYINDEX.c.ANYINDEX.b": "v"}, [8]),
- ({"list.ANYINDEX.a": 3, "list.ANYINDEX.c.ANYINDEX.a": 0, "list.ANYINDEX.c.ANYINDEX.b": 1}, []),
- ({"list.c.b": 1}, [8]),
- ({"list.c.b": None}, [1, 2, 3, 4, 5, 6, 7]),
- # ({"data.data2.data3": 4}, []),
- # ({"data.data2.data3": 4}, []),
-])
+@pytest.mark.parametrize(
+ "db_filter, expected_ids",
+ [
+ ({}, [1, 2, 3, 4, 5, 6, 7, 8]),
+ ({"_id": 1}, [1]),
+ ({"data.data2.data3": 2}, [2]),
+ ({"data.data2.data3.eq": 2}, [2]),
+ ({"data.data2.data3": [2]}, [2]),
+ ({"data.data2.data3.cont": [2]}, [2]),
+ ({"data.data2.data3.neq": 2}, [1, 3, 4, 5, 6, 7, 8]),
+ ({"data.data2.data3.neq": [2]}, [1, 3, 4, 5, 6, 7, 8]),
+ ({"data.data2.data3.ncont": [2]}, [1, 3, 4, 5, 6, 7, 8]),
+ ({"data.data2.data3": [2, 3]}, [2, 3]),
+ ({"data.data2.data3.gt": 4}, [5, 6, 7]),
+ ({"data.data2.data3.gte": 4}, [4, 5, 6, 7]),
+ ({"data.data2.data3.lt": 4}, [1, 2, 3]),
+ ({"data.data2.data3.lte": 4}, [1, 2, 3, 4]),
+ ({"data.data2.data3.lte": 4.5}, [1, 2, 3, 4]),
+ ({"data.data2.data3.gt": "text"}, []),
+ ({"nonexist.nonexist": "4"}, []),
+ ({"nonexist.nonexist": None}, [1, 2, 3, 4, 5, 6, 7, 8]),
+ ({"nonexist.nonexist.neq": "4"}, [1, 2, 3, 4, 5, 6, 7, 8]),
+ ({"nonexist.nonexist.neq": None}, []),
+ ({"text.eq": "sometext"}, [1]),
+ ({"text.neq": "sometext"}, [2, 3, 4, 5, 6, 7, 8]),
+ ({"text.eq": "somet"}, []),
+ ({"text.gte": "a"}, [1]),
+ ({"text.gte": "somet"}, [1]),
+ ({"text.gte": "sometext"}, [1]),
+ ({"text.lt": "somet"}, []),
+ ({"data.data2.data3": 2, "data.data2.data4": None}, [2]),
+ ({"data.data2.data3": 2, "data.data2.data4": 5}, []),
+ ({"data.data2.data3": 4}, [4]),
+ ({"data.data2.data3": [3, 4, "e"]}, [3, 4]),
+ ({"data.data2.data3": None}, [8]),
+ ({"data.data2": "4"}, []),
+ ({"list.0.a": 1}, [1, 6]),
+ ({"list2": 1}, [2]),
+ ({"list2": [1, 5]}, [2]),
+ ({"list2": [1, 2]}, [2]),
+ ({"list2": [5, 7]}, []),
+ ({"list.ANYINDEX.a": 1}, [1]),
+ ({"list.a": 3, "list.b": 1}, [8]),
+ ({"list.ANYINDEX.a": 3, "list.ANYINDEX.b": 1}, []),
+ ({"list.ANYINDEX.a": 3, "list.ANYINDEX.c.a": 3}, [8]),
+ ({"list.ANYINDEX.a": 3, "list.ANYINDEX.b": 0}, [8]),
+ (
+ {
+ "list.ANYINDEX.a": 3,
+ "list.ANYINDEX.c.ANYINDEX.a": 0,
+ "list.ANYINDEX.c.ANYINDEX.b": "v",
+ },
+ [8],
+ ),
+ (
+ {
+ "list.ANYINDEX.a": 3,
+ "list.ANYINDEX.c.ANYINDEX.a": 0,
+ "list.ANYINDEX.c.ANYINDEX.b": 1,
+ },
+ [],
+ ),
+ ({"list.c.b": 1}, [8]),
+ ({"list.c.b": None}, [1, 2, 3, 4, 5, 6, 7]),
+ # ({"data.data2.data3": 4}, []),
+ # ({"data.data2.data3": 4}, []),
+ ],
+)
def test_get_list(db_memory_with_many_data, db_filter, expected_ids):
result = db_memory_with_many_data.get_list("test", db_filter)
assert isinstance(result, list)
result_ids = [item["_id"] for item in result]
- assert len(result) == len(expected_ids), "for db_filter={} result={} expected_ids={}".format(db_filter, result,
- result_ids)
+ assert len(result) == len(
+ expected_ids
+ ), "for db_filter={} result={} expected_ids={}".format(
+ db_filter, result, result_ids
+ )
assert result_ids == expected_ids
for i in range(len(result)):
assert result[i] in db_memory_with_many_data.db["test"]
assert result == len(expected_ids)
-@pytest.mark.parametrize("table, db_filter, expected_data", [
- ("test", {}, {"_id": 1, "data": 1})])
-def test_get_one_with_multiple_results(db_memory_with_data, table, db_filter, expected_data):
+@pytest.mark.parametrize(
+ "table, db_filter, expected_data", [("test", {}, {"_id": 1, "data": 1})]
+)
+def test_get_one_with_multiple_results(
+ db_memory_with_data, table, db_filter, expected_data
+):
result = db_memory_with_data.get_one(table, db_filter, fail_on_more=False)
assert result == expected_data
assert len(db_memory_with_data.db) == 1
db_filter = {}
with pytest.raises(DbException) as excinfo:
db_memory_with_data.get_one(table, db_filter)
- assert str(excinfo.value) == (empty_exception_message() + get_one_multiple_exception_message(db_filter))
+ assert str(excinfo.value) == (
+ empty_exception_message() + get_one_multiple_exception_message(db_filter)
+ )
# assert excinfo.value.http_code == http.HTTPStatus.CONFLICT
-@pytest.mark.parametrize("table, db_filter", [
- ("test", {"_id": 4}),
- ("test", {"data": 4}),
- ("test", {"_id": 4, "data": 4}),
- ("test_table", {"_id": 4}),
- ("test_table", {"data": 4}),
- ("test_table", {"_id": 4, "data": 4})])
+@pytest.mark.parametrize(
+ "table, db_filter",
+ [
+ ("test", {"_id": 4}),
+ ("test", {"data": 4}),
+ ("test", {"_id": 4, "data": 4}),
+ ("test_table", {"_id": 4}),
+ ("test_table", {"data": 4}),
+ ("test_table", {"_id": 4, "data": 4}),
+ ],
+)
def test_get_one_with_non_empty_db_exception(db_memory_with_data, table, db_filter):
with pytest.raises(DbException) as excinfo:
db_memory_with_data.get_one(table, db_filter)
- assert str(excinfo.value) == (empty_exception_message() + get_one_exception_message(db_filter))
+ assert str(excinfo.value) == (
+ empty_exception_message() + get_one_exception_message(db_filter)
+ )
assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND
-@pytest.mark.parametrize("table, db_filter", [
- ("test", {"_id": 4}),
- ("test", {"data": 4}),
- ("test", {"_id": 4, "data": 4}),
- ("test_table", {"_id": 4}),
- ("test_table", {"data": 4}),
- ("test_table", {"_id": 4, "data": 4})])
+@pytest.mark.parametrize(
+ "table, db_filter",
+ [
+ ("test", {"_id": 4}),
+ ("test", {"data": 4}),
+ ("test", {"_id": 4, "data": 4}),
+ ("test_table", {"_id": 4}),
+ ("test_table", {"data": 4}),
+ ("test_table", {"_id": 4, "data": 4}),
+ ],
+)
def test_get_one_with_non_empty_db_none(db_memory_with_data, table, db_filter):
result = db_memory_with_data.get_one(table, db_filter, fail_on_empty=False)
assert result is None
-@pytest.mark.parametrize("table, db_filter", [
- ("test", {"_id": 4}),
- ("test", {"data": 4}),
- ("test", {"_id": 4, "data": 4}),
- ("test_table", {"_id": 4}),
- ("test_table", {"data": 4}),
- ("test_table", {"_id": 4, "data": 4})])
+@pytest.mark.parametrize(
+ "table, db_filter",
+ [
+ ("test", {"_id": 4}),
+ ("test", {"data": 4}),
+ ("test", {"_id": 4, "data": 4}),
+ ("test_table", {"_id": 4}),
+ ("test_table", {"data": 4}),
+ ("test_table", {"_id": 4, "data": 4}),
+ ],
+)
def test_get_one_with_empty_db_exception(db_memory, table, db_filter):
with pytest.raises(DbException) as excinfo:
db_memory.get_one(table, db_filter)
- assert str(excinfo.value) == (empty_exception_message() + get_one_exception_message(db_filter))
+ assert str(excinfo.value) == (
+ empty_exception_message() + get_one_exception_message(db_filter)
+ )
assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND
-@pytest.mark.parametrize("table, db_filter", [
- ("test", {"_id": 4}),
- ("test", {"data": 4}),
- ("test", {"_id": 4, "data": 4}),
- ("test_table", {"_id": 4}),
- ("test_table", {"data": 4}),
- ("test_table", {"_id": 4, "data": 4})])
+@pytest.mark.parametrize(
+ "table, db_filter",
+ [
+ ("test", {"_id": 4}),
+ ("test", {"data": 4}),
+ ("test", {"_id": 4, "data": 4}),
+ ("test_table", {"_id": 4}),
+ ("test_table", {"data": 4}),
+ ("test_table", {"_id": 4, "data": 4}),
+ ],
+)
def test_get_one_with_empty_db_none(db_memory, table, db_filter):
result = db_memory.get_one(table, db_filter, fail_on_empty=False)
assert result is None
def test_get_one_generic_exception(db_memory_with_data):
- table = 'test'
+ table = "test"
db_filter = {}
db_memory_with_data._find = MagicMock(side_effect=Exception())
with pytest.raises(DbException) as excinfo:
assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND
-@pytest.mark.parametrize("table, db_filter, expected_data", [
- ("test", {}, []),
- ("test", {"_id": 1}, [{"_id": 2, "data": 2}, {"_id": 3, "data": 3}]),
- ("test", {"_id": 2}, [{"_id": 1, "data": 1}, {"_id": 3, "data": 3}]),
- ("test", {"_id": 1, "data": 1}, [{"_id": 2, "data": 2}, {"_id": 3, "data": 3}]),
- ("test", {"_id": 2, "data": 2}, [{"_id": 1, "data": 1}, {"_id": 3, "data": 3}])])
-def test_del_list_with_non_empty_db(db_memory_with_data, table, db_filter, expected_data):
+@pytest.mark.parametrize(
+ "table, db_filter, expected_data",
+ [
+ ("test", {}, []),
+ ("test", {"_id": 1}, [{"_id": 2, "data": 2}, {"_id": 3, "data": 3}]),
+ ("test", {"_id": 2}, [{"_id": 1, "data": 1}, {"_id": 3, "data": 3}]),
+ ("test", {"_id": 1, "data": 1}, [{"_id": 2, "data": 2}, {"_id": 3, "data": 3}]),
+ ("test", {"_id": 2, "data": 2}, [{"_id": 1, "data": 1}, {"_id": 3, "data": 3}]),
+ ],
+)
+def test_del_list_with_non_empty_db(
+ db_memory_with_data, table, db_filter, expected_data
+):
result = db_memory_with_data.del_list(table, db_filter)
assert result["deleted"] == (3 - len(expected_data))
assert len(db_memory_with_data.db) == 1
assert data in db_memory_with_data.db[table]
-@pytest.mark.parametrize("table, db_filter", [
- ("test", {}),
- ("test", {"_id": 1}),
- ("test", {"_id": 2}),
- ("test", {"data": 1}),
- ("test", {"data": 2}),
- ("test", {"_id": 1, "data": 1}),
- ("test", {"_id": 2, "data": 2})])
+@pytest.mark.parametrize(
+ "table, db_filter",
+ [
+ ("test", {}),
+ ("test", {"_id": 1}),
+ ("test", {"_id": 2}),
+ ("test", {"data": 1}),
+ ("test", {"data": 2}),
+ ("test", {"_id": 1, "data": 1}),
+ ("test", {"_id": 2, "data": 2}),
+ ],
+)
def test_del_list_with_empty_db(db_memory, table, db_filter):
result = db_memory.del_list(table, db_filter)
- assert result['deleted'] == 0
+ assert result["deleted"] == 0
def test_del_list_generic_exception(db_memory_with_data):
- table = 'test'
+ table = "test"
db_filter = {}
db_memory_with_data._find = MagicMock(side_effect=Exception())
with pytest.raises(DbException) as excinfo:
assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND
-@pytest.mark.parametrize("table, db_filter, data", [
- ("test", {}, {"_id": 1, "data": 1}),
- ("test", {"_id": 1}, {"_id": 1, "data": 1}),
- ("test", {"data": 1}, {"_id": 1, "data": 1}),
- ("test", {"_id": 1, "data": 1}, {"_id": 1, "data": 1}),
- ("test", {"_id": 2}, {"_id": 2, "data": 2}),
- ("test", {"data": 2}, {"_id": 2, "data": 2}),
- ("test", {"_id": 2, "data": 2}, {"_id": 2, "data": 2})])
+@pytest.mark.parametrize(
+ "table, db_filter, data",
+ [
+ ("test", {}, {"_id": 1, "data": 1}),
+ ("test", {"_id": 1}, {"_id": 1, "data": 1}),
+ ("test", {"data": 1}, {"_id": 1, "data": 1}),
+ ("test", {"_id": 1, "data": 1}, {"_id": 1, "data": 1}),
+ ("test", {"_id": 2}, {"_id": 2, "data": 2}),
+ ("test", {"data": 2}, {"_id": 2, "data": 2}),
+ ("test", {"_id": 2, "data": 2}, {"_id": 2, "data": 2}),
+ ],
+)
def test_del_one(db_memory_with_data, table, db_filter, data):
result = db_memory_with_data.del_one(table, db_filter)
assert result == {"deleted": 1}
assert data not in db_memory_with_data.db[table]
-@pytest.mark.parametrize("table, db_filter", [
- ("test", {}),
- ("test", {"_id": 1}),
- ("test", {"_id": 2}),
- ("test", {"data": 1}),
- ("test", {"data": 2}),
- ("test", {"_id": 1, "data": 1}),
- ("test", {"_id": 2, "data": 2}),
- ("test_table", {}),
- ("test_table", {"_id": 1}),
- ("test_table", {"_id": 2}),
- ("test_table", {"data": 1}),
- ("test_table", {"data": 2}),
- ("test_table", {"_id": 1, "data": 1}),
- ("test_table", {"_id": 2, "data": 2})])
+@pytest.mark.parametrize(
+ "table, db_filter",
+ [
+ ("test", {}),
+ ("test", {"_id": 1}),
+ ("test", {"_id": 2}),
+ ("test", {"data": 1}),
+ ("test", {"data": 2}),
+ ("test", {"_id": 1, "data": 1}),
+ ("test", {"_id": 2, "data": 2}),
+ ("test_table", {}),
+ ("test_table", {"_id": 1}),
+ ("test_table", {"_id": 2}),
+ ("test_table", {"data": 1}),
+ ("test_table", {"data": 2}),
+ ("test_table", {"_id": 1, "data": 1}),
+ ("test_table", {"_id": 2, "data": 2}),
+ ],
+)
def test_del_one_with_empty_db_exception(db_memory, table, db_filter):
with pytest.raises(DbException) as excinfo:
db_memory.del_one(table, db_filter)
- assert str(excinfo.value) == (empty_exception_message() + del_one_exception_message(db_filter))
+ assert str(excinfo.value) == (
+ empty_exception_message() + del_one_exception_message(db_filter)
+ )
assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND
-@pytest.mark.parametrize("table, db_filter", [
- ("test", {}),
- ("test", {"_id": 1}),
- ("test", {"_id": 2}),
- ("test", {"data": 1}),
- ("test", {"data": 2}),
- ("test", {"_id": 1, "data": 1}),
- ("test", {"_id": 2, "data": 2}),
- ("test_table", {}),
- ("test_table", {"_id": 1}),
- ("test_table", {"_id": 2}),
- ("test_table", {"data": 1}),
- ("test_table", {"data": 2}),
- ("test_table", {"_id": 1, "data": 1}),
- ("test_table", {"_id": 2, "data": 2})])
+@pytest.mark.parametrize(
+ "table, db_filter",
+ [
+ ("test", {}),
+ ("test", {"_id": 1}),
+ ("test", {"_id": 2}),
+ ("test", {"data": 1}),
+ ("test", {"data": 2}),
+ ("test", {"_id": 1, "data": 1}),
+ ("test", {"_id": 2, "data": 2}),
+ ("test_table", {}),
+ ("test_table", {"_id": 1}),
+ ("test_table", {"_id": 2}),
+ ("test_table", {"data": 1}),
+ ("test_table", {"data": 2}),
+ ("test_table", {"_id": 1, "data": 1}),
+ ("test_table", {"_id": 2, "data": 2}),
+ ],
+)
def test_del_one_with_empty_db_none(db_memory, table, db_filter):
result = db_memory.del_one(table, db_filter, fail_on_empty=False)
assert result is None
-@pytest.mark.parametrize("table, db_filter", [
- ("test", {"_id": 4}),
- ("test", {"_id": 5}),
- ("test", {"data": 4}),
- ("test", {"data": 5}),
- ("test", {"_id": 1, "data": 2}),
- ("test", {"_id": 2, "data": 3}),
- ("test_table", {}),
- ("test_table", {"_id": 1}),
- ("test_table", {"_id": 2}),
- ("test_table", {"data": 1}),
- ("test_table", {"data": 2}),
- ("test_table", {"_id": 1, "data": 1}),
- ("test_table", {"_id": 2, "data": 2})])
+@pytest.mark.parametrize(
+ "table, db_filter",
+ [
+ ("test", {"_id": 4}),
+ ("test", {"_id": 5}),
+ ("test", {"data": 4}),
+ ("test", {"data": 5}),
+ ("test", {"_id": 1, "data": 2}),
+ ("test", {"_id": 2, "data": 3}),
+ ("test_table", {}),
+ ("test_table", {"_id": 1}),
+ ("test_table", {"_id": 2}),
+ ("test_table", {"data": 1}),
+ ("test_table", {"data": 2}),
+ ("test_table", {"_id": 1, "data": 1}),
+ ("test_table", {"_id": 2, "data": 2}),
+ ],
+)
def test_del_one_with_non_empty_db_exception(db_memory_with_data, table, db_filter):
with pytest.raises(DbException) as excinfo:
db_memory_with_data.del_one(table, db_filter)
- assert str(excinfo.value) == (empty_exception_message() + del_one_exception_message(db_filter))
+ assert str(excinfo.value) == (
+ empty_exception_message() + del_one_exception_message(db_filter)
+ )
assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND
-@pytest.mark.parametrize("table, db_filter", [
- ("test", {"_id": 4}),
- ("test", {"_id": 5}),
- ("test", {"data": 4}),
- ("test", {"data": 5}),
- ("test", {"_id": 1, "data": 2}),
- ("test", {"_id": 2, "data": 3}),
- ("test_table", {}),
- ("test_table", {"_id": 1}),
- ("test_table", {"_id": 2}),
- ("test_table", {"data": 1}),
- ("test_table", {"data": 2}),
- ("test_table", {"_id": 1, "data": 1}),
- ("test_table", {"_id": 2, "data": 2})])
+@pytest.mark.parametrize(
+ "table, db_filter",
+ [
+ ("test", {"_id": 4}),
+ ("test", {"_id": 5}),
+ ("test", {"data": 4}),
+ ("test", {"data": 5}),
+ ("test", {"_id": 1, "data": 2}),
+ ("test", {"_id": 2, "data": 3}),
+ ("test_table", {}),
+ ("test_table", {"_id": 1}),
+ ("test_table", {"_id": 2}),
+ ("test_table", {"data": 1}),
+ ("test_table", {"data": 2}),
+ ("test_table", {"_id": 1, "data": 1}),
+ ("test_table", {"_id": 2, "data": 2}),
+ ],
+)
def test_del_one_with_non_empty_db_none(db_memory_with_data, table, db_filter):
result = db_memory_with_data.del_one(table, db_filter, fail_on_empty=False)
assert result is None
-@pytest.mark.parametrize("fail_on_empty", [
- (True),
- (False)])
+@pytest.mark.parametrize("fail_on_empty", [(True), (False)])
def test_del_one_generic_exception(db_memory_with_data, fail_on_empty):
- table = 'test'
+ table = "test"
db_filter = {}
db_memory_with_data._find = MagicMock(side_effect=Exception())
with pytest.raises(DbException) as excinfo:
assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND
-@pytest.mark.parametrize("table, _id, indata", [
- ("test", 1, {"_id": 1, "data": 42}),
- ("test", 1, {"_id": 1, "data": 42, "kk": 34}),
- ("test", 1, {"_id": 1}),
- ("test", 2, {"_id": 2, "data": 42}),
- ("test", 2, {"_id": 2, "data": 42, "kk": 34}),
- ("test", 2, {"_id": 2}),
- ("test", 3, {"_id": 3, "data": 42}),
- ("test", 3, {"_id": 3, "data": 42, "kk": 34}),
- ("test", 3, {"_id": 3})])
+@pytest.mark.parametrize(
+ "table, _id, indata",
+ [
+ ("test", 1, {"_id": 1, "data": 42}),
+ ("test", 1, {"_id": 1, "data": 42, "kk": 34}),
+ ("test", 1, {"_id": 1}),
+ ("test", 2, {"_id": 2, "data": 42}),
+ ("test", 2, {"_id": 2, "data": 42, "kk": 34}),
+ ("test", 2, {"_id": 2}),
+ ("test", 3, {"_id": 3, "data": 42}),
+ ("test", 3, {"_id": 3, "data": 42, "kk": 34}),
+ ("test", 3, {"_id": 3}),
+ ],
+)
def test_replace(db_memory_with_data, table, _id, indata):
result = db_memory_with_data.replace(table, _id, indata)
assert result == {"updated": 1}
assert indata in db_memory_with_data.db[table]
-@pytest.mark.parametrize("table, _id, indata", [
- ("test", 1, {"_id": 1, "data": 42}),
- ("test", 2, {"_id": 2}),
- ("test", 3, {"_id": 3})])
+@pytest.mark.parametrize(
+ "table, _id, indata",
+ [
+ ("test", 1, {"_id": 1, "data": 42}),
+ ("test", 2, {"_id": 2}),
+ ("test", 3, {"_id": 3}),
+ ],
+)
def test_replace_without_data_exception(db_memory, table, _id, indata):
with pytest.raises(DbException) as excinfo:
db_memory.replace(table, _id, indata, fail_on_empty=True)
assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND
-@pytest.mark.parametrize("table, _id, indata", [
- ("test", 1, {"_id": 1, "data": 42}),
- ("test", 2, {"_id": 2}),
- ("test", 3, {"_id": 3})])
+@pytest.mark.parametrize(
+ "table, _id, indata",
+ [
+ ("test", 1, {"_id": 1, "data": 42}),
+ ("test", 2, {"_id": 2}),
+ ("test", 3, {"_id": 3}),
+ ],
+)
def test_replace_without_data_none(db_memory, table, _id, indata):
result = db_memory.replace(table, _id, indata, fail_on_empty=False)
assert result is None
-@pytest.mark.parametrize("table, _id, indata", [
- ("test", 11, {"_id": 11, "data": 42}),
- ("test", 12, {"_id": 12}),
- ("test", 33, {"_id": 33})])
+@pytest.mark.parametrize(
+ "table, _id, indata",
+ [
+ ("test", 11, {"_id": 11, "data": 42}),
+ ("test", 12, {"_id": 12}),
+ ("test", 33, {"_id": 33}),
+ ],
+)
def test_replace_with_data_exception(db_memory_with_data, table, _id, indata):
with pytest.raises(DbException) as excinfo:
db_memory_with_data.replace(table, _id, indata, fail_on_empty=True)
assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND
-@pytest.mark.parametrize("table, _id, indata", [
- ("test", 11, {"_id": 11, "data": 42}),
- ("test", 12, {"_id": 12}),
- ("test", 33, {"_id": 33})])
+@pytest.mark.parametrize(
+ "table, _id, indata",
+ [
+ ("test", 11, {"_id": 11, "data": 42}),
+ ("test", 12, {"_id": 12}),
+ ("test", 33, {"_id": 33}),
+ ],
+)
def test_replace_with_data_none(db_memory_with_data, table, _id, indata):
result = db_memory_with_data.replace(table, _id, indata, fail_on_empty=False)
assert result is None
-@pytest.mark.parametrize("fail_on_empty", [
- True,
- False])
+@pytest.mark.parametrize("fail_on_empty", [True, False])
def test_replace_generic_exception(db_memory_with_data, fail_on_empty):
- table = 'test'
+ table = "test"
_id = {}
- indata = {'_id': 1, 'data': 1}
+ indata = {"_id": 1, "data": 1}
db_memory_with_data._find = MagicMock(side_effect=Exception())
with pytest.raises(DbException) as excinfo:
db_memory_with_data.replace(table, _id, indata, fail_on_empty=fail_on_empty)
assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND
-@pytest.mark.parametrize("table, id, data", [
- ("test", "1", {"data": 1}),
- ("test", "1", {"data": 2}),
- ("test", "2", {"data": 1}),
- ("test", "2", {"data": 2}),
- ("test_table", "1", {"data": 1}),
- ("test_table", "1", {"data": 2}),
- ("test_table", "2", {"data": 1}),
- ("test_table", "2", {"data": 2}),
- ("test", "1", {"data_1": 1, "data_2": 2}),
- ("test", "1", {"data_1": 2, "data_2": 1}),
- ("test", "2", {"data_1": 1, "data_2": 2}),
- ("test", "2", {"data_1": 2, "data_2": 1}),
- ("test_table", "1", {"data_1": 1, "data_2": 2}),
- ("test_table", "1", {"data_1": 2, "data_2": 1}),
- ("test_table", "2", {"data_1": 1, "data_2": 2}),
- ("test_table", "2", {"data_1": 2, "data_2": 1})])
+@pytest.mark.parametrize(
+ "table, id, data",
+ [
+ ("test", "1", {"data": 1}),
+ ("test", "1", {"data": 2}),
+ ("test", "2", {"data": 1}),
+ ("test", "2", {"data": 2}),
+ ("test_table", "1", {"data": 1}),
+ ("test_table", "1", {"data": 2}),
+ ("test_table", "2", {"data": 1}),
+ ("test_table", "2", {"data": 2}),
+ ("test", "1", {"data_1": 1, "data_2": 2}),
+ ("test", "1", {"data_1": 2, "data_2": 1}),
+ ("test", "2", {"data_1": 1, "data_2": 2}),
+ ("test", "2", {"data_1": 2, "data_2": 1}),
+ ("test_table", "1", {"data_1": 1, "data_2": 2}),
+ ("test_table", "1", {"data_1": 2, "data_2": 1}),
+ ("test_table", "2", {"data_1": 1, "data_2": 2}),
+ ("test_table", "2", {"data_1": 2, "data_2": 1}),
+ ],
+)
def test_create_with_empty_db_with_id(db_memory, table, id, data):
data_to_insert = data
- data_to_insert['_id'] = id
+ data_to_insert["_id"] = id
returned_id = db_memory.create(table, data_to_insert)
assert returned_id == id
assert len(db_memory.db) == 1
assert data_to_insert in db_memory.db[table]
-@pytest.mark.parametrize("table, id, data", [
- ("test", "4", {"data": 1}),
- ("test", "5", {"data": 2}),
- ("test", "4", {"data": 1}),
- ("test", "5", {"data": 2}),
- ("test_table", "4", {"data": 1}),
- ("test_table", "5", {"data": 2}),
- ("test_table", "4", {"data": 1}),
- ("test_table", "5", {"data": 2}),
- ("test", "4", {"data_1": 1, "data_2": 2}),
- ("test", "5", {"data_1": 2, "data_2": 1}),
- ("test", "4", {"data_1": 1, "data_2": 2}),
- ("test", "5", {"data_1": 2, "data_2": 1}),
- ("test_table", "4", {"data_1": 1, "data_2": 2}),
- ("test_table", "5", {"data_1": 2, "data_2": 1}),
- ("test_table", "4", {"data_1": 1, "data_2": 2}),
- ("test_table", "5", {"data_1": 2, "data_2": 1})])
+@pytest.mark.parametrize(
+ "table, id, data",
+ [
+ ("test", "4", {"data": 1}),
+ ("test", "5", {"data": 2}),
+ ("test", "4", {"data": 1}),
+ ("test", "5", {"data": 2}),
+ ("test_table", "4", {"data": 1}),
+ ("test_table", "5", {"data": 2}),
+ ("test_table", "4", {"data": 1}),
+ ("test_table", "5", {"data": 2}),
+ ("test", "4", {"data_1": 1, "data_2": 2}),
+ ("test", "5", {"data_1": 2, "data_2": 1}),
+ ("test", "4", {"data_1": 1, "data_2": 2}),
+ ("test", "5", {"data_1": 2, "data_2": 1}),
+ ("test_table", "4", {"data_1": 1, "data_2": 2}),
+ ("test_table", "5", {"data_1": 2, "data_2": 1}),
+ ("test_table", "4", {"data_1": 1, "data_2": 2}),
+ ("test_table", "5", {"data_1": 2, "data_2": 1}),
+ ],
+)
def test_create_with_non_empty_db_with_id(db_memory_with_data, table, id, data):
data_to_insert = data
- data_to_insert['_id'] = id
+ data_to_insert["_id"] = id
returned_id = db_memory_with_data.create(table, data_to_insert)
assert returned_id == id
- assert len(db_memory_with_data.db) == (1 if table == 'test' else 2)
+ assert len(db_memory_with_data.db) == (1 if table == "test" else 2)
assert table in db_memory_with_data.db
- assert len(db_memory_with_data.db[table]) == (4 if table == 'test' else 1)
+ assert len(db_memory_with_data.db[table]) == (4 if table == "test" else 1)
assert data_to_insert in db_memory_with_data.db[table]
-@pytest.mark.parametrize("table, data", [
- ("test", {"data": 1}),
- ("test", {"data": 2}),
- ("test", {"data": 1}),
- ("test", {"data": 2}),
- ("test_table", {"data": 1}),
- ("test_table", {"data": 2}),
- ("test_table", {"data": 1}),
- ("test_table", {"data": 2}),
- ("test", {"data_1": 1, "data_2": 2}),
- ("test", {"data_1": 2, "data_2": 1}),
- ("test", {"data_1": 1, "data_2": 2}),
- ("test", {"data_1": 2, "data_2": 1}),
- ("test_table", {"data_1": 1, "data_2": 2}),
- ("test_table", {"data_1": 2, "data_2": 1}),
- ("test_table", {"data_1": 1, "data_2": 2}),
- ("test_table", {"data_1": 2, "data_2": 1})])
+@pytest.mark.parametrize(
+ "table, data",
+ [
+ ("test", {"data": 1}),
+ ("test", {"data": 2}),
+ ("test", {"data": 1}),
+ ("test", {"data": 2}),
+ ("test_table", {"data": 1}),
+ ("test_table", {"data": 2}),
+ ("test_table", {"data": 1}),
+ ("test_table", {"data": 2}),
+ ("test", {"data_1": 1, "data_2": 2}),
+ ("test", {"data_1": 2, "data_2": 1}),
+ ("test", {"data_1": 1, "data_2": 2}),
+ ("test", {"data_1": 2, "data_2": 1}),
+ ("test_table", {"data_1": 1, "data_2": 2}),
+ ("test_table", {"data_1": 2, "data_2": 1}),
+ ("test_table", {"data_1": 1, "data_2": 2}),
+ ("test_table", {"data_1": 2, "data_2": 1}),
+ ],
+)
def test_create_with_empty_db_without_id(db_memory, table, data):
returned_id = db_memory.create(table, data)
assert len(db_memory.db) == 1
assert table in db_memory.db
assert len(db_memory.db[table]) == 1
data_inserted = data
- data_inserted['_id'] = returned_id
+ data_inserted["_id"] = returned_id
assert data_inserted in db_memory.db[table]
-@pytest.mark.parametrize("table, data", [
- ("test", {"data": 1}),
- ("test", {"data": 2}),
- ("test", {"data": 1}),
- ("test", {"data": 2}),
- ("test_table", {"data": 1}),
- ("test_table", {"data": 2}),
- ("test_table", {"data": 1}),
- ("test_table", {"data": 2}),
- ("test", {"data_1": 1, "data_2": 2}),
- ("test", {"data_1": 2, "data_2": 1}),
- ("test", {"data_1": 1, "data_2": 2}),
- ("test", {"data_1": 2, "data_2": 1}),
- ("test_table", {"data_1": 1, "data_2": 2}),
- ("test_table", {"data_1": 2, "data_2": 1}),
- ("test_table", {"data_1": 1, "data_2": 2}),
- ("test_table", {"data_1": 2, "data_2": 1})])
+@pytest.mark.parametrize(
+ "table, data",
+ [
+ ("test", {"data": 1}),
+ ("test", {"data": 2}),
+ ("test", {"data": 1}),
+ ("test", {"data": 2}),
+ ("test_table", {"data": 1}),
+ ("test_table", {"data": 2}),
+ ("test_table", {"data": 1}),
+ ("test_table", {"data": 2}),
+ ("test", {"data_1": 1, "data_2": 2}),
+ ("test", {"data_1": 2, "data_2": 1}),
+ ("test", {"data_1": 1, "data_2": 2}),
+ ("test", {"data_1": 2, "data_2": 1}),
+ ("test_table", {"data_1": 1, "data_2": 2}),
+ ("test_table", {"data_1": 2, "data_2": 1}),
+ ("test_table", {"data_1": 1, "data_2": 2}),
+ ("test_table", {"data_1": 2, "data_2": 1}),
+ ],
+)
def test_create_with_non_empty_db_without_id(db_memory_with_data, table, data):
returned_id = db_memory_with_data.create(table, data)
- assert len(db_memory_with_data.db) == (1 if table == 'test' else 2)
+ assert len(db_memory_with_data.db) == (1 if table == "test" else 2)
assert table in db_memory_with_data.db
- assert len(db_memory_with_data.db[table]) == (4 if table == 'test' else 1)
+ assert len(db_memory_with_data.db[table]) == (4 if table == "test" else 1)
data_inserted = data
- data_inserted['_id'] = returned_id
+ data_inserted["_id"] = returned_id
assert data_inserted in db_memory_with_data.db[table]
assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND
-@pytest.mark.parametrize("db_content, update_dict, expected, message", [
- ({"a": {"none": None}}, {"a.b.num": "v"}, {"a": {"none": None, "b": {"num": "v"}}}, "create dict"),
- ({"a": {"none": None}}, {"a.none.num": "v"}, {"a": {"none": {"num": "v"}}}, "create dict over none"),
- ({"a": {"b": {"num": 4}}}, {"a.b.num": "v"}, {"a": {"b": {"num": "v"}}}, "replace_number"),
- ({"a": {"b": {"num": 4}}}, {"a.b.num.c.d": "v"}, None, "create dict over number should fail"),
- ({"a": {"b": {"num": 4}}}, {"a.b": "v"}, {"a": {"b": "v"}}, "replace dict with a string"),
- ({"a": {"b": {"num": 4}}}, {"a.b": None}, {"a": {"b": None}}, "replace dict with None"),
- ({"a": [{"b": {"num": 4}}]}, {"a.b.num": "v"}, None, "create dict over list should fail"),
- ({"a": [{"b": {"num": 4}}]}, {"a.0.b.num": "v"}, {"a": [{"b": {"num": "v"}}]}, "set list"),
- ({"a": [{"b": {"num": 4}}]}, {"a.3.b.num": "v"},
- {"a": [{"b": {"num": 4}}, None, None, {"b": {"num": "v"}}]}, "expand list"),
- ({"a": [[4]]}, {"a.0.0": "v"}, {"a": [["v"]]}, "set nested list"),
- ({"a": [[4]]}, {"a.0.2": "v"}, {"a": [[4, None, "v"]]}, "expand nested list"),
- ({"a": [[4]]}, {"a.2.2": "v"}, {"a": [[4], None, {"2": "v"}]}, "expand list and add number key")])
+@pytest.mark.parametrize(
+ "db_content, update_dict, expected, message",
+ [
+ (
+ {"a": {"none": None}},
+ {"a.b.num": "v"},
+ {"a": {"none": None, "b": {"num": "v"}}},
+ "create dict",
+ ),
+ (
+ {"a": {"none": None}},
+ {"a.none.num": "v"},
+ {"a": {"none": {"num": "v"}}},
+ "create dict over none",
+ ),
+ (
+ {"a": {"b": {"num": 4}}},
+ {"a.b.num": "v"},
+ {"a": {"b": {"num": "v"}}},
+ "replace_number",
+ ),
+ (
+ {"a": {"b": {"num": 4}}},
+ {"a.b.num.c.d": "v"},
+ None,
+ "create dict over number should fail",
+ ),
+ (
+ {"a": {"b": {"num": 4}}},
+ {"a.b": "v"},
+ {"a": {"b": "v"}},
+ "replace dict with a string",
+ ),
+ (
+ {"a": {"b": {"num": 4}}},
+ {"a.b": None},
+ {"a": {"b": None}},
+ "replace dict with None",
+ ),
+ (
+ {"a": [{"b": {"num": 4}}]},
+ {"a.b.num": "v"},
+ None,
+ "create dict over list should fail",
+ ),
+ (
+ {"a": [{"b": {"num": 4}}]},
+ {"a.0.b.num": "v"},
+ {"a": [{"b": {"num": "v"}}]},
+ "set list",
+ ),
+ (
+ {"a": [{"b": {"num": 4}}]},
+ {"a.3.b.num": "v"},
+ {"a": [{"b": {"num": 4}}, None, None, {"b": {"num": "v"}}]},
+ "expand list",
+ ),
+ ({"a": [[4]]}, {"a.0.0": "v"}, {"a": [["v"]]}, "set nested list"),
+ ({"a": [[4]]}, {"a.0.2": "v"}, {"a": [[4, None, "v"]]}, "expand nested list"),
+ (
+ {"a": [[4]]},
+ {"a.2.2": "v"},
+ {"a": [[4], None, {"2": "v"}]},
+ "expand list and add number key",
+ ),
+ ],
+)
def test_set_one(db_memory, db_content, update_dict, expected, message):
- db_memory._find = Mock(return_value=((0, db_content), ))
+ db_memory._find = Mock(return_value=((0, db_content),))
if expected is None:
with pytest.raises(DbException) as excinfo:
db_memory.set_one("table", {}, update_dict)
- assert (excinfo.value.http_code == http.HTTPStatus.NOT_FOUND), message
+ assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND, message
else:
db_memory.set_one("table", {}, update_dict)
- assert (db_content == expected), message
+ assert db_content == expected, message
class TestDbMemory(unittest.TestCase):
def test_set_one(self):
test_set = (
# (database content, set-content, expected database content (None=fails), message)
- ({"a": {"none": None}}, {"a.b.num": "v"}, {"a": {"none": None, "b": {"num": "v"}}}, "create dict"),
- ({"a": {"none": None}}, {"a.none.num": "v"}, {"a": {"none": {"num": "v"}}}, "create dict over none"),
- ({"a": {"b": {"num": 4}}}, {"a.b.num": "v"}, {"a": {"b": {"num": "v"}}}, "replace_number"),
- ({"a": {"b": {"num": 4}}}, {"a.b.num.c.d": "v"}, None, "create dict over number should fail"),
- ({"a": {"b": {"num": 4}}}, {"a.b": "v"}, {"a": {"b": "v"}}, "replace dict with a string"),
- ({"a": {"b": {"num": 4}}}, {"a.b": None}, {"a": {"b": None}}, "replace dict with None"),
-
- ({"a": [{"b": {"num": 4}}]}, {"a.b.num": "v"}, None, "create dict over list should fail"),
- ({"a": [{"b": {"num": 4}}]}, {"a.0.b.num": "v"}, {"a": [{"b": {"num": "v"}}]}, "set list"),
- ({"a": [{"b": {"num": 4}}]}, {"a.3.b.num": "v"},
- {"a": [{"b": {"num": 4}}, None, None, {"b": {"num": "v"}}]}, "expand list"),
+ (
+ {"a": {"none": None}},
+ {"a.b.num": "v"},
+ {"a": {"none": None, "b": {"num": "v"}}},
+ "create dict",
+ ),
+ (
+ {"a": {"none": None}},
+ {"a.none.num": "v"},
+ {"a": {"none": {"num": "v"}}},
+ "create dict over none",
+ ),
+ (
+ {"a": {"b": {"num": 4}}},
+ {"a.b.num": "v"},
+ {"a": {"b": {"num": "v"}}},
+ "replace_number",
+ ),
+ (
+ {"a": {"b": {"num": 4}}},
+ {"a.b.num.c.d": "v"},
+ None,
+ "create dict over number should fail",
+ ),
+ (
+ {"a": {"b": {"num": 4}}},
+ {"a.b": "v"},
+ {"a": {"b": "v"}},
+ "replace dict with a string",
+ ),
+ (
+ {"a": {"b": {"num": 4}}},
+ {"a.b": None},
+ {"a": {"b": None}},
+ "replace dict with None",
+ ),
+ (
+ {"a": [{"b": {"num": 4}}]},
+ {"a.b.num": "v"},
+ None,
+ "create dict over list should fail",
+ ),
+ (
+ {"a": [{"b": {"num": 4}}]},
+ {"a.0.b.num": "v"},
+ {"a": [{"b": {"num": "v"}}]},
+ "set list",
+ ),
+ (
+ {"a": [{"b": {"num": 4}}]},
+ {"a.3.b.num": "v"},
+ {"a": [{"b": {"num": 4}}, None, None, {"b": {"num": "v"}}]},
+ "expand list",
+ ),
({"a": [[4]]}, {"a.0.0": "v"}, {"a": [["v"]]}, "set nested list"),
- ({"a": [[4]]}, {"a.0.2": "v"}, {"a": [[4, None, "v"]]}, "expand nested list"),
- ({"a": [[4]]}, {"a.2.2": "v"}, {"a": [[4], None, {"2": "v"}]}, "expand list and add number key"),
+ (
+ {"a": [[4]]},
+ {"a.0.2": "v"},
+ {"a": [[4, None, "v"]]},
+ "expand nested list",
+ ),
+ (
+ {"a": [[4]]},
+ {"a.2.2": "v"},
+ {"a": [[4], None, {"2": "v"}]},
+ "expand list and add number key",
+ ),
({"a": None}, {"b.c": "v"}, {"a": None, "b": {"c": "v"}}, "expand at root"),
)
db_men = DbMemory()
db_men._find = Mock()
for db_content, update_dict, expected, message in test_set:
- db_men._find.return_value = ((0, db_content), )
+ db_men._find.return_value = ((0, db_content),)
if expected is None:
self.assertRaises(DbException, db_men.set_one, "table", {}, update_dict)
else:
db_men._find = Mock()
for db_content, pull_dict, expected, message in test_set:
db_content = deepcopy(db_content)
- db_men._find.return_value = ((0, db_content), )
+ db_men._find.return_value = ((0, db_content),)
if expected is None:
- self.assertRaises(DbException, db_men.set_one, "table", {}, None, fail_on_empty=False, pull=pull_dict)
+ self.assertRaises(
+ DbException,
+ db_men.set_one,
+ "table",
+ {},
+ None,
+ fail_on_empty=False,
+ pull=pull_dict,
+ )
else:
db_men.set_one("table", {}, None, pull=pull_dict)
self.assertEqual(db_content, expected, message)
example = {"a": [1, "1", 1], "d": {}, "n": None}
test_set = (
# (database content, set-content, expected database content (None=fails), message)
- (example, {"d.b.c": 1}, {"a": [1, "1", 1], "d": {"b": {"c": [1]}}, "n": None}, "push non existing arrray2"),
- (example, {"b": 1}, {"a": [1, "1", 1], "d": {}, "b": [1], "n": None}, "push non existing arrray3"),
- (example, {"a.6": 1}, {"a": [1, "1", 1, None, None, None, [1]], "d": {}, "n": None},
- "push non existing arrray"),
- (example, {"a": 2}, {"a": [1, "1", 1, 2], "d": {}, "n": None}, "push one item"),
- (example, {"a": {1: 1}}, {"a": [1, "1", 1, {1: 1}], "d": {}, "n": None}, "push a dict"),
+ (
+ example,
+ {"d.b.c": 1},
+ {"a": [1, "1", 1], "d": {"b": {"c": [1]}}, "n": None},
+            "push non existing array2",
+ ),
+ (
+ example,
+ {"b": 1},
+ {"a": [1, "1", 1], "d": {}, "b": [1], "n": None},
+            "push non existing array3",
+ ),
+ (
+ example,
+ {"a.6": 1},
+ {"a": [1, "1", 1, None, None, None, [1]], "d": {}, "n": None},
+            "push non existing array",
+ ),
+ (
+ example,
+ {"a": 2},
+ {"a": [1, "1", 1, 2], "d": {}, "n": None},
+ "push one item",
+ ),
+ (
+ example,
+ {"a": {1: 1}},
+ {"a": [1, "1", 1, {1: 1}], "d": {}, "n": None},
+ "push a dict",
+ ),
(example, {"d": 1}, None, "push over dict"),
(example, {"n": 1}, None, "push over None"),
)
db_men._find = Mock()
for db_content, push_dict, expected, message in test_set:
db_content = deepcopy(db_content)
- db_men._find.return_value = ((0, db_content), )
+ db_men._find.return_value = ((0, db_content),)
if expected is None:
- self.assertRaises(DbException, db_men.set_one, "table", {}, None, fail_on_empty=False, push=push_dict)
+ self.assertRaises(
+ DbException,
+ db_men.set_one,
+ "table",
+ {},
+ None,
+ fail_on_empty=False,
+ push=push_dict,
+ )
else:
db_men.set_one("table", {}, None, push=push_dict)
self.assertEqual(db_content, expected, message)
example = {"a": [1, "1", 1], "d": {}, "n": None}
test_set = (
# (database content, set-content, expected database content (None=fails), message)
- (example, {"d.b.c": [1]}, {"a": [1, "1", 1], "d": {"b": {"c": [1]}}, "n": None},
- "push non existing arrray2"),
- (example, {"b": [1]}, {"a": [1, "1", 1], "d": {}, "b": [1], "n": None}, "push non existing arrray3"),
- (example, {"a.6": [1]}, {"a": [1, "1", 1, None, None, None, [1]], "d": {}, "n": None},
- "push non existing arrray"),
- (example, {"a": [2, 3]}, {"a": [1, "1", 1, 2, 3], "d": {}, "n": None}, "push two item"),
- (example, {"a": [{1: 1}]}, {"a": [1, "1", 1, {1: 1}], "d": {}, "n": None}, "push a dict"),
+ (
+ example,
+ {"d.b.c": [1]},
+ {"a": [1, "1", 1], "d": {"b": {"c": [1]}}, "n": None},
+            "push non existing array2",
+ ),
+ (
+ example,
+ {"b": [1]},
+ {"a": [1, "1", 1], "d": {}, "b": [1], "n": None},
+            "push non existing array3",
+ ),
+ (
+ example,
+ {"a.6": [1]},
+ {"a": [1, "1", 1, None, None, None, [1]], "d": {}, "n": None},
+            "push non existing array",
+ ),
+ (
+ example,
+ {"a": [2, 3]},
+ {"a": [1, "1", 1, 2, 3], "d": {}, "n": None},
+ "push two item",
+ ),
+ (
+ example,
+ {"a": [{1: 1}]},
+ {"a": [1, "1", 1, {1: 1}], "d": {}, "n": None},
+ "push a dict",
+ ),
(example, {"d": [1]}, None, "push over dict"),
(example, {"n": [1]}, None, "push over None"),
(example, {"a": 1}, None, "invalid push list non an array"),
db_men._find = Mock()
for db_content, push_list, expected, message in test_set:
db_content = deepcopy(db_content)
- db_men._find.return_value = ((0, db_content), )
+ db_men._find.return_value = ((0, db_content),)
if expected is None:
- self.assertRaises(DbException, db_men.set_one, "table", {}, None, fail_on_empty=False,
- push_list=push_list)
+ self.assertRaises(
+ DbException,
+ db_men.set_one,
+ "table",
+ {},
+ None,
+ fail_on_empty=False,
+ push_list=push_list,
+ )
else:
db_men.set_one("table", {}, None, push_list=push_list)
self.assertEqual(db_content, expected, message)
db_men._find = Mock()
for db_content, unset_dict, expected, message in test_set:
db_content = deepcopy(db_content)
- db_men._find.return_value = ((0, db_content), )
+ db_men._find.return_value = ((0, db_content),)
if expected is None:
- self.assertRaises(DbException, db_men.set_one, "table", {}, None, fail_on_empty=False, unset=unset_dict)
+ self.assertRaises(
+ DbException,
+ db_men.set_one,
+ "table",
+ {},
+ None,
+ fail_on_empty=False,
+ unset=unset_dict,
+ )
else:
db_men.set_one("table", {}, None, unset=unset_dict)
self.assertEqual(db_content, expected, message)
def test_mkdir(fs_base):
with pytest.raises(FsException) as excinfo:
fs_base.mkdir(None)
- assert str(excinfo.value).startswith(exception_message("Method 'mkdir' not implemented"))
+ assert str(excinfo.value).startswith(
+ exception_message("Method 'mkdir' not implemented")
+ )
assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR
def test_file_exists(fs_base):
with pytest.raises(FsException) as excinfo:
fs_base.file_exists(None)
- assert str(excinfo.value).startswith(exception_message("Method 'file_exists' not implemented"))
+ assert str(excinfo.value).startswith(
+ exception_message("Method 'file_exists' not implemented")
+ )
assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR
def test_file_size(fs_base):
with pytest.raises(FsException) as excinfo:
fs_base.file_size(None)
- assert str(excinfo.value).startswith(exception_message("Method 'file_size' not implemented"))
+ assert str(excinfo.value).startswith(
+ exception_message("Method 'file_size' not implemented")
+ )
assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR
def test_file_extract(fs_base):
with pytest.raises(FsException) as excinfo:
fs_base.file_extract(None, None)
- assert str(excinfo.value).startswith(exception_message("Method 'file_extract' not implemented"))
+ assert str(excinfo.value).startswith(
+ exception_message("Method 'file_extract' not implemented")
+ )
assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR
def test_file_open(fs_base):
with pytest.raises(FsException) as excinfo:
fs_base.file_open(None, None)
- assert str(excinfo.value).startswith(exception_message("Method 'file_open' not implemented"))
+ assert str(excinfo.value).startswith(
+ exception_message("Method 'file_open' not implemented")
+ )
assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR
def test_file_delete(fs_base):
with pytest.raises(FsException) as excinfo:
fs_base.file_delete(None, None)
- assert str(excinfo.value).startswith(exception_message("Method 'file_delete' not implemented"))
+ assert str(excinfo.value).startswith(
+ exception_message("Method 'file_delete' not implemented")
+ )
assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR
def valid_path():
- return tempfile.gettempdir() + '/'
+ return tempfile.gettempdir() + "/"
def invalid_path():
- return '/#tweeter/'
+ return "/#tweeter/"
@pytest.fixture(scope="function", params=[True, False])
def fs_local(request):
fs = FsLocal(lock=request.param)
- fs.fs_connect({'path': valid_path()})
+ fs.fs_connect({"path": valid_path()})
return fs
def fs_connect_exception_message(path):
- return "storage exception Invalid configuration param at '[storage]': path '{}' does not exist".format(path)
+ return "storage exception Invalid configuration param at '[storage]': path '{}' does not exist".format(
+ path
+ )
def file_open_file_not_found_exception(storage):
- f = storage if isinstance(storage, str) else '/'.join(storage)
+ f = storage if isinstance(storage, str) else "/".join(storage)
return "storage exception File {} does not exist".format(f)
def file_open_io_exception(storage):
- f = storage if isinstance(storage, str) else '/'.join(storage)
+ f = storage if isinstance(storage, str) else "/".join(storage)
return "storage exception File {} cannot be opened".format(f)
def dir_ls_not_a_directory_exception(storage):
- f = storage if isinstance(storage, str) else '/'.join(storage)
+ f = storage if isinstance(storage, str) else "/".join(storage)
return "storage exception File {} does not exist".format(f)
def dir_ls_io_exception(storage):
- f = storage if isinstance(storage, str) else '/'.join(storage)
+ f = storage if isinstance(storage, str) else "/".join(storage)
return "storage exception File {} cannot be opened".format(f)
def test_constructor_without_logger():
fs = FsLocal()
- assert fs.logger == logging.getLogger('fs')
+ assert fs.logger == logging.getLogger("fs")
assert fs.path is None
def test_constructor_with_logger():
- logger_name = 'fs_local'
+ logger_name = "fs_local"
fs = FsLocal(logger_name=logger_name)
assert fs.logger == logging.getLogger(logger_name)
assert fs.path is None
assert params["path"] == valid_path()
-@pytest.mark.parametrize("config, exp_logger, exp_path", [
- ({'logger_name': 'fs_local', 'path': valid_path()}, 'fs_local', valid_path()),
- ({'logger_name': 'fs_local', 'path': valid_path()[:-1]}, 'fs_local', valid_path()),
- ({'path': valid_path()}, 'fs', valid_path()),
- ({'path': valid_path()[:-1]}, 'fs', valid_path())])
+@pytest.mark.parametrize(
+ "config, exp_logger, exp_path",
+ [
+ ({"logger_name": "fs_local", "path": valid_path()}, "fs_local", valid_path()),
+ (
+ {"logger_name": "fs_local", "path": valid_path()[:-1]},
+ "fs_local",
+ valid_path(),
+ ),
+ ({"path": valid_path()}, "fs", valid_path()),
+ ({"path": valid_path()[:-1]}, "fs", valid_path()),
+ ],
+)
def test_fs_connect_with_valid_config(config, exp_logger, exp_path):
fs = FsLocal()
fs.fs_connect(config)
assert fs.path == exp_path
-@pytest.mark.parametrize("config, exp_exception_message", [
- ({'logger_name': 'fs_local', 'path': invalid_path()}, fs_connect_exception_message(invalid_path())),
- ({'logger_name': 'fs_local', 'path': invalid_path()[:-1]}, fs_connect_exception_message(invalid_path()[:-1])),
- ({'path': invalid_path()}, fs_connect_exception_message(invalid_path())),
- ({'path': invalid_path()[:-1]}, fs_connect_exception_message(invalid_path()[:-1]))])
+@pytest.mark.parametrize(
+ "config, exp_exception_message",
+ [
+ (
+ {"logger_name": "fs_local", "path": invalid_path()},
+ fs_connect_exception_message(invalid_path()),
+ ),
+ (
+ {"logger_name": "fs_local", "path": invalid_path()[:-1]},
+ fs_connect_exception_message(invalid_path()[:-1]),
+ ),
+ ({"path": invalid_path()}, fs_connect_exception_message(invalid_path())),
+ (
+ {"path": invalid_path()[:-1]},
+ fs_connect_exception_message(invalid_path()[:-1]),
+ ),
+ ],
+)
def test_fs_connect_with_invalid_path(config, exp_exception_message):
fs = FsLocal()
with pytest.raises(FsException) as excinfo:
assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR
-@pytest.mark.parametrize("storage, mode, expected", [
- (str(uuid.uuid4()), 'file', False),
- ([str(uuid.uuid4())], 'file', False),
- (str(uuid.uuid4()), 'dir', False),
- ([str(uuid.uuid4())], 'dir', False)])
+@pytest.mark.parametrize(
+ "storage, mode, expected",
+ [
+ (str(uuid.uuid4()), "file", False),
+ ([str(uuid.uuid4())], "file", False),
+ (str(uuid.uuid4()), "dir", False),
+ ([str(uuid.uuid4())], "dir", False),
+ ],
+)
def test_file_exists_returns_false(fs_local, storage, mode, expected):
assert fs_local.file_exists(storage, mode) == expected
-@pytest.mark.parametrize("storage, mode, expected", [
- (str(uuid.uuid4()), 'file', True),
- ([str(uuid.uuid4())], 'file', True),
- (str(uuid.uuid4()), 'dir', True),
- ([str(uuid.uuid4())], 'dir', True)])
+@pytest.mark.parametrize(
+ "storage, mode, expected",
+ [
+ (str(uuid.uuid4()), "file", True),
+ ([str(uuid.uuid4())], "file", True),
+ (str(uuid.uuid4()), "dir", True),
+ ([str(uuid.uuid4())], "dir", True),
+ ],
+)
def test_file_exists_returns_true(fs_local, storage, mode, expected):
- path = valid_path() + storage if isinstance(storage, str) else valid_path() + storage[0]
- if mode == 'file':
+ path = (
+ valid_path() + storage
+ if isinstance(storage, str)
+ else valid_path() + storage[0]
+ )
+ if mode == "file":
os.mknod(path)
- elif mode == 'dir':
+ elif mode == "dir":
os.mkdir(path)
assert fs_local.file_exists(storage, mode) == expected
- if mode == 'file':
+ if mode == "file":
os.remove(path)
- elif mode == 'dir':
+ elif mode == "dir":
os.rmdir(path)
-@pytest.mark.parametrize("storage, mode", [
- (str(uuid.uuid4()), 'file'),
- ([str(uuid.uuid4())], 'file'),
- (str(uuid.uuid4()), 'dir'),
- ([str(uuid.uuid4())], 'dir')])
+@pytest.mark.parametrize(
+ "storage, mode",
+ [
+ (str(uuid.uuid4()), "file"),
+ ([str(uuid.uuid4())], "file"),
+ (str(uuid.uuid4()), "dir"),
+ ([str(uuid.uuid4())], "dir"),
+ ],
+)
def test_file_size(fs_local, storage, mode):
- path = valid_path() + storage if isinstance(storage, str) else valid_path() + storage[0]
- if mode == 'file':
+ path = (
+ valid_path() + storage
+ if isinstance(storage, str)
+ else valid_path() + storage[0]
+ )
+ if mode == "file":
os.mknod(path)
- elif mode == 'dir':
+ elif mode == "dir":
os.mkdir(path)
size = os.path.getsize(path)
assert fs_local.file_size(storage) == size
- if mode == 'file':
+ if mode == "file":
os.remove(path)
- elif mode == 'dir':
+ elif mode == "dir":
os.rmdir(path)
-@pytest.mark.parametrize("files, path", [
- (['foo', 'bar', 'foobar'], str(uuid.uuid4())),
- (['foo', 'bar', 'foobar'], [str(uuid.uuid4())])])
+@pytest.mark.parametrize(
+ "files, path",
+ [
+ (["foo", "bar", "foobar"], str(uuid.uuid4())),
+ (["foo", "bar", "foobar"], [str(uuid.uuid4())]),
+ ],
+)
def test_file_extract(fs_local, files, path):
for f in files:
os.mknod(valid_path() + f)
- tar_path = valid_path() + str(uuid.uuid4()) + '.tar'
- with tarfile.open(tar_path, 'w') as tar:
+ tar_path = valid_path() + str(uuid.uuid4()) + ".tar"
+ with tarfile.open(tar_path, "w") as tar:
for f in files:
tar.add(valid_path() + f, arcname=f)
- with tarfile.open(tar_path, 'r') as tar:
+ with tarfile.open(tar_path, "r") as tar:
fs_local.file_extract(tar, path)
- extracted_path = valid_path() + (path if isinstance(path, str) else '/'.join(path))
+ extracted_path = valid_path() + (path if isinstance(path, str) else "/".join(path))
ls_dir = os.listdir(extracted_path)
assert len(ls_dir) == len(files)
for f in files:
shutil.rmtree(extracted_path)
-@pytest.mark.parametrize("storage, mode", [
- (str(uuid.uuid4()), 'r'),
- (str(uuid.uuid4()), 'w'),
- (str(uuid.uuid4()), 'a'),
- (str(uuid.uuid4()), 'rb'),
- (str(uuid.uuid4()), 'wb'),
- (str(uuid.uuid4()), 'ab'),
- ([str(uuid.uuid4())], 'r'),
- ([str(uuid.uuid4())], 'w'),
- ([str(uuid.uuid4())], 'a'),
- ([str(uuid.uuid4())], 'rb'),
- ([str(uuid.uuid4())], 'wb'),
- ([str(uuid.uuid4())], 'ab')])
+@pytest.mark.parametrize(
+ "storage, mode",
+ [
+ (str(uuid.uuid4()), "r"),
+ (str(uuid.uuid4()), "w"),
+ (str(uuid.uuid4()), "a"),
+ (str(uuid.uuid4()), "rb"),
+ (str(uuid.uuid4()), "wb"),
+ (str(uuid.uuid4()), "ab"),
+ ([str(uuid.uuid4())], "r"),
+ ([str(uuid.uuid4())], "w"),
+ ([str(uuid.uuid4())], "a"),
+ ([str(uuid.uuid4())], "rb"),
+ ([str(uuid.uuid4())], "wb"),
+ ([str(uuid.uuid4())], "ab"),
+ ],
+)
def test_file_open(fs_local, storage, mode):
- path = valid_path() + storage if isinstance(storage, str) else valid_path() + storage[0]
+ path = (
+ valid_path() + storage
+ if isinstance(storage, str)
+ else valid_path() + storage[0]
+ )
os.mknod(path)
file_obj = fs_local.file_open(storage, mode)
assert isinstance(file_obj, io.IOBase)
os.remove(path)
-@pytest.mark.parametrize("storage, mode", [
- (str(uuid.uuid4()), 'r'),
- (str(uuid.uuid4()), 'rb'),
- ([str(uuid.uuid4())], 'r'),
- ([str(uuid.uuid4())], 'rb')])
+@pytest.mark.parametrize(
+ "storage, mode",
+ [
+ (str(uuid.uuid4()), "r"),
+ (str(uuid.uuid4()), "rb"),
+ ([str(uuid.uuid4())], "r"),
+ ([str(uuid.uuid4())], "rb"),
+ ],
+)
def test_file_open_file_not_found_exception(fs_local, storage, mode):
with pytest.raises(FsException) as excinfo:
fs_local.file_open(storage, mode)
assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND
-@pytest.mark.parametrize("storage, mode", [
- (str(uuid.uuid4()), 'r'),
- (str(uuid.uuid4()), 'w'),
- (str(uuid.uuid4()), 'a'),
- (str(uuid.uuid4()), 'rb'),
- (str(uuid.uuid4()), 'wb'),
- (str(uuid.uuid4()), 'ab'),
- ([str(uuid.uuid4())], 'r'),
- ([str(uuid.uuid4())], 'w'),
- ([str(uuid.uuid4())], 'a'),
- ([str(uuid.uuid4())], 'rb'),
- ([str(uuid.uuid4())], 'wb'),
- ([str(uuid.uuid4())], 'ab')])
+@pytest.mark.parametrize(
+ "storage, mode",
+ [
+ (str(uuid.uuid4()), "r"),
+ (str(uuid.uuid4()), "w"),
+ (str(uuid.uuid4()), "a"),
+ (str(uuid.uuid4()), "rb"),
+ (str(uuid.uuid4()), "wb"),
+ (str(uuid.uuid4()), "ab"),
+ ([str(uuid.uuid4())], "r"),
+ ([str(uuid.uuid4())], "w"),
+ ([str(uuid.uuid4())], "a"),
+ ([str(uuid.uuid4())], "rb"),
+ ([str(uuid.uuid4())], "wb"),
+ ([str(uuid.uuid4())], "ab"),
+ ],
+)
def test_file_open_io_error(fs_local, storage, mode):
- path = valid_path() + storage if isinstance(storage, str) else valid_path() + storage[0]
+ path = (
+ valid_path() + storage
+ if isinstance(storage, str)
+ else valid_path() + storage[0]
+ )
os.mknod(path)
os.chmod(path, 0)
with pytest.raises(FsException) as excinfo:
os.remove(path)
-@pytest.mark.parametrize("storage, with_files", [
- (str(uuid.uuid4()), True),
- (str(uuid.uuid4()), False),
- ([str(uuid.uuid4())], True),
- ([str(uuid.uuid4())], False)])
+@pytest.mark.parametrize(
+ "storage, with_files",
+ [
+ (str(uuid.uuid4()), True),
+ (str(uuid.uuid4()), False),
+ ([str(uuid.uuid4())], True),
+ ([str(uuid.uuid4())], False),
+ ],
+)
def test_dir_ls(fs_local, storage, with_files):
- path = valid_path() + storage if isinstance(storage, str) else valid_path() + storage[0]
+ path = (
+ valid_path() + storage
+ if isinstance(storage, str)
+ else valid_path() + storage[0]
+ )
os.mkdir(path)
if with_files is True:
file_name = str(uuid.uuid4())
- file_path = path + '/' + file_name
+ file_path = path + "/" + file_name
os.mknod(file_path)
result = fs_local.dir_ls(storage)
shutil.rmtree(path)
-@pytest.mark.parametrize("storage", [
- (str(uuid.uuid4())),
- ([str(uuid.uuid4())])])
+@pytest.mark.parametrize("storage", [(str(uuid.uuid4())), ([str(uuid.uuid4())])])
def test_dir_ls_with_not_a_directory_error(fs_local, storage):
- path = valid_path() + storage if isinstance(storage, str) else valid_path() + storage[0]
+ path = (
+ valid_path() + storage
+ if isinstance(storage, str)
+ else valid_path() + storage[0]
+ )
os.mknod(path)
with pytest.raises(FsException) as excinfo:
fs_local.dir_ls(storage)
os.remove(path)
-@pytest.mark.parametrize("storage", [
- (str(uuid.uuid4())),
- ([str(uuid.uuid4())])])
+@pytest.mark.parametrize("storage", [(str(uuid.uuid4())), ([str(uuid.uuid4())])])
def test_dir_ls_with_io_error(fs_local, storage):
- path = valid_path() + storage if isinstance(storage, str) else valid_path() + storage[0]
+ path = (
+ valid_path() + storage
+ if isinstance(storage, str)
+ else valid_path() + storage[0]
+ )
os.mkdir(path)
os.chmod(path, 0)
with pytest.raises(FsException) as excinfo:
os.rmdir(path)
-@pytest.mark.parametrize("storage, with_files, ignore_non_exist", [
- (str(uuid.uuid4()), True, True),
- (str(uuid.uuid4()), False, True),
- (str(uuid.uuid4()), True, False),
- (str(uuid.uuid4()), False, False),
- ([str(uuid.uuid4())], True, True),
- ([str(uuid.uuid4())], False, True),
- ([str(uuid.uuid4())], True, False),
- ([str(uuid.uuid4())], False, False)])
+@pytest.mark.parametrize(
+ "storage, with_files, ignore_non_exist",
+ [
+ (str(uuid.uuid4()), True, True),
+ (str(uuid.uuid4()), False, True),
+ (str(uuid.uuid4()), True, False),
+ (str(uuid.uuid4()), False, False),
+ ([str(uuid.uuid4())], True, True),
+ ([str(uuid.uuid4())], False, True),
+ ([str(uuid.uuid4())], True, False),
+ ([str(uuid.uuid4())], False, False),
+ ],
+)
def test_file_delete_with_dir(fs_local, storage, with_files, ignore_non_exist):
- path = valid_path() + storage if isinstance(storage, str) else valid_path() + storage[0]
+ path = (
+ valid_path() + storage
+ if isinstance(storage, str)
+ else valid_path() + storage[0]
+ )
os.mkdir(path)
if with_files is True:
- file_path = path + '/' + str(uuid.uuid4())
+ file_path = path + "/" + str(uuid.uuid4())
os.mknod(file_path)
fs_local.file_delete(storage, ignore_non_exist)
assert os.path.exists(path) is False
-@pytest.mark.parametrize("storage", [
- (str(uuid.uuid4())),
- ([str(uuid.uuid4())])])
+@pytest.mark.parametrize("storage", [(str(uuid.uuid4())), ([str(uuid.uuid4())])])
def test_file_delete_expect_exception(fs_local, storage):
with pytest.raises(FsException) as excinfo:
fs_local.file_delete(storage)
assert excinfo.value.http_code == http.HTTPStatus.NOT_FOUND
-@pytest.mark.parametrize("storage", [
- (str(uuid.uuid4())),
- ([str(uuid.uuid4())])])
+@pytest.mark.parametrize("storage", [(str(uuid.uuid4())), ([str(uuid.uuid4())])])
def test_file_delete_no_exception(fs_local, storage):
- path = valid_path() + storage if isinstance(storage, str) else valid_path() + storage[0]
+ path = (
+ valid_path() + storage
+ if isinstance(storage, str)
+ else valid_path() + storage[0]
+ )
fs_local.file_delete(storage, ignore_non_exist=True)
assert os.path.exists(path) is False
def valid_path():
- return tempfile.gettempdir() + '/'
+ return tempfile.gettempdir() + "/"
def invalid_path():
- return '/#tweeter/'
+ return "/#tweeter/"
@pytest.fixture(scope="function", params=[True, False])
def mock_gridfs_constructor(a, b):
pass
- monkeypatch.setattr(MongoClient, '__init__', mock_mongoclient_constructor)
- monkeypatch.setattr(MongoClient, '__getitem__', mock_mongoclient_getitem)
- monkeypatch.setattr(GridFSBucket, '__init__', mock_gridfs_constructor)
+ monkeypatch.setattr(MongoClient, "__init__", mock_mongoclient_constructor)
+ monkeypatch.setattr(MongoClient, "__getitem__", mock_mongoclient_getitem)
+ monkeypatch.setattr(GridFSBucket, "__init__", mock_gridfs_constructor)
fs = FsMongo(lock=request.param)
- fs.fs_connect({
- 'path': valid_path(),
- 'host': 'mongo',
- 'port': 27017,
- 'collection': 'files'})
+ fs.fs_connect(
+ {"path": valid_path(), "host": "mongo", "port": 27017, "collection": "files"}
+ )
return fs
def fs_connect_exception_message(path):
- return "storage exception Invalid configuration param at '[storage]': path '{}' does not exist".format(path)
+ return "storage exception Invalid configuration param at '[storage]': path '{}' does not exist".format(
+ path
+ )
def file_open_file_not_found_exception(storage):
- f = storage if isinstance(storage, str) else '/'.join(storage)
+ f = storage if isinstance(storage, str) else "/".join(storage)
return "storage exception File {} does not exist".format(f)
def file_open_io_exception(storage):
- f = storage if isinstance(storage, str) else '/'.join(storage)
+ f = storage if isinstance(storage, str) else "/".join(storage)
return "storage exception File {} cannot be opened".format(f)
def dir_ls_not_a_directory_exception(storage):
- f = storage if isinstance(storage, str) else '/'.join(storage)
+ f = storage if isinstance(storage, str) else "/".join(storage)
return "storage exception File {} does not exist".format(f)
def dir_ls_io_exception(storage):
- f = storage if isinstance(storage, str) else '/'.join(storage)
+ f = storage if isinstance(storage, str) else "/".join(storage)
return "storage exception File {} cannot be opened".format(f)
def test_constructor_without_logger():
fs = FsMongo()
- assert fs.logger == logging.getLogger('fs')
+ assert fs.logger == logging.getLogger("fs")
assert fs.path is None
assert fs.client is None
assert fs.fs is None
def test_constructor_with_logger():
- logger_name = 'fs_mongo'
+ logger_name = "fs_mongo"
fs = FsMongo(logger_name=logger_name)
assert fs.logger == logging.getLogger(logger_name)
assert fs.path is None
def mock_gridfs_find(self, search_query, **kwargs):
return []
- monkeypatch.setattr(GridFSBucket, 'find', mock_gridfs_find)
+ monkeypatch.setattr(GridFSBucket, "find", mock_gridfs_find)
params = fs_mongo.get_params()
assert len(params) == 2
assert "fs" in params
assert params["path"] == valid_path()
-@pytest.mark.parametrize("config, exp_logger, exp_path", [
- (
- {
- 'logger_name': 'fs_mongo',
- 'path': valid_path(),
- 'uri': 'mongo:27017',
- 'collection': 'files'
- },
- 'fs_mongo', valid_path()
- ),
- (
- {
- 'logger_name': 'fs_mongo',
- 'path': valid_path(),
- 'host': 'mongo',
- 'port': 27017,
- 'collection': 'files'
- },
- 'fs_mongo', valid_path()
- ),
- (
- {
- 'logger_name': 'fs_mongo',
- 'path': valid_path()[:-1],
- 'uri': 'mongo:27017',
- 'collection': 'files'
- },
- 'fs_mongo', valid_path()
- ),
- (
- {
- 'logger_name': 'fs_mongo',
- 'path': valid_path()[:-1],
- 'host': 'mongo',
- 'port': 27017,
- 'collection': 'files'
- },
- 'fs_mongo', valid_path()
- ),
- (
- {
- 'path': valid_path(),
- 'uri': 'mongo:27017',
- 'collection': 'files'
- },
- 'fs', valid_path()
- ),
- (
- {
- 'path': valid_path(),
- 'host': 'mongo',
- 'port': 27017,
- 'collection': 'files'
- },
- 'fs', valid_path()
- ),
- (
- {
- 'path': valid_path()[:-1],
- 'uri': 'mongo:27017',
- 'collection': 'files'
- },
- 'fs', valid_path()
- ),
- (
- {
- 'path': valid_path()[:-1],
- 'host': 'mongo',
- 'port': 27017,
- 'collection': 'files'
- },
- 'fs', valid_path()
- )])
+@pytest.mark.parametrize(
+ "config, exp_logger, exp_path",
+ [
+ (
+ {
+ "logger_name": "fs_mongo",
+ "path": valid_path(),
+ "uri": "mongo:27017",
+ "collection": "files",
+ },
+ "fs_mongo",
+ valid_path(),
+ ),
+ (
+ {
+ "logger_name": "fs_mongo",
+ "path": valid_path(),
+ "host": "mongo",
+ "port": 27017,
+ "collection": "files",
+ },
+ "fs_mongo",
+ valid_path(),
+ ),
+ (
+ {
+ "logger_name": "fs_mongo",
+ "path": valid_path()[:-1],
+ "uri": "mongo:27017",
+ "collection": "files",
+ },
+ "fs_mongo",
+ valid_path(),
+ ),
+ (
+ {
+ "logger_name": "fs_mongo",
+ "path": valid_path()[:-1],
+ "host": "mongo",
+ "port": 27017,
+ "collection": "files",
+ },
+ "fs_mongo",
+ valid_path(),
+ ),
+ (
+ {"path": valid_path(), "uri": "mongo:27017", "collection": "files"},
+ "fs",
+ valid_path(),
+ ),
+ (
+ {
+ "path": valid_path(),
+ "host": "mongo",
+ "port": 27017,
+ "collection": "files",
+ },
+ "fs",
+ valid_path(),
+ ),
+ (
+ {"path": valid_path()[:-1], "uri": "mongo:27017", "collection": "files"},
+ "fs",
+ valid_path(),
+ ),
+ (
+ {
+ "path": valid_path()[:-1],
+ "host": "mongo",
+ "port": 27017,
+ "collection": "files",
+ },
+ "fs",
+ valid_path(),
+ ),
+ ],
+)
def test_fs_connect_with_valid_config(config, exp_logger, exp_path):
fs = FsMongo()
fs.fs_connect(config)
assert type(fs.fs) == GridFSBucket
-@pytest.mark.parametrize("config, exp_exception_message", [
- (
- {
- 'logger_name': 'fs_mongo',
- 'path': invalid_path(),
- 'uri': 'mongo:27017',
- 'collection': 'files'
- },
- fs_connect_exception_message(invalid_path())
- ),
- (
- {
- 'logger_name': 'fs_mongo',
- 'path': invalid_path(),
- 'host': 'mongo',
- 'port': 27017,
- 'collection': 'files'
- },
- fs_connect_exception_message(invalid_path())
- ),
- (
- {
- 'logger_name': 'fs_mongo',
- 'path': invalid_path()[:-1],
- 'uri': 'mongo:27017',
- 'collection': 'files'
- },
- fs_connect_exception_message(invalid_path()[:-1])
- ),
- (
- {
- 'logger_name': 'fs_mongo',
- 'path': invalid_path()[:-1],
- 'host': 'mongo',
- 'port': 27017,
- 'collection': 'files'
- },
- fs_connect_exception_message(invalid_path()[:-1])
- ),
- (
- {
- 'path': invalid_path(),
- 'uri': 'mongo:27017',
- 'collection': 'files'
- },
- fs_connect_exception_message(invalid_path())
- ),
- (
- {
- 'path': invalid_path(),
- 'host': 'mongo',
- 'port': 27017,
- 'collection': 'files'
- },
- fs_connect_exception_message(invalid_path())
- ),
- (
- {
- 'path': invalid_path()[:-1],
- 'uri': 'mongo:27017',
- 'collection': 'files'
- },
- fs_connect_exception_message(invalid_path()[:-1])
- ),
- (
- {
- 'path': invalid_path()[:-1],
- 'host': 'mongo',
- 'port': 27017,
- 'collection': 'files'
- },
- fs_connect_exception_message(invalid_path()[:-1])
- ),
- (
- {
- 'path': '/',
- 'host': 'mongo',
- 'port': 27017,
- 'collection': 'files'
- },
- generic_fs_exception_message(
- "Invalid configuration param at '[storage]': path '/' is not writable"
- )
- )])
+@pytest.mark.parametrize(
+ "config, exp_exception_message",
+ [
+ (
+ {
+ "logger_name": "fs_mongo",
+ "path": invalid_path(),
+ "uri": "mongo:27017",
+ "collection": "files",
+ },
+ fs_connect_exception_message(invalid_path()),
+ ),
+ (
+ {
+ "logger_name": "fs_mongo",
+ "path": invalid_path(),
+ "host": "mongo",
+ "port": 27017,
+ "collection": "files",
+ },
+ fs_connect_exception_message(invalid_path()),
+ ),
+ (
+ {
+ "logger_name": "fs_mongo",
+ "path": invalid_path()[:-1],
+ "uri": "mongo:27017",
+ "collection": "files",
+ },
+ fs_connect_exception_message(invalid_path()[:-1]),
+ ),
+ (
+ {
+ "logger_name": "fs_mongo",
+ "path": invalid_path()[:-1],
+ "host": "mongo",
+ "port": 27017,
+ "collection": "files",
+ },
+ fs_connect_exception_message(invalid_path()[:-1]),
+ ),
+ (
+ {"path": invalid_path(), "uri": "mongo:27017", "collection": "files"},
+ fs_connect_exception_message(invalid_path()),
+ ),
+ (
+ {
+ "path": invalid_path(),
+ "host": "mongo",
+ "port": 27017,
+ "collection": "files",
+ },
+ fs_connect_exception_message(invalid_path()),
+ ),
+ (
+ {"path": invalid_path()[:-1], "uri": "mongo:27017", "collection": "files"},
+ fs_connect_exception_message(invalid_path()[:-1]),
+ ),
+ (
+ {
+ "path": invalid_path()[:-1],
+ "host": "mongo",
+ "port": 27017,
+ "collection": "files",
+ },
+ fs_connect_exception_message(invalid_path()[:-1]),
+ ),
+ (
+ {"path": "/", "host": "mongo", "port": 27017, "collection": "files"},
+ generic_fs_exception_message(
+ "Invalid configuration param at '[storage]': path '/' is not writable"
+ ),
+ ),
+ ],
+)
def test_fs_connect_with_invalid_path(config, exp_exception_message):
fs = FsMongo()
with pytest.raises(FsException) as excinfo:
assert str(excinfo.value) == exp_exception_message
-@pytest.mark.parametrize("config, exp_exception_message", [
- (
- {
- 'logger_name': 'fs_mongo',
- 'uri': 'mongo:27017',
- 'collection': 'files'
- },
- "Missing parameter \"path\""
- ),
- (
- {
- 'logger_name': 'fs_mongo',
- 'host': 'mongo',
- 'port': 27017,
- 'collection': 'files'
- },
- "Missing parameter \"path\""
- ),
- (
- {
- 'logger_name': 'fs_mongo',
- 'path': valid_path(),
- 'collection': 'files'
- },
- "Missing parameters: \"uri\" or \"host\" + \"port\""
- ),
- (
- {
- 'logger_name': 'fs_mongo',
- 'path': valid_path(),
- 'port': 27017,
- 'collection': 'files'
- },
- "Missing parameters: \"uri\" or \"host\" + \"port\""
- ),
- (
- {
- 'logger_name': 'fs_mongo',
- 'path': valid_path(),
- 'host': 'mongo',
- 'collection': 'files'
- },
- "Missing parameters: \"uri\" or \"host\" + \"port\""
- ),
- (
- {
- 'logger_name': 'fs_mongo',
- 'path': valid_path(),
- 'uri': 'mongo:27017'
- },
- "Missing parameter \"collection\""
- ),
- (
- {
- 'logger_name': 'fs_mongo',
- 'path': valid_path(),
- 'host': 'mongo',
- 'port': 27017,
- },
- "Missing parameter \"collection\""
- )])
+@pytest.mark.parametrize(
+ "config, exp_exception_message",
+ [
+ (
+ {"logger_name": "fs_mongo", "uri": "mongo:27017", "collection": "files"},
+ 'Missing parameter "path"',
+ ),
+ (
+ {
+ "logger_name": "fs_mongo",
+ "host": "mongo",
+ "port": 27017,
+ "collection": "files",
+ },
+ 'Missing parameter "path"',
+ ),
+ (
+ {"logger_name": "fs_mongo", "path": valid_path(), "collection": "files"},
+ 'Missing parameters: "uri" or "host" + "port"',
+ ),
+ (
+ {
+ "logger_name": "fs_mongo",
+ "path": valid_path(),
+ "port": 27017,
+ "collection": "files",
+ },
+ 'Missing parameters: "uri" or "host" + "port"',
+ ),
+ (
+ {
+ "logger_name": "fs_mongo",
+ "path": valid_path(),
+ "host": "mongo",
+ "collection": "files",
+ },
+ 'Missing parameters: "uri" or "host" + "port"',
+ ),
+ (
+ {"logger_name": "fs_mongo", "path": valid_path(), "uri": "mongo:27017"},
+ 'Missing parameter "collection"',
+ ),
+ (
+ {
+ "logger_name": "fs_mongo",
+ "path": valid_path(),
+ "host": "mongo",
+ "port": 27017,
+ },
+ 'Missing parameter "collection"',
+ ),
+ ],
+)
def test_fs_connect_with_missing_parameters(config, exp_exception_message):
fs = FsMongo()
with pytest.raises(FsException) as excinfo:
assert str(excinfo.value) == generic_fs_exception_message(exp_exception_message)
-@pytest.mark.parametrize("config, exp_exception_message", [
- (
- {
- 'logger_name': 'fs_mongo',
- 'path': valid_path(),
- 'uri': 'mongo:27017',
- 'collection': 'files'
- },
- "MongoClient crashed"
- ),
- (
- {
- 'logger_name': 'fs_mongo',
- 'path': valid_path(),
- 'host': 'mongo',
- 'port': 27017,
- 'collection': 'files'
- },
- "MongoClient crashed"
- )])
-def test_fs_connect_with_invalid_mongoclient(config, exp_exception_message, monkeypatch):
+@pytest.mark.parametrize(
+ "config, exp_exception_message",
+ [
+ (
+ {
+ "logger_name": "fs_mongo",
+ "path": valid_path(),
+ "uri": "mongo:27017",
+ "collection": "files",
+ },
+ "MongoClient crashed",
+ ),
+ (
+ {
+ "logger_name": "fs_mongo",
+ "path": valid_path(),
+ "host": "mongo",
+ "port": 27017,
+ "collection": "files",
+ },
+ "MongoClient crashed",
+ ),
+ ],
+)
+def test_fs_connect_with_invalid_mongoclient(
+ config, exp_exception_message, monkeypatch
+):
def generate_exception(a, b, c=None):
raise Exception(exp_exception_message)
- monkeypatch.setattr(MongoClient, '__init__', generate_exception)
+ monkeypatch.setattr(MongoClient, "__init__", generate_exception)
fs = FsMongo()
with pytest.raises(FsException) as excinfo:
assert str(excinfo.value) == generic_fs_exception_message(exp_exception_message)
-@pytest.mark.parametrize("config, exp_exception_message", [
- (
- {
- 'logger_name': 'fs_mongo',
- 'path': valid_path(),
- 'uri': 'mongo:27017',
- 'collection': 'files'
- },
- "Collection unavailable"
- ),
- (
- {
- 'logger_name': 'fs_mongo',
- 'path': valid_path(),
- 'host': 'mongo',
- 'port': 27017,
- 'collection': 'files'
- },
- "Collection unavailable"
- )])
-def test_fs_connect_with_invalid_mongo_collection(config, exp_exception_message, monkeypatch):
+@pytest.mark.parametrize(
+ "config, exp_exception_message",
+ [
+ (
+ {
+ "logger_name": "fs_mongo",
+ "path": valid_path(),
+ "uri": "mongo:27017",
+ "collection": "files",
+ },
+ "Collection unavailable",
+ ),
+ (
+ {
+ "logger_name": "fs_mongo",
+ "path": valid_path(),
+ "host": "mongo",
+ "port": 27017,
+ "collection": "files",
+ },
+ "Collection unavailable",
+ ),
+ ],
+)
+def test_fs_connect_with_invalid_mongo_collection(
+ config, exp_exception_message, monkeypatch
+):
def mock_mongoclient_constructor(a, b, c=None):
pass
def generate_exception(a, b):
raise Exception(exp_exception_message)
- monkeypatch.setattr(MongoClient, '__init__', mock_mongoclient_constructor)
- monkeypatch.setattr(MongoClient, '__getitem__', generate_exception)
+ monkeypatch.setattr(MongoClient, "__init__", mock_mongoclient_constructor)
+ monkeypatch.setattr(MongoClient, "__getitem__", generate_exception)
fs = FsMongo()
with pytest.raises(FsException) as excinfo:
assert str(excinfo.value) == generic_fs_exception_message(exp_exception_message)
-@pytest.mark.parametrize("config, exp_exception_message", [
- (
- {
- 'logger_name': 'fs_mongo',
- 'path': valid_path(),
- 'uri': 'mongo:27017',
- 'collection': 'files'
- },
- "GridFsBucket crashed"
- ),
- (
- {
- 'logger_name': 'fs_mongo',
- 'path': valid_path(),
- 'host': 'mongo',
- 'port': 27017,
- 'collection': 'files'
- },
- "GridFsBucket crashed"
- )])
-def test_fs_connect_with_invalid_gridfsbucket(config, exp_exception_message, monkeypatch):
+@pytest.mark.parametrize(
+ "config, exp_exception_message",
+ [
+ (
+ {
+ "logger_name": "fs_mongo",
+ "path": valid_path(),
+ "uri": "mongo:27017",
+ "collection": "files",
+ },
+ "GridFsBucket crashed",
+ ),
+ (
+ {
+ "logger_name": "fs_mongo",
+ "path": valid_path(),
+ "host": "mongo",
+ "port": 27017,
+ "collection": "files",
+ },
+ "GridFsBucket crashed",
+ ),
+ ],
+)
+def test_fs_connect_with_invalid_gridfsbucket(
+ config, exp_exception_message, monkeypatch
+):
def mock_mongoclient_constructor(a, b, c=None):
pass
def generate_exception(a, b):
raise Exception(exp_exception_message)
- monkeypatch.setattr(MongoClient, '__init__', mock_mongoclient_constructor)
- monkeypatch.setattr(MongoClient, '__getitem__', mock_mongoclient_getitem)
- monkeypatch.setattr(GridFSBucket, '__init__', generate_exception)
+ monkeypatch.setattr(MongoClient, "__init__", mock_mongoclient_constructor)
+ monkeypatch.setattr(MongoClient, "__getitem__", mock_mongoclient_getitem)
+ monkeypatch.setattr(GridFSBucket, "__init__", generate_exception)
fs = FsMongo()
with pytest.raises(FsException) as excinfo:
class FakeFS:
- directory_metadata = {'type': 'dir', 'permissions': 509}
- file_metadata = {'type': 'file', 'permissions': 436}
- symlink_metadata = {'type': 'sym', 'permissions': 511}
+ directory_metadata = {"type": "dir", "permissions": 509}
+ file_metadata = {"type": "file", "permissions": 436}
+ symlink_metadata = {"type": "sym", "permissions": 511}
tar_info = {
1: {
- "cursor": FakeCursor(1, 'example_tar', directory_metadata),
+ "cursor": FakeCursor(1, "example_tar", directory_metadata),
"metadata": directory_metadata,
- "stream_content": b'',
+ "stream_content": b"",
"stream_content_bad": b"Something",
- "path": './tmp/example_tar',
+ "path": "./tmp/example_tar",
},
2: {
- "cursor": FakeCursor(2, 'example_tar/directory', directory_metadata),
+ "cursor": FakeCursor(2, "example_tar/directory", directory_metadata),
"metadata": directory_metadata,
- "stream_content": b'',
+ "stream_content": b"",
"stream_content_bad": b"Something",
- "path": './tmp/example_tar/directory',
+ "path": "./tmp/example_tar/directory",
},
3: {
- "cursor": FakeCursor(3, 'example_tar/symlinks', directory_metadata),
+ "cursor": FakeCursor(3, "example_tar/symlinks", directory_metadata),
"metadata": directory_metadata,
- "stream_content": b'',
+ "stream_content": b"",
"stream_content_bad": b"Something",
- "path": './tmp/example_tar/symlinks',
+ "path": "./tmp/example_tar/symlinks",
},
4: {
- "cursor": FakeCursor(4, 'example_tar/directory/file', file_metadata),
+ "cursor": FakeCursor(4, "example_tar/directory/file", file_metadata),
"metadata": file_metadata,
"stream_content": b"Example test",
"stream_content_bad": b"Example test2",
- "path": './tmp/example_tar/directory/file',
+ "path": "./tmp/example_tar/directory/file",
},
5: {
- "cursor": FakeCursor(5, 'example_tar/symlinks/file_link', symlink_metadata),
+ "cursor": FakeCursor(5, "example_tar/symlinks/file_link", symlink_metadata),
"metadata": symlink_metadata,
"stream_content": b"../directory/file",
"stream_content_bad": b"",
- "path": './tmp/example_tar/symlinks/file_link',
+ "path": "./tmp/example_tar/symlinks/file_link",
},
6: {
- "cursor": FakeCursor(6, 'example_tar/symlinks/directory_link', symlink_metadata),
+ "cursor": FakeCursor(
+ 6, "example_tar/symlinks/directory_link", symlink_metadata
+ ),
"metadata": symlink_metadata,
"stream_content": b"../directory/",
"stream_content_bad": b"",
- "path": './tmp/example_tar/symlinks/directory_link',
- }
+ "path": "./tmp/example_tar/symlinks/directory_link",
+ },
}
def upload_from_stream(self, f, stream, metadata=None):
os.mkdir(path)
os.mkdir("{}example_local".format(path))
os.mkdir("{}example_local/directory".format(path))
- with open("{}example_local/directory/test_file".format(path), "w+") as test_file:
+ with open(
+ "{}example_local/directory/test_file".format(path), "w+"
+ ) as test_file:
test_file.write(file_content)
fs.reverse_sync("example_local")
def test_write(msg_base):
with pytest.raises(MsgException) as excinfo:
msg_base.write("test", "test", "test")
- assert str(excinfo.value).startswith(exception_message("Method 'write' not implemented"))
+ assert str(excinfo.value).startswith(
+ exception_message("Method 'write' not implemented")
+ )
assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR
def test_read(msg_base):
with pytest.raises(MsgException) as excinfo:
msg_base.read("test")
- assert str(excinfo.value).startswith(exception_message("Method 'read' not implemented"))
+ assert str(excinfo.value).startswith(
+ exception_message("Method 'read' not implemented")
+ )
assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR
def test_aiowrite(msg_base, event_loop):
with pytest.raises(MsgException) as excinfo:
- event_loop.run_until_complete(msg_base.aiowrite("test", "test", "test", event_loop))
- assert str(excinfo.value).startswith(exception_message("Method 'aiowrite' not implemented"))
+ event_loop.run_until_complete(
+ msg_base.aiowrite("test", "test", "test", event_loop)
+ )
+ assert str(excinfo.value).startswith(
+ exception_message("Method 'aiowrite' not implemented")
+ )
assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR
def test_aioread(msg_base, event_loop):
with pytest.raises(MsgException) as excinfo:
event_loop.run_until_complete(msg_base.aioread("test", event_loop))
- assert str(excinfo.value).startswith(exception_message("Method 'aioread' not implemented"))
+ assert str(excinfo.value).startswith(
+ exception_message("Method 'aioread' not implemented")
+ )
assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR
def valid_path():
- return tempfile.gettempdir() + '/'
+ return tempfile.gettempdir() + "/"
def invalid_path():
- return '/#tweeter/'
+ return "/#tweeter/"
@pytest.fixture(scope="function", params=[True, False])
def test_constructor():
msg = MsgLocal()
- assert msg.logger == logging.getLogger('msg')
+ assert msg.logger == logging.getLogger("msg")
assert msg.path is None
assert len(msg.files_read) == 0
assert len(msg.files_write) == 0
def test_constructor_with_logger():
- logger_name = 'msg_local'
+ logger_name = "msg_local"
msg = MsgLocal(logger_name=logger_name)
assert msg.logger == logging.getLogger(logger_name)
assert msg.path is None
assert len(msg.buffer) == 0
-@pytest.mark.parametrize("config, logger_name, path", [
- ({"logger_name": "msg_local", "path": valid_path()}, "msg_local", valid_path()),
- ({"logger_name": "msg_local", "path": valid_path()[:-1]}, "msg_local", valid_path()),
- ({"logger_name": "msg_local", "path": valid_path() + "test_it/"}, "msg_local", valid_path() + "test_it/"),
- ({"logger_name": "msg_local", "path": valid_path() + "test_it"}, "msg_local", valid_path() + "test_it/"),
- ({"path": valid_path()}, "msg", valid_path()),
- ({"path": valid_path()[:-1]}, "msg", valid_path()),
- ({"path": valid_path() + "test_it/"}, "msg", valid_path() + "test_it/"),
- ({"path": valid_path() + "test_it"}, "msg", valid_path() + "test_it/")])
+@pytest.mark.parametrize(
+ "config, logger_name, path",
+ [
+ ({"logger_name": "msg_local", "path": valid_path()}, "msg_local", valid_path()),
+ (
+ {"logger_name": "msg_local", "path": valid_path()[:-1]},
+ "msg_local",
+ valid_path(),
+ ),
+ (
+ {"logger_name": "msg_local", "path": valid_path() + "test_it/"},
+ "msg_local",
+ valid_path() + "test_it/",
+ ),
+ (
+ {"logger_name": "msg_local", "path": valid_path() + "test_it"},
+ "msg_local",
+ valid_path() + "test_it/",
+ ),
+ ({"path": valid_path()}, "msg", valid_path()),
+ ({"path": valid_path()[:-1]}, "msg", valid_path()),
+ ({"path": valid_path() + "test_it/"}, "msg", valid_path() + "test_it/"),
+ ({"path": valid_path() + "test_it"}, "msg", valid_path() + "test_it/"),
+ ],
+)
def test_connect(msg_local, config, logger_name, path):
msg_local.connect(config)
assert msg_local.logger == logging.getLogger(logger_name)
assert len(msg_local.buffer) == 0
-@pytest.mark.parametrize("config", [
- ({"logger_name": "msg_local", "path": invalid_path()}),
- ({"path": invalid_path()})])
+@pytest.mark.parametrize(
+ "config",
+ [
+ ({"logger_name": "msg_local", "path": invalid_path()}),
+ ({"path": invalid_path()}),
+ ],
+)
def test_connect_with_exception(msg_local, config):
with pytest.raises(MsgException) as excinfo:
msg_local.connect(config)
def test_disconnect_with_read(msg_local_config):
- msg_local_config.read('topic1', blocks=False)
- msg_local_config.read('topic2', blocks=False)
+ msg_local_config.read("topic1", blocks=False)
+ msg_local_config.read("topic2", blocks=False)
files_read = msg_local_config.files_read.copy()
files_write = msg_local_config.files_write.copy()
msg_local_config.disconnect()
for f in files_read.values():
assert f.closed
-
+
for f in files_write.values():
assert f.closed
def test_disconnect_with_read_and_write(msg_local_with_data):
- msg_local_with_data.read('topic1', blocks=False)
- msg_local_with_data.read('topic2', blocks=False)
+ msg_local_with_data.read("topic1", blocks=False)
+ msg_local_with_data.read("topic2", blocks=False)
files_read = msg_local_with_data.files_read.copy()
files_write = msg_local_with_data.files_write.copy()
assert f.closed
-@pytest.mark.parametrize("topic, key, msg", [
- ("test_topic", "test_key", "test_msg"),
- ("test", "test_key", "test_msg"),
- ("test_topic", "test", "test_msg"),
- ("test_topic", "test_key", "test"),
- ("test_topic", "test_list", ["a", "b", "c"]),
- ("test_topic", "test_tuple", ("c", "b", "a")),
- ("test_topic", "test_dict", {"a": 1, "b": 2, "c": 3}),
- ("test_topic", "test_number", 123),
- ("test_topic", "test_float", 1.23),
- ("test_topic", "test_boolean", True),
- ("test_topic", "test_none", None)])
+@pytest.mark.parametrize(
+ "topic, key, msg",
+ [
+ ("test_topic", "test_key", "test_msg"),
+ ("test", "test_key", "test_msg"),
+ ("test_topic", "test", "test_msg"),
+ ("test_topic", "test_key", "test"),
+ ("test_topic", "test_list", ["a", "b", "c"]),
+ ("test_topic", "test_tuple", ("c", "b", "a")),
+ ("test_topic", "test_dict", {"a": 1, "b": 2, "c": 3}),
+ ("test_topic", "test_number", 123),
+ ("test_topic", "test_float", 1.23),
+ ("test_topic", "test_boolean", True),
+ ("test_topic", "test_none", None),
+ ],
+)
def test_write(msg_local_config, topic, key, msg):
file_path = msg_local_config.path + topic
msg_local_config.write(topic, key, msg)
assert os.path.exists(file_path)
- with open(file_path, 'r') as stream:
- assert yaml.safe_load(stream) == {key: msg if not isinstance(msg, tuple) else list(msg)}
-
-
-@pytest.mark.parametrize("topic, key, msg, times", [
- ("test_topic", "test_key", "test_msg", 2),
- ("test", "test_key", "test_msg", 3),
- ("test_topic", "test", "test_msg", 4),
- ("test_topic", "test_key", "test", 2),
- ("test_topic", "test_list", ["a", "b", "c"], 3),
- ("test_topic", "test_tuple", ("c", "b", "a"), 4),
- ("test_topic", "test_dict", {"a": 1, "b": 2, "c": 3}, 2),
- ("test_topic", "test_number", 123, 3),
- ("test_topic", "test_float", 1.23, 4),
- ("test_topic", "test_boolean", True, 2),
- ("test_topic", "test_none", None, 3)])
+ with open(file_path, "r") as stream:
+ assert yaml.safe_load(stream) == {
+ key: msg if not isinstance(msg, tuple) else list(msg)
+ }
+
+
+@pytest.mark.parametrize(
+ "topic, key, msg, times",
+ [
+ ("test_topic", "test_key", "test_msg", 2),
+ ("test", "test_key", "test_msg", 3),
+ ("test_topic", "test", "test_msg", 4),
+ ("test_topic", "test_key", "test", 2),
+ ("test_topic", "test_list", ["a", "b", "c"], 3),
+ ("test_topic", "test_tuple", ("c", "b", "a"), 4),
+ ("test_topic", "test_dict", {"a": 1, "b": 2, "c": 3}, 2),
+ ("test_topic", "test_number", 123, 3),
+ ("test_topic", "test_float", 1.23, 4),
+ ("test_topic", "test_boolean", True, 2),
+ ("test_topic", "test_none", None, 3),
+ ],
+)
def test_write_with_multiple_calls(msg_local_config, topic, key, msg, times):
file_path = msg_local_config.path + topic
-
+
for _ in range(times):
msg_local_config.write(topic, key, msg)
assert os.path.exists(file_path)
- with open(file_path, 'r') as stream:
+ with open(file_path, "r") as stream:
for _ in range(times):
data = stream.readline()
- assert yaml.safe_load(data) == {key: msg if not isinstance(msg, tuple) else list(msg)}
+ assert yaml.safe_load(data) == {
+ key: msg if not isinstance(msg, tuple) else list(msg)
+ }
def test_write_exception(msg_local_config):
msg_local_config.files_write = MagicMock()
msg_local_config.files_write.__contains__.side_effect = Exception()
-
+
with pytest.raises(MsgException) as excinfo:
msg_local_config.write("test", "test", "test")
assert str(excinfo.value).startswith(empty_exception_message())
assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR
-@pytest.mark.parametrize("topics, datas", [
- (["topic"], [{"key": "value"}]),
- (["topic1"], [{"key": "value"}]),
- (["topic2"], [{"key": "value"}]),
- (["topic", "topic1"], [{"key": "value"}]),
- (["topic", "topic2"], [{"key": "value"}]),
- (["topic1", "topic2"], [{"key": "value"}]),
- (["topic", "topic1", "topic2"], [{"key": "value"}]),
- (["topic"], [{"key": "value"}, {"key1": "value1"}]),
- (["topic1"], [{"key": "value"}, {"key1": "value1"}]),
- (["topic2"], [{"key": "value"}, {"key1": "value1"}]),
- (["topic", "topic1"], [{"key": "value"}, {"key1": "value1"}]),
- (["topic", "topic2"], [{"key": "value"}, {"key1": "value1"}]),
- (["topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}]),
- (["topic", "topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}])])
+@pytest.mark.parametrize(
+ "topics, datas",
+ [
+ (["topic"], [{"key": "value"}]),
+ (["topic1"], [{"key": "value"}]),
+ (["topic2"], [{"key": "value"}]),
+ (["topic", "topic1"], [{"key": "value"}]),
+ (["topic", "topic2"], [{"key": "value"}]),
+ (["topic1", "topic2"], [{"key": "value"}]),
+ (["topic", "topic1", "topic2"], [{"key": "value"}]),
+ (["topic"], [{"key": "value"}, {"key1": "value1"}]),
+ (["topic1"], [{"key": "value"}, {"key1": "value1"}]),
+ (["topic2"], [{"key": "value"}, {"key1": "value1"}]),
+ (["topic", "topic1"], [{"key": "value"}, {"key1": "value1"}]),
+ (["topic", "topic2"], [{"key": "value"}, {"key1": "value1"}]),
+ (["topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}]),
+ (["topic", "topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}]),
+ ],
+)
def test_read(msg_local_with_data, topics, datas):
def write_to_topic(topics, datas):
# Allow msglocal to block while waiting
t.join()
-@pytest.mark.parametrize("topics, datas", [
- (["topic"], [{"key": "value"}]),
- (["topic1"], [{"key": "value"}]),
- (["topic2"], [{"key": "value"}]),
- (["topic", "topic1"], [{"key": "value"}]),
- (["topic", "topic2"], [{"key": "value"}]),
- (["topic1", "topic2"], [{"key": "value"}]),
- (["topic", "topic1", "topic2"], [{"key": "value"}]),
- (["topic"], [{"key": "value"}, {"key1": "value1"}]),
- (["topic1"], [{"key": "value"}, {"key1": "value1"}]),
- (["topic2"], [{"key": "value"}, {"key1": "value1"}]),
- (["topic", "topic1"], [{"key": "value"}, {"key1": "value1"}]),
- (["topic", "topic2"], [{"key": "value"}, {"key1": "value1"}]),
- (["topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}]),
- (["topic", "topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}])])
+@pytest.mark.parametrize(
+ "topics, datas",
+ [
+ (["topic"], [{"key": "value"}]),
+ (["topic1"], [{"key": "value"}]),
+ (["topic2"], [{"key": "value"}]),
+ (["topic", "topic1"], [{"key": "value"}]),
+ (["topic", "topic2"], [{"key": "value"}]),
+ (["topic1", "topic2"], [{"key": "value"}]),
+ (["topic", "topic1", "topic2"], [{"key": "value"}]),
+ (["topic"], [{"key": "value"}, {"key1": "value1"}]),
+ (["topic1"], [{"key": "value"}, {"key1": "value1"}]),
+ (["topic2"], [{"key": "value"}, {"key1": "value1"}]),
+ (["topic", "topic1"], [{"key": "value"}, {"key1": "value1"}]),
+ (["topic", "topic2"], [{"key": "value"}, {"key1": "value1"}]),
+ (["topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}]),
+ (["topic", "topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}]),
+ ],
+)
def test_read_non_block(msg_local_with_data, topics, datas):
def write_to_topic(topics, datas):
for topic in topics:
for topic in topics:
for data in datas:
- recv_topic, recv_key, recv_msg = msg_local_with_data.read(topic, blocks=False)
+ recv_topic, recv_key, recv_msg = msg_local_with_data.read(
+ topic, blocks=False
+ )
key = list(data.keys())[0]
val = data[key]
assert recv_topic == topic
assert recv_msg == val
-@pytest.mark.parametrize("topics, datas", [
- (["topic"], [{"key": "value"}]),
- (["topic1"], [{"key": "value"}]),
- (["topic2"], [{"key": "value"}]),
- (["topic", "topic1"], [{"key": "value"}]),
- (["topic", "topic2"], [{"key": "value"}]),
- (["topic1", "topic2"], [{"key": "value"}]),
- (["topic", "topic1", "topic2"], [{"key": "value"}]),
- (["topic"], [{"key": "value"}, {"key1": "value1"}]),
- (["topic1"], [{"key": "value"}, {"key1": "value1"}]),
- (["topic2"], [{"key": "value"}, {"key1": "value1"}]),
- (["topic", "topic1"], [{"key": "value"}, {"key1": "value1"}]),
- (["topic", "topic2"], [{"key": "value"}, {"key1": "value1"}]),
- (["topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}]),
- (["topic", "topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}])])
+@pytest.mark.parametrize(
+ "topics, datas",
+ [
+ (["topic"], [{"key": "value"}]),
+ (["topic1"], [{"key": "value"}]),
+ (["topic2"], [{"key": "value"}]),
+ (["topic", "topic1"], [{"key": "value"}]),
+ (["topic", "topic2"], [{"key": "value"}]),
+ (["topic1", "topic2"], [{"key": "value"}]),
+ (["topic", "topic1", "topic2"], [{"key": "value"}]),
+ (["topic"], [{"key": "value"}, {"key1": "value1"}]),
+ (["topic1"], [{"key": "value"}, {"key1": "value1"}]),
+ (["topic2"], [{"key": "value"}, {"key1": "value1"}]),
+ (["topic", "topic1"], [{"key": "value"}, {"key1": "value1"}]),
+ (["topic", "topic2"], [{"key": "value"}, {"key1": "value1"}]),
+ (["topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}]),
+ (["topic", "topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}]),
+ ],
+)
def test_read_non_block_none(msg_local_with_data, topics, datas):
def write_to_topic(topics, datas):
time.sleep(2)
with open(msg_local_with_data.path + topic, "a+") as fp:
yaml.safe_dump(data, fp, default_flow_style=True, width=20000)
fp.flush()
+
# If file is not opened first, the messages written won't be seen
for topic in topics:
if topic not in msg_local_with_data.files_read:
t.join()
-@pytest.mark.parametrize("blocks", [
- (True),
- (False)])
+@pytest.mark.parametrize("blocks", [(True), (False)])
def test_read_exception(msg_local_with_data, blocks):
msg_local_with_data.files_read = MagicMock()
msg_local_with_data.files_read.__contains__.side_effect = Exception()
assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR
-@pytest.mark.parametrize("topics, datas", [
- (["topic"], [{"key": "value"}]),
- (["topic1"], [{"key": "value"}]),
- (["topic2"], [{"key": "value"}]),
- (["topic", "topic1"], [{"key": "value"}]),
- (["topic", "topic2"], [{"key": "value"}]),
- (["topic1", "topic2"], [{"key": "value"}]),
- (["topic", "topic1", "topic2"], [{"key": "value"}]),
- (["topic"], [{"key": "value"}, {"key1": "value1"}]),
- (["topic1"], [{"key": "value"}, {"key1": "value1"}]),
- (["topic2"], [{"key": "value"}, {"key1": "value1"}]),
- (["topic", "topic1"], [{"key": "value"}, {"key1": "value1"}]),
- (["topic", "topic2"], [{"key": "value"}, {"key1": "value1"}]),
- (["topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}]),
- (["topic", "topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}])])
+@pytest.mark.parametrize(
+ "topics, datas",
+ [
+ (["topic"], [{"key": "value"}]),
+ (["topic1"], [{"key": "value"}]),
+ (["topic2"], [{"key": "value"}]),
+ (["topic", "topic1"], [{"key": "value"}]),
+ (["topic", "topic2"], [{"key": "value"}]),
+ (["topic1", "topic2"], [{"key": "value"}]),
+ (["topic", "topic1", "topic2"], [{"key": "value"}]),
+ (["topic"], [{"key": "value"}, {"key1": "value1"}]),
+ (["topic1"], [{"key": "value"}, {"key1": "value1"}]),
+ (["topic2"], [{"key": "value"}, {"key1": "value1"}]),
+ (["topic", "topic1"], [{"key": "value"}, {"key1": "value1"}]),
+ (["topic", "topic2"], [{"key": "value"}, {"key1": "value1"}]),
+ (["topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}]),
+ (["topic", "topic1", "topic2"], [{"key": "value"}, {"key1": "value1"}]),
+ ],
+)
def test_aioread(msg_local_with_data, event_loop, topics, datas):
def write_to_topic(topics, datas):
time.sleep(2)
with open(msg_local_with_data.path + topic, "a+") as fp:
yaml.safe_dump(data, fp, default_flow_style=True, width=20000)
fp.flush()
+
# If file is not opened first, the messages written won't be seen
for topic in topics:
if topic not in msg_local_with_data.files_read:
t.start()
for topic in topics:
for data in datas:
- recv = event_loop.run_until_complete(msg_local_with_data.aioread(topic, event_loop))
+ recv = event_loop.run_until_complete(
+ msg_local_with_data.aioread(topic, event_loop)
+ )
recv_topic, recv_key, recv_msg = recv
key = list(data.keys())[0]
val = data[key]
assert excinfo.value.http_code == http.HTTPStatus.INTERNAL_SERVER_ERROR
-@pytest.mark.parametrize("topic, key, msg", [
- ("test_topic", "test_key", "test_msg"),
- ("test", "test_key", "test_msg"),
- ("test_topic", "test", "test_msg"),
- ("test_topic", "test_key", "test"),
- ("test_topic", "test_list", ["a", "b", "c"]),
- ("test_topic", "test_tuple", ("c", "b", "a")),
- ("test_topic", "test_dict", {"a": 1, "b": 2, "c": 3}),
- ("test_topic", "test_number", 123),
- ("test_topic", "test_float", 1.23),
- ("test_topic", "test_boolean", True),
- ("test_topic", "test_none", None)])
+@pytest.mark.parametrize(
+ "topic, key, msg",
+ [
+ ("test_topic", "test_key", "test_msg"),
+ ("test", "test_key", "test_msg"),
+ ("test_topic", "test", "test_msg"),
+ ("test_topic", "test_key", "test"),
+ ("test_topic", "test_list", ["a", "b", "c"]),
+ ("test_topic", "test_tuple", ("c", "b", "a")),
+ ("test_topic", "test_dict", {"a": 1, "b": 2, "c": 3}),
+ ("test_topic", "test_number", 123),
+ ("test_topic", "test_float", 1.23),
+ ("test_topic", "test_boolean", True),
+ ("test_topic", "test_none", None),
+ ],
+)
def test_aiowrite(msg_local_config, event_loop, topic, key, msg):
file_path = msg_local_config.path + topic
event_loop.run_until_complete(msg_local_config.aiowrite(topic, key, msg))
assert os.path.exists(file_path)
- with open(file_path, 'r') as stream:
- assert yaml.safe_load(stream) == {key: msg if not isinstance(msg, tuple) else list(msg)}
-
-
-@pytest.mark.parametrize("topic, key, msg, times", [
- ("test_topic", "test_key", "test_msg", 2),
- ("test", "test_key", "test_msg", 3),
- ("test_topic", "test", "test_msg", 4),
- ("test_topic", "test_key", "test", 2),
- ("test_topic", "test_list", ["a", "b", "c"], 3),
- ("test_topic", "test_tuple", ("c", "b", "a"), 4),
- ("test_topic", "test_dict", {"a": 1, "b": 2, "c": 3}, 2),
- ("test_topic", "test_number", 123, 3),
- ("test_topic", "test_float", 1.23, 4),
- ("test_topic", "test_boolean", True, 2),
- ("test_topic", "test_none", None, 3)])
-def test_aiowrite_with_multiple_calls(msg_local_config, event_loop, topic, key, msg, times):
+ with open(file_path, "r") as stream:
+ assert yaml.safe_load(stream) == {
+ key: msg if not isinstance(msg, tuple) else list(msg)
+ }
+
+
+@pytest.mark.parametrize(
+ "topic, key, msg, times",
+ [
+ ("test_topic", "test_key", "test_msg", 2),
+ ("test", "test_key", "test_msg", 3),
+ ("test_topic", "test", "test_msg", 4),
+ ("test_topic", "test_key", "test", 2),
+ ("test_topic", "test_list", ["a", "b", "c"], 3),
+ ("test_topic", "test_tuple", ("c", "b", "a"), 4),
+ ("test_topic", "test_dict", {"a": 1, "b": 2, "c": 3}, 2),
+ ("test_topic", "test_number", 123, 3),
+ ("test_topic", "test_float", 1.23, 4),
+ ("test_topic", "test_boolean", True, 2),
+ ("test_topic", "test_none", None, 3),
+ ],
+)
+def test_aiowrite_with_multiple_calls(
+ msg_local_config, event_loop, topic, key, msg, times
+):
file_path = msg_local_config.path + topic
for _ in range(times):
event_loop.run_until_complete(msg_local_config.aiowrite(topic, key, msg))
assert os.path.exists(file_path)
- with open(file_path, 'r') as stream:
+ with open(file_path, "r") as stream:
for _ in range(times):
data = stream.readline()
- assert yaml.safe_load(data) == {key: msg if not isinstance(msg, tuple) else list(msg)}
+ assert yaml.safe_load(data) == {
+ key: msg if not isinstance(msg, tuple) else list(msg)
+ }
def test_aiowrite_exception(msg_local_config, event_loop):
msg_local_config.files_write = MagicMock()
msg_local_config.files_write.__contains__.side_effect = Exception()
-
+
with pytest.raises(MsgException) as excinfo:
event_loop.run_until_complete(msg_local_config.aiowrite("test", "test", "test"))
assert str(excinfo.value).startswith(empty_exception_message())
class SOL004ValidatorTest(unittest.TestCase):
def test_get_package_file_hash_algorithm_from_manifest_with_metadata_dir(self):
- package = SOL004Package('osm_common/tests/packages/native_charm_with_metadata_dir_vnf')
- algorithm = package.get_package_file_hash_algorithm_from_manifest('Scripts/charms/simple/src/charm.py')
- self.assertEqual(algorithm, 'SHA-256')
+ package = SOL004Package(
+ "osm_common/tests/packages/native_charm_with_metadata_dir_vnf"
+ )
+ algorithm = package.get_package_file_hash_algorithm_from_manifest(
+ "Scripts/charms/simple/src/charm.py"
+ )
+ self.assertEqual(algorithm, "SHA-256")
def test_get_package_file_hash_algorithm_from_manifest_without_metadata_dir(self):
- package = SOL004Package('osm_common/tests/packages/native_charm_without_metadata_dir_vnf')
- algorithm = package.get_package_file_hash_algorithm_from_manifest('Scripts/charms/simple/src/charm.py')
- self.assertEqual(algorithm, 'SHA-256')
+ package = SOL004Package(
+ "osm_common/tests/packages/native_charm_without_metadata_dir_vnf"
+ )
+ algorithm = package.get_package_file_hash_algorithm_from_manifest(
+ "Scripts/charms/simple/src/charm.py"
+ )
+ self.assertEqual(algorithm, "SHA-256")
def test_get_package_file_hash_algorithm_from_manifest_on_non_existent_file(self):
- package = SOL004Package('osm_common/tests/packages/native_charm_with_metadata_dir_vnf')
+ package = SOL004Package(
+ "osm_common/tests/packages/native_charm_with_metadata_dir_vnf"
+ )
with self.assertRaises(SOL004PackageException):
- package.get_package_file_hash_algorithm_from_manifest('Non/Existing/file')
+ package.get_package_file_hash_algorithm_from_manifest("Non/Existing/file")
def test_get_package_file_hash_digest_from_manifest_with_metadata_dir(self):
- package = SOL004Package('osm_common/tests/packages/native_charm_with_metadata_dir_vnf')
- digest = package.get_package_file_hash_digest_from_manifest('Scripts/charms/simple/src/charm.py')
- self.assertEqual(digest, '7895f7b9e1b7ed5b5bcd64398950ca95b456d7fc973334351474eed466c2f480')
+ package = SOL004Package(
+ "osm_common/tests/packages/native_charm_with_metadata_dir_vnf"
+ )
+ digest = package.get_package_file_hash_digest_from_manifest(
+ "Scripts/charms/simple/src/charm.py"
+ )
+ self.assertEqual(
+ digest, "ea72f897a966e6174ed9164fabc3c500df5a2f712eb6b22ab2408afb07d04d14"
+ )
def test_get_package_file_hash_digest_from_manifest_without_metadata_dir(self):
- package = SOL004Package('osm_common/tests/packages/native_charm_without_metadata_dir_vnf')
- digest = package.get_package_file_hash_digest_from_manifest('Scripts/charms/simple/src/charm.py')
- self.assertEqual(digest, '7895f7b9e1b7ed5b5bcd64398950ca95b456d7fc973334351474eed466c2f480')
+ package = SOL004Package(
+ "osm_common/tests/packages/native_charm_without_metadata_dir_vnf"
+ )
+ digest = package.get_package_file_hash_digest_from_manifest(
+ "Scripts/charms/simple/src/charm.py"
+ )
+ self.assertEqual(
+ digest, "ea72f897a966e6174ed9164fabc3c500df5a2f712eb6b22ab2408afb07d04d14"
+ )
def test_get_package_file_hash_digest_from_manifest_on_non_existent_file(self):
- package = SOL004Package('osm_common/tests/packages/native_charm_with_metadata_dir_vnf')
+ package = SOL004Package(
+ "osm_common/tests/packages/native_charm_with_metadata_dir_vnf"
+ )
with self.assertRaises(SOL004PackageException):
- package.get_package_file_hash_digest_from_manifest('Non/Existing/file')
+ package.get_package_file_hash_digest_from_manifest("Non/Existing/file")
- def test_get_package_file_hash_digest_from_manifest_on_non_existing_hash_entry(self):
- package = SOL004Package('osm_common/tests/packages/invalid_package_vnf')
+ def test_get_package_file_hash_digest_from_manifest_on_non_existing_hash_entry(
+ self,
+ ):
+ package = SOL004Package("osm_common/tests/packages/invalid_package_vnf")
with self.assertRaises(SOL004PackageException):
- package.get_package_file_hash_digest_from_manifest('Scripts/charms/simple/hooks/upgrade-charm')
+ package.get_package_file_hash_digest_from_manifest(
+ "Scripts/charms/simple/hooks/upgrade-charm"
+ )
def test_validate_package_file_hash_with_metadata_dir(self):
- package = SOL004Package('osm_common/tests/packages/native_charm_with_metadata_dir_vnf')
- package.validate_package_file_hash('Scripts/charms/simple/src/charm.py')
+ package = SOL004Package(
+ "osm_common/tests/packages/native_charm_with_metadata_dir_vnf"
+ )
+ package.validate_package_file_hash("Scripts/charms/simple/src/charm.py")
def test_validate_package_file_hash_without_metadata_dir(self):
- package = SOL004Package('osm_common/tests/packages/native_charm_without_metadata_dir_vnf')
- package.validate_package_file_hash('Scripts/charms/simple/src/charm.py')
+ package = SOL004Package(
+ "osm_common/tests/packages/native_charm_without_metadata_dir_vnf"
+ )
+ package.validate_package_file_hash("Scripts/charms/simple/src/charm.py")
def test_validate_package_file_hash_on_non_existing_file(self):
- package = SOL004Package('osm_common/tests/packages/native_charm_with_metadata_dir_vnf')
+ package = SOL004Package(
+ "osm_common/tests/packages/native_charm_with_metadata_dir_vnf"
+ )
with self.assertRaises(SOL004PackageException):
- package.validate_package_file_hash('Non/Existing/file')
+ package.validate_package_file_hash("Non/Existing/file")
def test_validate_package_file_hash_on_wrong_manifest_hash(self):
- package = SOL004Package('osm_common/tests/packages/invalid_package_vnf')
+ package = SOL004Package("osm_common/tests/packages/invalid_package_vnf")
with self.assertRaises(SOL004PackageException):
- package.validate_package_file_hash('Scripts/charms/simple/hooks/start')
+ package.validate_package_file_hash("Scripts/charms/simple/hooks/start")
def test_validate_package_file_hash_on_unsupported_hash_algorithm(self):
- package = SOL004Package('osm_common/tests/packages/invalid_package_vnf')
+ package = SOL004Package("osm_common/tests/packages/invalid_package_vnf")
with self.assertRaises(SOL004PackageException):
- package.validate_package_file_hash('Scripts/charms/simple/src/charm.py')
+ package.validate_package_file_hash("Scripts/charms/simple/src/charm.py")
def test_validate_package_hashes_with_metadata_dir(self):
- package = SOL004Package('osm_common/tests/packages/native_charm_with_metadata_dir_vnf')
+ package = SOL004Package(
+ "osm_common/tests/packages/native_charm_with_metadata_dir_vnf"
+ )
package.validate_package_hashes()
def test_validate_package_hashes_without_metadata_dir(self):
- package = SOL004Package('osm_common/tests/packages/native_charm_without_metadata_dir_vnf')
+ package = SOL004Package(
+ "osm_common/tests/packages/native_charm_without_metadata_dir_vnf"
+ )
package.validate_package_hashes()
def test_validate_package_hashes_on_invalid_package(self):
- package = SOL004Package('osm_common/tests/packages/invalid_package_vnf')
+ package = SOL004Package("osm_common/tests/packages/invalid_package_vnf")
with self.assertRaises(SOL004PackageException):
package.validate_package_hashes()
def test_get_descriptor_location_with_metadata_dir(self):
- package = SOL004Package('osm_common/tests/packages/native_charm_with_metadata_dir_vnf')
+ package = SOL004Package(
+ "osm_common/tests/packages/native_charm_with_metadata_dir_vnf"
+ )
descriptor_path = package.get_descriptor_location()
- self.assertEqual(descriptor_path, 'Definitions/native_charm_vnfd.yaml')
+ self.assertEqual(descriptor_path, "Definitions/native_charm_vnfd.yaml")
def test_get_descriptor_location_without_metadata_dir(self):
- package = SOL004Package('osm_common/tests/packages/native_charm_without_metadata_dir_vnf')
+ package = SOL004Package(
+ "osm_common/tests/packages/native_charm_without_metadata_dir_vnf"
+ )
descriptor_path = package.get_descriptor_location()
- self.assertEqual(descriptor_path, 'native_charm_vnfd.yaml')
+ self.assertEqual(descriptor_path, "native_charm_vnfd.yaml")
here = os.path.abspath(os.path.dirname(__file__))
_name = "osm_common"
-README = open(os.path.join(here, 'README.rst')).read()
+README = open(os.path.join(here, "README.rst")).read()
setup(
name=_name,
- description='OSM common utilities',
+ description="OSM common utilities",
long_description=README,
- version_command=('git describe --tags --long --dirty --match v*', 'pep440-git-full'),
-
- author='ETSI OSM',
- author_email='osmsupport@etsi.com',
- maintainer='ETSI OSM',
- maintainer_email='osmsupport@etsi.com',
-
- url='https://osm.etsi.org/gitweb/?p=osm/common.git;a=summary',
- license='Apache 2.0',
- setup_requires=['setuptools-version-command'],
-
+ version_command=(
+ "git describe --tags --long --dirty --match v*",
+ "pep440-git-full",
+ ),
+ author="ETSI OSM",
+ author_email="osmsupport@etsi.com",
+ maintainer="ETSI OSM",
+ maintainer_email="osmsupport@etsi.com",
+ url="https://osm.etsi.org/gitweb/?p=osm/common.git;a=summary",
+ license="Apache 2.0",
+ setup_requires=["setuptools-version-command"],
packages=[_name],
include_package_data=True,
-
)
deps = black
skip_install = true
commands =
- - black --check --diff osm_common/
+ black --check --diff osm_common/
+ black --check --diff setup.py
#######################################################################################
[testenv:cover]
-r{toxinidir}/requirements-test.txt
pylint
commands =
- pylint -E osm_common
+ pylint -E osm_common
#######################################################################################
[testenv:safety]
ignore =
W291,
W293,
+ W503,
E123,
E125,
+ E203,
E226,
E241
exclude =