From 3fd1c93ac2ba14385ada5e760d5e2bf0870d3964 Mon Sep 17 00:00:00 2001 From: Julien Moura Date: Tue, 18 Jul 2023 12:50:35 +0200 Subject: [PATCH] Rename files to comply with Python convention PEP8 --- src/rok4/{Exceptions.py => exceptions.py} | 0 src/rok4/{Layer.py => layer.py} | 10 +- src/rok4/{Pyramid.py => pyramid.py} | 28 ++--- src/rok4/{Raster.py => raster.py} | 4 +- src/rok4/{Storage.py => storage.py} | 85 +++++++------ .../{TileMatrixSet.py => tile_matrix_set.py} | 6 +- src/rok4/{Utils.py => utils.py} | 0 src/rok4/{Vector.py => vector.py} | 4 +- tests/{test_Layer.py => test_layer.py} | 6 +- tests/{test_Pyramid.py => test_pyramid.py} | 10 +- tests/{test_Raster.py => test_raster.py} | 4 +- tests/{test_Storage.py => test_storage.py} | 117 ++++++++++++------ ...leMatrixSet.py => test_tile_matrix_set.py} | 4 +- tests/{test_Utils.py => test_utils.py} | 4 +- tests/{test_Vector.py => test_vector.py} | 6 +- 15 files changed, 167 insertions(+), 121 deletions(-) rename src/rok4/{Exceptions.py => exceptions.py} (100%) rename src/rok4/{Layer.py => layer.py} (98%) rename src/rok4/{Pyramid.py => pyramid.py} (99%) rename src/rok4/{Raster.py => raster.py} (98%) rename src/rok4/{Storage.py => storage.py} (95%) rename src/rok4/{TileMatrixSet.py => tile_matrix_set.py} (98%) rename src/rok4/{Utils.py => utils.py} (100%) rename src/rok4/{Vector.py => vector.py} (99%) rename tests/{test_Layer.py => test_layer.py} (98%) rename tests/{test_Pyramid.py => test_pyramid.py} (98%) rename tests/{test_Raster.py => test_raster.py} (99%) rename tests/{test_Storage.py => test_storage.py} (94%) rename tests/{test_TileMatrixSet.py => test_tile_matrix_set.py} (99%) rename tests/{test_Utils.py => test_utils.py} (99%) rename tests/{test_Vector.py => test_vector.py} (97%) diff --git a/src/rok4/Exceptions.py b/src/rok4/exceptions.py similarity index 100% rename from src/rok4/Exceptions.py rename to src/rok4/exceptions.py diff --git a/src/rok4/Layer.py b/src/rok4/layer.py similarity index 98% rename 
from src/rok4/Layer.py rename to src/rok4/layer.py index 316a9ca..f7a891b 100644 --- a/src/rok4/Layer.py +++ b/src/rok4/layer.py @@ -11,11 +11,11 @@ import os import re -from rok4.Exceptions import * -from rok4.Pyramid import Pyramid, PyramidType -from rok4.TileMatrixSet import TileMatrixSet -from rok4.Storage import * -from rok4.Utils import * +from rok4.exceptions import * +from rok4.pyramid import Pyramid, PyramidType +from rok4.tile_matrix_set import TileMatrixSet +from rok4.storage import * +from rok4.utils import * class Layer: diff --git a/src/rok4/Pyramid.py b/src/rok4/pyramid.py similarity index 99% rename from src/rok4/Pyramid.py rename to src/rok4/pyramid.py index 7667244..8ca8afc 100644 --- a/src/rok4/Pyramid.py +++ b/src/rok4/pyramid.py @@ -17,10 +17,10 @@ import mapbox_vector_tile from PIL import Image -from rok4.Exceptions import * -from rok4.TileMatrixSet import TileMatrixSet, TileMatrix -from rok4.Storage import * -from rok4.Utils import * +from rok4.exceptions import * +from rok4.tile_matrix_set import TileMatrixSet, TileMatrix +from rok4.storage import * +from rok4.utils import * class PyramidType(Enum): @@ -549,11 +549,8 @@ def serializable(self) -> Dict: Returns: Dict: descriptor structured object description """ - - serialization = { - "tile_matrix_set": self.__tms.name, - "format": self.__format - } + + serialization = {"tile_matrix_set": self.__tms.name, "format": self.__format} serialization["levels"] = [] sorted_levels = sorted(self.__levels.values(), key=lambda l: l.resolution, reverse=True) @@ -620,7 +617,7 @@ def storage_root(self) -> str: Returns: str: Pyramid's storage root """ - + return self.__storage["root"].split("@", 1)[ 0 ] # Suppression de l'éventuel hôte de spécification du cluster S3 @@ -670,7 +667,6 @@ def format(self) -> str: @property def tile_extension(self) -> str: - if self.__format in [ "TIFF_RAW_UINT8", "TIFF_LZW_UINT8", @@ -835,7 +831,7 @@ def get_level(self, level_id: str) -> "Level": Returns: The corresponding 
pyramid's level, None if not present """ - + return self.__levels.get(level_id, None) def get_levels(self, bottom_id: str = None, top_id: str = None) -> List[Level]: @@ -1019,7 +1015,6 @@ def get_slab_path_from_infos( else: return slab_path - def get_tile_data_binary(self, level: str, column: int, row: int) -> str: """Get a pyramid's tile as binary string @@ -1182,7 +1177,6 @@ def get_tile_data_raster(self, level: str, column: int, row: int) -> numpy.ndarr level_object = self.get_level(level) if self.__format == "TIFF_JPG_UINT8" or self.__format == "TIFF_JPG90_UINT8": - try: img = Image.open(io.BytesIO(binary_tile)) except Exception as e: @@ -1379,6 +1373,8 @@ def size(self) -> int: Returns: int: size of the pyramid """ - if not hasattr(self,"_Pyramid__size") : - self.__size = size_path(get_path_from_infos(self.__storage["type"], self.__storage["root"], self.__name)) + if not hasattr(self, "_Pyramid__size"): + self.__size = size_path( + get_path_from_infos(self.__storage["type"], self.__storage["root"], self.__name) + ) return self.__size diff --git a/src/rok4/Raster.py b/src/rok4/raster.py similarity index 98% rename from src/rok4/Raster.py rename to src/rok4/raster.py index 640f4bf..c86901b 100644 --- a/src/rok4/Raster.py +++ b/src/rok4/raster.py @@ -14,8 +14,8 @@ from osgeo import ogr, gdal -from rok4.Storage import exists, get_osgeo_path, put_data_str -from rok4.Utils import ColorFormat, compute_bbox, compute_format +from rok4.storage import exists, get_osgeo_path, put_data_str +from rok4.utils import ColorFormat, compute_bbox, compute_format # Enable GDAL/OGR exceptions ogr.UseExceptions() diff --git a/src/rok4/Storage.py b/src/rok4/storage.py similarity index 95% rename from src/rok4/Storage.py rename to src/rok4/storage.py index bcbee52..3015e5e 100644 --- a/src/rok4/Storage.py +++ b/src/rok4/storage.py @@ -45,7 +45,7 @@ gdal.UseExceptions() -from rok4.Exceptions import * +from rok4.exceptions import * class StorageType(Enum): @@ -75,7 +75,7 @@ def 
__get_s3_client(bucket_name: str) -> Tuple[Dict[str, Union["boto3.client", s Returns: Tuple[Dict[str, Union['boto3.client',str]], str, str]: the S3 informations (client, host, key, secret) and the simple bucket name """ - + global __S3_CLIENTS, __S3_DEFAULT_CLIENT if not __S3_CLIENTS: @@ -134,7 +134,7 @@ def __get_s3_client(bucket_name: str) -> Tuple[Dict[str, Union["boto3.client", s def disconnect_s3_clients() -> None: """Clean S3 clients""" - + global __S3_CLIENTS, __S3_DEFAULT_CLIENT __S3_CLIENTS = {} __S3_DEFAULT_CLIENT = None @@ -363,16 +363,15 @@ def get_data_binary(path: str, range: Tuple[int, int] = None) -> str: raise StorageError("FILE", e) elif storage_type == StorageType.HTTP or storage_type == StorageType.HTTPS: - - if range is None : + if range is None: try: reponse = requests.get(f"{storage_type.value}{path}", stream=True) data = reponse.content - if reponse.status_code == 404 : + if reponse.status_code == 404: raise FileNotFoundError(f"{storage_type.value}{path}") except Exception as e: raise StorageError(storage_type.name, e) - else : + else: raise NotImplementedError else: @@ -471,7 +470,6 @@ def get_size(path: str) -> int: raise StorageError("FILE", e) elif storage_type == StorageType.HTTP or storage_type == StorageType.HTTPS: - try: # Le stream=True permet de ne télécharger que le header initialement reponse = requests.get(storage_type.value + path, stream=True).headers["content-length"] @@ -526,12 +524,11 @@ def exists(path: str) -> bool: return os.path.exists(path) elif storage_type == StorageType.HTTP or storage_type == StorageType.HTTPS: - try: response = requests.get(storage_type.value + path, stream=True) - if response.status_code == 200 : + if response.status_code == 200: return True - else : + else: return False except Exception as e: raise StorageError(storage_type.name, e) @@ -839,43 +836,52 @@ def copy(from_path: str, to_path: str, from_md5: str = None) -> None: f"CEPH and S3", f"Cannot copy CEPH object {from_path} to S3 object 
{to_path} : {e}" ) - elif (from_type == StorageType.HTTP or from_type == StorageType.HTTPS) and to_type == StorageType.FILE : - + elif ( + from_type == StorageType.HTTP or from_type == StorageType.HTTPS + ) and to_type == StorageType.FILE: try: - response = requests.get(from_type.value + from_path, stream = True) + response = requests.get(from_type.value + from_path, stream=True) with open(to_path, "wb") as f: - for chunk in response.iter_content(chunk_size=65536) : + for chunk in response.iter_content(chunk_size=65536): if chunk: f.write(chunk) except Exception as e: - raise StorageError(f"HTTP(S) and FILE", f"Cannot copy HTTP(S) object {from_path} to FILE object {to_path} : {e}") - - elif (from_type == StorageType.HTTP or from_type == StorageType.HTTPS) and to_type == StorageType.CEPH : + raise StorageError( + f"HTTP(S) and FILE", + f"Cannot copy HTTP(S) object {from_path} to FILE object {to_path} : {e}", + ) + elif ( + from_type == StorageType.HTTP or from_type == StorageType.HTTPS + ) and to_type == StorageType.CEPH: to_ioctx = __get_ceph_ioctx(to_tray) try: - response = requests.get(from_type.value + from_path, stream = True) + response = requests.get(from_type.value + from_path, stream=True) offset = 0 - for chunk in response.iter_content(chunk_size=65536) : + for chunk in response.iter_content(chunk_size=65536): if chunk: size = len(chunk) to_ioctx.write(to_base_name, chunk, offset) offset += size except Exception as e: - raise StorageError(f"HTTP(S) and CEPH", f"Cannot copy HTTP(S) object {from_path} to CEPH object {to_path} : {e}") - - elif (from_type == StorageType.HTTP or from_type == StorageType.HTTPS) and to_type == StorageType.S3 : + raise StorageError( + f"HTTP(S) and CEPH", + f"Cannot copy HTTP(S) object {from_path} to CEPH object {to_path} : {e}", + ) + elif ( + from_type == StorageType.HTTP or from_type == StorageType.HTTPS + ) and to_type == StorageType.S3: to_s3_client, to_bucket = __get_s3_client(to_tray) try: - response = 
requests.get(from_type.value + from_path, stream = True) - with tempfile.NamedTemporaryFile("w+b",delete=False) as f: + response = requests.get(from_type.value + from_path, stream=True) + with tempfile.NamedTemporaryFile("w+b", delete=False) as f: name_fich = f.name - for chunk in response.iter_content(chunk_size=65536) : + for chunk in response.iter_content(chunk_size=65536): if chunk: f.write(chunk) @@ -884,7 +890,10 @@ def copy(from_path: str, to_path: str, from_md5: str = None) -> None: os.remove(name_fich) except Exception as e: - raise StorageError(f"HTTP(S) and S3", f"Cannot copy HTTP(S) object {from_path} to S3 object {to_path} : {e}") + raise StorageError( + f"HTTP(S) and S3", + f"Cannot copy HTTP(S) object {from_path} to S3 object {to_path} : {e}", + ) else: raise StorageError( @@ -998,7 +1007,8 @@ def get_osgeo_path(path: str) -> str: else: raise NotImplementedError(f"Cannot get a GDAL/OGR compliant path from {path}") -def size_path(path: str) -> int : + +def size_path(path: str) -> int: """Return the size of the path given (or, for the CEPH, the sum of the size of each object of the .list) Args: @@ -1011,10 +1021,10 @@ def size_path(path: str) -> int : Returns: int: size of the path """ - storage_type, unprefixed_path, tray_name, base_name = get_infos_from_path(path) + storage_type, unprefixed_path, tray_name, base_name = get_infos_from_path(path) if storage_type == StorageType.FILE: - try : + try: total = 0 with os.scandir(unprefixed_path) as it: for entry in it: @@ -1029,24 +1039,23 @@ def size_path(path: str) -> int : elif storage_type == StorageType.S3: s3_client, bucket_name = __get_s3_client(tray_name) - try : - paginator = s3_client["client"].get_paginator('list_objects_v2') + try: + paginator = s3_client["client"].get_paginator("list_objects_v2") pages = paginator.paginate( Bucket=bucket_name, - Prefix=base_name+"/", + Prefix=base_name + "/", PaginationConfig={ - 'PageSize': 10000, - } + "PageSize": 10000, + }, ) total = 0 for page in pages: - 
for key in page['Contents']: - total += key['Size'] + for key in page["Contents"]: + total += key["Size"] except Exception as e: raise StorageError("S3", e) - elif storage_type == StorageType.CEPH: raise NotImplementedError else: diff --git a/src/rok4/TileMatrixSet.py b/src/rok4/tile_matrix_set.py similarity index 98% rename from src/rok4/TileMatrixSet.py rename to src/rok4/tile_matrix_set.py index 472810c..037e6b6 100644 --- a/src/rok4/TileMatrixSet.py +++ b/src/rok4/tile_matrix_set.py @@ -9,9 +9,9 @@ - ROK4_TMS_DIRECTORY """ -from rok4.Exceptions import * -from rok4.Storage import get_data_str -from rok4.Utils import * +from rok4.exceptions import * +from rok4.storage import get_data_str +from rok4.utils import * from typing import Dict, List, Tuple from json.decoder import JSONDecodeError diff --git a/src/rok4/Utils.py b/src/rok4/utils.py similarity index 100% rename from src/rok4/Utils.py rename to src/rok4/utils.py diff --git a/src/rok4/Vector.py b/src/rok4/vector.py similarity index 99% rename from src/rok4/Vector.py rename to src/rok4/vector.py index 94e6def..b12261a 100644 --- a/src/rok4/Vector.py +++ b/src/rok4/vector.py @@ -7,8 +7,8 @@ """ from osgeo import ogr -from rok4.Storage import get_osgeo_path, copy -from rok4.Exceptions import * +from rok4.storage import get_osgeo_path, copy +from rok4.exceptions import * import os import tempfile diff --git a/tests/test_Layer.py b/tests/test_layer.py similarity index 98% rename from tests/test_Layer.py rename to tests/test_layer.py index 0a60d5c..ecc140e 100644 --- a/tests/test_Layer.py +++ b/tests/test_layer.py @@ -1,6 +1,6 @@ -from rok4.Layer import Layer -from rok4.Pyramid import PyramidType -from rok4.Exceptions import * +from rok4.layer import Layer +from rok4.pyramid import PyramidType +from rok4.exceptions import * import pytest import os diff --git a/tests/test_Pyramid.py b/tests/test_pyramid.py similarity index 98% rename from tests/test_Pyramid.py rename to tests/test_pyramid.py index 6eebfd7..14cda2d 
100644 --- a/tests/test_Pyramid.py +++ b/tests/test_pyramid.py @@ -1,8 +1,8 @@ -from rok4.Pyramid import * -from rok4.TileMatrixSet import TileMatrixSet -from rok4.Storage import StorageType -from rok4.Utils import * -from rok4.Exceptions import * +from rok4.pyramid import * +from rok4.tile_matrix_set import TileMatrixSet +from rok4.storage import StorageType +from rok4.utils import * +from rok4.exceptions import * import pytest import os diff --git a/tests/test_Raster.py b/tests/test_raster.py similarity index 99% rename from tests/test_Raster.py rename to tests/test_raster.py index de35de1..ae466a5 100644 --- a/tests/test_Raster.py +++ b/tests/test_raster.py @@ -13,8 +13,8 @@ from unittest import mock, TestCase from unittest.mock import call, MagicMock, Mock, mock_open, patch -from rok4.Raster import Raster, RasterSet -from rok4.Utils import ColorFormat +from rok4.raster import Raster, RasterSet +from rok4.utils import ColorFormat # rok4.Raster.Raster class tests diff --git a/tests/test_Storage.py b/tests/test_storage.py similarity index 94% rename from tests/test_Storage.py rename to tests/test_storage.py index 9779954..8e0f90a 100644 --- a/tests/test_Storage.py +++ b/tests/test_storage.py @@ -1,5 +1,5 @@ -from rok4.Storage import * -from rok4.Exceptions import * +from rok4.storage import * +from rok4.exceptions import * import pytest import os @@ -21,6 +21,7 @@ def test_hash_file_ok(mock_file): except Exception as exc: assert False, f"FILE md5 sum raises an exception: {exc}" + @mock.patch.dict(os.environ, {}, clear=True) def test_get_infos_from_path(): assert (StorageType.S3, "toto/titi", "toto", "titi") == get_infos_from_path("s3://toto/titi") @@ -103,6 +104,7 @@ def test_file_read_ok(mock_file): except Exception as exc: assert False, f"FILE read raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"}, @@ -117,6 +119,7 @@ def test_s3_read_nok(mocked_s3_client): with 
pytest.raises(StorageError): data = get_data_str("s3://bucket/path/to/object") + @mock.patch.dict( os.environ, {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"}, @@ -159,8 +162,9 @@ def test_ceph_read_ok(mocked_rados_client): except Exception as exc: assert False, f"CEPH read raises an exception: {exc}" + @mock.patch.dict(os.environ, {}, clear=True) -@mock.patch("requests.get", side_effect={"status_code":404}) +@mock.patch("requests.get", side_effect={"status_code": 404}) def test_http_read_error(mock_http): with pytest.raises(StorageError): requests_instance = MagicMock() @@ -171,28 +175,31 @@ def test_http_read_error(mock_http): mock_http.assert_called_with("http://path/to/file.ext", stream=True) + @mock.patch.dict(os.environ, {}, clear=True) def test_http_read_range_error(): with pytest.raises(NotImplementedError): - data = get_data_binary("http://path/to/file.ext", (0,100)) + data = get_data_binary("http://path/to/file.ext", (0, 100)) + @mock.patch.dict(os.environ, {}, clear=True) @mock.patch("requests.get") def test_http_read_ok(mock_http): - try : + try: requests_instance = MagicMock() - requests_instance.content = b'data' + requests_instance.content = b"data" mock_http.return_value = requests_instance data = get_data_str("http://path/to/file.ext") mock_http.assert_called_with("http://path/to/file.ext", stream=True) - assert data == 'data' + assert data == "data" except Exception as exc: assert False, f"HTTP read raises an exception: {exc}" ############ put_data_str + @mock.patch.dict( os.environ, {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"}, @@ -225,6 +232,7 @@ def test_s3_write_ok(mocked_s3_client): except Exception as exc: assert False, f"S3 write raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_CEPH_CONFFILE": "a", "ROK4_CEPH_CLUSTERNAME": "b", "ROK4_CEPH_USERNAME": "c"}, @@ -302,6 +310,7 @@ def test_copy_s3_file_nok(mock_hash_file, mock_makedirs, 
mocked_s3_client): copy("s3://bucket/source.ext", "file:///path/to/destination.ext", "toto") mock_makedirs.assert_called_once_with("/path/to", exist_ok=True) + @mock.patch.dict( os.environ, {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"}, @@ -320,6 +329,7 @@ def test_copy_file_s3_ok(mocked_s3_client): except Exception as exc: assert False, f"FILE -> S3 copy raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"}, @@ -338,6 +348,7 @@ def test_copy_s3_s3_ok(mocked_s3_client): except Exception as exc: assert False, f"S3 -> S3 copy raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"}, @@ -373,6 +384,7 @@ def test_copy_s3_s3_intercluster_nok(mocked_s3_client): with pytest.raises(StorageError): copy("s3://bucket@a/source.ext", "s3://bucket@c/destination.ext", "toto") + @mock.patch.dict( os.environ, {"ROK4_CEPH_CONFFILE": "a", "ROK4_CEPH_CLUSTERNAME": "b", "ROK4_CEPH_USERNAME": "c"}, @@ -399,6 +411,7 @@ def test_copy_ceph_file_ok(mock_file, mock_makedirs, mocked_rados_client): except Exception as exc: assert False, f"CEPH -> FILE copy raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_CEPH_CONFFILE": "a", "ROK4_CEPH_CLUSTERNAME": "b", "ROK4_CEPH_USERNAME": "c"}, @@ -423,6 +436,7 @@ def test_copy_file_ceph_ok(mock_file, mocked_rados_client): except Exception as exc: assert False, f"FILE -> CEPH copy raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_CEPH_CONFFILE": "a", "ROK4_CEPH_CLUSTERNAME": "b", "ROK4_CEPH_USERNAME": "c"}, @@ -487,14 +501,14 @@ def test_copy_ceph_s3_ok(mock_file, mocked_s3_client, mocked_rados_client): except Exception as exc: assert False, f"CEPH -> S3 copy raises an exception: {exc}" + @mock.patch.dict(os.environ, {}, clear=True) -@mock.patch('requests.get') -@patch('builtins.open', 
new_callable=mock_open) +@mock.patch("requests.get") +@patch("builtins.open", new_callable=mock_open) def test_copy_http_file_ok(mock_open, mock_requests): try: - http_instance = MagicMock() - http_instance.iter_content.return_value = ["data","data2"] + http_instance.iter_content.return_value = ["data", "data2"] mock_requests.return_value = http_instance copy("http://path/to/source.ext", "file:///path/to/destination.ext") @@ -503,17 +517,20 @@ def test_copy_http_file_ok(mock_open, mock_requests): except Exception as exc: assert False, f"HTTP -> FILE copy raises an exception: {exc}" -@mock.patch.dict(os.environ, {"ROK4_CEPH_CONFFILE": "a", "ROK4_CEPH_CLUSTERNAME": "b", "ROK4_CEPH_USERNAME": "c"}, clear=True) -@mock.patch('rok4.Storage.rados.Rados') -@mock.patch('requests.get') + +@mock.patch.dict( + os.environ, + {"ROK4_CEPH_CONFFILE": "a", "ROK4_CEPH_CLUSTERNAME": "b", "ROK4_CEPH_USERNAME": "c"}, + clear=True, +) +@mock.patch("rok4.storage.rados.Rados") +@mock.patch("requests.get") def test_copy_http_ceph_ok(mock_requests, mocked_rados_client): try: - http_instance = MagicMock() - http_instance.iter_content.return_value = ["data","data2"] + http_instance.iter_content.return_value = ["data", "data2"] mock_requests.return_value = http_instance - disconnect_ceph_clients() ioctx_instance = MagicMock() ioctx_instance.write.return_value = None @@ -526,16 +543,20 @@ def test_copy_http_ceph_ok(mock_requests, mocked_rados_client): except Exception as exc: assert False, f"HTTP -> CEPH copy raises an exception: {exc}" -@mock.patch.dict(os.environ, {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"}, clear=True) -@mock.patch('rok4.Storage.boto3.client') -@mock.patch('requests.get') -@patch('tempfile.NamedTemporaryFile', new_callable=mock_open) -@mock.patch('os.remove') + +@mock.patch.dict( + os.environ, + {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"}, + clear=True, +)
+@mock.patch("rok4.Storage.boto3.client") +@mock.patch("requests.get") +@patch("tempfile.NamedTemporaryFile", new_callable=mock_open) +@mock.patch("os.remove") def test_copy_http_s3_ok(mock_remove, mock_tempfile, mock_requests, mocked_s3_client): try: - http_instance = MagicMock() - http_instance.iter_content.return_value = ["data","data2"] + http_instance.iter_content.return_value = ["data", "data2"] mock_requests.return_value = http_instance disconnect_s3_clients() @@ -546,7 +567,7 @@ def test_copy_http_s3_ok(mock_remove, mock_tempfile, mock_requests, mocked_s3_cl copy("http://path/to/source.ext", "s3://bucket/destination.ext") mock_requests.assert_called_once_with("http://path/to/source.ext", stream=True) - mock_tempfile.assert_called_once_with("w+b",delete=False) + mock_tempfile.assert_called_once_with("w+b", delete=False) except Exception as exc: assert False, f"HTTP -> CEPH copy raises an exception: {exc}" @@ -563,6 +584,7 @@ def test_link_hard_nok(): with pytest.raises(StorageError): link("ceph://pool1/source.ext", "ceph://pool2/destination.ext", True) + @mock.patch.dict(os.environ, {}, clear=True) @mock.patch("os.symlink", return_value=None) def test_link_file_ok(mock_link): @@ -582,6 +604,7 @@ def test_hlink_file_ok(mock_link): except Exception as exc: assert False, f"FILE hard link raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_CEPH_CONFFILE": "a", "ROK4_CEPH_CLUSTERNAME": "b", "ROK4_CEPH_USERNAME": "c"}, @@ -619,6 +642,7 @@ def test_link_s3_ok(mocked_s3_client): except Exception as exc: assert False, f"S3 link raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"}, @@ -637,6 +661,7 @@ def test_link_s3_nok(mocked_s3_client): ############ get_size + @mock.patch.dict(os.environ, {}, clear=True) @mock.patch("os.stat") def test_size_file_ok(mock_stat): @@ -647,6 +672,7 @@ def test_size_file_ok(mock_stat): except Exception as exc: assert False, 
f"FILE size raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_CEPH_CONFFILE": "a", "ROK4_CEPH_CLUSTERNAME": "b", "ROK4_CEPH_USERNAME": "c"}, @@ -667,6 +693,7 @@ def test_size_ceph_ok(mocked_rados_client): except Exception as exc: assert False, f"CEPH size raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"}, @@ -685,12 +712,12 @@ def test_size_s3_ok(mocked_s3_client): except Exception as exc: assert False, f"S3 size raises an exception: {exc}" + @mock.patch.dict(os.environ, {}, clear=True) -@mock.patch('requests.get') +@mock.patch("requests.get") def test_size_http_ok(mock_requests): - http_instance = MagicMock() - http_instance.headers = {"content-length":12} + http_instance.headers = {"content-length": 12} mock_requests.return_value = http_instance try: @@ -717,6 +744,7 @@ def test_exists_file_ok(mock_exists): except Exception as exc: assert False, f"FILE not exists raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_CEPH_CONFFILE": "a", "ROK4_CEPH_CLUSTERNAME": "b", "ROK4_CEPH_USERNAME": "c"}, @@ -768,10 +796,10 @@ def test_exists_s3_ok(mocked_s3_client): except Exception as exc: assert False, f"CEPH not exists raises an exception: {exc}" + @mock.patch.dict(os.environ, {}, clear=True) -@mock.patch('requests.get') +@mock.patch("requests.get") def test_exists_http_ok(mock_requests): - http_instance = MagicMock() http_instance.status_code = 200 mock_requests.return_value = http_instance @@ -792,6 +820,7 @@ def test_exists_http_ok(mock_requests): ############ remove + @mock.patch.dict(os.environ, {}, clear=True) @mock.patch("os.remove") def test_remove_file_ok(mock_remove): @@ -807,6 +836,7 @@ def test_remove_file_ok(mock_remove): except Exception as exc: assert False, f"FILE deletion (not found) raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_CEPH_CONFFILE": "a", "ROK4_CEPH_CLUSTERNAME": "b", "ROK4_CEPH_USERNAME": "c"}, 
@@ -832,6 +862,7 @@ def test_remove_ceph_ok(mocked_rados_client): except Exception as exc: assert False, f"CEPH deletion (not found) raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"}, @@ -861,6 +892,7 @@ def test_get_osgeo_path_file_ok(): except Exception as exc: assert False, f"FILE osgeo path raises an exception: {exc}" + @mock.patch.dict( os.environ, {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"}, @@ -881,6 +913,7 @@ def test_get_osgeo_path_nok(): with pytest.raises(NotImplementedError): get_osgeo_path("ceph://pool/data.ext") + ############ size_path def test_size_path_file_ok(): try: @@ -889,22 +922,31 @@ def test_size_path_file_ok(): except Exception as exc: assert False, f"FILE size of the path raises an exception: {exc}" + def test_size_file_nok(): - with pytest.raises(StorageError) : + with pytest.raises(StorageError): size = size_path("file://tests/fixtures/TIFF_PBF_M") -@mock.patch.dict(os.environ, {"ROK4_CEPH_CONFFILE": "a", "ROK4_CEPH_CLUSTERNAME": "b", "ROK4_CEPH_USERNAME": "c"}, clear=True) -def test_size_path_ceph_nok(): +@mock.patch.dict( + os.environ, + {"ROK4_CEPH_CONFFILE": "a", "ROK4_CEPH_CLUSTERNAME": "b", "ROK4_CEPH_USERNAME": "c"}, + clear=True, +) +def test_size_path_ceph_nok(): with pytest.raises(NotImplementedError): size = size_path("ceph://pool/path") -@mock.patch.dict(os.environ, {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"}, clear=True) -@mock.patch('rok4.Storage.boto3.client') -def test_size_path_s3_ok(mocked_s3_client): +@mock.patch.dict( + os.environ, + {"ROK4_S3_URL": "https://a,https://b", "ROK4_S3_SECRETKEY": "a,b", "ROK4_S3_KEY": "a,b"}, + clear=True, +) +@mock.patch("rok4.storage.boto3.client") +def test_size_path_s3_ok(mocked_s3_client): disconnect_s3_clients() - pages = [{"Contents" : [{"Size" : 10},{"Size" : 20}]}, {"Contents" : [{"Size" : 
50}]}] + pages = [{"Contents": [{"Size": 10}, {"Size": 20}]}, {"Contents": [{"Size": 50}]}] paginator = MagicMock() paginator.paginate.return_value = pages client = MagicMock() @@ -916,4 +958,3 @@ def test_size_path_s3_ok(mocked_s3_client): assert size == 80 except Exception as exc: assert False, f"S3 size of the path raises an exception: {exc}" - diff --git a/tests/test_TileMatrixSet.py b/tests/test_tile_matrix_set.py similarity index 99% rename from tests/test_TileMatrixSet.py rename to tests/test_tile_matrix_set.py index 4750f50..350669b 100644 --- a/tests/test_TileMatrixSet.py +++ b/tests/test_tile_matrix_set.py @@ -1,5 +1,5 @@ -from rok4.TileMatrixSet import TileMatrixSet -from rok4.Exceptions import * +from rok4.tile_matrix_set import TileMatrixSet +from rok4.exceptions import * import pytest import os diff --git a/tests/test_Utils.py b/tests/test_utils.py similarity index 99% rename from tests/test_Utils.py rename to tests/test_utils.py index cdba571..a5d93b3 100644 --- a/tests/test_Utils.py +++ b/tests/test_utils.py @@ -1,5 +1,5 @@ -from rok4.Utils import * -from rok4.Exceptions import * +from rok4.utils import * +from rok4.exceptions import * import pytest import os diff --git a/tests/test_Vector.py b/tests/test_vector.py similarity index 97% rename from tests/test_Vector.py rename to tests/test_vector.py index 221f5e4..dbecde2 100644 --- a/tests/test_Vector.py +++ b/tests/test_vector.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 -from rok4.Vector import * -from rok4.Exceptions import * -from rok4.Storage import disconnect_ceph_clients +from rok4.vector import * +from rok4.exceptions import * +from rok4.storage import disconnect_ceph_clients import pytest import os