Skip to content

Commit

Permalink
Fixes on deprecated mocks
Browse files Browse the repository at this point in the history
  • Loading branch information
pcrespov committed Feb 12, 2021
1 parent c4360b8 commit c58b0ff
Show file tree
Hide file tree
Showing 2 changed files with 105 additions and 82 deletions.
53 changes: 10 additions & 43 deletions services/web/server/tests/integration/test_exporter.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,18 @@
import asyncio

# pylint:disable=redefined-outer-name,unused-argument,too-many-arguments
import cgi
import itertools
import json
import logging
import operator
import sys
import asyncio
import tempfile
from collections import deque
from contextlib import contextmanager
from copy import deepcopy
import operator
import itertools
from pathlib import Path
from typing import Any, Dict, List, Set, Callable, Tuple
from collections import deque
from typing import Any, Callable, Dict, List, Set, Tuple

import aiofiles
import aiohttp
Expand All @@ -27,6 +28,8 @@
from simcore_service_webserver.director import setup_director
from simcore_service_webserver.director_v2 import setup_director_v2
from simcore_service_webserver.exporter import setup_exporter
from simcore_service_webserver.exporter.async_hashing import Algorithm, checksum
from simcore_service_webserver.exporter.file_downloader import ParallelDownloader
from simcore_service_webserver.login import setup_login
from simcore_service_webserver.projects import setup_projects
from simcore_service_webserver.resource_manager import setup_resource_manager
Expand All @@ -35,11 +38,9 @@
from simcore_service_webserver.security_roles import UserRole
from simcore_service_webserver.session import setup_session
from simcore_service_webserver.socketio import setup_socketio
from simcore_service_webserver.users import setup_users
from simcore_service_webserver.storage_handlers import get_file_download_url
from simcore_service_webserver.storage import setup_storage
from simcore_service_webserver.exporter.file_downloader import ParallelDownloader
from simcore_service_webserver.exporter.async_hashing import Algorithm, checksum
from simcore_service_webserver.storage_handlers import get_file_download_url
from simcore_service_webserver.users import setup_users
from yarl import URL

log = logging.getLogger(__name__)
Expand Down Expand Up @@ -177,39 +178,6 @@ def get_exported_projects() -> List[Path]:
return [x for x in exporter_dir.glob("*.osparc")]


@pytest.fixture
async def monkey_patch_asyncio_subporcess(mocker):
    """Replace ``asyncio.create_subprocess_exec`` with a blocking
    ``subprocess.Popen``-backed stand-in.

    Workaround for https://bugs.python.org/issue35621 which breaks
    ``asyncio.create_subprocess_exec`` on Python < 3.8. Fails loudly on
    3.8+ so the mock gets deleted once it is obsolete.
    """
    # Tuple comparison also covers a hypothetical major version bump,
    # unlike the previous `major == 3 and minor >= 8` check.
    if sys.version_info >= (3, 8):
        raise RuntimeError(
            "Issue no longer present in this version of python, "
            "please remove this mock on python >= 3.8"
        )

    import subprocess

    async def create_subprocess_exec(*command, **extra_params):
        class MockResponse:
            # Minimal stand-in for asyncio's Process: spawns synchronously,
            # which is acceptable for tests.
            def __init__(self, command, **kwargs):
                # BUGFIX: use the kwargs actually passed in instead of
                # silently ignoring them for the closed-over extra_params.
                self.proc = subprocess.Popen(command, **kwargs)

            async def communicate(self):
                # Blocking call; fine in a test context.
                return self.proc.communicate()

            @property
            def returncode(self):
                return self.proc.returncode

        return MockResponse(command, **extra_params)

    mocker.patch("asyncio.create_subprocess_exec", side_effect=create_subprocess_exec)


@pytest.fixture(scope="session")
def push_services_to_registry(
docker_registry: str, node_meta_schema: Dict
Expand Down Expand Up @@ -417,7 +385,6 @@ async def test_import_export_import_duplicate(
db_engine,
redis_client,
export_version,
monkey_patch_asyncio_subporcess,
simcore_services,
monkey_patch_aiohttp_request_url,
):
Expand Down
134 changes: 95 additions & 39 deletions services/web/server/tests/unit/isolated/test_exporter_archiving.py
Original file line number Diff line number Diff line change
@@ -1,18 +1,16 @@
# pylint:disable=unused-argument
# pylint:disable=redefined-outer-name
# pylint:disable=no-name-in-module
# pylint:disable=redefined-outer-name,unused-argument

import asyncio
import hashlib
import os
import random
import secrets
import string
import sys
import tempfile
import uuid
from concurrent.futures import ProcessPoolExecutor
from pathlib import Path
from typing import Dict, Iterator, List, Set
from typing import Dict, Iterator, List, Set, Tuple

import pytest
from simcore_service_webserver.exporter.archiving import (
Expand All @@ -25,51 +23,35 @@


@pytest.fixture
async def monkey_patch_asyncio_subporcess(loop, mocker):
# TODO: The below bug is not allowing me to fully test,
# mocking and waiting for an update
# https://bugs.python.org/issue35621
# this issue was patched in 3.8, no need
if sys.version_info.major == 3 and sys.version_info.minor >= 8:
raise RuntimeError(
"Issue no longer present in this version of python, "
"please remote this mock on python >= 3.8"
)

import subprocess

async def create_subprocess_exec(*command, **extra_params):
class MockResponse:
def __init__(self, command, **kwargs):
self.proc = subprocess.Popen(command, **extra_params)

async def communicate(self):
return self.proc.communicate()

@property
def returncode(self):
return self.proc.returncode

mock_response = MockResponse(command, **extra_params)
def temp_dir(tmpdir) -> Path:
    """Convert pytest's ``tmpdir`` fixture value to a ``pathlib.Path``."""
    return Path(str(tmpdir))

return mock_response

mocker.patch("asyncio.create_subprocess_exec", side_effect=create_subprocess_exec)
@pytest.fixture
def temp_dir2() -> Iterator[Path]:
    """Yield an ``extract_dir`` folder inside a throwaway temp directory.

    The whole tree is removed on teardown when the context manager exits.
    """
    with tempfile.TemporaryDirectory() as base_dir:
        extraction_target = Path(base_dir) / "extract_dir"
        extraction_target.mkdir(parents=True, exist_ok=True)
        yield extraction_target


@pytest.fixture
def temp_dir(tmpdir) -> Path:
# Casts https://docs.pytest.org/en/stable/tmpdir.html#the-tmpdir-fixture to Path
return Path(tmpdir)
def temp_file() -> Iterator[Path]:
    """Yield a temporary file containing ``test_data``; unlinked on teardown.

    Uses ``tempfile.NamedTemporaryFile`` instead of the private
    ``tempfile._get_candidate_names()`` API and a hard-coded ``/tmp`` prefix.
    ``delete=False`` keeps the file alive after the handle closes so the
    test body can reopen it by path.
    """
    with tempfile.NamedTemporaryFile("w", delete=False) as ntf:
        ntf.write("test_data")
    file_path = Path(ntf.name)
    yield file_path
    file_path.unlink()


@pytest.fixture
def project_uuid() -> str:
def project_uuid() -> str:
    """Return a fresh random (uuid4) project identifier as a string."""
    return f"{uuid.uuid4()}"


@pytest.fixture
def dir_with_random_content() -> Path:
def dir_with_random_content() -> Iterator[Path]:
def random_string(length: int) -> str:
return "".join(secrets.choice(string.ascii_letters) for i in range(length))

Expand Down Expand Up @@ -101,7 +83,7 @@ def make_subdirectories_with_content(
)

def get_dirs_and_subdris_in_path(path_to_scan: Path) -> Iterator[Path]:
return [path for path in path_to_scan.rglob("*") if path.is_dir()]
return (path for path in path_to_scan.rglob("*") if path.is_dir())

with tempfile.TemporaryDirectory() as temp_dir:
temp_dir_path = Path(temp_dir)
Expand Down Expand Up @@ -154,6 +136,80 @@ def temp_dir_to_compress_with_too_many_targets(temp_dir, project_uuid) -> Path:
return nested_dir


def strip_directory_from_path(input_path: Path, to_strip: Path) -> Path:
    """Return ``input_path`` with the leading ``to_strip`` directory removed.

    If ``input_path`` does not start with ``to_strip`` it is returned
    unchanged. BUGFIX: the previous ``str.replace`` implementation removed
    *every* occurrence of the prefix anywhere in the path, corrupting paths
    that repeat the prefix (e.g. ``/a/b/a/b/c``).
    """
    prefix = f"{to_strip}/"
    as_text = str(input_path)
    if as_text.startswith(prefix):
        as_text = as_text[len(prefix):]
    return Path(as_text)


def get_all_files_in_dir(dir_path: Path) -> Set[Path]:
    """Collect every file below ``dir_path`` with the ``dir_path`` prefix
    stripped from each entry."""
    collected: Set[Path] = set()
    for entry in dir_path.rglob("*"):
        if entry.is_file():
            collected.add(strip_directory_from_path(entry, dir_path))
    return collected


def _compute_hash(file_path: Path) -> Tuple[Path, str]:
    """Return ``(file_path, md5-hexdigest)`` of the file's contents,
    read in 8 KiB chunks so large files are not loaded whole."""
    digest = hashlib.md5()
    with open(file_path, "rb") as file_to_hash:
        # iter(callable, sentinel) drives the chunked read loop without
        # duplicating the read() call before and inside the loop.
        for chunk in iter(lambda: file_to_hash.read(8192), b""):
            digest.update(chunk)
    return file_path, digest.hexdigest()


async def compute_hashes(file_paths: List[Path]) -> Dict[Path, str]:
    """Given a list of files, compute their md5 hashes on a process pool
    and return a ``{file_path: hexdigest}`` mapping."""
    event_loop = asyncio.get_event_loop()

    with ProcessPoolExecutor() as executor:
        pending = [
            event_loop.run_in_executor(executor, _compute_hash, path)
            for path in file_paths
        ]
        # _compute_hash yields (path, digest) pairs, so dict() maps them 1:1.
        hash_pairs = await asyncio.gather(*pending)
    return dict(hash_pairs)


def full_file_path_from_dir_and_subdirs(dir_path: Path) -> List[Path]:
    """List every regular file located under ``dir_path``, recursively."""
    return list(filter(lambda entry: entry.is_file(), dir_path.rglob("*")))


async def assert_same_directory_content(
    dir_to_compress: Path, output_dir: Path
) -> None:
    """Assert that ``output_dir`` contains exactly the same files as
    ``dir_to_compress``, byte-for-byte (compared via md5 digests).

    Raises ``AssertionError`` describing the first discrepancy found.
    """
    input_set = get_all_files_in_dir(dir_to_compress)
    output_set = get_all_files_in_dir(output_dir)
    # BUGFIX: message typo ("There following") and it only reported files
    # missing from the output; now both directions of the diff are shown.
    assert input_set == output_set, (
        f"The following files are missing {input_set - output_set}, "
        f"unexpected extra files {output_set - input_set}"
    )

    # computing the hashes for dir_to_compress and map in a dict
    # with the name starting from the root of the directory and md5sum
    dir_to_compress_hashes = {
        strip_directory_from_path(k, dir_to_compress): v
        for k, v in (
            await compute_hashes(full_file_path_from_dir_and_subdirs(dir_to_compress))
        ).items()
    }

    # computing the hashes for output_dir and map in a dict
    # with the name starting from the root of the directory and md5sum
    output_dir_hashes = {
        strip_directory_from_path(k, output_dir): v
        for k, v in (
            await compute_hashes(full_file_path_from_dir_and_subdirs(output_dir))
        ).items()
    }

    # finally check if hashes are mapped 1 to 1 in order to verify
    # that the compress/decompress worked correctly
    for relative_path, digest in dir_to_compress_hashes.items():
        assert (
            digest == output_dir_hashes[relative_path]
        ), f"md5 mismatch for {relative_path}"


def test_validate_osparc_file_name_ok():
algorithm, digest_sum = validate_osparc_import_name(
"v1#SHA256=80e69a0973e15f4a9c3c180d00a39ee0b0dfafe43356f867983e1180e9b5a892.osparc"
Expand Down

0 comments on commit c58b0ff

Please sign in to comment.