diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml index 2b05186e14c..6ef9940e670 100644 --- a/.github/workflows/ci-testing-deploy.yml +++ b/.github/workflows/ci-testing-deploy.yml @@ -39,7 +39,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -73,7 +73,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -120,7 +120,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -173,6 +173,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: + # KEEP 3.6 Development of this service is frozen python: [3.6] os: [ubuntu-20.04] fail-fast: false @@ -220,7 +221,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -271,7 +272,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -318,7 +319,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -395,7 +396,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8, 3.9] os: [ubuntu-20.04] fail-fast: false steps: @@ -429,7 +430,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -477,7 +478,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -524,7 +525,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -571,7 +572,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -618,7 +619,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -665,7 +666,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -712,7 +713,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -760,7 +761,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -808,7 +809,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -856,7 +857,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -904,7 +905,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -947,12 +948,12 @@ jobs: path: codeclimate.${{ github.job }}_coverage.json unit-test-webserver-05: - timeout-minutes: 15 # if this timeout gets too small, then split the tests + timeout-minutes: 14 # if this timeout gets too small, then split the tests name: "[unit] webserver 05" runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -1000,7 +1001,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false 
steps: @@ -1048,7 +1049,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -1096,7 +1097,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -1144,7 +1145,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -1192,7 +1193,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -1269,7 +1270,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -1333,7 +1334,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -1397,7 +1398,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -1461,7 +1462,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -1525,7 +1526,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -1589,7 +1590,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -1634,7 +1635,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: @@ -1679,7 +1680,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] node: [14] os: [ubuntu-20.04] fail-fast: false @@ -1756,7 +1757,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - python: [3.6] + python: [3.8] os: [ubuntu-20.04] fail-fast: false steps: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 26605c9c3fe..52509097734 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,7 +2,7 @@ # See https://pre-commit.com/hooks.html for more hooks exclude: "^.venv$|^.cache$|^.pytest_cache$" default_language_version: - python: python3.6 + python: python3.8 repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v3.3.0 diff --git a/api/tests/requirements.txt b/api/tests/requirements.txt index de167a9500f..92bbec4cbf5 100644 --- a/api/tests/requirements.txt +++ b/api/tests/requirements.txt @@ -22,21 +22,10 @@ coverage==5.5 # via # -r requirements.in # pytest-cov -dataclasses==0.8 - # via werkzeug dictpath==0.1.3 # via openapi-core -idna-ssl==1.1.0 - # via aiohttp idna==3.1 - # via - # idna-ssl - # yarl -importlib-metadata==4.0.1 - # via - # jsonschema - # pluggy - # pytest + # via yarl iniconfig==1.1.1 # via pytest isodate==0.6.0 @@ -108,16 +97,11 @@ termcolor==1.1.0 toml==0.10.2 # via pytest typing-extensions==3.10.0.0 - # via - # aiohttp - # importlib-metadata - # yarl + # via aiohttp werkzeug==2.0.0 # via openapi-core yarl==1.6.3 # via aiohttp -zipp==3.4.1 - # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/api/tests/test_repo_data.py b/api/tests/test_repo_data.py index 85afc6e9bfd..67289d9f836 100644 --- a/api/tests/test_repo_data.py +++ b/api/tests/test_repo_data.py @@ -90,8 +90,8 @@ def test_project_against_schema(data_path, project_schema, this_repo_root_dir): } data = prj - assert any(isinstance(data, _type) for _type in [List, 
Dict]) - if isinstance(data, Dict): + assert any(isinstance(data, _type) for _type in [list, dict]) + if isinstance(data, dict): data = [ data, ] diff --git a/mypy.ini b/mypy.ini index 5cdc84ab399..1a2777bca8f 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,6 +1,6 @@ # Global options: [mypy] -python_version = 3.6 +python_version = 3.8 warn_return_any = True warn_unused_configs = True diff --git a/packages/models-library/requirements/_base.txt b/packages/models-library/requirements/_base.txt index d67a7840c54..72784e70f42 100644 --- a/packages/models-library/requirements/_base.txt +++ b/packages/models-library/requirements/_base.txt @@ -4,8 +4,6 @@ # # pip-compile --output-file=requirements/_base.txt requirements/_base.in # -dataclasses==0.8 - # via pydantic dnspython==2.1.0 # via email-validator email-validator==1.1.2 @@ -15,6 +13,8 @@ idna==2.10 # -r requirements/_base.in # email-validator pydantic[email]==1.8.2 - # via -r requirements/_base.in + # via + # -c requirements/../../../requirements/constraints.txt + # -r requirements/_base.in typing-extensions==3.10.0.0 # via pydantic diff --git a/packages/models-library/requirements/_test.txt b/packages/models-library/requirements/_test.txt index 19efa076a7c..d67dcc4b17c 100644 --- a/packages/models-library/requirements/_test.txt +++ b/packages/models-library/requirements/_test.txt @@ -31,21 +31,11 @@ docopt==0.6.2 # via coveralls icdiff==1.9.1 # via pytest-icdiff -idna-ssl==1.1.0 - # via aiohttp idna==2.10 # via # -c requirements/_base.txt - # idna-ssl # requests # yarl -importlib-metadata==4.0.1 - # via - # pint - # pluggy - # pytest -importlib-resources==5.1.3 - # via pint iniconfig==1.1.1 # via pytest isort==5.8.0 @@ -110,14 +100,10 @@ toml==0.10.2 # via # pylint # pytest -typed-ast==1.4.3 - # via astroid typing-extensions==3.10.0.0 # via # -c requirements/_base.txt # aiohttp - # importlib-metadata - # yarl urllib3==1.26.4 # via # -c requirements/../../../requirements/constraints.txt @@ -126,7 +112,3 @@ wrapt==1.12.1 # via astroid yarl==1.6.3 # via aiohttp -zipp==3.4.1 - # via - # importlib-metadata - # importlib-resources diff --git a/packages/models-library/requirements/_tools.txt b/packages/models-library/requirements/_tools.txt index 3569562929d..0ff896e0d4b 100644 --- a/packages/models-library/requirements/_tools.txt +++ b/packages/models-library/requirements/_tools.txt @@ -18,27 +18,12 @@ click==8.0.0 # via # black # pip-tools -dataclasses==0.8 - # via - # -c requirements/_base.txt - # black distlib==0.3.1 # via virtualenv filelock==3.0.12 # via virtualenv identify==2.2.4 # via pre-commit -importlib-metadata==4.0.1 - # via - # -c requirements/_test.txt - # pep517 - # pre-commit - # virtualenv -importlib-resources==5.1.3 - # via - # -c requirements/_test.txt - # pre-commit - # virtualenv isort==5.8.0 # via # -c requirements/_test.txt @@ -70,24 +55,8 @@ toml==0.10.2 # black # pep517 # pre-commit -typed-ast==1.4.3 - # via - # -c requirements/_test.txt - # black -typing-extensions==3.10.0.0 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # black - # importlib-metadata virtualenv==20.4.6 # via pre-commit -zipp==3.4.1 - # via - # -c requirements/_test.txt - # importlib-metadata - # importlib-resources - # pep517 # The following packages are considered to be unsafe in a requirements file: # pip diff --git a/packages/models-library/setup.py b/packages/models-library/setup.py index beac34f00d6..8014c69d20d 100644 --- a/packages/models-library/setup.py +++ b/packages/models-library/setup.py @@ -33,7 +33,7 @@ def 
read_reqs(reqs_path: Path): "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Natural Language :: English", - "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.8", ], long_description=readme, license="MIT license", diff --git a/packages/models-library/src/models_library/projects.py b/packages/models-library/src/models_library/projects.py index 97d5fa4d9db..2d960b77af6 100644 --- a/packages/models-library/src/models_library/projects.py +++ b/packages/models-library/src/models_library/projects.py @@ -62,7 +62,7 @@ class ProjectCommons(BaseModel): last_change_date: datetime = Field(...) # Pipeline of nodes (SEE projects_nodes.py) - workbench: Workbench = Field(...) + workbench: Workbench = Field(..., description="Project's pipeline") @validator("thumbnail", always=True, pre=True) @classmethod diff --git a/packages/models-library/tests/test_project_nodes_io.py b/packages/models-library/tests/test_project_nodes_io.py index 6799b490fca..2777c636ebf 100644 --- a/packages/models-library/tests/test_project_nodes_io.py +++ b/packages/models-library/tests/test_project_nodes_io.py @@ -2,9 +2,8 @@ # pylint:disable=unused-argument # pylint:disable=redefined-outer-name -import sys from pprint import pformat -from typing import Any, Dict, Tuple +from typing import Any, Dict import pytest from models_library.projects_nodes import Node, PortLink @@ -50,13 +49,6 @@ def test_project_nodes_io_model_examples(model_cls, model_cls_examples): print(name, ":", model_instance) -def get_args(annotation) -> Tuple: - assert ( # nosec - sys.version_info.major == 3 and sys.version_info.minor < 8 # nosec - ), "TODO: py3.8 replace __args__ with typings.get_args" - return annotation.__args__ - - def test_store_discriminator(): workbench = { "89f95b67-a2a3-4215-a794-2356684deb61": { diff --git a/packages/postgres-database/requirements/_base.txt b/packages/postgres-database/requirements/_base.txt index e12efe445d0..3a2924b78fa 100644 --- a/packages/postgres-database/requirements/_base.txt +++ b/packages/postgres-database/requirements/_base.txt @@ -16,7 +16,5 @@ sqlalchemy[postgresql_psycopg2binary]==1.3.24 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in -typing-extensions==3.10.0.0 - # via yarl yarl==1.6.3 # via -r requirements/_base.in diff --git a/packages/postgres-database/requirements/_test.txt b/packages/postgres-database/requirements/_test.txt index c22afb06123..04ce99fc76c 100644 --- a/packages/postgres-database/requirements/_test.txt +++ b/packages/postgres-database/requirements/_test.txt @@ -22,8 +22,6 @@ attrs==20.3.0 # pytest-docker bcrypt==3.2.0 # via paramiko -cached-property==1.5.2 - # via docker-compose certifi==2020.12.5 # via # -c requirements/_migration.txt @@ -67,20 +65,12 @@ docopt==0.6.2 # docker-compose faker==8.1.4 # via -r requirements/_test.in -idna-ssl==1.1.0 - # via aiohttp idna==2.10 # via # -c requirements/_base.txt # -c requirements/_migration.txt - # idna-ssl # requests # yarl -importlib-metadata==4.0.1 - # via - # jsonschema - # pluggy - # pytest iniconfig==1.1.1 # via pytest isort==5.8.0 @@ -177,14 +167,8 @@ toml==0.10.2 # via # pylint # pytest -typed-ast==1.4.3 - # via astroid typing-extensions==3.10.0.0 - # via - # -c requirements/_base.txt - # aiohttp - # importlib-metadata - # yarl + # via aiohttp urllib3==1.26.4 # via # -c requirements/../../../requirements/constraints.txt @@ -201,8 +185,6 @@ yarl==1.6.3 # via # -c requirements/_base.txt # aiohttp -zipp==3.4.1 - # via importlib-metadata # 
The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/packages/postgres-database/requirements/_tools.txt b/packages/postgres-database/requirements/_tools.txt index 07341e09a14..ca966010688 100644 --- a/packages/postgres-database/requirements/_tools.txt +++ b/packages/postgres-database/requirements/_tools.txt @@ -18,24 +18,12 @@ click==8.0.0 # via # black # pip-tools -dataclasses==0.8 - # via black distlib==0.3.1 # via virtualenv filelock==3.0.12 # via virtualenv identify==2.2.4 # via pre-commit -importlib-metadata==4.0.1 - # via - # -c requirements/_test.txt - # pep517 - # pre-commit - # virtualenv -importlib-resources==5.1.3 - # via - # pre-commit - # virtualenv isort==5.8.0 # via # -c requirements/_test.txt @@ -69,24 +57,8 @@ toml==0.10.2 # black # pep517 # pre-commit -typed-ast==1.4.3 - # via - # -c requirements/_test.txt - # black -typing-extensions==3.10.0.0 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # black - # importlib-metadata virtualenv==20.4.6 # via pre-commit -zipp==3.4.1 - # via - # -c requirements/_test.txt - # importlib-metadata - # importlib-resources - # pep517 # The following packages are considered to be unsafe in a requirements file: # pip diff --git a/packages/postgres-database/setup.py b/packages/postgres-database/setup.py index da2202634d9..b5bda1bdc81 100644 --- a/packages/postgres-database/setup.py +++ b/packages/postgres-database/setup.py @@ -19,7 +19,7 @@ def read_reqs(reqs_path: Path): install_requirements = read_reqs(current_dir / "requirements" / "_base.in") # Strong dependencies -migration_requirements = read_reqs(current_dir / "requirements" / "_migration.txt") +migration_requirements = read_reqs(current_dir / "requirements" / "_migration.in") test_requirements = read_reqs(current_dir / "requirements" / "_test.txt") @@ -34,7 +34,7 @@ def read_reqs(reqs_path: Path): "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Natural Language :: English", - "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.8", ], long_description=readme, license="MIT license", diff --git a/packages/pytest-simcore/setup.py b/packages/pytest-simcore/setup.py index 62106d9188f..142fe1d07bc 100644 --- a/packages/pytest-simcore/setup.py +++ b/packages/pytest-simcore/setup.py @@ -1,4 +1,4 @@ -from setuptools import setup, find_packages +from setuptools import find_packages, setup setup( name="pytest-simcore", @@ -6,7 +6,8 @@ maintainer="pcrespov, sanderegg", description="pytest plugin with fixtures and test helpers for osparc-simcore repo modules", py_modules=["pytest_simcore"], - python_requires=">=3.6.*", + # WARNING: this is used in frozen services as well + python_requires=">=3.6", # TODO create partial extensions: install_requires=["pytest>=3.5.0"], extras_require={ diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_docker.py b/packages/pytest-simcore/src/pytest_simcore/helpers/utils_docker.py index 0c64a497d51..69323530811 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_docker.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/utils_docker.py @@ -97,7 +97,8 @@ def run_docker_compose_config( - All environments are interpolated from a custom env-file at 'env_file_path' - Saves resolved output config to 'destination_path' (if given) """ - if not isinstance(docker_compose_paths, List): + + if not isinstance(docker_compose_paths, list): docker_compose_paths = [ docker_compose_paths, ] diff --git 
a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_mock.py b/packages/pytest-simcore/src/pytest_simcore/helpers/utils_mock.py deleted file mode 100644 index fa3f4461b2c..00000000000 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/utils_mock.py +++ /dev/null @@ -1,7 +0,0 @@ -from asyncio import Future - - -def future_with_result(result) -> Future: - f = Future() - f.set_result(result) - return f diff --git a/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py b/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py index fd299b4f129..26f03f50a73 100644 --- a/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py +++ b/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py @@ -1,13 +1,23 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable + import re from pathlib import Path -from typing import Dict, List +from typing import Any, Dict, List import pytest -from aioresponses import aioresponses +from aioresponses import aioresponses as AioResponsesMock from aioresponses.core import CallbackResult from models_library.projects_state import RunningState from yarl import URL +# WARNING: any request done through the client will go through aioresponses. It is +# unfortunate but that means any valid request (like calling the test server) prefix must be set as passthrough. +# Other than that it seems to behave nicely +PASSTHROUGH_REQUESTS_PREFIXES = ["http://127.0.0.1", "ws://"] + + # The adjacency list is defined as a dictionary with the key to the node and its list of successors FULL_PROJECT_PIPELINE_ADJACENCY: Dict[str, List[str]] = { "62bca361-8594-48c8-875e-b8577e868aec": [ @@ -21,7 +31,7 @@ "e83a359a-1efe-41d3-83aa-a285afbfaf12": [], } -FULL_PROJECT_NODE_STATES: Dict[str, Dict[str, str]] = { +FULL_PROJECT_NODE_STATES: Dict[str, Dict[str, Any]] = { "62bca361-8594-48c8-875e-b8577e868aec": {"modified": True, "dependencies": []}, "e0d7a1a5-0700-42c7-b033-97f72ac4a5cd": { "modified": True, @@ -46,6 +56,25 @@ } +@pytest.fixture +def aioresponses_mocker() -> AioResponsesMock: + """Generick aioresponses mock + + SEE https://github.com/pnuckowski/aioresponses + + Usage + + async def test_this(aioresponses_mocker): + aioresponses_mocker.get("https://foo.io") + + async with aiohttp.ClientSession() as session: + async with session.get("https://foo.aio") as response: + assert response.status == 200 + """ + with AioResponsesMock(passthrough=PASSTHROUGH_REQUESTS_PREFIXES) as mock: + yield mock + + def creation_cb(url, **kwargs) -> CallbackResult: assert "json" in kwargs, f"missing body in call to {url}" @@ -113,14 +142,10 @@ def get_computation_cb(url, **kwargs) -> CallbackResult: @pytest.fixture -async def director_v2_service_mock() -> aioresponses: - - """uses aioresponses to mock all calls of an aiohttpclient - WARNING: any request done through the client will go through aioresponses. It is - unfortunate but that means any valid request (like calling the test server) prefix must be set as passthrough. 
- Other than that it seems to behave nicely - """ - PASSTHROUGH_REQUESTS_PREFIXES = ["http://127.0.0.1", "ws://"] +async def director_v2_service_mock( + aioresponses_mocker: AioResponsesMock, +) -> AioResponsesMock: + """mocks responses of director-v2""" create_computation_pattern = re.compile( r"^http://[a-z\-_]*director-v2:[0-9]+/v2/computations$" ) @@ -132,37 +157,33 @@ async def director_v2_service_mock() -> aioresponses: r"^http://[a-z\-_]*director-v2:[0-9]+/v2/computations/.*:stop$" ) delete_computation_pattern = get_computation_pattern - with aioresponses(passthrough=PASSTHROUGH_REQUESTS_PREFIXES) as mock: - mock.post( - create_computation_pattern, - callback=creation_cb, - repeat=True, - ) - mock.post( - stop_computation_pattern, - status=204, - repeat=True, - ) - mock.get( - get_computation_pattern, - status=202, - callback=get_computation_cb, - repeat=True, - ) - mock.delete(delete_computation_pattern, status=204, repeat=True) - yield mock + aioresponses_mocker.post( + create_computation_pattern, + callback=creation_cb, + repeat=True, + ) + aioresponses_mocker.post( + stop_computation_pattern, + status=204, + repeat=True, + ) + aioresponses_mocker.get( + get_computation_pattern, + status=202, + callback=get_computation_cb, + repeat=True, + ) + aioresponses_mocker.delete(delete_computation_pattern, status=204, repeat=True) + return aioresponses_mocker -@pytest.fixture -async def storage_v0_service_mock() -> aioresponses: - """uses aioresponses to mock all calls of an aiohttpclient - WARNING: any request done through the client will go through aioresponses. It is - unfortunate but that means any valid request (like calling the test server) prefix must be set as passthrough. - Other than that it seems to behave nicely - """ - PASSTHROUGH_REQUESTS_PREFIXES = ["http://127.0.0.1", "ws://"] +@pytest.fixture +async def storage_v0_service_mock( + aioresponses_mocker: AioResponsesMock, +) -> AioResponsesMock: + """mocks responses of storage API""" def get_download_link_cb(url: URL, **kwargs) -> CallbackResult: file_id = url.path.rsplit("/files/")[1] @@ -179,11 +200,12 @@ def get_download_link_cb(url: URL, **kwargs) -> CallbackResult: r"^http://[a-z\-_]*storage:[0-9]+/v0/locations.*$" ) - with aioresponses(passthrough=PASSTHROUGH_REQUESTS_PREFIXES) as mock: - mock.get(get_download_link_pattern, callback=get_download_link_cb, repeat=True) - mock.get( - get_locations_link_pattern, - status=200, - payload={"data": [{"name": "simcore.s3", "id": "0"}]}, - ) - yield mock + aioresponses_mocker.get( + get_download_link_pattern, callback=get_download_link_cb, repeat=True + ) + aioresponses_mocker.get( + get_locations_link_pattern, + status=200, + payload={"data": [{"name": "simcore.s3", "id": "0"}]}, + ) + return aioresponses_mocker diff --git a/packages/service-integration/README.md b/packages/service-integration/README.md index 971c0c69fad..08d74a2a0a4 100644 --- a/packages/service-integration/README.md +++ b/packages/service-integration/README.md @@ -1,8 +1,8 @@ # simcore service integration library -This package is intended to be installed as an external library to help integrating services in osparc-simcore. Here "integration" means -that the resulting service can be reliably deployed and run as a part of a node in the study pipeline. This library defines requirements +This package is intended to be installed as an external library **to help** (notice that it is NOT required) integrating services in osparc-simcore. 
+Here "integration" means that the resulting service can be reliably deployed and run as a part of a node in the study pipeline. This library defines requirements on this services as well as tools to assist for their development and validation. diff --git a/packages/service-integration/requirements/_base.in b/packages/service-integration/requirements/_base.in index d0768906bcd..c49553bb6c2 100644 --- a/packages/service-integration/requirements/_base.in +++ b/packages/service-integration/requirements/_base.in @@ -9,7 +9,6 @@ pyyaml click - # pytest-plugin pytest docker diff --git a/packages/service-integration/requirements/_base.txt b/packages/service-integration/requirements/_base.txt index 3972aac3d86..8797561940b 100644 --- a/packages/service-integration/requirements/_base.txt +++ b/packages/service-integration/requirements/_base.txt @@ -14,8 +14,6 @@ chardet==4.0.0 # via requests click==8.0.0 # via -r requirements/_base.in -dataclasses==0.8 - # via pydantic dnspython==2.1.0 # via email-validator docker==5.0.0 @@ -27,11 +25,6 @@ idna==2.10 # -r requirements/../../../packages/models-library/requirements/_base.in # email-validator # requests -importlib-metadata==4.0.1 - # via - # jsonschema - # pluggy - # pytest iniconfig==1.1.1 # via pytest jsonschema==3.2.0 @@ -43,7 +36,10 @@ pluggy==0.13.1 py==1.10.0 # via pytest pydantic[email]==1.8.2 - # via -r requirements/../../../packages/models-library/requirements/_base.in + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/models-library/requirements/_base.in pyparsing==2.4.7 # via packaging pyrsistent==0.17.3 @@ -64,9 +60,7 @@ six==1.16.0 toml==0.10.2 # via pytest typing-extensions==3.10.0.0 - # via - # importlib-metadata - # pydantic + # via pydantic urllib3==1.26.4 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -74,8 +68,6 @@ urllib3==1.26.4 # requests websocket-client==0.59.0 # via docker -zipp==3.4.1 - # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/packages/service-integration/requirements/_test.txt b/packages/service-integration/requirements/_test.txt index bb81b29d313..951eff1744d 100644 --- a/packages/service-integration/requirements/_test.txt +++ b/packages/service-integration/requirements/_test.txt @@ -31,11 +31,6 @@ idna==2.10 # via # -c requirements/_base.txt # requests -importlib-metadata==4.0.1 - # via - # -c requirements/_base.txt - # pluggy - # pytest iniconfig==1.1.1 # via # -c requirements/_base.txt @@ -91,12 +86,6 @@ toml==0.10.2 # -c requirements/_base.txt # pylint # pytest -typed-ast==1.4.3 - # via astroid -typing-extensions==3.10.0.0 - # via - # -c requirements/_base.txt - # importlib-metadata urllib3==1.26.4 # via # -c requirements/../../../requirements/constraints.txt @@ -104,7 +93,3 @@ urllib3==1.26.4 # requests wrapt==1.12.1 # via astroid -zipp==3.4.1 - # via - # -c requirements/_base.txt - # importlib-metadata diff --git a/packages/service-integration/requirements/_tools.txt b/packages/service-integration/requirements/_tools.txt index 06c74432f72..ae8e4964ff1 100644 --- a/packages/service-integration/requirements/_tools.txt +++ b/packages/service-integration/requirements/_tools.txt @@ -19,27 +19,12 @@ click==8.0.0 # -c requirements/_base.txt # black # pip-tools -dataclasses==0.8 - # via - # -c requirements/_base.txt - # 
black distlib==0.3.1 # via virtualenv filelock==3.0.12 # via virtualenv identify==2.2.4 # via pre-commit -importlib-metadata==4.0.1 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # pep517 - # pre-commit - # virtualenv -importlib-resources==5.1.3 - # via - # pre-commit - # virtualenv isort==5.8.0 # via # -c requirements/_test.txt @@ -74,25 +59,8 @@ toml==0.10.2 # black # pep517 # pre-commit -typed-ast==1.4.3 - # via - # -c requirements/_test.txt - # black -typing-extensions==3.10.0.0 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # black - # importlib-metadata virtualenv==20.4.6 # via pre-commit -zipp==3.4.1 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # importlib-metadata - # importlib-resources - # pep517 # The following packages are considered to be unsafe in a requirements file: # pip diff --git a/packages/service-integration/setup.py b/packages/service-integration/setup.py index f94c6b422d2..7a6ebcf95c4 100644 --- a/packages/service-integration/setup.py +++ b/packages/service-integration/setup.py @@ -33,11 +33,11 @@ def read_reqs(reqs_path: Path): "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Natural Language :: English", - "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.8", "Framework :: Pytest", ], long_description=readme, - python_requires=">=3.6, <3.7", + python_requires=">=3.6", license="MIT license", install_requires=install_requirements, packages=find_packages(where="src"), diff --git a/packages/service-library/requirements/_base.txt b/packages/service-library/requirements/_base.txt index a50dbbdd8c8..9a3d9a4fe17 100644 --- a/packages/service-library/requirements/_base.txt +++ b/packages/service-library/requirements/_base.txt @@ -28,19 +28,10 @@ attrs==20.3.0 # openapi-core chardet==4.0.0 # via aiohttp -dataclasses==0.8 - # via - # pydantic - # werkzeug -idna-ssl==1.1.0 - # via aiohttp idna==2.10 # via # -r requirements/_base.in - # idna-ssl # yarl -importlib-metadata==4.0.1 - # via jsonschema isodate==0.6.0 # via # openapi-core @@ -108,17 +99,13 @@ trafaret==2.1.0 typing-extensions==3.10.0.0 # via # aiohttp - # importlib-metadata # pydantic - # yarl ujson==4.0.2 # via -r requirements/_base.in werkzeug==2.0.0 # via -r requirements/_base.in yarl==1.6.3 # via aiohttp -zipp==3.4.1 - # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/packages/service-library/requirements/_test.txt b/packages/service-library/requirements/_test.txt index df0a613d39e..6a150285e7c 100644 --- a/packages/service-library/requirements/_test.txt +++ b/packages/service-library/requirements/_test.txt @@ -23,8 +23,6 @@ attrs==20.3.0 # pytest-docker bcrypt==3.2.0 # via paramiko -cached-property==1.5.2 - # via docker-compose certifi==2020.12.5 # via requests cffi==1.14.5 @@ -62,22 +60,11 @@ docopt==0.6.2 # via # coveralls # docker-compose -idna-ssl==1.1.0 - # via - # -c requirements/_base.txt - # aiohttp idna==2.10 # via # -c requirements/_base.txt - # idna-ssl # requests # yarl -importlib-metadata==4.0.1 - # via - # -c requirements/_base.txt - # jsonschema - # pluggy - # pytest iniconfig==1.1.1 # via pytest isort==5.8.0 @@ -170,14 +157,10 @@ toml==0.10.2 # via # pylint # pytest -typed-ast==1.4.3 - # via astroid typing-extensions==3.10.0.0 # via # -c requirements/_base.txt # aiohttp - # importlib-metadata - # yarl urllib3==1.26.4 # via # -c requirements/../../../requirements/constraints.txt @@ -192,10 
+175,6 @@ yarl==1.6.3 # via # -c requirements/_base.txt # aiohttp -zipp==3.4.1 - # via - # -c requirements/_base.txt - # importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/packages/service-library/requirements/_tools.txt b/packages/service-library/requirements/_tools.txt index 3fb6a9d5273..7b161bed7ce 100644 --- a/packages/service-library/requirements/_tools.txt +++ b/packages/service-library/requirements/_tools.txt @@ -18,27 +18,12 @@ click==8.0.0 # via # black # pip-tools -dataclasses==0.8 - # via - # -c requirements/_base.txt - # black distlib==0.3.1 # via virtualenv filelock==3.0.12 # via virtualenv identify==2.2.4 # via pre-commit -importlib-metadata==4.0.1 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # pep517 - # pre-commit - # virtualenv -importlib-resources==5.1.3 - # via - # pre-commit - # virtualenv isort==5.8.0 # via # -c requirements/_test.txt @@ -74,25 +59,8 @@ toml==0.10.2 # black # pep517 # pre-commit -typed-ast==1.4.3 - # via - # -c requirements/_test.txt - # black -typing-extensions==3.10.0.0 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # black - # importlib-metadata virtualenv==20.4.6 # via pre-commit -zipp==3.4.1 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # importlib-metadata - # importlib-resources - # pep517 # The following packages are considered to be unsafe in a requirements file: # pip diff --git a/packages/service-library/setup.py b/packages/service-library/setup.py index ce1b58ff9b0..20f090ccd67 100644 --- a/packages/service-library/setup.py +++ b/packages/service-library/setup.py @@ -33,10 +33,11 @@ def read_reqs(reqs_path: Path): "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Natural Language :: English", - "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.8", ], long_description=readme, license="MIT license", + python_requires="~=3.8", install_requires=install_requirements, packages=find_packages(where="src"), package_dir={"": "src"}, diff --git a/packages/service-library/src/servicelib/rest_responses.py b/packages/service-library/src/servicelib/rest_responses.py index 3d0cc9582ee..55e2d1f7423 100644 --- a/packages/service-library/src/servicelib/rest_responses.py +++ b/packages/service-library/src/servicelib/rest_responses.py @@ -29,6 +29,7 @@ def is_enveloped_from_text(text: str) -> bool: def is_enveloped(payload: Union[Mapping, str]) -> bool: + # pylint: disable=isinstance-second-argument-not-valid-type if isinstance(payload, Mapping): return is_enveloped_from_map(payload) if isinstance(payload, str): diff --git a/packages/service-library/src/servicelib/utils_debug_asyncio.py b/packages/service-library/src/servicelib/utils_debug_asyncio.py new file mode 100644 index 00000000000..5e8d1c56dd7 --- /dev/null +++ b/packages/service-library/src/servicelib/utils_debug_asyncio.py @@ -0,0 +1,10 @@ +import asyncio +from io import StringIO + + +def get_loop_info() -> str: + stream = StringIO() + for n, task in enumerate(asyncio.all_tasks()): + prefix = "*" if task == asyncio.current_task() else " " + print(f"{prefix}{n+1:2d}) {task}", file=stream) + return stream.getvalue() diff --git a/packages/service-library/tests/test_observer.py b/packages/service-library/tests/test_observer.py index a27c3af5dfa..05df3e25b09 100644 --- a/packages/service-library/tests/test_observer.py +++ b/packages/service-library/tests/test_observer.py @@ -2,15 +2,13 @@ # 
pylint:disable=unused-argument # pylint:disable=redefined-outer-name -from asyncio import Future from servicelib.observer import emit, observe async def test_observer(loop, mocker): - # register a cb function - cb_function = mocker.Mock(return_value=Future()) - cb_function.return_value.set_result(None) + # register a couroutine as callback function + cb_function = mocker.AsyncMock(return_value=None) decorated_fct = observe(event="my_test_event")(cb_function) diff --git a/packages/service-library/tests/test_utils.py b/packages/service-library/tests/test_utils.py index a903f98ef83..04b7ef6a6ae 100644 --- a/packages/service-library/tests/test_utils.py +++ b/packages/service-library/tests/test_utils.py @@ -5,7 +5,6 @@ import asyncio import pytest - from servicelib.utils import logged_gather @@ -63,8 +62,8 @@ async def test_logged_gather(loop, coros, mock_logger): # NOTE: only first error in the list is raised, since it is not RuntimeError, that task assert isinstance(excinfo.value, ValueError) - for task in asyncio.Task.all_tasks(loop): - if task is not asyncio.Task.current_task(): + for task in asyncio.all_tasks(loop): + if task is not asyncio.current_task(): # info task.print_stack() diff --git a/packages/service-library/tests/with_postgres/test_aiopg_utils.py b/packages/service-library/tests/with_postgres/test_aiopg_utils.py index 7c42ed33587..63076b37818 100644 --- a/packages/service-library/tests/with_postgres/test_aiopg_utils.py +++ b/packages/service-library/tests/with_postgres/test_aiopg_utils.py @@ -10,9 +10,9 @@ from pathlib import Path import aiopg.sa -import psycopg2 import pytest import sqlalchemy as sa +import sqlalchemy.exc as sa_exceptions from aiohttp import web from servicelib.aiopg_utils import ( DatabaseError, @@ -64,6 +64,7 @@ async def _create_table(engine: aiopg.sa.Engine): def test_dsn_uri_with_query(postgres_service_with_fake_data): uri = postgres_service_with_fake_data.to_uri(with_query=True) + sa_engine = None try: sa_engine = sa.create_engine(uri, echo=True, echo_pool=True) assert sa_engine.name == "postgresql" @@ -73,10 +74,12 @@ def test_dsn_uri_with_query(postgres_service_with_fake_data): metadata.create_all(sa_engine) metadata.drop_all(sa_engine) - except sa.exc.SQLAlchemyError as ee: + except sa_exceptions.SQLAlchemyError as ee: pytest.fail(f"Cannot connect with {uri}: {ee}") + finally: - sa_engine.dispose() + if sa_engine: + sa_engine.dispose() async def test_create_pg_engine(postgres_service_with_fake_data): @@ -130,22 +133,10 @@ async def test_engine_when_idle_for_some_time(): # by default docker swarm kills connections that are idle for more than 15 minutes await asyncio.sleep(901) - # import pdb; pdb.set_trace() async with engine.acquire() as conn: await conn.execute(tbl.insert().values(val="third")) - # import pdb; pdb.set_trace() - - -async def test_engine_when_pg_not_reachable(loop): - dsn = DataSourceName( - database="db", user="foo", password="foo", host="127.0.0.1", port=123 - ) - - with pytest.raises(psycopg2.OperationalError): - await create_pg_engine(dsn) - def test_init_tables(postgres_service_with_fake_data): dsn = postgres_service_with_fake_data diff --git a/packages/simcore-sdk/requirements/_base.txt b/packages/simcore-sdk/requirements/_base.txt index bc995206a88..9a33d7b56ce 100644 --- a/packages/simcore-sdk/requirements/_base.txt +++ b/packages/simcore-sdk/requirements/_base.txt @@ -37,28 +37,19 @@ attrs==20.3.0 # openapi-core chardet==4.0.0 # via aiohttp -dataclasses==0.8 - # via - # pydantic - # werkzeug decorator==4.4.2 # via networkx 
dnspython==2.1.0 # via email-validator email-validator==1.1.2 # via pydantic -idna-ssl==1.1.0 - # via aiohttp idna==2.10 # via # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # email-validator - # idna-ssl # yarl -importlib-metadata==4.0.1 - # via jsonschema isodate==0.6.0 # via # openapi-core @@ -154,9 +145,7 @@ trafaret==2.1.0 typing-extensions==3.10.0.0 # via # aiohttp - # importlib-metadata # pydantic - # yarl ujson==4.0.2 # via -r requirements/../../../packages/service-library/requirements/_base.in werkzeug==2.0.0 @@ -165,8 +154,6 @@ yarl==1.6.3 # via # -r requirements/../../../packages/postgres-database/requirements/_base.in # aiohttp -zipp==3.4.1 - # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/packages/simcore-sdk/requirements/_test.txt b/packages/simcore-sdk/requirements/_test.txt index 4c924360c90..181101cd50e 100644 --- a/packages/simcore-sdk/requirements/_test.txt +++ b/packages/simcore-sdk/requirements/_test.txt @@ -52,21 +52,11 @@ faker==8.1.4 # via -r requirements/_test.in icdiff==1.9.1 # via pytest-icdiff -idna-ssl==1.1.0 - # via - # -c requirements/_base.txt - # aiohttp idna==2.10 # via # -c requirements/_base.txt - # idna-ssl # requests # yarl -importlib-metadata==4.0.1 - # via - # -c requirements/_base.txt - # pluggy - # pytest iniconfig==1.1.1 # via pytest isort==5.8.0 @@ -171,14 +161,10 @@ toml==0.10.2 # via # pylint # pytest -typed-ast==1.4.3 - # via astroid typing-extensions==3.10.0.0 # via # -c requirements/_base.txt # aiohttp - # importlib-metadata - # yarl urllib3==1.26.4 # via # -c requirements/../../../requirements/constraints.txt @@ -192,7 +178,3 @@ yarl==1.6.3 # via # -c requirements/_base.txt # aiohttp -zipp==3.4.1 - # via - # -c requirements/_base.txt - # importlib-metadata diff --git a/packages/simcore-sdk/requirements/_tools.txt b/packages/simcore-sdk/requirements/_tools.txt index 27d60d5bb2e..351d756f1c0 100644 --- a/packages/simcore-sdk/requirements/_tools.txt +++ b/packages/simcore-sdk/requirements/_tools.txt @@ -18,27 +18,12 @@ click==8.0.0 # via # black # pip-tools -dataclasses==0.8 - # via - # -c requirements/_base.txt - # black distlib==0.3.1 # via virtualenv filelock==3.0.12 # via virtualenv identify==2.2.4 # via pre-commit -importlib-metadata==4.0.1 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # pep517 - # pre-commit - # virtualenv -importlib-resources==5.1.3 - # via - # pre-commit - # virtualenv isort==5.8.0 # via # -c requirements/_test.txt @@ -73,25 +58,8 @@ toml==0.10.2 # black # pep517 # pre-commit -typed-ast==1.4.3 - # via - # -c requirements/_test.txt - # black -typing-extensions==3.10.0.0 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # black - # importlib-metadata virtualenv==20.4.6 # via pre-commit -zipp==3.4.1 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # importlib-metadata - # importlib-resources - # pep517 # The following packages are considered to be unsafe in a requirements file: # pip diff --git a/packages/simcore-sdk/tests/helpers/utils_docker.py b/packages/simcore-sdk/tests/helpers/utils_docker.py index ede5e27056a..3ebc2427eba 100644 --- a/packages/simcore-sdk/tests/helpers/utils_docker.py +++ b/packages/simcore-sdk/tests/helpers/utils_docker.py @@ -76,7 +76,7 @@ def run_docker_compose_config( """ # 
FIXME: use instead packages/pytest-simcore/src/pytest_simcore/helpers/utils_docker.py - if not isinstance(docker_compose_paths, List): + if not isinstance(docker_compose_paths, list): docker_compose_paths = [ docker_compose_paths, ] diff --git a/packages/simcore-sdk/tests/helpers/utils_futures.py b/packages/simcore-sdk/tests/helpers/utils_futures.py deleted file mode 100644 index fa3f4461b2c..00000000000 --- a/packages/simcore-sdk/tests/helpers/utils_futures.py +++ /dev/null @@ -1,7 +0,0 @@ -from asyncio import Future - - -def future_with_result(result) -> Future: - f = Future() - f.set_result(result) - return f diff --git a/packages/simcore-sdk/tests/unit/test_data_manager.py b/packages/simcore-sdk/tests/unit/test_data_manager.py index d69d3d3fbc6..5c64fef8095 100644 --- a/packages/simcore-sdk/tests/unit/test_data_manager.py +++ b/packages/simcore-sdk/tests/unit/test_data_manager.py @@ -3,7 +3,6 @@ # pylint:disable=redefined-outer-name import asyncio -from asyncio import Future from filecmp import cmpfiles from pathlib import Path from shutil import copy, make_archive, unpack_archive @@ -52,8 +51,7 @@ async def test_push_folder( mock_filemanager = mocker.patch( "simcore_sdk.node_data.data_manager.filemanager", spec=True ) - mock_filemanager.upload_file.return_value = Future() - mock_filemanager.upload_file.return_value.set_result("") + mock_filemanager.upload_file.return_value = "" mock_config = mocker.patch("simcore_sdk.node_data.data_manager.config", spec=True) mock_config.PROJECT_ID = "some funky ID" mock_config.NODE_UUID = "another funky ID" @@ -103,8 +101,7 @@ async def test_push_file( mock_filemanager = mocker.patch( "simcore_sdk.node_data.data_manager.filemanager", spec=True ) - mock_filemanager.upload_file.return_value = Future() - mock_filemanager.upload_file.return_value.set_result("") + mock_filemanager.upload_file.return_value = "" mock_config = mocker.patch("simcore_sdk.node_data.data_manager.config", spec=True) mock_config.PROJECT_ID = "some funky ID" mock_config.NODE_UUID = "another funky ID" @@ -162,8 +159,7 @@ async def test_pull_folder( mock_filemanager = mocker.patch( "simcore_sdk.node_data.data_manager.filemanager", spec=True ) - mock_filemanager.download_file_from_s3.return_value = Future() - mock_filemanager.download_file_from_s3.return_value.set_result(fake_zipped_folder) + mock_filemanager.download_file_from_s3.return_value = fake_zipped_folder mock_config = mocker.patch("simcore_sdk.node_data.data_manager.config", spec=True) mock_config.PROJECT_ID = "some funky ID" mock_config.NODE_UUID = "another funky ID" @@ -209,8 +205,7 @@ async def test_pull_file( mock_filemanager = mocker.patch( "simcore_sdk.node_data.data_manager.filemanager", spec=True ) - mock_filemanager.download_file_from_s3.return_value = Future() - mock_filemanager.download_file_from_s3.return_value.set_result(fake_downloaded_file) + mock_filemanager.download_file_from_s3.return_value = fake_downloaded_file mock_config = mocker.patch("simcore_sdk.node_data.data_manager.config", spec=True) mock_config.PROJECT_ID = "some funky ID" mock_config.NODE_UUID = "another funky ID" diff --git a/packages/simcore-sdk/tests/unit/test_item.py b/packages/simcore-sdk/tests/unit/test_item.py index 3d802c9efec..e3b2182fb70 100644 --- a/packages/simcore-sdk/tests/unit/test_item.py +++ b/packages/simcore-sdk/tests/unit/test_item.py @@ -11,7 +11,6 @@ from simcore_sdk.node_ports._data_item import DataItem from simcore_sdk.node_ports._item import DataItemValue, Item, ItemConcreteValue from simcore_sdk.node_ports._schema_item 
import SchemaItem -from utils_futures import future_with_result @pytest.fixture @@ -225,8 +224,8 @@ async def test_set_new_value( item_value_to_set: ItemConcreteValue, expected_value: ItemConcreteValue, mocker, -): # pylint: disable=W0613 - mock_method = mocker.Mock(return_value=future_with_result("")) +): + mock_method = mocker.AsyncMock(return_value="") item = create_item(item_type, None) item.new_data_cb = mock_method assert await item.get() is None diff --git a/packages/simcore-sdk/tests/unit/test_itemstlist.py b/packages/simcore-sdk/tests/unit/test_itemstlist.py index 7c4723c1b4b..44e0a985861 100644 --- a/packages/simcore-sdk/tests/unit/test_itemstlist.py +++ b/packages/simcore-sdk/tests/unit/test_itemstlist.py @@ -12,7 +12,6 @@ from simcore_sdk.node_ports._items_list import ItemsList from simcore_sdk.node_ports._schema_item import SchemaItem from simcore_sdk.node_ports._schema_items_list import SchemaItemsList -from utils_futures import future_with_result def create_item( @@ -90,7 +89,7 @@ def test_access_by_wrong_key(): async def test_modifying_items_triggers_cb(mocker): # pylint: disable=C0103 - mock_method = mocker.Mock(return_value=future_with_result("")) + mock_method = mocker.AsyncMock(return_value="") itemslist = create_items_list( [("1", "integer", 333), ("2", "integer", 333), ("3", "integer", 333)] diff --git a/packages/simcore-sdk/tests/unit/test_nodeports_v2.py b/packages/simcore-sdk/tests/unit/test_nodeports_v2.py index 841b4b22ce7..90c66fb1a1d 100644 --- a/packages/simcore-sdk/tests/unit/test_nodeports_v2.py +++ b/packages/simcore-sdk/tests/unit/test_nodeports_v2.py @@ -2,7 +2,6 @@ # pylint:disable=unused-argument # pylint:disable=redefined-outer-name -from asyncio import Future from pathlib import Path from typing import Any, Callable, Dict @@ -124,9 +123,8 @@ def e_tag() -> str: async def mock_upload_file(mocker, e_tag): mock = mocker.patch( "simcore_sdk.node_ports.filemanager.upload_file", - return_value=Future(), + return_value=("0", e_tag), ) - mock.return_value.set_result(("0", e_tag)) yield mock diff --git a/packages/simcore-sdk/tests/unit/test_port.py b/packages/simcore-sdk/tests/unit/test_port.py index b688e19775e..07baa0e11d8 100644 --- a/packages/simcore-sdk/tests/unit/test_port.py +++ b/packages/simcore-sdk/tests/unit/test_port.py @@ -7,7 +7,6 @@ import re import shutil import tempfile -from asyncio import Future from collections import namedtuple from pathlib import Path from typing import Any, Dict, Optional, Type, Union @@ -159,9 +158,8 @@ def e_tag_fixture() -> str: async def mock_upload_file(mocker, e_tag): mock = mocker.patch( "simcore_sdk.node_ports.filemanager.upload_file", - return_value=Future(), + return_value=(simcore_store_id(), e_tag), ) - mock.return_value.set_result((simcore_store_id(), e_tag)) yield mock diff --git a/requirements/PYTHON_VERSION b/requirements/PYTHON_VERSION index d70c8f8d89f..cc1923a40b1 100644 --- a/requirements/PYTHON_VERSION +++ b/requirements/PYTHON_VERSION @@ -1 +1 @@ -3.6 +3.8 diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 08de7178ae9..798f92114fa 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -23,6 +23,17 @@ docker-compose==1.29.1 # vulnerability https://github.com/advisories/GHSA-rhm9-p9w5-fwm7 Feb.2021 cryptography>=3.3.2 - # constraint since https://github.com/MagicStack/uvloop/releases/tag/v0.15.0: drops support for 3.5/3.6 Feb.2021 uvloop<0.15.0 ; python_version < '3.7' + +# All backports libraries add environ markers +# NOTE: If >second dependency, 
this will annotate a marker in the compiled requirements file +# +async-exit-stack ; python_version < '3.7' +async-generator ; python_version < '3.7' +contextvars ; python_version < '3.7' +dataclasses ; python_version < '3.7' +importlib-metadata ; python_version < '3.8' +importlib-resources ; python_version < '3.9' +typing-extensions ; python_version < '3.7' +zipp ; python_version < '3.7' diff --git a/requirements/packages-notes.md b/requirements/packages-notes.md index 7df31fac0a8..93994e23b7b 100644 --- a/requirements/packages-notes.md +++ b/requirements/packages-notes.md @@ -36,3 +36,4 @@ Keeps a list notes with relevant information about releases of python package. S - currently it only covers two packages of this repository - due to the hybrid nature of this repo, we cannot take full advantage of this great tool - Several backports above show some of the dependencies that will dissapear with a python upgrade! +- TODO: compare pip-compile output vs freeze to check whether some dependencies are NOT pinned! diff --git a/requirements/tools/Dockerfile b/requirements/tools/Dockerfile index 4a1902b1dd4..4828fe82ad7 100644 --- a/requirements/tools/Dockerfile +++ b/requirements/tools/Dockerfile @@ -2,7 +2,12 @@ # to perform operations like pip-compile or auto-formatting # that preserves identical environment across developer machines # -ARG PYTHON_VERSION="3.6.10" +# Python version can be upgraded if: +# - Has been patched several times (avoid using the very first release for production) +# - Can be installed with pyenv (SEE pyenv install --list ) +# +# +ARG PYTHON_VERSION="3.8.10" FROM python:${PYTHON_VERSION}-slim-buster as base diff --git a/requirements/tools/Makefile b/requirements/tools/Makefile index ef8a3b80067..c090ff179f0 100644 --- a/requirements/tools/Makefile +++ b/requirements/tools/Makefile @@ -14,6 +14,8 @@ # .DEFAULT_GOAL := help +PYTHON_VERSION=3.8.10 + # locations REPODIR := $(shell git rev-parse --show-toplevel) PACKAGES_DIR := $(abspath $(REPODIR)/packages) @@ -25,7 +27,7 @@ UPGRADE_OPTION := $(if $(upgrade),upgrade=$(upgrade),) MAKE_C := $(MAKE) --directory -.PHONY: touch reqs _check_py36_version +.PHONY: touch reqs info # requirements in packages and services _compiled-all = $(shell find $(REPODIR) -path "*/requirements/*.txt") @@ -40,12 +42,18 @@ _target-inputs += $(shell find $(REPODIR)/tests/ -type f -name "*.in") # services tests libraries, tools and fixtures _target-inputs += $(shell find ${SERVICES_DIR} -type f -name "_test.in") -$(info Found $(words $(_target-inputs)) inputs) - -_check_py36_version: - # Checking that runs with correct python version - @python3 -c "import sys; assert sys.version_info[:2]==(3,6), f'Expected python 3.6, got {sys.version_info}'" +info: ## displays some info + # dev tools version + @echo ' make : $(shell make --version 2>&1 | head -n 1)' + @echo ' jq : $(shell jq --version 2> /dev/null || echo missing)' + @echo ' awk : $(shell awk -W version 2>&1 | head -n 1)' + @echo ' node : $(shell node --version 2> /dev/null || echo missing)' + @echo ' python : $(shell python3 --version)' + # installed in .venv + @pip list + # target reqs found + @echo "Found $(words $(_target-inputs)) inputs" touch: ## touches all package requirement inputs @@ -55,19 +63,23 @@ touch: ## touches all package requirement inputs # Touchs all target input requirements @$(foreach p,${_target-inputs},touch $(p);) -reqs: _check_py36_version ## updates requirements of all package libraries +reqs: ## updates requirements of all package libraries # Upgrading $(upgrade) 
requirements @$(foreach p,${_target-inputs},echo Touching $(p);touch $(p);$(MAKE_C) $(dir $(p)) reqs $(UPGRADE_OPTION);) +IMAGE_NAME:=local/python-devkit:${PYTHON_VERSION} + # SEE https://medium.com/faun/set-current-host-user-for-docker-container-4e521cef9ffc .PHONY: build -build build-nc: - docker build $(if $(findstring -nc,$@),--no-cache,) --tag local/python-devkit:latest . +build build-nc: ## builds tooling image ${IMAGE_NAME} + docker build $(if $(findstring -nc,$@),--no-cache,) \ + --build-arg PYTHON_VERSION="${PYTHON_VERSION}" \ + --tag ${IMAGE_NAME} . .PHONY: run -run: build ## Runs upgrade in a container +run: build ## Runs upgrade in a container [WARNING! UNDER DEV. USE CAREFULY] docker run -it \ --workdir="/home/$(USER)" \ --volume="/etc/group:/etc/group:ro" \ @@ -76,12 +88,12 @@ run: build ## Runs upgrade in a container --volume=$(REPODIR):/home/$(USER) \ --user=$(shell id -u):$(shell id -g) \ --entrypoint=/bin/bash \ - local/python-devkit:latest \ + ${IMAGE_NAME} \ -c "cd requirements/tools; make reqs $(if $(upgrade),upgrade=$(upgrade),)" .PHONY: shell -shell: build +shell: build ## Opens shell in ${IMAGE_NAME} docker run -it \ --workdir="/home/$(USER)/requirements/tools" \ --volume="/etc/group:/etc/group:ro" \ @@ -90,7 +102,7 @@ shell: build --volume=$(REPODIR):/home/$(USER) \ --user=$(shell id -u):$(shell id -g) \ --entrypoint=/bin/bash \ - local/python-devkit:latest + ${IMAGE_NAME} .PHONY: help diff --git a/scripts/common.Makefile b/scripts/common.Makefile index 428786c2eb2..a3700df01d3 100644 --- a/scripts/common.Makefile +++ b/scripts/common.Makefile @@ -39,6 +39,7 @@ REPO_BASE_DIR := $(shell git rev-parse --show-toplevel) SCRIPTS_DIR := $(abspath $(REPO_BASE_DIR)/scripts) # virtual env +EXPECTED_PYTHON_VERSION := $(shell cat $(REPO_BASE_DIR)/requirements/PYTHON_VERSION) VENV_DIR := $(abspath $(REPO_BASE_DIR)/.venv) # @@ -158,7 +159,7 @@ version-major: ## commits version with backwards-INcompatible addition or change _check_python_version: # Checking that runs with correct python version - @python3 -c "import sys; assert sys.version_info[:2]==(3,6), f'Expected python 3.6, got {sys.version_info}'" + @python3 -c "import sys; current_version=[int(d) for d in '$(EXPECTED_PYTHON_VERSION)'.split('.')]; assert sys.version_info[:2]==tuple(current_version[:2]), f'Expected python $(EXPECTED_PYTHON_VERSION), got {sys.version_info}'" _check_venv_active: _check_python_version diff --git a/scripts/mypy.bash b/scripts/mypy.bash index b3da6b2042f..fe0067066ad 100755 --- a/scripts/mypy.bash +++ b/scripts/mypy.bash @@ -10,7 +10,7 @@ image_name="$(basename $0):latest" docker build --tag "$image_name" -<=3.7 -async-generator # not needed when python>=3.7 # data models pydantic[dotenv] diff --git a/services/api-server/requirements/_base.txt b/services/api-server/requirements/_base.txt index b385ef199fe..79ddc7303b3 100644 --- a/services/api-server/requirements/_base.txt +++ b/services/api-server/requirements/_base.txt @@ -39,9 +39,7 @@ aiozipkin==0.7.1 aniso8601==7.0.0 # via graphene async-exit-stack==1.0.1 - # via - # -r requirements/_base.in - # fastapi + # via fastapi async-generator==1.10 # via # -r requirements/_base.in @@ -74,6 +72,8 @@ click==7.1.2 # via uvicorn contextvars==2.4 # via sniffio +configparser==5.0.2 + # via minio cryptography==3.4.7 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -85,10 +85,6 @@ cryptography==3.4.7 # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in -dataclasses==0.8 - # via - # pydantic - # werkzeug decorator==4.4.2 # via networkx dnspython==2.1.0 @@ -128,16 +124,25 @@ idna==2.10 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # email-validator - # idna-ssl # requests # rfc3986 # yarl immutables==0.15 # via contextvars -importlib-metadata==4.0.1 - # via jsonschema -importlib-resources==5.1.3 +importlib-resources==5.1.3 ; python_version < "3.9" # via -r requirements/_base.in +isodate==0.6.0 + # via + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/s3wrapper/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/_base.in isodate==0.6.0 # via # openapi-core @@ -359,10 +364,6 @@ yarl==1.6.3 # -r requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in # aiohttp -zipp==3.4.1 - # via - # importlib-metadata - # importlib-resources # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/services/api-server/requirements/_test.txt b/services/api-server/requirements/_test.txt index 249eca98ea8..33587923aff 100644 --- a/services/api-server/requirements/_test.txt +++ b/services/api-server/requirements/_test.txt @@ -10,14 +10,6 @@ asgi-lifespan==1.0.1 # via -r requirements/_test.in astroid==2.5.6 # via pylint -async-exit-stack==1.0.1 - # via - # -c requirements/_base.txt - # asgi-lifespan -async-generator==1.10 - # via - # -c requirements/_base.txt - # httpx attrs==20.3.0 # via # -c requirements/_base.txt @@ -28,8 +20,6 @@ bcrypt==3.2.0 # via # paramiko # passlib -cached-property==1.5.2 - # via docker-compose certifi==2020.12.5 # via # -c requirements/_base.txt @@ -51,10 +41,6 @@ click==7.1.2 # -r requirements/_test.in codecov==2.1.11 # via -r requirements/_test.in -contextvars==2.4 - # via - # -c requirements/_base.txt - # sniffio coverage[toml]==5.5 # via # codecov @@ -106,16 +92,6 @@ idna==2.10 # requests # rfc3986 # yarl -immutables==0.15 - # via - # -c requirements/_base.txt - # contextvars -importlib-metadata==4.0.1 - # via - # -c requirements/_base.txt - # jsonschema - # pluggy - # pytest iniconfig==1.1.1 # via pytest isort==5.8.0 @@ -254,13 +230,6 @@ 
toml==0.10.2 # coverage # pylint # pytest -typed-ast==1.4.3 - # via astroid -typing-extensions==3.10.0.0 - # via - # -c requirements/_base.txt - # importlib-metadata - # yarl urllib3==1.26.4 # via # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt @@ -277,10 +246,6 @@ yarl==1.6.3 # via # -c requirements/_base.txt # -r requirements/../../../packages/postgres-database/requirements/_base.in -zipp==3.4.1 - # via - # -c requirements/_base.txt - # importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/services/api-server/requirements/_tools.txt b/services/api-server/requirements/_tools.txt index 7752f3775f7..b0685b6370f 100644 --- a/services/api-server/requirements/_tools.txt +++ b/services/api-server/requirements/_tools.txt @@ -24,28 +24,12 @@ click==7.1.2 # -c requirements/_test.txt # black # pip-tools -dataclasses==0.8 - # via - # -c requirements/_base.txt - # black distlib==0.3.1 # via virtualenv filelock==3.0.12 # via virtualenv identify==2.2.4 # via pre-commit -importlib-metadata==4.0.1 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # pep517 - # pre-commit - # virtualenv -importlib-resources==5.1.3 - # via - # -c requirements/_base.txt - # pre-commit - # virtualenv isort==5.8.0 # via # -c requirements/_test.txt @@ -94,27 +78,10 @@ toml==0.10.2 # black # pep517 # pre-commit -typed-ast==1.4.3 - # via - # -c requirements/_test.txt - # black -typing-extensions==3.10.0.0 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # black - # importlib-metadata virtualenv==20.4.6 # via pre-commit watchdog[watchmedo]==2.1.2 # via -r requirements/_tools.in -zipp==3.4.1 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # importlib-metadata - # importlib-resources - # pep517 # The following packages are considered to be unsafe in a requirements file: # pip diff --git a/services/api-server/setup.py b/services/api-server/setup.py index 161992ce279..c43cc01494b 100644 --- a/services/api-server/setup.py +++ b/services/api-server/setup.py @@ -6,9 +6,9 @@ from setuptools import find_packages, setup -if sys.version_info.major != 3 and sys.version_info.minor >= 6: +if not (sys.version_info.major == 3 and sys.version_info.minor == 8): raise RuntimeError( - "Expected ~=3.6, got %s (Tip: did you forget to 'source .venv/bin/activate' or 'pyenv local'?)" + "Expected ~=3.8, got %s (Tip: did you forget to 'source .venv/bin/activate' or 'pyenv local'?)" % str(sys.version_info) ) @@ -43,11 +43,11 @@ def read_reqs(reqs_path: Path): "Development Status :: 1 - Planning", "License :: OSI Approved :: MIT License", "Natural Language :: English", - "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.8", ], long_description=readme, license="MIT license", - python_requires="~=3.6", + python_requires="~=3.8", packages=find_packages(where="src"), package_dir={ "": "src", diff --git a/services/api-server/src/simcore_service_api_server/utils/solver_job_models_converters.py b/services/api-server/src/simcore_service_api_server/utils/solver_job_models_converters.py index 25f51140cc6..d5bb030df43 100644 --- a/services/api-server/src/simcore_service_api_server/utils/solver_job_models_converters.py +++ b/services/api-server/src/simcore_service_api_server/utils/solver_job_models_converters.py @@ -22,7 +22,7 @@ from ..models.schemas.jobs import ArgumentType, Job, JobInputs, JobStatus, TaskStates from ..models.schemas.solvers import 
Solver, SolverKeyId, VersionStr from ..modules.director_v2 import ComputationTaskOut -from .typing_extra import get_args +from .typing_extra import get_types # UTILS ------ _BASE_UUID = uuid.UUID("231e13db-6bc6-4f64-ba56-2ee2c73b9f09") @@ -58,7 +58,7 @@ def create_node_inputs_from_job_inputs(inputs: JobInputs) -> Dict[InputID, Input node_inputs: Dict[InputID, InputTypes] = {} for name, value in inputs.values.items(): - assert isinstance(value, get_args(ArgumentType)) # nosec + assert isinstance(value, get_types(ArgumentType)) # nosec if isinstance(value, File): # FIXME: ensure this aligns with storage policy @@ -84,8 +84,8 @@ def create_job_inputs_from_node_inputs(inputs: Dict[InputID, InputTypes]) -> Job input_values: Dict[str, ArgumentType] = {} for name, value in inputs.items(): - assert isinstance(name, get_args(InputID)) # nosec - assert isinstance(value, get_args(InputTypes)) # nosec + assert isinstance(name, get_types(InputID)) # nosec + assert isinstance(value, get_types(InputTypes)) # nosec if isinstance(value, SimCoreFileLink): # FIXME: ensure this aligns with storage policy diff --git a/services/api-server/src/simcore_service_api_server/utils/solver_job_outputs.py b/services/api-server/src/simcore_service_api_server/utils/solver_job_outputs.py index c80612ca5cd..5ff650e34c7 100644 --- a/services/api-server/src/simcore_service_api_server/utils/solver_job_outputs.py +++ b/services/api-server/src/simcore_service_api_server/utils/solver_job_outputs.py @@ -11,7 +11,7 @@ from simcore_sdk.node_ports.dbmanager import DBManager from simcore_sdk.node_ports_v2 import Nodeports -from .typing_extra import get_args +from .typing_extra import get_types log = logging.getLogger(__name__) @@ -38,7 +38,7 @@ async def get_solver_output_results( solver_output_results = {} for port in (await solver.outputs).values(): log.debug("Getting %s [%s]: %s", port.key, port.property_type, port.value) - assert isinstance(port.value, get_args(ResultsTypes)) # nosec + assert isinstance(port.value, get_types(ResultsTypes)) # nosec solver_output_results[port.key] = port.value return solver_output_results diff --git a/services/api-server/src/simcore_service_api_server/utils/typing_extra.py b/services/api-server/src/simcore_service_api_server/utils/typing_extra.py index 8b590e08e59..b19c9c288f3 100644 --- a/services/api-server/src/simcore_service_api_server/utils/typing_extra.py +++ b/services/api-server/src/simcore_service_api_server/utils/typing_extra.py @@ -1,15 +1,17 @@ -import sys -from typing import Tuple +from typing import Dict, Tuple, Union, get_args, get_origin -def get_args(annotation) -> Tuple: - assert ( # nosec - sys.version_info.major == 3 and sys.version_info.minor < 8 # nosec - ), "TODO: py3.8 replace __args__ with typings.get_args" +def get_types(annotation) -> Tuple: + # WARNING: use for testing ONLY - try: - annotated_types = annotation.__args__ # works for unions - except AttributeError: + assert get_origin(Dict[str, int]) is dict # nosec + assert get_args(Dict[int, str]) == (int, str) # nosec + assert get_origin(Union[int, str]) is Union # nosec + assert get_args(Union[int, str]) == (int, str) # nosec + + if get_origin(annotation) is Union: + annotated_types = get_args(annotation) + else: annotated_types = (annotation,) def _transform(annotated_type): diff --git a/services/api-server/tests/unit/test_utils_solver_job_models_converters.py b/services/api-server/tests/unit/test_utils_solver_job_models_converters.py index 49de0989e97..98ec0b2a409 100644 --- 
a/services/api-server/tests/unit/test_utils_solver_job_models_converters.py +++ b/services/api-server/tests/unit/test_utils_solver_job_models_converters.py @@ -17,7 +17,7 @@ create_jobstatus_from_task, create_new_project_for_job, create_node_inputs_from_job_inputs, - get_args, + get_types, ) @@ -78,7 +78,7 @@ def test_job_to_node_inputs_conversion(): } ) for name, value in job_inputs.values.items(): - assert isinstance(value, get_args(ArgumentType)), f"Invalid type in {name}" + assert isinstance(value, get_types(ArgumentType)), f"Invalid type in {name}" node_inputs: Inputs = { "x": 4.33, @@ -95,7 +95,7 @@ def test_job_to_node_inputs_conversion(): for name, value in node_inputs.items(): # TODO: py3.8 use typings.get_args - assert isinstance(value, get_args(InputTypes)), f"Invalid type in {name}" + assert isinstance(value, get_types(InputTypes)), f"Invalid type in {name}" # test transformations in both directions got_node_inputs = create_node_inputs_from_job_inputs(inputs=job_inputs) diff --git a/services/api-server/tests/unit/test_utils_solver_job_outputs.py b/services/api-server/tests/unit/test_utils_solver_job_outputs.py index 46e8e7fff3a..0e61c2ed1d2 100644 --- a/services/api-server/tests/unit/test_utils_solver_job_outputs.py +++ b/services/api-server/tests/unit/test_utils_solver_job_outputs.py @@ -8,15 +8,15 @@ BaseFileLink, ResultsTypes, ) -from simcore_service_api_server.utils.typing_extra import get_args +from simcore_service_api_server.utils.typing_extra import get_types def test_result_type_mapped(): # I/O types returned by node-ports must be one-to-one mapped # with those returned as output results - api_arg_types = list(get_args(ArgumentType)) - output_arg_types = list(get_args(ResultsTypes)) + api_arg_types = list(get_types(ArgumentType)) + output_arg_types = list(get_types(ResultsTypes)) assert File in api_arg_types assert BaseFileLink in output_arg_types diff --git a/services/api-server/tools/templates/test_endpoints.py.jinja2 b/services/api-server/tools/templates/test_endpoints.py.jinja2 index 9bee32645e4..3e5027f2582 100644 --- a/services/api-server/tools/templates/test_endpoints.py.jinja2 +++ b/services/api-server/tools/templates/test_endpoints.py.jinja2 @@ -44,7 +44,7 @@ def test_standard_operations_on_resource(client, fake_data_dag_in): assert response.status_code == status.HTTP_200_OK got = response.json() - assert isinstance(got, List) + assert isinstance(got, list) assert len(got) == 1 # TODO: data_in is not the same as data_out?? 
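The `typing_extra.get_args` helper was renamed to `get_types` and rewritten on top of `typing.get_args`/`typing.get_origin`, both new in Python 3.8, and the tests now compare against the built-in `list` rather than `typing.List`. A minimal sketch of the core idea, leaving out the `File`-specific transformation the real helper applies (the `NumberOrText` alias is made up for illustration):

```python
from typing import Tuple, Union, get_args, get_origin

NumberOrText = Union[int, float, str]  # stand-in for aliases like ArgumentType/InputTypes

def get_types(annotation) -> Tuple:
    """Expand a Union into its member types; wrap plain annotations in a 1-tuple."""
    if get_origin(annotation) is Union:
        return get_args(annotation)
    return (annotation,)

assert isinstance(3.14, get_types(NumberOrText))  # float is one of the Union members
assert isinstance("abc", get_types(str))          # non-Union annotations still work
assert isinstance([], list)                       # isinstance() wants builtins, not typing.List
```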
diff --git a/services/catalog/Dockerfile b/services/catalog/Dockerfile index a86007dcdfe..98ae84cb26a 100644 --- a/services/catalog/Dockerfile +++ b/services/catalog/Dockerfile @@ -1,4 +1,4 @@ -ARG PYTHON_VERSION="3.6.10" +ARG PYTHON_VERSION="3.8.10" FROM python:${PYTHON_VERSION}-slim-buster as base # # diff --git a/services/catalog/requirements/_base.in b/services/catalog/requirements/_base.in index 827bd2b6447..4e3aacf5ee5 100644 --- a/services/catalog/requirements/_base.in +++ b/services/catalog/requirements/_base.in @@ -12,7 +12,7 @@ pyyaml # fastapi and extensions fastapi[all] -async-exit-stack; python_version < '3.7' +async-exit-stack ; python_version < '3.7' async-generator ; python_version < '3.7' dataclasses ; python_version < '3.7' diff --git a/services/catalog/requirements/_base.txt b/services/catalog/requirements/_base.txt index 8a2f2407ba6..b3bb06e9699 100644 --- a/services/catalog/requirements/_base.txt +++ b/services/catalog/requirements/_base.txt @@ -10,15 +10,10 @@ aiopg[sa]==1.2.1 # via -r requirements/_base.in aniso8601==7.0.0 # via graphene -async-exit-stack==1.0.1 ; python_version < "3.7" - # via - # -r requirements/_base.in - # fastapi -async-generator==1.10 ; python_version < "3.7" - # via - # -r requirements/_base.in - # fastapi - # httpx +async-exit-stack==1.0.1 + # via fastapi +async-generator==1.10 + # via fastapi async-timeout==3.0.1 # via aiopg certifi==2020.12.5 @@ -29,12 +24,6 @@ chardet==3.0.4 # via requests click==7.1.2 # via uvicorn -contextvars==2.4 - # via sniffio -dataclasses==0.8 ; python_version < "3.7" - # via - # -r requirements/_base.in - # pydantic dnspython==2.1.0 # via email-validator email-validator==1.1.2 @@ -69,8 +58,6 @@ idna==2.10 # requests # rfc3986 # yarl -immutables==0.15 - # via contextvars itsdangerous==1.1.0 # via fastapi jinja2==2.11.3 @@ -159,12 +146,8 @@ urllib3==1.26.4 # requests uvicorn[standard]==0.13.3 # via fastapi -uvloop==0.14.0 ; python_version < "3.7" - # via - # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt - # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt - # -c requirements/../../../requirements/constraints.txt - # uvicorn +uvloop==0.14.0 + # via uvicorn watchgod==0.6 # via uvicorn websockets==8.1 diff --git a/services/catalog/requirements/_test.txt b/services/catalog/requirements/_test.txt index f7974f1d471..332e8a98f4f 100644 --- a/services/catalog/requirements/_test.txt +++ b/services/catalog/requirements/_test.txt @@ -10,10 +10,6 @@ alembic==1.6.2 # via -r requirements/_test.in astroid==2.5.6 # via pylint -async-generator==1.10 ; python_version < "3.7" - # via - # -c requirements/_base.txt - # httpx async-timeout==3.0.1 # via # -c requirements/_base.txt @@ -26,8 +22,6 @@ attrs==20.3.0 # pytest-docker bcrypt==3.2.0 # via paramiko -cached-property==1.5.2 - # via docker-compose certifi==2020.12.5 # via # -c requirements/_base.txt @@ -49,11 +43,7 @@ click==7.1.2 # -r requirements/_test.in codecov==2.1.11 # via -r requirements/_test.in -contextvars==2.4 - # via - # -c requirements/_base.txt - # sniffio -coverage==5.5 +coverage==5.4 # via # codecov # coveralls @@ -94,24 +84,12 @@ httpx==0.18.1 # via # -c requirements/_base.txt # respx -idna-ssl==1.1.0 - # via aiohttp idna==2.10 # via # -c requirements/_base.txt - # idna-ssl # requests # rfc3986 # yarl -immutables==0.15 - # via - # -c requirements/_base.txt - # contextvars -importlib-metadata==4.0.1 - # via - # jsonschema - # pluggy - # pytest iniconfig==1.1.1 # 
via pytest isort==5.8.0 @@ -235,14 +213,10 @@ toml==0.10.2 # via # pylint # pytest -typed-ast==1.4.3 - # via astroid typing-extensions==3.7.4.3 # via # -c requirements/_base.txt # aiohttp - # importlib-metadata - # yarl urllib3==1.26.4 # via # -c requirements/../../../requirements/constraints.txt @@ -258,8 +232,6 @@ yarl==1.6.3 # via # -c requirements/_base.txt # aiohttp -zipp==3.4.1 - # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/services/catalog/requirements/_tools.txt b/services/catalog/requirements/_tools.txt index b314fb3a7ad..f93b0f756ae 100644 --- a/services/catalog/requirements/_tools.txt +++ b/services/catalog/requirements/_tools.txt @@ -22,26 +22,12 @@ click==7.1.2 # -c requirements/_test.txt # black # pip-tools -dataclasses==0.8 ; python_version < "3.7" - # via - # -c requirements/_base.txt - # black distlib==0.3.1 # via virtualenv filelock==3.0.12 # via virtualenv identify==2.2.4 # via pre-commit -importlib-metadata==4.0.1 - # via - # -c requirements/_test.txt - # pep517 - # pre-commit - # virtualenv -importlib-resources==5.1.3 - # via - # pre-commit - # virtualenv isort==5.8.0 # via # -c requirements/_test.txt @@ -77,26 +63,10 @@ toml==0.10.2 # black # pep517 # pre-commit -typed-ast==1.4.3 - # via - # -c requirements/_test.txt - # black -typing-extensions==3.7.4.3 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # black - # importlib-metadata virtualenv==20.4.6 # via pre-commit watchdog[watchmedo]==2.1.1 # via -r requirements/_tools.in -zipp==3.4.1 - # via - # -c requirements/_test.txt - # importlib-metadata - # importlib-resources - # pep517 # The following packages are considered to be unsafe in a requirements file: # pip diff --git a/services/catalog/setup.py b/services/catalog/setup.py index 29815eba85c..ba249ae499f 100644 --- a/services/catalog/setup.py +++ b/services/catalog/setup.py @@ -6,9 +6,9 @@ from setuptools import find_packages, setup -if sys.version_info.major != 3 and sys.version_info.minor >= 6: +if not (sys.version_info.major == 3 and sys.version_info.minor == 8): raise RuntimeError( - "Expected ~=3.6, got %s (Tip: did you forget to 'source .venv/bin/activate' or 'pyenv local'?)" + "Expected ~=3.8, got %s (Tip: did you forget to 'source .venv/bin/activate' or 'pyenv local'?)" % str(sys.version_info) ) @@ -41,11 +41,11 @@ def read_reqs(reqs_path: Path): "Development Status :: 3 - Alpha", "License :: OSI Approved :: MIT License", "Natural Language :: English", - "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.8", ], long_description=readme, license="MIT license", - python_requires="~=3.6", + python_requires="~=3.8", packages=find_packages(where="src"), package_dir={ "": "src", diff --git a/services/catalog/src/simcore_service_catalog/core/background_tasks.py b/services/catalog/src/simcore_service_catalog/core/background_tasks.py index b8114f02781..435c1c929ca 100644 --- a/services/catalog/src/simcore_service_catalog/core/background_tasks.py +++ b/services/catalog/src/simcore_service_catalog/core/background_tasks.py @@ -11,7 +11,7 @@ import asyncio import logging -from asyncio.futures import CancelledError +from asyncio import CancelledError from pprint import pformat from typing import Dict, Set, Tuple diff --git a/services/catalog/tests/unit/with_dbs/test_entrypoint_dags.py b/services/catalog/tests/unit/with_dbs/test_entrypoint_dags.py index e7f9ba474d0..acc286cab52 100644 --- 
a/services/catalog/tests/unit/with_dbs/test_entrypoint_dags.py +++ b/services/catalog/tests/unit/with_dbs/test_entrypoint_dags.py @@ -2,7 +2,7 @@ # pylint:disable=unused-argument # pylint:disable=redefined-outer-name -from typing import Dict, List +from typing import Dict import pytest from simcore_service_catalog.meta import api_version @@ -61,7 +61,7 @@ def test_standard_operations_on_resource( assert response.status_code == 200 got = response.json() - assert isinstance(got, List) + assert isinstance(got, list) assert len(got) == 1 # TODO: data_in is not the same as data_out?? diff --git a/services/catalog/tests/unit/with_dbs/test_entrypoint_services.py b/services/catalog/tests/unit/with_dbs/test_entrypoint_services.py index 55a665f9377..03d823c2570 100644 --- a/services/catalog/tests/unit/with_dbs/test_entrypoint_services.py +++ b/services/catalog/tests/unit/with_dbs/test_entrypoint_services.py @@ -161,10 +161,10 @@ async def test_director_mockup( @pytest.mark.skip( reason="Not ready, depency injection does not work, using monkeypatch. still issue with setting up database" ) -def test_list_services( +async def test_list_services( director_mockup, db_mockup, app: FastAPI, client: TestClient, user_id: int ): - asyncio.sleep(10) + await asyncio.sleep(10) url = URL("/v0/services").with_query(user_id=user_id) response = client.get(str(url)) diff --git a/services/catalog/tests/unit/with_dbs/test_services_access_rights.py b/services/catalog/tests/unit/with_dbs/test_services_access_rights.py index c9b3ff83a55..96feeba7a3f 100644 --- a/services/catalog/tests/unit/with_dbs/test_services_access_rights.py +++ b/services/catalog/tests/unit/with_dbs/test_services_access_rights.py @@ -7,7 +7,6 @@ from aiopg.sa.engine import Engine from fastapi import FastAPI from models_library.services import ServiceAccessRightsAtDB, ServiceDockerData -from pytest_simcore.helpers.utils_mock import future_with_result from simcore_service_catalog.db.repositories.services import ServicesRepository from simcore_service_catalog.models.domain.group import GroupAtDB from simcore_service_catalog.services.access_rights import ( @@ -93,18 +92,18 @@ async def test_auto_upgrade_policy( # Avoids calls to director API mocker.patch( "simcore_service_catalog.services.access_rights._is_old_service", - return_value=future_with_result(False), + return_value=False, ) # Avoids creating a users + user_to_group table data = GroupAtDB.Config.schema_extra["example"] data["gid"] = everyone_gid mocker.patch( "simcore_service_catalog.services.access_rights.GroupsRepository.get_everyone_group", - return_value=future_with_result(GroupAtDB.parse_obj(data)), + return_value=GroupAtDB.parse_obj(data), ) mocker.patch( "simcore_service_catalog.services.access_rights.GroupsRepository.get_user_gid_from_email", - return_value=future_with_result(user_gid), + return_value=user_gid, ) # SETUP --- diff --git a/services/director-v2/Dockerfile b/services/director-v2/Dockerfile index 6707f524d49..c5f463f0c29 100644 --- a/services/director-v2/Dockerfile +++ b/services/director-v2/Dockerfile @@ -1,4 +1,4 @@ -ARG PYTHON_VERSION="3.6.10" +ARG PYTHON_VERSION="3.8.10" FROM python:${PYTHON_VERSION}-slim-buster as base # diff --git a/services/director-v2/requirements/_base.in b/services/director-v2/requirements/_base.in index 4b114845705..4672f926752 100644 --- a/services/director-v2/requirements/_base.in +++ b/services/director-v2/requirements/_base.in @@ -4,10 +4,6 @@ # NOTE: ALL version constraints MUST be commented -c ../../../requirements/constraints.txt 
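The catalog tests above drop the `future_with_result` helper because, from Python 3.8 on, `unittest.mock.patch` (and therefore `pytest-mock`'s `mocker.patch`) installs an `AsyncMock` when the patched target is a coroutine function, so a plain `return_value` is awaited transparently. A small self-contained sketch of the pattern (the `Repo` class and its method are made up for illustration):

```python
import asyncio
from unittest.mock import patch

class Repo:
    async def get_everyone_group(self) -> int:
        raise RuntimeError("would normally hit the database")

async def main() -> None:
    # patch() detects the async target and substitutes an AsyncMock, so there is no
    # need to wrap the return value in an already-resolved Future anymore
    with patch.object(Repo, "get_everyone_group", return_value=1) as mocked:
        assert await Repo().get_everyone_group() == 1
        mocked.assert_awaited_once()

asyncio.run(main())
```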
-async-exit-stack; python_version < '3.7' -async-generator ; python_version < '3.7' -dataclasses ; python_version < '3.7' - # fastapi and extensions fastapi[all] diff --git a/services/director-v2/requirements/_base.txt b/services/director-v2/requirements/_base.txt index c180c399fbd..ac921f9024f 100644 --- a/services/director-v2/requirements/_base.txt +++ b/services/director-v2/requirements/_base.txt @@ -16,15 +16,10 @@ amqp==5.0.2 # via kombu aniso8601==7.0.0 # via graphene -async-exit-stack==1.0.1 ; python_version < "3.7" - # via - # -r requirements/_base.in - # fastapi -async-generator==1.10 ; python_version < "3.7" - # via - # -r requirements/_base.in - # fastapi - # httpx +async-exit-stack==1.0.1 + # via fastapi +async-generator==1.10 + # via fastapi async-timeout==3.0.1 # via # aiohttp @@ -53,12 +48,6 @@ click==7.1.2 # click-didyoumean # click-repl # uvicorn -contextvars==2.4 - # via sniffio -dataclasses==0.7 ; python_version < "3.7" - # via - # -r requirements/_base.in - # pydantic decorator==4.4.2 # via networkx dnspython==2.0.0 @@ -85,19 +74,12 @@ httptools==0.1.1 # via uvicorn httpx==0.18.1 # via -r requirements/_base.in -idna-ssl==1.1.0 - # via aiohttp idna==2.10 # via # email-validator - # idna-ssl # requests # rfc3986 # yarl -immutables==0.14 - # via contextvars -importlib-metadata==2.0.0 - # via kombu itsdangerous==1.1.0 # via fastapi jinja2==2.11.3 @@ -188,10 +170,8 @@ urllib3==1.26.4 # requests uvicorn[standard]==0.13.4 # via fastapi -uvloop==0.14.0 ; python_version < "3.7" - # via - # -c requirements/../../../requirements/constraints.txt - # uvicorn +uvloop==0.14.0 + # via uvicorn vine==5.0.0 # via # amqp @@ -204,5 +184,3 @@ websockets==8.1 # via uvicorn yarl==1.6.2 # via aiohttp -zipp==3.4.0 - # via importlib-metadata diff --git a/services/director-v2/requirements/_test.txt b/services/director-v2/requirements/_test.txt index 6a67bdd09d9..2250d523359 100644 --- a/services/director-v2/requirements/_test.txt +++ b/services/director-v2/requirements/_test.txt @@ -26,14 +26,6 @@ asgi-lifespan==1.0.1 # via -r requirements/_test.in astroid==2.5.6 # via pylint -async-exit-stack==1.0.1 ; python_version < "3.7" - # via - # -c requirements/_base.txt - # asgi-lifespan -async-generator==1.10 ; python_version < "3.7" - # via - # -c requirements/_base.txt - # httpx async-timeout==3.0.1 # via # -c requirements/_base.txt @@ -52,8 +44,6 @@ billiard==3.6.3.0 # via # -c requirements/_base.txt # celery -cached-property==1.5.2 - # via docker-compose celery[redis]==5.0.2 # via # -c requirements/_base.txt @@ -89,11 +79,7 @@ click==7.1.2 # click-repl codecov==2.1.11 # via -r requirements/_test.in -contextvars==2.4 - # via - # -c requirements/_base.txt - # sniffio -coverage==5.5 +coverage==5.4 # via # codecov # coveralls @@ -140,28 +126,12 @@ httpx==0.18.1 # respx icdiff==1.9.1 # via pytest-icdiff -idna-ssl==1.1.0 - # via - # -c requirements/_base.txt - # aiohttp idna==2.10 # via # -c requirements/_base.txt - # idna-ssl # requests # rfc3986 # yarl -immutables==0.14 - # via - # -c requirements/_base.txt - # contextvars -importlib-metadata==2.0.0 - # via - # -c requirements/_base.txt - # jsonschema - # kombu - # pluggy - # pytest iniconfig==1.1.1 # via pytest isort==5.8.0 @@ -314,13 +284,10 @@ toml==0.10.2 # via # pylint # pytest -typed-ast==1.4.3 - # via astroid typing-extensions==3.7.4.3 # via # -c requirements/_base.txt # aiohttp - # yarl urllib3==1.26.4 # via # -c requirements/../../../requirements/constraints.txt @@ -347,10 +314,6 @@ yarl==1.6.2 # aio-pika # aiohttp # aiormq -zipp==3.4.0 - # via 
- # -c requirements/_base.txt - # importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/services/director-v2/requirements/_tools.txt b/services/director-v2/requirements/_tools.txt index f53f6620cca..900db86dbfa 100644 --- a/services/director-v2/requirements/_tools.txt +++ b/services/director-v2/requirements/_tools.txt @@ -22,27 +22,12 @@ click==7.1.2 # -c requirements/_test.txt # black # pip-tools -dataclasses==0.7 ; python_version < "3.7" - # via - # -c requirements/_base.txt - # black distlib==0.3.1 # via virtualenv filelock==3.0.12 # via virtualenv identify==2.2.4 # via pre-commit -importlib-metadata==2.0.0 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # pep517 - # pre-commit - # virtualenv -importlib-resources==5.1.3 - # via - # pre-commit - # virtualenv isort==5.8.0 # via # -c requirements/_test.txt @@ -79,26 +64,10 @@ toml==0.10.2 # black # pep517 # pre-commit -typed-ast==1.4.3 - # via - # -c requirements/_test.txt - # black -typing-extensions==3.7.4.3 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # black virtualenv==20.4.6 # via pre-commit watchdog[watchmedo]==2.1.1 # via -r requirements/_tools.in -zipp==3.4.0 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # importlib-metadata - # importlib-resources - # pep517 # The following packages are considered to be unsafe in a requirements file: # pip diff --git a/services/director-v2/setup.py b/services/director-v2/setup.py index 87ebce5d308..36e1bb32f68 100644 --- a/services/director-v2/setup.py +++ b/services/director-v2/setup.py @@ -6,9 +6,9 @@ from setuptools import find_packages, setup -if sys.version_info.major != 3 and sys.version_info.minor >= 6: +if not (sys.version_info.major == 3 and sys.version_info.minor == 8): raise RuntimeError( - "Expected ~=3.6, got %s (Tip: did you forget to 'source .venv/bin/activate' or 'pyenv local'?)" + "Expected ~=3.8, got %s (Tip: did you forget to 'source .venv/bin/activate' or 'pyenv local'?)" % str(sys.version_info) ) @@ -41,11 +41,11 @@ def read_reqs(reqs_path: Path): "Development Status :: 1 - Planning", "License :: OSI Approved :: MIT License", "Natural Language :: English", - "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.8", ], long_description=readme, license="MIT license", - python_requires="~=3.6", + python_requires="~=3.8", packages=find_packages(where="src"), package_dir={ "": "src", diff --git a/services/director/setup.py b/services/director/setup.py index 73d165eda90..b9fefde11b3 100644 --- a/services/director/setup.py +++ b/services/director/setup.py @@ -6,9 +6,9 @@ here = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent -if sys.version_info < (3, 6): +if not (sys.version_info.major == 3 and sys.version_info.minor == 6): raise RuntimeError( - "Requires >=3.6, got %s. Did you forget to activate virtualenv?" + "Requires <=3.6, got %s. Did you forget to activate virtualenv?" 
% sys.version_info ) @@ -35,7 +35,7 @@ def read_reqs(reqs_path: Path): version="0.1.0", description="oSparc Director webserver service", author="Sylvain Anderegg (sanderegg)", - python_requires=">=3.6", + python_requires="~=3.6", packages=find_packages(where="src"), package_dir={ "": "src", diff --git a/services/dynamic-sidecar/Dockerfile b/services/dynamic-sidecar/Dockerfile index d7fd0ebddf6..a5bfb7e5559 100644 --- a/services/dynamic-sidecar/Dockerfile +++ b/services/dynamic-sidecar/Dockerfile @@ -1,4 +1,4 @@ -ARG PYTHON_VERSION="3.6.10" +ARG PYTHON_VERSION="3.8.10" FROM python:${PYTHON_VERSION}-slim-buster as base # # USAGE: diff --git a/services/dynamic-sidecar/requirements/_base.txt b/services/dynamic-sidecar/requirements/_base.txt index eb5c964a240..cdb86c9462e 100644 --- a/services/dynamic-sidecar/requirements/_base.txt +++ b/services/dynamic-sidecar/requirements/_base.txt @@ -22,8 +22,6 @@ attrs==20.3.0 # jsonschema bcrypt==3.2.0 # via paramiko -cached-property==1.5.2 - # via docker-compose certifi==2020.12.5 # via requests cffi==1.14.5 @@ -43,8 +41,6 @@ cryptography==3.4.7 # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # paramiko -dataclasses==0.8 - # via pydantic distro==1.5.0 # via docker-compose dnspython==2.1.0 @@ -67,18 +63,13 @@ fastapi==0.63.0 # via -r requirements/_base.in h11==0.12.0 # via uvicorn -idna-ssl==1.1.0 - # via aiohttp idna==2.10 # via # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/postgres-database/requirements/_base.in # email-validator - # idna-ssl # requests # yarl -importlib-metadata==4.0.1 - # via jsonschema jsonschema==3.2.0 # via docker-compose multidict==5.1.0 @@ -134,10 +125,7 @@ typing-extensions==3.7.4.3 # via # aiodocker # aiohttp - # importlib-metadata # pydantic - # uvicorn - # yarl urllib3==1.26.4 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -154,8 +142,6 @@ yarl==1.6.3 # via # -r requirements/../../../packages/postgres-database/requirements/_base.in # aiohttp -zipp==3.4.1 - # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/services/dynamic-sidecar/requirements/_test.txt b/services/dynamic-sidecar/requirements/_test.txt index aa2e613a285..68e287b5a14 100644 --- a/services/dynamic-sidecar/requirements/_test.txt +++ b/services/dynamic-sidecar/requirements/_test.txt @@ -26,11 +26,6 @@ idna==2.10 # via # -c requirements/_base.txt # requests -importlib-metadata==4.0.1 - # via - # -c requirements/_base.txt - # pluggy - # pytest iniconfig==1.1.1 # via pytest multidict==5.1.0 @@ -71,16 +66,8 @@ text-unidecode==1.3 # via faker toml==0.10.2 # via pytest -typing-extensions==3.7.4.3 - # via - # -c requirements/_base.txt - # importlib-metadata urllib3==1.26.4 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # requests -zipp==3.4.1 - # via - # -c requirements/_base.txt - # importlib-metadata diff --git a/services/dynamic-sidecar/requirements/_tools.txt b/services/dynamic-sidecar/requirements/_tools.txt index 405fa064520..4392e1b0e73 100644 --- a/services/dynamic-sidecar/requirements/_tools.txt +++ b/services/dynamic-sidecar/requirements/_tools.txt @@ -23,27 +23,12 @@ click==7.1.2 # -c requirements/_base.txt # black # pip-tools -dataclasses==0.8 - # via - # -c requirements/_base.txt - # black 
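The setup.py guards above are worth a second look: the old condition `sys.version_info.major != 3 and sys.version_info.minor >= 6` is false on every CPython 3.x, so it never caught a wrong interpreter; the replacement pins the exact major/minor pair. A hedged sketch of the corrected check (error wording paraphrased; the frozen director service keeps `(3, 6)` instead of `(3, 8)`):

```python
import sys

REQUIRED = (3, 8)  # director stays on (3, 6); every other service moves to 3.8

# old form, effectively dead code on any 3.x interpreter:
#   if sys.version_info.major != 3 and sys.version_info.minor >= 6: ...
if sys.version_info[:2] != REQUIRED:
    raise RuntimeError(
        f"Expected ~={REQUIRED[0]}.{REQUIRED[1]}, got {sys.version_info} "
        "(Tip: did you forget to 'source .venv/bin/activate' or 'pyenv local'?)"
    )
```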
distlib==0.3.1 # via virtualenv filelock==3.0.12 # via virtualenv identify==2.2.4 # via pre-commit -importlib-metadata==4.0.1 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # pep517 - # pre-commit - # virtualenv -importlib-resources==5.1.3 - # via - # pre-commit - # virtualenv isort==5.8.0 # via # -r requirements/../../../requirements/devenv.txt @@ -91,28 +76,15 @@ toml==0.10.2 # pre-commit # pylint typed-ast==1.4.3 - # via - # astroid - # black - # mypy + # via mypy typing-extensions==3.7.4.3 # via # -c requirements/_base.txt - # -c requirements/_test.txt - # black - # importlib-metadata # mypy virtualenv==20.4.6 # via pre-commit wrapt==1.12.1 # via astroid -zipp==3.4.1 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # importlib-metadata - # importlib-resources - # pep517 # The following packages are considered to be unsafe in a requirements file: # pip diff --git a/services/dynamic-sidecar/setup.py b/services/dynamic-sidecar/setup.py index d3f0477062d..bd69b1af782 100644 --- a/services/dynamic-sidecar/setup.py +++ b/services/dynamic-sidecar/setup.py @@ -30,7 +30,7 @@ def read_reqs(reqs_path: Path): "": "src", }, include_package_data=True, - python_requires=">=3.6", + python_requires="~=3.8", install_requires=install_requires, tests_require=tests_require, setup_requires=["setuptools_scm"], diff --git a/services/migration/Dockerfile b/services/migration/Dockerfile index 2bd31f85d67..68b9749625a 100644 --- a/services/migration/Dockerfile +++ b/services/migration/Dockerfile @@ -1,4 +1,4 @@ -ARG PYTHON_VERSION="3.6.10" +ARG PYTHON_VERSION="3.8.10" FROM python:${PYTHON_VERSION}-slim-buster as base LABEL maintainer=sanderegg diff --git a/services/sidecar/Dockerfile b/services/sidecar/Dockerfile index 1bfcd2a4d05..8a329177208 100644 --- a/services/sidecar/Dockerfile +++ b/services/sidecar/Dockerfile @@ -1,4 +1,4 @@ -ARG PYTHON_VERSION="3.6.10" +ARG PYTHON_VERSION="3.8.10" FROM python:${PYTHON_VERSION}-slim-buster as base # # USAGE: diff --git a/services/sidecar/requirements/_base.in b/services/sidecar/requirements/_base.in index 4b9dbd6ab45..ffb2a41f4dc 100644 --- a/services/sidecar/requirements/_base.in +++ b/services/sidecar/requirements/_base.in @@ -13,7 +13,6 @@ -c ../../../packages/postgres-database/requirements/_base.in -c ../../../packages/service-library/requirements/_base.in - aio-pika aiodocker aiofile diff --git a/services/sidecar/requirements/_base.txt b/services/sidecar/requirements/_base.txt index 0610a4f75ba..de9be6946dd 100644 --- a/services/sidecar/requirements/_base.txt +++ b/services/sidecar/requirements/_base.txt @@ -48,8 +48,6 @@ chardet==3.0.4 # via aiohttp click==7.1.2 # via -r requirements/_base.in -dataclasses==0.7 - # via pydantic decorator==4.4.2 # via networkx dnspython==2.0.0 @@ -58,18 +56,13 @@ email-validator==1.1.1 # via pydantic hiredis==1.1.0 # via aioredis -idna-ssl==1.1.0 - # via aiohttp idna==2.10 # via # -c requirements/../../../packages/models-library/requirements/_base.in # -c requirements/../../../packages/postgres-database/requirements/_base.in # -c requirements/../../../packages/service-library/requirements/_base.in # email-validator - # idna-ssl # yarl -importlib-metadata==2.0.0 - # via kombu kombu==4.6.11 # via celery multidict==4.7.6 @@ -143,5 +136,3 @@ yarl==1.5.1 # aio-pika # aiohttp # aiormq -zipp==3.2.0 - # via importlib-metadata diff --git a/services/sidecar/requirements/_packages.txt b/services/sidecar/requirements/_packages.txt index 5beeaf304c2..ac2894a8ba9 100644 --- 
a/services/sidecar/requirements/_packages.txt +++ b/services/sidecar/requirements/_packages.txt @@ -38,10 +38,6 @@ chardet==3.0.4 # via # -c requirements/_base.txt # aiohttp -dataclasses==0.7 - # via - # -c requirements/_base.txt - # pydantic dnspython==2.0.0 # via # -c requirements/_base.txt @@ -50,10 +46,6 @@ email-validator==1.1.1 # via # -c requirements/_base.txt # pydantic -idna-ssl==1.1.0 - # via - # -c requirements/_base.txt - # aiohttp idna==2.10 # via # -c requirements/_base.txt @@ -61,12 +53,7 @@ idna==2.10 # -r requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # email-validator - # idna-ssl # yarl -importlib-metadata==2.0.0 - # via - # -c requirements/_base.txt - # jsonschema isodate==0.6.0 # via openapi-core jsonschema==3.2.0 @@ -158,10 +145,6 @@ yarl==1.5.1 # -c requirements/_base.txt # -r requirements/../../../packages/postgres-database/requirements/_base.in # aiohttp -zipp==3.2.0 - # via - # -c requirements/_base.txt - # importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/services/sidecar/requirements/_test.txt b/services/sidecar/requirements/_test.txt index 1b3cb9baddb..d5f62c391b6 100644 --- a/services/sidecar/requirements/_test.txt +++ b/services/sidecar/requirements/_test.txt @@ -55,24 +55,12 @@ faker==8.1.4 # via -r requirements/_test.in icdiff==1.9.1 # via pytest-icdiff -idna-ssl==1.1.0 - # via - # -c requirements/_base.txt - # -c requirements/_packages.txt - # aiohttp idna==2.10 # via # -c requirements/_base.txt # -c requirements/_packages.txt - # idna-ssl # requests # yarl -importlib-metadata==2.0.0 - # via - # -c requirements/_base.txt - # -c requirements/_packages.txt - # pluggy - # pytest iniconfig==1.1.1 # via pytest isort==5.8.0 @@ -178,14 +166,11 @@ toml==0.10.2 # via # pylint # pytest -typed-ast==1.4.3 - # via astroid typing-extensions==3.7.4.3 # via # -c requirements/_base.txt # -c requirements/_packages.txt # aiohttp - # yarl urllib3==1.26.4 # via # -c requirements/../../../requirements/constraints.txt @@ -201,8 +186,3 @@ yarl==1.5.1 # -c requirements/_base.txt # -c requirements/_packages.txt # aiohttp -zipp==3.2.0 - # via - # -c requirements/_base.txt - # -c requirements/_packages.txt - # importlib-metadata diff --git a/services/sidecar/requirements/_tools.txt b/services/sidecar/requirements/_tools.txt index aa2a1118b16..9146604dac1 100644 --- a/services/sidecar/requirements/_tools.txt +++ b/services/sidecar/requirements/_tools.txt @@ -21,29 +21,12 @@ click==7.1.2 # -c requirements/_base.txt # black # pip-tools -dataclasses==0.7 - # via - # -c requirements/_base.txt - # -c requirements/_packages.txt - # black distlib==0.3.1 # via virtualenv filelock==3.0.12 # via virtualenv identify==2.2.4 # via pre-commit -importlib-metadata==2.0.0 - # via - # -c requirements/_base.txt - # -c requirements/_packages.txt - # -c requirements/_test.txt - # pep517 - # pre-commit - # virtualenv -importlib-resources==5.1.3 - # via - # pre-commit - # virtualenv isort==5.8.0 # via # -c requirements/_test.txt @@ -80,28 +63,10 @@ toml==0.10.2 # black # pep517 # pre-commit -typed-ast==1.4.3 - # via - # -c requirements/_test.txt - # black -typing-extensions==3.7.4.3 - # via - # -c requirements/_base.txt - # -c requirements/_packages.txt - # -c requirements/_test.txt - # black virtualenv==20.4.6 # via pre-commit watchdog[watchmedo]==2.1.1 # via -r requirements/_tools.in -zipp==3.2.0 - # via - # -c requirements/_base.txt - # 
-c requirements/_packages.txt - # -c requirements/_test.txt - # importlib-metadata - # importlib-resources - # pep517 # The following packages are considered to be unsafe in a requirements file: # pip diff --git a/services/sidecar/setup.py b/services/sidecar/setup.py index ce57988a1c7..96058b326b2 100644 --- a/services/sidecar/setup.py +++ b/services/sidecar/setup.py @@ -4,9 +4,9 @@ from setuptools import find_packages, setup -if sys.version_info.major != 3 and sys.version_info.minor >= 6: +if not (sys.version_info.major == 3 and sys.version_info.minor == 8): raise RuntimeError( - "Expected ~=3.6, got %s (Tip: did you forget to 'source .venv/bin/activate' or 'pyenv local'?)" + "Expected ~=3.8, got %s (Tip: did you forget to 'source .venv/bin/activate' or 'pyenv local'?)" % str(sys.version_info) ) @@ -43,11 +43,11 @@ def read_reqs(reqs_path: Path): "Development Status :: 1 - Planning", "License :: OSI Approved :: MIT License", "Natural Language :: English", - "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.8", }, long_description=readme, license="MIT license", - python_requires="~=3.6", + python_requires="~=3.8", packages=find_packages(where="src"), package_dir={ "": "src", diff --git a/services/sidecar/src/simcore_service_sidecar/celery_task_utils.py b/services/sidecar/src/simcore_service_sidecar/celery_task_utils.py index da412ae80a1..b5a7e3acc99 100644 --- a/services/sidecar/src/simcore_service_sidecar/celery_task_utils.py +++ b/services/sidecar/src/simcore_service_sidecar/celery_task_utils.py @@ -42,7 +42,7 @@ def on_task_success_handler( def cancel_task(function: Callable) -> None: - tasks = asyncio.Task.all_tasks() + tasks = asyncio.all_tasks() for task in tasks: # pylint: disable=protected-access if task._coro.__name__ == function.__name__: diff --git a/services/sidecar/src/simcore_service_sidecar/log_parser.py b/services/sidecar/src/simcore_service_sidecar/log_parser.py index 7de59b4fe23..03c404d5b03 100644 --- a/services/sidecar/src/simcore_service_sidecar/log_parser.py +++ b/services/sidecar/src/simcore_service_sidecar/log_parser.py @@ -1,7 +1,6 @@ import asyncio import logging import re -import sys from enum import Enum from pathlib import Path from typing import Awaitable, Callable, Optional, Tuple, Union @@ -105,9 +104,7 @@ async def _monitor_docker_container( finally: if not out_log_file and log_file: - # TODO: Update missing_ok=True in py3.8 - assert sys.version_info < (3, 8) # nosec - log_file.unlink() + log_file.unlink(missing_ok=True) @log_decorator(logger=log) @@ -124,5 +121,4 @@ async def _monitor_log_file( await asyncio.sleep(1) continue log_type, parsed_line = await parse_line(line) - await log_cb(log_type, parsed_line) diff --git a/services/sidecar/src/simcore_service_sidecar/rabbitmq.py b/services/sidecar/src/simcore_service_sidecar/rabbitmq.py index c23db3518a5..9c1aafa920d 100644 --- a/services/sidecar/src/simcore_service_sidecar/rabbitmq.py +++ b/services/sidecar/src/simcore_service_sidecar/rabbitmq.py @@ -1,7 +1,7 @@ import json import logging import socket -from asyncio.futures import CancelledError +from asyncio import CancelledError from typing import Any, Dict, List, Optional, Union import aio_pika diff --git a/services/sidecar/tests/unit/test_celery_configurator.py b/services/sidecar/tests/unit/test_celery_configurator.py index 9290c54df6a..1fb430213cb 100644 --- a/services/sidecar/tests/unit/test_celery_configurator.py +++ b/services/sidecar/tests/unit/test_celery_configurator.py @@ -1,5 +1,4 @@ # pylint: 
disable=unused-argument,redefined-outer-name,no-member -import asyncio from pathlib import Path import aiodocker @@ -12,15 +11,13 @@ def _toggle_gpu_mock(mocker, has_gpu: bool) -> None: - containers_get = mocker.patch( - "aiodocker.containers.DockerContainers.run", return_value=asyncio.Future() - ) - class FakeContainer: async def wait(self, **kwargs): return {"StatusCode": 0 if has_gpu else 127} - containers_get.return_value.set_result(FakeContainer()) + containers_get = mocker.patch( + "aiodocker.containers.DockerContainers.run", return_value=FakeContainer() + ) if not has_gpu: containers_get.side_effect = aiodocker.exceptions.DockerError( @@ -95,7 +92,7 @@ def test_force_start_gpu_mode(mocker, force_gpu_mode, gpu_support) -> None: mocked_configure_gpu_mode.assert_called_with(BootMode.GPU) -def test_auto_detects_gpu(mocker, mock_node_with_gpu) -> None: +def test_auto_detects_gpu(mocker, mock_node_with_gpu, loop) -> None: mocked_configure_gpu_mode = mocker.patch( "simcore_service_sidecar.celery_configurator.configure_node" ) diff --git a/services/sidecar/tests/unit/test_log_parser.py b/services/sidecar/tests/unit/test_log_parser.py index 5d9a04d21cc..28ed974bdb3 100644 --- a/services/sidecar/tests/unit/test_log_parser.py +++ b/services/sidecar/tests/unit/test_log_parser.py @@ -1,4 +1,4 @@ -from asyncio import Future, ensure_future, sleep +import asyncio from pathlib import Path import pytest @@ -42,21 +42,20 @@ async def test_parse_line(log, expected_log_type, expected_parsed_message): assert log_message == expected_parsed_message -def future_with_result(result): - f = Future() - f.set_result(result) - return f - - async def test_monitor_log_task(temp_folder: Path, mocker): - mock_cb = mocker.Mock(return_value=future_with_result("")) + mock_awaitable_callback = mocker.AsyncMock(return_value="") log_file = temp_folder / "test_log.txt" log_file.touch() assert log_file.exists() - task = ensure_future(monitor_logs_task(log_file, mock_cb)) + + task = asyncio.create_task(monitor_logs_task(log_file, mock_awaitable_callback)) assert task - await sleep(2) + + await asyncio.sleep(2) log_file.write_text("this is a test") - await sleep(2) - mock_cb.assert_called_once() + + await asyncio.sleep(2) + mock_awaitable_callback.assert_called_once() + mock_awaitable_callback.assert_awaited() + assert task.cancel() diff --git a/services/storage/Dockerfile b/services/storage/Dockerfile index 614b74e6d90..946b76f4b4b 100644 --- a/services/storage/Dockerfile +++ b/services/storage/Dockerfile @@ -1,4 +1,4 @@ -ARG PYTHON_VERSION="3.6.10" +ARG PYTHON_VERSION="3.8.10" FROM python:${PYTHON_VERSION}-slim-buster as base # # USAGE: diff --git a/services/storage/requirements/_base.in b/services/storage/requirements/_base.in index 1c28fc30455..551d51e9f3a 100644 --- a/services/storage/requirements/_base.in +++ b/services/storage/requirements/_base.in @@ -11,7 +11,6 @@ blackfynn==4.0.0 aiobotocore==1.0.7 - aiohttp aiohttp-swagger[performance] diff --git a/services/storage/requirements/_base.txt b/services/storage/requirements/_base.txt index f57d3a6f612..1d70160d840 100644 --- a/services/storage/requirements/_base.txt +++ b/services/storage/requirements/_base.txt @@ -65,8 +65,6 @@ click==7.1.2 # via typer configparser==5.0.2 # via blackfynn -dataclasses==0.8 - # via pydantic deprecated==1.2.12 # via blackfynn dnspython==2.1.0 @@ -79,19 +77,14 @@ email-validator==1.1.2 # via pydantic future==0.18.2 # via blackfynn -idna-ssl==1.1.0 - # via aiohttp idna==2.10 # via # -r 
requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # email-validator - # idna-ssl # requests # yarl -importlib-metadata==4.0.1 - # via jsonschema isodate==0.6.0 # via # openapi-core @@ -214,9 +207,7 @@ typing-extensions==3.10.0.0 # via # aiohttp # aioitertools - # importlib-metadata # pydantic - # yarl ujson==4.0.2 # via # -r requirements/../../../packages/service-library/requirements/_base.in @@ -239,8 +230,6 @@ yarl==1.6.3 # via # -r requirements/../../../packages/postgres-database/requirements/_base.in # aiohttp -zipp==3.4.1 - # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/services/storage/requirements/_test.txt b/services/storage/requirements/_test.txt index fe077e09b2e..1924aa19319 100644 --- a/services/storage/requirements/_test.txt +++ b/services/storage/requirements/_test.txt @@ -23,8 +23,6 @@ attrs==20.3.0 # pytest-docker bcrypt==3.2.0 # via paramiko -cached-property==1.5.2 - # via docker-compose certifi==2020.12.5 # via # -c requirements/_base.txt @@ -70,22 +68,11 @@ docopt==0.6.2 # docker-compose faker==8.5.1 # via -r requirements/_test.in -idna-ssl==1.1.0 - # via - # -c requirements/_base.txt - # aiohttp idna==2.10 # via # -c requirements/_base.txt - # idna-ssl # requests # yarl -importlib-metadata==4.0.1 - # via - # -c requirements/_base.txt - # jsonschema - # pluggy - # pytest iniconfig==1.1.1 # via pytest isort==5.8.0 @@ -200,15 +187,10 @@ toml==0.10.2 # via # pylint # pytest - # pytest-cov -typed-ast==1.4.3 - # via astroid typing-extensions==3.10.0.0 # via # -c requirements/_base.txt # aiohttp - # importlib-metadata - # yarl urllib3==1.25.11 # via # -c requirements/_base.txt @@ -226,10 +208,6 @@ yarl==1.6.3 # via # -c requirements/_base.txt # aiohttp -zipp==3.4.1 - # via - # -c requirements/_base.txt - # importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/services/storage/requirements/_tools.txt b/services/storage/requirements/_tools.txt index de137d08748..23108d82153 100644 --- a/services/storage/requirements/_tools.txt +++ b/services/storage/requirements/_tools.txt @@ -21,27 +21,12 @@ click==7.1.2 # -c requirements/_base.txt # black # pip-tools -dataclasses==0.8 - # via - # -c requirements/_base.txt - # black distlib==0.3.2 # via virtualenv filelock==3.0.12 # via virtualenv identify==2.2.9 # via pre-commit -importlib-metadata==4.0.1 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # pep517 - # pre-commit - # virtualenv -importlib-resources==5.1.4 - # via - # pre-commit - # virtualenv isort==5.8.0 # via # -c requirements/_test.txt @@ -78,27 +63,10 @@ toml==0.10.2 # black # pep517 # pre-commit -typed-ast==1.4.3 - # via - # -c requirements/_test.txt - # black -typing-extensions==3.10.0.0 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # black - # importlib-metadata virtualenv==20.4.7 # via pre-commit watchdog[watchmedo]==2.1.2 # via -r requirements/_tools.in -zipp==3.4.1 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # importlib-metadata - # importlib-resources - # pep517 # The following packages are considered to be unsafe in a requirements file: # pip diff --git a/services/storage/setup.py b/services/storage/setup.py index 13f1a0130ba..f04a6719348 100644 --- a/services/storage/setup.py +++ 
b/services/storage/setup.py @@ -6,9 +6,9 @@ here = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent -if sys.version_info < (3, 6): +if not (sys.version_info.major == 3 and sys.version_info.minor == 8): raise RuntimeError( - "Requires 3.6, got %s. Did you forget to activate virtualenv?" + "Requires ~=3.8, got %s. Did you forget to activate virtualenv?" % sys.version_info ) @@ -33,7 +33,7 @@ def read_reqs(reqs_path: Path): version="0.2.1", description="Service to manage data storage in simcore", author="Manuel Guidon (mguidon)", - python_requires=">3.6, <3.7", + python_requires="~=3.8", packages=find_packages(where="src"), package_dir={"": "src"}, include_package_data=True, diff --git a/services/storage/src/simcore_service_storage/dsm.py b/services/storage/src/simcore_service_storage/dsm.py index bdb75931e9a..e296fca6123 100644 --- a/services/storage/src/simcore_service_storage/dsm.py +++ b/services/storage/src/simcore_service_storage/dsm.py @@ -570,8 +570,10 @@ async def download_link_s3(self, file_uuid: str, user_id: int) -> str: link = self.s3_client.create_presigned_get_url(bucket_name, object_name) return link - async def download_link_datcore(self, user_id: str, file_id: str) -> Dict[str, str]: - link = "" + async def download_link_datcore( + self, user_id: str, file_id: str + ) -> Tuple[str, str]: + link, filename = "", "" api_token, api_secret = self._get_datcore_tokens(user_id) dcw = DatcoreWrapper(api_token, api_secret, self.loop, self.pool) link, filename = await dcw.download_link_by_id(file_id) diff --git a/services/storage/tests/test_rest.py b/services/storage/tests/test_rest.py index 93491170a13..1814261b65b 100644 --- a/services/storage/tests/test_rest.py +++ b/services/storage/tests/test_rest.py @@ -5,7 +5,6 @@ import json import os import sys -from asyncio import Future from pathlib import Path from typing import Any, Dict from urllib.parse import quote @@ -313,9 +312,10 @@ async def _fake_download_to_file_or_raise(session, url, dest_path): assert dsm assert isinstance(dsm, DataStorageManager) - mock = mocker.patch.object(dsm, "download_link_datcore") - mock.return_value = Future() - mock.return_value.set_result(("https://httpbin.org/image", "foo.txt")) + async def mock_download_link_datcore(*args, **kwargs): + return ["https://httpbin.org/image", "foo.txt"] + + mocker.patch.object(dsm, "download_link_datcore", mock_download_link_datcore) @pytest.fixture @@ -325,7 +325,6 @@ def mock_get_project_access_rights(mocker) -> None: mock = mocker.patch( f"simcore_service_storage.{module}.get_project_access_rights" ) - mock.return_value = Future() mock.return_value.set_result(AccessRights.all()) diff --git a/services/web/Dockerfile b/services/web/Dockerfile index 3a357690492..a6116d0b97a 100644 --- a/services/web/Dockerfile +++ b/services/web/Dockerfile @@ -1,4 +1,4 @@ -ARG PYTHON_VERSION="3.6.10" +ARG PYTHON_VERSION="3.8.10" FROM python:${PYTHON_VERSION}-slim-buster as base # # USAGE: diff --git a/services/web/server/requirements/_base.in b/services/web/server/requirements/_base.in index 20324f899ea..b814c076274 100644 --- a/services/web/server/requirements/_base.in +++ b/services/web/server/requirements/_base.in @@ -62,6 +62,6 @@ expiringdict semantic_version # import/export excel -parfive +parfive==1.0.2 ## See note in simcore_service_webserver/exporter/file_downloader.py openpyxl python-magic diff --git a/services/web/server/requirements/_base.txt b/services/web/server/requirements/_base.txt index 723379899a7..8aeeef9687a 100644 --- 
a/services/web/server/requirements/_base.txt +++ b/services/web/server/requirements/_base.txt @@ -67,8 +67,6 @@ attrs==20.3.0 # openapi-core billiard==3.6.4.0 # via celery -cached-property==1.5.2 - # via kombu celery[redis]==5.1.0 # via -r requirements/_base.in cffi==1.14.5 @@ -97,10 +95,6 @@ cryptography==3.4.7 # -c requirements/../../../../requirements/constraints.txt # -r requirements/_base.in # aiohttp-session -dataclasses==0.8 - # via - # pydantic - # werkzeug dnspython==2.1.0 # via email-validator email-validator==1.1.3 @@ -111,20 +105,13 @@ expiringdict==1.2.1 # via -r requirements/_base.in hiredis==2.0.0 # via aioredis -idna-ssl==1.1.0 - # via aiohttp idna==2.10 # via # -r requirements/../../../../packages/models-library/requirements/_base.in # -r requirements/../../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../../packages/service-library/requirements/_base.in # email-validator - # idna-ssl # yarl -importlib-metadata==4.5.0 - # via - # jsonschema - # kombu isodate==0.6.0 # via # openapi-core @@ -257,10 +244,7 @@ typing-extensions==3.10.0.0 # via # aiohttp # aiohttp-jinja2 - # asyncpg - # importlib-metadata # pydantic - # yarl ujson==4.0.2 # via # -r requirements/../../../../packages/service-library/requirements/_base.in @@ -281,8 +265,6 @@ yarl==1.5.1 # aio-pika # aiohttp # aiormq -zipp==3.4.1 - # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/services/web/server/requirements/_test.txt b/services/web/server/requirements/_test.txt index f45fc088eaa..0d75a9b4074 100644 --- a/services/web/server/requirements/_test.txt +++ b/services/web/server/requirements/_test.txt @@ -36,11 +36,6 @@ billiard==3.6.4.0 # via # -c requirements/_base.txt # celery -cached-property==1.5.2 - # via - # -c requirements/_base.txt - # docker-compose - # kombu celery[redis]==5.1.0 # via # -c requirements/_base.txt @@ -113,23 +108,11 @@ faker==8.8.1 # via -r requirements/_test.in icdiff==1.9.1 # via pytest-icdiff -idna-ssl==1.1.0 - # via - # -c requirements/_base.txt - # aiohttp idna==2.10 # via # -c requirements/_base.txt - # idna-ssl # requests # yarl -importlib-metadata==4.5.0 - # via - # -c requirements/_base.txt - # jsonschema - # kombu - # pluggy - # pytest iniconfig==1.1.1 # via pytest isodate==0.6.0 @@ -303,14 +286,10 @@ toml==0.10.2 # pylint # pytest # pytest-cov -typed-ast==1.4.3 - # via astroid typing-extensions==3.10.0.0 # via # -c requirements/_base.txt # aiohttp - # importlib-metadata - # yarl urllib3==1.26.5 # via # -c requirements/../../../../requirements/constraints.txt @@ -337,10 +316,6 @@ yarl==1.5.1 # via # -c requirements/_base.txt # aiohttp -zipp==3.4.1 - # via - # -c requirements/_base.txt - # importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/services/web/server/requirements/_tools.txt b/services/web/server/requirements/_tools.txt index 76250038b1b..efc7fa59aa9 100644 --- a/services/web/server/requirements/_tools.txt +++ b/services/web/server/requirements/_tools.txt @@ -22,27 +22,12 @@ click==7.1.2 # -c requirements/_test.txt # black # pip-tools -dataclasses==0.8 - # via - # -c requirements/_base.txt - # black distlib==0.3.2 # via virtualenv filelock==3.0.12 # via virtualenv identify==2.2.10 # via pre-commit -importlib-metadata==4.5.0 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # pep517 - # pre-commit - # virtualenv -importlib-resources==5.1.4 - # via - # pre-commit - # virtualenv 
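Most of the pins removed across the compiled requirements above (`dataclasses`, `importlib-metadata`, `zipp`, `contextvars`, `immutables`, `async-exit-stack`, `idna-ssl`, `typed-ast`, ...) are backports of functionality that ships with the standard library from Python 3.7/3.8 onwards, which is why pip-compile now leaves them out. A purely illustrative snippet, not code from the repo (querying `pip`'s version assumes pip is installed in the environment):

```python
# On Python 3.8 these come straight from the standard library, so the PyPI
# backports disappear from every service's compiled requirements:
from contextlib import AsyncExitStack        # replaces the async-exit-stack backport (<3.7)
from dataclasses import dataclass            # replaces the dataclasses backport (3.6 only)
from importlib.metadata import version       # replaces importlib-metadata + zipp (<3.8)
import contextvars                           # replaces contextvars + immutables (3.6 only)

@dataclass
class Probe:
    name: str

print(Probe(name="aiohttp"), version("pip"))
```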
isort==4.3.21 # via # -c requirements/_test.txt @@ -79,27 +64,10 @@ toml==0.10.2 # black # pep517 # pre-commit -typed-ast==1.4.3 - # via - # -c requirements/_test.txt - # black -typing-extensions==3.10.0.0 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # black - # importlib-metadata virtualenv==20.4.7 # via pre-commit watchdog[watchmedo]==2.1.2 # via -r requirements/_tools.in -zipp==3.4.1 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # importlib-metadata - # importlib-resources - # pep517 # The following packages are considered to be unsafe in a requirements file: # pip diff --git a/services/web/server/setup.py b/services/web/server/setup.py index 1096d8d421b..3314254b7ef 100644 --- a/services/web/server/setup.py +++ b/services/web/server/setup.py @@ -45,7 +45,7 @@ def read_reqs(reqs_path: Path): "simcore-service-webserver=simcore_service_webserver.__main__:main", ] }, - python_requires=">=3.6", + python_requires="~=3.8", install_requires=install_requirements, tests_require=test_requirements, setup_requires=["pytest-runner"], diff --git a/services/web/server/src/simcore_service_webserver/computation_comp_tasks_listening_task.py b/services/web/server/src/simcore_service_webserver/computation_comp_tasks_listening_task.py index 87dc52d4918..1e31a18c2f9 100644 --- a/services/web/server/src/simcore_service_webserver/computation_comp_tasks_listening_task.py +++ b/services/web/server/src/simcore_service_webserver/computation_comp_tasks_listening_task.py @@ -193,7 +193,7 @@ async def comp_tasks_listening_task(app: web.Application) -> None: async def setup_comp_tasks_listening_task(app: web.Application): - task = asyncio.get_event_loop().create_task(comp_tasks_listening_task(app)) + task = asyncio.create_task(comp_tasks_listening_task(app)) yield task.cancel() await task diff --git a/services/web/server/src/simcore_service_webserver/db.py b/services/web/server/src/simcore_service_webserver/db.py index d15a0d6bee3..14ef9f1cda5 100644 --- a/services/web/server/src/simcore_service_webserver/db.py +++ b/services/web/server/src/simcore_service_webserver/db.py @@ -3,7 +3,7 @@ """ import logging -from typing import Any, Dict, Optional +from typing import Any, Dict, Iterator, Optional from aiohttp import web from aiopg.sa import Engine @@ -38,7 +38,7 @@ async def pg_engine(app: web.Application): password=pg_cfg["password"], host=pg_cfg["host"], port=pg_cfg["port"], - ) + ) # type: ignore log.info("Creating pg engine for %s", dsn) async for attempt in AsyncRetrying( @@ -84,7 +84,7 @@ async def _create_pg_engine( return engine # type: ignore # tenacity rules guarantee exit with exc -async def pg_engines(app: web.Application) -> None: +async def pg_engines(app: web.Application) -> Iterator[None]: cfg = app[APP_CONFIG_KEY][CONFIG_SECTION_NAME] pg_cfg = cfg["postgres"] @@ -147,7 +147,6 @@ def setup(app: web.Application): app[APP_DB_ENGINE_KEY] = None # async connection to db - # app.on_startup.append(_init_db) # TODO: review how is this disposed app.cleanup_ctx.append(pg_engines) diff --git a/services/web/server/src/simcore_service_webserver/diagnostics_handlers.py b/services/web/server/src/simcore_service_webserver/diagnostics_handlers.py index 6596ff4bf24..7558939756c 100644 --- a/services/web/server/src/simcore_service_webserver/diagnostics_handlers.py +++ b/services/web/server/src/simcore_service_webserver/diagnostics_handlers.py @@ -48,7 +48,7 @@ async def get_app_diagnostics(request: web.Request): /v0/status/diagnostics?top_tracemalloc=10 with display top 10 
files allocating the most memory """ # tasks in loop - data = {"loop_tasks": [get_task_info(task) for task in asyncio.Task.all_tasks()]} + data = {"loop_tasks": [get_task_info(task) for task in asyncio.all_tasks()]} # allocated memory if request.query.get("top_tracemalloc", False): diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_api.py b/services/web/server/src/simcore_service_webserver/projects/projects_api.py index 6218151613e..bfabf8e7820 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_api.py @@ -8,6 +8,7 @@ """ # pylint: disable=too-many-arguments +import contextlib import json import logging from collections import defaultdict @@ -195,33 +196,32 @@ async def retrieve_and_notify_project_locked_state( await notify_project_state_update(app, project) -# TODO: Once python 3.8 is in use this -# @contextlib.asynccontextmanager -# async def lock_with_notification( -# app: web.Application, -# project_uuid: str, -# status: ProjectStatus, -# user_id: int, -# user_name: Dict[str, str], -# notify_users: bool = True, -# ): -# try: -# async with await lock_project( -# app, -# project_uuid, -# status, -# user_id, -# user_name, -# ): -# if notify_users: -# await retrieve_and_notify_project_locked_state( -# user_id, project_uuid, app -# ) -# yield - -# finally: -# if notify_users: -# await retrieve_and_notify_project_locked_state(user_id, project_uuid, app) +@contextlib.asynccontextmanager +async def lock_with_notification( + app: web.Application, + project_uuid: str, + status: ProjectStatus, + user_id: int, + user_name: Dict[str, str], + notify_users: bool = True, +): + try: + async with await lock_project( + app, + project_uuid, + status, + user_id, + user_name, + ): + if notify_users: + await retrieve_and_notify_project_locked_state( + user_id, project_uuid, app + ) + yield + + finally: + if notify_users: + await retrieve_and_notify_project_locked_state(user_id, project_uuid, app) async def remove_project_interactive_services( diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_db.py b/services/web/server/src/simcore_service_webserver/projects/projects_db.py index 8962bf7f974..e6dab821184 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_db.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_db.py @@ -189,12 +189,6 @@ def __init__(self, app: web.Application): self._app = app self._engine = app.get(APP_DB_ENGINE_KEY) - @classmethod - def init_from_engine(cls, engine: Engine): - db_api = ProjectDBAPI({}) - db_api._engine = engine # pylint: disable=protected-access - return db_api - def _init_engine(self): # Delays creation of engine because it setup_db does it on_startup self._engine = self._app.get(APP_DB_ENGINE_KEY) diff --git a/services/web/server/src/simcore_service_webserver/resource_manager/garbage_collector.py b/services/web/server/src/simcore_service_webserver/resource_manager/garbage_collector.py index dc61e89834a..c8a6ca27265 100644 --- a/services/web/server/src/simcore_service_webserver/resource_manager/garbage_collector.py +++ b/services/web/server/src/simcore_service_webserver/resource_manager/garbage_collector.py @@ -1,6 +1,5 @@ import asyncio import logging -from contextlib import suppress from itertools import chain from typing import Any, Dict, List, Optional, Set, Tuple @@ -40,22 +39,48 @@ logger = logging.getLogger(__name__) database_errors = 
(psycopg2.DatabaseError, asyncpg.exceptions.PostgresError) +TASK_NAME = f"{__name__}.collect_garbage_periodically" +TASK_CONFIG = f"{TASK_NAME}.config" + def setup_garbage_collector(app: web.Application): async def _setup_background_task(app: web.Application): - # on_startup + # SETUP ------ # create a background task to collect garbage periodically - loop = asyncio.get_event_loop() - cgp_task = loop.create_task(collect_garbage_periodically(app)) + assert not any( # nosec + t.get_name() == TASK_NAME for t in asyncio.all_tasks() + ), "Garbage collector task already running. ONLY ONE expected" # nosec + + gc_bg_task = asyncio.create_task( + collect_garbage_periodically(app), name=TASK_NAME + ) + + # FIXME: added this config to overcome the state in which the + # task cancelation is ignored and the exceptions enter in a loop + # that never stops the background task. This flag is an additional + # mechanism to enforce stopping the background task + # + # Implemented with a mutable dict to avoid + # DeprecationWarning: Changing state of started or joined application is deprecated + # + app[TASK_CONFIG] = {"force_stop": False, "name": TASK_NAME} yield - # on_cleanup - # controlled cancelation of the gc tas - with suppress(asyncio.CancelledError): + # TEAR-DOWN ----- + # controlled cancelation of the gc task + try: logger.info("Stopping garbage collector...") - cgp_task.cancel() - await cgp_task + + ack = gc_bg_task.cancel() + assert ack # nosec + + app[TASK_CONFIG]["force_stop"] = True + + await gc_bg_task + + except asyncio.CancelledError: + assert gc_bg_task.cancelled() # nosec app.cleanup_ctx.append(_setup_background_task) @@ -68,6 +93,10 @@ async def collect_garbage_periodically(app: web.Application): interval = get_garbage_collector_interval(app) while True: await collect_garbage(app) + + if app[TASK_CONFIG].get("force_stop", False): + raise Exception("Forced to stop garbage collection") + await asyncio.sleep(interval) except asyncio.CancelledError: @@ -80,6 +109,11 @@ async def collect_garbage_periodically(app: web.Application): "There was an error during garbage collection, restarting...", exc_info=True, ) + + if app[TASK_CONFIG].get("force_stop", False): + logger.warning("Forced to stop garbage collection") + break + # will wait 5 seconds to recover before restarting to avoid restart loops # - it might be that db/redis is down, etc # @@ -448,7 +482,6 @@ async def remove_all_projects_for_user(app: web.Application, user_id: int) -> No user_id, ) return - user_primary_gid = int(project_owner["primary_gid"]) # fetch all projects for the user diff --git a/services/web/server/src/simcore_service_webserver/security_access_model.py b/services/web/server/src/simcore_service_webserver/security_access_model.py index fc0b4182a7b..84759a10a01 100644 --- a/services/web/server/src/simcore_service_webserver/security_access_model.py +++ b/services/web/server/src/simcore_service_webserver/security_access_model.py @@ -42,7 +42,7 @@ def from_rawdata(cls, role: Union[str, UserRole], value: Dict) -> "RolePermissio allowed = set() check = dict() for item in value.get("can", list()): - if isinstance(item, Dict): + if isinstance(item, dict): check[item["name"]] = item["check"] elif isinstance(item, str): allowed.add(item) diff --git a/services/web/server/src/simcore_service_webserver/socketio/handlers.py b/services/web/server/src/simcore_service_webserver/socketio/handlers.py index a44efaee1a2..c19bcc8c854 100644 --- a/services/web/server/src/simcore_service_webserver/socketio/handlers.py +++ 
b/services/web/server/src/simcore_service_webserver/socketio/handlers.py @@ -11,10 +11,9 @@ from typing import Any, Dict, List, Optional from aiohttp import web -from socketio.exceptions import ConnectionRefusedError as SocketIOConnectionError - from servicelib.observer import observe from servicelib.utils import fire_and_forget_task, logged_gather +from socketio.exceptions import ConnectionRefusedError as SocketIOConnectionError from ..groups_api import list_user_groups from ..login.decorators import RQT_USERID_KEY, login_required @@ -68,8 +67,7 @@ async def connect(sid: str, environ: Dict, app: web.Application) -> bool: async def authenticate_user( sid: str, app: web.Application, request: web.Request ) -> None: - """throws web.HTTPUnauthorized when the user is not recognized. Keeps the original request. - """ + """throws web.HTTPUnauthorized when the user is not recognized. Keeps the original request.""" user_id = request.get(RQT_USERID_KEY, ANONYMOUS_USER_ID) log.debug("client %s authenticated", user_id) client_session_id = request.query.get("client_session_id", None) @@ -116,7 +114,7 @@ async def disconnect_other_sockets(sio, sockets: List[str]) -> None: @observe(event="SIGNAL_USER_LOGOUT") -async def user_logged_out( +async def on_user_logout( user_id: str, client_session_id: Optional[str], app: web.Application ) -> None: log.debug("user %s must be disconnected", user_id) diff --git a/services/web/server/src/simcore_service_webserver/users_api.py b/services/web/server/src/simcore_service_webserver/users_api.py index 104399eb555..8f4afab00cf 100644 --- a/services/web/server/src/simcore_service_webserver/users_api.py +++ b/services/web/server/src/simcore_service_webserver/users_api.py @@ -10,6 +10,7 @@ import sqlalchemy as sa from aiohttp import web +from aiopg.sa.engine import Engine from aiopg.sa.result import RowProxy from servicelib.application_keys import APP_DB_ENGINE_KEY from simcore_postgres_database.models.users import UserRole @@ -30,7 +31,7 @@ async def get_user_profile(app: web.Application, user_id: int) -> Dict[str, Any]: - engine = app[APP_DB_ENGINE_KEY] + engine: Engine = app[APP_DB_ENGINE_KEY] user_profile: Dict[str, Any] = {} user_primary_group = all_group = {} user_standard_groups = [] diff --git a/services/web/server/src/simcore_service_webserver/utils.py b/services/web/server/src/simcore_service_webserver/utils.py index ef3f336a8f8..87bc22d2826 100644 --- a/services/web/server/src/simcore_service_webserver/utils.py +++ b/services/web/server/src/simcore_service_webserver/utils.py @@ -27,9 +27,9 @@ def is_osparc_repo_dir(path: Path) -> bool: def search_osparc_repo_dir(max_iter=8): - """ Returns path to root repo dir or None + """Returns path to root repo dir or None - NOTE: assumes this file within repo, i.e. only happens in edit mode! + NOTE: assumes this file within repo, i.e. only happens in edit mode! 
""" root_dir = current_dir if "services/web/server" in str(root_dir): @@ -44,7 +44,8 @@ def search_osparc_repo_dir(max_iter=8): def as_list(obj) -> List: - if isinstance(obj, Iterable): + # TODO: disabled because of bug in https://github.com/PyCQA/pylint/issues/3507 + if isinstance(obj, Iterable): # pylint: disable=typecheck return list(obj) return [ obj, @@ -61,7 +62,7 @@ def gravatar_url(gravatarhash, size=100, default="identicon", rating="g") -> URL def generate_password(length: int = 8, more_secure: bool = False) -> str: - """ generate random passord + """generate random passord :param length: password length, defaults to 8 :type length: int, optional @@ -134,8 +135,7 @@ def format_datetime(snapshot: datetime) -> str: def now_str() -> str: - """ Returns formatted time snapshot in UTC - """ + """Returns formatted time snapshot in UTC""" return format_datetime(now()) @@ -203,12 +203,12 @@ def compose_error_msg(msg: str) -> str: return f"{msg}. Please send this message to support@osparc.io [{now_str()}]" - # ----------------------------------------------- # # FORMATTING # + def snake_to_camel(subject: str) -> str: parts = subject.lower().split("_") return parts[0] + "".join(x.title() for x in parts[1:]) diff --git a/services/web/server/tests/integration/01/test_exporter.py b/services/web/server/tests/integration/01/test_exporter.py index 3a3b9cd7c45..7cadc5ec43a 100644 --- a/services/web/server/tests/integration/01/test_exporter.py +++ b/services/web/server/tests/integration/01/test_exporter.py @@ -194,39 +194,6 @@ def get_exported_projects() -> List[Path]: return exported_files -@pytest.fixture -async def monkey_patch_asyncio_subprocess(mocker): - # TODO: The below bug is not allowing me to fully test, - # mocking and waiting for an update - # https://bugs.python.org/issue35621 - # this issue was patched in 3.8, no need - if sys.version_info.major == 3 and sys.version_info.minor >= 8: - raise RuntimeError( - "Issue no longer present in this version of python, " - "please remote this mock on python >= 3.8" - ) - - import subprocess - - async def create_subprocess_exec(*command, **extra_params): - class MockResponse: - def __init__(self, command, **kwargs): - self.proc = subprocess.Popen(command, **extra_params) - - async def communicate(self): - return self.proc.communicate() - - @property - def returncode(self): - return self.proc.returncode - - mock_response = MockResponse(command, **extra_params) - - return mock_response - - mocker.patch("asyncio.create_subprocess_exec", side_effect=create_subprocess_exec) - - @pytest.fixture async def apply_access_rights(aiopg_engine: aiopg.sa.Engine) -> Coroutine: async def grant_rights_to_services(services: List[Tuple[str, str]]) -> None: @@ -520,7 +487,6 @@ async def test_import_export_import_duplicate( aiopg_engine, redis_client, export_version, - monkey_patch_asyncio_subprocess, simcore_services, monkey_patch_aiohttp_request_url, grant_access_rights, diff --git a/services/web/server/tests/integration/01/test_project_workflow.py b/services/web/server/tests/integration/01/test_project_workflow.py index 79d01981a97..b7685251ef2 100644 --- a/services/web/server/tests/integration/01/test_project_workflow.py +++ b/services/web/server/tests/integration/01/test_project_workflow.py @@ -1,13 +1,15 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable + """ TODO: move to system testing: shall test different workflows on framework studies (=project) e.g. run, pull, push ,... 
pipelines This one here is too similar to unit/with_postgres/test_projects.py """ -# pylint:disable=unused-variable -# pylint:disable=unused-argument -# pylint:disable=redefined-outer-name + +import asyncio import json -from asyncio import Future, Task, wait_for from copy import deepcopy from pathlib import Path from pprint import pprint @@ -114,9 +116,8 @@ async def _mock_copy_data_from_project(*args): # mock1 = mocker.patch('simcore_service_webserver.projects.projects_handlers.delete_data_folders_of_project', return_value=None) mock1 = mocker.patch( "simcore_service_webserver.projects.projects_handlers.projects_api.delete_data_folders_of_project", - return_value=Future(), + return_value="", ) - mock1.return_value.set_result("") return mock, mock1 @@ -267,13 +268,13 @@ async def test_workflow( await _request_delete(client, pid) # wait for delete tasks to finish - tasks = Task.all_tasks() + tasks = asyncio.all_tasks() for task in tasks: # TODO: 'async_generator_asend' has no __name__ attr. Python 3.8 gets coros names # Expects "delete_project" coros to have __name__ attrs # pylint: disable=protected-access - if "delete_project" in getattr(task._coro, "__name__", ""): - await wait_for(task, timeout=60.0) + if "delete_project" in getattr(task.get_coro(), "__name__", ""): + await asyncio.wait_for(task, timeout=60.0) # list empty projects = await _request_list(client) diff --git a/services/web/server/tests/integration/02/test_computation.py b/services/web/server/tests/integration/02/test_computation.py index d3ed258e657..5e1f0c0de75 100644 --- a/services/web/server/tests/integration/02/test_computation.py +++ b/services/web/server/tests/integration/02/test_computation.py @@ -11,7 +11,6 @@ import sqlalchemy as sa from _helpers import ExpectedResponse, standard_role_response from aiohttp import web -from aiohttp.web_exceptions import HTTPCreated from models_library.settings.rabbit import RabbitConfig from models_library.settings.redis import RedisConfig from pytest_simcore.helpers.utils_assert import assert_status diff --git a/services/web/server/tests/integration/conftest.py b/services/web/server/tests/integration/conftest.py index fdd3d660fa6..a9380f403b0 100644 --- a/services/web/server/tests/integration/conftest.py +++ b/services/web/server/tests/integration/conftest.py @@ -14,7 +14,6 @@ import logging import sys -from asyncio import Future from copy import deepcopy from pathlib import Path from pprint import pprint @@ -171,9 +170,8 @@ def app_config(_webserver_dev_config: Dict, aiohttp_unused_port) -> Dict: def mock_orphaned_services(mocker): remove_orphaned_services = mocker.patch( "simcore_service_webserver.resource_manager.garbage_collector.remove_orphaned_services", - return_value=Future(), + return_value="", ) - remove_orphaned_services.return_value.set_result("") return remove_orphaned_services diff --git a/services/web/server/tests/unit/conftest.py b/services/web/server/tests/unit/conftest.py index c0c53c5b54d..95a664f1a7f 100644 --- a/services/web/server/tests/unit/conftest.py +++ b/services/web/server/tests/unit/conftest.py @@ -11,7 +11,6 @@ import json import logging import sys -from asyncio import Future from pathlib import Path from typing import Any, Callable, Dict, Iterable @@ -27,6 +26,7 @@ pytest_plugins = [ "pytest_simcore.environment_configs", + "pytest_simcore.monkeypatch_extra", "pytest_simcore.pydantic_models", "pytest_simcore.repository_paths", "pytest_simcore.schemas", @@ -80,9 +80,8 @@ def test_tags_data(fake_data_dir: Path) -> Iterable[Dict[str, Any]]: def 
mock_orphaned_services(mocker): remove_orphaned_services = mocker.patch( "simcore_service_webserver.resource_manager.garbage_collector.remove_orphaned_services", - return_value=Future(), + return_value="", ) - remove_orphaned_services.return_value.set_result("") return remove_orphaned_services @@ -91,5 +90,5 @@ def disable_gc_manual_guest_users(mocker): """Disable to avoid an almost instant cleanup of GUEST users with their projects""" mocker.patch( "simcore_service_webserver.resource_manager.garbage_collector.remove_users_manually_marked_as_guests", - return_value=Future(), + return_value=None, ) diff --git a/services/web/server/tests/unit/isolated/test_activity.py b/services/web/server/tests/unit/isolated/test_activity.py index b7759b4811e..e06b33c1cb7 100644 --- a/services/web/server/tests/unit/isolated/test_activity.py +++ b/services/web/server/tests/unit/isolated/test_activity.py @@ -11,7 +11,6 @@ from aiohttp.client_exceptions import ClientConnectionError from celery import Celery from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_mock import future_with_result from servicelib.application import create_safe_application from simcore_service_webserver.activity import handlers, setup_activity from simcore_service_webserver.computation_config import ComputationSettings @@ -43,26 +42,26 @@ def mocked_monitoring(loop, mocker, activity_data): cpu_ret = prometheus_data.get("cpu_return") mocker.patch( "simcore_service_webserver.activity.handlers.get_cpu_usage", - return_value=future_with_result(cpu_ret), + return_value=cpu_ret, ) mem_ret = prometheus_data.get("memory_return") mocker.patch( "simcore_service_webserver.activity.handlers.get_memory_usage", - return_value=future_with_result(mem_ret), + return_value=mem_ret, ) labels_ret = prometheus_data.get("labels_return") mocker.patch( "simcore_service_webserver.activity.handlers.get_container_metric_for_labels", - return_value=future_with_result(labels_ret), + return_value=labels_ret, ) celery_data = activity_data.get("celery") celery_ret = celery_data.get("celery_return") mocker.patch( "simcore_service_webserver.activity.handlers.get_celery_reserved", - return_value=future_with_result(celery_ret), + return_value=celery_ret, ) diff --git a/services/web/server/tests/unit/isolated/test_catalog_setup.py b/services/web/server/tests/unit/isolated/test_catalog_setup.py index 96367adda0e..58d24918fc5 100644 --- a/services/web/server/tests/unit/isolated/test_catalog_setup.py +++ b/services/web/server/tests/unit/isolated/test_catalog_setup.py @@ -2,7 +2,6 @@ # pylint:disable=unused-argument # pylint:disable=redefined-outer-name -from asyncio import Future import pytest from servicelib.application import create_safe_application @@ -43,10 +42,7 @@ async def __aenter__(self): # Mocks aiohttp.ClientResponse # https://docs.aiohttp.org/en/stable/client_reference.html#aiohttp.ClientResponse resp = mocker.Mock() - - f = Future() - f.set_result({}) - resp.json.return_value = f + resp.json.return_value = {} resp.status = 200 return resp diff --git a/services/web/server/tests/unit/isolated/test_exporter_archiving.py b/services/web/server/tests/unit/isolated/test_exporter_archiving.py index 9161a6b5ce1..94ad843d1a4 100644 --- a/services/web/server/tests/unit/isolated/test_exporter_archiving.py +++ b/services/web/server/tests/unit/isolated/test_exporter_archiving.py @@ -1,18 +1,16 @@ # pylint:disable=redefined-outer-name,unused-argument +import asyncio +import hashlib import os -import sys +import random +import secrets 
+import string import tempfile import uuid -import hashlib -import random -from pathlib import Path -import asyncio -from typing import Set, List, Dict, Iterator from concurrent.futures import ProcessPoolExecutor -import string -import secrets - +from pathlib import Path +from typing import Dict, Iterator, List, Set, Tuple import pytest from simcore_service_webserver.exporter.archiving import ( @@ -24,47 +22,14 @@ from simcore_service_webserver.exporter.exceptions import ExporterException -@pytest.fixture -async def monkey_patch_asyncio_subporcess(loop, mocker): - # TODO: The below bug is not allowing me to fully test, - # mocking and waiting for an update - # https://bugs.python.org/issue35621 - # this issue was patched in 3.8, no need - if sys.version_info.major == 3 and sys.version_info.minor >= 8: - raise RuntimeError( - "Issue no longer present in this version of python, " - "please remote this mock on python >= 3.8" - ) - - import subprocess - - async def create_subprocess_exec(*command, **extra_params): - class MockResponse: - def __init__(self, command, **kwargs): - self.proc = subprocess.Popen(command, **extra_params) - - async def communicate(self): - return self.proc.communicate() - - @property - def returncode(self): - return self.proc.returncode - - mock_response = MockResponse(command, **extra_params) - - return mock_response - - mocker.patch("asyncio.create_subprocess_exec", side_effect=create_subprocess_exec) - - @pytest.fixture def temp_dir(tmpdir) -> Path: # cast to Path object - yield Path(tmpdir) + return Path(tmpdir) @pytest.fixture -def temp_dir2() -> Path: +def temp_dir2() -> Iterator[Path]: with tempfile.TemporaryDirectory() as temp_dir: temp_dir_path = Path(temp_dir) extract_dir_path = temp_dir_path / "extract_dir" @@ -73,7 +38,7 @@ def temp_dir2() -> Path: @pytest.fixture -def temp_file() -> Path: +def temp_file() -> Iterator[Path]: file_path = Path("/") / f"tmp/{next(tempfile._get_candidate_names())}" file_path.write_text("test_data") yield file_path @@ -85,6 +50,62 @@ def project_uuid(): return str(uuid.uuid4()) +@pytest.fixture +def dir_with_random_content() -> Iterator[Path]: + def random_string(length: int) -> str: + return "".join(secrets.choice(string.ascii_letters) for i in range(length)) + + def make_files_in_dir(dir_path: Path, file_count: int) -> None: + for _ in range(file_count): + (dir_path / f"{random_string(8)}.bin").write_bytes( + os.urandom(random.randint(1, 10)) + ) + + def ensure_dir(path_to_ensure: Path) -> Path: + path_to_ensure.mkdir(parents=True, exist_ok=True) + return path_to_ensure + + def make_subdirectory_with_content(subdir_name: Path, max_file_count: int) -> None: + subdir_name = ensure_dir(subdir_name) + make_files_in_dir( + dir_path=subdir_name, + file_count=random.randint(1, max_file_count), + ) + + def make_subdirectories_with_content( + subdir_name: Path, max_subdirectories_count: int, max_file_count: int + ) -> None: + subdirectories_count = random.randint(1, max_subdirectories_count) + for _ in range(subdirectories_count): + make_subdirectory_with_content( + subdir_name=subdir_name / f"{random_string(4)}", + max_file_count=max_file_count, + ) + + def get_dirs_and_subdris_in_path(path_to_scan: Path) -> List[Path]: + return [path for path in path_to_scan.rglob("*") if path.is_dir()] + + with tempfile.TemporaryDirectory() as temp_dir: + temp_dir_path = Path(temp_dir) + data_container = ensure_dir(temp_dir_path / "study_data") + + make_subdirectories_with_content( + subdir_name=data_container, max_subdirectories_count=5, 
max_file_count=5 + ) + make_files_in_dir(dir_path=data_container, file_count=5) + + # creates a good amount of files + for _ in range(4): + for subdirectory_path in get_dirs_and_subdris_in_path(data_container): + make_subdirectories_with_content( + subdir_name=subdirectory_path, + max_subdirectories_count=3, + max_file_count=3, + ) + + yield temp_dir_path + + def temp_dir_with_existing_archive(temp_dir, project_uui) -> Path: nested_dir = temp_dir / "nested" nested_dir.mkdir(parents=True, exist_ok=True) @@ -116,8 +137,8 @@ def temp_dir_to_compress_with_too_many_targets(temp_dir, project_uuid) -> Path: def strip_directory_from_path(input_path: Path, to_strip: Path) -> Path: - to_strip = f"{str(to_strip)}/" - return Path(str(input_path).replace(to_strip, "")) + _to_strip = f"{str(to_strip)}/" + return Path(str(input_path).replace(_to_strip, "")) def get_all_files_in_dir(dir_path: Path) -> Set[Path]: @@ -128,7 +149,7 @@ def get_all_files_in_dir(dir_path: Path) -> Set[Path]: } -def _compute_hash(file_path: Path) -> str: +def _compute_hash(file_path: Path) -> Tuple[Path, str]: with open(file_path, "rb") as file_to_hash: file_hash = hashlib.md5() chunk = file_to_hash.read(8192) @@ -189,9 +210,6 @@ async def assert_same_directory_content( assert dir_to_compress_hashes[key] == output_dir_hashes[key] -# end utils - - def test_validate_osparc_file_name_ok(): algorithm, digest_sum = validate_osparc_import_name( "v1#SHA256=80e69a0973e15f4a9c3c180d00a39ee0b0dfafe43356f867983e1180e9b5a892.osparc" @@ -270,9 +288,7 @@ async def test_archive_already_exists(loop, temp_dir, project_uuid): ) -async def test_unzip_found_too_many_project_targets( - loop, temp_dir, project_uuid, monkey_patch_asyncio_subporcess -): +async def test_unzip_found_too_many_project_targets(loop, temp_dir, project_uuid): tmp_dir_to_compress = temp_dir_to_compress_with_too_many_targets( temp_dir, project_uuid ) diff --git a/services/web/server/tests/unit/isolated/test_projects_models.py b/services/web/server/tests/unit/isolated/test_projects_models.py index d89006d8e45..90057838476 100644 --- a/services/web/server/tests/unit/isolated/test_projects_models.py +++ b/services/web/server/tests/unit/isolated/test_projects_models.py @@ -3,10 +3,10 @@ # pylint:disable=redefined-outer-name import datetime -from asyncio import Future -from unittest.mock import MagicMock import pytest +from aiohttp import web +from simcore_service_webserver.constants import APP_DB_ENGINE_KEY from simcore_service_webserver.projects.projects_db import ( ProjectDBAPI, _convert_to_db_names, @@ -57,52 +57,27 @@ def test_convert_to_schema_names(fake_db_dict): @pytest.fixture -def user_id(): - return -1 +def mock_pg_engine(mocker): + connection = mocker.AsyncMock(name="Connection") + mc = mocker.Mock(name="ManagedConnection") + mc.__aenter__ = mocker.AsyncMock(name="Enter", return_value=connection) + mc.__aexit__ = mocker.AsyncMock(name="Exit", return_value=False) -class MockAsyncContextManager(MagicMock): - mock_object = None + engine = mocker.Mock(name="Engine") + engine.acquire.return_value = mc + return engine, connection - async def __aenter__(self): - return self.mock_object - async def __aexit__(self, *args): - pass +async def test_add_projects(fake_project, mock_pg_engine): + engine, connection = mock_pg_engine + app = web.Application() + app[APP_DB_ENGINE_KEY] = engine -@pytest.fixture -def mock_db_engine(mocker): - def create_engine(mock_result): - mock_connection = mocker.patch("aiopg.sa.SAConnection", spec=True) - mock_connection.execute.return_value = Future() 
- mock_connection.execute.return_value.set_result(mock_result) - mock_connection.scalar.return_value = Future() - mock_connection.scalar.return_value.set_result(mock_result) - - mock_context_manager = MockAsyncContextManager() - mock_context_manager.mock_object = mock_connection - - mock_db_engine = mocker.patch("aiopg.sa.engine.Engine", spec=True) - mock_db_engine.acquire.return_value = mock_context_manager - return mock_db_engine, mock_connection - - yield create_engine - - -async def test_add_projects(fake_project, user_id, mocker, mock_db_engine): - - mock_result_row = mocker.patch("aiopg.sa.result.RowProxy", spec=True) - - mock_result = mocker.patch("aiopg.sa.result.ResultProxy", spec=True) - mock_result.first.return_value = Future() - mock_result.first.return_value.set_result(mock_result_row) - - db_engine, mock_connection = mock_db_engine(mock_result) - - db = ProjectDBAPI.init_from_engine(db_engine) - await db.add_projects([fake_project], user_id=user_id) + db = ProjectDBAPI(app) + assert await db.add_projects([fake_project], user_id=-1) - db_engine.acquire.assert_called() - mock_connection.scalar.assert_called() - mock_connection.execute.assert_called() + engine.acquire.assert_called() + connection.scalar.assert_called() + connection.execute.assert_called_once() diff --git a/services/web/server/tests/unit/isolated/test_scicrunch_service_api.py b/services/web/server/tests/unit/isolated/test_scicrunch_service_api.py index 179782d345f..59f2328fd01 100644 --- a/services/web/server/tests/unit/isolated/test_scicrunch_service_api.py +++ b/services/web/server/tests/unit/isolated/test_scicrunch_service_api.py @@ -141,7 +141,7 @@ async def test_scicrunch_get_all_versions_with_invalid_rrids( pprint(versions) # invalid keys return success but an empty list of versions! 
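
Side note on the mock simplifications in the hunks above: the Future()-plus-set_result plumbing and the future_with_result helper become unnecessary on Python 3.8 because unittest.mock.patch (and therefore pytest-mock's mocker.patch) substitutes an AsyncMock when the patched target is an async function, so a plain return_value is what callers receive on await. Below is a minimal, self-contained sketch of that mechanism only; the fetch_owner and handler names are illustrative and not part of this codebase.

import asyncio
from unittest import mock


async def fetch_owner(project_uuid: str) -> str:
    # Stands in for a real coroutine doing I/O; patched out in the test below.
    raise RuntimeError("should be patched in tests")


async def handler(project_uuid: str) -> str:
    owner = await fetch_owner(project_uuid)
    return f"owner={owner}"


def test_plain_return_value_is_awaited():
    # Since Python 3.8, patching an async def target yields an AsyncMock,
    # so return_value is delivered when the coroutine is awaited.
    with mock.patch(f"{__name__}.fetch_owner", return_value="alice") as patched:
        assert isinstance(patched, mock.AsyncMock)
        assert asyncio.run(handler("uuid-123")) == "owner=alice"
    patched.assert_awaited_once_with("uuid-123")


if __name__ == "__main__":
    test_plain_return_value_is_awaited()
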
- assert isinstance(versions, List) + assert isinstance(versions, list) assert len(versions) == 0 assert not versions diff --git a/services/web/server/tests/unit/with_dbs/01/test_comp_tasks_listening_task.py b/services/web/server/tests/unit/with_dbs/01/test_comp_tasks_listening_task.py index f9c8d77649c..799ad455fc2 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_comp_tasks_listening_task.py +++ b/services/web/server/tests/unit/with_dbs/01/test_comp_tasks_listening_task.py @@ -6,9 +6,7 @@ import asyncio import json import logging -from asyncio import Future from typing import Any, Dict, List -from unittest.mock import MagicMock import aiopg.sa import pytest @@ -22,11 +20,7 @@ ) from sqlalchemy.sql.elements import literal_column - -def future_with_result(result: Any) -> asyncio.Future: - f = Future() - f.set_result(result) - return f +logger = logging.getLogger(__name__) @pytest.fixture @@ -34,30 +28,30 @@ async def mock_project_subsystem(mocker) -> Dict: mocked_project_calls = { "_get_project_owner": mocker.patch( "simcore_service_webserver.computation_comp_tasks_listening_task._get_project_owner", - return_value=future_with_result(""), + return_value="", ), "_update_project_state": mocker.patch( "simcore_service_webserver.computation_comp_tasks_listening_task._update_project_state", - return_value=future_with_result(""), + return_value="", ), "_update_project_outputs": mocker.patch( "simcore_service_webserver.computation_comp_tasks_listening_task._update_project_outputs", - return_value=future_with_result(""), + return_value="", ), } yield mocked_project_calls -async def test_mock_project_api(loop, mock_project_subsystem: Dict): +async def test_mock_project_api(loop, mock_project_subsystem: Dict, mocker): from simcore_service_webserver.computation_comp_tasks_listening_task import ( _get_project_owner, _update_project_outputs, _update_project_state, ) - assert isinstance(_get_project_owner, MagicMock) - assert isinstance(_update_project_state, MagicMock) - assert isinstance(_update_project_outputs, MagicMock) + assert isinstance(_get_project_owner, mocker.AsyncMock) + assert isinstance(_update_project_state, mocker.AsyncMock) + assert isinstance(_update_project_outputs, mocker.AsyncMock) @pytest.fixture @@ -65,32 +59,18 @@ async def comp_task_listening_task( loop, mock_project_subsystem: Dict, client ) -> asyncio.Task: listening_task = loop.create_task(comp_tasks_listening_task(client.app)) + yield listening_task listening_task.cancel() await listening_task -MAX_TIMEOUT_S = 10 -logger = logging.getLogger(__name__) - - -@tenacity.retry( - wait=tenacity.wait_fixed(1), - stop=tenacity.stop_after_delay(MAX_TIMEOUT_S), - retry=tenacity.retry_if_exception_type(AssertionError), - before=tenacity.before_log(logger, logging.INFO), - reraise=True, -) -async def _wait_for_call(mock_fct): - mock_fct.assert_called() - - @pytest.mark.parametrize( "task_class", [NodeClass.COMPUTATIONAL, NodeClass.INTERACTIVE, NodeClass.FRONTEND] ) @pytest.mark.parametrize( - "upd_value, exp_calls", + "update_values, expected_calls", [ pytest.param( { @@ -120,8 +100,8 @@ async def test_listen_comp_tasks_task( mock_project_subsystem: Dict, comp_task_listening_task: asyncio.Task, client, - upd_value: Dict[str, Any], - exp_calls: List[str], + update_values: Dict[str, Any], + expected_calls: List[str], task_class: NodeClass, ): db_engine: aiopg.sa.Engine = client.app[APP_DB_ENGINE_KEY] @@ -138,11 +118,27 @@ async def test_listen_comp_tasks_task( # let's update some values await conn.execute( comp_tasks.update() - 
.values(**upd_value) + .values(**update_values) .where(comp_tasks.c.task_id == task["task_id"]) ) - for key, mock_fct in mock_project_subsystem.items(): - if key in exp_calls: - await _wait_for_call(mock_fct) + + # tests whether listener gets hooked calls executed + for call_name, mocked_call in mock_project_subsystem.items(): + if call_name in expected_calls: + async for attempt in _async_retry_if_fails(): + with attempt: + mocked_call.assert_awaited() + else: - mock_fct.assert_not_called() + mocked_call.assert_not_called() + + +def _async_retry_if_fails(): + # Helper that retries to account for some uncontrolled delays + return tenacity.AsyncRetrying( + wait=tenacity.wait_fixed(1), + stop=tenacity.stop_after_delay(10), + retry=tenacity.retry_if_exception_type(AssertionError), + before=tenacity.before_log(logger, logging.INFO), + reraise=True, + ) diff --git a/services/web/server/tests/unit/with_dbs/02/test_access_to_studies.py b/services/web/server/tests/unit/with_dbs/02/test_access_to_studies.py index 746fc72c078..f4ada20af37 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_access_to_studies.py +++ b/services/web/server/tests/unit/with_dbs/02/test_access_to_studies.py @@ -20,7 +20,6 @@ from models_library.projects_state import ProjectLocked, ProjectStatus from pytest_simcore.helpers.utils_assert import assert_status from pytest_simcore.helpers.utils_login import UserRole -from pytest_simcore.helpers.utils_mock import future_with_result from pytest_simcore.helpers.utils_projects import NewProject, delete_all_projects from servicelib.rest_responses import unwrap_envelope from simcore_service_webserver import catalog @@ -200,9 +199,7 @@ def mocks_on_projects_api(mocker) -> None: """ mocker.patch( "simcore_service_webserver.projects.projects_api._get_project_lock_state", - return_value=future_with_result( - ProjectLocked(value=False, status=ProjectStatus.CLOSED) - ), + return_value=ProjectLocked(value=False, status=ProjectStatus.CLOSED), ) diff --git a/services/web/server/tests/unit/with_dbs/02/test_db.py b/services/web/server/tests/unit/with_dbs/02/test_db.py index 2ed429af1e4..9ce34612ee2 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_db.py +++ b/services/web/server/tests/unit/with_dbs/02/test_db.py @@ -2,6 +2,7 @@ # pylint:disable=unused-argument # pylint:disable=redefined-outer-name + import io import yaml diff --git a/services/web/server/tests/unit/with_dbs/02/test_studies_dispatcher_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_studies_dispatcher_handlers.py index daa8516eead..9370c8ba099 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_studies_dispatcher_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/test_studies_dispatcher_handlers.py @@ -14,9 +14,15 @@ from aiohttp import ClientResponse, ClientSession, web from aioresponses import aioresponses from models_library.projects_state import ProjectLocked, ProjectStatus +from models_library.projects_state import ( + Owner, + ProjectLocked, + ProjectRunningState, + ProjectState, + RunningState, +) from pytest_simcore.helpers.utils_assert import assert_status from pytest_simcore.helpers.utils_login import UserRole -from pytest_simcore.helpers.utils_mock import future_with_result from simcore_service_webserver import catalog from simcore_service_webserver.log import setup_logging from simcore_service_webserver.studies_dispatcher._core import ViewerInfo @@ -290,9 +296,7 @@ def mocks_on_projects_api(mocker): """ mocker.patch( 
"simcore_service_webserver.projects.projects_api._get_project_lock_state", - return_value=future_with_result( - ProjectLocked(value=False, status=ProjectStatus.CLOSED) - ), + return_value=ProjectLocked(value=False, status=ProjectStatus.CLOSED), ) @@ -342,7 +346,7 @@ async def test_dispatch_viewer_anonymously( ): mock_client_director_v2_func = mocker.patch( "simcore_service_webserver.director_v2.create_or_update_pipeline", - return_value=future_with_result(result=None), + return_value=None, ) redirect_url = ( diff --git a/services/web/server/tests/unit/with_dbs/02/test_studies_dispatcher_projects.py b/services/web/server/tests/unit/with_dbs/02/test_studies_dispatcher_projects.py index a179ae677cc..535d423d2f8 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_studies_dispatcher_projects.py +++ b/services/web/server/tests/unit/with_dbs/02/test_studies_dispatcher_projects.py @@ -9,12 +9,9 @@ import pytest from models_library.projects import Project from pytest_simcore.helpers.utils_login import NewUser -from pytest_simcore.helpers.utils_mock import future_with_result from pytest_simcore.helpers.utils_projects import delete_all_projects from pytest_simcore.helpers.utils_services import list_fake_file_consumers from simcore_service_webserver.groups_api import auto_add_user_to_groups - -# from simcore_postgres_database.models.projects import projects as projects_table from simcore_service_webserver.log import setup_logging from simcore_service_webserver.projects.projects_api import get_project_for_user from simcore_service_webserver.studies_dispatcher._projects import ( @@ -95,7 +92,7 @@ async def test_add_new_project_from_model_instance( mock_func = mocker.patch( "simcore_service_webserver.director_v2.create_or_update_pipeline", - return_value=future_with_result(result=None), + return_value=None, ) async with NewUser() as user_db: diff --git a/services/web/server/tests/unit/with_dbs/03/test_director_v2.py b/services/web/server/tests/unit/with_dbs/03/test_director_v2.py index 3f99336760c..35d0a726e52 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_director_v2.py +++ b/services/web/server/tests/unit/with_dbs/03/test_director_v2.py @@ -3,6 +3,7 @@ # pylint:disable=redefined-outer-name +import sys from typing import Dict from uuid import UUID, uuid4 @@ -13,7 +14,6 @@ from models_library.projects_state import RunningState from pydantic.types import PositiveInt from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_login import LoggedUser from simcore_service_webserver import director_v2 from simcore_service_webserver.db_models import UserRole diff --git a/services/web/server/tests/unit/with_dbs/04/test_catalog_api.py b/services/web/server/tests/unit/with_dbs/04/test_catalog_api.py index 08e5c78d2df..bfd38fd4d4c 100644 --- a/services/web/server/tests/unit/with_dbs/04/test_catalog_api.py +++ b/services/web/server/tests/unit/with_dbs/04/test_catalog_api.py @@ -2,13 +2,12 @@ # pylint:disable=unused-argument # pylint:disable=redefined-outer-name -from asyncio import Future +import re from copy import deepcopy import pytest from aiohttp import web from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_login import LoggedUser from simcore_service_webserver.application import ( create_safe_application, setup_catalog, @@ -19,6 +18,7 @@ setup_security, setup_session, ) +from simcore_service_webserver.catalog_client import KCATALOG_ORIGIN from simcore_service_webserver.db_models import UserRole @@ -50,40 
+50,18 @@ def client(loop, app_cfg, aiohttp_client, postgres_db): @pytest.fixture -def mock_api_client_session(client, mocker): - get_client_session = mocker.patch( - "simcore_service_webserver.catalog_client.get_client_session" - ) - - class MockClientSession(mocker.MagicMock): - async def __aenter__(self): - # Mocks aiohttp.ClientResponse - # https://docs.aiohttp.org/en/stable/client_reference.html#aiohttp.ClientResponse - resp = mocker.Mock() - - f = Future() - f.set_result({}) - resp.json.return_value = f - - resp.status = 200 - return resp - - async def __aexit__(self, exc_type, exc_val, exc_tb): - pass - - client_session = mocker.Mock() - - context_mock = mocker.Mock(return_value=MockClientSession()) - client_session.get = context_mock - client_session.post = context_mock - client_session.request = context_mock +def mock_catalog_service_api_responses(client, aioresponses_mocker): + origin = client.app[KCATALOG_ORIGIN] - get_client_session.return_value = client_session + url_pattern = re.compile(f"^{origin}+/.*$") - yield context_mock + aioresponses_mocker.get(url_pattern, payload={"data": {}}) + aioresponses_mocker.post(url_pattern, payload={"data": {}}) + aioresponses_mocker.put(url_pattern, payload={"data": {}}) + aioresponses_mocker.patch(url_pattern, payload={"data": {}}) + aioresponses_mocker.delete(url_pattern) -# TODO: with different user roles, i.e. access rights @pytest.mark.parametrize( "user_role,expected", [ @@ -94,7 +72,11 @@ async def __aexit__(self, exc_type, exc_val, exc_tb): ], ) async def test_dag_entrypoints( - client, logged_user, api_version_prefix, mock_api_client_session, expected + client, + logged_user, + api_version_prefix, + mock_catalog_service_api_responses, + expected, ): vx = api_version_prefix diff --git a/services/web/server/tests/unit/with_dbs/05/test_project_db.py b/services/web/server/tests/unit/with_dbs/05/test_project_db.py index af067496519..a44c3364e03 100644 --- a/services/web/server/tests/unit/with_dbs/05/test_project_db.py +++ b/services/web/server/tests/unit/with_dbs/05/test_project_db.py @@ -1,16 +1,18 @@ -# pylint:disable=unused-variable -# pylint:disable=unused-argument -# pylint:disable=no-value-for-parameter -# pylint:disable=redefined-outer-name +# pylint: disable=no-value-for-parameter +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable import asyncio import datetime import json import re +import sys from copy import deepcopy from itertools import combinations from random import randint -from typing import Any, Dict, List, Tuple +from typing import Any, Dict, List, Optional, Tuple from uuid import UUID, uuid5 import pytest @@ -118,12 +120,10 @@ def all_permission_combinations() -> List[str]: return res -@pytest.mark.parametrize("project_access_rights", [e for e in ProjectAccessRights]) @pytest.mark.parametrize("wanted_permissions", all_permission_combinations()) def test_check_project_permissions( user_id: int, group_id: int, - project_access_rights: ProjectAccessRights, wanted_permissions: str, ): project = {"access_rights": {}} @@ -139,7 +139,7 @@ def test_check_project_permissions( def _project_access_rights_from_permissions( permissions: str, invert: bool = False - ) -> ProjectAccessRights: + ) -> Dict[str, bool]: access_rights = {} for p in ["read", "write", "delete"]: access_rights[p] = ( @@ -240,7 +240,8 @@ def _create_project_db(client: TestClient) -> ProjectDBAPI: assert APP_PROJECT_DBAPI in client.app db_api = 
client.app[APP_PROJECT_DBAPI] assert db_api - # pylint:disable=protected-access + assert isinstance(db_api, ProjectDBAPI) + assert db_api._app == client.app assert db_api._engine return db_api @@ -250,16 +251,10 @@ async def test_setup_projects_db(client: TestClient): _create_project_db(client) -def test_project_db_engine_creation(postgres_db: sa.engine.Engine): - db_api = ProjectDBAPI.init_from_engine(postgres_db) - # pylint:disable=protected-access - assert db_api._app == {} - assert db_api._engine == postgres_db - - @pytest.fixture() -async def db_api(client: TestClient, postgres_db: sa.engine.Engine) -> ProjectDBAPI: +def db_api(client: TestClient, postgres_db: sa.engine.Engine) -> ProjectDBAPI: db_api = _create_project_db(client) + yield db_api # clean the projects @@ -294,7 +289,7 @@ def _assert_added_project( def _assert_project_db_row( postgres_db: sa.engine.Engine, project: Dict[str, Any], **kwargs ): - row: RowProxy = postgres_db.execute( + row: Optional[RowProxy] = postgres_db.execute( f"SELECT * FROM projects WHERE \"uuid\"='{project['uuid']}'" ).fetchone() @@ -532,6 +527,7 @@ async def test_patch_user_project_workbench_concurrently( _NUMBER_OF_NODES = number_of_nodes BASE_UUID = UUID("ccc0839f-93b8-4387-ab16-197281060927") node_uuids = [str(uuid5(BASE_UUID, f"{n}")) for n in range(_NUMBER_OF_NODES)] + # create a project with a lot of nodes fake_project["workbench"] = { node_uuids[n]: { @@ -541,7 +537,7 @@ async def test_patch_user_project_workbench_concurrently( } for n in range(_NUMBER_OF_NODES) } - exp_project = deepcopy(fake_project) + expected_project = deepcopy(fake_project) # add the project original_project = deepcopy(fake_project) @@ -571,8 +567,9 @@ async def test_patch_user_project_workbench_concurrently( for n in range(_NUMBER_OF_NODES) ] for n in range(_NUMBER_OF_NODES): - exp_project["workbench"][node_uuids[n]].update(randomly_created_outputs[n]) - patched_projects: List[ + expected_project["workbench"][node_uuids[n]].update(randomly_created_outputs[n]) + + patched_projects: Tuple[ Tuple[Dict[str, Any], Dict[str, Any]] ] = await asyncio.gather( *[ @@ -602,7 +599,7 @@ async def test_patch_user_project_workbench_concurrently( # check the nodes are completely patched as expected _assert_project_db_row( postgres_db, - exp_project, + expected_project, prj_owner=logged_user["id"], access_rights={ str(primary_group["gid"]): {"read": True, "write": True, "delete": True} @@ -613,10 +610,9 @@ async def test_patch_user_project_workbench_concurrently( # now concurrently remove the outputs for n in range(_NUMBER_OF_NODES): - exp_project["workbench"][node_uuids[n]]["outputs"] = {} - patched_projects: List[ - Tuple[Dict[str, Any], Dict[str, Any]] - ] = await asyncio.gather( + expected_project["workbench"][node_uuids[n]]["outputs"] = {} + + patched_projects = await asyncio.gather( *[ db_api.patch_user_project_workbench( {node_uuids[n]: {"outputs": {}}}, @@ -635,7 +631,7 @@ async def test_patch_user_project_workbench_concurrently( # check the nodes are completely patched as expected _assert_project_db_row( postgres_db, - exp_project, + expected_project, prj_owner=logged_user["id"], access_rights={ str(primary_group["gid"]): {"read": True, "write": True, "delete": True} @@ -646,10 +642,9 @@ async def test_patch_user_project_workbench_concurrently( # now concurrently remove the outputs for n in range(_NUMBER_OF_NODES): - exp_project["workbench"][node_uuids[n]]["outputs"] = {} - patched_projects: List[ - Tuple[Dict[str, Any], Dict[str, Any]] - ] = await asyncio.gather( + 
expected_project["workbench"][node_uuids[n]]["outputs"] = {} + + patched_projects = await asyncio.gather( *[ db_api.patch_user_project_workbench( {node_uuids[n]: {"outputs": {}}}, @@ -668,7 +663,7 @@ async def test_patch_user_project_workbench_concurrently( # check the nodes are completely patched as expected _assert_project_db_row( postgres_db, - exp_project, + expected_project, prj_owner=logged_user["id"], access_rights={ str(primary_group["gid"]): {"read": True, "write": True, "delete": True} diff --git a/services/web/server/tests/unit/with_dbs/06/test_projects_02.py b/services/web/server/tests/unit/with_dbs/06/test_projects_02.py index f7d5ab3632e..ad594580828 100644 --- a/services/web/server/tests/unit/with_dbs/06/test_projects_02.py +++ b/services/web/server/tests/unit/with_dbs/06/test_projects_02.py @@ -1,11 +1,11 @@ # pylint:disable=unused-variable # pylint:disable=unused-argument # pylint:disable=redefined-outer-name + import asyncio import json import time import unittest.mock as mock -from asyncio import Future from copy import deepcopy from typing import Callable, Dict, Iterator, List, Optional, Tuple, Type, Union from unittest.mock import call @@ -26,7 +26,6 @@ ) from pytest_simcore.helpers.utils_assert import assert_status from pytest_simcore.helpers.utils_login import log_client_in -from pytest_simcore.helpers.utils_mock import future_with_result from pytest_simcore.helpers.utils_projects import NewProject, delete_all_projects from servicelib import async_utils from servicelib.application import create_safe_application @@ -66,7 +65,7 @@ def client( aiohttp_client, app_cfg, postgres_db, - mocked_director_subsystem, + mocked_director_api, mock_orphaned_services, redis_client, # this ensure redis is properly cleaned ): @@ -255,7 +254,7 @@ async def _assert_get_same_project( async def _new_project( client, - expected_response: web.Response, + expected_response: web.HTTPException, logged_user: Dict[str, str], primary_group: Dict[str, str], *, @@ -386,8 +385,8 @@ async def _open_project( client, client_id: str, project: Dict, - expected: Union[web.HTTPException, List[web.HTTPException]], -) -> Optional[Tuple[Dict, Dict]]: + expected: Union[Type[web.HTTPException], List[Type[web.HTTPException]]], +) -> Tuple[Dict, Dict]: url = client.app.router["open_project"].url_for(project_id=project["uuid"]) resp = await client.post(url, json=client_id) @@ -397,16 +396,17 @@ async def _open_project( data, error = await assert_status(resp, e) return data, error except AssertionError: - # re-raies if last item + # re-raise if last item if e == expected[-1]: raise continue else: - return await assert_status(resp, expected) + data, error = await assert_status(resp, expected) + return data, error async def _close_project( - client, client_id: str, project: Dict, expected: web.HTTPException + client, client_id: str, project: Dict, expected: Type[web.HTTPException] ): url = client.app.router["close_project"].url_for(project_id=project["uuid"]) resp = await client.post(url, json=client_id) @@ -416,7 +416,7 @@ async def _close_project( async def _state_project( client, project: Dict, - expected: web.HTTPException, + expected: Type[web.HTTPException], expected_project_state: ProjectState, ): url = client.app.router["state_project"].url_for(project_id=project["uuid"]) @@ -463,7 +463,9 @@ async def _assert_project_state_updated( handler.reset_mock() -async def _delete_project(client, project: Dict, expected: web.Response) -> None: +async def _delete_project( + client, project: Dict, expected: 
Type[web.HTTPException] +) -> None: url = client.app.router["delete_project"].url_for(project_id=project["uuid"]) assert str(url) == f"{API_PREFIX}/projects/{project['uuid']}" resp = await client.delete(url) @@ -490,7 +492,7 @@ async def test_share_project( user_role: UserRole, expected: ExpectedResponse, storage_subsystem_mock, - mocked_director_subsystem, + mocked_director_api, catalog_subsystem_mock, share_rights: Dict, project_db_cleaner, @@ -564,7 +566,7 @@ async def test_open_project( user_project, client_session_id_factory: Callable, expected, - mocked_director_subsystem, + mocked_director_api, ): # POST /v0/projects/{project_id}:open # open project @@ -590,7 +592,7 @@ async def test_open_project( user_id=logged_user["id"], ) ) - mocked_director_subsystem["start_service"].assert_has_calls(calls) + mocked_director_api["start_service"].assert_has_calls(calls) @pytest.mark.parametrize(*standard_role_response()) @@ -600,15 +602,13 @@ async def test_close_project( user_project, client_session_id_factory: Callable, expected, - mocked_director_subsystem, + mocked_director_api, fake_services, ): # POST /v0/projects/{project_id}:close fakes = fake_services(5) assert len(fakes) == 5 - mocked_director_subsystem[ - "get_running_interactive_services" - ].return_value = future_with_result(fakes) + mocked_director_api["get_running_interactive_services"].return_value = fakes # open project client_id = client_session_id_factory() @@ -616,24 +616,42 @@ async def test_close_project( resp = await client.post(url, json=client_id) if resp.status == web.HTTPOk.status_code: - calls = [ - call(client.server.app, user_project["uuid"], logged_user["id"]), - ] - mocked_director_subsystem["get_running_interactive_services"].has_calls(calls) - mocked_director_subsystem["get_running_interactive_services"].reset_mock() + mocked_director_api["get_running_interactive_services"].assert_any_call( + client.server.app, logged_user["id"], user_project["uuid"] + ) + mocked_director_api["get_running_interactive_services"].reset_mock() # close project url = client.app.router["close_project"].url_for(project_id=user_project["uuid"]) resp = await client.post(url, json=client_id) await assert_status(resp, expected.no_content) + if resp.status == web.HTTPNoContent.status_code: + # These checks are after a fire&forget, so we wait a moment + await asyncio.sleep(2) + calls = [ - call(client.server.app, user_project["uuid"], None), - call(client.server.app, user_project["uuid"], logged_user["id"]), + # call(client.server.app, user_id=None, project_id=user_project["uuid"]), <-- FIXME: SAN?? I had to comment this. Still valid? Looking at project_handler.py: _close_project_task, it does not seem the case. 
+ call( + client.server.app, + user_id=logged_user["id"], + project_id=user_project["uuid"], + ), ] - mocked_director_subsystem["get_running_interactive_services"].has_calls(calls) - calls = [call(client.server.app, service["service_uuid"]) for service in fakes] - mocked_director_subsystem["stop_service"].has_calls(calls) + mocked_director_api["get_running_interactive_services"].assert_has_calls(calls) + + calls = [ + call( + # app= + client.server.app, + # service_uuid= + service["service_uuid"], + # save_state= + True, + ) + for service in fakes + ] + mocked_director_api["stop_service"].assert_has_calls(calls) @pytest.mark.parametrize( @@ -652,7 +670,7 @@ async def test_get_active_project( client_session_id_factory: Callable, expected, socketio_client_factory: Callable, - mocked_director_subsystem, + mocked_director_api, ): # login with socket using client session id client_id1 = client_session_id_factory() @@ -732,16 +750,15 @@ async def test_project_node_lifetime( create_exp, get_exp, deletion_exp, - mocked_director_subsystem, + mocked_director_api, storage_subsystem_mock, mocker, ): mock_storage_api_delete_data_folders_of_project_node = mocker.patch( "simcore_service_webserver.projects.projects_handlers.projects_api.delete_data_folders_of_project_node", - return_value=Future(), + return_value="", ) - mock_storage_api_delete_data_folders_of_project_node.return_value.set_result("") # create a new dynamic node... url = client.app.router["create_node"].url_for(project_id=user_project["uuid"]) @@ -750,31 +767,30 @@ async def test_project_node_lifetime( data, errors = await assert_status(resp, create_exp) node_id = "wrong_node_id" if resp.status == web.HTTPCreated.status_code: - mocked_director_subsystem["start_service"].assert_called_once() + mocked_director_api["start_service"].assert_called_once() assert "node_id" in data node_id = data["node_id"] else: - mocked_director_subsystem["start_service"].assert_not_called() + mocked_director_api["start_service"].assert_not_called() + # create a new NOT dynamic node... 
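
Worth noting about the assertion changes above: the old has_calls(...) spelling is not a Mock assertion at all. Attribute access on a Mock auto-creates a child mock, so has_calls is silently created and called and the line passes regardless of what was recorded; assert_has_calls(...) is the real check. A small sketch of the difference, using an illustrative stop_service mock rather than the fixtures from this test suite:

from unittest.mock import Mock, call

stop_service = Mock(name="stop_service")
stop_service("app", "service-uuid-1", True)

# Not an assertion: Mock auto-creates a has_calls child mock and simply calls it,
# so this line passes no matter what was (or was not) recorded.
stop_service.has_calls([call("app", "some-other-uuid", False)])

# The real check: raises AssertionError unless these calls were recorded in order.
stop_service.assert_has_calls([call("app", "service-uuid-1", True)])
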
- mocked_director_subsystem["start_service"].reset_mock() + mocked_director_api["start_service"].reset_mock() url = client.app.router["create_node"].url_for(project_id=user_project["uuid"]) body = {"service_key": "some/notdynamic/key", "service_version": "1.3.4"} resp = await client.post(url, json=body) data, errors = await assert_status(resp, create_exp) node_id_2 = "wrong_node_id" if resp.status == web.HTTPCreated.status_code: - mocked_director_subsystem["start_service"].assert_not_called() + mocked_director_api["start_service"].assert_not_called() assert "node_id" in data node_id_2 = data["node_id"] else: - mocked_director_subsystem["start_service"].assert_not_called() + mocked_director_api["start_service"].assert_not_called() # get the node state - mocked_director_subsystem[ - "get_running_interactive_services" - ].return_value = future_with_result( - [{"service_uuid": node_id, "service_state": "running"}] - ) + mocked_director_api["get_running_interactive_services"].return_value = [ + {"service_uuid": node_id, "service_state": "running"} + ] url = client.app.router["get_node"].url_for( project_id=user_project["uuid"], node_id=node_id ) @@ -785,9 +801,7 @@ async def test_project_node_lifetime( assert data["service_state"] == "running" # get the NOT dynamic node state - mocked_director_subsystem[ - "get_running_interactive_services" - ].return_value = future_with_result("") + mocked_director_api["get_running_interactive_services"].return_value = [] url = client.app.router["get_node"].url_for( project_id=user_project["uuid"], node_id=node_id_2 @@ -799,23 +813,23 @@ async def test_project_node_lifetime( assert data["service_state"] == "idle" # delete the node - mocked_director_subsystem[ - "get_running_interactive_services" - ].return_value = future_with_result([{"service_uuid": node_id}]) + mocked_director_api["get_running_interactive_services"].return_value = [ + {"service_uuid": node_id} + ] url = client.app.router["delete_node"].url_for( project_id=user_project["uuid"], node_id=node_id ) resp = await client.delete(url) data, errors = await assert_status(resp, deletion_exp) if resp.status == web.HTTPNoContent.status_code: - mocked_director_subsystem["stop_service"].assert_called_once() + mocked_director_api["stop_service"].assert_called_once() mock_storage_api_delete_data_folders_of_project_node.assert_called_once() else: - mocked_director_subsystem["stop_service"].assert_not_called() + mocked_director_api["stop_service"].assert_not_called() mock_storage_api_delete_data_folders_of_project_node.assert_not_called() # delete the NOT dynamic node - mocked_director_subsystem["stop_service"].reset_mock() + mocked_director_api["stop_service"].reset_mock() mock_storage_api_delete_data_folders_of_project_node.reset_mock() # mock_director_api_get_running_services.return_value.set_result([{"service_uuid": node_id}]) url = client.app.router["delete_node"].url_for( @@ -824,10 +838,10 @@ async def test_project_node_lifetime( resp = await client.delete(url) data, errors = await assert_status(resp, deletion_exp) if resp.status == web.HTTPNoContent.status_code: - mocked_director_subsystem["stop_service"].assert_not_called() + mocked_director_api["stop_service"].assert_not_called() mock_storage_api_delete_data_folders_of_project_node.assert_called_once() else: - mocked_director_subsystem["stop_service"].assert_not_called() + mocked_director_api["stop_service"].assert_not_called() mock_storage_api_delete_data_folders_of_project_node.assert_not_called() diff --git 
a/services/web/server/tests/unit/with_dbs/09/test_projects_01.py b/services/web/server/tests/unit/with_dbs/09/test_projects_01.py index 58b66886526..f47ebb11b6b 100644 --- a/services/web/server/tests/unit/with_dbs/09/test_projects_01.py +++ b/services/web/server/tests/unit/with_dbs/09/test_projects_01.py @@ -1,7 +1,8 @@ -# pylint:disable=unused-variable -# pylint:disable=unused-argument -# pylint:disable=redefined-outer-name -# pylint:disable=too-many-arguments +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + import asyncio import uuid as uuidlib from copy import deepcopy @@ -24,7 +25,6 @@ RunningState, ) from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_mock import future_with_result from pytest_simcore.helpers.utils_projects import NewProject, delete_all_projects from servicelib import async_utils from servicelib.application import create_safe_application @@ -64,7 +64,7 @@ def client( aiohttp_client, app_cfg, postgres_db, - mocked_director_subsystem, + mocked_director_api, mock_orphaned_services, redis_client, ): @@ -110,7 +110,7 @@ def ensure_run_in_sequence_context_is_empty(): @pytest.fixture -def mocks_on_projects_api(mocker, logged_user) -> Dict: +def mocks_on_projects_api(mocker, logged_user) -> None: """ All projects in this module are UNLOCKED @@ -132,7 +132,7 @@ def mocks_on_projects_api(mocker, logged_user) -> Dict: ) mocker.patch( "simcore_service_webserver.projects.projects_api._get_project_lock_state", - return_value=future_with_result(state), + return_value=state, ) @@ -437,7 +437,7 @@ async def _new_project( async def _replace_project( - client, project_update: Dict, expected: web.Response + client, project_update: Dict, expected: Type[web.HTTPException] ) -> Dict: # PUT /v0/projects/{project_id} url = client.app.router["replace_project"].url_for( @@ -451,7 +451,9 @@ async def _replace_project( return data -async def _delete_project(client, project: Dict, expected: web.Response) -> None: +async def _delete_project( + client, project: Dict, expected: Type[web.HTTPException] +) -> None: url = client.app.router["delete_project"].url_for(project_id=project["uuid"]) assert str(url) == f"{API_PREFIX}/projects/{project['uuid']}" resp = await client.delete(url) @@ -469,7 +471,7 @@ async def _delete_project(client, project: Dict, expected: web.Response) -> None ], ) async def test_list_projects( - client: aiohttp.test_utils.TestClient, + client: TestClient, logged_user: Dict[str, Any], user_project: Dict[str, Any], template_project: Dict[str, Any], @@ -853,28 +855,33 @@ async def test_delete_project( user_project, expected, storage_subsystem_mock, - mocked_director_subsystem, + mocked_director_api, catalog_subsystem_mock, fake_services, ): # DELETE /v0/projects/{project_id} fakes = fake_services(5) - mocked_director_subsystem[ - "get_running_interactive_services" - ].return_value = future_with_result(fakes) + mocked_director_api["get_running_interactive_services"].return_value = fakes await _delete_project(client, user_project, expected) await asyncio.sleep(2) # let some time fly for the background tasks to run if expected == web.HTTPNoContent: - mocked_director_subsystem[ - "get_running_interactive_services" - ].assert_called_once() - calls = [ - call(client.server.app, service["service_uuid"], True) for service in fakes + mocked_director_api["get_running_interactive_services"].assert_called_once() + + expected_calls = [ + call( + # 
app= + client.server.app, + # service_uuid= + service["service_uuid"], + # save_state= + True, + ) + for service in fakes ] - mocked_director_subsystem["stop_service"].has_calls(calls) + mocked_director_api["stop_service"].assert_has_calls(expected_calls) # wait for the fire&forget to run await asyncio.sleep(2) @@ -901,7 +908,6 @@ async def test_delete_multiple_opened_project_forbidden( user_role, expected_ok, expected_forbidden, - mocked_director_subsystem, ): # service in project = await mocked_dynamic_service(logged_user["id"], empty_user_project["uuid"]) service = await mocked_dynamic_service(logged_user["id"], user_project["uuid"]) diff --git a/services/web/server/tests/unit/with_dbs/10/test_resource_manager.py b/services/web/server/tests/unit/with_dbs/10/test_resource_manager.py index 6081f9885e4..928010a4727 100644 --- a/services/web/server/tests/unit/with_dbs/10/test_resource_manager.py +++ b/services/web/server/tests/unit/with_dbs/10/test_resource_manager.py @@ -5,6 +5,7 @@ import asyncio from asyncio import Future, sleep +import logging from copy import deepcopy from typing import Any, Callable, Dict from unittest.mock import call @@ -40,10 +41,26 @@ from simcore_service_webserver.session import setup_session from simcore_service_webserver.socketio import setup_socketio from simcore_service_webserver.users import setup_users +from tenacity import ( + after_log, + retry_if_exception_type, + stop_after_attempt, + wait_fixed, +) + +logger = logging.getLogger(__name__) + API_VERSION = "v0" GARBAGE_COLLECTOR_INTERVAL = 1 SERVICE_DELETION_DELAY = 1 +CHECK_BACKGROUND_RETRY_POLICY = dict( + stop=stop_after_attempt(2), + wait=wait_fixed(SERVICE_DELETION_DELAY + GARBAGE_COLLECTOR_INTERVAL), + retry=retry_if_exception_type(AssertionError), + after=after_log(logger, logging.INFO), + reraise=True, +) @pytest.fixture @@ -232,6 +249,8 @@ async def test_websocket_multiple_connections( client_session_id_factory: Callable[[], str], ): NUMBER_OF_SOCKETS = 5 + resource_key = {} + # connect multiple clients clients = [] for socket_count in range(1, NUMBER_OF_SOCKETS + 1): @@ -260,7 +279,7 @@ async def test_websocket_multiple_connections( sid = sio.sid await sio.disconnect() # need to attend the disconnect event to pass through the socketio internal queues - await sleep(0.1) # must be >= 0.01 to work without issues, added some padding + await asyncio.sleep(0.1) # must be >= 0.01 to work without issues, added some padding assert not sio.sid assert not await socket_registry.find_keys(("socket_id", sio.sid)) assert not sid in await socket_registry.find_resources( @@ -317,17 +336,17 @@ async def test_websocket_disconnected_after_logout( await assert_status(r, expected) # the socket2 should be gone - await sleep(1) + await asyncio.sleep(1) assert not sio2.sid socket_logout_mock_callable2.assert_not_called() # the others should receive a logout message through their respective sockets - await sleep(3) + await asyncio.sleep(3) socket_logout_mock_callable.assert_called_once() socket_logout_mock_callable2.assert_not_called() # note 2 should be not called ever socket_logout_mock_callable3.assert_called_once() - await sleep(3) + await asyncio.sleep(3) # first socket should be closed now assert not sio.sid # second socket also closed @@ -371,12 +390,20 @@ async def test_interactive_services_removed_after_logout( r = await client.post(logout_url, json={"client_session_id": client_session_id1}) assert r.url_obj.path == logout_url.path await assert_status(r, web.HTTPOk) - # ensure sufficient time is wasted here - 
await sleep(SERVICE_DELETION_DELAY + 1) + + # check result performed by the background task + await asyncio.sleep(SERVICE_DELETION_DELAY + 1) await garbage_collector.collect_garbage(client.app) + # assert dynamic service is removed - calls = [call(client.server.app, service["service_uuid"], exp_save_state)] - mocked_director_api["stop_service"].assert_has_calls(calls) + mocked_director_api["stop_service"].assert_awaited_with( + # app= + client.server.app, + # service_uuid= + service["service_uuid"], + # save_state= + exp_save_state, + ) @pytest.mark.parametrize( @@ -422,7 +449,7 @@ async def test_interactive_services_remain_after_websocket_reconnection_from_2_t # open project in second client await open_project(client, empty_user_project["uuid"], client_session_id2) # ensure sufficient time is wasted here - await sleep(SERVICE_DELETION_DELAY + 1) + await asyncio.sleep(SERVICE_DELETION_DELAY + 1) await garbage_collector.collect_garbage(client.app) # assert dynamic service is still around mocked_director_api["stop_service"].assert_not_called() @@ -434,7 +461,7 @@ async def test_interactive_services_remain_after_websocket_reconnection_from_2_t # reconnect websocket sio2 = await socketio_client_factory(client_session_id2) # it should still be there even after waiting for auto deletion from garbage collector - await sleep(SERVICE_DELETION_DELAY + 1) + await asyncio.sleep(SERVICE_DELETION_DELAY + 1) await garbage_collector.collect_garbage(client.app) mocked_director_api["stop_service"].assert_not_called() # now really disconnect @@ -442,7 +469,7 @@ async def test_interactive_services_remain_after_websocket_reconnection_from_2_t assert not sio2.sid # run the garbage collector # event after waiting some time - await sleep(SERVICE_DELETION_DELAY + 1) + await asyncio.sleep(SERVICE_DELETION_DELAY + 1) await garbage_collector.collect_garbage(client.app) # assert dynamic service is gone calls = [call(client.server.app, service["service_uuid"], exp_save_state)] @@ -514,7 +541,7 @@ async def test_interactive_services_removed_per_project( # assert dynamic service is still around mocked_director_api["stop_service"].assert_not_called() # wait the defined delay - await sleep(SERVICE_DELETION_DELAY + 1) + await asyncio.sleep(SERVICE_DELETION_DELAY + 1) await garbage_collector.collect_garbage(client.app) # assert dynamic service 1 is removed calls = [call(client.server.app, service["service_uuid"], exp_save_state)] @@ -527,7 +554,7 @@ async def test_interactive_services_removed_per_project( # assert dynamic services are still around mocked_director_api["stop_service"].assert_not_called() # wait the defined delay - await sleep(SERVICE_DELETION_DELAY + 1) + await asyncio.sleep(SERVICE_DELETION_DELAY + 1) await garbage_collector.collect_garbage(client.app) # assert dynamic service 2,3 is removed calls = [ @@ -580,14 +607,14 @@ async def test_services_remain_after_closing_one_out_of_two_tabs( # close project in tab1 await close_project(client, empty_user_project["uuid"], client_session_id1) # wait the defined delay - await sleep(SERVICE_DELETION_DELAY + 1) + await asyncio.sleep(SERVICE_DELETION_DELAY + 1) await garbage_collector.collect_garbage(client.app) # assert dynamic service is still around mocked_director_api["stop_service"].assert_not_called() # close project in tab2 await close_project(client, empty_user_project["uuid"], client_session_id2) # wait the defined delay - await sleep(SERVICE_DELETION_DELAY + 1) + await asyncio.sleep(SERVICE_DELETION_DELAY + 1) await garbage_collector.collect_garbage(client.app) 
mocked_director_api["stop_service"].assert_has_calls( [call(client.server.app, service["service_uuid"], exp_save_state)] @@ -634,7 +661,7 @@ async def test_websocket_disconnected_remove_or_maintain_files_based_on_role( await assert_status(r, web.HTTPOk) # ensure sufficient time is wasted here - await sleep(SERVICE_DELETION_DELAY + 1) + await asyncio.sleep(SERVICE_DELETION_DELAY + 1) await garbage_collector.collect_garbage(client.app) # assert dynamic service is removed diff --git a/services/web/server/tests/unit/with_dbs/conftest.py b/services/web/server/tests/unit/with_dbs/conftest.py index 05b8e4ba167..60c823ffc2e 100644 --- a/services/web/server/tests/unit/with_dbs/conftest.py +++ b/services/web/server/tests/unit/with_dbs/conftest.py @@ -5,18 +5,17 @@ IMPORTANT: remember that these are still unit-tests! """ -# pylint:disable=unused-variable -# pylint:disable=unused-argument -# pylint:disable=redefined-outer-name +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable import json import os import sys import textwrap -from asyncio import Future from copy import deepcopy from pathlib import Path -from typing import Callable, Dict, Iterator, List, Optional +from typing import Callable, Dict, Iterator, List from uuid import uuid4 import aioredis @@ -25,26 +24,23 @@ import simcore_postgres_database.cli as pg_cli import simcore_service_webserver.db_models as orm import simcore_service_webserver.utils -import socketio import sqlalchemy as sa import trafaret_config from aiohttp import web from aiohttp.test_utils import TestClient, TestServer from pydantic import BaseSettings -from pytest_simcore.helpers.utils_assert import assert_status from pytest_simcore.helpers.utils_login import NewUser -from pytest_simcore.helpers.utils_mock import future_with_result from servicelib.aiopg_utils import DSN -from servicelib.application_keys import APP_CONFIG_KEY +from servicelib.application_keys import APP_CONFIG_KEY, APP_DB_ENGINE_KEY from simcore_service_webserver.application import create_application from simcore_service_webserver.application_config import app_schema as app_schema +from simcore_service_webserver.constants import APP_DB_ENGINE_KEY, INDEX_RESOURCE_NAME from simcore_service_webserver.groups_api import ( add_user_in_group, create_user_group, delete_user_group, list_user_groups, ) -from simcore_service_webserver.constants import INDEX_RESOURCE_NAME from yarl import URL # current directory @@ -76,30 +72,27 @@ def default_app_cfg(osparc_simcore_root_dir): @pytest.fixture(scope="session") -def docker_compose_file(default_app_cfg): +def docker_compose_file(default_app_cfg, monkeypatch_session): """Overrides pytest-docker fixture""" - old = os.environ.copy() cfg = deepcopy(default_app_cfg["db"]["postgres"]) # docker-compose reads these environs - os.environ["TEST_POSTGRES_DB"] = cfg["database"] - os.environ["TEST_POSTGRES_USER"] = cfg["user"] - os.environ["TEST_POSTGRES_PASSWORD"] = cfg["password"] + monkeypatch_session.setenv("TEST_POSTGRES_DB", cfg["database"]) + monkeypatch_session.setenv("TEST_POSTGRES_USER", cfg["user"]) + monkeypatch_session.setenv("TEST_POSTGRES_PASSWORD", cfg["password"]) dc_path = current_dir / "docker-compose-devel.yml" assert dc_path.exists() yield str(dc_path) - os.environ = old - # WEB SERVER/CLIENT FIXTURES ------------------------------------------------ @pytest.fixture(scope="function") -def app_cfg(default_app_cfg, aiohttp_unused_port): +def app_cfg(default_app_cfg, aiohttp_unused_port) -> Dict: """Can be 
overriden in any test module to configure the app accordingly """ @@ -137,6 +130,7 @@ def web_server( "Inits webserver with app_cfg", json.dumps(app_cfg, indent=2, cls=_BaseSettingEncoder), ) + # original APP app = create_application(app_cfg) @@ -148,6 +142,13 @@ def web_server( disable_static_webserver(app) server = loop.run_until_complete(aiohttp_server(app, port=app_cfg["main"]["port"])) + + assert isinstance(postgres_db, sa.engine.Engine) + pg_settings = dict(e.split("=") for e in app[APP_DB_ENGINE_KEY].dsn.split()) + assert pg_settings["host"] == postgres_db.url.host + assert int(pg_settings["port"]) == postgres_db.url.port + assert pg_settings["user"] == postgres_db.url.username + return server @@ -166,12 +167,13 @@ def client( @pytest.fixture -def disable_static_webserver(monkeypatch) -> None: +def disable_static_webserver(monkeypatch) -> Callable: """ Disables the static-webserver module. Avoids fecthing and caching index.html pages Mocking a response for all the services which expect it. """ + async def _mocked_index_html(request: web.Request) -> web.Response: """ Emulates the reply of the '/' path when the static-webserver is disabled @@ -202,9 +204,8 @@ def add_index_route(app: web.Application) -> None: def computational_system_mock(mocker): mock_fun = mocker.patch( "simcore_service_webserver.projects.projects_handlers.update_pipeline_db", - return_value=Future(), + return_value="", ) - mock_fun.return_value.set_result("") return mock_fun @@ -226,12 +227,11 @@ async def _mock_copy_data_from_project(*args): mock.side_effect = _mock_copy_data_from_project # requests storage to delete data - # mock1 = mocker.patch('simcore_service_webserver.projects.projects_handlers.delete_data_folders_of_project', return_value=None) + async_mock = mocker.AsyncMock(return_value="") mock1 = mocker.patch( "simcore_service_webserver.projects.projects_handlers.projects_api.delete_data_folders_of_project", - return_value=Future(), + side_effect=async_mock, ) - mock1.return_value.set_result("") return mock, mock1 @@ -239,48 +239,30 @@ async def _mock_copy_data_from_project(*args): def asyncpg_storage_system_mock(mocker): mocked_method = mocker.patch( "simcore_service_webserver.login.storage.AsyncpgStorage.delete_user", - return_value=Future(), + return_value="", ) - mocked_method.return_value.set_result("") return mocked_method -@pytest.fixture -def mocked_director_subsystem(mocker): - mock_director_api = { - "get_running_interactive_services": mocker.patch( - "simcore_service_webserver.director.director_api.get_running_interactive_services", - return_value=future_with_result(""), - ), - "start_service": mocker.patch( - "simcore_service_webserver.director.director_api.start_service", - return_value=future_with_result(""), - ), - "stop_service": mocker.patch( - "simcore_service_webserver.director.director_api.stop_service", - return_value=future_with_result(""), - ), - } - return mock_director_api - - @pytest.fixture async def mocked_director_api(loop, mocker): + # NOTE: patches are done at 'simcore_service_webserver.director.director_api' + # + # Read carefully "where to patch" in https://docs.python.org/3/library/unittest.mock.html#id6 + # mocks = {} - mocked_running_services = mocker.patch( - "simcore_service_webserver.director.director_api.get_running_interactive_services", - return_value=Future(), - ) - mocked_running_services.return_value.set_result("") - mocks["get_running_interactive_services"] = mocked_running_services - mocked_stop_service = mocker.patch( - 
"simcore_service_webserver.director.director_api.stop_service", - return_value=Future(), - ) - mocked_stop_service.return_value.set_result("") - mocks["stop_service"] = mocked_stop_service + for func_name, fake_return in [ + ("get_running_interactive_services", []), + ("start_service", []), + ("stop_service", None), + ]: + mocks[func_name] = mocker.patch( + f"simcore_service_webserver.director.director_api.{func_name}", + return_value=fake_return, + name=f"{__name__}.mocked_director_api::director_api.{func_name}", + ) - yield mocks + return mocks @pytest.fixture @@ -310,10 +292,7 @@ async def create(user_id, project_id) -> Dict: services.append(running_service_dict) # reset the future or an invalidStateError will appear as set_result sets the future to done - mocked_director_api["get_running_interactive_services"].return_value = Future() - mocked_director_api["get_running_interactive_services"].return_value.set_result( - services - ) + mocked_director_api["get_running_interactive_services"].return_value = services return running_service_dict return create @@ -348,10 +327,12 @@ def postgres_service(docker_services, postgres_dsn): def postgres_db( postgres_dsn: Dict, postgres_service: str ) -> Iterator[sa.engine.Engine]: + # Overrides packages/pytest-simcore/src/pytest_simcore/postgres_service.py::postgres_db to reduce scope url = postgres_service # Configures db and initializes tables - pg_cli.discover.callback(**postgres_dsn) + kwargs = postgres_dsn.copy() + pg_cli.discover.callback(**kwargs) pg_cli.upgrade.callback("head") # Uses syncrounous engine for that engine = sa.create_engine(url, isolation_level="AUTOCOMMIT") diff --git a/tests/environment-setup/requirements/requirements.txt b/tests/environment-setup/requirements/requirements.txt index fb1f130a92c..8f26ff76678 100644 --- a/tests/environment-setup/requirements/requirements.txt +++ b/tests/environment-setup/requirements/requirements.txt @@ -14,16 +14,8 @@ attrs==21.2.0 # pytest chardet==4.0.0 # via aiohttp -idna-ssl==1.1.0 - # via aiohttp idna==3.1 - # via - # idna-ssl - # yarl -importlib-metadata==4.0.1 - # via - # pluggy - # pytest + # via yarl iniconfig==1.1.1 # via pytest multidict==5.1.0 @@ -63,11 +55,6 @@ termcolor==1.1.0 toml==0.10.2 # via pytest typing-extensions==3.10.0.0 - # via - # aiohttp - # importlib-metadata - # yarl + # via aiohttp yarl==1.6.3 # via aiohttp -zipp==3.4.1 - # via importlib-metadata diff --git a/tests/environment-setup/test_used_python.py b/tests/environment-setup/test_used_python.py index c728f4ec4a2..c70beb99386 100644 --- a/tests/environment-setup/test_used_python.py +++ b/tests/environment-setup/test_used_python.py @@ -3,18 +3,22 @@ # pylint: disable=redefined-outer-name +import configparser import re import sys from pathlib import Path from typing import List, Tuple import pytest +import yaml PIP_INSTALL_UPGRADE_PATTERN = re.compile( r"pip .* install\s+--upgrade .* pip([=~><]+)([\d\.]+)", re.DOTALL ) PYTHON_VERSION_DOCKER_PATTERN = re.compile(r"ARG PYTHON_VERSION=\"([\d\.]+)\"") +FROZEN_SERVICES = ["director"] + # TODO: enhance version comparison with from packaging.version from setuptools def to_version(version: str) -> Tuple[int, ...]: @@ -52,16 +56,17 @@ def expected_python_version(osparc_simcore_root_dir: Path) -> Tuple[int, ...]: def expected_pip_version(osparc_simcore_root_dir: Path) -> str: version = None ref_script = osparc_simcore_root_dir / "ci/helpers/ensure_python_pip.bash" + found = re.search(r"PIP_VERSION=([\d\.]+)", ref_script.read_text()) - if found: - version = found.group(1) + 
assert found + version = found.group(1) print( str(ref_script.relative_to(osparc_simcore_root_dir)), "->", version, ) - assert found and version + assert version return version @@ -102,16 +107,21 @@ def python_in_dockerfiles(osparc_simcore_root_dir: Path) -> List[Tuple[Path, str return res -def test_all_image_use_same_python_version( +def test_all_images_have_the_same_python_version( python_in_dockerfiles, expected_python_version ): for dockerfile, python_version in python_in_dockerfiles: - current_version, expected_version = make_versions_comparable( - python_version, expected_python_version - ) - assert ( - current_version == expected_version - ), f"Expected python {expected_python_version} in {dockerfile}, got {python_version}" + if dockerfile.parent.name not in FROZEN_SERVICES: + current_version, expected_version = make_versions_comparable( + python_version, expected_python_version + ) + assert ( + current_version == expected_version + ), f"Expected python {expected_python_version} in {dockerfile}, got {python_version}" + else: + print( + f"Skipping check on {dockerfile} since development of this service/package is frozen" + ) def test_running_python_version(expected_python_version): @@ -120,11 +130,45 @@ def test_running_python_version(expected_python_version): ) assert ( current_version == expected_version - ), f"Expected python {to_str(sys.version_info)} installed, got {to_str(expected_python_version)}" + ), f"Expected python {to_str(expected_python_version)} installed, got {to_str(tuple(sys.version_info))}" -def test_all_pip_have_same_version(expected_pip_version, pip_in_dockerfiles): +def test_all_images_have_the_same_pip_version(expected_pip_version, pip_in_dockerfiles): for dockerfile, pip_version in pip_in_dockerfiles: - assert ( - pip_version == expected_pip_version - ), f"Expected pip {expected_pip_version} in {dockerfile}, got {pip_version}" + if dockerfile.parent.name in FROZEN_SERVICES: + print( + f"Skipping check on {dockerfile} since development of this service/package is frozen" + ) + else: + assert ( + pip_version == expected_pip_version + ), f"Expected pip {expected_pip_version} in {dockerfile}, got {pip_version}" + + +def test_tooling_pre_commit_config( + osparc_simcore_root_dir: Path, expected_python_version ): + pre_commit_config = yaml.safe_load( + (osparc_simcore_root_dir / ".pre-commit-config.yaml").read_text() + ) + py_version = tuple( + map( + int, + pre_commit_config["default_language_version"]["python"] + .replace("python", "") + .split("."), + ) + ) + + assert py_version == expected_python_version + + +def test_tooling_mypy_ini(osparc_simcore_root_dir: Path, expected_python_version): + mypy_ini_path = osparc_simcore_root_dir / "mypy.ini" + + assert mypy_ini_path.exists() + + mypy_ini = configparser.ConfigParser() + mypy_ini.read(mypy_ini_path) + + assert mypy_ini["mypy"]["python_version"] == to_str(expected_python_version) diff --git a/tests/public-api/requirements/_test.txt b/tests/public-api/requirements/_test.txt index 8c16ea1880a..acfc796f13e 100644 --- a/tests/public-api/requirements/_test.txt +++ b/tests/public-api/requirements/_test.txt @@ -4,8 +4,6 @@ # # pip-compile --output-file=requirements/_test.txt requirements/_test.in # -async-generator==1.10 - # via httpx attrs==21.2.0 # via # jsonschema @@ -16,8 +14,6 @@ certifi==2020.12.5 # requests chardet==4.0.0 # via requests -contextvars==2.4 - # via sniffio coverage==5.5 # via pytest-cov docker==5.0.0 @@ -34,13 +30,6 @@ idna==2.10 # via # requests # rfc3986 -immutables==0.15 - # via contextvars 
-importlib-metadata==4.0.1 - # via - # jsonschema - # pluggy - # pytest iniconfig==1.1.1 # via pytest jsonschema==3.2.0 @@ -92,16 +81,12 @@ text-unidecode==1.3 # via faker toml==0.10.2 # via pytest -typing-extensions==3.10.0.0 - # via importlib-metadata urllib3==1.26.4 # via # -c requirements/../../../requirements/constraints.txt # requests websocket-client==0.59.0 # via docker -zipp==3.4.1 - # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/tests/public-api/requirements/_tools.txt b/tests/public-api/requirements/_tools.txt index ed6b099782d..792898eeef8 100644 --- a/tests/public-api/requirements/_tools.txt +++ b/tests/public-api/requirements/_tools.txt @@ -18,24 +18,12 @@ click==8.0.0 # via # black # pip-tools -dataclasses==0.8 - # via black distlib==0.3.1 # via virtualenv filelock==3.0.12 # via virtualenv identify==2.2.4 # via pre-commit -importlib-metadata==4.0.1 - # via - # -c requirements/_test.txt - # pep517 - # pre-commit - # virtualenv -importlib-resources==5.1.3 - # via - # pre-commit - # virtualenv isort==5.8.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==0.4.3 @@ -67,21 +55,8 @@ toml==0.10.2 # black # pep517 # pre-commit -typed-ast==1.4.3 - # via black -typing-extensions==3.10.0.0 - # via - # -c requirements/_test.txt - # black - # importlib-metadata virtualenv==20.4.6 # via pre-commit -zipp==3.4.1 - # via - # -c requirements/_test.txt - # importlib-metadata - # importlib-resources - # pep517 # The following packages are considered to be unsafe in a requirements file: # pip diff --git a/tests/swarm-deploy/requirements/_test.txt b/tests/swarm-deploy/requirements/_test.txt index 7e5aad2acda..a277eb2051a 100644 --- a/tests/swarm-deploy/requirements/_test.txt +++ b/tests/swarm-deploy/requirements/_test.txt @@ -72,10 +72,6 @@ coverage==5.5 # via # -r requirements/_test.in # pytest-cov -dataclasses==0.8 - # via - # pydantic - # werkzeug decorator==4.4.2 # via networkx dnspython==2.1.0 @@ -86,8 +82,6 @@ docker==5.0.0 # -r requirements/_test.in email-validator==1.1.2 # via pydantic -idna-ssl==1.1.0 - # via aiohttp idna==2.10 # via # -r requirements/../../../packages/models-library/requirements/_base.in @@ -97,14 +91,8 @@ idna==2.10 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # email-validator - # idna-ssl # requests # yarl -importlib-metadata==4.0.1 - # via - # jsonschema - # pluggy - # pytest iniconfig==1.1.1 # via pytest isodate==0.6.0 @@ -292,9 +280,7 @@ trafaret==2.1.0 typing-extensions==3.10.0.0 # via # aiohttp - # importlib-metadata # pydantic - # yarl ujson==4.0.2 # via # -r requirements/../../../packages/service-library/requirements/_base.in @@ -325,8 +311,6 @@ yarl==1.6.3 # aio-pika # aiohttp # aiormq -zipp==3.4.1 - # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/tests/swarm-deploy/requirements/_tools.txt b/tests/swarm-deploy/requirements/_tools.txt index e0242337819..a88f30f60b8 100644 --- a/tests/swarm-deploy/requirements/_tools.txt +++ b/tests/swarm-deploy/requirements/_tools.txt @@ -21,26 +21,12 @@ click==8.0.0 # -c requirements/_test.txt # black # pip-tools -dataclasses==0.8 - # via - # -c requirements/_test.txt - # black distlib==0.3.1 # via virtualenv filelock==3.0.12 # via virtualenv identify==2.2.4 
# via pre-commit -importlib-metadata==4.0.1 - # via - # -c requirements/_test.txt - # pep517 - # pre-commit - # virtualenv -importlib-resources==5.1.3 - # via - # pre-commit - # virtualenv isort==5.8.0 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==0.4.3 @@ -73,23 +59,10 @@ toml==0.10.2 # black # pep517 # pre-commit -typed-ast==1.4.3 - # via black -typing-extensions==3.10.0.0 - # via - # -c requirements/_test.txt - # black - # importlib-metadata virtualenv==20.4.6 # via pre-commit watchdog[watchmedo]==2.1.1 # via -r requirements/_tools.in -zipp==3.4.1 - # via - # -c requirements/_test.txt - # importlib-metadata - # importlib-resources - # pep517 # The following packages are considered to be unsafe in a requirements file: # pip diff --git a/tests/swarm-deploy/test_swarm_runs.py b/tests/swarm-deploy/test_swarm_runs.py index 3df58820fc1..836179141d7 100644 --- a/tests/swarm-deploy/test_swarm_runs.py +++ b/tests/swarm-deploy/test_swarm_runs.py @@ -81,17 +81,17 @@ def test_all_services_up( [ "api-server", "catalog", - "director", "director-v2", + "director", "migration", - "sidecar", - "storage", - "webserver", - "static-webserver", - "rabbit", "postgres", + "rabbit", "redis", + "sidecar", + "static-webserver", + "storage", "traefik", + "webserver", "whoami", ], )
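Reviewer note, not part of the patch: throughout the conftest and test diffs above, `future_with_result(...)` / `Future()` return values are replaced with plain values, and the silent no-op `has_calls(...)` is replaced with real `assert_has_calls(...)` / `assert_awaited_with(...)` checks. The sketch below, using hypothetical module and function names, illustrates why that works once the suite runs on Python 3.8: `unittest.mock.patch()` substitutes an `AsyncMock` for an `async def` target, so `return_value` can hold the awaited result directly (see also the "where to patch" note added to the `mocked_director_api` fixture).

```python
# Minimal sketch (hypothetical names, not part of the patch) of the mocking
# pattern this PR adopts on Python 3.8.
import asyncio
from unittest import mock


async def stop_service(app, service_uuid, save_state):
    # stand-in for an async director-api call; never executed in the test
    raise RuntimeError("real call must not run in unit tests")


async def code_under_test():
    await stop_service("the-app", "uuid-1", True)


def test_stop_service_called_with_save_state():
    # patch where the name is looked up (this module), cf. "where to patch"
    with mock.patch(f"{__name__}.stop_service", return_value=None) as stop_mock:
        # patch() returns an AsyncMock for an async def target on Python >= 3.8,
        # so no pre-resolved asyncio.Future is needed
        asyncio.run(code_under_test())
        # real assertions: these raise AssertionError on mismatch, whereas the
        # old `has_calls(...)` only created a child mock and never failed
        stop_mock.assert_awaited_with("the-app", "uuid-1", True)
        stop_mock.assert_has_calls([mock.call("the-app", "uuid-1", True)])


if __name__ == "__main__":
    test_stop_service_called_with_save_state()
    print("ok")
```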