diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml index ae1464b7648..ee8a923d677 100644 --- a/.github/workflows/ci-testing-deploy.yml +++ b/.github/workflows/ci-testing-deploy.yml @@ -249,6 +249,7 @@ jobs: with: name: unit_sidecar_coverage path: codeclimate.unit_sidecar_coverage.json + unit-test-frontend: name: Unit-testing frontend runs-on: ${{ matrix.os }} @@ -320,6 +321,53 @@ jobs: - name: test run: ./ci/github/unit-testing/python-linting.bash test + unit-test-postgres-database: + name: Unit-testing postgres-database + runs-on: ${{ matrix.os }} + strategy: + matrix: + python: [3.6] + os: [ubuntu-20.04] + fail-fast: false + steps: + - uses: actions/checkout@v2 + - name: setup docker + run: | + sudo ./ci/github/helpers/setup_docker_compose.bash + ./ci/github/helpers/setup_docker_experimental.bash + ./ci/github/helpers/setup_docker_buildx.bash + echo ::set-env name=DOCKER_BUILDX::1 + - name: setup python environment + uses: actions/setup-python@v1.1.1 + with: + python-version: ${{ matrix.python }} + - name: show system version + run: ./ci/helpers/show_system_versions.bash + - uses: actions/cache@v1 + name: getting cached data + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} + restore-keys: | + ${{ runner.os }}-pip- + - name: install + run: ./ci/github/unit-testing/postgres-database.bash install + - name: test + run: ./ci/github/unit-testing/postgres-database.bash test + - uses: codecov/codecov-action@v1 + with: + flags: unittests #optional + - name: prepare codeclimate coverage file + run: | + curl -L https://codeclimate.com/downloads/test-reporter/test-reporter-latest-linux-amd64 > ./cc-test-reporter + chmod +x ./cc-test-reporter + ./cc-test-reporter format-coverage -t coverage.py -o codeclimate.unit_postgresdb_coverage.json coverage.xml + - name: upload codeclimate coverage + uses: actions/upload-artifact@v2 + with: + name: unit_postgresdb_coverage + path: 
codeclimate.unit_postgresdb_coverage.json + unit-test-service-library: name: Unit-testing service-library runs-on: ${{ matrix.os }} diff --git a/api/tests/requirements.txt b/api/tests/requirements.txt index d37936ec15c..2aebb922871 100644 --- a/api/tests/requirements.txt +++ b/api/tests/requirements.txt @@ -6,7 +6,7 @@ # aiohttp==3.6.2 # via -r requirements.in, pytest-aiohttp async-timeout==3.0.1 # via aiohttp -attrs==19.3.0 # via aiohttp, jsonschema, openapi-core, pytest +attrs==20.1.0 # via aiohttp, jsonschema, openapi-core, pytest chardet==3.0.4 # via aiohttp coverage==5.2.1 # via -r requirements.in, pytest-cov idna-ssl==1.1.0 # via aiohttp @@ -37,7 +37,7 @@ six==1.15.0 # via isodate, jsonschema, openapi-core, openapi-schem strict-rfc3339==0.7 # via openapi-schema-validator termcolor==1.1.0 # via pytest-sugar toml==0.10.1 # via pytest -typing-extensions==3.7.4.2 # via aiohttp, yarl +typing-extensions==3.7.4.3 # via aiohttp, yarl werkzeug==1.0.1 # via openapi-core yarl==1.5.1 # via aiohttp zipp==3.1.0 # via importlib-metadata diff --git a/ci/github/unit-testing/postgres-database.bash b/ci/github/unit-testing/postgres-database.bash new file mode 100755 index 00000000000..0131a28bda3 --- /dev/null +++ b/ci/github/unit-testing/postgres-database.bash @@ -0,0 +1,34 @@ +#!/bin/bash +# http://redsymbol.net/articles/unofficial-bash-strict-mode/ +set -euo pipefail +IFS=$'\n\t' + +install() { + bash ci/helpers/ensure_python_pip.bash + pushd packages/postgres-database; pip3 install -r requirements/ci.txt; popd; + pip list -v +} + +test() { + pytest \ + --color=yes \ + --durations=10 \ + --cov=simcore_postgres_database \ + --cov-append \ + --cov-report=term-missing \ + --cov-report=xml \ + --cov-config=.coveragerc \ + --verbose \ + packages/postgres-database/tests +} + +# Check if the function exists (bash specific) +if declare -f "$1" > /dev/null +then + # call arguments verbatim + "$@" +else + # Show a helpful error + echo "'$1' is not a known function name" >&2 + exit 1 
+fi diff --git a/packages/postgres-database/requirements/_base.txt b/packages/postgres-database/requirements/_base.txt index 4216259f44e..a1a556ce21b 100644 --- a/packages/postgres-database/requirements/_base.txt +++ b/packages/postgres-database/requirements/_base.txt @@ -7,6 +7,6 @@ idna==2.10 # via yarl multidict==4.7.6 # via yarl psycopg2-binary==2.8.5 # via sqlalchemy -sqlalchemy[postgresql_psycopg2binary]==1.3.18 # via -r requirements/_base.in -typing-extensions==3.7.4.2 # via yarl +sqlalchemy[postgresql_psycopg2binary]==1.3.19 # via -r requirements/_base.in +typing-extensions==3.7.4.3 # via yarl yarl==1.5.1 # via -r requirements/_base.in diff --git a/packages/postgres-database/requirements/_migration.txt b/packages/postgres-database/requirements/_migration.txt index 569481efe01..99ec2763727 100644 --- a/packages/postgres-database/requirements/_migration.txt +++ b/packages/postgres-database/requirements/_migration.txt @@ -18,9 +18,9 @@ python-dateutil==2.8.1 # via alembic python-editor==1.0.4 # via alembic requests==2.24.0 # via docker six==1.15.0 # via docker, python-dateutil, tenacity, websocket-client -sqlalchemy[postgresql_psycopg2binary]==1.3.18 # via -r requirements/_base.txt, alembic +sqlalchemy[postgresql_psycopg2binary]==1.3.19 # via -r requirements/_base.txt, alembic tenacity==6.2.0 # via -r requirements/_migration.in -typing-extensions==3.7.4.2 # via -r requirements/_base.txt +typing-extensions==3.7.4.3 # via -r requirements/_base.txt, yarl urllib3==1.25.10 # via -r requirements/_migration.in, requests websocket-client==0.57.0 # via docker yarl==1.5.1 # via -r requirements/_base.txt diff --git a/packages/postgres-database/requirements/_test.txt b/packages/postgres-database/requirements/_test.txt index 48712460fdc..192f4f0a0de 100644 --- a/packages/postgres-database/requirements/_test.txt +++ b/packages/postgres-database/requirements/_test.txt @@ -25,8 +25,10 @@ docker[ssh]==4.3.1 # via -r requirements/_migration.txt, docker-compose dockerpty==0.4.1 
# via docker-compose docopt==0.6.2 # via coveralls, docker-compose faker==4.1.2 # via -r requirements/_test.in -idna==2.10 # via -r requirements/_migration.txt, requests, yarl -isort==4.3.21 # via pylint +idna-ssl==1.1.0 # via aiohttp +idna==2.10 # via -r requirements/_migration.txt, idna-ssl, requests, yarl +importlib-metadata==1.7.0 # via jsonschema, pluggy, pytest +isort==5.4.2 # via pylint jsonschema==3.2.0 # via docker-compose lazy-object-proxy==1.4.3 # via astroid mako==1.1.3 # via -r requirements/_migration.txt, alembic @@ -40,7 +42,7 @@ pluggy==0.13.1 # via pytest psycopg2-binary==2.8.5 # via -r requirements/_migration.txt, aiopg, sqlalchemy py==1.9.0 # via pytest pycparser==2.20 # via cffi -pylint==2.5.3 # via -r requirements/_test.in +pylint==2.6.0 # via -r requirements/_test.in pynacl==1.4.0 # via paramiko pyparsing==2.4.7 # via packaging pyrsistent==0.16.0 # via jsonschema @@ -56,17 +58,19 @@ python-editor==1.0.4 # via -r requirements/_migration.txt, alembic pyyaml==5.3.1 # via -r requirements/_test.in, docker-compose requests==2.24.0 # via -r requirements/_migration.txt, coveralls, docker, docker-compose six==1.15.0 # via -r requirements/_migration.txt, astroid, bcrypt, cryptography, docker, docker-compose, dockerpty, jsonschema, packaging, pynacl, pyrsistent, python-dateutil, tenacity, websocket-client -sqlalchemy[postgresql_psycopg2binary]==1.3.18 # via -r requirements/_migration.txt, aiopg, alembic +sqlalchemy[postgresql_psycopg2binary]==1.3.19 # via -r requirements/_migration.txt, aiopg, alembic tenacity==6.2.0 # via -r requirements/_migration.txt text-unidecode==1.3 # via faker texttable==1.6.2 # via docker-compose toml==0.10.1 # via pylint -typing-extensions==3.7.4.2 # via -r requirements/_migration.txt +typed-ast==1.4.1 # via astroid +typing-extensions==3.7.4.3 # via -r requirements/_migration.txt, aiohttp, yarl urllib3==1.25.10 # via -r requirements/_migration.txt, requests wcwidth==0.2.5 # via pytest websocket-client==0.57.0 # via -r 
requirements/_migration.txt, docker, docker-compose wrapt==1.12.1 # via astroid yarl==1.5.1 # via -r requirements/_migration.txt, aiohttp +zipp==3.1.0 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/packages/postgres-database/requirements/ci.txt b/packages/postgres-database/requirements/ci.txt index 8a60c977a55..8d7d91f343f 100644 --- a/packages/postgres-database/requirements/ci.txt +++ b/packages/postgres-database/requirements/ci.txt @@ -9,5 +9,8 @@ # installs base + tests requirements -r _test.txt +# installs this repo's packages +-e ../../packages/pytest-simcore/ + # current module -. +.[migration] diff --git a/packages/postgres-database/tests/test_delete_projects_and_users.py b/packages/postgres-database/tests/test_delete_projects_and_users.py index c01fb8c8ee5..b9a33bcb11d 100644 --- a/packages/postgres-database/tests/test_delete_projects_and_users.py +++ b/packages/postgres-database/tests/test_delete_projects_and_users.py @@ -16,29 +16,27 @@ @pytest.fixture -def engine(make_engine, loop): - async def start(): - engine = await make_engine() - sync_engine = make_engine(False) - metadata.drop_all(sync_engine) - metadata.create_all(sync_engine) - - async with engine.acquire() as conn: - await conn.execute(users.insert().values(**random_user(name="A"))) - await conn.execute(users.insert().values(**random_user())) - await conn.execute(users.insert().values(**random_user())) - - await conn.execute(projects.insert().values(**random_project(prj_owner=1))) - await conn.execute(projects.insert().values(**random_project(prj_owner=2))) - await conn.execute(projects.insert().values(**random_project(prj_owner=3))) - with pytest.raises(ForeignKeyViolation): - await conn.execute( - projects.insert().values(**random_project(prj_owner=4)) - ) - - return engine - - return loop.run_until_complete(start()) +async def engine(make_engine, loop): + engine = await make_engine() + sync_engine = 
make_engine(False) + metadata.drop_all(sync_engine) + metadata.create_all(sync_engine) + + async with engine.acquire() as conn: + await conn.execute(users.insert().values(**random_user(name="A"))) + await conn.execute(users.insert().values(**random_user())) + await conn.execute(users.insert().values(**random_user())) + + await conn.execute(projects.insert().values(**random_project(prj_owner=1))) + await conn.execute(projects.insert().values(**random_project(prj_owner=2))) + await conn.execute(projects.insert().values(**random_project(prj_owner=3))) + with pytest.raises(ForeignKeyViolation): + await conn.execute(projects.insert().values(**random_project(prj_owner=4))) + + yield engine + + engine.close() + await engine.wait_closed() @pytest.mark.skip(reason="sandbox for dev purposes") diff --git a/packages/postgres-database/tests/test_uniqueness_in_comp_tasks.py b/packages/postgres-database/tests/test_uniqueness_in_comp_tasks.py index 11329e1425e..897aa300c72 100644 --- a/packages/postgres-database/tests/test_uniqueness_in_comp_tasks.py +++ b/packages/postgres-database/tests/test_uniqueness_in_comp_tasks.py @@ -15,24 +15,26 @@ @pytest.fixture -def engine(make_engine, loop): - async def start(): - engine = await make_engine() - sync_engine = make_engine(False) - metadata.drop_all(sync_engine) - metadata.create_all(sync_engine) - - async with engine.acquire() as conn: - await conn.execute( - comp_pipeline.insert().values(**fake_pipeline(project_id="PA")) - ) - await conn.execute( - comp_pipeline.insert().values(**fake_pipeline(project_id="PB")) - ) +async def engine(loop, make_engine): + + engine = await make_engine() + sync_engine = make_engine(False) + metadata.drop_all(sync_engine) + metadata.create_all(sync_engine) + + async with engine.acquire() as conn: + await conn.execute( + comp_pipeline.insert().values(**fake_pipeline(project_id="PA")) + ) + await conn.execute( + comp_pipeline.insert().values(**fake_pipeline(project_id="PB")) + ) + + yield engine - return 
engine + engine.close() + await engine.wait_closed() - return loop.run_until_complete(start()) async def test_unique_project_node_pairs(engine): diff --git a/packages/s3wrapper/requirements/_test.txt b/packages/s3wrapper/requirements/_test.txt index 28f7cf417ae..c61a4bfae31 100644 --- a/packages/s3wrapper/requirements/_test.txt +++ b/packages/s3wrapper/requirements/_test.txt @@ -17,12 +17,12 @@ coveralls==2.1.2 # via -r requirements/_test.in cryptography==3.0 # via paramiko distro==1.5.0 # via docker-compose docker-compose==1.26.2 # via pytest-docker -docker[ssh]==4.3.0 # via docker-compose +docker[ssh]==4.3.1 # via docker-compose dockerpty==0.4.1 # via docker-compose docopt==0.6.2 # via coveralls, docker-compose idna==2.10 # via requests importlib-metadata==1.7.0 # via jsonschema, pluggy, pytest -isort==4.3.21 # via pylint +isort==5.4.2 # via pylint jsonschema==3.2.0 # via docker-compose lazy-object-proxy==1.4.3 # via astroid mccabe==0.6.1 # via pylint @@ -33,7 +33,7 @@ paramiko==2.7.1 # via docker pluggy==0.13.1 # via pytest py==1.9.0 # via pytest pycparser==2.20 # via cffi -pylint==2.5.3 # via -r requirements/_test.in +pylint==2.6.0 # via -r requirements/_test.in pynacl==1.4.0 # via paramiko pyparsing==2.4.7 # via packaging pyrsistent==0.16.0 # via jsonschema diff --git a/packages/service-library/requirements/_base.in b/packages/service-library/requirements/_base.in index 78d7c098b63..f0f3f07d9f6 100644 --- a/packages/service-library/requirements/_base.in +++ b/packages/service-library/requirements/_base.in @@ -16,6 +16,6 @@ werkzeug jsonschema prometheus_client tenacity -attrs +attrs<20,>=19 # from pytest-docker==0.8.0 trafaret aiodebug diff --git a/packages/service-library/requirements/_base.txt b/packages/service-library/requirements/_base.txt index 0ec9dfdecfa..3a40ca6e357 100644 --- a/packages/service-library/requirements/_base.txt +++ b/packages/service-library/requirements/_base.txt @@ -25,11 +25,11 @@ psycopg2-binary==2.8.5 # via -r 
requirements/_base.in, aiopg, sqlalchemy pyrsistent==0.16.0 # via jsonschema pyyaml==5.3.1 # via -r requirements/_base.in, openapi-spec-validator six==1.15.0 # via isodate, jsonschema, openapi-core, openapi-spec-validator, pyrsistent, tenacity -sqlalchemy[postgresql_psycopg2binary]==1.3.18 # via -r requirements/_base.in, aiopg +sqlalchemy[postgresql_psycopg2binary]==1.3.19 # via -r requirements/_base.in, aiopg strict-rfc3339==0.7 # via openapi-core tenacity==6.2.0 # via -r requirements/_base.in trafaret==2.0.2 # via -r requirements/_base.in -typing-extensions==3.7.4.2 # via aiohttp, yarl +typing-extensions==3.7.4.3 # via aiohttp, yarl ujson==3.1.0 # via -r requirements/_base.in werkzeug==1.0.1 # via -r requirements/_base.in yarl==1.5.1 # via aiohttp diff --git a/packages/service-library/requirements/_test.txt b/packages/service-library/requirements/_test.txt index 86c1745988e..255c680d834 100644 --- a/packages/service-library/requirements/_test.txt +++ b/packages/service-library/requirements/_test.txt @@ -21,14 +21,14 @@ coveralls==2.1.2 # via -r requirements/_test.in cryptography==3.0 # via paramiko distro==1.5.0 # via docker-compose docker-compose==1.26.2 # via pytest-docker -docker[ssh]==4.3.0 # via docker-compose +docker[ssh]==4.3.1 # via docker-compose dockerpty==0.4.1 # via docker-compose docopt==0.6.2 # via coveralls, docker-compose idna-ssl==1.1.0 # via -r requirements/_base.txt, aiohttp idna==2.10 # via -r requirements/_base.txt, idna-ssl, requests, yarl importlib-metadata==1.7.0 # via -r requirements/_base.txt, jsonschema, pluggy, pytest isodate==0.6.0 # via -r requirements/_base.txt, openapi-core -isort==4.3.21 # via pylint +isort==5.4.2 # via pylint jsonschema==3.2.0 # via -r requirements/_base.txt, docker-compose, openapi-spec-validator lazy-object-proxy==1.4.3 # via -r requirements/_base.txt, astroid, openapi-core mccabe==0.6.1 # via pylint @@ -43,7 +43,7 @@ prometheus-client==0.8.0 # via -r requirements/_base.txt psycopg2-binary==2.8.5 # via -r 
requirements/_base.txt, aiopg, sqlalchemy py==1.9.0 # via pytest pycparser==2.20 # via cffi -pylint==2.5.3 # via -r requirements/_test.in +pylint==2.6.0 # via -r requirements/_test.in pynacl==1.4.0 # via paramiko pyparsing==2.4.7 # via packaging pyrsistent==0.16.0 # via -r requirements/_base.txt, jsonschema @@ -51,7 +51,7 @@ pytest-aiohttp==0.3.0 # via -r requirements/_test.in pytest-cov==2.10.1 # via -r requirements/_test.in pytest-docker==0.8.0 # via -r requirements/_test.in pytest-instafail==0.4.2 # via -r requirements/_test.in -pytest-mock==3.2.0 # via -r requirements/_test.in +pytest-mock==3.3.0 # via -r requirements/_test.in pytest-runner==5.2 # via -r requirements/_test.in pytest-sugar==0.9.4 # via -r requirements/_test.in pytest==5.4.3 # via -r requirements/_test.in, pytest-aiohttp, pytest-cov, pytest-docker, pytest-instafail, pytest-mock, pytest-sugar @@ -59,7 +59,7 @@ python-dotenv==0.14.0 # via docker-compose pyyaml==5.3.1 # via -r requirements/_base.txt, docker-compose, openapi-spec-validator requests==2.24.0 # via coveralls, docker, docker-compose six==1.15.0 # via -r requirements/_base.txt, astroid, bcrypt, cryptography, docker, docker-compose, dockerpty, isodate, jsonschema, openapi-core, openapi-spec-validator, packaging, pynacl, pyrsistent, tenacity, websocket-client -sqlalchemy[postgresql_psycopg2binary]==1.3.18 # via -r requirements/_base.txt, aiopg +sqlalchemy[postgresql_psycopg2binary]==1.3.19 # via -r requirements/_base.txt, aiopg strict-rfc3339==0.7 # via -r requirements/_base.txt, openapi-core tenacity==6.2.0 # via -r requirements/_base.txt termcolor==1.1.0 # via pytest-sugar @@ -67,7 +67,7 @@ texttable==1.6.2 # via docker-compose toml==0.10.1 # via pylint trafaret==2.0.2 # via -r requirements/_base.txt typed-ast==1.4.1 # via astroid -typing-extensions==3.7.4.2 # via -r requirements/_base.txt, aiohttp, yarl +typing-extensions==3.7.4.3 # via -r requirements/_base.txt, aiohttp, yarl ujson==3.1.0 # via -r requirements/_base.txt urllib3==1.25.10 
# via requests wcwidth==0.2.5 # via pytest diff --git a/packages/service-library/src/servicelib/application_setup.py b/packages/service-library/src/servicelib/application_setup.py index d677e7359bc..2b75d859222 100644 --- a/packages/service-library/src/servicelib/application_setup.py +++ b/packages/service-library/src/servicelib/application_setup.py @@ -2,7 +2,7 @@ import inspect import logging from enum import Enum -from typing import Dict, List, Optional, Callable +from typing import Callable, Dict, List, Optional from aiohttp import web @@ -45,7 +45,7 @@ def app_module_setup( See packages/service-library/tests/test_application_setup.py - :param module_name: typically __name__ + :param module_name: typically __name__ :param depends: list of module_names that must be called first, defaults to None :param config_section: explicit configuration section, defaults to None (i.e. the name of the module, or last entry of the name if dotted) :param config_enabled: option in config to enable, defaults to None which is '$(module-section).enabled' (config_section and config_enabled are mutually exclusive) @@ -123,7 +123,7 @@ def _get(cfg_, parts): except KeyError as ee: raise ApplicationSetupError( f"Cannot find required option '{config_enabled}' in app config's section '{ee}'" - ) + ) from ee if not is_enabled: logger.info( diff --git a/packages/service-library/src/servicelib/incidents.py b/packages/service-library/src/servicelib/incidents.py index f5feba8c10a..dfab3e0e109 100644 --- a/packages/service-library/src/servicelib/incidents.py +++ b/packages/service-library/src/servicelib/incidents.py @@ -2,7 +2,6 @@ import attr - # UTILS --- ItemT = TypeVar("ItemT") @@ -26,12 +25,16 @@ class LimitedOrderedStack(Generic[ItemT]): _items: List[ItemT] = attr.ib(init=False, default=attr.Factory(list)) _hits: int = attr.ib(init=False, default=0) - def __len__(self): + def __len__(self) -> int: # called also for __bool__ return len(self._items) + def clear(self) -> None: + 
self._items.clear() + self._hits = 0 + @property - def hits(self): + def hits(self) -> int: return self._hits @property diff --git a/packages/service-library/src/servicelib/monitoring.py b/packages/service-library/src/servicelib/monitoring.py index 848dc341fb9..72a65b4dfe9 100644 --- a/packages/service-library/src/servicelib/monitoring.py +++ b/packages/service-library/src/servicelib/monitoring.py @@ -16,7 +16,6 @@ from aiohttp import web from prometheus_client import CONTENT_TYPE_LATEST, Counter, Gauge, Histogram - log = logging.getLogger(__name__) diff --git a/packages/service-library/src/servicelib/resources.py b/packages/service-library/src/servicelib/resources.py index b382ea2860d..cb4f42eaa67 100644 --- a/packages/service-library/src/servicelib/resources.py +++ b/packages/service-library/src/servicelib/resources.py @@ -3,10 +3,11 @@ See https://setuptools.readthedocs.io/en/latest/pkg_resources.html """ import pathlib -import pkg_resources from pathlib import Path from typing import TextIO + import attr +import pkg_resources @attr.s(frozen=True, auto_attribs=True) diff --git a/packages/service-library/src/servicelib/rest_codecs.py b/packages/service-library/src/servicelib/rest_codecs.py index 4e6effba714..d1ac706431e 100644 --- a/packages/service-library/src/servicelib/rest_codecs.py +++ b/packages/service-library/src/servicelib/rest_codecs.py @@ -1,9 +1,10 @@ """ rest - json data encoders/decodes """ -import attr import json +import attr + class DataEncoder(json.JSONEncoder): """ diff --git a/packages/service-library/src/servicelib/rest_models.py b/packages/service-library/src/servicelib/rest_models.py index a83abf15329..45739be596a 100644 --- a/packages/service-library/src/servicelib/rest_models.py +++ b/packages/service-library/src/servicelib/rest_models.py @@ -2,10 +2,10 @@ UNDER DEVELOPMENT """ -import attr import typing import warnings +import attr warnings.warn("DO NOT USE IN PRODUCTION, STILL UNDER DEVELOPMENT") diff --git 
a/packages/service-library/src/servicelib/rest_oas.py b/packages/service-library/src/servicelib/rest_oas.py index eb51bfbf53e..4cddc98a664 100644 --- a/packages/service-library/src/servicelib/rest_oas.py +++ b/packages/service-library/src/servicelib/rest_oas.py @@ -4,11 +4,10 @@ """ from aiohttp import web - from openapi_core.schema.specs.models import Spec -from .openapi import create_specs from .application_keys import APP_OPENAPI_SPECS_KEY +from .openapi import create_specs def set_specs(app: web.Application, specs: Spec) -> Spec: diff --git a/packages/service-library/src/servicelib/rest_responses.py b/packages/service-library/src/servicelib/rest_responses.py index 5282887cfe8..00a35e56972 100644 --- a/packages/service-library/src/servicelib/rest_responses.py +++ b/packages/service-library/src/servicelib/rest_responses.py @@ -1,14 +1,13 @@ """ Utils to check, convert and compose server responses for the RESTApi """ -from typing import Dict, Mapping, Tuple, Optional, List +from typing import Dict, List, Mapping, Optional, Tuple import attr from aiohttp import web -from .rest_models import LogMessageType -from .rest_codecs import jsonify, json -from .rest_models import ErrorItemType, ErrorType +from .rest_codecs import json, jsonify +from .rest_models import ErrorItemType, ErrorType, LogMessageType ENVELOPE_KEYS = ("data", "error") JSON_CONTENT_TYPE = "application/json" diff --git a/packages/service-library/src/servicelib/utils.py b/packages/service-library/src/servicelib/utils.py index 150452576cb..213cea22b6a 100644 --- a/packages/service-library/src/servicelib/utils.py +++ b/packages/service-library/src/servicelib/utils.py @@ -66,22 +66,38 @@ async def logged_gather( *tasks, reraise: bool = True, log: logging.Logger = logger ) -> List[Any]: """ - *all* coroutine passed are executed concurrently and once they are all - completed, the first error (if any) is reraised or all returned - - log: passing the logger gives a chance to identify the origin of the gather 
call + Thin wrapper around asyncio.gather that allows executing ALL tasks concurrently until the end + even if any of them fail. Finally, all errors are logged and the first raised (if reraise=True) + as asyncio.gather would do with return_exceptions=True + + WARNING: Notice that not stopping after the first exception is raised, adds the + risk that some tasks might terminate with unhandled exceptions. To avoid this + use directly asyncio.gather(*tasks, return_exceptions=True). + + :param reraise: reraises first exception (in order the tasks were passed) concurrent tasks, defaults to True + :type reraise: bool, optional + :param log: passing the logger gives a chance to identify the origin of the gather call, defaults to current submodule's logger + :type log: logging.Logger, optional + :return: list of tasks results and errors e.g. [1, 2, ValueError("task3 went wrong"), 33, "foo"] + :rtype: List[Any] """ results = await asyncio.gather(*tasks, return_exceptions=True) - for value in results: - # WARN: note that ONLY THE FIRST exception is raised + + error = None + for i, value in enumerate(results): if isinstance(value, Exception): - if reraise: - raise value - # Exception is returned, therefore it is not logged as error but as warning - # It was user's decision not to reraise them log.warning( - "Exception occured while running task %s in gather: %s", - str(tasks[results.index(value)]), + "Error in %i-th concurrent task %s: %s", + i + 1, + str(tasks[i]), str(value), ) + if not error: + error = value + + if reraise and error: + # WARNING: Notice that ONLY THE FIRST exception is raised. + # The rest is all logged above. 
+ raise error + return results diff --git a/packages/service-library/tests/conftest.py b/packages/service-library/tests/conftest.py index 1c51329b588..d1d3e0e9400 100644 --- a/packages/service-library/tests/conftest.py +++ b/packages/service-library/tests/conftest.py @@ -5,12 +5,12 @@ import sys from pathlib import Path +import pytest import yaml + import servicelib from servicelib.openapi import create_openapi_specs -import pytest - @pytest.fixture(scope="session") def here(): diff --git a/packages/service-library/tests/test_application_setup.py b/packages/service-library/tests/test_application_setup.py index 8abd9ffe404..b6b360f27e1 100644 --- a/packages/service-library/tests/test_application_setup.py +++ b/packages/service-library/tests/test_application_setup.py @@ -1,6 +1,7 @@ # pylint:disable=unused-variable # pylint:disable=unused-argument # pylint:disable=redefined-outer-name + import logging from typing import Dict diff --git a/packages/service-library/tests/test_decorators.py b/packages/service-library/tests/test_decorators.py index a6a6cae3a2b..dcdf359af4a 100644 --- a/packages/service-library/tests/test_decorators.py +++ b/packages/service-library/tests/test_decorators.py @@ -1,3 +1,7 @@ +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name + from servicelib.decorators import safe_return diff --git a/packages/service-library/tests/test_incidents_monitoring.py b/packages/service-library/tests/test_incidents_monitoring.py index 3cfe4b9400e..3cbc39004f8 100644 --- a/packages/service-library/tests/test_incidents_monitoring.py +++ b/packages/service-library/tests/test_incidents_monitoring.py @@ -7,12 +7,12 @@ import pytest +from servicelib import monitor_slow_callbacks from servicelib.aiopg_utils import ( DatabaseError, postgres_service_retry_policy_kwargs, retry, ) -from servicelib import monitor_slow_callbacks async def slow_task(delay): diff --git 
a/packages/service-library/tests/test_openapi_validation.py b/packages/service-library/tests/test_openapi_validation.py index 9d53d216eba..706df603309 100644 --- a/packages/service-library/tests/test_openapi_validation.py +++ b/packages/service-library/tests/test_openapi_validation.py @@ -3,8 +3,9 @@ SEE https://github.com/p1c2u/openapi-core """ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name import pytest from aiohttp import web diff --git a/packages/service-library/tests/test_package.py b/packages/service-library/tests/test_package.py index ee953ced402..ddec9e5b7ee 100644 --- a/packages/service-library/tests/test_package.py +++ b/packages/service-library/tests/test_package.py @@ -7,8 +7,9 @@ from pathlib import Path import pytest -from servicelib.utils import is_osparc_repo_dir, search_osparc_repo_dir + from pytest_simcore.helpers.utils_pylint import assert_pylint_is_passing +from servicelib.utils import is_osparc_repo_dir, search_osparc_repo_dir @pytest.fixture diff --git a/packages/service-library/tests/test_rest_middlewares.py b/packages/service-library/tests/test_rest_middlewares.py index 180b295071a..4e33964e72c 100644 --- a/packages/service-library/tests/test_rest_middlewares.py +++ b/packages/service-library/tests/test_rest_middlewares.py @@ -1,8 +1,10 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name import pytest from aiohttp import web + from servicelib import openapi from servicelib.application_keys import APP_OPENAPI_SPECS_KEY from servicelib.rest_middlewares import ( diff --git a/packages/service-library/tests/test_rest_routing.py b/packages/service-library/tests/test_rest_routing.py index fb4bc5e5f86..3d0b163a1de 100644 --- a/packages/service-library/tests/test_rest_routing.py +++ 
b/packages/service-library/tests/test_rest_routing.py @@ -1,5 +1,6 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name import pytest diff --git a/packages/service-library/tests/test_sandbox.py b/packages/service-library/tests/test_sandbox.py index 18d0f9ae519..734bb477e80 100644 --- a/packages/service-library/tests/test_sandbox.py +++ b/packages/service-library/tests/test_sandbox.py @@ -1,6 +1,7 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=W0212 +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name + import pytest from servicelib import openapi diff --git a/packages/service-library/tests/test_utils.py b/packages/service-library/tests/test_utils.py new file mode 100644 index 00000000000..a903f98ef83 --- /dev/null +++ b/packages/service-library/tests/test_utils.py @@ -0,0 +1,86 @@ +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name + +import asyncio + +import pytest + +from servicelib.utils import logged_gather + + +async def _value_error(uid, *, delay=1): + await _succeed(delay) + raise ValueError(f"task#{uid}") + + +async def _runtime_error(uid, *, delay=1): + await _succeed(delay) + raise RuntimeError(f"task#{uid}") + + +async def _succeed(uid, *, delay=1): + print(f"task#{uid} begin") + await asyncio.sleep(delay) + print(f"task#{uid} end") + return uid + + +@pytest.fixture +def coros(): + coros = [ + _succeed(0), + _value_error(1, delay=2), + _succeed(2), + _runtime_error(3), + _value_error(4, delay=0), + _succeed(5), + ] + return coros + + +@pytest.fixture +def mock_logger(mocker): + mock_logger = mocker.Mock() + + yield mock_logger + + assert mock_logger.mock_calls + mock_logger.warning.assert_called() + assert ( + len(mock_logger.warning.mock_calls) == 3 + ), "Expected all 
3 errors ALWAYS logged as warnings" + + +async def test_logged_gather(loop, coros, mock_logger): + + with pytest.raises(ValueError) as excinfo: + await logged_gather(*coros, reraise=True, log=mock_logger) + + # NOTE: #4 fails first, the one raised in #1 + assert "task#1" in str(excinfo.value) + + # NOTE: only first error in the list is raised, since it is not RuntimeError, that task + assert isinstance(excinfo.value, ValueError) + + for task in asyncio.Task.all_tasks(loop): + if task is not asyncio.Task.current_task(): + # info + task.print_stack() + + if task.exception(): + assert type(task.exception()) in [ValueError, RuntimeError] + + assert task.done() + assert not task.cancelled() + + +async def test_logged_gather_wo_raising(coros, mock_logger): + results = await logged_gather(*coros, reraise=False, log=mock_logger) + + assert results[0] == 0 + assert isinstance(results[1], ValueError) + assert results[2] == 2 + assert isinstance(results[3], RuntimeError) + assert isinstance(results[4], ValueError) + assert results[5] == 5 diff --git a/packages/service-library/tests/tutils.py b/packages/service-library/tests/tutils.py index 29a2165e2d4..bce55348625 100644 --- a/packages/service-library/tests/tutils.py +++ b/packages/service-library/tests/tutils.py @@ -6,6 +6,7 @@ import attr from aiohttp import web + from servicelib.rest_codecs import DataEncoder diff --git a/packages/service-library/tests/with_postgres/test_aiopg_utils.py b/packages/service-library/tests/with_postgres/test_aiopg_utils.py index 90058be0c45..fac66209d9e 100644 --- a/packages/service-library/tests/with_postgres/test_aiopg_utils.py +++ b/packages/service-library/tests/with_postgres/test_aiopg_utils.py @@ -3,6 +3,7 @@ # pylint:disable=too-many-arguments # pylint:disable=broad-except +import asyncio import logging import sys from copy import deepcopy @@ -13,7 +14,7 @@ import pytest import sqlalchemy as sa from aiohttp import web -import asyncio + from servicelib.aiopg_utils import ( DatabaseError, 
DataSourceName, diff --git a/packages/simcore-sdk/requirements/_base.in b/packages/simcore-sdk/requirements/_base.in index 60d58bd0653..27e14d59700 100644 --- a/packages/simcore-sdk/requirements/_base.in +++ b/packages/simcore-sdk/requirements/_base.in @@ -9,3 +9,5 @@ psycopg2-binary pydantic tenacity trafaret-config + +attrs<20,>=19 # from pytest-docker==0.8.0 diff --git a/packages/simcore-sdk/requirements/_base.txt b/packages/simcore-sdk/requirements/_base.txt index 3b68472699a..345b0a7d24f 100644 --- a/packages/simcore-sdk/requirements/_base.txt +++ b/packages/simcore-sdk/requirements/_base.txt @@ -8,21 +8,21 @@ aiofiles==0.5.0 # via -r requirements/_base.in aiohttp==3.6.2 # via -r requirements/_base.in aiopg[sa]==1.0.0 # via -r requirements/_base.in async-timeout==3.0.1 # via aiohttp -attrs==19.3.0 # via aiohttp +attrs==19.3.0 # via -r requirements/_base.in, aiohttp chardet==3.0.4 # via aiohttp dataclasses==0.7 # via pydantic decorator==4.4.2 # via networkx idna-ssl==1.1.0 # via aiohttp idna==2.10 # via idna-ssl, yarl multidict==4.7.6 # via aiohttp, yarl -networkx==2.4 # via -r requirements/_base.in +networkx==2.5 # via -r requirements/_base.in psycopg2-binary==2.8.5 # via -r requirements/_base.in, aiopg, sqlalchemy pydantic==1.6.1 # via -r requirements/_base.in pyyaml==5.3.1 # via trafaret-config six==1.15.0 # via tenacity -sqlalchemy[postgresql_psycopg2binary]==1.3.18 # via aiopg +sqlalchemy[postgresql_psycopg2binary]==1.3.19 # via aiopg tenacity==6.2.0 # via -r requirements/_base.in trafaret-config==2.0.2 # via -r requirements/_base.in trafaret==2.0.2 # via trafaret-config -typing-extensions==3.7.4.2 # via aiohttp, yarl +typing-extensions==3.7.4.3 # via aiohttp, yarl yarl==1.5.1 # via aiohttp diff --git a/packages/simcore-sdk/requirements/_test.txt b/packages/simcore-sdk/requirements/_test.txt index 0e88dea3e5b..303a2b53524 100644 --- a/packages/simcore-sdk/requirements/_test.txt +++ b/packages/simcore-sdk/requirements/_test.txt @@ -22,20 +22,20 @@ 
dataclasses==0.7 # via -r requirements/_base.txt, pydantic decorator==4.4.2 # via -r requirements/_base.txt, networkx distro==1.5.0 # via docker-compose docker-compose==1.26.2 # via pytest-docker -docker[ssh]==4.3.0 # via -r requirements/_test.in, docker-compose +docker[ssh]==4.3.1 # via -r requirements/_test.in, docker-compose dockerpty==0.4.1 # via docker-compose docopt==0.6.2 # via coveralls, docker-compose idna-ssl==1.1.0 # via -r requirements/_base.txt, aiohttp idna==2.10 # via -r requirements/_base.txt, idna-ssl, requests, yarl importlib-metadata==1.7.0 # via jsonschema, pluggy, pytest -isort==4.3.21 # via pylint +isort==5.4.2 # via pylint jsonschema==3.2.0 # via docker-compose lazy-object-proxy==1.4.3 # via astroid mccabe==0.6.1 # via pylint mock==4.0.2 # via -r requirements/_test.in more-itertools==8.4.0 # via pytest multidict==4.7.6 # via -r requirements/_base.txt, aiohttp, yarl -networkx==2.4 # via -r requirements/_base.txt +networkx==2.5 # via -r requirements/_base.txt packaging==20.4 # via pytest, pytest-sugar paramiko==2.7.1 # via docker pluggy==0.13.1 # via pytest @@ -43,7 +43,7 @@ psycopg2-binary==2.8.5 # via -r requirements/_base.txt, aiopg, sqlalchemy py==1.9.0 # via pytest pycparser==2.20 # via cffi pydantic==1.6.1 # via -r requirements/_base.txt -pylint==2.5.3 # via -r requirements/_test.in +pylint==2.6.0 # via -r requirements/_test.in pynacl==1.4.0 # via paramiko pyparsing==2.4.7 # via packaging pyrsistent==0.16.0 # via jsonschema @@ -51,7 +51,7 @@ pytest-aiohttp==0.3.0 # via -r requirements/_test.in pytest-cov==2.10.1 # via -r requirements/_test.in pytest-docker==0.8.0 # via -r requirements/_test.in pytest-instafail==0.4.2 # via -r requirements/_test.in -pytest-mock==3.2.0 # via -r requirements/_test.in +pytest-mock==3.3.0 # via -r requirements/_test.in pytest-runner==5.2 # via -r requirements/_test.in pytest-sugar==0.9.4 # via -r requirements/_test.in pytest==5.4.3 # via -r requirements/_test.in, pytest-aiohttp, pytest-cov, pytest-docker, 
pytest-instafail, pytest-mock, pytest-sugar @@ -59,7 +59,7 @@ python-dotenv==0.14.0 # via -r requirements/_test.in, docker-compose pyyaml==5.3.1 # via -r requirements/_base.txt, docker-compose, trafaret-config requests==2.24.0 # via -r requirements/_test.in, coveralls, docker, docker-compose six==1.15.0 # via -r requirements/_base.txt, astroid, bcrypt, cryptography, docker, docker-compose, dockerpty, jsonschema, packaging, pynacl, pyrsistent, tenacity, websocket-client -sqlalchemy[postgresql_psycopg2binary]==1.3.18 # via -r requirements/_base.txt, aiopg +sqlalchemy[postgresql_psycopg2binary]==1.3.19 # via -r requirements/_base.txt, aiopg tenacity==6.2.0 # via -r requirements/_base.txt termcolor==1.1.0 # via pytest-sugar texttable==1.6.2 # via docker-compose @@ -67,7 +67,7 @@ toml==0.10.1 # via pylint trafaret-config==2.0.2 # via -r requirements/_base.txt trafaret==2.0.2 # via -r requirements/_base.txt, trafaret-config typed-ast==1.4.1 # via astroid -typing-extensions==3.7.4.2 # via -r requirements/_base.txt, aiohttp, yarl +typing-extensions==3.7.4.3 # via -r requirements/_base.txt, aiohttp, yarl urllib3==1.25.10 # via requests wcwidth==0.2.5 # via pytest websocket-client==0.57.0 # via docker, docker-compose diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports/_data_item.py b/packages/simcore-sdk/src/simcore_sdk/node_ports/_data_item.py index d73a46d53e1..906a8bd8b1c 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports/_data_item.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports/_data_item.py @@ -28,4 +28,4 @@ def __new__(cls, **kwargs): return self def __init__(self, **_kwargs): - super(DataItem, self).__init__() + super().__init__() diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports/exceptions.py b/packages/simcore-sdk/src/simcore_sdk/node_ports/exceptions.py index fe1d636ae22..c62e2a85d63 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports/exceptions.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports/exceptions.py 
@@ -5,17 +5,14 @@ class NodeportsException(Exception): """Basic exception for errors raised in nodeports""" def __init__(self, msg=None): - if msg is None: - msg = "An error occured in simcore" - super(NodeportsException, self).__init__(msg) + super().__init__(msg or "An error occured in simcore") class ReadOnlyError(NodeportsException): """Trying to modify read-only object""" def __init__(self, obj): - msg = "Trying to modify read-only object %s" % (obj) - super(ReadOnlyError, self).__init__(msg) + super().__init__(f"Trying to modify read-only object {obj}") self.obj = obj @@ -23,11 +20,9 @@ class WrongProtocolVersionError(NodeportsException): """Using wrong protocol version""" def __init__(self, expected_version, found_version): - msg = "Expecting version %s, found version %s" % ( - expected_version, - found_version, + super().__init__( + f"Expecting version {expected_version}, found version {found_version}" ) - super(WrongProtocolVersionError, self).__init__(msg) self.expected_version = expected_version self.found_version = found_version @@ -36,8 +31,7 @@ class UnboundPortError(NodeportsException, IndexError): """Accessed port is not configured""" def __init__(self, port_index, msg=None): - msg = "No port bound at index %s" % (port_index) - super(UnboundPortError, self).__init__(msg) + super().__init__(f"No port bound at index {port_index}") self.port_index = port_index @@ -45,8 +39,7 @@ class InvalidKeyError(NodeportsException): """Accessed key does not exist""" def __init__(self, item_key, msg=None): - msg = "No port bound with key %s" % (item_key) - super(InvalidKeyError, self).__init__(msg) + super().__init__("No port bound with key {item_key}") self.item_key = item_key @@ -54,11 +47,9 @@ class InvalidItemTypeError(NodeportsException): """Item type incorrect""" def __init__(self, item_type, item_value): - msg = "Invalid item type, %s is set as being a %s type" % ( - item_value, - item_type, + super().__init__( + "Invalid item type, {item_value} is set as 
being a {item_type} type" ) - super(InvalidItemTypeError, self).__init__(msg) self.item_type = item_type self.item_value = item_value @@ -67,40 +58,30 @@ class InvalidProtocolError(NodeportsException): """Invalid protocol used""" def __init__(self, dct, msg=None): - msg = "Invalid protocol used: %s\n%s" % (dct, msg) - super(InvalidProtocolError, self).__init__(msg) + super().__init__(f"Invalid protocol used: {dct}\n{msg}") self.dct = dct class StorageInvalidCall(NodeportsException): """S3 transfer error""" - def __init__(self, msg): - super(StorageInvalidCall, self).__init__(msg) - class StorageServerIssue(NodeportsException): """S3 transfer error""" - def __init__(self, msg): - super(StorageServerIssue, self).__init__(msg) - class S3TransferError(NodeportsException): """S3 transfer error""" def __init__(self, msg=None): - if not msg: - msg = "Error while transferring to/from S3 storage" - super(S3TransferError, self).__init__(msg) + super().__init__(msg or "Error while transferring to/from S3 storage") class S3InvalidPathError(NodeportsException): """S3 transfer error""" def __init__(self, s3_object_name): - msg = "No object in S3 storage at {object}".format(object=s3_object_name) - super(S3InvalidPathError, self).__init__(msg) + super().__init__(f"No object in S3 storage at {s3_object_name}") self.object_name = s3_object_name @@ -108,8 +89,7 @@ class S3InvalidStore(NodeportsException): """S3 transfer error""" def __init__(self, s3_store): - msg = "Invalid store used: {store}".format(store=s3_store) - super(S3InvalidStore, self).__init__(msg) + super().__init__(f"Invalid store used: {s3_store}") self.store = s3_store @@ -117,23 +97,16 @@ class StorageConnectionError(NodeportsException): """S3 transfer error""" def __init__(self, s3_store, additional_msg=None): - msg = "Connection to store {store} failed: {yamsg}".format( - store=s3_store, yamsg=additional_msg - ) - super(StorageConnectionError, self).__init__(msg) + super().__init__(f"Connection to store 
{s3_store} failed: {additional_msg}") self.store = s3_store class PortNotFound(NodeportsException): """Accessed key does not exist""" - def __init__(self, msg): - super(PortNotFound, self).__init__(msg) - class NodeNotFound(NodeportsException): """The given node_uuid was not found""" def __init__(self, node_uuid): - message = f"the node id {node_uuid} was not found" - super(NodeNotFound, self).__init__(message) + super().__init__(f"the node id {node_uuid} was not found") diff --git a/scripts/requirements/Makefile b/scripts/requirements/Makefile index 1c9ba6d6b35..49d3f8cc343 100644 --- a/scripts/requirements/Makefile +++ b/scripts/requirements/Makefile @@ -44,6 +44,7 @@ touch: ## touches all package requirement inputs $(foreach p,${_input-requirements},touch $(p);) reqs: _check_py36_version ## updates requirements of all package libraries + # Upgrading $(upgrade) requirements $(foreach p,${_input-requirements},touch $(p); $(MAKE_C) $(dir $(p)) reqs $(UPGRADE_OPTION);) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/users.py b/services/api-server/src/simcore_service_api_server/api/routes/users.py index 096c39e2b9e..94a9fb0e87c 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/users.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/users.py @@ -25,9 +25,9 @@ async def get_my_profile( data["role"] = data["role"].upper() try: profile = Profile.parse_obj(data) - except ValidationError: + except ValidationError as err: logger.exception("webserver invalid response") - raise HTTPException(status.HTTP_503_SERVICE_UNAVAILABLE) + raise HTTPException(status.HTTP_503_SERVICE_UNAVAILABLE) from err return profile diff --git a/services/api-server/src/simcore_service_api_server/core/settings.py b/services/api-server/src/simcore_service_api_server/core/settings.py index 8fd229c62b3..bf8c6e6ddea 100644 --- a/services/api-server/src/simcore_service_api_server/core/settings.py +++ 
b/services/api-server/src/simcore_service_api_server/core/settings.py @@ -83,9 +83,10 @@ def create_default(cls) -> "AppSettings": def match_logging_level(cls, value) -> str: try: getattr(logging, value.upper()) - except AttributeError: - raise ValueError(f"{value.upper()} is not a valid level") - return value.upper() + return value.upper() + except AttributeError as err: + raise ValueError(f"{value.upper()} is not a valid level") from err + @property def loglevel(self) -> int: @@ -98,7 +99,7 @@ def loglevel(self) -> int: webserver: WebServerSettings # SERVICE SERVER (see : https://www.uvicorn.org/settings/) - host: str = "0.0.0.0" # "0.0.0.0" if is_containerized else "127.0.0.1", + host: str = "0.0.0.0" # nosec port: int = 8000 debug: bool = False # If True, debug tracebacks should be returned on errors. diff --git a/services/api-server/src/simcore_service_api_server/services/remote_debug.py b/services/api-server/src/simcore_service_api_server/services/remote_debug.py index a9568a73b12..04a0c6beba3 100644 --- a/services/api-server/src/simcore_service_api_server/services/remote_debug.py +++ b/services/api-server/src/simcore_service_api_server/services/remote_debug.py @@ -26,10 +26,10 @@ def setup_remote_debugging(force_enabled=False, *, boot_mode=None): ptvsd.enable_attach( address=("0.0.0.0", REMOTE_DEBUG_PORT), redirect_output=True # nosec ) # nosec - except ImportError: + except ImportError as err: raise ValueError( "Cannot enable remote debugging. 
Please install ptvsd first" - ) + ) from err logger.info("Remote debugging enabled: listening port %s", REMOTE_DEBUG_PORT) else: diff --git a/services/api-server/src/simcore_service_api_server/services/webserver.py b/services/api-server/src/simcore_service_api_server/services/webserver.py index 6d53909fdc8..a4f791222b8 100644 --- a/services/api-server/src/simcore_service_api_server/services/webserver.py +++ b/services/api-server/src/simcore_service_api_server/services/webserver.py @@ -1,6 +1,7 @@ import base64 import json import logging +from contextlib import suppress from typing import Dict, Optional import attr @@ -45,12 +46,10 @@ def setup_webserver(app: FastAPI) -> None: async def close_webserver(app: FastAPI) -> None: - try: + with suppress(AttributeError): client: AsyncClient = app.state.webserver_client await client.aclose() del app.state.webserver_client - except AttributeError: - pass logger.debug("Webserver closed successfully") @@ -115,9 +114,9 @@ async def get(self, path: str) -> Optional[Dict]: url = self._url(path) try: resp = await self.client.get(url, cookies=self.session_cookies) - except Exception: + except Exception as err: logger.exception("Failed to get %s", url) - raise HTTPException(status.HTTP_503_SERVICE_UNAVAILABLE) + raise HTTPException(status.HTTP_503_SERVICE_UNAVAILABLE) from err return self._process(resp) @@ -125,8 +124,8 @@ async def put(self, path: str, body: Dict) -> Optional[Dict]: url = self._url(path) try: resp = await self.client.put(url, json=body, cookies=self.session_cookies) - except Exception: + except Exception as err: logger.exception("Failed to put %s", url) - raise HTTPException(status.HTTP_503_SERVICE_UNAVAILABLE) + raise HTTPException(status.HTTP_503_SERVICE_UNAVAILABLE) from err return self._process(resp) diff --git a/services/catalog/src/simcore_service_catalog/api/routes/services.py b/services/catalog/src/simcore_service_catalog/api/routes/services.py index 19afc3a482f..a909cdd517e 100644 --- 
a/services/catalog/src/simcore_service_catalog/api/routes/services.py +++ b/services/catalog/src/simcore_service_catalog/api/routes/services.py @@ -13,8 +13,8 @@ VERSION_RE, ServiceAccessRightsAtDB, ServiceMetaDataAtDB, - ServiceUpdate, ServiceOut, + ServiceUpdate, ) from ..dependencies.database import get_repository from ..dependencies.director import AuthSession, get_director_session @@ -156,7 +156,9 @@ async def get_service( detail="You have insufficient rights to access the service", ) # access is allowed, override some of the values with what is in the db - service = service.copy(update=service_in_db.dict(exclude_unset=True, exclude={"owner"})) + service = service.copy( + update=service_in_db.dict(exclude_unset=True, exclude={"owner"}) + ) # the owner shall be converted to an email address if service_in_db.owner: service.owner = await groups_repository.get_user_email_from_gid( diff --git a/services/catalog/src/simcore_service_catalog/core/settings.py b/services/catalog/src/simcore_service_catalog/core/settings.py index 8f5b98eb9ec..eea61b32d42 100644 --- a/services/catalog/src/simcore_service_catalog/core/settings.py +++ b/services/catalog/src/simcore_service_catalog/core/settings.py @@ -82,8 +82,8 @@ def create_default(cls) -> "AppSettings": def match_logging_level(cls, value) -> str: try: getattr(logging, value.upper()) - except AttributeError: - raise ValueError(f"{value.upper()} is not a valid level") + except AttributeError as err: + raise ValueError(f"{value.upper()} is not a valid level") from err return value.upper() @property diff --git a/services/catalog/src/simcore_service_catalog/db/repositories/projects.py b/services/catalog/src/simcore_service_catalog/db/repositories/projects.py index 08185761274..2554c67f352 100644 --- a/services/catalog/src/simcore_service_catalog/db/repositories/projects.py +++ b/services/catalog/src/simcore_service_catalog/db/repositories/projects.py @@ -1,3 +1,4 @@ +import logging from typing import List import sqlalchemy as 
sa @@ -7,8 +8,6 @@ from ..tables import ProjectType, projects from ._base import BaseRepository -import logging - logger = logging.getLogger(__name__) @@ -25,7 +24,10 @@ async def list_services_from_published_templates(self) -> List[ServiceKeyVersion for node in project_workbench: service = project_workbench[node] try: - if "file-picker" in service["key"] or "nodes-group" in service["key"]: + if ( + "file-picker" in service["key"] + or "nodes-group" in service["key"] + ): # these 2 are not going to pass the validation tests, they are frontend only nodes. continue list_of_published_services.append(ServiceKeyVersion(**service)) diff --git a/services/catalog/src/simcore_service_catalog/models/domain/group.py b/services/catalog/src/simcore_service_catalog/models/domain/group.py index 4c27b500a2c..cd868a0dc72 100644 --- a/services/catalog/src/simcore_service_catalog/models/domain/group.py +++ b/services/catalog/src/simcore_service_catalog/models/domain/group.py @@ -2,6 +2,7 @@ from pydantic import BaseModel, Field from pydantic.types import PositiveInt + from ...db.tables import GroupType diff --git a/services/catalog/src/simcore_service_catalog/services/director.py b/services/catalog/src/simcore_service_catalog/services/director.py index 853297654ac..f5e877c94dd 100644 --- a/services/catalog/src/simcore_service_catalog/services/director.py +++ b/services/catalog/src/simcore_service_catalog/services/director.py @@ -1,16 +1,15 @@ import json import logging +from contextlib import suppress from typing import Dict, Optional import attr from fastapi import FastAPI, HTTPException from httpx import AsyncClient, Response, StatusCode - from starlette import status from ..core.settings import DirectorSettings - logger = logging.getLogger(__name__) @@ -28,12 +27,11 @@ def setup_director(app: FastAPI) -> None: async def close_director(app: FastAPI) -> None: - try: + with suppress(AttributeError): client: AsyncClient = app.state.director_client await client.aclose() del 
app.state.director_client - except AttributeError: - pass + logger.debug("Director client closed successfully") @@ -98,9 +96,9 @@ async def get(self, path: str) -> Optional[Dict]: url = self._url(path) try: resp = await self.client.get(url) - except Exception: + except Exception as err: logger.exception("Failed to get %s", url) - raise HTTPException(status.HTTP_503_SERVICE_UNAVAILABLE) + raise HTTPException(status.HTTP_503_SERVICE_UNAVAILABLE) from err return self._process(resp) @@ -108,8 +106,8 @@ async def put(self, path: str, body: Dict) -> Optional[Dict]: url = self._url(path) try: resp = await self.client.put(url, json=body) - except Exception: + except Exception as err: logger.exception("Failed to put %s", url) - raise HTTPException(status.HTTP_503_SERVICE_UNAVAILABLE) + raise HTTPException(status.HTTP_503_SERVICE_UNAVAILABLE) from err return self._process(resp) diff --git a/services/catalog/src/simcore_service_catalog/services/remote_debug.py b/services/catalog/src/simcore_service_catalog/services/remote_debug.py index d1ba21d5016..e47f7efd180 100644 --- a/services/catalog/src/simcore_service_catalog/services/remote_debug.py +++ b/services/catalog/src/simcore_service_catalog/services/remote_debug.py @@ -26,10 +26,10 @@ def setup_remote_debugging(force_enabled=False, *, boot_mode=None): ptvsd.enable_attach( address=("0.0.0.0", REMOTE_DEBUG_PORT), # nosec ) # nosec - except ImportError: + except ImportError as err: raise ValueError( "Cannot enable remote debugging. 
Please install ptvsd first" - ) + ) from err logger.info("Remote debugging enabled: listening port %s", REMOTE_DEBUG_PORT) else: diff --git a/services/catalog/tests/unit/test_package.py b/services/catalog/tests/unit/test_package.py index 11121fc8d4d..2b2a685b40a 100644 --- a/services/catalog/tests/unit/test_package.py +++ b/services/catalog/tests/unit/test_package.py @@ -7,7 +7,6 @@ from pathlib import Path import pytest - from pytest_simcore.helpers.utils_pylint import assert_pylint_is_passing diff --git a/services/catalog/tests/unit/test_schemas.py b/services/catalog/tests/unit/test_schemas.py index 0cfe82f8042..6d0ccf74e3c 100644 --- a/services/catalog/tests/unit/test_schemas.py +++ b/services/catalog/tests/unit/test_schemas.py @@ -6,8 +6,8 @@ import json import pytest - import simcore_postgres_database.models.direct_acyclic_graphs as orm + from simcore_service_catalog.db import tables from simcore_service_catalog.models.domain.dag import DAGAtDB from simcore_service_catalog.models.schemas.dag import DAGIn, DAGOut diff --git a/services/catalog/tests/unit/with_dbs/conftest.py b/services/catalog/tests/unit/with_dbs/conftest.py index 340e94d01c4..514ea71c639 100644 --- a/services/catalog/tests/unit/with_dbs/conftest.py +++ b/services/catalog/tests/unit/with_dbs/conftest.py @@ -8,11 +8,11 @@ import pytest import sqlalchemy as sa - from fastapi import FastAPI +from starlette.testclient import TestClient + from simcore_service_catalog.api.dependencies.director import get_director_session from simcore_service_catalog.core.application import init_app -from starlette.testclient import TestClient current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent diff --git a/services/catalog/tests/unit/with_dbs/test_entrypoint_dags.py b/services/catalog/tests/unit/with_dbs/test_entrypoint_dags.py index b8c1d3243bd..623b06fcbf9 100644 --- a/services/catalog/tests/unit/with_dbs/test_entrypoint_dags.py +++ 
b/services/catalog/tests/unit/with_dbs/test_entrypoint_dags.py @@ -5,9 +5,10 @@ from typing import Dict, List import pytest +from starlette.testclient import TestClient + from simcore_service_catalog.__version__ import api_version from simcore_service_catalog.models.schemas.meta import Meta -from starlette.testclient import TestClient core_services = ["postgres"] ops_services = ["adminer"] diff --git a/services/catalog/tests/unit/with_dbs/test_entrypoint_services.py b/services/catalog/tests/unit/with_dbs/test_entrypoint_services.py index d088b1bde8c..1acdaf99390 100644 --- a/services/catalog/tests/unit/with_dbs/test_entrypoint_services.py +++ b/services/catalog/tests/unit/with_dbs/test_entrypoint_services.py @@ -9,19 +9,19 @@ import pytest from fastapi import FastAPI -from simcore_service_catalog.api.dependencies.database import get_repository -from simcore_service_catalog.db.repositories.groups import GroupsRepository -from simcore_service_catalog.db.repositories.services import ServicesRepository -from simcore_service_catalog.models.domain.group import GroupAtDB, GroupType from starlette.testclient import TestClient from yarl import URL +from simcore_service_catalog.api.dependencies.database import get_repository from simcore_service_catalog.api.dependencies.director import get_director_session from simcore_service_catalog.api.routes import services +from simcore_service_catalog.db.repositories.groups import GroupsRepository +from simcore_service_catalog.db.repositories.services import ServicesRepository +from simcore_service_catalog.models.domain.group import GroupAtDB, GroupType from simcore_service_catalog.models.domain.service import ( ServiceAccessRightsAtDB, - ServiceType, ServiceOut, + ServiceType, ) core_services = ["postgres"] diff --git a/services/director/src/simcore_service_director/exceptions.py b/services/director/src/simcore_service_director/exceptions.py index a8448bad7cc..b76c854a6d8 100644 --- 
a/services/director/src/simcore_service_director/exceptions.py +++ b/services/director/src/simcore_service_director/exceptions.py @@ -26,17 +26,14 @@ class DirectorException(Exception): """Basic exception""" def __init__(self, msg: Optional[str] = None): - if msg is None: - msg = "Unexpected error was triggered" - super(DirectorException, self).__init__(msg) + super().__init__(msg or "Unexpected error was triggered") class GenericDockerError(DirectorException): """Generic docker library error""" def __init__(self, msg: str, original_exception: Exception): - msg = msg + (": %s" % original_exception) - super(GenericDockerError, self).__init__(msg) + super().__init__(msg + ": {original_exception}") self.original_exception = original_exception @@ -44,10 +41,8 @@ class ServiceNotAvailableError(DirectorException): """Service not found""" def __init__(self, service_name: str, service_tag: Optional[str] = None): - if not service_tag: - service_tag = "not defined" - msg = "The service %s:%s does not exist" % (service_name, service_tag) - super(ServiceNotAvailableError, self).__init__(msg) + service_tag = service_tag or "UNDEFINED" + super().__init__(f"The service {service_name}:{service_tag} does not exist") self.service_name = service_name self.service_tag = service_tag @@ -56,8 +51,7 @@ class ServiceUUIDNotFoundError(DirectorException): """Service not found""" def __init__(self, service_uuid: str): - msg = "The service with uuid %s was not found" % (service_uuid) - super(ServiceUUIDNotFoundError, self).__init__(msg) + super().__init__(f"The service with uuid {service_uuid} was not found") self.service_uuid = service_uuid @@ -65,8 +59,7 @@ class ServiceUUIDInUseError(DirectorException): """Service UUID is already in use""" def __init__(self, service_uuid: str): - msg = "The service uuid %s is already in use" % (service_uuid) - super(ServiceUUIDInUseError, self).__init__(msg) + super().__init__(f"The service uuid {service_uuid} is already in use") self.service_uuid = 
service_uuid @@ -74,16 +67,13 @@ class RegistryConnectionError(DirectorException): """Error while connecting to the docker regitry""" def __init__(self, msg: str): - if msg is None: - msg = "Unexpected connection error while accessing registry" - super(RegistryConnectionError, self).__init__(msg) + super().__init__(msg or "Unexpected connection error while accessing registry") class ServiceStartTimeoutError(DirectorException): """The service was created but never run (time-out)""" def __init__(self, service_name: str, service_uuid: str): - msg = "Service %s:%s failed to start " % (service_name, service_uuid) - super(ServiceStartTimeoutError, self).__init__(msg) + super().__init__(f"Service {service_name}:{service_uuid} failed to start ") self.service_name = service_name self.service_uuid = service_uuid diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py index 33be4bac537..7770dcbfc5c 100644 --- a/services/director/src/simcore_service_director/producer.py +++ b/services/director/src/simcore_service_director/producer.py @@ -978,7 +978,9 @@ async def stop_service(app: web.Application, node_uuid: str) -> None: await client.services.delete(service["Spec"]["Name"]) log.debug("removed services, now removing network...") except aiodocker.exceptions.DockerError as err: - raise exceptions.GenericDockerError("Error while removing services", err) + raise exceptions.GenericDockerError( + "Error while removing services", err + ) from err # remove network(s) await _remove_overlay_network_of_swarm(client, node_uuid) log.debug("removed network") diff --git a/services/director/src/simcore_service_director/rest/node_validator.py b/services/director/src/simcore_service_director/rest/node_validator.py index 6aee14bea7e..45c07732c9a 100644 --- a/services/director/src/simcore_service_director/rest/node_validator.py +++ b/services/director/src/simcore_service_director/rest/node_validator.py @@ -19,12 +19,12 
@@ def is_service_valid(service: Dict): except ValidationError as exc: log.debug("Node validation error: %s", exc.message) return False - except SchemaError: + except SchemaError as exc: log.exception("Schema error:") raise exceptions.DirectorException( "Incorrect json schema used from %s" % (resources.get_path(resources.RESOURCE_NODE_SCHEMA)) - ) + ) from exc def validate_nodes(services: List[Dict]): diff --git a/services/sidecar/src/simcore_service_sidecar/exceptions.py b/services/sidecar/src/simcore_service_sidecar/exceptions.py index 1c7dc3e77c3..eab12334915 100644 --- a/services/sidecar/src/simcore_service_sidecar/exceptions.py +++ b/services/sidecar/src/simcore_service_sidecar/exceptions.py @@ -5,16 +5,14 @@ class SidecarException(Exception): """Basic exception for errors raised with sidecar""" def __init__(self, msg: Optional[str] = None): - if msg is None: - msg = "Unexpected error occurred in director subpackage" - super(SidecarException, self).__init__(msg) + super().__init__(msg or "Unexpected error occurred in director subpackage") class DatabaseError(SidecarException): """Service was not found in swarm""" def __init__(self, msg: str): - super(DatabaseError, self).__init__(msg) + super().__init__(msg) class TaskNotFound(SidecarException): @@ -27,6 +25,4 @@ def __init__(self, msg: str): class MoreThenOneItemDetected(Exception): """Raised during the docker's container_id validation""" def __init__(self, msg: Optional[str] = None): - if msg is None: - msg = "Unexpected error occurred in director subpackage" - super().__init__(msg) + super().__init__(msg or "Unexpected error occurred in director subpackage") diff --git a/services/web/server/src/simcore_service_webserver/computation_handlers.py b/services/web/server/src/simcore_service_webserver/computation_handlers.py index 1c74009dae7..e231259e8c3 100644 --- a/services/web/server/src/simcore_service_webserver/computation_handlers.py +++ 
b/services/web/server/src/simcore_service_webserver/computation_handlers.py @@ -53,8 +53,8 @@ async def update_pipeline(request: web.Request) -> web.Response: try: project = await get_project_for_user(request.app, project_id, user_id) await update_pipeline_db(request.app, project_id, project["workbench"]) - except ProjectNotFoundError: - raise web.HTTPNotFound(reason=f"Project {project_id} not found") + except ProjectNotFoundError as exc: + raise web.HTTPNotFound(reason=f"Project {project_id} not found") from exc raise web.HTTPNoContent() @@ -72,8 +72,8 @@ async def start_pipeline(request: web.Request) -> web.Response: try: project = await get_project_for_user(request.app, project_id, user_id) await update_pipeline_db(request.app, project_id, project["workbench"]) - except ProjectNotFoundError: - raise web.HTTPNotFound(reason=f"Project {project_id} not found") + except ProjectNotFoundError as exc: + raise web.HTTPNotFound(reason=f"Project {project_id} not found") from exc # commit the tasks to celery _ = get_celery(request.app).send_task( diff --git a/services/web/server/src/simcore_service_webserver/diagnostics_core.py b/services/web/server/src/simcore_service_webserver/diagnostics_core.py index ea4ea8fa051..5706b2cf8bc 100644 --- a/services/web/server/src/simcore_service_webserver/diagnostics_core.py +++ b/services/web/server/src/simcore_service_webserver/diagnostics_core.py @@ -1,5 +1,6 @@ import logging import statistics +import time from typing import List, Optional import attr @@ -16,10 +17,13 @@ kMAX_TASK_DELAY = f"{__name__}.max_task_delay" kLATENCY_PROBE = f"{__name__}.latency_probe" +kPLUGIN_START_TIME = f"{__name__}.plugin_start_time" + +kSTART_SENSING_DELAY_SECS = f"{__name__}.start_sensing_delay" class HealthError(Exception): - pass + """ Service is set as unhealty """ class IncidentsRegistry(LimitedOrderedStack[SlowCallback]): @@ -30,7 +34,7 @@ def max_delay(self) -> float: @attr.s(auto_attribs=True) class DelayWindowProbe: """ - Collects a window of 
delay samples that satisfy + Collects a window of delay samples that satisfy some conditions (see observe code) """ @@ -52,6 +56,21 @@ def value(self) -> float: return 0 +def is_sensing_enabled(app: web.Application): + """ Diagnostics will not activate sensing inmediatly but after some + time since the app started + """ + time_elapsed_since_setup = time.time() - app[kPLUGIN_START_TIME] + enabled = time_elapsed_since_setup > app[kSTART_SENSING_DELAY_SECS] + if enabled: + log.debug( + "Diagnostics starts sensing after waiting %3.2f secs [> %3.2f secs] since submodule init", + time_elapsed_since_setup, + app[kSTART_SENSING_DELAY_SECS], + ) + return enabled + + def assert_healthy_app(app: web.Application) -> None: """ Diagnostics function that determins whether current application is healthy based on incidents @@ -59,10 +78,15 @@ def assert_healthy_app(app: web.Application) -> None: raises DiagnosticError if any incient detected """ - # CRITERIA 1: incidents: Optional[IncidentsRegistry] = app.get(kINCIDENTS_REGISTRY) if incidents: + + if not is_sensing_enabled(app): + # NOTE: this is the only way to avoid accounting + # before sensing is enabled + incidents.clear() + max_delay_allowed: float = app[kMAX_TASK_DELAY] max_delay: float = incidents.max_delay() diff --git a/services/web/server/src/simcore_service_webserver/diagnostics_monitoring.py b/services/web/server/src/simcore_service_webserver/diagnostics_monitoring.py index 6acd6f234f9..d926c16daa3 100644 --- a/services/web/server/src/simcore_service_webserver/diagnostics_monitoring.py +++ b/services/web/server/src/simcore_service_webserver/diagnostics_monitoring.py @@ -12,7 +12,7 @@ from servicelib.monitor_services import add_instrumentation -from .diagnostics_core import DelayWindowProbe, kLATENCY_PROBE +from .diagnostics_core import DelayWindowProbe, is_sensing_enabled, kLATENCY_PROBE log = logging.getLogger(__name__) @@ -63,6 +63,7 @@ async def _middleware_handler(request: web.Request, handler): # Transforms 
unhandled exceptions into responses with status 500 # NOTE: Prevents issue #1025 resp = web.HTTPInternalServerError(reason=str(exc)) + resp.__cause__ = exc log_exception = exc finally: @@ -75,7 +76,9 @@ async def _middleware_handler(request: web.Request, handler): # Probes request latency # NOTE: sockets connection is long # FIXME: tmp by hand, add filters directly in probe - if not str(request.path).startswith("/socket.io"): + if not str(request.path).startswith("/socket.io") and is_sensing_enabled( + request.app + ): request.app[kLATENCY_PROBE].observe(resp_time_secs) # prometheus probes diff --git a/services/web/server/src/simcore_service_webserver/diagnostics_plugin.py b/services/web/server/src/simcore_service_webserver/diagnostics_plugin.py index cb614baa874..7ce7db2b977 100644 --- a/services/web/server/src/simcore_service_webserver/diagnostics_plugin.py +++ b/services/web/server/src/simcore_service_webserver/diagnostics_plugin.py @@ -4,6 +4,7 @@ from typing import Optional from aiohttp import web +import time from servicelib import monitor_slow_callbacks from servicelib.application_setup import ModuleCategory, app_module_setup @@ -13,6 +14,8 @@ kINCIDENTS_REGISTRY, kMAX_AVG_RESP_LATENCY, kMAX_TASK_DELAY, + kPLUGIN_START_TIME, + kSTART_SENSING_DELAY_SECS ) from .diagnostics_entrypoints import create_rest_routes from .diagnostics_monitoring import setup_monitoring @@ -34,9 +37,11 @@ def setup_diagnostics( slow_duration_secs: Optional[float] = None, max_task_delay: Optional[float] = None, max_avg_response_latency: Optional[float] = None, + start_sensing_delay: Optional[float] = 60 ): # NOTE: keep all environs getters inside setup so they can be patched easier for testing + # # Any task blocked more than slow_duration_secs is logged as WARNING # Aims to identify possible blocking calls @@ -72,6 +77,14 @@ def setup_diagnostics( app[kMAX_AVG_RESP_LATENCY] = max_avg_response_latency log.info("max_avg_response_latency = %3.2f secs ", max_avg_response_latency) + # + # 
Time to start sensinng (secs) for diagnostics since this modules inits + # + app[kSTART_SENSING_DELAY_SECS] = start_sensing_delay + log.info("start_sensing_delay = %3.2f secs ", start_sensing_delay) + + # ------- + # # TODO: redesign ... too convoluted!! registry = IncidentsRegistry(order_by=attrgetter("delay_secs")) @@ -85,3 +98,5 @@ def setup_diagnostics( # adds other diagnostic routes: healthcheck, etc routes = create_rest_routes(specs=app[APP_OPENAPI_SPECS_KEY]) app.router.add_routes(routes) + + app[kPLUGIN_START_TIME] = time.time() diff --git a/services/web/server/src/simcore_service_webserver/director/director_exceptions.py b/services/web/server/src/simcore_service_webserver/director/director_exceptions.py index d90e6839b09..e08cd6d5794 100644 --- a/services/web/server/src/simcore_service_webserver/director/director_exceptions.py +++ b/services/web/server/src/simcore_service_webserver/director/director_exceptions.py @@ -4,7 +4,7 @@ class DirectorException(Exception): def __init__(self, msg=None): if msg is None: msg = "Unexpected error occured in director subpackage" - super(DirectorException, self).__init__(msg) + super().__init__(msg) class ServiceNotFoundError(DirectorException): @@ -12,5 +12,5 @@ class ServiceNotFoundError(DirectorException): def __init__(self, service_uuid): msg = "Service with uuid {} not found".format(service_uuid) - super(ServiceNotFoundError, self).__init__(msg) + super().__init__(msg) self.service_uuid = service_uuid diff --git a/services/web/server/src/simcore_service_webserver/groups_handlers.py b/services/web/server/src/simcore_service_webserver/groups_handlers.py index b2f59afb714..7b2de41553f 100644 --- a/services/web/server/src/simcore_service_webserver/groups_handlers.py +++ b/services/web/server/src/simcore_service_webserver/groups_handlers.py @@ -37,8 +37,8 @@ async def get_group(request: web.Request): gid = request.match_info["gid"] try: return await groups_api.get_user_group(request.app, user_id, gid) - except 
GroupNotFoundError: - raise web.HTTPNotFound(reason=f"Group {gid} not found") + except GroupNotFoundError as exc: + raise web.HTTPNotFound(reason=f"Group {gid} not found") from exc @login_required @@ -52,8 +52,8 @@ async def create_group(request: web.Request): raise web.HTTPCreated( text=json.dumps({"data": new_group}), content_type="application/json" ) - except UserNotFoundError: - raise web.HTTPNotFound(reason=f"User {user_id} not found") + except UserNotFoundError as exc: + raise web.HTTPNotFound(reason=f"User {user_id} not found") from exc @login_required @@ -67,10 +67,10 @@ async def update_group(request: web.Request): return await groups_api.update_user_group( request.app, user_id, gid, new_group_values ) - except GroupNotFoundError: - raise web.HTTPNotFound(reason=f"Group {gid} not found") - except UserInsufficientRightsError: - raise web.HTTPForbidden() + except GroupNotFoundError as exc: + raise web.HTTPNotFound(reason=f"Group {gid} not found") from exc + except UserInsufficientRightsError as exc: + raise web.HTTPForbidden() from exc @login_required @@ -81,10 +81,10 @@ async def delete_group(request: web.Request): try: await groups_api.delete_user_group(request.app, user_id, gid) raise web.HTTPNoContent() - except GroupNotFoundError: - raise web.HTTPNotFound(reason=f"Group {gid} not found") - except UserInsufficientRightsError: - raise web.HTTPForbidden() + except GroupNotFoundError as exc: + raise web.HTTPNotFound(reason=f"Group {gid} not found") from exc + except UserInsufficientRightsError as exc: + raise web.HTTPForbidden() from exc # groups/{gid}/users -------------------------------------------- @@ -95,10 +95,10 @@ async def get_group_users(request: web.Request): gid = request.match_info["gid"] try: return await groups_api.list_users_in_group(request.app, user_id, gid) - except GroupNotFoundError: - raise web.HTTPNotFound(reason=f"Group {gid} not found") - except UserInsufficientRightsError: - raise web.HTTPForbidden() + except GroupNotFoundError as 
exc: + raise web.HTTPNotFound(reason=f"Group {gid} not found") from exc + except UserInsufficientRightsError as exc: + raise web.HTTPForbidden() from exc @login_required @@ -123,12 +123,12 @@ async def add_group_user(request: web.Request): new_user_email=new_user_email, ) raise web.HTTPNoContent() - except GroupNotFoundError: - raise web.HTTPNotFound(reason=f"Group {gid} not found") - except UserInGroupNotFoundError: - raise web.HTTPNotFound(reason=f"User not found in group {gid}") - except UserInsufficientRightsError: - raise web.HTTPForbidden() + except GroupNotFoundError as exc: + raise web.HTTPNotFound(reason=f"Group {gid} not found") from exc + except UserInGroupNotFoundError as exc: + raise web.HTTPNotFound(reason=f"User not found in group {gid}") from exc + except UserInsufficientRightsError as exc: + raise web.HTTPForbidden() from exc @login_required @@ -141,12 +141,12 @@ async def get_group_user(request: web.Request): return await groups_api.get_user_in_group( request.app, user_id, gid, the_user_id_in_group ) - except GroupNotFoundError: - raise web.HTTPNotFound(reason=f"Group {gid} not found") - except UserInGroupNotFoundError: - raise web.HTTPNotFound(reason=f"User {the_user_id_in_group} not found") - except UserInsufficientRightsError: - raise web.HTTPForbidden() + except GroupNotFoundError as exc: + raise web.HTTPNotFound(reason=f"Group {gid} not found") from exc + except UserInGroupNotFoundError as exc: + raise web.HTTPNotFound(reason=f"User {the_user_id_in_group} not found") from exc + except UserInsufficientRightsError as exc: + raise web.HTTPForbidden() from exc @login_required @@ -164,12 +164,12 @@ async def update_group_user(request: web.Request): the_user_id_in_group, new_values_for_user_in_group, ) - except GroupNotFoundError: - raise web.HTTPNotFound(reason=f"Group {gid} not found") - except UserInGroupNotFoundError: - raise web.HTTPNotFound(reason=f"User {the_user_id_in_group} not found") - except UserInsufficientRightsError: - raise 
web.HTTPForbidden() + except GroupNotFoundError as exc: + raise web.HTTPNotFound(reason=f"Group {gid} not found") from exc + except UserInGroupNotFoundError as exc: + raise web.HTTPNotFound(reason=f"User {the_user_id_in_group} not found") from exc + except UserInsufficientRightsError as exc: + raise web.HTTPForbidden() from exc @login_required @@ -183,12 +183,12 @@ async def delete_group_user(request: web.Request): request.app, user_id, gid, the_user_id_in_group ) raise web.HTTPNoContent() - except GroupNotFoundError: - raise web.HTTPNotFound(reason=f"Group {gid} not found") - except UserInGroupNotFoundError: - raise web.HTTPNotFound(reason=f"User {the_user_id_in_group} not found") - except UserInsufficientRightsError: - raise web.HTTPForbidden() + except GroupNotFoundError as exc: + raise web.HTTPNotFound(reason=f"Group {gid} not found") from exc + except UserInGroupNotFoundError as exc: + raise web.HTTPNotFound(reason=f"User {the_user_id_in_group} not found") from exc + except UserInsufficientRightsError as exc: + raise web.HTTPForbidden() from exc # groups/{gid}/classifiers -------------------------------------------- diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_exceptions.py b/services/web/server/src/simcore_service_webserver/projects/projects_exceptions.py index 15a9eb40fa0..c79c1154009 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_exceptions.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_exceptions.py @@ -5,19 +5,16 @@ class ProjectsException(Exception): """Basic exception for errors raised in projects""" def __init__(self, msg=None): - if msg is None: - msg = "Unexpected error occured in projects subpackage" - super(ProjectsException, self).__init__(msg) + super().__init__(msg or "Unexpected error occured in projects submodule") class ProjectInvalidRightsError(ProjectsException): """Invalid rights to access project""" def __init__(self, user_id, 
project_uuid): - msg = "User {} has no rights to access project with uuid {}".format( - user_id, project_uuid + super().__init__( + f"User {user_id} has no rights to access project with uuid {project_uuid}" ) - super(ProjectInvalidRightsError, self).__init__(msg) self.user_id = user_id self.project_uuid = project_uuid @@ -26,8 +23,7 @@ class ProjectNotFoundError(ProjectsException): """Project was not found in DB""" def __init__(self, project_uuid): - msg = "Project with uuid {} not found".format(project_uuid) - super(ProjectNotFoundError, self).__init__(msg) + super().__init__(f"Project with uuid {project_uuid} not found") self.project_uuid = project_uuid @@ -35,7 +31,6 @@ class NodeNotFoundError(ProjectsException): """Node was not found in project""" def __init__(self, project_uuid: str, node_uuid: str): - msg = f"Node {node_uuid} not found in project {project_uuid}" - super(NodeNotFoundError, self).__init__(msg) + super().__init__(f"Node {node_uuid} not found in project {project_uuid}") self.node_uuid = node_uuid self.project_uuid = project_uuid diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py b/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py index 2b8ba73cb5d..7efe7fc8749 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_handlers.py @@ -101,12 +101,12 @@ async def create_projects(request: web.Request): # This is a new project and every new graph needs to be reflected in the pipeline db await update_pipeline_db(request.app, project["uuid"], project["workbench"]) - except ValidationError: - raise web.HTTPBadRequest(reason="Invalid project data") - except ProjectNotFoundError: - raise web.HTTPNotFound(reason="Project not found") - except ProjectInvalidRightsError: - raise web.HTTPUnauthorized + except ValidationError as exc: + raise web.HTTPBadRequest(reason="Invalid project 
data") from exc + except ProjectNotFoundError as exc: + raise web.HTTPNotFound(reason="Project not found") from exc + except ProjectInvalidRightsError as exc: + raise web.HTTPUnauthorized from exc else: raise web.HTTPCreated(text=json.dumps(project), content_type="application/json") @@ -169,12 +169,12 @@ async def get_project(request: web.Request): ) return {"data": project} - except ProjectInvalidRightsError: + except ProjectInvalidRightsError as exc: raise web.HTTPForbidden( reason=f"You do not have sufficient rights to read project {project_uuid}" - ) - except ProjectNotFoundError: - raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") + ) from exc + except ProjectNotFoundError as exc: + raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") from exc @login_required @@ -233,15 +233,15 @@ async def replace_project(request: web.Request): request.app, project_uuid, new_project["workbench"], replace_pipeline ) - except ValidationError: - raise web.HTTPBadRequest + except ValidationError as exc: + raise web.HTTPBadRequest from exc - except ProjectInvalidRightsError: + except ProjectInvalidRightsError as exc: raise web.HTTPForbidden( reason="You do not have sufficient rights to save the project" - ) - except ProjectNotFoundError: - raise web.HTTPNotFound + ) from exc + except ProjectNotFoundError as exc: + raise web.HTTPNotFound from exc return {"data": new_project} @@ -277,12 +277,12 @@ async def delete_project(request: web.Request): raise web.HTTPForbidden(reason=message) await projects_api.delete_project(request, project_uuid, user_id) - except ProjectInvalidRightsError: + except ProjectInvalidRightsError as err: raise web.HTTPForbidden( reason="You do not have sufficient rights to delete this project" - ) - except ProjectNotFoundError: - raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") + ) from err + except ProjectNotFoundError as err: + raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") from err raise 
web.HTTPNoContent(content_type="application/json") @@ -325,10 +325,10 @@ async def try_add_project() -> Optional[Set[int]]: if other_users: return other_users await rt.add("project_id", project_uuid) - except aioredlock.LockError: + except aioredlock.LockError as exc: # TODO: this lock is not a good solution for long term # maybe a project key in redis might improve spped of checking - raise HTTPLocked(reason="Project is locked") + raise HTTPLocked(reason="Project is locked") from exc other_users = await try_add_project() if other_users: @@ -348,8 +348,8 @@ async def try_add_project() -> Optional[Set[int]]: request.app, project, project_state ) return web.json_response({"data": project}) - except ProjectNotFoundError: - raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") + except ProjectNotFoundError as exc: + raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") from exc @login_required @@ -393,8 +393,8 @@ async def _close_project_task() -> None: fire_and_forget_task(_close_project_task()) raise web.HTTPNoContent(content_type="application/json") - except ProjectNotFoundError: - raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") + except ProjectNotFoundError as exc: + raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") from exc @login_required @@ -449,8 +449,8 @@ async def get_active_project(request: web.Request) -> web.Response: ) return web.json_response({"data": project}) - except ProjectNotFoundError: - raise web.HTTPNotFound(reason="Project not found") + except ProjectNotFoundError as exc: + raise web.HTTPNotFound(reason="Project not found") from exc @login_required @@ -482,8 +482,8 @@ async def create_node(request: web.Request) -> web.Response: ) } return web.json_response({"data": data}, status=web.HTTPCreated.status_code) - except ProjectNotFoundError: - raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") + except ProjectNotFoundError as exc: + raise web.HTTPNotFound(reason=f"Project 
{project_uuid} not found") from exc @login_required @@ -508,8 +508,8 @@ async def get_node(request: web.Request) -> web.Response: request, project_uuid, user_id, node_uuid ) return web.json_response({"data": node_details}) - except ProjectNotFoundError: - raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") + except ProjectNotFoundError as exc: + raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") from exc @login_required @@ -535,8 +535,8 @@ async def delete_node(request: web.Request) -> web.Response: ) raise web.HTTPNoContent(content_type="application/json") - except ProjectNotFoundError: - raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") + except ProjectNotFoundError as exc: + raise web.HTTPNotFound(reason=f"Project {project_uuid} not found") from exc @login_required diff --git a/services/web/server/src/simcore_service_webserver/publication_handlers.py b/services/web/server/src/simcore_service_webserver/publication_handlers.py index 752e2ff7b86..ecdff196a0c 100644 --- a/services/web/server/src/simcore_service_webserver/publication_handlers.py +++ b/services/web/server/src/simcore_service_webserver/publication_handlers.py @@ -78,8 +78,8 @@ async def service_submission(request: web.Request): }, attachments=attachments, ) - except Exception: + except Exception as exc: log.exception("Error while sending the 'new service submission' mail.") - raise web.HTTPServiceUnavailable() + raise web.HTTPServiceUnavailable() from exc raise web.HTTPNoContent(content_type="application/json") diff --git a/services/web/server/src/simcore_service_webserver/socketio/handlers.py b/services/web/server/src/simcore_service_webserver/socketio/handlers.py index 74811730f2f..61bb9268ffe 100644 --- a/services/web/server/src/simcore_service_webserver/socketio/handlers.py +++ b/services/web/server/src/simcore_service_webserver/socketio/handlers.py @@ -47,10 +47,10 @@ async def connect(sid: str, environ: Dict, app: web.Application) -> bool: try: 
await authenticate_user(sid, app, request) await set_user_in_rooms(sid, app, request) - except web.HTTPUnauthorized: - raise SocketIOConnectionError("authentification failed") + except web.HTTPUnauthorized as exc: + raise SocketIOConnectionError("authentification failed") from exc except Exception as exc: # pylint: disable=broad-except - raise SocketIOConnectionError(f"Unexpected error: {exc}") + raise SocketIOConnectionError(f"Unexpected error: {exc}") from exc # Send service_deletion_timeout to client # the interval should be < get_service_deletion_timeout(app) to avoid diff --git a/services/web/server/src/simcore_service_webserver/studies_access.py b/services/web/server/src/simcore_service_webserver/studies_access.py index b69282f7c42..3b1083239b4 100644 --- a/services/web/server/src/simcore_service_webserver/studies_access.py +++ b/services/web/server/src/simcore_service_webserver/studies_access.py @@ -195,7 +195,7 @@ async def access_study(request: web.Request) -> web.Response: log.debug("Study %s copied", copied_project_id) - except Exception: # pylint: disable=broad-except + except Exception as exc: # pylint: disable=broad-except log.exception( "Failed while copying project '%s' to '%s'", template_project.get("name"), @@ -203,7 +203,7 @@ async def access_study(request: web.Request) -> web.Response: ) raise web.HTTPInternalServerError( reason=compose_error_msg("Unable to copy project.") - ) + ) from exc try: redirect_url = ( @@ -211,13 +211,13 @@ async def access_study(request: web.Request) -> web.Response: .url_for() .with_fragment("/study/{}".format(copied_project_id)) ) - except KeyError: + except KeyError as exc: log.exception( "Cannot redirect to website because route was not registered. 
Probably qx output was not ready and it was disabled (see statics.py)" ) raise web.HTTPInternalServerError( reason=compose_error_msg("Unable to serve front-end.") - ) + ) from exc response = web.HTTPFound(location=redirect_url) if is_anonymous_user: diff --git a/services/web/server/src/simcore_service_webserver/users_handlers.py b/services/web/server/src/simcore_service_webserver/users_handlers.py index ec96851a1f9..09ab4e32c29 100644 --- a/services/web/server/src/simcore_service_webserver/users_handlers.py +++ b/services/web/server/src/simcore_service_webserver/users_handlers.py @@ -20,9 +20,9 @@ async def get_my_profile(request: web.Request): uid = request[RQT_USERID_KEY] try: return await users_api.get_user_profile(request.app, uid) - except UserNotFoundError: + except UserNotFoundError as exc: # NOTE: invalid user_id could happen due to timed-cache in AuthorizationPolicy - raise web.HTTPNotFound(reason="Could not find profile!") + raise web.HTTPNotFound(reason="Could not find profile!") from exc @login_required @@ -99,5 +99,5 @@ async def delete_token(request: web.Request): try: await users_api.delete_token(request.app, uid, service_id) raise web.HTTPNoContent(content_type="application/json") - except TokenNotFoundError: - raise web.HTTPNotFound(reason=f"Token for {service_id} not found") + except TokenNotFoundError as exc: + raise web.HTTPNotFound(reason=f"Token for {service_id} not found") from exc diff --git a/services/web/server/tests/unit/isolated/test_activity.py b/services/web/server/tests/unit/isolated/test_activity.py index cb3a3c5f6d1..5dbe95960f2 100644 --- a/services/web/server/tests/unit/isolated/test_activity.py +++ b/services/web/server/tests/unit/isolated/test_activity.py @@ -3,7 +3,6 @@ # pylint:disable=redefined-outer-name import importlib -from asyncio import Future from pathlib import Path import pytest @@ -12,6 +11,7 @@ from aiohttp.client_exceptions import ClientConnectionError from pytest_simcore.helpers.utils_assert import assert_status 
+from pytest_simcore.helpers.utils_mock import future_with_result from servicelib.application import create_safe_application from simcore_service_webserver.activity import handlers, setup_activity from simcore_service_webserver.rest import setup_rest @@ -19,12 +19,6 @@ from simcore_service_webserver.session import setup_session -def future_with_result(result): - f = Future() - f.set_result(result) - return f - - @pytest.fixture def mocked_login_required(mocker): mock = mocker.patch( diff --git a/services/web/server/tests/unit/isolated/test_diagnostics.py b/services/web/server/tests/unit/isolated/test_diagnostics.py new file mode 100644 index 00000000000..d26153a3b68 --- /dev/null +++ b/services/web/server/tests/unit/isolated/test_diagnostics.py @@ -0,0 +1,66 @@ +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name +# pylint:disable=no-name-in-module + +from typing import Dict +from unittest.mock import Mock + +import pytest + +from servicelib.application_keys import APP_CONFIG_KEY, APP_OPENAPI_SPECS_KEY +from servicelib.application_setup import APP_SETUP_KEY +from simcore_service_webserver.diagnostics_plugin import setup_diagnostics +from simcore_service_webserver.rest import get_openapi_specs_path, load_openapi_specs + + +class App(dict): + middlewares = [] + router = Mock() + _overriden = [] + + def __setitem__(self, key, value): + if key in self: + current_value = self.__getitem__(key) + self._overriden.append((key, value)) + + print(f"ERROR app['{key}'] = {current_value} overriden with {value}") + + super().__setitem__(key, value) + + def assert_none_overriden(self): + assert not self._overriden + + +@pytest.fixture +def openapi_specs(api_version_prefix) -> Dict: + spec_path = get_openapi_specs_path(api_version_prefix) + return load_openapi_specs(spec_path) + + +@pytest.fixture +def app_mock(openapi_specs): + app = App() + + # some inits to emulate app setup + app[APP_CONFIG_KEY] = { + "diagnostics": {"diagnostics.enabled": True}, + } + + # 
some inits to emulate simcore_service_webserver.rest setup + app[APP_SETUP_KEY] = ["simcore_service_webserver.rest"] + app[APP_OPENAPI_SPECS_KEY] = openapi_specs + + return app + + +def test_unique_application_keys(app_mock, openapi_specs): + # this module has A LOT of constants and it is easy to override them + + setup_diagnostics(app_mock) + + for key, value in app_mock.items(): + print(f"app['{key}'] = {value}") + + assert any(key for key in app_mock if "diagnostics" in key) + + app_mock.assert_none_overriden() diff --git a/services/web/server/tests/unit/isolated/test_healthcheck.py b/services/web/server/tests/unit/isolated/test_healthcheck.py index 4a13aa8c51e..a54718ea7b6 100644 --- a/services/web/server/tests/unit/isolated/test_healthcheck.py +++ b/services/web/server/tests/unit/isolated/test_healthcheck.py @@ -125,6 +125,7 @@ async def delay_response(request: web.Request): slow_duration_secs=SLOW_HANDLER_DELAY_SECS / 10, max_task_delay=SLOW_HANDLER_DELAY_SECS, max_avg_response_latency=2.0, + start_sensing_delay=0 # inmidiately! 
) assert app[kMAX_AVG_RESP_LATENCY] == 2.0 diff --git a/services/web/server/tests/unit/with_dbs/_helpers.py b/services/web/server/tests/unit/with_dbs/_helpers.py index ed826046a94..5da6a29e1e3 100644 --- a/services/web/server/tests/unit/with_dbs/_helpers.py +++ b/services/web/server/tests/unit/with_dbs/_helpers.py @@ -63,9 +63,3 @@ def standard_role_response() -> Tuple[str, List[Tuple[UserRole, ExpectedResponse ), ], ) - - -def future_with_result(result) -> Future: - f = Future() - f.set_result(result) - return f diff --git a/services/web/server/tests/unit/with_dbs/conftest.py b/services/web/server/tests/unit/with_dbs/conftest.py index e3b59b16ce5..61ddbc6a2fc 100644 --- a/services/web/server/tests/unit/with_dbs/conftest.py +++ b/services/web/server/tests/unit/with_dbs/conftest.py @@ -31,8 +31,8 @@ import simcore_service_webserver.utils from pytest_simcore.helpers.utils_assert import assert_status from pytest_simcore.helpers.utils_login import NewUser +from pytest_simcore.helpers.utils_mock import future_with_result from servicelib.aiopg_utils import DSN -from servicelib.rest_responses import unwrap_envelope from simcore_service_webserver.application import create_application from simcore_service_webserver.application_config import app_schema as app_schema from simcore_service_webserver.groups_api import ( @@ -46,6 +46,9 @@ current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent +# DEPLOYED SERVICES FOR TESTSUITE SESSION ----------------------------------- + + @pytest.fixture(scope="session") def default_app_cfg(osparc_simcore_root_dir, fake_static_dir): # NOTE: ONLY used at the session scopes @@ -67,18 +70,6 @@ def default_app_cfg(osparc_simcore_root_dir, fake_static_dir): return cfg_dict -@pytest.fixture(scope="function") -def app_cfg(default_app_cfg, aiohttp_unused_port): - cfg = deepcopy(default_app_cfg) - - # fills ports on the fly - cfg["main"]["port"] = aiohttp_unused_port() - cfg["storage"]["port"] = aiohttp_unused_port() - 
- # this fixture can be safely modified during test since it is renovated on every call - return cfg - - @pytest.fixture(scope="session") def docker_compose_file(default_app_cfg): """ Overrides pytest-docker fixture @@ -101,45 +92,22 @@ def docker_compose_file(default_app_cfg): os.environ = old -@pytest.fixture(scope="session") -def postgres_dsn(docker_services, docker_ip, default_app_cfg: Dict) -> Dict: - cfg = deepcopy(default_app_cfg["db"]["postgres"]) - cfg["host"] = docker_ip - cfg["port"] = docker_services.port_for("postgres", 5432) - return cfg - - -@pytest.fixture(scope="session") -def postgres_service(docker_services, postgres_dsn): - url = DSN.format(**postgres_dsn) - - # Wait until service is responsive. - docker_services.wait_until_responsive( - check=lambda: is_postgres_responsive(url), timeout=30.0, pause=0.1, - ) - - return url - - -@pytest.fixture -def postgres_db( - app_cfg: Dict, postgres_dsn: Dict, postgres_service: str -) -> sa.engine.Engine: - url = postgres_service +# WEB SERVER/CLIENT FIXTURES ------------------------------------------------ - # Configures db and initializes tables - pg_cli.discover.callback(**postgres_dsn) - pg_cli.upgrade.callback("head") - # Uses syncrounous engine for that - engine = sa.create_engine(url, isolation_level="AUTOCOMMIT") - yield engine +@pytest.fixture(scope="function") +def app_cfg(default_app_cfg, aiohttp_unused_port): + """ Can be overriden in any test module to configure + the app accordingly + """ + cfg = deepcopy(default_app_cfg) - pg_cli.downgrade.callback("base") - pg_cli.clean.callback() + # fills ports on the fly + cfg["main"]["port"] = aiohttp_unused_port() + cfg["storage"]["port"] = aiohttp_unused_port() - orm.metadata.drop_all(engine) - engine.dispose() + # this fixture can be safely modified during test since it is renovated on every call + return cfg @pytest.fixture @@ -147,8 +115,12 @@ def web_server(loop, aiohttp_server, app_cfg, monkeypatch, postgres_db): # original APP app = 
create_application(app_cfg) + from servicelib.application_keys import APP_CONFIG_KEY + import json + print("Inits webserver with config", json.dumps(app[APP_CONFIG_KEY], indent=2)) + # with patched email - path_mail(monkeypatch) + _path_mail(monkeypatch) server = loop.run_until_complete(aiohttp_server(app, port=app_cfg["main"]["port"])) return server @@ -160,6 +132,22 @@ def client(loop, aiohttp_client, web_server, mock_orphaned_services): return client +# SUBSYSTEM MOCKS FIXTURES ------------------------------------------------ +# +# Mocks entirely or part of the calls to the web-server subsystems +# + + +@pytest.fixture +def computational_system_mock(mocker): + mock_fun = mocker.patch( + "simcore_service_webserver.projects.projects_handlers.update_pipeline_db", + return_value=Future(), + ) + mock_fun.return_value.set_result("") + return mock_fun + + @pytest.fixture async def storage_subsystem_mock(loop, mocker): """ @@ -187,27 +175,135 @@ async def _mock_copy_data_from_project(*args): return mock, mock1 -# helpers --------------- +@pytest.fixture +def asyncpg_storage_system_mock(mocker): + mocked_method = mocker.patch( + "simcore_service_webserver.login.storage.AsyncpgStorage.delete_user", + return_value=Future(), + ) + mocked_method.return_value.set_result("") + return mocked_method -def path_mail(monkeypatch): - async def send_mail(*args): - print("=== EMAIL TO: {}\n=== SUBJECT: {}\n=== BODY:\n{}".format(*args)) +@pytest.fixture +def mocked_director_subsystem(mocker): + mock_director_api = { + "get_running_interactive_services": mocker.patch( + "simcore_service_webserver.director.director_api.get_running_interactive_services", + return_value=future_with_result(""), + ), + "start_service": mocker.patch( + "simcore_service_webserver.director.director_api.start_service", + return_value=future_with_result(""), + ), + "stop_service": mocker.patch( + "simcore_service_webserver.director.director_api.stop_service", + return_value=future_with_result(""), + ), + } + 
return mock_director_api - monkeypatch.setattr( - simcore_service_webserver.login.utils, "compose_mail", send_mail + +@pytest.fixture +async def mocked_director_api(loop, mocker): + mocks = {} + mocked_running_services = mocker.patch( + "simcore_service_webserver.director.director_api.get_running_interactive_services", + return_value=Future(), ) + mocked_running_services.return_value.set_result("") + mocks["get_running_interactive_services"] = mocked_running_services + mocked_stop_service = mocker.patch( + "simcore_service_webserver.director.director_api.stop_service", + return_value=Future(), + ) + mocked_stop_service.return_value.set_result("") + mocks["stop_service"] = mocked_stop_service + yield mocks -def is_postgres_responsive(url): - """Check if something responds to ``url`` """ - try: - engine = sa.create_engine(url) - conn = engine.connect() - conn.close() - except sa.exc.OperationalError: - return False - return True + +@pytest.fixture +async def mocked_dynamic_service(loop, client, mocked_director_api): + services = [] + + async def create(user_id, project_id) -> Dict: + SERVICE_UUID = str(uuid4()) + SERVICE_KEY = "simcore/services/dynamic/3d-viewer" + SERVICE_VERSION = "1.4.2" + url = client.app.router["create_node"].url_for(project_id=project_id) + create_node_data = { + "service_key": SERVICE_KEY, + "service_version": SERVICE_VERSION, + "service_uuid": SERVICE_UUID, + } + + running_service_dict = { + "published_port": "23423", + "service_uuid": SERVICE_UUID, + "service_key": SERVICE_KEY, + "service_version": SERVICE_VERSION, + "service_host": "some_service_host", + "service_port": "some_service_port", + "service_state": "some_service_state", + } + + services.append(running_service_dict) + # reset the future or an invalidStateError will appear as set_result sets the future to done + mocked_director_api["get_running_interactive_services"].return_value = Future() + mocked_director_api["get_running_interactive_services"].return_value.set_result( + 
services + ) + return running_service_dict + + return create + + +# POSTGRES CORE SERVICE --------------------------------------------------- + + +@pytest.fixture(scope="session") +def postgres_dsn(docker_services, docker_ip, default_app_cfg: Dict) -> Dict: + cfg = deepcopy(default_app_cfg["db"]["postgres"]) + cfg["host"] = docker_ip + cfg["port"] = docker_services.port_for("postgres", 5432) + return cfg + + +@pytest.fixture(scope="session") +def postgres_service(docker_services, postgres_dsn): + url = DSN.format(**postgres_dsn) + + # Wait until service is responsive. + docker_services.wait_until_responsive( + check=lambda: _is_postgres_responsive(url), timeout=30.0, pause=0.1, + ) + + return url + + +@pytest.fixture +def postgres_db( + app_cfg: Dict, postgres_dsn: Dict, postgres_service: str +) -> sa.engine.Engine: + url = postgres_service + + # Configures db and initializes tables + pg_cli.discover.callback(**postgres_dsn) + pg_cli.upgrade.callback("head") + # Uses syncrounous engine for that + engine = sa.create_engine(url, isolation_level="AUTOCOMMIT") + + yield engine + + pg_cli.downgrade.callback("base") + pg_cli.clean.callback() + + orm.metadata.drop_all(engine) + engine.dispose() + + +# REDIS CORE SERVICE ------------------------------------------------------ @pytest.fixture(scope="session") @@ -218,16 +314,11 @@ def redis_service(docker_services, docker_ip): url = URL(f"redis://{host}:{port}") docker_services.wait_until_responsive( - check=lambda: is_redis_responsive(host, port), timeout=30.0, pause=0.1, + check=lambda: _is_redis_responsive(host, port), timeout=30.0, pause=0.1, ) return url -def is_redis_responsive(host: str, port: int) -> bool: - r = redis.Redis(host=host, port=port) - return r.ping() == True - - @pytest.fixture async def redis_client(loop, redis_service): client = await aioredis.create_redis_pool(str(redis_service), encoding="utf-8") @@ -238,6 +329,14 @@ async def redis_client(loop, redis_service): await client.wait_closed() +def 
_is_redis_responsive(host: str, port: int) -> bool: + r = redis.Redis(host=host, port=port) + return r.ping() == True + + +# SOCKETS FIXTURES -------------------------------------------------------- + + @pytest.fixture() def socketio_url(client) -> Callable: def create_url(client_override: Optional = None) -> str: @@ -308,59 +407,7 @@ def create() -> str(): return create -@pytest.fixture -async def mocked_director_api(loop, mocker): - mocks = {} - mocked_running_services = mocker.patch( - "simcore_service_webserver.director.director_api.get_running_interactive_services", - return_value=Future(), - ) - mocked_running_services.return_value.set_result("") - mocks["get_running_interactive_services"] = mocked_running_services - mocked_stop_service = mocker.patch( - "simcore_service_webserver.director.director_api.stop_service", - return_value=Future(), - ) - mocked_stop_service.return_value.set_result("") - mocks["stop_service"] = mocked_stop_service - - yield mocks - - -@pytest.fixture -async def mocked_dynamic_service(loop, client, mocked_director_api): - services = [] - - async def create(user_id, project_id) -> Dict: - SERVICE_UUID = str(uuid4()) - SERVICE_KEY = "simcore/services/dynamic/3d-viewer" - SERVICE_VERSION = "1.4.2" - url = client.app.router["create_node"].url_for(project_id=project_id) - create_node_data = { - "service_key": SERVICE_KEY, - "service_version": SERVICE_VERSION, - "service_uuid": SERVICE_UUID, - } - - running_service_dict = { - "published_port": "23423", - "service_uuid": SERVICE_UUID, - "service_key": SERVICE_KEY, - "service_version": SERVICE_VERSION, - "service_host": "some_service_host", - "service_port": "some_service_port", - "service_state": "some_service_state", - } - - services.append(running_service_dict) - # reset the future or an invalidStateError will appear as set_result sets the future to done - mocked_director_api["get_running_interactive_services"].return_value = Future() - 
mocked_director_api["get_running_interactive_services"].return_value.set_result( - services - ) - return running_service_dict - - return create +# USER GROUP FIXTURES ------------------------------------------------------- @pytest.fixture @@ -419,11 +466,24 @@ async def all_group(client, logged_user) -> Dict[str, str]: return all_group -@pytest.fixture -def asyncpg_storage_system_mock(mocker): - mocked_method = mocker.patch( - "simcore_service_webserver.login.storage.AsyncpgStorage.delete_user", - return_value=Future(), +# GENERIC HELPER FUNCTIONS ---------------------------------------------------- + + +def _path_mail(monkeypatch): + async def send_mail(*args): + print("=== EMAIL TO: {}\n=== SUBJECT: {}\n=== BODY:\n{}".format(*args)) + + monkeypatch.setattr( + simcore_service_webserver.login.utils, "compose_mail", send_mail ) - mocked_method.return_value.set_result("") - return mocked_method + + +def _is_postgres_responsive(url): + """Check if something responds to ``url`` """ + try: + engine = sa.create_engine(url) + conn = engine.connect() + conn.close() + except sa.exc.OperationalError: + return False + return True diff --git a/services/web/server/tests/unit/with_dbs/fast/test_garbage_collector.py b/services/web/server/tests/unit/with_dbs/fast/test_garbage_collector.py new file mode 100644 index 00000000000..f26abdc1f04 --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/fast/test_garbage_collector.py @@ -0,0 +1,63 @@ +# TODO: tests for garbage collector +# - a User with more then 2 projects +# - a user without projects +# - a user with just 1 project +# +# The user can be: +# - connected via browser (websocket connection is up) +# - disconnected (no websocket connection) +import pytest +from copy import deepcopy + +from pytest_simcore.helpers.utils_assert import assert_status +from pytest_simcore.helpers.utils_login import LoggedUser, create_user, log_client_in +from pytest_simcore.helpers.utils_projects import NewProject, delete_all_projects + + 
+DEFAULT_GARBAGE_COLLECTOR_INTERVAL_SECONDS: int = 3 +DEFAULT_GARBAGE_COLLECTOR_DELETION_TIMEOUT_SECONDS: int = 3 + + + +@pytest.fixture(scope="function") +def app_cfg(default_app_cfg, aiohttp_unused_port): + """ OVERRIDES services/web/server/tests/unit/with_dbs/conftest.py:app_cfg fixture + to create a webserver with customized config + """ + cfg = deepcopy(default_app_cfg) + + # fills ports on the fly + cfg["main"]["port"] = aiohttp_unused_port() + cfg["storage"]["port"] = aiohttp_unused_port() + + cfg["projects"]["enabled"] = True + cfg["director"]["enabled"] = True + cfg["diagnostics"]["enabled"] = False + cfg["tags"]["enabled"] = False + + cfg["resource_manager"]["enabled"] = True + cfg["resource_manager"][ + "garbage_collection_interval_seconds" + ] = DEFAULT_GARBAGE_COLLECTOR_INTERVAL_SECONDS # increase speed of garbage collection + cfg["resource_manager"][ + "resource_deletion_timeout_seconds" + ] = DEFAULT_GARBAGE_COLLECTOR_DELETION_TIMEOUT_SECONDS # reduce deletion delay + + + import logging + log_level = getattr(logging, cfg["main"]["log_level"]) + logging.root.setLevel(log_level) + # this fixture can be safely modified during test since it is renovated on every call + return cfg + +from aiohttp import web + + + +async def test_webserver_config(client, api_version_prefix): + resp = await client.get(f"/{api_version_prefix}/config") + + data, error = await assert_status(resp, web.HTTPOk) + + assert data + assert not error diff --git a/services/web/server/tests/unit/with_dbs/slow/test_projects.py b/services/web/server/tests/unit/with_dbs/slow/test_projects.py index da6a9c8428b..e1228115652 100644 --- a/services/web/server/tests/unit/with_dbs/slow/test_projects.py +++ b/services/web/server/tests/unit/with_dbs/slow/test_projects.py @@ -15,16 +15,12 @@ import socketio from aiohttp import web from mock import call -from socketio.exceptions import ConnectionError +from socketio.exceptions import ConnectionError as SocketConnectionError -from _helpers import ( - 
ExpectedResponse, - HTTPLocked, - future_with_result, - standard_role_response, -) +from _helpers import ExpectedResponse, HTTPLocked, standard_role_response from pytest_simcore.helpers.utils_assert import assert_status -from pytest_simcore.helpers.utils_login import LoggedUser, create_user, log_client_in +from pytest_simcore.helpers.utils_login import LoggedUser, log_client_in +from pytest_simcore.helpers.utils_mock import future_with_result from pytest_simcore.helpers.utils_projects import NewProject, delete_all_projects from servicelib.application import create_safe_application from simcore_service_webserver.db import setup_db @@ -54,34 +50,10 @@ API_PREFIX = "/" + API_VERSION -@pytest.fixture -def mocked_director_subsystem(mocker): - mock_director_api = { - "get_running_interactive_services": mocker.patch( - "simcore_service_webserver.director.director_api.get_running_interactive_services", - return_value=future_with_result(""), - ), - "start_service": mocker.patch( - "simcore_service_webserver.director.director_api.start_service", - return_value=future_with_result(""), - ), - "stop_service": mocker.patch( - "simcore_service_webserver.director.director_api.stop_service", - return_value=future_with_result(""), - ), - } - return mock_director_api - - DEFAULT_GARBAGE_COLLECTOR_INTERVAL_SECONDS: int = 3 DEFAULT_GARBAGE_COLLECTOR_DELETION_TIMEOUT_SECONDS: int = 3 -@pytest.fixture -def gc_long_deletion_timeout(): - DEFAULT_GARBAGE_COLLECTOR_DELETION_TIMEOUT_SECONDS = 900 - - @pytest.fixture def client( loop, @@ -188,16 +160,6 @@ async def template_project( print("<----- removed template project", template_project["name"]) -@pytest.fixture -def computational_system_mock(mocker): - mock_fun = mocker.patch( - "simcore_service_webserver.projects.projects_handlers.update_pipeline_db", - return_value=Future(), - ) - mock_fun.return_value.set_result("") - return mock_fun - - @pytest.fixture def fake_services(): def create_fakes(number_services: int) -> List[Dict]: @@ -937,7 
+899,7 @@ async def test_get_active_project( try: sio = await socketio_client(client_id1) assert sio.sid - except ConnectionError: + except SocketConnectionError: if expected == web.HTTPOk: pytest.fail("socket io connection should not fail") @@ -970,7 +932,7 @@ async def test_get_active_project( try: sio = await socketio_client(client_id2) assert sio.sid - except ConnectionError: + except SocketConnectionError: if expected == web.HTTPOk: pytest.fail("socket io connection should not fail") # get active projects -> empty @@ -1014,7 +976,7 @@ async def test_delete_multiple_opened_project_forbidden( client_session_id1 = client_session_id() try: sio1 = await socketio_client(client_session_id1) - except ConnectionError: + except SocketConnectionError: if user_role != UserRole.ANONYMOUS: pytest.fail("socket io connection should not fail") url = client.app.router["open_project"].url_for(project_id=user_project["uuid"]) @@ -1024,7 +986,7 @@ async def test_delete_multiple_opened_project_forbidden( client_session_id2 = client_session_id() try: sio2 = await socketio_client(client_session_id2) - except ConnectionError: + except SocketConnectionError: if user_role != UserRole.ANONYMOUS: pytest.fail("socket io connection should not fail") await _delete_project(client, user_project, expected_forbidden) @@ -1215,7 +1177,7 @@ async def _connect_websocket( for event, handler in events.items(): sio.on(event, handler=handler) return sio - except ConnectionError: + except SocketConnectionError: if check_connection: pytest.fail("socket io connection should not fail") diff --git a/tests/e2e/requirements/requirements.txt b/tests/e2e/requirements/requirements.txt index a4c21f10566..0b48582db89 100644 --- a/tests/e2e/requirements/requirements.txt +++ b/tests/e2e/requirements/requirements.txt @@ -6,7 +6,7 @@ # certifi==2020.6.20 # via requests chardet==3.0.4 # via requests -docker==4.3.0 # via -r requirements.in +docker==4.3.1 # via -r requirements.in idna==2.10 # via requests pyyaml==5.3.1 # 
via -r requirements.in requests==2.24.0 # via docker diff --git a/tests/swarm-deploy/requirements/requirements.txt b/tests/swarm-deploy/requirements/requirements.txt index 98bb086edb0..5899b6d9a9f 100644 --- a/tests/swarm-deploy/requirements/requirements.txt +++ b/tests/swarm-deploy/requirements/requirements.txt @@ -9,12 +9,12 @@ aiohttp==3.6.2 # via pytest-aiohttp aiormq==3.2.3 # via aio-pika alembic==1.4.2 # via -r requirements/requirements.in async-timeout==3.0.1 # via aiohttp -attrs==19.3.0 # via aiohttp, pytest +attrs==20.1.0 # via aiohttp, pytest certifi==2020.6.20 # via requests chardet==3.0.4 # via aiohttp, requests click==7.1.2 # via -r requirements/requirements.in coverage==5.2.1 # via -r requirements/requirements.in, pytest-cov -docker==4.3.0 # via -r requirements/requirements.in +docker==4.3.1 # via -r requirements/requirements.in idna-ssl==1.1.0 # via aiohttp idna==2.10 # via requests, yarl importlib-metadata==1.7.0 # via pluggy, pytest @@ -31,7 +31,7 @@ pyparsing==2.4.7 # via packaging pytest-aiohttp==0.3.0 # via -r requirements/requirements.in pytest-cov==2.10.1 # via -r requirements/requirements.in pytest-instafail==0.4.2 # via -r requirements/requirements.in -pytest-mock==3.2.0 # via -r requirements/requirements.in +pytest-mock==3.3.0 # via -r requirements/requirements.in pytest-runner==5.2 # via -r requirements/requirements.in pytest-sugar==0.9.4 # via -r requirements/requirements.in pytest==6.0.1 # via -r requirements/requirements.in, pytest-aiohttp, pytest-cov, pytest-instafail, pytest-mock, pytest-sugar @@ -41,11 +41,11 @@ python-editor==1.0.4 # via alembic pyyaml==5.3.1 # via -r requirements/requirements.in requests==2.24.0 # via docker six==1.15.0 # via docker, packaging, python-dateutil, tenacity, websocket-client -sqlalchemy==1.3.18 # via alembic +sqlalchemy==1.3.19 # via alembic tenacity==6.2.0 # via -r requirements/requirements.in termcolor==1.1.0 # via pytest-sugar toml==0.10.1 # via pytest -typing-extensions==3.7.4.2 # via aiohttp, yarl 
+typing-extensions==3.7.4.3 # via aiohttp, yarl urllib3==1.25.10 # via requests websocket-client==0.57.0 # via docker yarl==1.5.1 # via aio-pika, aiohttp, aiormq diff --git a/tests/swarm-deploy/test_service_restart.py b/tests/swarm-deploy/test_service_restart.py index a309cbd78d3..fae1a91175c 100644 --- a/tests/swarm-deploy/test_service_restart.py +++ b/tests/swarm-deploy/test_service_restart.py @@ -95,6 +95,9 @@ def test_graceful_restart_services( https://kkc.github.io/2018/06/06/gracefully-shutdown-docker-container/ https://docs.docker.com/develop/develop-images/dockerfile_best-practices/ + SEE Gracefully Stopping Docker Containers by DeHamer: https://www.ctl.io/developers/blog/post/gracefully-stopping-docker-containers/ + SEE Gracefully Shutdown Docker Container by Kakashi: https://kkc.github.io/2018/06/06/gracefully-shutdown-docker-container/ + """ service = next( s for s in deployed_simcore_stack if s.name == service_name )